1 #include "memoryorder.h"
/* CPP0X(feature) expands to nothing: it discards C++0x-only constructs
 * (e.g. the "= default" / "= delete" declarations and constexpr
 * constructors used below) so the header remains usable as pre-C++0x
 * code. */
8 #define CPP0X( feature )
/* atomic_flag: the basic test-and-set atomic type.  Member functions
 * mirror the free atomic_flag_* functions declared below; memory_order
 * parameters default to seq_cst.
 * NOTE(review): the struct's opening brace, its data field, and the
 * closing "} atomic_flag;" are not visible in this chunk -- lines appear
 * to have been dropped; verify against the complete header. */
10 typedef struct atomic_flag
13 bool test_and_set( memory_order = memory_order_seq_cst ) volatile;
14 void clear( memory_order = memory_order_seq_cst ) volatile;
15 void fence( memory_order ) const volatile;
/* C++0x-only defaulted/deleted members; compiled out pre-C++0x (CPP0X). */
17 CPP0X( atomic_flag() = default; )
18 CPP0X( atomic_flag( const atomic_flag& ) = delete; )
19 atomic_flag& operator =( const atomic_flag& ) CPP0X(=delete);
/* Aggregate initializer for atomic_flag: starts in the clear (false)
 * state. */
26 #define ATOMIC_FLAG_INIT { false }
/* C-style free-function interface for atomic_flag.  The *_explicit
 * variants take an explicit memory_order; the plain forms take none
 * (the member functions above default to seq_cst).
 * __atomic_flag_wait__ / __atomic_flag_wait_explicit__ use reserved
 * double-underscore names: implementation-internal helpers, not part of
 * the public interface. */
32 extern bool atomic_flag_test_and_set( volatile atomic_flag* );
33 extern bool atomic_flag_test_and_set_explicit
34 ( volatile atomic_flag*, memory_order );
35 extern void atomic_flag_clear( volatile atomic_flag* );
36 extern void atomic_flag_clear_explicit
37 ( volatile atomic_flag*, memory_order );
38 extern void atomic_flag_fence
39 ( const volatile atomic_flag*, memory_order );
40 extern void __atomic_flag_wait__
41 ( volatile atomic_flag* );
42 extern void __atomic_flag_wait_explicit__
43 ( volatile atomic_flag*, memory_order );
51 inline bool atomic_flag::test_and_set( memory_order __x__ ) volatile
52 { return atomic_flag_test_and_set_explicit( this, __x__ ); }
54 inline void atomic_flag::clear( memory_order __x__ ) volatile
55 { atomic_flag_clear_explicit( this, __x__ ); }
57 inline void atomic_flag::fence( memory_order __x__ ) const volatile
58 { atomic_flag_fence( this, __x__ ); }
64 The remainder of the example implementation uses the following
65 macros. These macros exploit GNU extensions for value-returning
66 blocks (AKA statement expressions) and __typeof__.
68 The macros rely on data fields of atomic structs being named __f__.
69 Other symbols used are __a__=atomic, __e__=expected, __f__=field,
70 __g__=flag, __m__=modified, __o__=operation, __r__=result,
71 __p__=pointer to field, __v__=value (for single evaluation),
72 __x__=memory-ordering, and __y__=a second memory-ordering (for operations that take two orderings).
/* _ATOMIC_LOAD_: atomic load of (__a__)->__f__ with ordering __x__,
 * written as a GNU statement expression that forwards to
 * model_read_action().
 * NOTE(review): the closing lines of the statement expression (yielding
 * __r__ and "})") are not visible in this chunk -- verify against the
 * complete header. */
75 #define _ATOMIC_LOAD_( __a__, __x__ ) \
76 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
77 __typeof__((__a__)->__f__) __r__ = (__typeof__((__a__)->__f__))model_read_action((void *)__p__, __x__); \
/* _ATOMIC_STORE_: atomic store of __m__ into (__a__)->__f__ with
 * ordering __x__ via model_write_action(); __m__ is captured once into
 * __v__ so it is evaluated exactly once.
 * NOTE(review): the closing lines of the statement expression are not
 * visible in this chunk. */
80 #define _ATOMIC_STORE_( __a__, __m__, __x__ ) \
81 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
82 __typeof__(__m__) __v__ = (__m__); \
83 model_write_action((void *) __p__, __x__, (uint64_t) __v__); \
/* _ATOMIC_INIT_: initialization of (__a__)->__f__ via
 * model_init_action(); unlike store, it takes no memory_order.
 * NOTE(review): the closing lines of the statement expression are not
 * visible in this chunk. */
87 #define _ATOMIC_INIT_( __a__, __m__ ) \
88 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
89 __typeof__(__m__) __v__ = (__m__); \
90 model_init_action((void *) __p__, (uint64_t) __v__); \
/* _ATOMIC_MODIFY_: generic atomic read-modify-write.  Fetches the old
 * value with model_rmwr_action(), applies the compound operator __o__
 * (e.g. +=) to a copy ("__copy__ __o__ __v__"), and publishes the result
 * with model_rmw_action().
 * NOTE(review): the statement expression's closing lines are not visible
 * here; presumably it yields __old__ (fetch-and-op semantics) -- confirm
 * against the complete header. */
93 #define _ATOMIC_MODIFY_( __a__, __o__, __m__, __x__ ) \
94 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
95 __typeof__((__a__)->__f__) __old__=(__typeof__((__a__)->__f__)) model_rmwr_action((void *)__p__, __x__); \
96 __typeof__(__m__) __v__ = (__m__); \
97 __typeof__((__a__)->__f__) __copy__= __old__; \
98 __copy__ __o__ __v__; \
99 model_rmw_action((void *)__p__, __x__, (uint64_t) __copy__); \
102 /* No spurious failure for now */
/* Weak compare-and-swap therefore shares the strong implementation. */
103 #define _ATOMIC_CMPSWP_WEAK_ _ATOMIC_CMPSWP_
/* _ATOMIC_CMPSWP_: strong compare-and-swap on (__a__)->__f__.
 * Reads the current value (model_rmwr_action); if it equals *__q__ (the
 * expected value) the desired value __v__ is written (model_rmw_action)
 * and __r__ is set true; otherwise the RMW is cancelled
 * (model_rmwc_action), the observed value is written back through the
 * expected pointer, and __r__ is set false.
 * NOTE(review): the declaration of __r__ and the closing of the
 * statement expression are not visible in this chunk. */
105 #define _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ) \
106 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
107 __typeof__(__e__) __q__ = (__e__); \
108 __typeof__(__m__) __v__ = (__m__); \
110 __typeof__((__a__)->__f__) __t__=(__typeof__((__a__)->__f__)) model_rmwr_action((void *)__p__, __x__); \
111 if (__t__ == * __q__ ) { \
112 model_rmw_action((void *)__p__, __x__, (uint64_t) __v__); __r__ = true; } \
113 else { model_rmwc_action((void *)__p__, __x__); *__q__ = __t__; __r__ = false;} \
/* _ATOMIC_FENCE_: emits a model-checker fence action with ordering
 * __x__.  The atomic argument __a__ is not used -- only the ordering
 * matters here. */
116 #define _ATOMIC_FENCE_( __a__, __x__ ) \
117 ({ model_fence_action(__x__);})
/* Lock-free property macros for the builtin atomic types.  All report 1
 * ("sometimes lock-free" in the C++0x draft's 0/1/2 encoding). */
120 #define ATOMIC_CHAR_LOCK_FREE 1
121 #define ATOMIC_CHAR16_T_LOCK_FREE 1
122 #define ATOMIC_CHAR32_T_LOCK_FREE 1
123 #define ATOMIC_WCHAR_T_LOCK_FREE 1
124 #define ATOMIC_SHORT_LOCK_FREE 1
125 #define ATOMIC_INT_LOCK_FREE 1
126 #define ATOMIC_LONG_LOCK_FREE 1
127 #define ATOMIC_LLONG_LOCK_FREE 1
128 #define ATOMIC_ADDRESS_LOCK_FREE 1
/* atomic_bool: atomic boolean.  Members mirror the atomic_*_explicit
 * friends below; memory_order parameters default to seq_cst.  The C++0x
 * default constructor is deleted, so an initial value must be supplied.
 * NOTE(review): the struct's opening brace, its data field and the
 * closing "} atomic_bool;" are not visible in this chunk -- verify
 * against the complete header. */
130 typedef struct atomic_bool
133 bool is_lock_free() const volatile;
134 void store( bool, memory_order = memory_order_seq_cst ) volatile;
135 bool load( memory_order = memory_order_seq_cst ) volatile;
136 bool exchange( bool, memory_order = memory_order_seq_cst ) volatile;
137 bool compare_exchange_weak ( bool&, bool, memory_order, memory_order ) volatile;
138 bool compare_exchange_strong ( bool&, bool, memory_order, memory_order ) volatile;
139 bool compare_exchange_weak ( bool&, bool,
140 memory_order = memory_order_seq_cst) volatile;
141 bool compare_exchange_strong ( bool&, bool,
142 memory_order = memory_order_seq_cst) volatile;
143 void fence( memory_order ) const volatile;
145 CPP0X( atomic_bool() = delete; )
146 CPP0X( constexpr explicit atomic_bool( bool __v__ ) : __f__( __v__ ) { } )
147 CPP0X( atomic_bool( const atomic_bool& ) = delete; )
148 atomic_bool& operator =( const atomic_bool& ) CPP0X(=delete);
/* Assignment stores and then returns the assigned value. */
150 bool operator =( bool __v__ ) volatile
151 { store( __v__ ); return __v__; }
153 friend void atomic_store_explicit( volatile atomic_bool*, bool,
155 friend bool atomic_load_explicit( volatile atomic_bool*, memory_order );
156 friend bool atomic_exchange_explicit( volatile atomic_bool*, bool,
158 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_bool*, bool*, bool,
159 memory_order, memory_order );
160 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_bool*, bool*, bool,
161 memory_order, memory_order );
162 friend void atomic_fence( const volatile atomic_bool*, memory_order );
/* atomic_address: atomic void* with ptrdiff_t arithmetic.  Members
 * mirror the atomic_*_explicit friends below; memory_order parameters
 * default to seq_cst.  Note: unlike the integer types, operator+= and
 * operator-= return fetch_add/fetch_sub directly, i.e. the OLD pointer
 * value (void* arithmetic cannot recompute the new one).
 * NOTE(review): opening brace, data field and closing
 * "} atomic_address;" are not visible in this chunk. */
170 typedef struct atomic_address
173 bool is_lock_free() const volatile;
174 void store( void*, memory_order = memory_order_seq_cst ) volatile;
175 void* load( memory_order = memory_order_seq_cst ) volatile;
176 void* exchange( void*, memory_order = memory_order_seq_cst ) volatile;
177 bool compare_exchange_weak( void*&, void*, memory_order, memory_order ) volatile;
178 bool compare_exchange_strong( void*&, void*, memory_order, memory_order ) volatile;
179 bool compare_exchange_weak( void*&, void*,
180 memory_order = memory_order_seq_cst ) volatile;
181 bool compare_exchange_strong( void*&, void*,
182 memory_order = memory_order_seq_cst ) volatile;
183 void fence( memory_order ) const volatile;
184 void* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
185 void* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
187 CPP0X( atomic_address() = default; )
188 CPP0X( constexpr explicit atomic_address( void* __v__ ) : __f__( __v__) { } )
189 CPP0X( atomic_address( const atomic_address& ) = delete; )
190 atomic_address& operator =( const atomic_address & ) CPP0X(=delete);
/* Assignment stores and then returns the assigned value. */
192 void* operator =( void* __v__ ) volatile
193 { store( __v__ ); return __v__; }
195 void* operator +=( ptrdiff_t __v__ ) volatile
196 { return fetch_add( __v__ ); }
198 void* operator -=( ptrdiff_t __v__ ) volatile
199 { return fetch_sub( __v__ ); }
201 friend void atomic_store_explicit( volatile atomic_address*, void*,
203 friend void* atomic_load_explicit( volatile atomic_address*, memory_order );
204 friend void* atomic_exchange_explicit( volatile atomic_address*, void*,
206 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_address*,
207 void**, void*, memory_order, memory_order );
208 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_address*,
209 void**, void*, memory_order, memory_order );
210 friend void atomic_fence( const volatile atomic_address*, memory_order );
211 friend void* atomic_fetch_add_explicit( volatile atomic_address*, ptrdiff_t,
213 friend void* atomic_fetch_sub_explicit( volatile atomic_address*, ptrdiff_t,
/* atomic_char: atomic character type.  Members mirror the
 * atomic_*_explicit friends below; memory_order parameters default to
 * seq_cst.  Compound-assignment and pre-increment/decrement operators
 * are built on the fetch_* primitives and return the UPDATED value
 * (e.g. "+=" returns fetch_add(v) + v); post-increment/decrement return
 * the old value.
 * NOTE(review): this chunk is missing lines (opening brace, the first
 * lines of some declarations, the data field and the closing
 * "} atomic_char;") -- verify against the complete header. */
222 typedef struct atomic_char
225 bool is_lock_free() const volatile;
227 memory_order = memory_order_seq_cst ) volatile;
228 char load( memory_order = memory_order_seq_cst ) volatile;
230 memory_order = memory_order_seq_cst ) volatile;
231 bool compare_exchange_weak( char&, char,
232 memory_order, memory_order ) volatile;
233 bool compare_exchange_strong( char&, char,
234 memory_order, memory_order ) volatile;
235 bool compare_exchange_weak( char&, char,
236 memory_order = memory_order_seq_cst ) volatile;
237 bool compare_exchange_strong( char&, char,
238 memory_order = memory_order_seq_cst ) volatile;
239 void fence( memory_order ) const volatile;
240 char fetch_add( char,
241 memory_order = memory_order_seq_cst ) volatile;
242 char fetch_sub( char,
243 memory_order = memory_order_seq_cst ) volatile;
244 char fetch_and( char,
245 memory_order = memory_order_seq_cst ) volatile;
247 memory_order = memory_order_seq_cst ) volatile;
248 char fetch_xor( char,
249 memory_order = memory_order_seq_cst ) volatile;
251 CPP0X( atomic_char() = default; )
252 CPP0X( constexpr atomic_char( char __v__ ) : __f__( __v__) { } )
253 CPP0X( atomic_char( const atomic_char& ) = delete; )
254 atomic_char& operator =( const atomic_char& ) CPP0X(=delete);
256 char operator =( char __v__ ) volatile
257 { store( __v__ ); return __v__; }
259 char operator ++( int ) volatile
260 { return fetch_add( 1 ); }
262 char operator --( int ) volatile
263 { return fetch_sub( 1 ); }
265 char operator ++() volatile
266 { return fetch_add( 1 ) + 1; }
268 char operator --() volatile
269 { return fetch_sub( 1 ) - 1; }
271 char operator +=( char __v__ ) volatile
272 { return fetch_add( __v__ ) + __v__; }
274 char operator -=( char __v__ ) volatile
275 { return fetch_sub( __v__ ) - __v__; }
277 char operator &=( char __v__ ) volatile
278 { return fetch_and( __v__ ) & __v__; }
280 char operator |=( char __v__ ) volatile
281 { return fetch_or( __v__ ) | __v__; }
283 char operator ^=( char __v__ ) volatile
284 { return fetch_xor( __v__ ) ^ __v__; }
286 friend void atomic_store_explicit( volatile atomic_char*, char,
288 friend char atomic_load_explicit( volatile atomic_char*,
290 friend char atomic_exchange_explicit( volatile atomic_char*,
291 char, memory_order );
292 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_char*,
293 char*, char, memory_order, memory_order );
294 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_char*,
295 char*, char, memory_order, memory_order );
296 friend void atomic_fence( const volatile atomic_char*, memory_order );
297 friend char atomic_fetch_add_explicit( volatile atomic_char*,
298 char, memory_order );
299 friend char atomic_fetch_sub_explicit( volatile atomic_char*,
300 char, memory_order );
301 friend char atomic_fetch_and_explicit( volatile atomic_char*,
302 char, memory_order );
303 friend char atomic_fetch_or_explicit( volatile atomic_char*,
304 char, memory_order );
305 friend char atomic_fetch_xor_explicit( volatile atomic_char*,
306 char, memory_order );
/* atomic_schar: atomic signed char.  Members mirror the
 * atomic_*_explicit friends below; memory_order parameters default to
 * seq_cst.  Compound-assignment and pre-increment/decrement operators
 * return the UPDATED value; post-increment/decrement return the old
 * value.
 * NOTE(review): opening brace, data field and closing "} atomic_schar;"
 * are not visible in this chunk. */
314 typedef struct atomic_schar
317 bool is_lock_free() const volatile;
318 void store( signed char,
319 memory_order = memory_order_seq_cst ) volatile;
320 signed char load( memory_order = memory_order_seq_cst ) volatile;
321 signed char exchange( signed char,
322 memory_order = memory_order_seq_cst ) volatile;
323 bool compare_exchange_weak( signed char&, signed char,
324 memory_order, memory_order ) volatile;
325 bool compare_exchange_strong( signed char&, signed char,
326 memory_order, memory_order ) volatile;
327 bool compare_exchange_weak( signed char&, signed char,
328 memory_order = memory_order_seq_cst ) volatile;
329 bool compare_exchange_strong( signed char&, signed char,
330 memory_order = memory_order_seq_cst ) volatile;
331 void fence( memory_order ) const volatile;
332 signed char fetch_add( signed char,
333 memory_order = memory_order_seq_cst ) volatile;
334 signed char fetch_sub( signed char,
335 memory_order = memory_order_seq_cst ) volatile;
336 signed char fetch_and( signed char,
337 memory_order = memory_order_seq_cst ) volatile;
338 signed char fetch_or( signed char,
339 memory_order = memory_order_seq_cst ) volatile;
340 signed char fetch_xor( signed char,
341 memory_order = memory_order_seq_cst ) volatile;
343 CPP0X( atomic_schar() = default; )
344 CPP0X( constexpr atomic_schar( signed char __v__ ) : __f__( __v__) { } )
345 CPP0X( atomic_schar( const atomic_schar& ) = delete; )
346 atomic_schar& operator =( const atomic_schar& ) CPP0X(=delete);
348 signed char operator =( signed char __v__ ) volatile
349 { store( __v__ ); return __v__; }
351 signed char operator ++( int ) volatile
352 { return fetch_add( 1 ); }
354 signed char operator --( int ) volatile
355 { return fetch_sub( 1 ); }
357 signed char operator ++() volatile
358 { return fetch_add( 1 ) + 1; }
360 signed char operator --() volatile
361 { return fetch_sub( 1 ) - 1; }
363 signed char operator +=( signed char __v__ ) volatile
364 { return fetch_add( __v__ ) + __v__; }
366 signed char operator -=( signed char __v__ ) volatile
367 { return fetch_sub( __v__ ) - __v__; }
369 signed char operator &=( signed char __v__ ) volatile
370 { return fetch_and( __v__ ) & __v__; }
372 signed char operator |=( signed char __v__ ) volatile
373 { return fetch_or( __v__ ) | __v__; }
375 signed char operator ^=( signed char __v__ ) volatile
376 { return fetch_xor( __v__ ) ^ __v__; }
378 friend void atomic_store_explicit( volatile atomic_schar*, signed char,
380 friend signed char atomic_load_explicit( volatile atomic_schar*,
382 friend signed char atomic_exchange_explicit( volatile atomic_schar*,
383 signed char, memory_order );
384 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_schar*,
385 signed char*, signed char, memory_order, memory_order );
386 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_schar*,
387 signed char*, signed char, memory_order, memory_order );
388 friend void atomic_fence( const volatile atomic_schar*, memory_order );
389 friend signed char atomic_fetch_add_explicit( volatile atomic_schar*,
390 signed char, memory_order );
391 friend signed char atomic_fetch_sub_explicit( volatile atomic_schar*,
392 signed char, memory_order );
393 friend signed char atomic_fetch_and_explicit( volatile atomic_schar*,
394 signed char, memory_order );
395 friend signed char atomic_fetch_or_explicit( volatile atomic_schar*,
396 signed char, memory_order );
397 friend signed char atomic_fetch_xor_explicit( volatile atomic_schar*,
398 signed char, memory_order );
/* atomic_uchar: atomic unsigned char.  Members mirror the
 * atomic_*_explicit friends below; memory_order parameters default to
 * seq_cst.  Compound-assignment and pre-increment/decrement operators
 * return the UPDATED value; post-increment/decrement return the old
 * value.
 * NOTE(review): opening brace, data field and closing "} atomic_uchar;"
 * are not visible in this chunk. */
406 typedef struct atomic_uchar
409 bool is_lock_free() const volatile;
410 void store( unsigned char,
411 memory_order = memory_order_seq_cst ) volatile;
412 unsigned char load( memory_order = memory_order_seq_cst ) volatile;
413 unsigned char exchange( unsigned char,
414 memory_order = memory_order_seq_cst ) volatile;
415 bool compare_exchange_weak( unsigned char&, unsigned char,
416 memory_order, memory_order ) volatile;
417 bool compare_exchange_strong( unsigned char&, unsigned char,
418 memory_order, memory_order ) volatile;
419 bool compare_exchange_weak( unsigned char&, unsigned char,
420 memory_order = memory_order_seq_cst ) volatile;
421 bool compare_exchange_strong( unsigned char&, unsigned char,
422 memory_order = memory_order_seq_cst ) volatile;
423 void fence( memory_order ) const volatile;
424 unsigned char fetch_add( unsigned char,
425 memory_order = memory_order_seq_cst ) volatile;
426 unsigned char fetch_sub( unsigned char,
427 memory_order = memory_order_seq_cst ) volatile;
428 unsigned char fetch_and( unsigned char,
429 memory_order = memory_order_seq_cst ) volatile;
430 unsigned char fetch_or( unsigned char,
431 memory_order = memory_order_seq_cst ) volatile;
432 unsigned char fetch_xor( unsigned char,
433 memory_order = memory_order_seq_cst ) volatile;
435 CPP0X( atomic_uchar() = default; )
436 CPP0X( constexpr atomic_uchar( unsigned char __v__ ) : __f__( __v__) { } )
437 CPP0X( atomic_uchar( const atomic_uchar& ) = delete; )
438 atomic_uchar& operator =( const atomic_uchar& ) CPP0X(=delete);
440 unsigned char operator =( unsigned char __v__ ) volatile
441 { store( __v__ ); return __v__; }
443 unsigned char operator ++( int ) volatile
444 { return fetch_add( 1 ); }
446 unsigned char operator --( int ) volatile
447 { return fetch_sub( 1 ); }
449 unsigned char operator ++() volatile
450 { return fetch_add( 1 ) + 1; }
452 unsigned char operator --() volatile
453 { return fetch_sub( 1 ) - 1; }
455 unsigned char operator +=( unsigned char __v__ ) volatile
456 { return fetch_add( __v__ ) + __v__; }
458 unsigned char operator -=( unsigned char __v__ ) volatile
459 { return fetch_sub( __v__ ) - __v__; }
461 unsigned char operator &=( unsigned char __v__ ) volatile
462 { return fetch_and( __v__ ) & __v__; }
464 unsigned char operator |=( unsigned char __v__ ) volatile
465 { return fetch_or( __v__ ) | __v__; }
467 unsigned char operator ^=( unsigned char __v__ ) volatile
468 { return fetch_xor( __v__ ) ^ __v__; }
470 friend void atomic_store_explicit( volatile atomic_uchar*, unsigned char,
472 friend unsigned char atomic_load_explicit( volatile atomic_uchar*,
474 friend unsigned char atomic_exchange_explicit( volatile atomic_uchar*,
475 unsigned char, memory_order );
476 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_uchar*,
477 unsigned char*, unsigned char, memory_order, memory_order );
478 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_uchar*,
479 unsigned char*, unsigned char, memory_order, memory_order );
480 friend void atomic_fence( const volatile atomic_uchar*, memory_order );
481 friend unsigned char atomic_fetch_add_explicit( volatile atomic_uchar*,
482 unsigned char, memory_order );
483 friend unsigned char atomic_fetch_sub_explicit( volatile atomic_uchar*,
484 unsigned char, memory_order );
485 friend unsigned char atomic_fetch_and_explicit( volatile atomic_uchar*,
486 unsigned char, memory_order );
487 friend unsigned char atomic_fetch_or_explicit( volatile atomic_uchar*,
488 unsigned char, memory_order );
489 friend unsigned char atomic_fetch_xor_explicit( volatile atomic_uchar*,
490 unsigned char, memory_order );
/* atomic_short: atomic short.  Members mirror the atomic_*_explicit
 * friends below; memory_order parameters default to seq_cst.
 * Compound-assignment and pre-increment/decrement operators return the
 * UPDATED value; post-increment/decrement return the old value.
 * NOTE(review): this chunk is missing lines (opening brace, the first
 * line of the store() declaration, the data field and closing
 * "} atomic_short;"). */
498 typedef struct atomic_short
501 bool is_lock_free() const volatile;
503 memory_order = memory_order_seq_cst ) volatile;
504 short load( memory_order = memory_order_seq_cst ) volatile;
505 short exchange( short,
506 memory_order = memory_order_seq_cst ) volatile;
507 bool compare_exchange_weak( short&, short,
508 memory_order, memory_order ) volatile;
509 bool compare_exchange_strong( short&, short,
510 memory_order, memory_order ) volatile;
511 bool compare_exchange_weak( short&, short,
512 memory_order = memory_order_seq_cst ) volatile;
513 bool compare_exchange_strong( short&, short,
514 memory_order = memory_order_seq_cst ) volatile;
515 void fence( memory_order ) const volatile;
516 short fetch_add( short,
517 memory_order = memory_order_seq_cst ) volatile;
518 short fetch_sub( short,
519 memory_order = memory_order_seq_cst ) volatile;
520 short fetch_and( short,
521 memory_order = memory_order_seq_cst ) volatile;
522 short fetch_or( short,
523 memory_order = memory_order_seq_cst ) volatile;
524 short fetch_xor( short,
525 memory_order = memory_order_seq_cst ) volatile;
527 CPP0X( atomic_short() = default; )
528 CPP0X( constexpr atomic_short( short __v__ ) : __f__( __v__) { } )
529 CPP0X( atomic_short( const atomic_short& ) = delete; )
530 atomic_short& operator =( const atomic_short& ) CPP0X(=delete);
532 short operator =( short __v__ ) volatile
533 { store( __v__ ); return __v__; }
535 short operator ++( int ) volatile
536 { return fetch_add( 1 ); }
538 short operator --( int ) volatile
539 { return fetch_sub( 1 ); }
541 short operator ++() volatile
542 { return fetch_add( 1 ) + 1; }
544 short operator --() volatile
545 { return fetch_sub( 1 ) - 1; }
547 short operator +=( short __v__ ) volatile
548 { return fetch_add( __v__ ) + __v__; }
550 short operator -=( short __v__ ) volatile
551 { return fetch_sub( __v__ ) - __v__; }
553 short operator &=( short __v__ ) volatile
554 { return fetch_and( __v__ ) & __v__; }
556 short operator |=( short __v__ ) volatile
557 { return fetch_or( __v__ ) | __v__; }
559 short operator ^=( short __v__ ) volatile
560 { return fetch_xor( __v__ ) ^ __v__; }
562 friend void atomic_store_explicit( volatile atomic_short*, short,
564 friend short atomic_load_explicit( volatile atomic_short*,
566 friend short atomic_exchange_explicit( volatile atomic_short*,
567 short, memory_order );
568 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_short*,
569 short*, short, memory_order, memory_order );
570 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_short*,
571 short*, short, memory_order, memory_order );
572 friend void atomic_fence( const volatile atomic_short*, memory_order );
573 friend short atomic_fetch_add_explicit( volatile atomic_short*,
574 short, memory_order );
575 friend short atomic_fetch_sub_explicit( volatile atomic_short*,
576 short, memory_order );
577 friend short atomic_fetch_and_explicit( volatile atomic_short*,
578 short, memory_order );
579 friend short atomic_fetch_or_explicit( volatile atomic_short*,
580 short, memory_order );
581 friend short atomic_fetch_xor_explicit( volatile atomic_short*,
582 short, memory_order );
/* atomic_ushort: atomic unsigned short.  Members mirror the
 * atomic_*_explicit friends below; memory_order parameters default to
 * seq_cst.  Compound-assignment and pre-increment/decrement operators
 * return the UPDATED value; post-increment/decrement return the old
 * value.  The underlying storage is the __f__ field (see the macros
 * above, which rely on that name).
 * NOTE(review): the struct's opening brace and closing
 * "} atomic_ushort;" are not visible in this chunk. */
590 typedef struct atomic_ushort
593 bool is_lock_free() const volatile;
594 void store( unsigned short,
595 memory_order = memory_order_seq_cst ) volatile;
596 unsigned short load( memory_order = memory_order_seq_cst ) volatile;
597 unsigned short exchange( unsigned short,
598 memory_order = memory_order_seq_cst ) volatile;
599 bool compare_exchange_weak( unsigned short&, unsigned short,
600 memory_order, memory_order ) volatile;
601 bool compare_exchange_strong( unsigned short&, unsigned short,
602 memory_order, memory_order ) volatile;
603 bool compare_exchange_weak( unsigned short&, unsigned short,
604 memory_order = memory_order_seq_cst ) volatile;
605 bool compare_exchange_strong( unsigned short&, unsigned short,
606 memory_order = memory_order_seq_cst ) volatile;
607 void fence( memory_order ) const volatile;
608 unsigned short fetch_add( unsigned short,
609 memory_order = memory_order_seq_cst ) volatile;
610 unsigned short fetch_sub( unsigned short,
611 memory_order = memory_order_seq_cst ) volatile;
612 unsigned short fetch_and( unsigned short,
613 memory_order = memory_order_seq_cst ) volatile;
614 unsigned short fetch_or( unsigned short,
615 memory_order = memory_order_seq_cst ) volatile;
616 unsigned short fetch_xor( unsigned short,
617 memory_order = memory_order_seq_cst ) volatile;
619 CPP0X( atomic_ushort() = default; )
620 CPP0X( constexpr atomic_ushort( unsigned short __v__ ) : __f__( __v__) { } )
621 CPP0X( atomic_ushort( const atomic_ushort& ) = delete; )
622 atomic_ushort& operator =( const atomic_ushort& ) CPP0X(=delete);
624 unsigned short operator =( unsigned short __v__ ) volatile
625 { store( __v__ ); return __v__; }
627 unsigned short operator ++( int ) volatile
628 { return fetch_add( 1 ); }
630 unsigned short operator --( int ) volatile
631 { return fetch_sub( 1 ); }
633 unsigned short operator ++() volatile
634 { return fetch_add( 1 ) + 1; }
636 unsigned short operator --() volatile
637 { return fetch_sub( 1 ) - 1; }
639 unsigned short operator +=( unsigned short __v__ ) volatile
640 { return fetch_add( __v__ ) + __v__; }
642 unsigned short operator -=( unsigned short __v__ ) volatile
643 { return fetch_sub( __v__ ) - __v__; }
645 unsigned short operator &=( unsigned short __v__ ) volatile
646 { return fetch_and( __v__ ) & __v__; }
648 unsigned short operator |=( unsigned short __v__ ) volatile
649 { return fetch_or( __v__ ) | __v__; }
651 unsigned short operator ^=( unsigned short __v__ ) volatile
652 { return fetch_xor( __v__ ) ^ __v__; }
654 friend void atomic_store_explicit( volatile atomic_ushort*, unsigned short,
656 friend unsigned short atomic_load_explicit( volatile atomic_ushort*,
658 friend unsigned short atomic_exchange_explicit( volatile atomic_ushort*,
659 unsigned short, memory_order );
660 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ushort*,
661 unsigned short*, unsigned short, memory_order, memory_order );
662 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ushort*,
663 unsigned short*, unsigned short, memory_order, memory_order );
664 friend void atomic_fence( const volatile atomic_ushort*, memory_order );
665 friend unsigned short atomic_fetch_add_explicit( volatile atomic_ushort*,
666 unsigned short, memory_order );
667 friend unsigned short atomic_fetch_sub_explicit( volatile atomic_ushort*,
668 unsigned short, memory_order );
669 friend unsigned short atomic_fetch_and_explicit( volatile atomic_ushort*,
670 unsigned short, memory_order );
671 friend unsigned short atomic_fetch_or_explicit( volatile atomic_ushort*,
672 unsigned short, memory_order );
673 friend unsigned short atomic_fetch_xor_explicit( volatile atomic_ushort*,
674 unsigned short, memory_order );
/* Underlying storage; the _ATOMIC_* macros access this field by name. */
678 unsigned short __f__;
/* atomic_int: atomic int.  Members mirror the atomic_*_explicit friends
 * below; memory_order parameters default to seq_cst.
 * Compound-assignment and pre-increment/decrement operators return the
 * UPDATED value; post-increment/decrement return the old value.
 * NOTE(review): this chunk is missing many lines (opening brace, the
 * first lines of store/exchange/fetch_* declarations -- only their
 * "memory_order = ..." continuations remain -- plus the data field and
 * closing "} atomic_int;"); verify against the complete header. */
682 typedef struct atomic_int
685 bool is_lock_free() const volatile;
687 memory_order = memory_order_seq_cst ) volatile;
688 int load( memory_order = memory_order_seq_cst ) volatile;
690 memory_order = memory_order_seq_cst ) volatile;
691 bool compare_exchange_weak( int&, int,
692 memory_order, memory_order ) volatile;
693 bool compare_exchange_strong( int&, int,
694 memory_order, memory_order ) volatile;
695 bool compare_exchange_weak( int&, int,
696 memory_order = memory_order_seq_cst ) volatile;
697 bool compare_exchange_strong( int&, int,
698 memory_order = memory_order_seq_cst ) volatile;
699 void fence( memory_order ) const volatile;
701 memory_order = memory_order_seq_cst ) volatile;
703 memory_order = memory_order_seq_cst ) volatile;
705 memory_order = memory_order_seq_cst ) volatile;
707 memory_order = memory_order_seq_cst ) volatile;
709 memory_order = memory_order_seq_cst ) volatile;
711 CPP0X( atomic_int() = default; )
712 CPP0X( constexpr atomic_int( int __v__ ) : __f__( __v__) { } )
713 CPP0X( atomic_int( const atomic_int& ) = delete; )
714 atomic_int& operator =( const atomic_int& ) CPP0X(=delete);
716 int operator =( int __v__ ) volatile
717 { store( __v__ ); return __v__; }
719 int operator ++( int ) volatile
720 { return fetch_add( 1 ); }
722 int operator --( int ) volatile
723 { return fetch_sub( 1 ); }
725 int operator ++() volatile
726 { return fetch_add( 1 ) + 1; }
728 int operator --() volatile
729 { return fetch_sub( 1 ) - 1; }
731 int operator +=( int __v__ ) volatile
732 { return fetch_add( __v__ ) + __v__; }
734 int operator -=( int __v__ ) volatile
735 { return fetch_sub( __v__ ) - __v__; }
737 int operator &=( int __v__ ) volatile
738 { return fetch_and( __v__ ) & __v__; }
740 int operator |=( int __v__ ) volatile
741 { return fetch_or( __v__ ) | __v__; }
743 int operator ^=( int __v__ ) volatile
744 { return fetch_xor( __v__ ) ^ __v__; }
746 friend void atomic_store_explicit( volatile atomic_int*, int,
748 friend int atomic_load_explicit( volatile atomic_int*,
750 friend int atomic_exchange_explicit( volatile atomic_int*,
752 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_int*,
753 int*, int, memory_order, memory_order );
754 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_int*,
755 int*, int, memory_order, memory_order );
756 friend void atomic_fence( const volatile atomic_int*, memory_order );
757 friend int atomic_fetch_add_explicit( volatile atomic_int*,
759 friend int atomic_fetch_sub_explicit( volatile atomic_int*,
761 friend int atomic_fetch_and_explicit( volatile atomic_int*,
763 friend int atomic_fetch_or_explicit( volatile atomic_int*,
765 friend int atomic_fetch_xor_explicit( volatile atomic_int*,
/* atomic_uint: atomic unsigned int.  Members mirror the
 * atomic_*_explicit friends below; memory_order parameters default to
 * seq_cst.  Compound-assignment and pre-increment/decrement operators
 * return the UPDATED value; post-increment/decrement return the old
 * value.
 * NOTE(review): opening brace, data field and closing "} atomic_uint;"
 * are not visible in this chunk. */
774 typedef struct atomic_uint
777 bool is_lock_free() const volatile;
778 void store( unsigned int,
779 memory_order = memory_order_seq_cst ) volatile;
780 unsigned int load( memory_order = memory_order_seq_cst ) volatile;
781 unsigned int exchange( unsigned int,
782 memory_order = memory_order_seq_cst ) volatile;
783 bool compare_exchange_weak( unsigned int&, unsigned int,
784 memory_order, memory_order ) volatile;
785 bool compare_exchange_strong( unsigned int&, unsigned int,
786 memory_order, memory_order ) volatile;
787 bool compare_exchange_weak( unsigned int&, unsigned int,
788 memory_order = memory_order_seq_cst ) volatile;
789 bool compare_exchange_strong( unsigned int&, unsigned int,
790 memory_order = memory_order_seq_cst ) volatile;
791 void fence( memory_order ) const volatile;
792 unsigned int fetch_add( unsigned int,
793 memory_order = memory_order_seq_cst ) volatile;
794 unsigned int fetch_sub( unsigned int,
795 memory_order = memory_order_seq_cst ) volatile;
796 unsigned int fetch_and( unsigned int,
797 memory_order = memory_order_seq_cst ) volatile;
798 unsigned int fetch_or( unsigned int,
799 memory_order = memory_order_seq_cst ) volatile;
800 unsigned int fetch_xor( unsigned int,
801 memory_order = memory_order_seq_cst ) volatile;
803 CPP0X( atomic_uint() = default; )
804 CPP0X( constexpr atomic_uint( unsigned int __v__ ) : __f__( __v__) { } )
805 CPP0X( atomic_uint( const atomic_uint& ) = delete; )
806 atomic_uint& operator =( const atomic_uint& ) CPP0X(=delete);
808 unsigned int operator =( unsigned int __v__ ) volatile
809 { store( __v__ ); return __v__; }
811 unsigned int operator ++( int ) volatile
812 { return fetch_add( 1 ); }
814 unsigned int operator --( int ) volatile
815 { return fetch_sub( 1 ); }
817 unsigned int operator ++() volatile
818 { return fetch_add( 1 ) + 1; }
820 unsigned int operator --() volatile
821 { return fetch_sub( 1 ) - 1; }
823 unsigned int operator +=( unsigned int __v__ ) volatile
824 { return fetch_add( __v__ ) + __v__; }
826 unsigned int operator -=( unsigned int __v__ ) volatile
827 { return fetch_sub( __v__ ) - __v__; }
829 unsigned int operator &=( unsigned int __v__ ) volatile
830 { return fetch_and( __v__ ) & __v__; }
832 unsigned int operator |=( unsigned int __v__ ) volatile
833 { return fetch_or( __v__ ) | __v__; }
835 unsigned int operator ^=( unsigned int __v__ ) volatile
836 { return fetch_xor( __v__ ) ^ __v__; }
838 friend void atomic_store_explicit( volatile atomic_uint*, unsigned int,
840 friend unsigned int atomic_load_explicit( volatile atomic_uint*,
842 friend unsigned int atomic_exchange_explicit( volatile atomic_uint*,
843 unsigned int, memory_order );
844 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_uint*,
845 unsigned int*, unsigned int, memory_order, memory_order );
846 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_uint*,
847 unsigned int*, unsigned int, memory_order, memory_order );
848 friend void atomic_fence( const volatile atomic_uint*, memory_order );
849 friend unsigned int atomic_fetch_add_explicit( volatile atomic_uint*,
850 unsigned int, memory_order );
851 friend unsigned int atomic_fetch_sub_explicit( volatile atomic_uint*,
852 unsigned int, memory_order );
853 friend unsigned int atomic_fetch_and_explicit( volatile atomic_uint*,
854 unsigned int, memory_order );
855 friend unsigned int atomic_fetch_or_explicit( volatile atomic_uint*,
856 unsigned int, memory_order );
857 friend unsigned int atomic_fetch_xor_explicit( volatile atomic_uint*,
858 unsigned int, memory_order );
866 typedef struct atomic_long
869 bool is_lock_free() const volatile;
871 memory_order = memory_order_seq_cst ) volatile;
872 long load( memory_order = memory_order_seq_cst ) volatile;
874 memory_order = memory_order_seq_cst ) volatile;
875 bool compare_exchange_weak( long&, long,
876 memory_order, memory_order ) volatile;
877 bool compare_exchange_strong( long&, long,
878 memory_order, memory_order ) volatile;
879 bool compare_exchange_weak( long&, long,
880 memory_order = memory_order_seq_cst ) volatile;
881 bool compare_exchange_strong( long&, long,
882 memory_order = memory_order_seq_cst ) volatile;
883 void fence( memory_order ) const volatile;
884 long fetch_add( long,
885 memory_order = memory_order_seq_cst ) volatile;
886 long fetch_sub( long,
887 memory_order = memory_order_seq_cst ) volatile;
888 long fetch_and( long,
889 memory_order = memory_order_seq_cst ) volatile;
891 memory_order = memory_order_seq_cst ) volatile;
892 long fetch_xor( long,
893 memory_order = memory_order_seq_cst ) volatile;
895 CPP0X( atomic_long() = default; )
896 CPP0X( constexpr atomic_long( long __v__ ) : __f__( __v__) { } )
897 CPP0X( atomic_long( const atomic_long& ) = delete; )
898 atomic_long& operator =( const atomic_long& ) CPP0X(=delete);
900 long operator =( long __v__ ) volatile
901 { store( __v__ ); return __v__; }
903 long operator ++( int ) volatile
904 { return fetch_add( 1 ); }
906 long operator --( int ) volatile
907 { return fetch_sub( 1 ); }
909 long operator ++() volatile
910 { return fetch_add( 1 ) + 1; }
912 long operator --() volatile
913 { return fetch_sub( 1 ) - 1; }
915 long operator +=( long __v__ ) volatile
916 { return fetch_add( __v__ ) + __v__; }
918 long operator -=( long __v__ ) volatile
919 { return fetch_sub( __v__ ) - __v__; }
921 long operator &=( long __v__ ) volatile
922 { return fetch_and( __v__ ) & __v__; }
924 long operator |=( long __v__ ) volatile
925 { return fetch_or( __v__ ) | __v__; }
927 long operator ^=( long __v__ ) volatile
928 { return fetch_xor( __v__ ) ^ __v__; }
930 friend void atomic_store_explicit( volatile atomic_long*, long,
932 friend long atomic_load_explicit( volatile atomic_long*,
934 friend long atomic_exchange_explicit( volatile atomic_long*,
935 long, memory_order );
936 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_long*,
937 long*, long, memory_order, memory_order );
938 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_long*,
939 long*, long, memory_order, memory_order );
940 friend void atomic_fence( const volatile atomic_long*, memory_order );
941 friend long atomic_fetch_add_explicit( volatile atomic_long*,
942 long, memory_order );
943 friend long atomic_fetch_sub_explicit( volatile atomic_long*,
944 long, memory_order );
945 friend long atomic_fetch_and_explicit( volatile atomic_long*,
946 long, memory_order );
947 friend long atomic_fetch_or_explicit( volatile atomic_long*,
948 long, memory_order );
949 friend long atomic_fetch_xor_explicit( volatile atomic_long*,
950 long, memory_order );
// atomic_ulong: atomic `unsigned long`, same structure as atomic_long above
// (methods + operators + friend free-function API over the __f__ member).
// NOTE(review): listing gaps again hide the braces and `} atomic_ulong;`
// closer, but the data member `unsigned long __f__;` IS visible at the end.
958 typedef struct atomic_ulong
961 bool is_lock_free() const volatile;
962 void store( unsigned long,
963 memory_order = memory_order_seq_cst ) volatile;
964 unsigned long load( memory_order = memory_order_seq_cst ) volatile;
965 unsigned long exchange( unsigned long,
966 memory_order = memory_order_seq_cst ) volatile;
967 bool compare_exchange_weak( unsigned long&, unsigned long,
968 memory_order, memory_order ) volatile;
969 bool compare_exchange_strong( unsigned long&, unsigned long,
970 memory_order, memory_order ) volatile;
971 bool compare_exchange_weak( unsigned long&, unsigned long,
972 memory_order = memory_order_seq_cst ) volatile;
973 bool compare_exchange_strong( unsigned long&, unsigned long,
974 memory_order = memory_order_seq_cst ) volatile;
975 void fence( memory_order ) const volatile;
976 unsigned long fetch_add( unsigned long,
977 memory_order = memory_order_seq_cst ) volatile;
978 unsigned long fetch_sub( unsigned long,
979 memory_order = memory_order_seq_cst ) volatile;
980 unsigned long fetch_and( unsigned long,
981 memory_order = memory_order_seq_cst ) volatile;
982 unsigned long fetch_or( unsigned long,
983 memory_order = memory_order_seq_cst ) volatile;
984 unsigned long fetch_xor( unsigned long,
985 memory_order = memory_order_seq_cst ) volatile;
// C++0x special members; copy construction/assignment deleted.
987 CPP0X( atomic_ulong() = default; )
988 CPP0X( constexpr atomic_ulong( unsigned long __v__ ) : __f__( __v__) { } )
989 CPP0X( atomic_ulong( const atomic_ulong& ) = delete; )
990 atomic_ulong& operator =( const atomic_ulong& ) CPP0X(=delete);
// One-atomic-op convenience operators (new value derived locally).
992 unsigned long operator =( unsigned long __v__ ) volatile
993 { store( __v__ ); return __v__; }
995 unsigned long operator ++( int ) volatile
996 { return fetch_add( 1 ); }
998 unsigned long operator --( int ) volatile
999 { return fetch_sub( 1 ); }
1001 unsigned long operator ++() volatile
1002 { return fetch_add( 1 ) + 1; }
1004 unsigned long operator --() volatile
1005 { return fetch_sub( 1 ) - 1; }
1007 unsigned long operator +=( unsigned long __v__ ) volatile
1008 { return fetch_add( __v__ ) + __v__; }
1010 unsigned long operator -=( unsigned long __v__ ) volatile
1011 { return fetch_sub( __v__ ) - __v__; }
1013 unsigned long operator &=( unsigned long __v__ ) volatile
1014 { return fetch_and( __v__ ) & __v__; }
1016 unsigned long operator |=( unsigned long __v__ ) volatile
1017 { return fetch_or( __v__ ) | __v__; }
1019 unsigned long operator ^=( unsigned long __v__ ) volatile
1020 { return fetch_xor( __v__ ) ^ __v__; }
// Free-function API granted access to __f__.
1022 friend void atomic_store_explicit( volatile atomic_ulong*, unsigned long,
1024 friend unsigned long atomic_load_explicit( volatile atomic_ulong*,
1026 friend unsigned long atomic_exchange_explicit( volatile atomic_ulong*,
1027 unsigned long, memory_order );
1028 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ulong*,
1029 unsigned long*, unsigned long, memory_order, memory_order );
1030 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ulong*,
1031 unsigned long*, unsigned long, memory_order, memory_order );
1032 friend void atomic_fence( const volatile atomic_ulong*, memory_order );
1033 friend unsigned long atomic_fetch_add_explicit( volatile atomic_ulong*,
1034 unsigned long, memory_order );
1035 friend unsigned long atomic_fetch_sub_explicit( volatile atomic_ulong*,
1036 unsigned long, memory_order );
1037 friend unsigned long atomic_fetch_and_explicit( volatile atomic_ulong*,
1038 unsigned long, memory_order );
1039 friend unsigned long atomic_fetch_or_explicit( volatile atomic_ulong*,
1040 unsigned long, memory_order );
1041 friend unsigned long atomic_fetch_xor_explicit( volatile atomic_ulong*,
1042 unsigned long, memory_order );
// The wrapped value; the _ATOMIC_* macros (file header) rely on this
// field being named __f__.
1046 unsigned long __f__;
// atomic_llong: atomic `long long`, same pattern as the other integral
// atomics (methods + operators + friend free-function API).
// NOTE(review): listing gaps hide the struct braces and the
// `long long __f__;` member for this struct.
1050 typedef struct atomic_llong
1053 bool is_lock_free() const volatile;
1054 void store( long long,
1055 memory_order = memory_order_seq_cst ) volatile;
1056 long long load( memory_order = memory_order_seq_cst ) volatile;
1057 long long exchange( long long,
1058 memory_order = memory_order_seq_cst ) volatile;
1059 bool compare_exchange_weak( long long&, long long,
1060 memory_order, memory_order ) volatile;
1061 bool compare_exchange_strong( long long&, long long,
1062 memory_order, memory_order ) volatile;
1063 bool compare_exchange_weak( long long&, long long,
1064 memory_order = memory_order_seq_cst ) volatile;
1065 bool compare_exchange_strong( long long&, long long,
1066 memory_order = memory_order_seq_cst ) volatile;
1067 void fence( memory_order ) const volatile;
1068 long long fetch_add( long long,
1069 memory_order = memory_order_seq_cst ) volatile;
1070 long long fetch_sub( long long,
1071 memory_order = memory_order_seq_cst ) volatile;
1072 long long fetch_and( long long,
1073 memory_order = memory_order_seq_cst ) volatile;
1074 long long fetch_or( long long,
1075 memory_order = memory_order_seq_cst ) volatile;
1076 long long fetch_xor( long long,
1077 memory_order = memory_order_seq_cst ) volatile;
// C++0x special members; copy construction/assignment deleted.
1079 CPP0X( atomic_llong() = default; )
1080 CPP0X( constexpr atomic_llong( long long __v__ ) : __f__( __v__) { } )
1081 CPP0X( atomic_llong( const atomic_llong& ) = delete; )
1082 atomic_llong& operator =( const atomic_llong& ) CPP0X(=delete);
// One-atomic-op convenience operators (new value derived locally).
1084 long long operator =( long long __v__ ) volatile
1085 { store( __v__ ); return __v__; }
1087 long long operator ++( int ) volatile
1088 { return fetch_add( 1 ); }
1090 long long operator --( int ) volatile
1091 { return fetch_sub( 1 ); }
1093 long long operator ++() volatile
1094 { return fetch_add( 1 ) + 1; }
1096 long long operator --() volatile
1097 { return fetch_sub( 1 ) - 1; }
1099 long long operator +=( long long __v__ ) volatile
1100 { return fetch_add( __v__ ) + __v__; }
1102 long long operator -=( long long __v__ ) volatile
1103 { return fetch_sub( __v__ ) - __v__; }
1105 long long operator &=( long long __v__ ) volatile
1106 { return fetch_and( __v__ ) & __v__; }
1108 long long operator |=( long long __v__ ) volatile
1109 { return fetch_or( __v__ ) | __v__; }
1111 long long operator ^=( long long __v__ ) volatile
1112 { return fetch_xor( __v__ ) ^ __v__; }
// Free-function API granted access to __f__.
1114 friend void atomic_store_explicit( volatile atomic_llong*, long long,
1116 friend long long atomic_load_explicit( volatile atomic_llong*,
1118 friend long long atomic_exchange_explicit( volatile atomic_llong*,
1119 long long, memory_order );
1120 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_llong*,
1121 long long*, long long, memory_order, memory_order );
1122 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_llong*,
1123 long long*, long long, memory_order, memory_order );
1124 friend void atomic_fence( const volatile atomic_llong*, memory_order );
1125 friend long long atomic_fetch_add_explicit( volatile atomic_llong*,
1126 long long, memory_order );
1127 friend long long atomic_fetch_sub_explicit( volatile atomic_llong*,
1128 long long, memory_order );
1129 friend long long atomic_fetch_and_explicit( volatile atomic_llong*,
1130 long long, memory_order );
1131 friend long long atomic_fetch_or_explicit( volatile atomic_llong*,
1132 long long, memory_order );
1133 friend long long atomic_fetch_xor_explicit( volatile atomic_llong*,
1134 long long, memory_order );
// atomic_ullong: atomic `unsigned long long`, same pattern as the other
// integral atomics; the wrapped field `__f__` is visible at the end.
1142 typedef struct atomic_ullong
1145 bool is_lock_free() const volatile;
1146 void store( unsigned long long,
1147 memory_order = memory_order_seq_cst ) volatile;
1148 unsigned long long load( memory_order = memory_order_seq_cst ) volatile;
1149 unsigned long long exchange( unsigned long long,
1150 memory_order = memory_order_seq_cst ) volatile;
1151 bool compare_exchange_weak( unsigned long long&, unsigned long long,
1152 memory_order, memory_order ) volatile;
1153 bool compare_exchange_strong( unsigned long long&, unsigned long long,
1154 memory_order, memory_order ) volatile;
1155 bool compare_exchange_weak( unsigned long long&, unsigned long long,
1156 memory_order = memory_order_seq_cst ) volatile;
1157 bool compare_exchange_strong( unsigned long long&, unsigned long long,
1158 memory_order = memory_order_seq_cst ) volatile;
1159 void fence( memory_order ) const volatile;
1160 unsigned long long fetch_add( unsigned long long,
1161 memory_order = memory_order_seq_cst ) volatile;
1162 unsigned long long fetch_sub( unsigned long long,
1163 memory_order = memory_order_seq_cst ) volatile;
1164 unsigned long long fetch_and( unsigned long long,
1165 memory_order = memory_order_seq_cst ) volatile;
1166 unsigned long long fetch_or( unsigned long long,
1167 memory_order = memory_order_seq_cst ) volatile;
1168 unsigned long long fetch_xor( unsigned long long,
1169 memory_order = memory_order_seq_cst ) volatile;
// C++0x special members; copy construction/assignment deleted.
1171 CPP0X( atomic_ullong() = default; )
1172 CPP0X( constexpr atomic_ullong( unsigned long long __v__ ) : __f__( __v__) { } )
1173 CPP0X( atomic_ullong( const atomic_ullong& ) = delete; )
1174 atomic_ullong& operator =( const atomic_ullong& ) CPP0X(=delete);
// One-atomic-op convenience operators (new value derived locally).
1176 unsigned long long operator =( unsigned long long __v__ ) volatile
1177 { store( __v__ ); return __v__; }
1179 unsigned long long operator ++( int ) volatile
1180 { return fetch_add( 1 ); }
1182 unsigned long long operator --( int ) volatile
1183 { return fetch_sub( 1 ); }
1185 unsigned long long operator ++() volatile
1186 { return fetch_add( 1 ) + 1; }
1188 unsigned long long operator --() volatile
1189 { return fetch_sub( 1 ) - 1; }
1191 unsigned long long operator +=( unsigned long long __v__ ) volatile
1192 { return fetch_add( __v__ ) + __v__; }
1194 unsigned long long operator -=( unsigned long long __v__ ) volatile
1195 { return fetch_sub( __v__ ) - __v__; }
1197 unsigned long long operator &=( unsigned long long __v__ ) volatile
1198 { return fetch_and( __v__ ) & __v__; }
1200 unsigned long long operator |=( unsigned long long __v__ ) volatile
1201 { return fetch_or( __v__ ) | __v__; }
1203 unsigned long long operator ^=( unsigned long long __v__ ) volatile
1204 { return fetch_xor( __v__ ) ^ __v__; }
// Free-function API granted access to __f__.
1206 friend void atomic_store_explicit( volatile atomic_ullong*, unsigned long long,
1208 friend unsigned long long atomic_load_explicit( volatile atomic_ullong*,
1210 friend unsigned long long atomic_exchange_explicit( volatile atomic_ullong*,
1211 unsigned long long, memory_order );
1212 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ullong*,
1213 unsigned long long*, unsigned long long, memory_order, memory_order );
1214 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ullong*,
1215 unsigned long long*, unsigned long long, memory_order, memory_order );
1216 friend void atomic_fence( const volatile atomic_ullong*, memory_order );
1217 friend unsigned long long atomic_fetch_add_explicit( volatile atomic_ullong*,
1218 unsigned long long, memory_order );
1219 friend unsigned long long atomic_fetch_sub_explicit( volatile atomic_ullong*,
1220 unsigned long long, memory_order );
1221 friend unsigned long long atomic_fetch_and_explicit( volatile atomic_ullong*,
1222 unsigned long long, memory_order );
1223 friend unsigned long long atomic_fetch_or_explicit( volatile atomic_ullong*,
1224 unsigned long long, memory_order );
1225 friend unsigned long long atomic_fetch_xor_explicit( volatile atomic_ullong*,
1226 unsigned long long, memory_order );
// The wrapped value; must be named __f__ for the _ATOMIC_* macros.
1230 unsigned long long __f__;
// Atomic aliases mirroring the <stdint.h> least/fast/pointer-sized types,
// each mapped onto one of the fundamental atomic_* structs above.
1234 typedef atomic_schar atomic_int_least8_t;
1235 typedef atomic_uchar atomic_uint_least8_t;
1236 typedef atomic_short atomic_int_least16_t;
1237 typedef atomic_ushort atomic_uint_least16_t;
1238 typedef atomic_int atomic_int_least32_t;
1239 typedef atomic_uint atomic_uint_least32_t;
1240 typedef atomic_llong atomic_int_least64_t;
1241 typedef atomic_ullong atomic_uint_least64_t;
1243 typedef atomic_schar atomic_int_fast8_t;
1244 typedef atomic_uchar atomic_uint_fast8_t;
1245 typedef atomic_short atomic_int_fast16_t;
1246 typedef atomic_ushort atomic_uint_fast16_t;
1247 typedef atomic_int atomic_int_fast32_t;
1248 typedef atomic_uint atomic_uint_fast32_t;
1249 typedef atomic_llong atomic_int_fast64_t;
1250 typedef atomic_ullong atomic_uint_fast64_t;
// NOTE(review): pointer/size types are mapped to long, which assumes
// long is pointer-sized (e.g. LP64) — confirm for the target platform.
1252 typedef atomic_long atomic_intptr_t;
1253 typedef atomic_ulong atomic_uintptr_t;
1255 typedef atomic_long atomic_ssize_t;
1256 typedef atomic_ulong atomic_size_t;
1258 typedef atomic_long atomic_ptrdiff_t;
1260 typedef atomic_llong atomic_intmax_t;
1261 typedef atomic_ullong atomic_uintmax_t;
// atomic_wchar_t: atomic wchar_t, same pattern as the integral atomics.
// NOTE(review): listing gaps hide the struct braces and the __f__ member;
// a conflicting alias `typedef atomic_int_least32_t atomic_wchar_t;`
// appears later (orig line 1362) — presumably the two sit in different
// preprocessor branches that are not visible here.
1267 typedef struct atomic_wchar_t
1270 bool is_lock_free() const volatile;
1271 void store( wchar_t, memory_order = memory_order_seq_cst ) volatile;
1272 wchar_t load( memory_order = memory_order_seq_cst ) volatile;
1273 wchar_t exchange( wchar_t,
1274 memory_order = memory_order_seq_cst ) volatile;
1275 bool compare_exchange_weak( wchar_t&, wchar_t,
1276 memory_order, memory_order ) volatile;
1277 bool compare_exchange_strong( wchar_t&, wchar_t,
1278 memory_order, memory_order ) volatile;
1279 bool compare_exchange_weak( wchar_t&, wchar_t,
1280 memory_order = memory_order_seq_cst ) volatile;
1281 bool compare_exchange_strong( wchar_t&, wchar_t,
1282 memory_order = memory_order_seq_cst ) volatile;
1283 void fence( memory_order ) const volatile;
1284 wchar_t fetch_add( wchar_t,
1285 memory_order = memory_order_seq_cst ) volatile;
1286 wchar_t fetch_sub( wchar_t,
1287 memory_order = memory_order_seq_cst ) volatile;
1288 wchar_t fetch_and( wchar_t,
1289 memory_order = memory_order_seq_cst ) volatile;
1290 wchar_t fetch_or( wchar_t,
1291 memory_order = memory_order_seq_cst ) volatile;
1292 wchar_t fetch_xor( wchar_t,
1293 memory_order = memory_order_seq_cst ) volatile;
// C++0x special members; copy construction/assignment deleted.
1295 CPP0X( atomic_wchar_t() = default; )
1296 CPP0X( constexpr atomic_wchar_t( wchar_t __v__ ) : __f__( __v__) { } )
1297 CPP0X( atomic_wchar_t( const atomic_wchar_t& ) = delete; )
1298 atomic_wchar_t& operator =( const atomic_wchar_t& ) CPP0X(=delete);
// One-atomic-op convenience operators (new value derived locally).
1300 wchar_t operator =( wchar_t __v__ ) volatile
1301 { store( __v__ ); return __v__; }
1303 wchar_t operator ++( int ) volatile
1304 { return fetch_add( 1 ); }
1306 wchar_t operator --( int ) volatile
1307 { return fetch_sub( 1 ); }
1309 wchar_t operator ++() volatile
1310 { return fetch_add( 1 ) + 1; }
1312 wchar_t operator --() volatile
1313 { return fetch_sub( 1 ) - 1; }
1315 wchar_t operator +=( wchar_t __v__ ) volatile
1316 { return fetch_add( __v__ ) + __v__; }
1318 wchar_t operator -=( wchar_t __v__ ) volatile
1319 { return fetch_sub( __v__ ) - __v__; }
1321 wchar_t operator &=( wchar_t __v__ ) volatile
1322 { return fetch_and( __v__ ) & __v__; }
1324 wchar_t operator |=( wchar_t __v__ ) volatile
1325 { return fetch_or( __v__ ) | __v__; }
1327 wchar_t operator ^=( wchar_t __v__ ) volatile
1328 { return fetch_xor( __v__ ) ^ __v__; }
// Free-function API granted access to __f__.
1330 friend void atomic_store_explicit( volatile atomic_wchar_t*, wchar_t,
1332 friend wchar_t atomic_load_explicit( volatile atomic_wchar_t*,
1334 friend wchar_t atomic_exchange_explicit( volatile atomic_wchar_t*,
1335 wchar_t, memory_order );
1336 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_wchar_t*,
1337 wchar_t*, wchar_t, memory_order, memory_order );
1338 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_wchar_t*,
1339 wchar_t*, wchar_t, memory_order, memory_order );
1340 friend void atomic_fence( const volatile atomic_wchar_t*, memory_order );
1341 friend wchar_t atomic_fetch_add_explicit( volatile atomic_wchar_t*,
1342 wchar_t, memory_order );
1343 friend wchar_t atomic_fetch_sub_explicit( volatile atomic_wchar_t*,
1344 wchar_t, memory_order );
1345 friend wchar_t atomic_fetch_and_explicit( volatile atomic_wchar_t*,
1346 wchar_t, memory_order );
1347 friend wchar_t atomic_fetch_or_explicit( volatile atomic_wchar_t*,
1348 wchar_t, memory_order );
1349 friend wchar_t atomic_fetch_xor_explicit( volatile atomic_wchar_t*,
1350 wchar_t, memory_order );
// char16_t/char32_t/wchar_t atomics as aliases of the least-width types.
// NOTE(review): the atomic_wchar_t alias here conflicts with the struct
// of the same name above — these likely belong to an #if/#else branch
// (e.g. for compilers lacking a distinct wchar_t); the conditional lines
// are not visible in this listing. Confirm against the full header.
1360 typedef atomic_int_least16_t atomic_char16_t;
1361 typedef atomic_int_least32_t atomic_char32_t;
1362 typedef atomic_int_least32_t atomic_wchar_t;
// Primary template atomic<T>: generic atomic wrapper for arbitrary
// (presumably trivially-copyable) T. Offers the full load/store/exchange/
// CAS interface but no arithmetic (arithmetic lives in the integral
// specializations below).
// NOTE(review): the `struct atomic {` opener (orig lines 1370-1373) is
// missing from this listing.
1369 template< typename T >
1374 bool is_lock_free() const volatile;
1375 void store( T, memory_order = memory_order_seq_cst ) volatile;
1376 T load( memory_order = memory_order_seq_cst ) volatile;
1377 T exchange( T __v__, memory_order = memory_order_seq_cst ) volatile;
1378 bool compare_exchange_weak( T&, T, memory_order, memory_order ) volatile;
1379 bool compare_exchange_strong( T&, T, memory_order, memory_order ) volatile;
1380 bool compare_exchange_weak( T&, T, memory_order = memory_order_seq_cst ) volatile;
1381 bool compare_exchange_strong( T&, T, memory_order = memory_order_seq_cst ) volatile;
1382 void fence( memory_order ) const volatile;
// C++0x special members; copying deleted, construction from T constexpr.
1384 CPP0X( atomic() = default; )
1385 CPP0X( constexpr explicit atomic( T __v__ ) : __f__( __v__ ) { } )
1386 CPP0X( atomic( const atomic& ) = delete; )
1387 atomic& operator =( const atomic& ) CPP0X(=delete);
1389 T operator =( T __v__ ) volatile
1390 { store( __v__ ); return __v__; }
// Partial specialization atomic<T*>: typed pointer atomics layered on
// atomic_address, re-declaring the interface with T* instead of void*
// and adding pointer arithmetic via fetch_add/fetch_sub(ptrdiff_t).
// NOTE(review): operator+= / operator-= take `T* __v__` here yet forward
// it to fetch_add/fetch_sub, whose parameter is ptrdiff_t — this looks
// suspect (one would expect ptrdiff_t operands); confirm against the
// original header before relying on these operators.
1401 template<typename T> struct atomic< T* > : atomic_address
1403 T* load( memory_order = memory_order_seq_cst ) volatile;
1404 T* exchange( T*, memory_order = memory_order_seq_cst ) volatile;
1405 bool compare_exchange_weak( T*&, T*, memory_order, memory_order ) volatile;
1406 bool compare_exchange_strong( T*&, T*, memory_order, memory_order ) volatile;
1407 bool compare_exchange_weak( T*&, T*,
1408 memory_order = memory_order_seq_cst ) volatile;
1409 bool compare_exchange_strong( T*&, T*,
1410 memory_order = memory_order_seq_cst ) volatile;
1411 T* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
1412 T* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
// C++0x special members; copying deleted.
1414 CPP0X( atomic() = default; )
1415 CPP0X( constexpr explicit atomic( T __v__ ) : atomic_address( __v__ ) { } )
1416 CPP0X( atomic( const atomic& ) = delete; )
1417 atomic& operator =( const atomic& ) CPP0X(=delete);
1419 T* operator =( T* __v__ ) volatile
1420 { store( __v__ ); return __v__; }
1422 T* operator ++( int ) volatile
1423 { return fetch_add( 1 ); }
1425 T* operator --( int ) volatile
1426 { return fetch_sub( 1 ); }
1428 T* operator ++() volatile
1429 { return fetch_add( 1 ) + 1; }
1431 T* operator --() volatile
1432 { return fetch_sub( 1 ) - 1; }
1434 T* operator +=( T* __v__ ) volatile
1435 { return fetch_add( __v__ ) + __v__; }
1437 T* operator -=( T* __v__ ) volatile
1438 { return fetch_sub( __v__ ) - __v__; }
// Full specializations of atomic<> for the builtin types, each simply
// inheriting the matching atomic_* struct and re-supplying the special
// members plus assignment-from-value. All follow an identical pattern.
// NOTE(review): the class-body braces `{` / `};` are not visible in this
// listing (original numbering has gaps around each specialization).
1446 template<> struct atomic< bool > : atomic_bool
1448 CPP0X( atomic() = default; )
1449 CPP0X( constexpr explicit atomic( bool __v__ )
1450 : atomic_bool( __v__ ) { } )
1451 CPP0X( atomic( const atomic& ) = delete; )
1452 atomic& operator =( const atomic& ) CPP0X(=delete);
1454 bool operator =( bool __v__ ) volatile
1455 { store( __v__ ); return __v__; }
1459 template<> struct atomic< void* > : atomic_address
1461 CPP0X( atomic() = default; )
1462 CPP0X( constexpr explicit atomic( void* __v__ )
1463 : atomic_address( __v__ ) { } )
1464 CPP0X( atomic( const atomic& ) = delete; )
1465 atomic& operator =( const atomic& ) CPP0X(=delete);
1467 void* operator =( void* __v__ ) volatile
1468 { store( __v__ ); return __v__; }
1472 template<> struct atomic< char > : atomic_char
1474 CPP0X( atomic() = default; )
1475 CPP0X( constexpr explicit atomic( char __v__ )
1476 : atomic_char( __v__ ) { } )
1477 CPP0X( atomic( const atomic& ) = delete; )
1478 atomic& operator =( const atomic& ) CPP0X(=delete);
1480 char operator =( char __v__ ) volatile
1481 { store( __v__ ); return __v__; }
1485 template<> struct atomic< signed char > : atomic_schar
1487 CPP0X( atomic() = default; )
1488 CPP0X( constexpr explicit atomic( signed char __v__ )
1489 : atomic_schar( __v__ ) { } )
1490 CPP0X( atomic( const atomic& ) = delete; )
1491 atomic& operator =( const atomic& ) CPP0X(=delete);
1493 signed char operator =( signed char __v__ ) volatile
1494 { store( __v__ ); return __v__; }
1498 template<> struct atomic< unsigned char > : atomic_uchar
1500 CPP0X( atomic() = default; )
1501 CPP0X( constexpr explicit atomic( unsigned char __v__ )
1502 : atomic_uchar( __v__ ) { } )
1503 CPP0X( atomic( const atomic& ) = delete; )
1504 atomic& operator =( const atomic& ) CPP0X(=delete);
1506 unsigned char operator =( unsigned char __v__ ) volatile
1507 { store( __v__ ); return __v__; }
1511 template<> struct atomic< short > : atomic_short
1513 CPP0X( atomic() = default; )
1514 CPP0X( constexpr explicit atomic( short __v__ )
1515 : atomic_short( __v__ ) { } )
1516 CPP0X( atomic( const atomic& ) = delete; )
1517 atomic& operator =( const atomic& ) CPP0X(=delete);
1519 short operator =( short __v__ ) volatile
1520 { store( __v__ ); return __v__; }
1524 template<> struct atomic< unsigned short > : atomic_ushort
1526 CPP0X( atomic() = default; )
1527 CPP0X( constexpr explicit atomic( unsigned short __v__ )
1528 : atomic_ushort( __v__ ) { } )
1529 CPP0X( atomic( const atomic& ) = delete; )
1530 atomic& operator =( const atomic& ) CPP0X(=delete);
1532 unsigned short operator =( unsigned short __v__ ) volatile
1533 { store( __v__ ); return __v__; }
1537 template<> struct atomic< int > : atomic_int
1539 CPP0X( atomic() = default; )
1540 CPP0X( constexpr explicit atomic( int __v__ )
1541 : atomic_int( __v__ ) { } )
1542 CPP0X( atomic( const atomic& ) = delete; )
1543 atomic& operator =( const atomic& ) CPP0X(=delete);
1545 int operator =( int __v__ ) volatile
1546 { store( __v__ ); return __v__; }
1550 template<> struct atomic< unsigned int > : atomic_uint
1552 CPP0X( atomic() = default; )
1553 CPP0X( constexpr explicit atomic( unsigned int __v__ )
1554 : atomic_uint( __v__ ) { } )
1555 CPP0X( atomic( const atomic& ) = delete; )
1556 atomic& operator =( const atomic& ) CPP0X(=delete);
1558 unsigned int operator =( unsigned int __v__ ) volatile
1559 { store( __v__ ); return __v__; }
1563 template<> struct atomic< long > : atomic_long
1565 CPP0X( atomic() = default; )
1566 CPP0X( constexpr explicit atomic( long __v__ )
1567 : atomic_long( __v__ ) { } )
1568 CPP0X( atomic( const atomic& ) = delete; )
1569 atomic& operator =( const atomic& ) CPP0X(=delete);
1571 long operator =( long __v__ ) volatile
1572 { store( __v__ ); return __v__; }
1576 template<> struct atomic< unsigned long > : atomic_ulong
1578 CPP0X( atomic() = default; )
1579 CPP0X( constexpr explicit atomic( unsigned long __v__ )
1580 : atomic_ulong( __v__ ) { } )
1581 CPP0X( atomic( const atomic& ) = delete; )
1582 atomic& operator =( const atomic& ) CPP0X(=delete);
1584 unsigned long operator =( unsigned long __v__ ) volatile
1585 { store( __v__ ); return __v__; }
1589 template<> struct atomic< long long > : atomic_llong
1591 CPP0X( atomic() = default; )
1592 CPP0X( constexpr explicit atomic( long long __v__ )
1593 : atomic_llong( __v__ ) { } )
1594 CPP0X( atomic( const atomic& ) = delete; )
1595 atomic& operator =( const atomic& ) CPP0X(=delete);
1597 long long operator =( long long __v__ ) volatile
1598 { store( __v__ ); return __v__; }
1602 template<> struct atomic< unsigned long long > : atomic_ullong
1604 CPP0X( atomic() = default; )
1605 CPP0X( constexpr explicit atomic( unsigned long long __v__ )
1606 : atomic_ullong( __v__ ) { } )
1607 CPP0X( atomic( const atomic& ) = delete; )
1608 atomic& operator =( const atomic& ) CPP0X(=delete);
1610 unsigned long long operator =( unsigned long long __v__ ) volatile
1611 { store( __v__ ); return __v__; }
1615 template<> struct atomic< wchar_t > : atomic_wchar_t
1617 CPP0X( atomic() = default; )
1618 CPP0X( constexpr explicit atomic( wchar_t __v__ )
1619 : atomic_wchar_t( __v__ ) { } )
1620 CPP0X( atomic( const atomic& ) = delete; )
1621 atomic& operator =( const atomic& ) CPP0X(=delete);
1623 wchar_t operator =( wchar_t __v__ ) volatile
1624 { store( __v__ ); return __v__; }
// Free-function implementations for atomic_bool, built on the _ATOMIC_*
// macros from the file header. The non-_explicit forms forward with
// memory_order_seq_cst.
// NOTE(review): atomic_is_lock_free's body line is missing from this
// listing (original numbering jumps 1635 -> 1638).
1634 inline bool atomic_is_lock_free
1635 ( const volatile atomic_bool* __a__ )
1638 inline bool atomic_load_explicit
1639 ( volatile atomic_bool* __a__, memory_order __x__ )
1640 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1642 inline bool atomic_load
1643 ( volatile atomic_bool* __a__ ) { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1645 inline void atomic_store_explicit
1646 ( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
1647 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1649 inline void atomic_store
1650 ( volatile atomic_bool* __a__, bool __m__ )
1651 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
// exchange is expressed as a plain-assignment RMW via _ATOMIC_MODIFY_.
1653 inline bool atomic_exchange_explicit
1654 ( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
1655 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1657 inline bool atomic_exchange
1658 ( volatile atomic_bool* __a__, bool __m__ )
1659 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
// NOTE(review): the failure ordering __y__ is accepted but never passed to
// the macros — only the success order __x__ is used.
1661 inline bool atomic_compare_exchange_weak_explicit
1662 ( volatile atomic_bool* __a__, bool* __e__, bool __m__,
1663 memory_order __x__, memory_order __y__ )
1664 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1666 inline bool atomic_compare_exchange_strong_explicit
1667 ( volatile atomic_bool* __a__, bool* __e__, bool __m__,
1668 memory_order __x__, memory_order __y__ )
1669 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1671 inline bool atomic_compare_exchange_weak
1672 ( volatile atomic_bool* __a__, bool* __e__, bool __m__ )
1673 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1674 memory_order_seq_cst, memory_order_seq_cst ); }
1676 inline bool atomic_compare_exchange_strong
1677 ( volatile atomic_bool* __a__, bool* __e__, bool __m__ )
1678 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1679 memory_order_seq_cst, memory_order_seq_cst ); }
1681 inline void atomic_fence
1682 ( const volatile atomic_bool* __a__, memory_order __x__ )
1683 { _ATOMIC_FENCE_( __a__, __x__ ); }
// Free-function implementations for atomic_address (void* payload), same
// macro-based pattern as the atomic_bool group above.
// NOTE(review): atomic_is_lock_free's body line is missing from this
// listing (original numbering jumps 1686 -> 1689).
1686 inline bool atomic_is_lock_free( const volatile atomic_address* __a__ )
1689 inline void* atomic_load_explicit
1690 ( volatile atomic_address* __a__, memory_order __x__ )
1691 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1693 inline void* atomic_load( volatile atomic_address* __a__ )
1694 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1696 inline void atomic_store_explicit
1697 ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
1698 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1700 inline void atomic_store
1701 ( volatile atomic_address* __a__, void* __m__ )
1702 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1704 inline void* atomic_exchange_explicit
1705 ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
1706 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1708 inline void* atomic_exchange
1709 ( volatile atomic_address* __a__, void* __m__ )
1710 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
// NOTE(review): failure order __y__ is unused here too (see atomic_bool).
1712 inline bool atomic_compare_exchange_weak_explicit
1713 ( volatile atomic_address* __a__, void** __e__, void* __m__,
1714 memory_order __x__, memory_order __y__ )
1715 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1717 inline bool atomic_compare_exchange_strong_explicit
1718 ( volatile atomic_address* __a__, void** __e__, void* __m__,
1719 memory_order __x__, memory_order __y__ )
1720 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1722 inline bool atomic_compare_exchange_weak
1723 ( volatile atomic_address* __a__, void** __e__, void* __m__ )
1724 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1725 memory_order_seq_cst, memory_order_seq_cst ); }
1727 inline bool atomic_compare_exchange_strong
1728 ( volatile atomic_address* __a__, void** __e__, void* __m__ )
1729 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1730 memory_order_seq_cst, memory_order_seq_cst ); }
1732 inline void atomic_fence
1733 ( const volatile atomic_address* __a__, memory_order __x__ )
1734 { _ATOMIC_FENCE_( __a__, __x__ ); }
// Free-function implementations for atomic_char; identical macro-based
// pattern to the groups above.
// NOTE(review): atomic_is_lock_free's body line is missing from this
// listing (original numbering jumps 1737 -> 1740).
1737 inline bool atomic_is_lock_free( const volatile atomic_char* __a__ )
1740 inline char atomic_load_explicit
1741 ( volatile atomic_char* __a__, memory_order __x__ )
1742 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1744 inline char atomic_load( volatile atomic_char* __a__ )
1745 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1747 inline void atomic_store_explicit
1748 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
1749 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1751 inline void atomic_store
1752 ( volatile atomic_char* __a__, char __m__ )
1753 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1755 inline char atomic_exchange_explicit
1756 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
1757 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1759 inline char atomic_exchange
1760 ( volatile atomic_char* __a__, char __m__ )
1761 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
// NOTE(review): failure order __y__ is unused here too (see atomic_bool).
1763 inline bool atomic_compare_exchange_weak_explicit
1764 ( volatile atomic_char* __a__, char* __e__, char __m__,
1765 memory_order __x__, memory_order __y__ )
1766 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1768 inline bool atomic_compare_exchange_strong_explicit
1769 ( volatile atomic_char* __a__, char* __e__, char __m__,
1770 memory_order __x__, memory_order __y__ )
1771 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1773 inline bool atomic_compare_exchange_weak
1774 ( volatile atomic_char* __a__, char* __e__, char __m__ )
1775 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1776 memory_order_seq_cst, memory_order_seq_cst ); }
1778 inline bool atomic_compare_exchange_strong
1779 ( volatile atomic_char* __a__, char* __e__, char __m__ )
1780 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1781 memory_order_seq_cst, memory_order_seq_cst ); }
1783 inline void atomic_fence
1784 ( const volatile atomic_char* __a__, memory_order __x__ )
1785 { _ATOMIC_FENCE_( __a__, __x__ ); }
// Free-function implementations for atomic_schar; identical macro-based
// pattern to the groups above. (The trailing atomic_fence for this type
// is cut off at the end of this listing.)
// NOTE(review): atomic_is_lock_free's body line is missing from this
// listing (original numbering jumps 1788 -> 1791).
1788 inline bool atomic_is_lock_free( const volatile atomic_schar* __a__ )
1791 inline signed char atomic_load_explicit
1792 ( volatile atomic_schar* __a__, memory_order __x__ )
1793 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1795 inline signed char atomic_load( volatile atomic_schar* __a__ )
1796 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1798 inline void atomic_store_explicit
1799 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
1800 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1802 inline void atomic_store
1803 ( volatile atomic_schar* __a__, signed char __m__ )
1804 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1806 inline signed char atomic_exchange_explicit
1807 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
1808 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1810 inline signed char atomic_exchange
1811 ( volatile atomic_schar* __a__, signed char __m__ )
1812 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
// NOTE(review): failure order __y__ is unused here too (see atomic_bool).
1814 inline bool atomic_compare_exchange_weak_explicit
1815 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__,
1816 memory_order __x__, memory_order __y__ )
1817 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1819 inline bool atomic_compare_exchange_strong_explicit
1820 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__,
1821 memory_order __x__, memory_order __y__ )
1822 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1824 inline bool atomic_compare_exchange_weak
1825 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__ )
1826 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1827 memory_order_seq_cst, memory_order_seq_cst ); }
1829 inline bool atomic_compare_exchange_strong
1830 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__ )
1831 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1832 memory_order_seq_cst, memory_order_seq_cst ); }
1834 inline void atomic_fence
1835 ( const volatile atomic_schar* __a__, memory_order __x__ )
1836 { _ATOMIC_FENCE_( __a__, __x__ ); }
// ---- atomic_uchar: load / store / exchange / compare-exchange / fence ----
// Same macro-expansion pattern as the other integral atomic types: *_explicit
// forms use the _ATOMIC_*_ macros, plain forms default to seq_cst.
// NOTE(review): compare_exchange's failure order __y__ is unused.
1839 inline bool atomic_is_lock_free( const volatile atomic_uchar* __a__ )
1842 inline unsigned char atomic_load_explicit
1843 ( volatile atomic_uchar* __a__, memory_order __x__ )
1844 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1846 inline unsigned char atomic_load( volatile atomic_uchar* __a__ )
1847 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1849 inline void atomic_store_explicit
1850 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
1851 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1853 inline void atomic_store
1854 ( volatile atomic_uchar* __a__, unsigned char __m__ )
1855 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1857 inline unsigned char atomic_exchange_explicit
1858 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
1859 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1861 inline unsigned char atomic_exchange
1862 ( volatile atomic_uchar* __a__, unsigned char __m__ )
1863 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1865 inline bool atomic_compare_exchange_weak_explicit
1866 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__,
1867 memory_order __x__, memory_order __y__ )
1868 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1870 inline bool atomic_compare_exchange_strong_explicit
1871 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__,
1872 memory_order __x__, memory_order __y__ )
1873 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1875 inline bool atomic_compare_exchange_weak
1876 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__ )
1877 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1878 memory_order_seq_cst, memory_order_seq_cst ); }
1880 inline bool atomic_compare_exchange_strong
1881 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__ )
1882 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1883 memory_order_seq_cst, memory_order_seq_cst ); }
1885 inline void atomic_fence
1886 ( const volatile atomic_uchar* __a__, memory_order __x__ )
1887 { _ATOMIC_FENCE_( __a__, __x__ ); }
// ---- atomic_short: load / store / exchange / compare-exchange / fence ----
// *_explicit overloads expand the _ATOMIC_*_ macros; plain overloads forward
// with memory_order_seq_cst.
// NOTE(review): compare_exchange's failure order __y__ is unused.
1890 inline bool atomic_is_lock_free( const volatile atomic_short* __a__ )
1893 inline short atomic_load_explicit
1894 ( volatile atomic_short* __a__, memory_order __x__ )
1895 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1897 inline short atomic_load( volatile atomic_short* __a__ )
1898 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1900 inline void atomic_store_explicit
1901 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
1902 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1904 inline void atomic_store
1905 ( volatile atomic_short* __a__, short __m__ )
1906 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1908 inline short atomic_exchange_explicit
1909 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
1910 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1912 inline short atomic_exchange
1913 ( volatile atomic_short* __a__, short __m__ )
1914 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1916 inline bool atomic_compare_exchange_weak_explicit
1917 ( volatile atomic_short* __a__, short* __e__, short __m__,
1918 memory_order __x__, memory_order __y__ )
1919 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1921 inline bool atomic_compare_exchange_strong_explicit
1922 ( volatile atomic_short* __a__, short* __e__, short __m__,
1923 memory_order __x__, memory_order __y__ )
1924 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1926 inline bool atomic_compare_exchange_weak
1927 ( volatile atomic_short* __a__, short* __e__, short __m__ )
1928 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1929 memory_order_seq_cst, memory_order_seq_cst ); }
1931 inline bool atomic_compare_exchange_strong
1932 ( volatile atomic_short* __a__, short* __e__, short __m__ )
1933 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1934 memory_order_seq_cst, memory_order_seq_cst ); }
1936 inline void atomic_fence
1937 ( const volatile atomic_short* __a__, memory_order __x__ )
1938 { _ATOMIC_FENCE_( __a__, __x__ ); }
// ---- atomic_ushort: load / store / exchange / compare-exchange / fence ----
// *_explicit overloads expand the _ATOMIC_*_ macros; plain overloads forward
// with memory_order_seq_cst.
// NOTE(review): compare_exchange's failure order __y__ is unused.
1941 inline bool atomic_is_lock_free( const volatile atomic_ushort* __a__ )
1944 inline unsigned short atomic_load_explicit
1945 ( volatile atomic_ushort* __a__, memory_order __x__ )
1946 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1948 inline unsigned short atomic_load( volatile atomic_ushort* __a__ )
1949 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1951 inline void atomic_store_explicit
1952 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
1953 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1955 inline void atomic_store
1956 ( volatile atomic_ushort* __a__, unsigned short __m__ )
1957 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1959 inline unsigned short atomic_exchange_explicit
1960 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
1961 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1963 inline unsigned short atomic_exchange
1964 ( volatile atomic_ushort* __a__, unsigned short __m__ )
1965 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1967 inline bool atomic_compare_exchange_weak_explicit
1968 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__,
1969 memory_order __x__, memory_order __y__ )
1970 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1972 inline bool atomic_compare_exchange_strong_explicit
1973 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__,
1974 memory_order __x__, memory_order __y__ )
1975 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1977 inline bool atomic_compare_exchange_weak
1978 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__ )
1979 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1980 memory_order_seq_cst, memory_order_seq_cst ); }
1982 inline bool atomic_compare_exchange_strong
1983 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__ )
1984 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1985 memory_order_seq_cst, memory_order_seq_cst ); }
1987 inline void atomic_fence
1988 ( const volatile atomic_ushort* __a__, memory_order __x__ )
1989 { _ATOMIC_FENCE_( __a__, __x__ ); }
// ---- atomic_int: load / store / exchange / compare-exchange / fence ----
// *_explicit overloads expand the _ATOMIC_*_ macros; plain overloads forward
// with memory_order_seq_cst.
// NOTE(review): compare_exchange's failure order __y__ is unused.
1992 inline bool atomic_is_lock_free( const volatile atomic_int* __a__ )
1995 inline int atomic_load_explicit
1996 ( volatile atomic_int* __a__, memory_order __x__ )
1997 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1999 inline int atomic_load( volatile atomic_int* __a__ )
2000 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2002 inline void atomic_store_explicit
2003 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2004 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2006 inline void atomic_store
2007 ( volatile atomic_int* __a__, int __m__ )
2008 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2010 inline int atomic_exchange_explicit
2011 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2012 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2014 inline int atomic_exchange
2015 ( volatile atomic_int* __a__, int __m__ )
2016 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2018 inline bool atomic_compare_exchange_weak_explicit
2019 ( volatile atomic_int* __a__, int* __e__, int __m__,
2020 memory_order __x__, memory_order __y__ )
2021 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2023 inline bool atomic_compare_exchange_strong_explicit
2024 ( volatile atomic_int* __a__, int* __e__, int __m__,
2025 memory_order __x__, memory_order __y__ )
2026 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2028 inline bool atomic_compare_exchange_weak
2029 ( volatile atomic_int* __a__, int* __e__, int __m__ )
2030 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2031 memory_order_seq_cst, memory_order_seq_cst ); }
2033 inline bool atomic_compare_exchange_strong
2034 ( volatile atomic_int* __a__, int* __e__, int __m__ )
2035 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2036 memory_order_seq_cst, memory_order_seq_cst ); }
2038 inline void atomic_fence
2039 ( const volatile atomic_int* __a__, memory_order __x__ )
2040 { _ATOMIC_FENCE_( __a__, __x__ ); }
// ---- atomic_uint: load / store / exchange / compare-exchange / fence ----
// *_explicit overloads expand the _ATOMIC_*_ macros; plain overloads forward
// with memory_order_seq_cst.
// NOTE(review): compare_exchange's failure order __y__ is unused.
2043 inline bool atomic_is_lock_free( const volatile atomic_uint* __a__ )
2046 inline unsigned int atomic_load_explicit
2047 ( volatile atomic_uint* __a__, memory_order __x__ )
2048 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2050 inline unsigned int atomic_load( volatile atomic_uint* __a__ )
2051 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2053 inline void atomic_store_explicit
2054 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2055 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2057 inline void atomic_store
2058 ( volatile atomic_uint* __a__, unsigned int __m__ )
2059 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2061 inline unsigned int atomic_exchange_explicit
2062 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2063 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2065 inline unsigned int atomic_exchange
2066 ( volatile atomic_uint* __a__, unsigned int __m__ )
2067 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2069 inline bool atomic_compare_exchange_weak_explicit
2070 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__,
2071 memory_order __x__, memory_order __y__ )
2072 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2074 inline bool atomic_compare_exchange_strong_explicit
2075 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__,
2076 memory_order __x__, memory_order __y__ )
2077 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2079 inline bool atomic_compare_exchange_weak
2080 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__ )
2081 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2082 memory_order_seq_cst, memory_order_seq_cst ); }
2084 inline bool atomic_compare_exchange_strong
2085 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__ )
2086 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2087 memory_order_seq_cst, memory_order_seq_cst ); }
2089 inline void atomic_fence
2090 ( const volatile atomic_uint* __a__, memory_order __x__ )
2091 { _ATOMIC_FENCE_( __a__, __x__ ); }
// ---- atomic_long: load / store / exchange / compare-exchange / fence ----
// *_explicit overloads expand the _ATOMIC_*_ macros; plain overloads forward
// with memory_order_seq_cst.
// NOTE(review): compare_exchange's failure order __y__ is unused.
2094 inline bool atomic_is_lock_free( const volatile atomic_long* __a__ )
2097 inline long atomic_load_explicit
2098 ( volatile atomic_long* __a__, memory_order __x__ )
2099 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2101 inline long atomic_load( volatile atomic_long* __a__ )
2102 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2104 inline void atomic_store_explicit
2105 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2106 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2108 inline void atomic_store
2109 ( volatile atomic_long* __a__, long __m__ )
2110 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2112 inline long atomic_exchange_explicit
2113 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2114 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2116 inline long atomic_exchange
2117 ( volatile atomic_long* __a__, long __m__ )
2118 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2120 inline bool atomic_compare_exchange_weak_explicit
2121 ( volatile atomic_long* __a__, long* __e__, long __m__,
2122 memory_order __x__, memory_order __y__ )
2123 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2125 inline bool atomic_compare_exchange_strong_explicit
2126 ( volatile atomic_long* __a__, long* __e__, long __m__,
2127 memory_order __x__, memory_order __y__ )
2128 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2130 inline bool atomic_compare_exchange_weak
2131 ( volatile atomic_long* __a__, long* __e__, long __m__ )
2132 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2133 memory_order_seq_cst, memory_order_seq_cst ); }
2135 inline bool atomic_compare_exchange_strong
2136 ( volatile atomic_long* __a__, long* __e__, long __m__ )
2137 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2138 memory_order_seq_cst, memory_order_seq_cst ); }
2140 inline void atomic_fence
2141 ( const volatile atomic_long* __a__, memory_order __x__ )
2142 { _ATOMIC_FENCE_( __a__, __x__ ); }
// ---- atomic_ulong: load / store / exchange / compare-exchange / fence ----
// *_explicit overloads expand the _ATOMIC_*_ macros; plain overloads forward
// with memory_order_seq_cst.
// NOTE(review): compare_exchange's failure order __y__ is unused.
2145 inline bool atomic_is_lock_free( const volatile atomic_ulong* __a__ )
2148 inline unsigned long atomic_load_explicit
2149 ( volatile atomic_ulong* __a__, memory_order __x__ )
2150 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2152 inline unsigned long atomic_load( volatile atomic_ulong* __a__ )
2153 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2155 inline void atomic_store_explicit
2156 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2157 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2159 inline void atomic_store
2160 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2161 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2163 inline unsigned long atomic_exchange_explicit
2164 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2165 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2167 inline unsigned long atomic_exchange
2168 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2169 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2171 inline bool atomic_compare_exchange_weak_explicit
2172 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__,
2173 memory_order __x__, memory_order __y__ )
2174 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2176 inline bool atomic_compare_exchange_strong_explicit
2177 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__,
2178 memory_order __x__, memory_order __y__ )
2179 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2181 inline bool atomic_compare_exchange_weak
2182 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__ )
2183 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2184 memory_order_seq_cst, memory_order_seq_cst ); }
2186 inline bool atomic_compare_exchange_strong
2187 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__ )
2188 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2189 memory_order_seq_cst, memory_order_seq_cst ); }
2191 inline void atomic_fence
2192 ( const volatile atomic_ulong* __a__, memory_order __x__ )
2193 { _ATOMIC_FENCE_( __a__, __x__ ); }
// ---- atomic_llong: load / store / exchange / compare-exchange / fence ----
// *_explicit overloads expand the _ATOMIC_*_ macros; plain overloads forward
// with memory_order_seq_cst.
// NOTE(review): compare_exchange's failure order __y__ is unused.
2196 inline bool atomic_is_lock_free( const volatile atomic_llong* __a__ )
2199 inline long long atomic_load_explicit
2200 ( volatile atomic_llong* __a__, memory_order __x__ )
2201 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2203 inline long long atomic_load( volatile atomic_llong* __a__ )
2204 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2206 inline void atomic_store_explicit
2207 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2208 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2210 inline void atomic_store
2211 ( volatile atomic_llong* __a__, long long __m__ )
2212 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2214 inline long long atomic_exchange_explicit
2215 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2216 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2218 inline long long atomic_exchange
2219 ( volatile atomic_llong* __a__, long long __m__ )
2220 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2222 inline bool atomic_compare_exchange_weak_explicit
2223 ( volatile atomic_llong* __a__, long long* __e__, long long __m__,
2224 memory_order __x__, memory_order __y__ )
2225 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2227 inline bool atomic_compare_exchange_strong_explicit
2228 ( volatile atomic_llong* __a__, long long* __e__, long long __m__,
2229 memory_order __x__, memory_order __y__ )
2230 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2232 inline bool atomic_compare_exchange_weak
2233 ( volatile atomic_llong* __a__, long long* __e__, long long __m__ )
2234 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2235 memory_order_seq_cst, memory_order_seq_cst ); }
2237 inline bool atomic_compare_exchange_strong
2238 ( volatile atomic_llong* __a__, long long* __e__, long long __m__ )
2239 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2240 memory_order_seq_cst, memory_order_seq_cst ); }
2242 inline void atomic_fence
2243 ( const volatile atomic_llong* __a__, memory_order __x__ )
2244 { _ATOMIC_FENCE_( __a__, __x__ ); }
// ---- atomic_ullong: load / store / exchange / compare-exchange / fence ----
// *_explicit overloads expand the _ATOMIC_*_ macros; plain overloads forward
// with memory_order_seq_cst.
// NOTE(review): compare_exchange's failure order __y__ is unused.
2247 inline bool atomic_is_lock_free( const volatile atomic_ullong* __a__ )
2250 inline unsigned long long atomic_load_explicit
2251 ( volatile atomic_ullong* __a__, memory_order __x__ )
2252 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2254 inline unsigned long long atomic_load( volatile atomic_ullong* __a__ )
2255 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2257 inline void atomic_store_explicit
2258 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2259 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2261 inline void atomic_store
2262 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2263 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2265 inline unsigned long long atomic_exchange_explicit
2266 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2267 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2269 inline unsigned long long atomic_exchange
2270 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2271 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2273 inline bool atomic_compare_exchange_weak_explicit
2274 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__,
2275 memory_order __x__, memory_order __y__ )
2276 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2278 inline bool atomic_compare_exchange_strong_explicit
2279 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__,
2280 memory_order __x__, memory_order __y__ )
2281 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2283 inline bool atomic_compare_exchange_weak
2284 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__ )
2285 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2286 memory_order_seq_cst, memory_order_seq_cst ); }
2288 inline bool atomic_compare_exchange_strong
2289 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__ )
2290 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2291 memory_order_seq_cst, memory_order_seq_cst ); }
2293 inline void atomic_fence
2294 ( const volatile atomic_ullong* __a__, memory_order __x__ )
2295 { _ATOMIC_FENCE_( __a__, __x__ ); }
2298 inline bool atomic_is_lock_free( const volatile atomic_wchar_t* __a__ )
2301 inline wchar_t atomic_load_explicit
2302 ( volatile atomic_wchar_t* __a__, memory_order __x__ )
2303 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2305 inline wchar_t atomic_load( volatile atomic_wchar_t* __a__ )
2306 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2308 inline void atomic_store_explicit
2309 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2310 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2312 inline void atomic_store
2313 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2314 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2316 inline wchar_t atomic_exchange_explicit
2317 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2318 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2320 inline wchar_t atomic_exchange
2321 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2322 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2324 inline bool atomic_compare_exchange_weak_explicit
2325 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__,
2326 memory_order __x__, memory_order __y__ )
2327 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2329 inline bool atomic_compare_exchange_strong_explicit
2330 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__,
2331 memory_order __x__, memory_order __y__ )
2332 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2334 inline bool atomic_compare_exchange_weak
2335 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__ )
2336 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2337 memory_order_seq_cst, memory_order_seq_cst ); }
2339 inline bool atomic_compare_exchange_strong
2340 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__ )
2341 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2342 memory_order_seq_cst, memory_order_seq_cst ); }
2344 inline void atomic_fence
2345 ( const volatile atomic_wchar_t* __a__, memory_order __x__ )
2346 { _ATOMIC_FENCE_( __a__, __x__ ); }
// ---- atomic_address: fetch_add (advance the stored pointer by __m__ bytes) --
// Open-coded instead of using _ATOMIC_MODIFY_: a modeled RMW-read
// (model_rmwr_action) followed by a modeled RMW-write of the advanced pointer.
// The '{', return, and '}' lines of the explicit form are elided from this
// listing -- presumably it returns __r__ (the pre-add value); verify in the
// full source.
// NOTE(review): the written value is computed from a fresh plain read
// (*__p__) rather than from the modeled read result __r__.  If the model can
// report a value that differs from the raw memory cell, this should
// presumably be ((char*)__r__ + __m__) -- TODO confirm against
// _ATOMIC_MODIFY_, which (per the header commentary) modifies the value it
// read through the model.
2349 inline void* atomic_fetch_add_explicit
2350 ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
2352 void* volatile* __p__ = &((__a__)->__f__);
2353 void* __r__ = (void *) model_rmwr_action((void *)__p__, __x__);
2354 model_rmw_action((void *)__p__, __x__, (uint64_t) ((char*)(*__p__) + __m__));
2357 inline void* atomic_fetch_add
2358 ( volatile atomic_address* __a__, ptrdiff_t __m__ )
2359 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
// ---- atomic_address: fetch_sub (retreat the stored pointer by __m__ bytes) --
// Mirror of the address fetch_add: modeled RMW-read then modeled RMW-write of
// the decremented pointer.  The '{', return, and '}' lines of the explicit
// form are elided from this listing.
// NOTE(review): same concern as fetch_add -- the written value re-reads
// (*__p__) instead of using the modeled read result __r__; presumably should
// be ((char*)__r__ - __m__).  TODO confirm.
2362 inline void* atomic_fetch_sub_explicit
2363 ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
2365 void* volatile* __p__ = &((__a__)->__f__);
2366 void* __r__ = (void *) model_rmwr_action((void *)__p__, __x__);
2367 model_rmw_action((void *)__p__, __x__, (uint64_t)((char*)(*__p__) - __m__));
2370 inline void* atomic_fetch_sub
2371 ( volatile atomic_address* __a__, ptrdiff_t __m__ )
2372 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
// ---- atomic_char: fetch_add / fetch_sub / fetch_and / fetch_or / fetch_xor --
// Each fetch_OP_explicit expands _ATOMIC_MODIFY_ with the matching
// compound-assignment operator (+=, -=, &=, |=, ^=) and returns the value
// the macro read before the modification; plain overloads forward with
// memory_order_seq_cst.
2374 inline char atomic_fetch_add_explicit
2375 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2376 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2378 inline char atomic_fetch_add
2379 ( volatile atomic_char* __a__, char __m__ )
2380 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2383 inline char atomic_fetch_sub_explicit
2384 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2385 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2387 inline char atomic_fetch_sub
2388 ( volatile atomic_char* __a__, char __m__ )
2389 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2392 inline char atomic_fetch_and_explicit
2393 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2394 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2396 inline char atomic_fetch_and
2397 ( volatile atomic_char* __a__, char __m__ )
2398 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2401 inline char atomic_fetch_or_explicit
2402 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2403 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2405 inline char atomic_fetch_or
2406 ( volatile atomic_char* __a__, char __m__ )
2407 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2410 inline char atomic_fetch_xor_explicit
2411 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2412 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2414 inline char atomic_fetch_xor
2415 ( volatile atomic_char* __a__, char __m__ )
2416 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
// ---- atomic_schar: fetch_add / fetch_sub / fetch_and / fetch_or / fetch_xor -
// fetch_OP_explicit expands _ATOMIC_MODIFY_ with the matching
// compound-assignment operator; plain overloads forward with
// memory_order_seq_cst.
2419 inline signed char atomic_fetch_add_explicit
2420 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2421 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2423 inline signed char atomic_fetch_add
2424 ( volatile atomic_schar* __a__, signed char __m__ )
2425 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2428 inline signed char atomic_fetch_sub_explicit
2429 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2430 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2432 inline signed char atomic_fetch_sub
2433 ( volatile atomic_schar* __a__, signed char __m__ )
2434 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2437 inline signed char atomic_fetch_and_explicit
2438 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2439 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2441 inline signed char atomic_fetch_and
2442 ( volatile atomic_schar* __a__, signed char __m__ )
2443 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2446 inline signed char atomic_fetch_or_explicit
2447 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2448 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2450 inline signed char atomic_fetch_or
2451 ( volatile atomic_schar* __a__, signed char __m__ )
2452 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2455 inline signed char atomic_fetch_xor_explicit
2456 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2457 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2459 inline signed char atomic_fetch_xor
2460 ( volatile atomic_schar* __a__, signed char __m__ )
2461 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
// ---- atomic_uchar: fetch_add / fetch_sub / fetch_and / fetch_or / fetch_xor -
// fetch_OP_explicit expands _ATOMIC_MODIFY_ with the matching
// compound-assignment operator; plain overloads forward with
// memory_order_seq_cst.
2464 inline unsigned char atomic_fetch_add_explicit
2465 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2466 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2468 inline unsigned char atomic_fetch_add
2469 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2470 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2473 inline unsigned char atomic_fetch_sub_explicit
2474 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2475 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2477 inline unsigned char atomic_fetch_sub
2478 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2479 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2482 inline unsigned char atomic_fetch_and_explicit
2483 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2484 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2486 inline unsigned char atomic_fetch_and
2487 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2488 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2491 inline unsigned char atomic_fetch_or_explicit
2492 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2493 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2495 inline unsigned char atomic_fetch_or
2496 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2497 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2500 inline unsigned char atomic_fetch_xor_explicit
2501 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2502 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2504 inline unsigned char atomic_fetch_xor
2505 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2506 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
// ---- atomic_short: fetch_add / fetch_sub / fetch_and / fetch_or / fetch_xor -
// fetch_OP_explicit expands _ATOMIC_MODIFY_ with the matching
// compound-assignment operator; plain overloads forward with
// memory_order_seq_cst.
2509 inline short atomic_fetch_add_explicit
2510 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2511 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2513 inline short atomic_fetch_add
2514 ( volatile atomic_short* __a__, short __m__ )
2515 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2518 inline short atomic_fetch_sub_explicit
2519 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2520 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2522 inline short atomic_fetch_sub
2523 ( volatile atomic_short* __a__, short __m__ )
2524 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2527 inline short atomic_fetch_and_explicit
2528 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2529 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2531 inline short atomic_fetch_and
2532 ( volatile atomic_short* __a__, short __m__ )
2533 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2536 inline short atomic_fetch_or_explicit
2537 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2538 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2540 inline short atomic_fetch_or
2541 ( volatile atomic_short* __a__, short __m__ )
2542 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2545 inline short atomic_fetch_xor_explicit
2546 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2547 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2549 inline short atomic_fetch_xor
2550 ( volatile atomic_short* __a__, short __m__ )
2551 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
// ---- atomic_ushort: fetch_add / fetch_sub / fetch_and / fetch_or / fetch_xor
// fetch_OP_explicit expands _ATOMIC_MODIFY_ with the matching
// compound-assignment operator; plain overloads forward with
// memory_order_seq_cst.
2554 inline unsigned short atomic_fetch_add_explicit
2555 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2556 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2558 inline unsigned short atomic_fetch_add
2559 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2560 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2563 inline unsigned short atomic_fetch_sub_explicit
2564 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2565 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2567 inline unsigned short atomic_fetch_sub
2568 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2569 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2572 inline unsigned short atomic_fetch_and_explicit
2573 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2574 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2576 inline unsigned short atomic_fetch_and
2577 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2578 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2581 inline unsigned short atomic_fetch_or_explicit
2582 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2583 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2585 inline unsigned short atomic_fetch_or
2586 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2587 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2590 inline unsigned short atomic_fetch_xor_explicit
2591 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2592 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2594 inline unsigned short atomic_fetch_xor
2595 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2596 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
// --- atomic_int fetch-and-op free functions. ---
// *_explicit forms expand _ATOMIC_MODIFY_ with the given operator and
// memory_order; the plain forms default to memory_order_seq_cst.
2599 inline int atomic_fetch_add_explicit
2600 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2601 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2603 inline int atomic_fetch_add
2604 ( volatile atomic_int* __a__, int __m__ )
2605 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2608 inline int atomic_fetch_sub_explicit
2609 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2610 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2612 inline int atomic_fetch_sub
2613 ( volatile atomic_int* __a__, int __m__ )
2614 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2617 inline int atomic_fetch_and_explicit
2618 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2619 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2621 inline int atomic_fetch_and
2622 ( volatile atomic_int* __a__, int __m__ )
2623 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2626 inline int atomic_fetch_or_explicit
2627 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2628 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2630 inline int atomic_fetch_or
2631 ( volatile atomic_int* __a__, int __m__ )
2632 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2635 inline int atomic_fetch_xor_explicit
2636 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2637 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2639 inline int atomic_fetch_xor
2640 ( volatile atomic_int* __a__, int __m__ )
2641 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
// --- atomic_uint fetch-and-op free functions. ---
// *_explicit forms expand _ATOMIC_MODIFY_ with the given operator and
// memory_order; the plain forms default to memory_order_seq_cst.
2644 inline unsigned int atomic_fetch_add_explicit
2645 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2646 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2648 inline unsigned int atomic_fetch_add
2649 ( volatile atomic_uint* __a__, unsigned int __m__ )
2650 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2653 inline unsigned int atomic_fetch_sub_explicit
2654 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2655 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2657 inline unsigned int atomic_fetch_sub
2658 ( volatile atomic_uint* __a__, unsigned int __m__ )
2659 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2662 inline unsigned int atomic_fetch_and_explicit
2663 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2664 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2666 inline unsigned int atomic_fetch_and
2667 ( volatile atomic_uint* __a__, unsigned int __m__ )
2668 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2671 inline unsigned int atomic_fetch_or_explicit
2672 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2673 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2675 inline unsigned int atomic_fetch_or
2676 ( volatile atomic_uint* __a__, unsigned int __m__ )
2677 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2680 inline unsigned int atomic_fetch_xor_explicit
2681 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2682 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2684 inline unsigned int atomic_fetch_xor
2685 ( volatile atomic_uint* __a__, unsigned int __m__ )
2686 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
// --- atomic_long fetch-and-op free functions. ---
// *_explicit forms expand _ATOMIC_MODIFY_ with the given operator and
// memory_order; the plain forms default to memory_order_seq_cst.
2689 inline long atomic_fetch_add_explicit
2690 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2691 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2693 inline long atomic_fetch_add
2694 ( volatile atomic_long* __a__, long __m__ )
2695 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2698 inline long atomic_fetch_sub_explicit
2699 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2700 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2702 inline long atomic_fetch_sub
2703 ( volatile atomic_long* __a__, long __m__ )
2704 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2707 inline long atomic_fetch_and_explicit
2708 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2709 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2711 inline long atomic_fetch_and
2712 ( volatile atomic_long* __a__, long __m__ )
2713 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2716 inline long atomic_fetch_or_explicit
2717 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2718 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2720 inline long atomic_fetch_or
2721 ( volatile atomic_long* __a__, long __m__ )
2722 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2725 inline long atomic_fetch_xor_explicit
2726 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2727 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2729 inline long atomic_fetch_xor
2730 ( volatile atomic_long* __a__, long __m__ )
2731 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
// --- atomic_ulong fetch-and-op free functions. ---
// *_explicit forms expand _ATOMIC_MODIFY_ with the given operator and
// memory_order; the plain forms default to memory_order_seq_cst.
2734 inline unsigned long atomic_fetch_add_explicit
2735 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2736 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2738 inline unsigned long atomic_fetch_add
2739 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2740 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2743 inline unsigned long atomic_fetch_sub_explicit
2744 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2745 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2747 inline unsigned long atomic_fetch_sub
2748 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2749 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2752 inline unsigned long atomic_fetch_and_explicit
2753 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2754 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2756 inline unsigned long atomic_fetch_and
2757 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2758 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2761 inline unsigned long atomic_fetch_or_explicit
2762 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2763 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2765 inline unsigned long atomic_fetch_or
2766 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2767 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2770 inline unsigned long atomic_fetch_xor_explicit
2771 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2772 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2774 inline unsigned long atomic_fetch_xor
2775 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2776 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
// --- atomic_llong fetch-and-op free functions. ---
// *_explicit forms expand _ATOMIC_MODIFY_ with the given operator and
// memory_order; the plain forms default to memory_order_seq_cst.
2779 inline long long atomic_fetch_add_explicit
2780 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2781 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2783 inline long long atomic_fetch_add
2784 ( volatile atomic_llong* __a__, long long __m__ )
2785 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2788 inline long long atomic_fetch_sub_explicit
2789 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2790 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2792 inline long long atomic_fetch_sub
2793 ( volatile atomic_llong* __a__, long long __m__ )
2794 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2797 inline long long atomic_fetch_and_explicit
2798 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2799 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2801 inline long long atomic_fetch_and
2802 ( volatile atomic_llong* __a__, long long __m__ )
2803 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2806 inline long long atomic_fetch_or_explicit
2807 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2808 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2810 inline long long atomic_fetch_or
2811 ( volatile atomic_llong* __a__, long long __m__ )
2812 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2815 inline long long atomic_fetch_xor_explicit
2816 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2817 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2819 inline long long atomic_fetch_xor
2820 ( volatile atomic_llong* __a__, long long __m__ )
2821 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
// --- atomic_ullong fetch-and-op free functions. ---
// *_explicit forms expand _ATOMIC_MODIFY_ with the given operator and
// memory_order; the plain forms default to memory_order_seq_cst.
2824 inline unsigned long long atomic_fetch_add_explicit
2825 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2826 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2828 inline unsigned long long atomic_fetch_add
2829 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2830 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2833 inline unsigned long long atomic_fetch_sub_explicit
2834 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2835 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2837 inline unsigned long long atomic_fetch_sub
2838 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2839 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2842 inline unsigned long long atomic_fetch_and_explicit
2843 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2844 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2846 inline unsigned long long atomic_fetch_and
2847 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2848 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2851 inline unsigned long long atomic_fetch_or_explicit
2852 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2853 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2855 inline unsigned long long atomic_fetch_or
2856 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2857 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2860 inline unsigned long long atomic_fetch_xor_explicit
2861 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2862 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2864 inline unsigned long long atomic_fetch_xor
2865 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2866 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
// --- atomic_wchar_t fetch-and-op free functions. ---
// *_explicit forms expand _ATOMIC_MODIFY_ with the given operator and
// memory_order; the plain forms default to memory_order_seq_cst.
2869 inline wchar_t atomic_fetch_add_explicit
2870 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2871 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2873 inline wchar_t atomic_fetch_add
2874 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2875 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2878 inline wchar_t atomic_fetch_sub_explicit
2879 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2880 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2882 inline wchar_t atomic_fetch_sub
2883 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2884 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2887 inline wchar_t atomic_fetch_and_explicit
2888 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2889 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2891 inline wchar_t atomic_fetch_and
2892 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2893 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2896 inline wchar_t atomic_fetch_or_explicit
2897 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2898 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2900 inline wchar_t atomic_fetch_or
2901 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2902 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2905 inline wchar_t atomic_fetch_xor_explicit
2906 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2907 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2909 inline wchar_t atomic_fetch_xor
2910 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2911 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2917 #define atomic_is_lock_free( __a__ ) \
// --- Generic, type-agnostic operation macros. ---
// These dispatch to the underlying _ATOMIC_*_ implementation macros, which
// are GNU statement expressions (see the notes earlier in the file), so each
// argument is captured once into a local.  atomic_exchange is expressed as
// _ATOMIC_MODIFY_ with plain assignment (=).  The non-_explicit forms pin
// the ordering to memory_order_seq_cst.
2920 #define atomic_load( __a__ ) \
2921 _ATOMIC_LOAD_( __a__, memory_order_seq_cst )
2923 #define atomic_load_explicit( __a__, __x__ ) \
2924 _ATOMIC_LOAD_( __a__, __x__ )
2926 #define atomic_init( __a__, __m__ ) \
2927 _ATOMIC_INIT_( __a__, __m__ )
2929 #define atomic_store( __a__, __m__ ) \
2930 _ATOMIC_STORE_( __a__, __m__, memory_order_seq_cst )
2932 #define atomic_store_explicit( __a__, __m__, __x__ ) \
2933 _ATOMIC_STORE_( __a__, __m__, __x__ )
2935 #define atomic_exchange( __a__, __m__ ) \
2936 _ATOMIC_MODIFY_( __a__, =, __m__, memory_order_seq_cst )
2938 #define atomic_exchange_explicit( __a__, __m__, __x__ ) \
2939 _ATOMIC_MODIFY_( __a__, =, __m__, __x__ )
2941 #define atomic_compare_exchange_weak( __a__, __e__, __m__ ) \
2942 _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, memory_order_seq_cst )
2944 #define atomic_compare_exchange_strong( __a__, __e__, __m__ ) \
2945 _ATOMIC_CMPSWP_( __a__, __e__, __m__, memory_order_seq_cst )
// NOTE(review): the *_explicit compare-exchange macros accept a failure
// ordering __y__ but do not forward it -- only the success ordering __x__
// reaches _ATOMIC_CMPSWP_(_WEAK_).  Confirm this simplification is
// intentional in the underlying model.
2947 #define atomic_compare_exchange_weak_explicit( __a__, __e__, __m__, __x__, __y__ ) \
2948 _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ )
2950 #define atomic_compare_exchange_strong_explicit( __a__, __e__, __m__, __x__, __y__ ) \
2951 _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ )
// atomic_fence wraps the fence in a ({ ... }) statement expression so the
// invocation is an expression usable in return statements.
2953 #define atomic_fence( __a__, __x__ ) \
2954 ({ _ATOMIC_FENCE_( __a__, __x__ ); })
// Fetch-and-op macro forms (used by the non-member C-style interface); the
// plain forms pin memory_order_seq_cst.
2957 #define atomic_fetch_add_explicit( __a__, __m__, __x__ ) \
2958 _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ )
2960 #define atomic_fetch_add( __a__, __m__ ) \
2961 _ATOMIC_MODIFY_( __a__, +=, __m__, memory_order_seq_cst )
2964 #define atomic_fetch_sub_explicit( __a__, __m__, __x__ ) \
2965 _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ )
2967 #define atomic_fetch_sub( __a__, __m__ ) \
2968 _ATOMIC_MODIFY_( __a__, -=, __m__, memory_order_seq_cst )
2971 #define atomic_fetch_and_explicit( __a__, __m__, __x__ ) \
2972 _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ )
2974 #define atomic_fetch_and( __a__, __m__ ) \
2975 _ATOMIC_MODIFY_( __a__, &=, __m__, memory_order_seq_cst )
2978 #define atomic_fetch_or_explicit( __a__, __m__, __x__ ) \
2979 _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ )
2981 #define atomic_fetch_or( __a__, __m__ ) \
2982 _ATOMIC_MODIFY_( __a__, |=, __m__, memory_order_seq_cst )
2985 #define atomic_fetch_xor_explicit( __a__, __m__, __x__ ) \
2986 _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ )
2988 #define atomic_fetch_xor( __a__, __m__ ) \
2989 _ATOMIC_MODIFY_( __a__, ^=, __m__, memory_order_seq_cst )
2998 inline bool atomic_bool::is_lock_free() const volatile
// --- atomic_bool member definitions: thin wrappers over the generic
// atomic_* macros, passing `this` as the atomic object.  The
// compare_exchange overloads taking a single memory_order derive the
// failure ordering from the success ordering (acq_rel -> acquire,
// release -> relaxed, otherwise the same order).  fence() returns a void
// expression, which is legal for a void function. ---
3001 inline void atomic_bool::store
3002 ( bool __m__, memory_order __x__ ) volatile
3003 { atomic_store_explicit( this, __m__, __x__ ); }
3005 inline bool atomic_bool::load
3006 ( memory_order __x__ ) volatile
3007 { return atomic_load_explicit( this, __x__ ); }
3009 inline bool atomic_bool::exchange
3010 ( bool __m__, memory_order __x__ ) volatile
3011 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3013 inline bool atomic_bool::compare_exchange_weak
3014 ( bool& __e__, bool __m__,
3015 memory_order __x__, memory_order __y__ ) volatile
3016 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3018 inline bool atomic_bool::compare_exchange_strong
3019 ( bool& __e__, bool __m__,
3020 memory_order __x__, memory_order __y__ ) volatile
3021 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3023 inline bool atomic_bool::compare_exchange_weak
3024 ( bool& __e__, bool __m__, memory_order __x__ ) volatile
3025 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3026 __x__ == memory_order_acq_rel ? memory_order_acquire :
3027 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3029 inline bool atomic_bool::compare_exchange_strong
3030 ( bool& __e__, bool __m__, memory_order __x__ ) volatile
3031 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3032 __x__ == memory_order_acq_rel ? memory_order_acquire :
3033 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3035 inline void atomic_bool::fence
3036 ( memory_order __x__ ) const volatile
3037 { return atomic_fence( this, __x__ ); }
3040 inline bool atomic_address::is_lock_free() const volatile
// --- atomic_address member definitions (value type: void*): thin wrappers
// over the generic atomic_* macros, passing `this`.  The single-order
// compare_exchange overloads derive the failure ordering from the success
// ordering (acq_rel -> acquire, release -> relaxed, else unchanged). ---
3043 inline void atomic_address::store
3044 ( void* __m__, memory_order __x__ ) volatile
3045 { atomic_store_explicit( this, __m__, __x__ ); }
3047 inline void* atomic_address::load
3048 ( memory_order __x__ ) volatile
3049 { return atomic_load_explicit( this, __x__ ); }
3051 inline void* atomic_address::exchange
3052 ( void* __m__, memory_order __x__ ) volatile
3053 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3055 inline bool atomic_address::compare_exchange_weak
3056 ( void*& __e__, void* __m__,
3057 memory_order __x__, memory_order __y__ ) volatile
3058 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3060 inline bool atomic_address::compare_exchange_strong
3061 ( void*& __e__, void* __m__,
3062 memory_order __x__, memory_order __y__ ) volatile
3063 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3065 inline bool atomic_address::compare_exchange_weak
3066 ( void*& __e__, void* __m__, memory_order __x__ ) volatile
3067 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3068 __x__ == memory_order_acq_rel ? memory_order_acquire :
3069 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3071 inline bool atomic_address::compare_exchange_strong
3072 ( void*& __e__, void* __m__, memory_order __x__ ) volatile
3073 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3074 __x__ == memory_order_acq_rel ? memory_order_acquire :
3075 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3077 inline void atomic_address::fence
3078 ( memory_order __x__ ) const volatile
3079 { return atomic_fence( this, __x__ ); }
3082 inline bool atomic_char::is_lock_free() const volatile
// --- atomic_char member definitions: thin wrappers over the generic
// atomic_* macros, passing `this`.  The single-order compare_exchange
// overloads derive the failure ordering from the success ordering
// (acq_rel -> acquire, release -> relaxed, else unchanged). ---
3085 inline void atomic_char::store
3086 ( char __m__, memory_order __x__ ) volatile
3087 { atomic_store_explicit( this, __m__, __x__ ); }
3089 inline char atomic_char::load
3090 ( memory_order __x__ ) volatile
3091 { return atomic_load_explicit( this, __x__ ); }
3093 inline char atomic_char::exchange
3094 ( char __m__, memory_order __x__ ) volatile
3095 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3097 inline bool atomic_char::compare_exchange_weak
3098 ( char& __e__, char __m__,
3099 memory_order __x__, memory_order __y__ ) volatile
3100 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3102 inline bool atomic_char::compare_exchange_strong
3103 ( char& __e__, char __m__,
3104 memory_order __x__, memory_order __y__ ) volatile
3105 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3107 inline bool atomic_char::compare_exchange_weak
3108 ( char& __e__, char __m__, memory_order __x__ ) volatile
3109 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3110 __x__ == memory_order_acq_rel ? memory_order_acquire :
3111 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3113 inline bool atomic_char::compare_exchange_strong
3114 ( char& __e__, char __m__, memory_order __x__ ) volatile
3115 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3116 __x__ == memory_order_acq_rel ? memory_order_acquire :
3117 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3119 inline void atomic_char::fence
3120 ( memory_order __x__ ) const volatile
3121 { return atomic_fence( this, __x__ ); }
3124 inline bool atomic_schar::is_lock_free() const volatile
// --- atomic_schar member definitions: thin wrappers over the generic
// atomic_* macros, passing `this`.  The single-order compare_exchange
// overloads derive the failure ordering from the success ordering
// (acq_rel -> acquire, release -> relaxed, else unchanged). ---
3127 inline void atomic_schar::store
3128 ( signed char __m__, memory_order __x__ ) volatile
3129 { atomic_store_explicit( this, __m__, __x__ ); }
3131 inline signed char atomic_schar::load
3132 ( memory_order __x__ ) volatile
3133 { return atomic_load_explicit( this, __x__ ); }
3135 inline signed char atomic_schar::exchange
3136 ( signed char __m__, memory_order __x__ ) volatile
3137 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3139 inline bool atomic_schar::compare_exchange_weak
3140 ( signed char& __e__, signed char __m__,
3141 memory_order __x__, memory_order __y__ ) volatile
3142 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3144 inline bool atomic_schar::compare_exchange_strong
3145 ( signed char& __e__, signed char __m__,
3146 memory_order __x__, memory_order __y__ ) volatile
3147 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3149 inline bool atomic_schar::compare_exchange_weak
3150 ( signed char& __e__, signed char __m__, memory_order __x__ ) volatile
3151 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3152 __x__ == memory_order_acq_rel ? memory_order_acquire :
3153 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3155 inline bool atomic_schar::compare_exchange_strong
3156 ( signed char& __e__, signed char __m__, memory_order __x__ ) volatile
3157 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3158 __x__ == memory_order_acq_rel ? memory_order_acquire :
3159 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3161 inline void atomic_schar::fence
3162 ( memory_order __x__ ) const volatile
3163 { return atomic_fence( this, __x__ ); }
3166 inline bool atomic_uchar::is_lock_free() const volatile
// --- atomic_uchar member definitions: thin wrappers over the generic
// atomic_* macros, passing `this`.  The single-order compare_exchange
// overloads derive the failure ordering from the success ordering
// (acq_rel -> acquire, release -> relaxed, else unchanged). ---
3169 inline void atomic_uchar::store
3170 ( unsigned char __m__, memory_order __x__ ) volatile
3171 { atomic_store_explicit( this, __m__, __x__ ); }
3173 inline unsigned char atomic_uchar::load
3174 ( memory_order __x__ ) volatile
3175 { return atomic_load_explicit( this, __x__ ); }
3177 inline unsigned char atomic_uchar::exchange
3178 ( unsigned char __m__, memory_order __x__ ) volatile
3179 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3181 inline bool atomic_uchar::compare_exchange_weak
3182 ( unsigned char& __e__, unsigned char __m__,
3183 memory_order __x__, memory_order __y__ ) volatile
3184 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3186 inline bool atomic_uchar::compare_exchange_strong
3187 ( unsigned char& __e__, unsigned char __m__,
3188 memory_order __x__, memory_order __y__ ) volatile
3189 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3191 inline bool atomic_uchar::compare_exchange_weak
3192 ( unsigned char& __e__, unsigned char __m__, memory_order __x__ ) volatile
3193 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3194 __x__ == memory_order_acq_rel ? memory_order_acquire :
3195 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3197 inline bool atomic_uchar::compare_exchange_strong
3198 ( unsigned char& __e__, unsigned char __m__, memory_order __x__ ) volatile
3199 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3200 __x__ == memory_order_acq_rel ? memory_order_acquire :
3201 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3203 inline void atomic_uchar::fence
3204 ( memory_order __x__ ) const volatile
3205 { return atomic_fence( this, __x__ ); }
3208 inline bool atomic_short::is_lock_free() const volatile
// --- atomic_short member definitions: thin wrappers over the generic
// atomic_* macros, passing `this`.  The single-order compare_exchange
// overloads derive the failure ordering from the success ordering
// (acq_rel -> acquire, release -> relaxed, else unchanged). ---
3211 inline void atomic_short::store
3212 ( short __m__, memory_order __x__ ) volatile
3213 { atomic_store_explicit( this, __m__, __x__ ); }
3215 inline short atomic_short::load
3216 ( memory_order __x__ ) volatile
3217 { return atomic_load_explicit( this, __x__ ); }
3219 inline short atomic_short::exchange
3220 ( short __m__, memory_order __x__ ) volatile
3221 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3223 inline bool atomic_short::compare_exchange_weak
3224 ( short& __e__, short __m__,
3225 memory_order __x__, memory_order __y__ ) volatile
3226 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3228 inline bool atomic_short::compare_exchange_strong
3229 ( short& __e__, short __m__,
3230 memory_order __x__, memory_order __y__ ) volatile
3231 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3233 inline bool atomic_short::compare_exchange_weak
3234 ( short& __e__, short __m__, memory_order __x__ ) volatile
3235 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3236 __x__ == memory_order_acq_rel ? memory_order_acquire :
3237 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3239 inline bool atomic_short::compare_exchange_strong
3240 ( short& __e__, short __m__, memory_order __x__ ) volatile
3241 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3242 __x__ == memory_order_acq_rel ? memory_order_acquire :
3243 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3245 inline void atomic_short::fence
3246 ( memory_order __x__ ) const volatile
3247 { return atomic_fence( this, __x__ ); }
3250 inline bool atomic_ushort::is_lock_free() const volatile
// --- atomic_ushort member definitions: thin wrappers over the generic
// atomic_* macros, passing `this`.  The single-order compare_exchange
// overloads derive the failure ordering from the success ordering
// (acq_rel -> acquire, release -> relaxed, else unchanged). ---
3253 inline void atomic_ushort::store
3254 ( unsigned short __m__, memory_order __x__ ) volatile
3255 { atomic_store_explicit( this, __m__, __x__ ); }
3257 inline unsigned short atomic_ushort::load
3258 ( memory_order __x__ ) volatile
3259 { return atomic_load_explicit( this, __x__ ); }
3261 inline unsigned short atomic_ushort::exchange
3262 ( unsigned short __m__, memory_order __x__ ) volatile
3263 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3265 inline bool atomic_ushort::compare_exchange_weak
3266 ( unsigned short& __e__, unsigned short __m__,
3267 memory_order __x__, memory_order __y__ ) volatile
3268 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3270 inline bool atomic_ushort::compare_exchange_strong
3271 ( unsigned short& __e__, unsigned short __m__,
3272 memory_order __x__, memory_order __y__ ) volatile
3273 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3275 inline bool atomic_ushort::compare_exchange_weak
3276 ( unsigned short& __e__, unsigned short __m__, memory_order __x__ ) volatile
3277 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3278 __x__ == memory_order_acq_rel ? memory_order_acquire :
3279 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3281 inline bool atomic_ushort::compare_exchange_strong
3282 ( unsigned short& __e__, unsigned short __m__, memory_order __x__ ) volatile
3283 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3284 __x__ == memory_order_acq_rel ? memory_order_acquire :
3285 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3287 inline void atomic_ushort::fence
3288 ( memory_order __x__ ) const volatile
3289 { return atomic_fence( this, __x__ ); }
3292 inline bool atomic_int::is_lock_free() const volatile
// --- atomic_int member definitions: thin wrappers over the generic
// atomic_* macros, passing `this`.  The single-order compare_exchange
// overloads derive the failure ordering from the success ordering
// (acq_rel -> acquire, release -> relaxed, else unchanged). ---
3295 inline void atomic_int::store
3296 ( int __m__, memory_order __x__ ) volatile
3297 { atomic_store_explicit( this, __m__, __x__ ); }
3299 inline int atomic_int::load
3300 ( memory_order __x__ ) volatile
3301 { return atomic_load_explicit( this, __x__ ); }
3303 inline int atomic_int::exchange
3304 ( int __m__, memory_order __x__ ) volatile
3305 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3307 inline bool atomic_int::compare_exchange_weak
3308 ( int& __e__, int __m__,
3309 memory_order __x__, memory_order __y__ ) volatile
3310 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3312 inline bool atomic_int::compare_exchange_strong
3313 ( int& __e__, int __m__,
3314 memory_order __x__, memory_order __y__ ) volatile
3315 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3317 inline bool atomic_int::compare_exchange_weak
3318 ( int& __e__, int __m__, memory_order __x__ ) volatile
3319 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3320 __x__ == memory_order_acq_rel ? memory_order_acquire :
3321 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3323 inline bool atomic_int::compare_exchange_strong
3324 ( int& __e__, int __m__, memory_order __x__ ) volatile
3325 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3326 __x__ == memory_order_acq_rel ? memory_order_acquire :
3327 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3329 inline void atomic_int::fence
3330 ( memory_order __x__ ) const volatile
3331 { return atomic_fence( this, __x__ ); }
3334 inline bool atomic_uint::is_lock_free() const volatile
// --- atomic_uint member definitions: thin wrappers over the generic
// atomic_* macros, passing `this`.  The single-order compare_exchange
// overloads derive the failure ordering from the success ordering
// (acq_rel -> acquire, release -> relaxed, else unchanged). ---
3337 inline void atomic_uint::store
3338 ( unsigned int __m__, memory_order __x__ ) volatile
3339 { atomic_store_explicit( this, __m__, __x__ ); }
3341 inline unsigned int atomic_uint::load
3342 ( memory_order __x__ ) volatile
3343 { return atomic_load_explicit( this, __x__ ); }
3345 inline unsigned int atomic_uint::exchange
3346 ( unsigned int __m__, memory_order __x__ ) volatile
3347 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3349 inline bool atomic_uint::compare_exchange_weak
3350 ( unsigned int& __e__, unsigned int __m__,
3351 memory_order __x__, memory_order __y__ ) volatile
3352 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3354 inline bool atomic_uint::compare_exchange_strong
3355 ( unsigned int& __e__, unsigned int __m__,
3356 memory_order __x__, memory_order __y__ ) volatile
3357 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3359 inline bool atomic_uint::compare_exchange_weak
3360 ( unsigned int& __e__, unsigned int __m__, memory_order __x__ ) volatile
3361 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3362 __x__ == memory_order_acq_rel ? memory_order_acquire :
3363 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3365 inline bool atomic_uint::compare_exchange_strong
3366 ( unsigned int& __e__, unsigned int __m__, memory_order __x__ ) volatile
3367 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3368 __x__ == memory_order_acq_rel ? memory_order_acquire :
3369 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3371 inline void atomic_uint::fence
3372 ( memory_order __x__ ) const volatile
3373 { return atomic_fence( this, __x__ ); }
3376 inline bool atomic_long::is_lock_free() const volatile
3379 inline void atomic_long::store
3380 ( long __m__, memory_order __x__ ) volatile
3381 { atomic_store_explicit( this, __m__, __x__ ); }
3383 inline long atomic_long::load
3384 ( memory_order __x__ ) volatile
3385 { return atomic_load_explicit( this, __x__ ); }
3387 inline long atomic_long::exchange
3388 ( long __m__, memory_order __x__ ) volatile
3389 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3391 inline bool atomic_long::compare_exchange_weak
3392 ( long& __e__, long __m__,
3393 memory_order __x__, memory_order __y__ ) volatile
3394 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3396 inline bool atomic_long::compare_exchange_strong
3397 ( long& __e__, long __m__,
3398 memory_order __x__, memory_order __y__ ) volatile
3399 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3401 inline bool atomic_long::compare_exchange_weak
3402 ( long& __e__, long __m__, memory_order __x__ ) volatile
3403 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3404 __x__ == memory_order_acq_rel ? memory_order_acquire :
3405 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3407 inline bool atomic_long::compare_exchange_strong
3408 ( long& __e__, long __m__, memory_order __x__ ) volatile
3409 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3410 __x__ == memory_order_acq_rel ? memory_order_acquire :
3411 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3413 inline void atomic_long::fence
3414 ( memory_order __x__ ) const volatile
3415 { return atomic_fence( this, __x__ ); }
3418 inline bool atomic_ulong::is_lock_free() const volatile
3421 inline void atomic_ulong::store
3422 ( unsigned long __m__, memory_order __x__ ) volatile
3423 { atomic_store_explicit( this, __m__, __x__ ); }
3425 inline unsigned long atomic_ulong::load
3426 ( memory_order __x__ ) volatile
3427 { return atomic_load_explicit( this, __x__ ); }
3429 inline unsigned long atomic_ulong::exchange
3430 ( unsigned long __m__, memory_order __x__ ) volatile
3431 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3433 inline bool atomic_ulong::compare_exchange_weak
3434 ( unsigned long& __e__, unsigned long __m__,
3435 memory_order __x__, memory_order __y__ ) volatile
3436 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3438 inline bool atomic_ulong::compare_exchange_strong
3439 ( unsigned long& __e__, unsigned long __m__,
3440 memory_order __x__, memory_order __y__ ) volatile
3441 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3443 inline bool atomic_ulong::compare_exchange_weak
3444 ( unsigned long& __e__, unsigned long __m__, memory_order __x__ ) volatile
3445 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3446 __x__ == memory_order_acq_rel ? memory_order_acquire :
3447 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3449 inline bool atomic_ulong::compare_exchange_strong
3450 ( unsigned long& __e__, unsigned long __m__, memory_order __x__ ) volatile
3451 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3452 __x__ == memory_order_acq_rel ? memory_order_acquire :
3453 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3455 inline void atomic_ulong::fence
3456 ( memory_order __x__ ) const volatile
3457 { return atomic_fence( this, __x__ ); }
3460 inline bool atomic_llong::is_lock_free() const volatile
3463 inline void atomic_llong::store
3464 ( long long __m__, memory_order __x__ ) volatile
3465 { atomic_store_explicit( this, __m__, __x__ ); }
3467 inline long long atomic_llong::load
3468 ( memory_order __x__ ) volatile
3469 { return atomic_load_explicit( this, __x__ ); }
3471 inline long long atomic_llong::exchange
3472 ( long long __m__, memory_order __x__ ) volatile
3473 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3475 inline bool atomic_llong::compare_exchange_weak
3476 ( long long& __e__, long long __m__,
3477 memory_order __x__, memory_order __y__ ) volatile
3478 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3480 inline bool atomic_llong::compare_exchange_strong
3481 ( long long& __e__, long long __m__,
3482 memory_order __x__, memory_order __y__ ) volatile
3483 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3485 inline bool atomic_llong::compare_exchange_weak
3486 ( long long& __e__, long long __m__, memory_order __x__ ) volatile
3487 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3488 __x__ == memory_order_acq_rel ? memory_order_acquire :
3489 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3491 inline bool atomic_llong::compare_exchange_strong
3492 ( long long& __e__, long long __m__, memory_order __x__ ) volatile
3493 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3494 __x__ == memory_order_acq_rel ? memory_order_acquire :
3495 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3497 inline void atomic_llong::fence
3498 ( memory_order __x__ ) const volatile
3499 { return atomic_fence( this, __x__ ); }
3502 inline bool atomic_ullong::is_lock_free() const volatile
3505 inline void atomic_ullong::store
3506 ( unsigned long long __m__, memory_order __x__ ) volatile
3507 { atomic_store_explicit( this, __m__, __x__ ); }
3509 inline unsigned long long atomic_ullong::load
3510 ( memory_order __x__ ) volatile
3511 { return atomic_load_explicit( this, __x__ ); }
3513 inline unsigned long long atomic_ullong::exchange
3514 ( unsigned long long __m__, memory_order __x__ ) volatile
3515 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3517 inline bool atomic_ullong::compare_exchange_weak
3518 ( unsigned long long& __e__, unsigned long long __m__,
3519 memory_order __x__, memory_order __y__ ) volatile
3520 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3522 inline bool atomic_ullong::compare_exchange_strong
3523 ( unsigned long long& __e__, unsigned long long __m__,
3524 memory_order __x__, memory_order __y__ ) volatile
3525 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3527 inline bool atomic_ullong::compare_exchange_weak
3528 ( unsigned long long& __e__, unsigned long long __m__, memory_order __x__ ) volatile
3529 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3530 __x__ == memory_order_acq_rel ? memory_order_acquire :
3531 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3533 inline bool atomic_ullong::compare_exchange_strong
3534 ( unsigned long long& __e__, unsigned long long __m__, memory_order __x__ ) volatile
3535 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3536 __x__ == memory_order_acq_rel ? memory_order_acquire :
3537 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3539 inline void atomic_ullong::fence
3540 ( memory_order __x__ ) const volatile
3541 { return atomic_fence( this, __x__ ); }
3544 inline bool atomic_wchar_t::is_lock_free() const volatile
3547 inline void atomic_wchar_t::store
3548 ( wchar_t __m__, memory_order __x__ ) volatile
3549 { atomic_store_explicit( this, __m__, __x__ ); }
3551 inline wchar_t atomic_wchar_t::load
3552 ( memory_order __x__ ) volatile
3553 { return atomic_load_explicit( this, __x__ ); }
3555 inline wchar_t atomic_wchar_t::exchange
3556 ( wchar_t __m__, memory_order __x__ ) volatile
3557 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3559 inline bool atomic_wchar_t::compare_exchange_weak
3560 ( wchar_t& __e__, wchar_t __m__,
3561 memory_order __x__, memory_order __y__ ) volatile
3562 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3564 inline bool atomic_wchar_t::compare_exchange_strong
3565 ( wchar_t& __e__, wchar_t __m__,
3566 memory_order __x__, memory_order __y__ ) volatile
3567 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3569 inline bool atomic_wchar_t::compare_exchange_weak
3570 ( wchar_t& __e__, wchar_t __m__, memory_order __x__ ) volatile
3571 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3572 __x__ == memory_order_acq_rel ? memory_order_acquire :
3573 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3575 inline bool atomic_wchar_t::compare_exchange_strong
3576 ( wchar_t& __e__, wchar_t __m__, memory_order __x__ ) volatile
3577 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3578 __x__ == memory_order_acq_rel ? memory_order_acquire :
3579 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3581 inline void atomic_wchar_t::fence
3582 ( memory_order __x__ ) const volatile
3583 { return atomic_fence( this, __x__ ); }
// ==================================================================
// Generic atomic<T> members.  Unlike the builtin-type forwarders,
// these expand the _ATOMIC_* macros directly (the macros operate on
// the data field __f__ of the atomic struct; see the macro notes
// earlier in the file).
// ==================================================================
3586 template< typename T >
3587 inline bool atomic<T>::is_lock_free() const volatile
3590 template< typename T >
3591 inline void atomic<T>::store( T __v__, memory_order __x__ ) volatile
3592 { _ATOMIC_STORE_( this, __v__, __x__ ); }
3594 template< typename T >
3595 inline T atomic<T>::load( memory_order __x__ ) volatile
3596 { return _ATOMIC_LOAD_( this, __x__ ); }
// exchange is expressed as a modify with plain assignment ("=") as
// the operation, which atomically replaces the stored value and
// yields the previous one.
3598 template< typename T >
3599 inline T atomic<T>::exchange( T __v__, memory_order __x__ ) volatile
3600 { return _ATOMIC_MODIFY_( this, =, __v__, __x__ ); }
// NOTE(review): the two-order CAS overloads below accept a separate
// failure ordering __y__ but never pass it on -- _ATOMIC_CMPSWP_WEAK_
// and _ATOMIC_CMPSWP_ take only a single ordering, so __y__ is
// silently ignored.  Confirm against the macro definitions whether
// the failure ordering is intended to be honored here.
3602 template< typename T >
3603 inline bool atomic<T>::compare_exchange_weak
3604 ( T& __r__, T __v__, memory_order __x__, memory_order __y__ ) volatile
3605 { return _ATOMIC_CMPSWP_WEAK_( this, &__r__, __v__, __x__ ); }
3607 template< typename T >
3608 inline bool atomic<T>::compare_exchange_strong
3609 ( T& __r__, T __v__, memory_order __x__, memory_order __y__ ) volatile
3610 { return _ATOMIC_CMPSWP_( this, &__r__, __v__, __x__ ); }
// Single-order overloads: delegate to the two-order overloads above,
// deriving the failure order from the success order __x__
// (acq_rel -> acquire, release -> relaxed, otherwise __x__).
3612 template< typename T >
3613 inline bool atomic<T>::compare_exchange_weak
3614 ( T& __r__, T __v__, memory_order __x__ ) volatile
3615 { return compare_exchange_weak( __r__, __v__, __x__,
3616 __x__ == memory_order_acq_rel ? memory_order_acquire :
3617 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3619 template< typename T >
3620 inline bool atomic<T>::compare_exchange_strong
3621 ( T& __r__, T __v__, memory_order __x__ ) volatile
3622 { return compare_exchange_strong( __r__, __v__, __x__,
3623 __x__ == memory_order_acq_rel ? memory_order_acquire :
3624 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
// atomic_address arithmetic: add/subtract a ptrdiff_t offset to the
// stored void* and return the value held before the modification.
// (The offset unit -- presumably bytes, since the base is void* --
// is determined by atomic_fetch_add_explicit, which is defined
// elsewhere; confirm there.)
3627 inline void* atomic_address::fetch_add
3628 ( ptrdiff_t __m__, memory_order __x__ ) volatile
3629 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3631 inline void* atomic_address::fetch_sub
3632 ( ptrdiff_t __m__, memory_order __x__ ) volatile
3633 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
// ==================================================================
// Read-modify-write members (fetch_add, fetch_sub, fetch_and,
// fetch_or, fetch_xor) for every builtin integral atomic type.
// Each is a one-line forwarder to the matching
// atomic_fetch_*_explicit free function; under the C++0x
// fetch-and-op contract these return the value held immediately
// before the modification.
// ==================================================================

// ----- atomic_char ------------------------------------------------
3636 inline char atomic_char::fetch_add
3637 ( char __m__, memory_order __x__ ) volatile
3638 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3641 inline char atomic_char::fetch_sub
3642 ( char __m__, memory_order __x__ ) volatile
3643 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3646 inline char atomic_char::fetch_and
3647 ( char __m__, memory_order __x__ ) volatile
3648 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3651 inline char atomic_char::fetch_or
3652 ( char __m__, memory_order __x__ ) volatile
3653 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3656 inline char atomic_char::fetch_xor
3657 ( char __m__, memory_order __x__ ) volatile
3658 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

// ----- atomic_schar -----------------------------------------------
3661 inline signed char atomic_schar::fetch_add
3662 ( signed char __m__, memory_order __x__ ) volatile
3663 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3666 inline signed char atomic_schar::fetch_sub
3667 ( signed char __m__, memory_order __x__ ) volatile
3668 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3671 inline signed char atomic_schar::fetch_and
3672 ( signed char __m__, memory_order __x__ ) volatile
3673 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3676 inline signed char atomic_schar::fetch_or
3677 ( signed char __m__, memory_order __x__ ) volatile
3678 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3681 inline signed char atomic_schar::fetch_xor
3682 ( signed char __m__, memory_order __x__ ) volatile
3683 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

// ----- atomic_uchar -----------------------------------------------
3686 inline unsigned char atomic_uchar::fetch_add
3687 ( unsigned char __m__, memory_order __x__ ) volatile
3688 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3691 inline unsigned char atomic_uchar::fetch_sub
3692 ( unsigned char __m__, memory_order __x__ ) volatile
3693 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3696 inline unsigned char atomic_uchar::fetch_and
3697 ( unsigned char __m__, memory_order __x__ ) volatile
3698 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3701 inline unsigned char atomic_uchar::fetch_or
3702 ( unsigned char __m__, memory_order __x__ ) volatile
3703 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3706 inline unsigned char atomic_uchar::fetch_xor
3707 ( unsigned char __m__, memory_order __x__ ) volatile
3708 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

// ----- atomic_short -----------------------------------------------
3711 inline short atomic_short::fetch_add
3712 ( short __m__, memory_order __x__ ) volatile
3713 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3716 inline short atomic_short::fetch_sub
3717 ( short __m__, memory_order __x__ ) volatile
3718 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3721 inline short atomic_short::fetch_and
3722 ( short __m__, memory_order __x__ ) volatile
3723 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3726 inline short atomic_short::fetch_or
3727 ( short __m__, memory_order __x__ ) volatile
3728 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3731 inline short atomic_short::fetch_xor
3732 ( short __m__, memory_order __x__ ) volatile
3733 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

// ----- atomic_ushort ----------------------------------------------
3736 inline unsigned short atomic_ushort::fetch_add
3737 ( unsigned short __m__, memory_order __x__ ) volatile
3738 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3741 inline unsigned short atomic_ushort::fetch_sub
3742 ( unsigned short __m__, memory_order __x__ ) volatile
3743 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3746 inline unsigned short atomic_ushort::fetch_and
3747 ( unsigned short __m__, memory_order __x__ ) volatile
3748 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3751 inline unsigned short atomic_ushort::fetch_or
3752 ( unsigned short __m__, memory_order __x__ ) volatile
3753 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3756 inline unsigned short atomic_ushort::fetch_xor
3757 ( unsigned short __m__, memory_order __x__ ) volatile
3758 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

// ----- atomic_int -------------------------------------------------
3761 inline int atomic_int::fetch_add
3762 ( int __m__, memory_order __x__ ) volatile
3763 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3766 inline int atomic_int::fetch_sub
3767 ( int __m__, memory_order __x__ ) volatile
3768 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3771 inline int atomic_int::fetch_and
3772 ( int __m__, memory_order __x__ ) volatile
3773 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3776 inline int atomic_int::fetch_or
3777 ( int __m__, memory_order __x__ ) volatile
3778 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3781 inline int atomic_int::fetch_xor
3782 ( int __m__, memory_order __x__ ) volatile
3783 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

// ----- atomic_uint ------------------------------------------------
3786 inline unsigned int atomic_uint::fetch_add
3787 ( unsigned int __m__, memory_order __x__ ) volatile
3788 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3791 inline unsigned int atomic_uint::fetch_sub
3792 ( unsigned int __m__, memory_order __x__ ) volatile
3793 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3796 inline unsigned int atomic_uint::fetch_and
3797 ( unsigned int __m__, memory_order __x__ ) volatile
3798 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3801 inline unsigned int atomic_uint::fetch_or
3802 ( unsigned int __m__, memory_order __x__ ) volatile
3803 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3806 inline unsigned int atomic_uint::fetch_xor
3807 ( unsigned int __m__, memory_order __x__ ) volatile
3808 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

// ----- atomic_long ------------------------------------------------
3811 inline long atomic_long::fetch_add
3812 ( long __m__, memory_order __x__ ) volatile
3813 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3816 inline long atomic_long::fetch_sub
3817 ( long __m__, memory_order __x__ ) volatile
3818 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3821 inline long atomic_long::fetch_and
3822 ( long __m__, memory_order __x__ ) volatile
3823 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3826 inline long atomic_long::fetch_or
3827 ( long __m__, memory_order __x__ ) volatile
3828 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3831 inline long atomic_long::fetch_xor
3832 ( long __m__, memory_order __x__ ) volatile
3833 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

// ----- atomic_ulong -----------------------------------------------
3836 inline unsigned long atomic_ulong::fetch_add
3837 ( unsigned long __m__, memory_order __x__ ) volatile
3838 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3841 inline unsigned long atomic_ulong::fetch_sub
3842 ( unsigned long __m__, memory_order __x__ ) volatile
3843 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3846 inline unsigned long atomic_ulong::fetch_and
3847 ( unsigned long __m__, memory_order __x__ ) volatile
3848 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3851 inline unsigned long atomic_ulong::fetch_or
3852 ( unsigned long __m__, memory_order __x__ ) volatile
3853 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3856 inline unsigned long atomic_ulong::fetch_xor
3857 ( unsigned long __m__, memory_order __x__ ) volatile
3858 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

// ----- atomic_llong -----------------------------------------------
3861 inline long long atomic_llong::fetch_add
3862 ( long long __m__, memory_order __x__ ) volatile
3863 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3866 inline long long atomic_llong::fetch_sub
3867 ( long long __m__, memory_order __x__ ) volatile
3868 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3871 inline long long atomic_llong::fetch_and
3872 ( long long __m__, memory_order __x__ ) volatile
3873 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3876 inline long long atomic_llong::fetch_or
3877 ( long long __m__, memory_order __x__ ) volatile
3878 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3881 inline long long atomic_llong::fetch_xor
3882 ( long long __m__, memory_order __x__ ) volatile
3883 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

// ----- atomic_ullong ----------------------------------------------
3886 inline unsigned long long atomic_ullong::fetch_add
3887 ( unsigned long long __m__, memory_order __x__ ) volatile
3888 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3891 inline unsigned long long atomic_ullong::fetch_sub
3892 ( unsigned long long __m__, memory_order __x__ ) volatile
3893 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3896 inline unsigned long long atomic_ullong::fetch_and
3897 ( unsigned long long __m__, memory_order __x__ ) volatile
3898 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3901 inline unsigned long long atomic_ullong::fetch_or
3902 ( unsigned long long __m__, memory_order __x__ ) volatile
3903 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3906 inline unsigned long long atomic_ullong::fetch_xor
3907 ( unsigned long long __m__, memory_order __x__ ) volatile
3908 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

// ----- atomic_wchar_t ---------------------------------------------
3911 inline wchar_t atomic_wchar_t::fetch_add
3912 ( wchar_t __m__, memory_order __x__ ) volatile
3913 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3916 inline wchar_t atomic_wchar_t::fetch_sub
3917 ( wchar_t __m__, memory_order __x__ ) volatile
3918 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3921 inline wchar_t atomic_wchar_t::fetch_and
3922 ( wchar_t __m__, memory_order __x__ ) volatile
3923 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3926 inline wchar_t atomic_wchar_t::fetch_or
3927 ( wchar_t __m__, memory_order __x__ ) volatile
3928 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3931 inline wchar_t atomic_wchar_t::fetch_xor
3932 ( wchar_t __m__, memory_order __x__ ) volatile
3933 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// ==================================================================
// atomic<T*> partial specialization: implemented on top of the
// untyped atomic_address base, casting between T* and void* at the
// boundary.
// ==================================================================
3936 template< typename T >
3937 T* atomic<T*>::load( memory_order __x__ ) volatile
3938 { return static_cast<T*>( atomic_address::load( __x__ ) ); }
3940 template< typename T >
3941 T* atomic<T*>::exchange( T* __v__, memory_order __x__ ) volatile
3942 { return static_cast<T*>( atomic_address::exchange( __v__, __x__ ) ); }
// NOTE(review): the CAS overloads reinterpret_cast the T*& expected
// reference to void**, which assumes T* and void* share object
// representation (true on common platforms, but formally an aliasing
// assumption) -- confirm this is acceptable for the targets this
// header supports.  The commented-out lines preserve an alternative
// macro-based implementation.
3944 template< typename T >
3945 bool atomic<T*>::compare_exchange_weak
3946 ( T*& __r__, T* __v__, memory_order __x__, memory_order __y__) volatile
3947 { return atomic_address::compare_exchange_weak( *reinterpret_cast<void**>( &__r__ ),
3948 static_cast<void*>( __v__ ), __x__, __y__ ); }
3949 //{ return _ATOMIC_CMPSWP_WEAK_( this, &__r__, __v__, __x__ ); }
3951 template< typename T >
3952 bool atomic<T*>::compare_exchange_strong
3953 ( T*& __r__, T* __v__, memory_order __x__, memory_order __y__) volatile
3954 { return atomic_address::compare_exchange_strong( *reinterpret_cast<void**>( &__r__ ),
3955 static_cast<void*>( __v__ ), __x__, __y__ ); }
3956 //{ return _ATOMIC_CMPSWP_( this, &__r__, __v__, __x__ ); }
// Single-order CAS overloads: delegate to the two-order overloads,
// deriving the failure order from the success order __x__
// (acq_rel -> acquire, release -> relaxed, otherwise __x__).
3958 template< typename T >
3959 bool atomic<T*>::compare_exchange_weak
3960 ( T*& __r__, T* __v__, memory_order __x__ ) volatile
3961 { return compare_exchange_weak( __r__, __v__, __x__,
3962 __x__ == memory_order_acq_rel ? memory_order_acquire :
3963 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3965 template< typename T >
3966 bool atomic<T*>::compare_exchange_strong
3967 ( T*& __r__, T* __v__, memory_order __x__ ) volatile
3968 { return compare_exchange_strong( __r__, __v__, __x__,
3969 __x__ == memory_order_acq_rel ? memory_order_acquire :
3970 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
// Pointer arithmetic: the element count __v__ is scaled by sizeof(T)
// before being handed to the untyped fetch function, so callers get
// C-style pointer arithmetic in units of T.
3972 template< typename T >
3973 T* atomic<T*>::fetch_add( ptrdiff_t __v__, memory_order __x__ ) volatile
3974 { return atomic_fetch_add_explicit( this, sizeof(T) * __v__, __x__ ); }
3976 template< typename T >
3977 T* atomic<T*>::fetch_sub( ptrdiff_t __v__, memory_order __x__ ) volatile
3978 { return atomic_fetch_sub_explicit( this, sizeof(T) * __v__, __x__ ); }