1 #include "memoryorder.h"
/* CPP0X(x) expands to nothing: C++0x-only syntax ("= default",
 * "= delete", constexpr, ...) routed through this macro is erased for
 * pre-C++0x compilers.  Presumably a C++0x build defines it as the
 * identity instead -- TODO confirm against the full header. */
8 #define CPP0X( feature )
/* atomic_flag: the minimal test-and-set atomic type (draft-C++0x,
 * N2427 style).  NOTE(review): this listing is elided -- the struct's
 * opening brace, its data member, and the closing "} atomic_flag;"
 * line are missing from this view. */
10 typedef struct atomic_flag
/* test_and_set returns the flag's prior value (see the inline
 * definition below, which forwards to the _explicit free function). */
13 bool test_and_set( memory_order = memory_order_seq_cst ) volatile;
14 void clear( memory_order = memory_order_seq_cst ) volatile;
15 void fence( memory_order ) const volatile;
/* C++0x-only members; erased entirely in pre-C++0x builds (CPP0X). */
17 CPP0X( atomic_flag() = default; )
18 CPP0X( atomic_flag( const atomic_flag& ) = delete; )
19 atomic_flag& operator =( const atomic_flag& ) CPP0X(=delete);
/* Aggregate initializer for an atomic_flag in the cleared state. */
26 #define ATOMIC_FLAG_INIT { false }
/* Free-function interface for atomic_flag.  The _explicit variants take
 * a caller-supplied memory_order.  The double-underscored
 * __atomic_flag_wait__ helpers are implementation-internal entry
 * points (not part of the public draft interface). */
32 extern bool atomic_flag_test_and_set( volatile atomic_flag* );
33 extern bool atomic_flag_test_and_set_explicit
34 ( volatile atomic_flag*, memory_order );
35 extern void atomic_flag_clear( volatile atomic_flag* );
36 extern void atomic_flag_clear_explicit
37 ( volatile atomic_flag*, memory_order );
38 extern void atomic_flag_fence
39 ( const volatile atomic_flag*, memory_order );
40 extern void __atomic_flag_wait__
41 ( volatile atomic_flag* );
42 extern void __atomic_flag_wait_explicit__
43 ( volatile atomic_flag*, memory_order );
51 inline bool atomic_flag::test_and_set( memory_order __x__ ) volatile
52 { return atomic_flag_test_and_set_explicit( this, __x__ ); }
54 inline void atomic_flag::clear( memory_order __x__ ) volatile
55 { atomic_flag_clear_explicit( this, __x__ ); }
57 inline void atomic_flag::fence( memory_order __x__ ) const volatile
58 { atomic_flag_fence( this, __x__ ); }
64 The remainder of the example implementation uses the following
65 macros. These macros exploit GNU extensions for value-returning
66 blocks (AKA statement expressions) and __typeof__.
68 The macros rely on data fields of atomic structs being named __f__.
69 Other symbols used are __a__=atomic, __e__=expected, __f__=field,
70 __g__=flag, __m__=modified, __o__=operation, __r__=result,
71 __p__=pointer to field, __v__=value (for single evaluation),
72 __x__=memory-ordering, and __y__=second memory-ordering (for operations, such as compare-and-swap, that take two orderings).
/* _ATOMIC_LOAD_: statement-expression that reads the atomic's __f__
 * field through the model checker (model_read_action) with ordering
 * __x__ and yields the read value __r__.  NOTE(review): this listing
 * is truncated -- the closing "__r__; })" line is missing here. */
75 #define _ATOMIC_LOAD_( __a__, __x__ ) \
76 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
77 __typeof__((__a__)->__f__) __r__ = (__typeof__((__a__)->__f__))model_read_action((void *)__p__, __x__); \
/* _ATOMIC_STORE_: evaluates __m__ exactly once (into __v__) and writes
 * it to the atomic's field via model_write_action with ordering __x__.
 * NOTE(review): truncated -- the closing "__v__; })" is not in view. */
80 #define _ATOMIC_STORE_( __a__, __m__, __x__ ) \
81 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
82 __typeof__(__m__) __v__ = (__m__); \
83 model_write_action((void *) __p__, __x__, (uint64_t) __v__); \
/* _ATOMIC_INIT_: initialization of the atomic's field, reported to the
 * model checker as an init action; note there is no memory_order
 * parameter (initialization is not an atomic operation).
 * NOTE(review): truncated -- the closing line is missing from view. */
87 #define _ATOMIC_INIT_( __a__, __m__ ) \
88 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
89 __typeof__(__m__) __v__ = (__m__); \
90 model_init_action((void *) __p__, (uint64_t) __v__); \
/* _ATOMIC_MODIFY_: generic read-modify-write.  Reads the old value via
 * model_rmwr_action, applies compound operator token __o__ (e.g. +=,
 * &=) with operand __m__ to a copy, and publishes the result via
 * model_rmw_action.  Presumably yields __old__ (fetch_op semantics) --
 * NOTE(review): truncated; the closing "__old__; })" is missing here. */
93 #define _ATOMIC_MODIFY_( __a__, __o__, __m__, __x__ ) \
94 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
95 __typeof__((__a__)->__f__) __old__=(__typeof__((__a__)->__f__)) model_rmwr_action((void *)__p__, __x__); \
96 __typeof__(__m__) __v__ = (__m__); \
97 __typeof__((__a__)->__f__) __copy__= __old__; \
98 __copy__ __o__ __v__; \
99 model_rmw_action((void *)__p__, __x__, (uint64_t) __copy__); \
/* _ATOMIC_CMPSWP_: compare-and-swap.  Reads the current value as an
 * RMW-read; if it equals the expected value *__q__, completes the RMW
 * with __v__ and sets __r__ true; otherwise records an aborted RMW
 * (model_rmwc_action), writes the observed value back through the
 * expected pointer, and sets __r__ false.  Both arms use the single
 * ordering __x__.  NOTE(review): elided here -- the "bool __r__;"
 * declaration and closing "__r__; })" are missing from this view. */
102 #define _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ) \
103 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
104 __typeof__(__e__) __q__ = (__e__); \
105 __typeof__(__m__) __v__ = (__m__); \
107 __typeof__((__a__)->__f__) __t__=(__typeof__((__a__)->__f__)) model_rmwr_action((void *)__p__, __x__); \
108 if (__t__ == * __q__ ) { \
109 model_rmw_action((void *)__p__, __x__, (uint64_t) __v__); __r__ = true; } \
110 else { model_rmwc_action((void *)__p__, __x__); *__q__ = __t__; __r__ = false;} \
/* _ATOMIC_FENCE_: maps a fence on atomic __a__ to a model-checker
 * fence action with ordering __x__; the object argument __a__ is
 * intentionally unused. */
113 #define _ATOMIC_FENCE_( __a__, __x__ ) \
114 ({ model_fence_action(__x__);})
/* Lock-free property macros: every builtin atomic type is reported
 * lock-free (1) by this implementation. */
117 #define ATOMIC_CHAR_LOCK_FREE 1
118 #define ATOMIC_CHAR16_T_LOCK_FREE 1
119 #define ATOMIC_CHAR32_T_LOCK_FREE 1
120 #define ATOMIC_WCHAR_T_LOCK_FREE 1
121 #define ATOMIC_SHORT_LOCK_FREE 1
122 #define ATOMIC_INT_LOCK_FREE 1
123 #define ATOMIC_LONG_LOCK_FREE 1
124 #define ATOMIC_LLONG_LOCK_FREE 1
125 #define ATOMIC_ADDRESS_LOCK_FREE 1
/* atomic_bool: draft-C++0x atomic boolean.  Members mirror the free
 * functions declared as friends below.  NOTE(review): listing elided --
 * the opening brace, the "bool __f__;" member, and the closing
 * "} atomic_bool;" line are missing from this view. */
127 typedef struct atomic_bool
130 bool is_lock_free() const volatile;
131 void store( bool, memory_order = memory_order_seq_cst ) volatile;
132 bool load( memory_order = memory_order_seq_cst ) volatile;
133 bool swap( bool, memory_order = memory_order_seq_cst ) volatile;
134 bool compare_swap ( bool&, bool, memory_order, memory_order ) volatile;
135 bool compare_swap ( bool&, bool,
136 memory_order = memory_order_seq_cst) volatile;
137 void fence( memory_order ) const volatile;
/* NOTE(review): "= delete" on the default constructor is inconsistent
 * with the other atomic types in this file, which use "= default"
 * (e.g. atomic_flag, atomic_address) -- confirm against upstream. */
139 CPP0X( atomic_bool() = delete; )
140 CPP0X( constexpr explicit atomic_bool( bool __v__ ) : __f__( __v__ ) { } )
141 CPP0X( atomic_bool( const atomic_bool& ) = delete; )
142 atomic_bool& operator =( const atomic_bool& ) CPP0X(=delete);
/* Assignment from bool performs a store (seq_cst default) and returns
 * the assigned value, matching builtin assignment semantics. */
144 bool operator =( bool __v__ ) volatile
145 { store( __v__ ); return __v__; }
147 friend void atomic_store_explicit( volatile atomic_bool*, bool,
149 friend bool atomic_load_explicit( volatile atomic_bool*, memory_order );
150 friend bool atomic_swap_explicit( volatile atomic_bool*, bool,
152 friend bool atomic_compare_swap_explicit( volatile atomic_bool*, bool*, bool,
153 memory_order, memory_order );
154 friend void atomic_fence( const volatile atomic_bool*, memory_order );
/* atomic_address: draft-C++0x atomic void*; adds pointer arithmetic
 * (fetch_add/fetch_sub by ptrdiff_t) on top of the common interface.
 * NOTE(review): listing elided -- opening brace, "void* __f__;" member,
 * and closing "} atomic_address;" are missing from this view. */
162 typedef struct atomic_address
165 bool is_lock_free() const volatile;
166 void store( void*, memory_order = memory_order_seq_cst ) volatile;
167 void* load( memory_order = memory_order_seq_cst ) volatile;
168 void* swap( void*, memory_order = memory_order_seq_cst ) volatile;
169 bool compare_swap( void*&, void*, memory_order, memory_order ) volatile;
170 bool compare_swap( void*&, void*,
171 memory_order = memory_order_seq_cst ) volatile;
172 void fence( memory_order ) const volatile;
173 void* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
174 void* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
176 CPP0X( atomic_address() = default; )
177 CPP0X( constexpr explicit atomic_address( void* __v__ ) : __f__( __v__) { } )
178 CPP0X( atomic_address( const atomic_address& ) = delete; )
179 atomic_address& operator =( const atomic_address & ) CPP0X(=delete);
181 void* operator =( void* __v__ ) volatile
182 { store( __v__ ); return __v__; }
/* NOTE(review): unlike the integral atomics (whose += returns
 * fetch_add(v) + v, i.e. the updated value), these return the PRIOR
 * value from fetch_add/fetch_sub -- confirm this matches upstream. */
184 void* operator +=( ptrdiff_t __v__ ) volatile
185 { return fetch_add( __v__ ); }
187 void* operator -=( ptrdiff_t __v__ ) volatile
188 { return fetch_sub( __v__ ); }
190 friend void atomic_store_explicit( volatile atomic_address*, void*,
192 friend void* atomic_load_explicit( volatile atomic_address*, memory_order );
193 friend void* atomic_swap_explicit( volatile atomic_address*, void*,
195 friend bool atomic_compare_swap_explicit( volatile atomic_address*,
196 void**, void*, memory_order, memory_order );
197 friend void atomic_fence( const volatile atomic_address*, memory_order );
198 friend void* atomic_fetch_add_explicit( volatile atomic_address*, ptrdiff_t,
200 friend void* atomic_fetch_sub_explicit( volatile atomic_address*, ptrdiff_t,
/* atomic_char: draft-C++0x atomic char with the full arithmetic and
 * bitwise RMW interface.  Post-increment/decrement return the prior
 * value; pre-increment/decrement and the compound assignments return
 * the updated value (fetch result combined with the operand).
 * This struct is the template for the other integral atomics below.
 * NOTE(review): listing elided -- the opening brace, "char __f__;"
 * member, closing "} atomic_char;", and the head lines of the
 * store/swap/fetch_or declarations are missing from this view. */
209 typedef struct atomic_char
212 bool is_lock_free() const volatile;
214 memory_order = memory_order_seq_cst ) volatile;
215 char load( memory_order = memory_order_seq_cst ) volatile;
217 memory_order = memory_order_seq_cst ) volatile;
218 bool compare_swap( char&, char,
219 memory_order, memory_order ) volatile;
220 bool compare_swap( char&, char,
221 memory_order = memory_order_seq_cst ) volatile;
222 void fence( memory_order ) const volatile;
223 char fetch_add( char,
224 memory_order = memory_order_seq_cst ) volatile;
225 char fetch_sub( char,
226 memory_order = memory_order_seq_cst ) volatile;
227 char fetch_and( char,
228 memory_order = memory_order_seq_cst ) volatile;
230 memory_order = memory_order_seq_cst ) volatile;
231 char fetch_xor( char,
232 memory_order = memory_order_seq_cst ) volatile;
234 CPP0X( atomic_char() = default; )
235 CPP0X( constexpr atomic_char( char __v__ ) : __f__( __v__) { } )
236 CPP0X( atomic_char( const atomic_char& ) = delete; )
237 atomic_char& operator =( const atomic_char& ) CPP0X(=delete);
239 char operator =( char __v__ ) volatile
240 { store( __v__ ); return __v__; }
242 char operator ++( int ) volatile
243 { return fetch_add( 1 ); }
245 char operator --( int ) volatile
246 { return fetch_sub( 1 ); }
248 char operator ++() volatile
249 { return fetch_add( 1 ) + 1; }
251 char operator --() volatile
252 { return fetch_sub( 1 ) - 1; }
254 char operator +=( char __v__ ) volatile
255 { return fetch_add( __v__ ) + __v__; }
257 char operator -=( char __v__ ) volatile
258 { return fetch_sub( __v__ ) - __v__; }
260 char operator &=( char __v__ ) volatile
261 { return fetch_and( __v__ ) & __v__; }
263 char operator |=( char __v__ ) volatile
264 { return fetch_or( __v__ ) | __v__; }
266 char operator ^=( char __v__ ) volatile
267 { return fetch_xor( __v__ ) ^ __v__; }
269 friend void atomic_store_explicit( volatile atomic_char*, char,
271 friend char atomic_load_explicit( volatile atomic_char*,
273 friend char atomic_swap_explicit( volatile atomic_char*,
274 char, memory_order );
275 friend bool atomic_compare_swap_explicit( volatile atomic_char*,
276 char*, char, memory_order, memory_order );
277 friend void atomic_fence( const volatile atomic_char*, memory_order );
278 friend char atomic_fetch_add_explicit( volatile atomic_char*,
279 char, memory_order );
280 friend char atomic_fetch_sub_explicit( volatile atomic_char*,
281 char, memory_order );
282 friend char atomic_fetch_and_explicit( volatile atomic_char*,
283 char, memory_order );
284 friend char atomic_fetch_or_explicit( volatile atomic_char*,
285 char, memory_order );
286 friend char atomic_fetch_xor_explicit( volatile atomic_char*,
287 char, memory_order );
295 typedef struct atomic_schar
298 bool is_lock_free() const volatile;
299 void store( signed char,
300 memory_order = memory_order_seq_cst ) volatile;
301 signed char load( memory_order = memory_order_seq_cst ) volatile;
302 signed char swap( signed char,
303 memory_order = memory_order_seq_cst ) volatile;
304 bool compare_swap( signed char&, signed char,
305 memory_order, memory_order ) volatile;
306 bool compare_swap( signed char&, signed char,
307 memory_order = memory_order_seq_cst ) volatile;
308 void fence( memory_order ) const volatile;
309 signed char fetch_add( signed char,
310 memory_order = memory_order_seq_cst ) volatile;
311 signed char fetch_sub( signed char,
312 memory_order = memory_order_seq_cst ) volatile;
313 signed char fetch_and( signed char,
314 memory_order = memory_order_seq_cst ) volatile;
315 signed char fetch_or( signed char,
316 memory_order = memory_order_seq_cst ) volatile;
317 signed char fetch_xor( signed char,
318 memory_order = memory_order_seq_cst ) volatile;
320 CPP0X( atomic_schar() = default; )
321 CPP0X( constexpr atomic_schar( signed char __v__ ) : __f__( __v__) { } )
322 CPP0X( atomic_schar( const atomic_schar& ) = delete; )
323 atomic_schar& operator =( const atomic_schar& ) CPP0X(=delete);
325 signed char operator =( signed char __v__ ) volatile
326 { store( __v__ ); return __v__; }
328 signed char operator ++( int ) volatile
329 { return fetch_add( 1 ); }
331 signed char operator --( int ) volatile
332 { return fetch_sub( 1 ); }
334 signed char operator ++() volatile
335 { return fetch_add( 1 ) + 1; }
337 signed char operator --() volatile
338 { return fetch_sub( 1 ) - 1; }
340 signed char operator +=( signed char __v__ ) volatile
341 { return fetch_add( __v__ ) + __v__; }
343 signed char operator -=( signed char __v__ ) volatile
344 { return fetch_sub( __v__ ) - __v__; }
346 signed char operator &=( signed char __v__ ) volatile
347 { return fetch_and( __v__ ) & __v__; }
349 signed char operator |=( signed char __v__ ) volatile
350 { return fetch_or( __v__ ) | __v__; }
352 signed char operator ^=( signed char __v__ ) volatile
353 { return fetch_xor( __v__ ) ^ __v__; }
355 friend void atomic_store_explicit( volatile atomic_schar*, signed char,
357 friend signed char atomic_load_explicit( volatile atomic_schar*,
359 friend signed char atomic_swap_explicit( volatile atomic_schar*,
360 signed char, memory_order );
361 friend bool atomic_compare_swap_explicit( volatile atomic_schar*,
362 signed char*, signed char, memory_order, memory_order );
363 friend void atomic_fence( const volatile atomic_schar*, memory_order );
364 friend signed char atomic_fetch_add_explicit( volatile atomic_schar*,
365 signed char, memory_order );
366 friend signed char atomic_fetch_sub_explicit( volatile atomic_schar*,
367 signed char, memory_order );
368 friend signed char atomic_fetch_and_explicit( volatile atomic_schar*,
369 signed char, memory_order );
370 friend signed char atomic_fetch_or_explicit( volatile atomic_schar*,
371 signed char, memory_order );
372 friend signed char atomic_fetch_xor_explicit( volatile atomic_schar*,
373 signed char, memory_order );
381 typedef struct atomic_uchar
384 bool is_lock_free() const volatile;
385 void store( unsigned char,
386 memory_order = memory_order_seq_cst ) volatile;
387 unsigned char load( memory_order = memory_order_seq_cst ) volatile;
388 unsigned char swap( unsigned char,
389 memory_order = memory_order_seq_cst ) volatile;
390 bool compare_swap( unsigned char&, unsigned char,
391 memory_order, memory_order ) volatile;
392 bool compare_swap( unsigned char&, unsigned char,
393 memory_order = memory_order_seq_cst ) volatile;
394 void fence( memory_order ) const volatile;
395 unsigned char fetch_add( unsigned char,
396 memory_order = memory_order_seq_cst ) volatile;
397 unsigned char fetch_sub( unsigned char,
398 memory_order = memory_order_seq_cst ) volatile;
399 unsigned char fetch_and( unsigned char,
400 memory_order = memory_order_seq_cst ) volatile;
401 unsigned char fetch_or( unsigned char,
402 memory_order = memory_order_seq_cst ) volatile;
403 unsigned char fetch_xor( unsigned char,
404 memory_order = memory_order_seq_cst ) volatile;
406 CPP0X( atomic_uchar() = default; )
407 CPP0X( constexpr atomic_uchar( unsigned char __v__ ) : __f__( __v__) { } )
408 CPP0X( atomic_uchar( const atomic_uchar& ) = delete; )
409 atomic_uchar& operator =( const atomic_uchar& ) CPP0X(=delete);
411 unsigned char operator =( unsigned char __v__ ) volatile
412 { store( __v__ ); return __v__; }
414 unsigned char operator ++( int ) volatile
415 { return fetch_add( 1 ); }
417 unsigned char operator --( int ) volatile
418 { return fetch_sub( 1 ); }
420 unsigned char operator ++() volatile
421 { return fetch_add( 1 ) + 1; }
423 unsigned char operator --() volatile
424 { return fetch_sub( 1 ) - 1; }
426 unsigned char operator +=( unsigned char __v__ ) volatile
427 { return fetch_add( __v__ ) + __v__; }
429 unsigned char operator -=( unsigned char __v__ ) volatile
430 { return fetch_sub( __v__ ) - __v__; }
432 unsigned char operator &=( unsigned char __v__ ) volatile
433 { return fetch_and( __v__ ) & __v__; }
435 unsigned char operator |=( unsigned char __v__ ) volatile
436 { return fetch_or( __v__ ) | __v__; }
438 unsigned char operator ^=( unsigned char __v__ ) volatile
439 { return fetch_xor( __v__ ) ^ __v__; }
441 friend void atomic_store_explicit( volatile atomic_uchar*, unsigned char,
443 friend unsigned char atomic_load_explicit( volatile atomic_uchar*,
445 friend unsigned char atomic_swap_explicit( volatile atomic_uchar*,
446 unsigned char, memory_order );
447 friend bool atomic_compare_swap_explicit( volatile atomic_uchar*,
448 unsigned char*, unsigned char, memory_order, memory_order );
449 friend void atomic_fence( const volatile atomic_uchar*, memory_order );
450 friend unsigned char atomic_fetch_add_explicit( volatile atomic_uchar*,
451 unsigned char, memory_order );
452 friend unsigned char atomic_fetch_sub_explicit( volatile atomic_uchar*,
453 unsigned char, memory_order );
454 friend unsigned char atomic_fetch_and_explicit( volatile atomic_uchar*,
455 unsigned char, memory_order );
456 friend unsigned char atomic_fetch_or_explicit( volatile atomic_uchar*,
457 unsigned char, memory_order );
458 friend unsigned char atomic_fetch_xor_explicit( volatile atomic_uchar*,
459 unsigned char, memory_order );
467 typedef struct atomic_short
470 bool is_lock_free() const volatile;
472 memory_order = memory_order_seq_cst ) volatile;
473 short load( memory_order = memory_order_seq_cst ) volatile;
475 memory_order = memory_order_seq_cst ) volatile;
476 bool compare_swap( short&, short,
477 memory_order, memory_order ) volatile;
478 bool compare_swap( short&, short,
479 memory_order = memory_order_seq_cst ) volatile;
480 void fence( memory_order ) const volatile;
481 short fetch_add( short,
482 memory_order = memory_order_seq_cst ) volatile;
483 short fetch_sub( short,
484 memory_order = memory_order_seq_cst ) volatile;
485 short fetch_and( short,
486 memory_order = memory_order_seq_cst ) volatile;
487 short fetch_or( short,
488 memory_order = memory_order_seq_cst ) volatile;
489 short fetch_xor( short,
490 memory_order = memory_order_seq_cst ) volatile;
492 CPP0X( atomic_short() = default; )
493 CPP0X( constexpr atomic_short( short __v__ ) : __f__( __v__) { } )
494 CPP0X( atomic_short( const atomic_short& ) = delete; )
495 atomic_short& operator =( const atomic_short& ) CPP0X(=delete);
497 short operator =( short __v__ ) volatile
498 { store( __v__ ); return __v__; }
500 short operator ++( int ) volatile
501 { return fetch_add( 1 ); }
503 short operator --( int ) volatile
504 { return fetch_sub( 1 ); }
506 short operator ++() volatile
507 { return fetch_add( 1 ) + 1; }
509 short operator --() volatile
510 { return fetch_sub( 1 ) - 1; }
512 short operator +=( short __v__ ) volatile
513 { return fetch_add( __v__ ) + __v__; }
515 short operator -=( short __v__ ) volatile
516 { return fetch_sub( __v__ ) - __v__; }
518 short operator &=( short __v__ ) volatile
519 { return fetch_and( __v__ ) & __v__; }
521 short operator |=( short __v__ ) volatile
522 { return fetch_or( __v__ ) | __v__; }
524 short operator ^=( short __v__ ) volatile
525 { return fetch_xor( __v__ ) ^ __v__; }
527 friend void atomic_store_explicit( volatile atomic_short*, short,
529 friend short atomic_load_explicit( volatile atomic_short*,
531 friend short atomic_swap_explicit( volatile atomic_short*,
532 short, memory_order );
533 friend bool atomic_compare_swap_explicit( volatile atomic_short*,
534 short*, short, memory_order, memory_order );
535 friend void atomic_fence( const volatile atomic_short*, memory_order );
536 friend short atomic_fetch_add_explicit( volatile atomic_short*,
537 short, memory_order );
538 friend short atomic_fetch_sub_explicit( volatile atomic_short*,
539 short, memory_order );
540 friend short atomic_fetch_and_explicit( volatile atomic_short*,
541 short, memory_order );
542 friend short atomic_fetch_or_explicit( volatile atomic_short*,
543 short, memory_order );
544 friend short atomic_fetch_xor_explicit( volatile atomic_short*,
545 short, memory_order );
553 typedef struct atomic_ushort
556 bool is_lock_free() const volatile;
557 void store( unsigned short,
558 memory_order = memory_order_seq_cst ) volatile;
559 unsigned short load( memory_order = memory_order_seq_cst ) volatile;
560 unsigned short swap( unsigned short,
561 memory_order = memory_order_seq_cst ) volatile;
562 bool compare_swap( unsigned short&, unsigned short,
563 memory_order, memory_order ) volatile;
564 bool compare_swap( unsigned short&, unsigned short,
565 memory_order = memory_order_seq_cst ) volatile;
566 void fence( memory_order ) const volatile;
567 unsigned short fetch_add( unsigned short,
568 memory_order = memory_order_seq_cst ) volatile;
569 unsigned short fetch_sub( unsigned short,
570 memory_order = memory_order_seq_cst ) volatile;
571 unsigned short fetch_and( unsigned short,
572 memory_order = memory_order_seq_cst ) volatile;
573 unsigned short fetch_or( unsigned short,
574 memory_order = memory_order_seq_cst ) volatile;
575 unsigned short fetch_xor( unsigned short,
576 memory_order = memory_order_seq_cst ) volatile;
578 CPP0X( atomic_ushort() = default; )
579 CPP0X( constexpr atomic_ushort( unsigned short __v__ ) : __f__( __v__) { } )
580 CPP0X( atomic_ushort( const atomic_ushort& ) = delete; )
581 atomic_ushort& operator =( const atomic_ushort& ) CPP0X(=delete);
583 unsigned short operator =( unsigned short __v__ ) volatile
584 { store( __v__ ); return __v__; }
586 unsigned short operator ++( int ) volatile
587 { return fetch_add( 1 ); }
589 unsigned short operator --( int ) volatile
590 { return fetch_sub( 1 ); }
592 unsigned short operator ++() volatile
593 { return fetch_add( 1 ) + 1; }
595 unsigned short operator --() volatile
596 { return fetch_sub( 1 ) - 1; }
598 unsigned short operator +=( unsigned short __v__ ) volatile
599 { return fetch_add( __v__ ) + __v__; }
601 unsigned short operator -=( unsigned short __v__ ) volatile
602 { return fetch_sub( __v__ ) - __v__; }
604 unsigned short operator &=( unsigned short __v__ ) volatile
605 { return fetch_and( __v__ ) & __v__; }
607 unsigned short operator |=( unsigned short __v__ ) volatile
608 { return fetch_or( __v__ ) | __v__; }
610 unsigned short operator ^=( unsigned short __v__ ) volatile
611 { return fetch_xor( __v__ ) ^ __v__; }
613 friend void atomic_store_explicit( volatile atomic_ushort*, unsigned short,
615 friend unsigned short atomic_load_explicit( volatile atomic_ushort*,
617 friend unsigned short atomic_swap_explicit( volatile atomic_ushort*,
618 unsigned short, memory_order );
619 friend bool atomic_compare_swap_explicit( volatile atomic_ushort*,
620 unsigned short*, unsigned short, memory_order, memory_order );
621 friend void atomic_fence( const volatile atomic_ushort*, memory_order );
622 friend unsigned short atomic_fetch_add_explicit( volatile atomic_ushort*,
623 unsigned short, memory_order );
624 friend unsigned short atomic_fetch_sub_explicit( volatile atomic_ushort*,
625 unsigned short, memory_order );
626 friend unsigned short atomic_fetch_and_explicit( volatile atomic_ushort*,
627 unsigned short, memory_order );
628 friend unsigned short atomic_fetch_or_explicit( volatile atomic_ushort*,
629 unsigned short, memory_order );
630 friend unsigned short atomic_fetch_xor_explicit( volatile atomic_ushort*,
631 unsigned short, memory_order );
635 unsigned short __f__;
639 typedef struct atomic_int
642 bool is_lock_free() const volatile;
644 memory_order = memory_order_seq_cst ) volatile;
645 int load( memory_order = memory_order_seq_cst ) volatile;
647 memory_order = memory_order_seq_cst ) volatile;
648 bool compare_swap( int&, int,
649 memory_order, memory_order ) volatile;
650 bool compare_swap( int&, int,
651 memory_order = memory_order_seq_cst ) volatile;
652 void fence( memory_order ) const volatile;
654 memory_order = memory_order_seq_cst ) volatile;
656 memory_order = memory_order_seq_cst ) volatile;
658 memory_order = memory_order_seq_cst ) volatile;
660 memory_order = memory_order_seq_cst ) volatile;
662 memory_order = memory_order_seq_cst ) volatile;
664 CPP0X( atomic_int() = default; )
665 CPP0X( constexpr atomic_int( int __v__ ) : __f__( __v__) { } )
666 CPP0X( atomic_int( const atomic_int& ) = delete; )
667 atomic_int& operator =( const atomic_int& ) CPP0X(=delete);
669 int operator =( int __v__ ) volatile
670 { store( __v__ ); return __v__; }
672 int operator ++( int ) volatile
673 { return fetch_add( 1 ); }
675 int operator --( int ) volatile
676 { return fetch_sub( 1 ); }
678 int operator ++() volatile
679 { return fetch_add( 1 ) + 1; }
681 int operator --() volatile
682 { return fetch_sub( 1 ) - 1; }
684 int operator +=( int __v__ ) volatile
685 { return fetch_add( __v__ ) + __v__; }
687 int operator -=( int __v__ ) volatile
688 { return fetch_sub( __v__ ) - __v__; }
690 int operator &=( int __v__ ) volatile
691 { return fetch_and( __v__ ) & __v__; }
693 int operator |=( int __v__ ) volatile
694 { return fetch_or( __v__ ) | __v__; }
696 int operator ^=( int __v__ ) volatile
697 { return fetch_xor( __v__ ) ^ __v__; }
699 friend void atomic_store_explicit( volatile atomic_int*, int,
701 friend int atomic_load_explicit( volatile atomic_int*,
703 friend int atomic_swap_explicit( volatile atomic_int*,
705 friend bool atomic_compare_swap_explicit( volatile atomic_int*,
706 int*, int, memory_order, memory_order );
707 friend void atomic_fence( const volatile atomic_int*, memory_order );
708 friend int atomic_fetch_add_explicit( volatile atomic_int*,
710 friend int atomic_fetch_sub_explicit( volatile atomic_int*,
712 friend int atomic_fetch_and_explicit( volatile atomic_int*,
714 friend int atomic_fetch_or_explicit( volatile atomic_int*,
716 friend int atomic_fetch_xor_explicit( volatile atomic_int*,
725 typedef struct atomic_uint
728 bool is_lock_free() const volatile;
729 void store( unsigned int,
730 memory_order = memory_order_seq_cst ) volatile;
731 unsigned int load( memory_order = memory_order_seq_cst ) volatile;
732 unsigned int swap( unsigned int,
733 memory_order = memory_order_seq_cst ) volatile;
734 bool compare_swap( unsigned int&, unsigned int,
735 memory_order, memory_order ) volatile;
736 bool compare_swap( unsigned int&, unsigned int,
737 memory_order = memory_order_seq_cst ) volatile;
738 void fence( memory_order ) const volatile;
739 unsigned int fetch_add( unsigned int,
740 memory_order = memory_order_seq_cst ) volatile;
741 unsigned int fetch_sub( unsigned int,
742 memory_order = memory_order_seq_cst ) volatile;
743 unsigned int fetch_and( unsigned int,
744 memory_order = memory_order_seq_cst ) volatile;
745 unsigned int fetch_or( unsigned int,
746 memory_order = memory_order_seq_cst ) volatile;
747 unsigned int fetch_xor( unsigned int,
748 memory_order = memory_order_seq_cst ) volatile;
750 CPP0X( atomic_uint() = default; )
751 CPP0X( constexpr atomic_uint( unsigned int __v__ ) : __f__( __v__) { } )
752 CPP0X( atomic_uint( const atomic_uint& ) = delete; )
753 atomic_uint& operator =( const atomic_uint& ) CPP0X(=delete);
755 unsigned int operator =( unsigned int __v__ ) volatile
756 { store( __v__ ); return __v__; }
758 unsigned int operator ++( int ) volatile
759 { return fetch_add( 1 ); }
761 unsigned int operator --( int ) volatile
762 { return fetch_sub( 1 ); }
764 unsigned int operator ++() volatile
765 { return fetch_add( 1 ) + 1; }
767 unsigned int operator --() volatile
768 { return fetch_sub( 1 ) - 1; }
770 unsigned int operator +=( unsigned int __v__ ) volatile
771 { return fetch_add( __v__ ) + __v__; }
773 unsigned int operator -=( unsigned int __v__ ) volatile
774 { return fetch_sub( __v__ ) - __v__; }
776 unsigned int operator &=( unsigned int __v__ ) volatile
777 { return fetch_and( __v__ ) & __v__; }
779 unsigned int operator |=( unsigned int __v__ ) volatile
780 { return fetch_or( __v__ ) | __v__; }
782 unsigned int operator ^=( unsigned int __v__ ) volatile
783 { return fetch_xor( __v__ ) ^ __v__; }
785 friend void atomic_store_explicit( volatile atomic_uint*, unsigned int,
787 friend unsigned int atomic_load_explicit( volatile atomic_uint*,
789 friend unsigned int atomic_swap_explicit( volatile atomic_uint*,
790 unsigned int, memory_order );
791 friend bool atomic_compare_swap_explicit( volatile atomic_uint*,
792 unsigned int*, unsigned int, memory_order, memory_order );
793 friend void atomic_fence( const volatile atomic_uint*, memory_order );
794 friend unsigned int atomic_fetch_add_explicit( volatile atomic_uint*,
795 unsigned int, memory_order );
796 friend unsigned int atomic_fetch_sub_explicit( volatile atomic_uint*,
797 unsigned int, memory_order );
798 friend unsigned int atomic_fetch_and_explicit( volatile atomic_uint*,
799 unsigned int, memory_order );
800 friend unsigned int atomic_fetch_or_explicit( volatile atomic_uint*,
801 unsigned int, memory_order );
802 friend unsigned int atomic_fetch_xor_explicit( volatile atomic_uint*,
803 unsigned int, memory_order );
811 typedef struct atomic_long
814 bool is_lock_free() const volatile;
816 memory_order = memory_order_seq_cst ) volatile;
817 long load( memory_order = memory_order_seq_cst ) volatile;
819 memory_order = memory_order_seq_cst ) volatile;
820 bool compare_swap( long&, long,
821 memory_order, memory_order ) volatile;
822 bool compare_swap( long&, long,
823 memory_order = memory_order_seq_cst ) volatile;
824 void fence( memory_order ) const volatile;
825 long fetch_add( long,
826 memory_order = memory_order_seq_cst ) volatile;
827 long fetch_sub( long,
828 memory_order = memory_order_seq_cst ) volatile;
829 long fetch_and( long,
830 memory_order = memory_order_seq_cst ) volatile;
832 memory_order = memory_order_seq_cst ) volatile;
833 long fetch_xor( long,
834 memory_order = memory_order_seq_cst ) volatile;
836 CPP0X( atomic_long() = default; )
837 CPP0X( constexpr atomic_long( long __v__ ) : __f__( __v__) { } )
838 CPP0X( atomic_long( const atomic_long& ) = delete; )
839 atomic_long& operator =( const atomic_long& ) CPP0X(=delete);
841 long operator =( long __v__ ) volatile
842 { store( __v__ ); return __v__; }
844 long operator ++( int ) volatile
845 { return fetch_add( 1 ); }
847 long operator --( int ) volatile
848 { return fetch_sub( 1 ); }
850 long operator ++() volatile
851 { return fetch_add( 1 ) + 1; }
853 long operator --() volatile
854 { return fetch_sub( 1 ) - 1; }
856 long operator +=( long __v__ ) volatile
857 { return fetch_add( __v__ ) + __v__; }
859 long operator -=( long __v__ ) volatile
860 { return fetch_sub( __v__ ) - __v__; }
862 long operator &=( long __v__ ) volatile
863 { return fetch_and( __v__ ) & __v__; }
865 long operator |=( long __v__ ) volatile
866 { return fetch_or( __v__ ) | __v__; }
868 long operator ^=( long __v__ ) volatile
869 { return fetch_xor( __v__ ) ^ __v__; }
871 friend void atomic_store_explicit( volatile atomic_long*, long,
873 friend long atomic_load_explicit( volatile atomic_long*,
875 friend long atomic_swap_explicit( volatile atomic_long*,
876 long, memory_order );
877 friend bool atomic_compare_swap_explicit( volatile atomic_long*,
878 long*, long, memory_order, memory_order );
879 friend void atomic_fence( const volatile atomic_long*, memory_order );
880 friend long atomic_fetch_add_explicit( volatile atomic_long*,
881 long, memory_order );
882 friend long atomic_fetch_sub_explicit( volatile atomic_long*,
883 long, memory_order );
884 friend long atomic_fetch_and_explicit( volatile atomic_long*,
885 long, memory_order );
886 friend long atomic_fetch_or_explicit( volatile atomic_long*,
887 long, memory_order );
888 friend long atomic_fetch_xor_explicit( volatile atomic_long*,
889 long, memory_order );
// Atomic unsigned long. Mirrors the other integral atomic_* structs:
// member operations forward (via the friend free functions declared below)
// to the _ATOMIC_* statement-expression macros. The underlying data field
// is named __f__ by the file-wide convention (see header comment block).
// NOTE(review): this excerpt elides the struct's braces and some
// continuation lines (original line numbers are baked into the listing).
897 typedef struct atomic_ulong
900 bool is_lock_free() const volatile;
901 void store( unsigned long,
902 memory_order = memory_order_seq_cst ) volatile;
903 unsigned long load( memory_order = memory_order_seq_cst ) volatile;
904 unsigned long swap( unsigned long,
905 memory_order = memory_order_seq_cst ) volatile;
// Two compare_swap overloads: explicit success/failure orderings,
// and a single-order convenience defaulting to seq_cst.
906 bool compare_swap( unsigned long&, unsigned long,
907 memory_order, memory_order ) volatile;
908 bool compare_swap( unsigned long&, unsigned long,
909 memory_order = memory_order_seq_cst ) volatile;
910 void fence( memory_order ) const volatile;
911 unsigned long fetch_add( unsigned long,
912 memory_order = memory_order_seq_cst ) volatile;
913 unsigned long fetch_sub( unsigned long,
914 memory_order = memory_order_seq_cst ) volatile;
915 unsigned long fetch_and( unsigned long,
916 memory_order = memory_order_seq_cst ) volatile;
917 unsigned long fetch_or( unsigned long,
918 memory_order = memory_order_seq_cst ) volatile;
919 unsigned long fetch_xor( unsigned long,
920 memory_order = memory_order_seq_cst ) volatile;
// C++0x-only members; CPP0X expands to nothing in C mode.
922 CPP0X( atomic_ulong() = default; )
923 CPP0X( constexpr atomic_ulong( unsigned long __v__ ) : __f__( __v__) { } )
924 CPP0X( atomic_ulong( const atomic_ulong& ) = delete; )
925 atomic_ulong& operator =( const atomic_ulong& ) CPP0X(=delete);
927 unsigned long operator =( unsigned long __v__ ) volatile
928 { store( __v__ ); return __v__; }
// Operators are built on the fetch_* primitives; pre-forms recompute
// the post-operation value from the fetched (old) value.
930 unsigned long operator ++( int ) volatile
931 { return fetch_add( 1 ); }
933 unsigned long operator --( int ) volatile
934 { return fetch_sub( 1 ); }
936 unsigned long operator ++() volatile
937 { return fetch_add( 1 ) + 1; }
939 unsigned long operator --() volatile
940 { return fetch_sub( 1 ) - 1; }
942 unsigned long operator +=( unsigned long __v__ ) volatile
943 { return fetch_add( __v__ ) + __v__; }
945 unsigned long operator -=( unsigned long __v__ ) volatile
946 { return fetch_sub( __v__ ) - __v__; }
948 unsigned long operator &=( unsigned long __v__ ) volatile
949 { return fetch_and( __v__ ) & __v__; }
951 unsigned long operator |=( unsigned long __v__ ) volatile
952 { return fetch_or( __v__ ) | __v__; }
954 unsigned long operator ^=( unsigned long __v__ ) volatile
955 { return fetch_xor( __v__ ) ^ __v__; }
// Friends: the C-compatible free functions need access to __f__.
// Some declaration continuation lines are elided in this excerpt.
957 friend void atomic_store_explicit( volatile atomic_ulong*, unsigned long,
959 friend unsigned long atomic_load_explicit( volatile atomic_ulong*,
961 friend unsigned long atomic_swap_explicit( volatile atomic_ulong*,
962 unsigned long, memory_order );
963 friend bool atomic_compare_swap_explicit( volatile atomic_ulong*,
964 unsigned long*, unsigned long, memory_order, memory_order );
965 friend void atomic_fence( const volatile atomic_ulong*, memory_order );
966 friend unsigned long atomic_fetch_add_explicit( volatile atomic_ulong*,
967 unsigned long, memory_order );
968 friend unsigned long atomic_fetch_sub_explicit( volatile atomic_ulong*,
969 unsigned long, memory_order );
970 friend unsigned long atomic_fetch_and_explicit( volatile atomic_ulong*,
971 unsigned long, memory_order );
972 friend unsigned long atomic_fetch_or_explicit( volatile atomic_ulong*,
973 unsigned long, memory_order );
974 friend unsigned long atomic_fetch_xor_explicit( volatile atomic_ulong*,
975 unsigned long, memory_order );
// Atomic long long. Same member layout as the other integral atomic_*
// structs: store/load/swap/compare_swap/fence plus fetch_{add,sub,and,or,xor},
// with operators defined in terms of the fetch_* primitives, and friend
// declarations for the C-compatible free functions.
// NOTE(review): braces and some continuation lines are elided in this excerpt.
983 typedef struct atomic_llong
986 bool is_lock_free() const volatile;
987 void store( long long,
988 memory_order = memory_order_seq_cst ) volatile;
989 long long load( memory_order = memory_order_seq_cst ) volatile;
990 long long swap( long long,
991 memory_order = memory_order_seq_cst ) volatile;
992 bool compare_swap( long long&, long long,
993 memory_order, memory_order ) volatile;
994 bool compare_swap( long long&, long long,
995 memory_order = memory_order_seq_cst ) volatile;
996 void fence( memory_order ) const volatile;
997 long long fetch_add( long long,
998 memory_order = memory_order_seq_cst ) volatile;
999 long long fetch_sub( long long,
1000 memory_order = memory_order_seq_cst ) volatile;
1001 long long fetch_and( long long,
1002 memory_order = memory_order_seq_cst ) volatile;
1003 long long fetch_or( long long,
1004 memory_order = memory_order_seq_cst ) volatile;
1005 long long fetch_xor( long long,
1006 memory_order = memory_order_seq_cst ) volatile;
// C++0x-only special members; CPP0X expands to nothing in C mode.
1008 CPP0X( atomic_llong() = default; )
1009 CPP0X( constexpr atomic_llong( long long __v__ ) : __f__( __v__) { } )
1010 CPP0X( atomic_llong( const atomic_llong& ) = delete; )
1011 atomic_llong& operator =( const atomic_llong& ) CPP0X(=delete);
1013 long long operator =( long long __v__ ) volatile
1014 { store( __v__ ); return __v__; }
// Pre-forms recompute the post-operation value from the fetched old value.
1016 long long operator ++( int ) volatile
1017 { return fetch_add( 1 ); }
1019 long long operator --( int ) volatile
1020 { return fetch_sub( 1 ); }
1022 long long operator ++() volatile
1023 { return fetch_add( 1 ) + 1; }
1025 long long operator --() volatile
1026 { return fetch_sub( 1 ) - 1; }
1028 long long operator +=( long long __v__ ) volatile
1029 { return fetch_add( __v__ ) + __v__; }
1031 long long operator -=( long long __v__ ) volatile
1032 { return fetch_sub( __v__ ) - __v__; }
1034 long long operator &=( long long __v__ ) volatile
1035 { return fetch_and( __v__ ) & __v__; }
1037 long long operator |=( long long __v__ ) volatile
1038 { return fetch_or( __v__ ) | __v__; }
1040 long long operator ^=( long long __v__ ) volatile
1041 { return fetch_xor( __v__ ) ^ __v__; }
// Friend free functions require access to the private __f__ field.
1043 friend void atomic_store_explicit( volatile atomic_llong*, long long,
1045 friend long long atomic_load_explicit( volatile atomic_llong*,
1047 friend long long atomic_swap_explicit( volatile atomic_llong*,
1048 long long, memory_order );
1049 friend bool atomic_compare_swap_explicit( volatile atomic_llong*,
1050 long long*, long long, memory_order, memory_order );
1051 friend void atomic_fence( const volatile atomic_llong*, memory_order );
1052 friend long long atomic_fetch_add_explicit( volatile atomic_llong*,
1053 long long, memory_order );
1054 friend long long atomic_fetch_sub_explicit( volatile atomic_llong*,
1055 long long, memory_order );
1056 friend long long atomic_fetch_and_explicit( volatile atomic_llong*,
1057 long long, memory_order );
1058 friend long long atomic_fetch_or_explicit( volatile atomic_llong*,
1059 long long, memory_order );
1060 friend long long atomic_fetch_xor_explicit( volatile atomic_llong*,
1061 long long, memory_order );
// Atomic unsigned long long. Same member layout as the other integral
// atomic_* structs. This one's excerpt also shows the single data member
// __f__ that all atomic structs carry (per the file-wide naming note).
// NOTE(review): braces and some continuation lines are elided in this excerpt.
1069 typedef struct atomic_ullong
1072 bool is_lock_free() const volatile;
1073 void store( unsigned long long,
1074 memory_order = memory_order_seq_cst ) volatile;
1075 unsigned long long load( memory_order = memory_order_seq_cst ) volatile;
1076 unsigned long long swap( unsigned long long,
1077 memory_order = memory_order_seq_cst ) volatile;
1078 bool compare_swap( unsigned long long&, unsigned long long,
1079 memory_order, memory_order ) volatile;
1080 bool compare_swap( unsigned long long&, unsigned long long,
1081 memory_order = memory_order_seq_cst ) volatile;
1082 void fence( memory_order ) const volatile;
1083 unsigned long long fetch_add( unsigned long long,
1084 memory_order = memory_order_seq_cst ) volatile;
1085 unsigned long long fetch_sub( unsigned long long,
1086 memory_order = memory_order_seq_cst ) volatile;
1087 unsigned long long fetch_and( unsigned long long,
1088 memory_order = memory_order_seq_cst ) volatile;
1089 unsigned long long fetch_or( unsigned long long,
1090 memory_order = memory_order_seq_cst ) volatile;
1091 unsigned long long fetch_xor( unsigned long long,
1092 memory_order = memory_order_seq_cst ) volatile;
// C++0x-only special members; CPP0X expands to nothing in C mode.
1094 CPP0X( atomic_ullong() = default; )
1095 CPP0X( constexpr atomic_ullong( unsigned long long __v__ ) : __f__( __v__) { } )
1096 CPP0X( atomic_ullong( const atomic_ullong& ) = delete; )
1097 atomic_ullong& operator =( const atomic_ullong& ) CPP0X(=delete);
1099 unsigned long long operator =( unsigned long long __v__ ) volatile
1100 { store( __v__ ); return __v__; }
// Pre-forms recompute the post-operation value from the fetched old value.
1102 unsigned long long operator ++( int ) volatile
1103 { return fetch_add( 1 ); }
1105 unsigned long long operator --( int ) volatile
1106 { return fetch_sub( 1 ); }
1108 unsigned long long operator ++() volatile
1109 { return fetch_add( 1 ) + 1; }
1111 unsigned long long operator --() volatile
1112 { return fetch_sub( 1 ) - 1; }
1114 unsigned long long operator +=( unsigned long long __v__ ) volatile
1115 { return fetch_add( __v__ ) + __v__; }
1117 unsigned long long operator -=( unsigned long long __v__ ) volatile
1118 { return fetch_sub( __v__ ) - __v__; }
1120 unsigned long long operator &=( unsigned long long __v__ ) volatile
1121 { return fetch_and( __v__ ) & __v__; }
1123 unsigned long long operator |=( unsigned long long __v__ ) volatile
1124 { return fetch_or( __v__ ) | __v__; }
1126 unsigned long long operator ^=( unsigned long long __v__ ) volatile
1127 { return fetch_xor( __v__ ) ^ __v__; }
// Friend free functions require access to the private __f__ field.
1129 friend void atomic_store_explicit( volatile atomic_ullong*, unsigned long long,
1131 friend unsigned long long atomic_load_explicit( volatile atomic_ullong*,
1133 friend unsigned long long atomic_swap_explicit( volatile atomic_ullong*,
1134 unsigned long long, memory_order );
1135 friend bool atomic_compare_swap_explicit( volatile atomic_ullong*,
1136 unsigned long long*, unsigned long long, memory_order, memory_order );
1137 friend void atomic_fence( const volatile atomic_ullong*, memory_order );
1138 friend unsigned long long atomic_fetch_add_explicit( volatile atomic_ullong*,
1139 unsigned long long, memory_order );
1140 friend unsigned long long atomic_fetch_sub_explicit( volatile atomic_ullong*,
1141 unsigned long long, memory_order );
1142 friend unsigned long long atomic_fetch_and_explicit( volatile atomic_ullong*,
1143 unsigned long long, memory_order );
1144 friend unsigned long long atomic_fetch_or_explicit( volatile atomic_ullong*,
1145 unsigned long long, memory_order );
1146 friend unsigned long long atomic_fetch_xor_explicit( volatile atomic_ullong*,
1147 unsigned long long, memory_order );
// The single data field shared by all atomic structs in this file.
1151 unsigned long long __f__;
// Fixed-width, fast, pointer-sized, and max-width atomic aliases, mapped
// onto the fundamental atomic_* structs declared above (mirrors <stdint.h>).
// NOTE(review): the least/fast and intptr/size mappings here hard-code a
// particular data model (e.g. 32-bit int, pointer-sized long) — these would
// not hold on all platforms; confirm against the intended targets.
1155 typedef atomic_schar atomic_int_least8_t;
1156 typedef atomic_uchar atomic_uint_least8_t;
1157 typedef atomic_short atomic_int_least16_t;
1158 typedef atomic_ushort atomic_uint_least16_t;
1159 typedef atomic_int atomic_int_least32_t;
1160 typedef atomic_uint atomic_uint_least32_t;
1161 typedef atomic_llong atomic_int_least64_t;
1162 typedef atomic_ullong atomic_uint_least64_t;
1164 typedef atomic_schar atomic_int_fast8_t;
1165 typedef atomic_uchar atomic_uint_fast8_t;
1166 typedef atomic_short atomic_int_fast16_t;
1167 typedef atomic_ushort atomic_uint_fast16_t;
1168 typedef atomic_int atomic_int_fast32_t;
1169 typedef atomic_uint atomic_uint_fast32_t;
1170 typedef atomic_llong atomic_int_fast64_t;
1171 typedef atomic_ullong atomic_uint_fast64_t;
// Pointer-sized and size-type atomics assume long is pointer-sized (LP64).
1173 typedef atomic_long atomic_intptr_t;
1174 typedef atomic_ulong atomic_uintptr_t;
1176 typedef atomic_long atomic_ssize_t;
1177 typedef atomic_ulong atomic_size_t;
1179 typedef atomic_long atomic_ptrdiff_t;
1181 typedef atomic_llong atomic_intmax_t;
1182 typedef atomic_ullong atomic_uintmax_t;
// Atomic wchar_t. Same member layout as the integral atomic_* structs:
// store/load/swap/compare_swap/fence plus fetch_{add,sub,and,or,xor},
// operators built on fetch_*, and friend declarations for free functions.
// NOTE(review): braces and some continuation lines are elided in this excerpt.
1188 typedef struct atomic_wchar_t
1191 bool is_lock_free() const volatile;
1192 void store( wchar_t, memory_order = memory_order_seq_cst ) volatile;
1193 wchar_t load( memory_order = memory_order_seq_cst ) volatile;
1194 wchar_t swap( wchar_t,
1195 memory_order = memory_order_seq_cst ) volatile;
1196 bool compare_swap( wchar_t&, wchar_t,
1197 memory_order, memory_order ) volatile;
1198 bool compare_swap( wchar_t&, wchar_t,
1199 memory_order = memory_order_seq_cst ) volatile;
1200 void fence( memory_order ) const volatile;
1201 wchar_t fetch_add( wchar_t,
1202 memory_order = memory_order_seq_cst ) volatile;
1203 wchar_t fetch_sub( wchar_t,
1204 memory_order = memory_order_seq_cst ) volatile;
1205 wchar_t fetch_and( wchar_t,
1206 memory_order = memory_order_seq_cst ) volatile;
1207 wchar_t fetch_or( wchar_t,
1208 memory_order = memory_order_seq_cst ) volatile;
1209 wchar_t fetch_xor( wchar_t,
1210 memory_order = memory_order_seq_cst ) volatile;
// C++0x-only special members; CPP0X expands to nothing in C mode.
1212 CPP0X( atomic_wchar_t() = default; )
1213 CPP0X( constexpr atomic_wchar_t( wchar_t __v__ ) : __f__( __v__) { } )
1214 CPP0X( atomic_wchar_t( const atomic_wchar_t& ) = delete; )
1215 atomic_wchar_t& operator =( const atomic_wchar_t& ) CPP0X(=delete);
1217 wchar_t operator =( wchar_t __v__ ) volatile
1218 { store( __v__ ); return __v__; }
// Pre-forms recompute the post-operation value from the fetched old value.
1220 wchar_t operator ++( int ) volatile
1221 { return fetch_add( 1 ); }
1223 wchar_t operator --( int ) volatile
1224 { return fetch_sub( 1 ); }
1226 wchar_t operator ++() volatile
1227 { return fetch_add( 1 ) + 1; }
1229 wchar_t operator --() volatile
1230 { return fetch_sub( 1 ) - 1; }
1232 wchar_t operator +=( wchar_t __v__ ) volatile
1233 { return fetch_add( __v__ ) + __v__; }
1235 wchar_t operator -=( wchar_t __v__ ) volatile
1236 { return fetch_sub( __v__ ) - __v__; }
1238 wchar_t operator &=( wchar_t __v__ ) volatile
1239 { return fetch_and( __v__ ) & __v__; }
1241 wchar_t operator |=( wchar_t __v__ ) volatile
1242 { return fetch_or( __v__ ) | __v__; }
1244 wchar_t operator ^=( wchar_t __v__ ) volatile
1245 { return fetch_xor( __v__ ) ^ __v__; }
// Friend free functions require access to the private __f__ field.
1247 friend void atomic_store_explicit( volatile atomic_wchar_t*, wchar_t,
1249 friend wchar_t atomic_load_explicit( volatile atomic_wchar_t*,
1251 friend wchar_t atomic_swap_explicit( volatile atomic_wchar_t*,
1252 wchar_t, memory_order );
1253 friend bool atomic_compare_swap_explicit( volatile atomic_wchar_t*,
1254 wchar_t*, wchar_t, memory_order, memory_order );
1255 friend void atomic_fence( const volatile atomic_wchar_t*, memory_order );
1256 friend wchar_t atomic_fetch_add_explicit( volatile atomic_wchar_t*,
1257 wchar_t, memory_order );
1258 friend wchar_t atomic_fetch_sub_explicit( volatile atomic_wchar_t*,
1259 wchar_t, memory_order );
1260 friend wchar_t atomic_fetch_and_explicit( volatile atomic_wchar_t*,
1261 wchar_t, memory_order );
1262 friend wchar_t atomic_fetch_or_explicit( volatile atomic_wchar_t*,
1263 wchar_t, memory_order );
1264 friend wchar_t atomic_fetch_xor_explicit( volatile atomic_wchar_t*,
1265 wchar_t, memory_order );
// Character atomics as aliases of the least-width integral atomics.
// NOTE(review): the atomic_wchar_t typedef below would conflict with the
// struct atomic_wchar_t defined earlier unless these lines sit in a
// mutually exclusive preprocessor branch — the guards are elided from
// this excerpt (original line numbers jump 1265 -> 1275); confirm in the
// full file.
1275 typedef atomic_int_least16_t atomic_char16_t;
1276 typedef atomic_int_least32_t atomic_char32_t;
1277 typedef atomic_int_least32_t atomic_wchar_t;
// Primary template for C++0x: provides the basic operation set
// (no fetch_* arithmetic — that is reserved for the integral/pointer
// specializations and base structs below).
// NOTE(review): the template-head line(s) between 1284 and 1289 are
// elided from this excerpt.
1284 template< typename T >
1289 bool is_lock_free() const volatile;
1290 void store( T, memory_order = memory_order_seq_cst ) volatile;
1291 T load( memory_order = memory_order_seq_cst ) volatile;
1292 T swap( T __v__, memory_order = memory_order_seq_cst ) volatile;
// Two compare_swap overloads: explicit success/failure orderings,
// and a single-order convenience defaulting to seq_cst.
1293 bool compare_swap( T&, T, memory_order, memory_order ) volatile;
1294 bool compare_swap( T&, T, memory_order = memory_order_seq_cst ) volatile;
1295 void fence( memory_order ) const volatile;
1297 CPP0X( atomic() = default; )
1298 CPP0X( constexpr explicit atomic( T __v__ ) : __f__( __v__ ) { } )
1299 CPP0X( atomic( const atomic& ) = delete; )
1300 atomic& operator =( const atomic& ) CPP0X(=delete);
1302 T operator =( T __v__ ) volatile
1303 { store( __v__ ); return __v__; }
// Partial specialization for pointers, layered on atomic_address.
// Re-declares the accessors with T* types and adds pointer arithmetic
// via fetch_add/fetch_sub taking ptrdiff_t byte^H^H^H^Helement offsets.
1314 template<typename T> struct atomic< T* > : atomic_address
1316 T* load( memory_order = memory_order_seq_cst ) volatile;
1317 T* swap( T*, memory_order = memory_order_seq_cst ) volatile;
1318 bool compare_swap( T*&, T*, memory_order, memory_order ) volatile;
1319 bool compare_swap( T*&, T*,
1320 memory_order = memory_order_seq_cst ) volatile;
1321 T* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
1322 T* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
1324 CPP0X( atomic() = default; )
1325 CPP0X( constexpr explicit atomic( T __v__ ) : atomic_address( __v__ ) { } )
1326 CPP0X( atomic( const atomic& ) = delete; )
1327 atomic& operator =( const atomic& ) CPP0X(=delete);
1329 T* operator =( T* __v__ ) volatile
1330 { store( __v__ ); return __v__; }
// Pre-forms recompute the post-operation pointer from the fetched old value.
1332 T* operator ++( int ) volatile
1333 { return fetch_add( 1 ); }
1335 T* operator --( int ) volatile
1336 { return fetch_sub( 1 ); }
1338 T* operator ++() volatile
1339 { return fetch_add( 1 ) + 1; }
1341 T* operator --() volatile
1342 { return fetch_sub( 1 ) - 1; }
// NOTE(review): operator+= / operator-= take T* here, while fetch_add /
// fetch_sub (lines 1321-1322) take ptrdiff_t, and "fetch_add(__v__) + __v__"
// would add a pointer to a pointer. These parameters look like they should
// be ptrdiff_t — confirm against the full file / upstream source.
// Also note the constexpr ctor at 1325 takes T rather than T*.
1344 T* operator +=( T* __v__ ) volatile
1345 { return fetch_add( __v__ ) + __v__; }
1347 T* operator -=( T* __v__ ) volatile
1348 { return fetch_sub( __v__ ) - __v__; }
// Full specializations of atomic<> for the fundamental types. Each one
// derives from the corresponding pre-declared atomic_* struct, inheriting
// its operations, and re-declares only the C++0x special members plus the
// assignment-from-value operator (which forwards to the inherited store).
// NOTE(review): each specialization's closing "};" line is elided from
// this excerpt.
1356 template<> struct atomic< bool > : atomic_bool
1358 CPP0X( atomic() = default; )
1359 CPP0X( constexpr explicit atomic( bool __v__ )
1360 : atomic_bool( __v__ ) { } )
1361 CPP0X( atomic( const atomic& ) = delete; )
1362 atomic& operator =( const atomic& ) CPP0X(=delete);
1364 bool operator =( bool __v__ ) volatile
1365 { store( __v__ ); return __v__; }
1369 template<> struct atomic< void* > : atomic_address
1371 CPP0X( atomic() = default; )
1372 CPP0X( constexpr explicit atomic( void* __v__ )
1373 : atomic_address( __v__ ) { } )
1374 CPP0X( atomic( const atomic& ) = delete; )
1375 atomic& operator =( const atomic& ) CPP0X(=delete);
1377 void* operator =( void* __v__ ) volatile
1378 { store( __v__ ); return __v__; }
1382 template<> struct atomic< char > : atomic_char
1384 CPP0X( atomic() = default; )
1385 CPP0X( constexpr explicit atomic( char __v__ )
1386 : atomic_char( __v__ ) { } )
1387 CPP0X( atomic( const atomic& ) = delete; )
1388 atomic& operator =( const atomic& ) CPP0X(=delete);
1390 char operator =( char __v__ ) volatile
1391 { store( __v__ ); return __v__; }
1395 template<> struct atomic< signed char > : atomic_schar
1397 CPP0X( atomic() = default; )
1398 CPP0X( constexpr explicit atomic( signed char __v__ )
1399 : atomic_schar( __v__ ) { } )
1400 CPP0X( atomic( const atomic& ) = delete; )
1401 atomic& operator =( const atomic& ) CPP0X(=delete);
1403 signed char operator =( signed char __v__ ) volatile
1404 { store( __v__ ); return __v__; }
1408 template<> struct atomic< unsigned char > : atomic_uchar
1410 CPP0X( atomic() = default; )
1411 CPP0X( constexpr explicit atomic( unsigned char __v__ )
1412 : atomic_uchar( __v__ ) { } )
1413 CPP0X( atomic( const atomic& ) = delete; )
1414 atomic& operator =( const atomic& ) CPP0X(=delete);
1416 unsigned char operator =( unsigned char __v__ ) volatile
1417 { store( __v__ ); return __v__; }
1421 template<> struct atomic< short > : atomic_short
1423 CPP0X( atomic() = default; )
1424 CPP0X( constexpr explicit atomic( short __v__ )
1425 : atomic_short( __v__ ) { } )
1426 CPP0X( atomic( const atomic& ) = delete; )
1427 atomic& operator =( const atomic& ) CPP0X(=delete);
1429 short operator =( short __v__ ) volatile
1430 { store( __v__ ); return __v__; }
1434 template<> struct atomic< unsigned short > : atomic_ushort
1436 CPP0X( atomic() = default; )
1437 CPP0X( constexpr explicit atomic( unsigned short __v__ )
1438 : atomic_ushort( __v__ ) { } )
1439 CPP0X( atomic( const atomic& ) = delete; )
1440 atomic& operator =( const atomic& ) CPP0X(=delete);
1442 unsigned short operator =( unsigned short __v__ ) volatile
1443 { store( __v__ ); return __v__; }
1447 template<> struct atomic< int > : atomic_int
1449 CPP0X( atomic() = default; )
1450 CPP0X( constexpr explicit atomic( int __v__ )
1451 : atomic_int( __v__ ) { } )
1452 CPP0X( atomic( const atomic& ) = delete; )
1453 atomic& operator =( const atomic& ) CPP0X(=delete);
1455 int operator =( int __v__ ) volatile
1456 { store( __v__ ); return __v__; }
1460 template<> struct atomic< unsigned int > : atomic_uint
1462 CPP0X( atomic() = default; )
1463 CPP0X( constexpr explicit atomic( unsigned int __v__ )
1464 : atomic_uint( __v__ ) { } )
1465 CPP0X( atomic( const atomic& ) = delete; )
1466 atomic& operator =( const atomic& ) CPP0X(=delete);
1468 unsigned int operator =( unsigned int __v__ ) volatile
1469 { store( __v__ ); return __v__; }
1473 template<> struct atomic< long > : atomic_long
1475 CPP0X( atomic() = default; )
1476 CPP0X( constexpr explicit atomic( long __v__ )
1477 : atomic_long( __v__ ) { } )
1478 CPP0X( atomic( const atomic& ) = delete; )
1479 atomic& operator =( const atomic& ) CPP0X(=delete);
1481 long operator =( long __v__ ) volatile
1482 { store( __v__ ); return __v__; }
1486 template<> struct atomic< unsigned long > : atomic_ulong
1488 CPP0X( atomic() = default; )
1489 CPP0X( constexpr explicit atomic( unsigned long __v__ )
1490 : atomic_ulong( __v__ ) { } )
1491 CPP0X( atomic( const atomic& ) = delete; )
1492 atomic& operator =( const atomic& ) CPP0X(=delete);
1494 unsigned long operator =( unsigned long __v__ ) volatile
1495 { store( __v__ ); return __v__; }
1499 template<> struct atomic< long long > : atomic_llong
1501 CPP0X( atomic() = default; )
1502 CPP0X( constexpr explicit atomic( long long __v__ )
1503 : atomic_llong( __v__ ) { } )
1504 CPP0X( atomic( const atomic& ) = delete; )
1505 atomic& operator =( const atomic& ) CPP0X(=delete);
1507 long long operator =( long long __v__ ) volatile
1508 { store( __v__ ); return __v__; }
1512 template<> struct atomic< unsigned long long > : atomic_ullong
1514 CPP0X( atomic() = default; )
1515 CPP0X( constexpr explicit atomic( unsigned long long __v__ )
1516 : atomic_ullong( __v__ ) { } )
1517 CPP0X( atomic( const atomic& ) = delete; )
1518 atomic& operator =( const atomic& ) CPP0X(=delete);
1520 unsigned long long operator =( unsigned long long __v__ ) volatile
1521 { store( __v__ ); return __v__; }
1525 template<> struct atomic< wchar_t > : atomic_wchar_t
1527 CPP0X( atomic() = default; )
1528 CPP0X( constexpr explicit atomic( wchar_t __v__ )
1529 : atomic_wchar_t( __v__ ) { } )
1530 CPP0X( atomic( const atomic& ) = delete; )
1531 atomic& operator =( const atomic& ) CPP0X(=delete);
1533 wchar_t operator =( wchar_t __v__ ) volatile
1534 { store( __v__ ); return __v__; }
// Free-function (C-compatible) implementations for atomic_bool.
// _explicit variants expand the _ATOMIC_* macros directly; the
// non-_explicit variants forward with memory_order_seq_cst.
// NOTE(review): the atomic_is_lock_free body (line ~1546) is elided
// from this excerpt.
1544 inline bool atomic_is_lock_free
1545 ( const volatile atomic_bool* __a__ )
1548 inline bool atomic_load_explicit
1549 ( volatile atomic_bool* __a__, memory_order __x__ )
1550 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1552 inline bool atomic_load
1553 ( volatile atomic_bool* __a__ ) { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1555 inline void atomic_store_explicit
1556 ( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
1557 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1559 inline void atomic_store
1560 ( volatile atomic_bool* __a__, bool __m__ )
1561 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
// swap is expressed as a modify with plain assignment as the operation.
1563 inline bool atomic_swap_explicit
1564 ( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
1565 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1567 inline bool atomic_swap
1568 ( volatile atomic_bool* __a__, bool __m__ )
1569 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
// NOTE(review): the failure ordering __y__ is accepted but never passed to
// _ATOMIC_CMPSWP_ — only the success ordering __x__ is used.
1571 inline bool atomic_compare_swap_explicit
1572 ( volatile atomic_bool* __a__, bool* __e__, bool __m__,
1573 memory_order __x__, memory_order __y__ )
1574 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1576 inline bool atomic_compare_swap
1577 ( volatile atomic_bool* __a__, bool* __e__, bool __m__ )
1578 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1579 memory_order_seq_cst, memory_order_seq_cst ); }
1581 inline void atomic_fence
1582 ( const volatile atomic_bool* __a__, memory_order __x__ )
1583 { _ATOMIC_FENCE_( __a__, __x__ ); }
// Free-function implementations for atomic_address (void* payload).
// Same pattern as the atomic_bool set: _explicit variants expand the
// macros; plain variants forward with memory_order_seq_cst.
// NOTE(review): the atomic_is_lock_free body is elided from this excerpt.
1586 inline bool atomic_is_lock_free( const volatile atomic_address* __a__ )
1589 inline void* atomic_load_explicit
1590 ( volatile atomic_address* __a__, memory_order __x__ )
1591 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1593 inline void* atomic_load( volatile atomic_address* __a__ )
1594 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1596 inline void atomic_store_explicit
1597 ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
1598 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1600 inline void atomic_store
1601 ( volatile atomic_address* __a__, void* __m__ )
1602 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1604 inline void* atomic_swap_explicit
1605 ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
1606 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1608 inline void* atomic_swap
1609 ( volatile atomic_address* __a__, void* __m__ )
1610 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
// NOTE(review): failure ordering __y__ is unused (only __x__ reaches the macro).
1612 inline bool atomic_compare_swap_explicit
1613 ( volatile atomic_address* __a__, void** __e__, void* __m__,
1614 memory_order __x__, memory_order __y__ )
1615 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1617 inline bool atomic_compare_swap
1618 ( volatile atomic_address* __a__, void** __e__, void* __m__ )
1619 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1620 memory_order_seq_cst, memory_order_seq_cst ); }
1622 inline void atomic_fence
1623 ( const volatile atomic_address* __a__, memory_order __x__ )
1624 { _ATOMIC_FENCE_( __a__, __x__ ); }
// Free-function implementations for atomic_char (same pattern as above).
// NOTE(review): the atomic_is_lock_free body is elided from this excerpt.
1627 inline bool atomic_is_lock_free( const volatile atomic_char* __a__ )
1630 inline char atomic_load_explicit
1631 ( volatile atomic_char* __a__, memory_order __x__ )
1632 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1634 inline char atomic_load( volatile atomic_char* __a__ )
1635 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1637 inline void atomic_store_explicit
1638 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
1639 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1641 inline void atomic_store
1642 ( volatile atomic_char* __a__, char __m__ )
1643 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1645 inline char atomic_swap_explicit
1646 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
1647 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1649 inline char atomic_swap
1650 ( volatile atomic_char* __a__, char __m__ )
1651 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
// NOTE(review): failure ordering __y__ is unused (only __x__ reaches the macro).
1653 inline bool atomic_compare_swap_explicit
1654 ( volatile atomic_char* __a__, char* __e__, char __m__,
1655 memory_order __x__, memory_order __y__ )
1656 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1658 inline bool atomic_compare_swap
1659 ( volatile atomic_char* __a__, char* __e__, char __m__ )
1660 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1661 memory_order_seq_cst, memory_order_seq_cst ); }
1663 inline void atomic_fence
1664 ( const volatile atomic_char* __a__, memory_order __x__ )
1665 { _ATOMIC_FENCE_( __a__, __x__ ); }
// Free-function implementations for atomic_schar (same pattern as above).
// NOTE(review): the atomic_is_lock_free body is elided from this excerpt.
1668 inline bool atomic_is_lock_free( const volatile atomic_schar* __a__ )
1671 inline signed char atomic_load_explicit
1672 ( volatile atomic_schar* __a__, memory_order __x__ )
1673 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1675 inline signed char atomic_load( volatile atomic_schar* __a__ )
1676 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1678 inline void atomic_store_explicit
1679 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
1680 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1682 inline void atomic_store
1683 ( volatile atomic_schar* __a__, signed char __m__ )
1684 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1686 inline signed char atomic_swap_explicit
1687 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
1688 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1690 inline signed char atomic_swap
1691 ( volatile atomic_schar* __a__, signed char __m__ )
1692 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
// NOTE(review): failure ordering __y__ is unused (only __x__ reaches the macro).
1694 inline bool atomic_compare_swap_explicit
1695 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__,
1696 memory_order __x__, memory_order __y__ )
1697 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1699 inline bool atomic_compare_swap
1700 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__ )
1701 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1702 memory_order_seq_cst, memory_order_seq_cst ); }
1704 inline void atomic_fence
1705 ( const volatile atomic_schar* __a__, memory_order __x__ )
1706 { _ATOMIC_FENCE_( __a__, __x__ ); }
// Free-function implementations for atomic_uchar (same pattern as above).
// NOTE(review): the atomic_is_lock_free body is elided from this excerpt.
1709 inline bool atomic_is_lock_free( const volatile atomic_uchar* __a__ )
1712 inline unsigned char atomic_load_explicit
1713 ( volatile atomic_uchar* __a__, memory_order __x__ )
1714 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1716 inline unsigned char atomic_load( volatile atomic_uchar* __a__ )
1717 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1719 inline void atomic_store_explicit
1720 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
1721 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1723 inline void atomic_store
1724 ( volatile atomic_uchar* __a__, unsigned char __m__ )
1725 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1727 inline unsigned char atomic_swap_explicit
1728 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
1729 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1731 inline unsigned char atomic_swap
1732 ( volatile atomic_uchar* __a__, unsigned char __m__ )
1733 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
// NOTE(review): failure ordering __y__ is unused (only __x__ reaches the macro).
1735 inline bool atomic_compare_swap_explicit
1736 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__,
1737 memory_order __x__, memory_order __y__ )
1738 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1740 inline bool atomic_compare_swap
1741 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__ )
1742 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1743 memory_order_seq_cst, memory_order_seq_cst ); }
1745 inline void atomic_fence
1746 ( const volatile atomic_uchar* __a__, memory_order __x__ )
1747 { _ATOMIC_FENCE_( __a__, __x__ ); }
// Free-function implementations for atomic_short (same pattern as above).
// NOTE(review): the atomic_is_lock_free body is elided from this excerpt.
1750 inline bool atomic_is_lock_free( const volatile atomic_short* __a__ )
1753 inline short atomic_load_explicit
1754 ( volatile atomic_short* __a__, memory_order __x__ )
1755 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1757 inline short atomic_load( volatile atomic_short* __a__ )
1758 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1760 inline void atomic_store_explicit
1761 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
1762 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1764 inline void atomic_store
1765 ( volatile atomic_short* __a__, short __m__ )
1766 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1768 inline short atomic_swap_explicit
1769 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
1770 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1772 inline short atomic_swap
1773 ( volatile atomic_short* __a__, short __m__ )
1774 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
// NOTE(review): failure ordering __y__ is unused (only __x__ reaches the macro).
1776 inline bool atomic_compare_swap_explicit
1777 ( volatile atomic_short* __a__, short* __e__, short __m__,
1778 memory_order __x__, memory_order __y__ )
1779 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1781 inline bool atomic_compare_swap
1782 ( volatile atomic_short* __a__, short* __e__, short __m__ )
1783 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1784 memory_order_seq_cst, memory_order_seq_cst ); }
1786 inline void atomic_fence
1787 ( const volatile atomic_short* __a__, memory_order __x__ )
1788 { _ATOMIC_FENCE_( __a__, __x__ ); }
// Free-function implementations for atomic_ushort (same pattern as above).
// NOTE(review): the atomic_is_lock_free body is elided from this excerpt.
1791 inline bool atomic_is_lock_free( const volatile atomic_ushort* __a__ )
1794 inline unsigned short atomic_load_explicit
1795 ( volatile atomic_ushort* __a__, memory_order __x__ )
1796 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1798 inline unsigned short atomic_load( volatile atomic_ushort* __a__ )
1799 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1801 inline void atomic_store_explicit
1802 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
1803 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1805 inline void atomic_store
1806 ( volatile atomic_ushort* __a__, unsigned short __m__ )
1807 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1809 inline unsigned short atomic_swap_explicit
1810 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
1811 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1813 inline unsigned short atomic_swap
1814 ( volatile atomic_ushort* __a__, unsigned short __m__ )
1815 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
// NOTE(review): failure ordering __y__ is unused (only __x__ reaches the macro).
1817 inline bool atomic_compare_swap_explicit
1818 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__,
1819 memory_order __x__, memory_order __y__ )
1820 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1822 inline bool atomic_compare_swap
1823 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__ )
1824 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1825 memory_order_seq_cst, memory_order_seq_cst ); }
1827 inline void atomic_fence
1828 ( const volatile atomic_ushort* __a__, memory_order __x__ )
1829 { _ATOMIC_FENCE_( __a__, __x__ ); }
// Free-function implementations for atomic_int (same pattern as above).
// NOTE(review): the atomic_is_lock_free body is elided from this excerpt.
1832 inline bool atomic_is_lock_free( const volatile atomic_int* __a__ )
1835 inline int atomic_load_explicit
1836 ( volatile atomic_int* __a__, memory_order __x__ )
1837 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1839 inline int atomic_load( volatile atomic_int* __a__ )
1840 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1842 inline void atomic_store_explicit
1843 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
1844 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1846 inline void atomic_store
1847 ( volatile atomic_int* __a__, int __m__ )
1848 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1850 inline int atomic_swap_explicit
1851 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
1852 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1854 inline int atomic_swap
1855 ( volatile atomic_int* __a__, int __m__ )
1856 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
// NOTE(review): failure ordering __y__ is unused (only __x__ reaches the macro).
1858 inline bool atomic_compare_swap_explicit
1859 ( volatile atomic_int* __a__, int* __e__, int __m__,
1860 memory_order __x__, memory_order __y__ )
1861 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1863 inline bool atomic_compare_swap
1864 ( volatile atomic_int* __a__, int* __e__, int __m__ )
1865 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1866 memory_order_seq_cst, memory_order_seq_cst ); }
1868 inline void atomic_fence
1869 ( const volatile atomic_int* __a__, memory_order __x__ )
1870 { _ATOMIC_FENCE_( __a__, __x__ ); }
/* atomic_uint: same generated pattern as the other integral types --
 * load/store/swap/compare_swap/fence over the _ATOMIC_* macros; plain
 * variants default the ordering to memory_order_seq_cst. */
1873 inline bool atomic_is_lock_free( const volatile atomic_uint* __a__ )
/* NOTE(review): declarator only -- its body is not present in this chunk. */
1876 inline unsigned int atomic_load_explicit
1877 ( volatile atomic_uint* __a__, memory_order __x__ )
1878 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1880 inline unsigned int atomic_load( volatile atomic_uint* __a__ )
1881 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1883 inline void atomic_store_explicit
1884 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
1885 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1887 inline void atomic_store
1888 ( volatile atomic_uint* __a__, unsigned int __m__ )
1889 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1891 inline unsigned int atomic_swap_explicit
1892 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
1893 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1895 inline unsigned int atomic_swap
1896 ( volatile atomic_uint* __a__, unsigned int __m__ )
1897 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
/* NOTE(review): failure ordering __y__ unused (file-wide convention). */
1899 inline bool atomic_compare_swap_explicit
1900 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__,
1901 memory_order __x__, memory_order __y__ )
1902 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1904 inline bool atomic_compare_swap
1905 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__ )
1906 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1907 memory_order_seq_cst, memory_order_seq_cst ); }
1909 inline void atomic_fence
1910 ( const volatile atomic_uint* __a__, memory_order __x__ )
1911 { _ATOMIC_FENCE_( __a__, __x__ ); }
/* atomic_long: generated load/store/swap/compare_swap/fence wrappers;
 * plain variants default the ordering to memory_order_seq_cst. */
1914 inline bool atomic_is_lock_free( const volatile atomic_long* __a__ )
/* NOTE(review): declarator only -- its body is not present in this chunk. */
1917 inline long atomic_load_explicit
1918 ( volatile atomic_long* __a__, memory_order __x__ )
1919 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1921 inline long atomic_load( volatile atomic_long* __a__ )
1922 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1924 inline void atomic_store_explicit
1925 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
1926 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1928 inline void atomic_store
1929 ( volatile atomic_long* __a__, long __m__ )
1930 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1932 inline long atomic_swap_explicit
1933 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
1934 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1936 inline long atomic_swap
1937 ( volatile atomic_long* __a__, long __m__ )
1938 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
/* NOTE(review): failure ordering __y__ unused (file-wide convention). */
1940 inline bool atomic_compare_swap_explicit
1941 ( volatile atomic_long* __a__, long* __e__, long __m__,
1942 memory_order __x__, memory_order __y__ )
1943 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1945 inline bool atomic_compare_swap
1946 ( volatile atomic_long* __a__, long* __e__, long __m__ )
1947 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1948 memory_order_seq_cst, memory_order_seq_cst ); }
1950 inline void atomic_fence
1951 ( const volatile atomic_long* __a__, memory_order __x__ )
1952 { _ATOMIC_FENCE_( __a__, __x__ ); }
/* atomic_ulong: generated load/store/swap/compare_swap/fence wrappers;
 * plain variants default the ordering to memory_order_seq_cst. */
1955 inline bool atomic_is_lock_free( const volatile atomic_ulong* __a__ )
/* NOTE(review): declarator only -- its body is not present in this chunk. */
1958 inline unsigned long atomic_load_explicit
1959 ( volatile atomic_ulong* __a__, memory_order __x__ )
1960 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1962 inline unsigned long atomic_load( volatile atomic_ulong* __a__ )
1963 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1965 inline void atomic_store_explicit
1966 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
1967 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1969 inline void atomic_store
1970 ( volatile atomic_ulong* __a__, unsigned long __m__ )
1971 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1973 inline unsigned long atomic_swap_explicit
1974 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
1975 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1977 inline unsigned long atomic_swap
1978 ( volatile atomic_ulong* __a__, unsigned long __m__ )
1979 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
/* NOTE(review): failure ordering __y__ unused (file-wide convention). */
1981 inline bool atomic_compare_swap_explicit
1982 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__,
1983 memory_order __x__, memory_order __y__ )
1984 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1986 inline bool atomic_compare_swap
1987 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__ )
1988 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1989 memory_order_seq_cst, memory_order_seq_cst ); }
1991 inline void atomic_fence
1992 ( const volatile atomic_ulong* __a__, memory_order __x__ )
1993 { _ATOMIC_FENCE_( __a__, __x__ ); }
/* atomic_llong: generated load/store/swap/compare_swap/fence wrappers;
 * plain variants default the ordering to memory_order_seq_cst. */
1996 inline bool atomic_is_lock_free( const volatile atomic_llong* __a__ )
/* NOTE(review): declarator only -- its body is not present in this chunk. */
1999 inline long long atomic_load_explicit
2000 ( volatile atomic_llong* __a__, memory_order __x__ )
2001 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2003 inline long long atomic_load( volatile atomic_llong* __a__ )
2004 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2006 inline void atomic_store_explicit
2007 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2008 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2010 inline void atomic_store
2011 ( volatile atomic_llong* __a__, long long __m__ )
2012 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2014 inline long long atomic_swap_explicit
2015 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2016 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2018 inline long long atomic_swap
2019 ( volatile atomic_llong* __a__, long long __m__ )
2020 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
/* NOTE(review): failure ordering __y__ unused (file-wide convention). */
2022 inline bool atomic_compare_swap_explicit
2023 ( volatile atomic_llong* __a__, long long* __e__, long long __m__,
2024 memory_order __x__, memory_order __y__ )
2025 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2027 inline bool atomic_compare_swap
2028 ( volatile atomic_llong* __a__, long long* __e__, long long __m__ )
2029 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
2030 memory_order_seq_cst, memory_order_seq_cst ); }
2032 inline void atomic_fence
2033 ( const volatile atomic_llong* __a__, memory_order __x__ )
2034 { _ATOMIC_FENCE_( __a__, __x__ ); }
/* atomic_ullong: generated load/store/swap/compare_swap/fence wrappers;
 * plain variants default the ordering to memory_order_seq_cst. */
2037 inline bool atomic_is_lock_free( const volatile atomic_ullong* __a__ )
/* NOTE(review): declarator only -- its body is not present in this chunk. */
2040 inline unsigned long long atomic_load_explicit
2041 ( volatile atomic_ullong* __a__, memory_order __x__ )
2042 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2044 inline unsigned long long atomic_load( volatile atomic_ullong* __a__ )
2045 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2047 inline void atomic_store_explicit
2048 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2049 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2051 inline void atomic_store
2052 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2053 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2055 inline unsigned long long atomic_swap_explicit
2056 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2057 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2059 inline unsigned long long atomic_swap
2060 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2061 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
/* NOTE(review): failure ordering __y__ unused (file-wide convention). */
2063 inline bool atomic_compare_swap_explicit
2064 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__,
2065 memory_order __x__, memory_order __y__ )
2066 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2068 inline bool atomic_compare_swap
2069 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__ )
2070 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
2071 memory_order_seq_cst, memory_order_seq_cst ); }
2073 inline void atomic_fence
2074 ( const volatile atomic_ullong* __a__, memory_order __x__ )
2075 { _ATOMIC_FENCE_( __a__, __x__ ); }
/* atomic_wchar_t: generated load/store/swap/compare_swap/fence wrappers;
 * plain variants default the ordering to memory_order_seq_cst. */
2078 inline bool atomic_is_lock_free( const volatile atomic_wchar_t* __a__ )
/* NOTE(review): declarator only -- its body is not present in this chunk. */
2081 inline wchar_t atomic_load_explicit
2082 ( volatile atomic_wchar_t* __a__, memory_order __x__ )
2083 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2085 inline wchar_t atomic_load( volatile atomic_wchar_t* __a__ )
2086 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2088 inline void atomic_store_explicit
2089 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2090 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2092 inline void atomic_store
2093 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2094 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2096 inline wchar_t atomic_swap_explicit
2097 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2098 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2100 inline wchar_t atomic_swap
2101 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2102 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
/* NOTE(review): failure ordering __y__ unused (file-wide convention). */
2104 inline bool atomic_compare_swap_explicit
2105 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__,
2106 memory_order __x__, memory_order __y__ )
2107 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2109 inline bool atomic_compare_swap
2110 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__ )
2111 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
2112 memory_order_seq_cst, memory_order_seq_cst ); }
2114 inline void atomic_fence
2115 ( const volatile atomic_wchar_t* __a__, memory_order __x__ )
2116 { _ATOMIC_FENCE_( __a__, __x__ ); }
/* atomic_address fetch_add / fetch_sub: pointer arithmetic cannot go
 * through _ATOMIC_MODIFY_, so these are written out by hand against the
 * model checker's RMW hooks: model_rmwr_action reads the current pointer,
 * model_rmw_action writes back the pointer displaced by __m__ bytes
 * (char* arithmetic, so __m__ is a byte offset).
 * NOTE(review): both bodies look truncated in this chunk -- the opening
 * '{' and the trailing 'return __r__; }' lines are missing; verify
 * against the full header before editing. */
2119 inline void* atomic_fetch_add_explicit
2120 ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
2122 void* volatile* __p__ = &((__a__)->__f__);
2123 void* __r__ = (void *) model_rmwr_action((void *)__p__, __x__);
2124 model_rmw_action((void *)__p__, __x__, (uint64_t) ((char*)(*__p__) + __m__));
2127 inline void* atomic_fetch_add
2128 ( volatile atomic_address* __a__, ptrdiff_t __m__ )
2129 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2132 inline void* atomic_fetch_sub_explicit
2133 ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
2135 void* volatile* __p__ = &((__a__)->__f__);
2136 void* __r__ = (void *) model_rmwr_action((void *)__p__, __x__);
2137 model_rmw_action((void *)__p__, __x__, (uint64_t)((char*)(*__p__) - __m__));
2140 inline void* atomic_fetch_sub
2141 ( volatile atomic_address* __a__, ptrdiff_t __m__ )
2142 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_char fetch-and-op wrappers (add, sub, and, or, xor), built on
 * _ATOMIC_MODIFY_ with the matching compound-assignment operator; the
 * plain forms default the ordering to memory_order_seq_cst.  Presumably
 * each returns the pre-modification value, per the usual fetch_* contract
 * -- the macro body is not visible here, so confirm. */
2144 inline char atomic_fetch_add_explicit
2145 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2146 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2148 inline char atomic_fetch_add
2149 ( volatile atomic_char* __a__, char __m__ )
2150 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2153 inline char atomic_fetch_sub_explicit
2154 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2155 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2157 inline char atomic_fetch_sub
2158 ( volatile atomic_char* __a__, char __m__ )
2159 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2162 inline char atomic_fetch_and_explicit
2163 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2164 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2166 inline char atomic_fetch_and
2167 ( volatile atomic_char* __a__, char __m__ )
2168 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2171 inline char atomic_fetch_or_explicit
2172 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2173 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2175 inline char atomic_fetch_or
2176 ( volatile atomic_char* __a__, char __m__ )
2177 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2180 inline char atomic_fetch_xor_explicit
2181 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2182 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2184 inline char atomic_fetch_xor
2185 ( volatile atomic_char* __a__, char __m__ )
2186 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_schar fetch-and-op wrappers (add, sub, and, or, xor); same
 * generated pattern as the other integral types. */
2189 inline signed char atomic_fetch_add_explicit
2190 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2191 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2193 inline signed char atomic_fetch_add
2194 ( volatile atomic_schar* __a__, signed char __m__ )
2195 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2198 inline signed char atomic_fetch_sub_explicit
2199 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2200 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2202 inline signed char atomic_fetch_sub
2203 ( volatile atomic_schar* __a__, signed char __m__ )
2204 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2207 inline signed char atomic_fetch_and_explicit
2208 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2209 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2211 inline signed char atomic_fetch_and
2212 ( volatile atomic_schar* __a__, signed char __m__ )
2213 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2216 inline signed char atomic_fetch_or_explicit
2217 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2218 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2220 inline signed char atomic_fetch_or
2221 ( volatile atomic_schar* __a__, signed char __m__ )
2222 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2225 inline signed char atomic_fetch_xor_explicit
2226 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2227 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2229 inline signed char atomic_fetch_xor
2230 ( volatile atomic_schar* __a__, signed char __m__ )
2231 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_uchar fetch-and-op wrappers (add, sub, and, or, xor); same
 * generated pattern as the other integral types. */
2234 inline unsigned char atomic_fetch_add_explicit
2235 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2236 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2238 inline unsigned char atomic_fetch_add
2239 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2240 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2243 inline unsigned char atomic_fetch_sub_explicit
2244 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2245 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2247 inline unsigned char atomic_fetch_sub
2248 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2249 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2252 inline unsigned char atomic_fetch_and_explicit
2253 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2254 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2256 inline unsigned char atomic_fetch_and
2257 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2258 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2261 inline unsigned char atomic_fetch_or_explicit
2262 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2263 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2265 inline unsigned char atomic_fetch_or
2266 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2267 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2270 inline unsigned char atomic_fetch_xor_explicit
2271 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2272 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2274 inline unsigned char atomic_fetch_xor
2275 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2276 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_short fetch-and-op wrappers (add, sub, and, or, xor); same
 * generated pattern as the other integral types. */
2279 inline short atomic_fetch_add_explicit
2280 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2281 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2283 inline short atomic_fetch_add
2284 ( volatile atomic_short* __a__, short __m__ )
2285 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2288 inline short atomic_fetch_sub_explicit
2289 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2290 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2292 inline short atomic_fetch_sub
2293 ( volatile atomic_short* __a__, short __m__ )
2294 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2297 inline short atomic_fetch_and_explicit
2298 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2299 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2301 inline short atomic_fetch_and
2302 ( volatile atomic_short* __a__, short __m__ )
2303 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2306 inline short atomic_fetch_or_explicit
2307 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2308 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2310 inline short atomic_fetch_or
2311 ( volatile atomic_short* __a__, short __m__ )
2312 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2315 inline short atomic_fetch_xor_explicit
2316 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2317 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2319 inline short atomic_fetch_xor
2320 ( volatile atomic_short* __a__, short __m__ )
2321 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_ushort fetch-and-op wrappers (add, sub, and, or, xor); same
 * generated pattern as the other integral types. */
2324 inline unsigned short atomic_fetch_add_explicit
2325 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2326 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2328 inline unsigned short atomic_fetch_add
2329 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2330 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2333 inline unsigned short atomic_fetch_sub_explicit
2334 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2335 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2337 inline unsigned short atomic_fetch_sub
2338 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2339 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2342 inline unsigned short atomic_fetch_and_explicit
2343 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2344 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2346 inline unsigned short atomic_fetch_and
2347 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2348 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2351 inline unsigned short atomic_fetch_or_explicit
2352 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2353 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2355 inline unsigned short atomic_fetch_or
2356 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2357 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2360 inline unsigned short atomic_fetch_xor_explicit
2361 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2362 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2364 inline unsigned short atomic_fetch_xor
2365 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2366 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_int fetch-and-op wrappers (add, sub, and, or, xor); same
 * generated pattern as the other integral types. */
2369 inline int atomic_fetch_add_explicit
2370 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2371 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2373 inline int atomic_fetch_add
2374 ( volatile atomic_int* __a__, int __m__ )
2375 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2378 inline int atomic_fetch_sub_explicit
2379 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2380 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2382 inline int atomic_fetch_sub
2383 ( volatile atomic_int* __a__, int __m__ )
2384 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2387 inline int atomic_fetch_and_explicit
2388 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2389 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2391 inline int atomic_fetch_and
2392 ( volatile atomic_int* __a__, int __m__ )
2393 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2396 inline int atomic_fetch_or_explicit
2397 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2398 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2400 inline int atomic_fetch_or
2401 ( volatile atomic_int* __a__, int __m__ )
2402 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2405 inline int atomic_fetch_xor_explicit
2406 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2407 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2409 inline int atomic_fetch_xor
2410 ( volatile atomic_int* __a__, int __m__ )
2411 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_uint fetch-and-op wrappers (add, sub, and, or, xor); same
 * generated pattern as the other integral types. */
2414 inline unsigned int atomic_fetch_add_explicit
2415 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2416 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2418 inline unsigned int atomic_fetch_add
2419 ( volatile atomic_uint* __a__, unsigned int __m__ )
2420 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2423 inline unsigned int atomic_fetch_sub_explicit
2424 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2425 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2427 inline unsigned int atomic_fetch_sub
2428 ( volatile atomic_uint* __a__, unsigned int __m__ )
2429 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2432 inline unsigned int atomic_fetch_and_explicit
2433 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2434 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2436 inline unsigned int atomic_fetch_and
2437 ( volatile atomic_uint* __a__, unsigned int __m__ )
2438 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2441 inline unsigned int atomic_fetch_or_explicit
2442 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2443 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2445 inline unsigned int atomic_fetch_or
2446 ( volatile atomic_uint* __a__, unsigned int __m__ )
2447 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2450 inline unsigned int atomic_fetch_xor_explicit
2451 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2452 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2454 inline unsigned int atomic_fetch_xor
2455 ( volatile atomic_uint* __a__, unsigned int __m__ )
2456 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_long fetch-and-op wrappers (add, sub, and, or, xor); same
 * generated pattern as the other integral types. */
2459 inline long atomic_fetch_add_explicit
2460 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2461 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2463 inline long atomic_fetch_add
2464 ( volatile atomic_long* __a__, long __m__ )
2465 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2468 inline long atomic_fetch_sub_explicit
2469 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2470 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2472 inline long atomic_fetch_sub
2473 ( volatile atomic_long* __a__, long __m__ )
2474 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2477 inline long atomic_fetch_and_explicit
2478 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2479 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2481 inline long atomic_fetch_and
2482 ( volatile atomic_long* __a__, long __m__ )
2483 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2486 inline long atomic_fetch_or_explicit
2487 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2488 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2490 inline long atomic_fetch_or
2491 ( volatile atomic_long* __a__, long __m__ )
2492 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2495 inline long atomic_fetch_xor_explicit
2496 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2497 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2499 inline long atomic_fetch_xor
2500 ( volatile atomic_long* __a__, long __m__ )
2501 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_ulong fetch-and-op wrappers (add, sub, and, or, xor); same
 * generated pattern as the other integral types. */
2504 inline unsigned long atomic_fetch_add_explicit
2505 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2506 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2508 inline unsigned long atomic_fetch_add
2509 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2510 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2513 inline unsigned long atomic_fetch_sub_explicit
2514 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2515 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2517 inline unsigned long atomic_fetch_sub
2518 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2519 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2522 inline unsigned long atomic_fetch_and_explicit
2523 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2524 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2526 inline unsigned long atomic_fetch_and
2527 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2528 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2531 inline unsigned long atomic_fetch_or_explicit
2532 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2533 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2535 inline unsigned long atomic_fetch_or
2536 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2537 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2540 inline unsigned long atomic_fetch_xor_explicit
2541 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2542 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2544 inline unsigned long atomic_fetch_xor
2545 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2546 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_llong fetch-and-op wrappers (add, sub, and, or, xor); same
 * generated pattern as the other integral types. */
2549 inline long long atomic_fetch_add_explicit
2550 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2551 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2553 inline long long atomic_fetch_add
2554 ( volatile atomic_llong* __a__, long long __m__ )
2555 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2558 inline long long atomic_fetch_sub_explicit
2559 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2560 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2562 inline long long atomic_fetch_sub
2563 ( volatile atomic_llong* __a__, long long __m__ )
2564 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2567 inline long long atomic_fetch_and_explicit
2568 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2569 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2571 inline long long atomic_fetch_and
2572 ( volatile atomic_llong* __a__, long long __m__ )
2573 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2576 inline long long atomic_fetch_or_explicit
2577 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2578 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2580 inline long long atomic_fetch_or
2581 ( volatile atomic_llong* __a__, long long __m__ )
2582 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2585 inline long long atomic_fetch_xor_explicit
2586 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2587 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2589 inline long long atomic_fetch_xor
2590 ( volatile atomic_llong* __a__, long long __m__ )
2591 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_ullong fetch-and-op wrappers (add, sub, and, or, xor); same
 * generated pattern as the other integral types. */
2594 inline unsigned long long atomic_fetch_add_explicit
2595 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2596 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2598 inline unsigned long long atomic_fetch_add
2599 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2600 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2603 inline unsigned long long atomic_fetch_sub_explicit
2604 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2605 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2607 inline unsigned long long atomic_fetch_sub
2608 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2609 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2612 inline unsigned long long atomic_fetch_and_explicit
2613 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2614 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2616 inline unsigned long long atomic_fetch_and
2617 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2618 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2621 inline unsigned long long atomic_fetch_or_explicit
2622 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2623 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2625 inline unsigned long long atomic_fetch_or
2626 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2627 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2630 inline unsigned long long atomic_fetch_xor_explicit
2631 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2632 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2634 inline unsigned long long atomic_fetch_xor
2635 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2636 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_wchar_t fetch-and-op wrappers (add, sub, and, or, xor); same
 * generated pattern as the other integral types. */
2639 inline wchar_t atomic_fetch_add_explicit
2640 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2641 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2643 inline wchar_t atomic_fetch_add
2644 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2645 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2648 inline wchar_t atomic_fetch_sub_explicit
2649 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2650 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2652 inline wchar_t atomic_fetch_sub
2653 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2654 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2657 inline wchar_t atomic_fetch_and_explicit
2658 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2659 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2661 inline wchar_t atomic_fetch_and
2662 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2663 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2666 inline wchar_t atomic_fetch_or_explicit
2667 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2668 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2670 inline wchar_t atomic_fetch_or
2671 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2672 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2675 inline wchar_t atomic_fetch_xor_explicit
2676 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2677 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2679 inline wchar_t atomic_fetch_xor
2680 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2681 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
// Type-generic, C-compatible macro layer. Each macro forwards to one of the
// _ATOMIC_*_ primitives defined earlier in this header (statement-expression
// based, operating on the atomic struct's __f__ field). The non-_explicit
// forms fix the order to memory_order_seq_cst.
// NOTE(review): the continuation line of atomic_is_lock_free appears to be
// missing from this listing (listing numbers jump 2687 -> 2690) — the macro
// body is not visible here; confirm against the original header.
2687 #define atomic_is_lock_free( __a__ ) \
2690 #define atomic_load( __a__ ) \
2691 _ATOMIC_LOAD_( __a__, memory_order_seq_cst )
2693 #define atomic_load_explicit( __a__, __x__ ) \
2694 _ATOMIC_LOAD_( __a__, __x__ )
2696 #define atomic_init( __a__, __m__ ) \
2697 _ATOMIC_INIT_( __a__, __m__ )
2699 #define atomic_store( __a__, __m__ ) \
2700 _ATOMIC_STORE_( __a__, __m__, memory_order_seq_cst )
2702 #define atomic_store_explicit( __a__, __m__, __x__ ) \
2703 _ATOMIC_STORE_( __a__, __m__, __x__ )
// swap is expressed as a read-modify-write whose "operation" is plain
// assignment (=): the old value is returned, the new one stored.
2705 #define atomic_swap( __a__, __m__ ) \
2706 _ATOMIC_MODIFY_( __a__, =, __m__, memory_order_seq_cst )
2708 #define atomic_swap_explicit( __a__, __m__, __x__ ) \
2709 _ATOMIC_MODIFY_( __a__, =, __m__, __x__ )
2711 #define atomic_compare_swap( __a__, __e__, __m__ ) \
2712 _ATOMIC_CMPSWP_( __a__, __e__, __m__, memory_order_seq_cst )
// BUG?(review): the _explicit form accepts a failure ordering __y__ but the
// expansion drops it — _ATOMIC_CMPSWP_ is invoked with the success order
// __x__ only, so the failure ordering is silently ignored. This loses the
// distinction the two-order CAS API is meant to provide; confirm whether
// _ATOMIC_CMPSWP_ can be extended to take both orders.
2714 #define atomic_compare_swap_explicit( __a__, __e__, __m__, __x__, __y__ ) \
2715 _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ )
2717 #define atomic_fence( __a__, __x__ ) \
2718 ({ _ATOMIC_FENCE_( __a__, __x__ ); })
// Fetch-and-<op> macros: return the value held before the modification.
2721 #define atomic_fetch_add_explicit( __a__, __m__, __x__ ) \
2722 _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ )
2724 #define atomic_fetch_add( __a__, __m__ ) \
2725 _ATOMIC_MODIFY_( __a__, +=, __m__, memory_order_seq_cst )
2728 #define atomic_fetch_sub_explicit( __a__, __m__, __x__ ) \
2729 _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ )
2731 #define atomic_fetch_sub( __a__, __m__ ) \
2732 _ATOMIC_MODIFY_( __a__, -=, __m__, memory_order_seq_cst )
2735 #define atomic_fetch_and_explicit( __a__, __m__, __x__ ) \
2736 _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ )
2738 #define atomic_fetch_and( __a__, __m__ ) \
2739 _ATOMIC_MODIFY_( __a__, &=, __m__, memory_order_seq_cst )
2742 #define atomic_fetch_or_explicit( __a__, __m__, __x__ ) \
2743 _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ )
2745 #define atomic_fetch_or( __a__, __m__ ) \
2746 _ATOMIC_MODIFY_( __a__, |=, __m__, memory_order_seq_cst )
2749 #define atomic_fetch_xor_explicit( __a__, __m__, __x__ ) \
2750 _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ )
2752 #define atomic_fetch_xor( __a__, __m__ ) \
2753 _ATOMIC_MODIFY_( __a__, ^=, __m__, memory_order_seq_cst )
// Out-of-line definitions of the member operations for every concrete
// atomic type (atomic_bool, atomic_address, and each integral atomic).
// All of them delegate to the type-generic macro layer above, passing
// `this`. The pattern is identical per type:
//   store / load / swap   -> atomic_{store,load,swap}_explicit
//   compare_swap (2 orders) -> atomic_compare_swap_explicit
//   compare_swap (1 order)  -> same, with the failure order derived from
//       the success order: acq_rel -> acquire, release -> relaxed,
//       otherwise the success order itself (a CAS failure performs no
//       store, so release semantics are dropped).
//   fence -> atomic_fence
// NOTE(review): each is_lock_free body appears to be missing from this
// listing (the listing number jumps by 3 after each declaration line);
// confirm against the original header.
2762 inline bool atomic_bool::is_lock_free() const volatile
2765 inline void atomic_bool::store
2766 ( bool __m__, memory_order __x__ ) volatile
2767 { atomic_store_explicit( this, __m__, __x__ ); }
2769 inline bool atomic_bool::load
2770 ( memory_order __x__ ) volatile
2771 { return atomic_load_explicit( this, __x__ ); }
2773 inline bool atomic_bool::swap
2774 ( bool __m__, memory_order __x__ ) volatile
2775 { return atomic_swap_explicit( this, __m__, __x__ ); }
2777 inline bool atomic_bool::compare_swap
2778 ( bool& __e__, bool __m__,
2779 memory_order __x__, memory_order __y__ ) volatile
2780 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }
2782 inline bool atomic_bool::compare_swap
2783 ( bool& __e__, bool __m__, memory_order __x__ ) volatile
2784 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
2785 __x__ == memory_order_acq_rel ? memory_order_acquire :
2786 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
2788 inline void atomic_bool::fence
2789 ( memory_order __x__ ) const volatile
2790 { return atomic_fence( this, __x__ ); }
// atomic_address: same pattern over void*.
2793 inline bool atomic_address::is_lock_free() const volatile
2796 inline void atomic_address::store
2797 ( void* __m__, memory_order __x__ ) volatile
2798 { atomic_store_explicit( this, __m__, __x__ ); }
2800 inline void* atomic_address::load
2801 ( memory_order __x__ ) volatile
2802 { return atomic_load_explicit( this, __x__ ); }
2804 inline void* atomic_address::swap
2805 ( void* __m__, memory_order __x__ ) volatile
2806 { return atomic_swap_explicit( this, __m__, __x__ ); }
2808 inline bool atomic_address::compare_swap
2809 ( void*& __e__, void* __m__,
2810 memory_order __x__, memory_order __y__ ) volatile
2811 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }
2813 inline bool atomic_address::compare_swap
2814 ( void*& __e__, void* __m__, memory_order __x__ ) volatile
2815 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
2816 __x__ == memory_order_acq_rel ? memory_order_acquire :
2817 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
2819 inline void atomic_address::fence
2820 ( memory_order __x__ ) const volatile
2821 { return atomic_fence( this, __x__ ); }
// atomic_char
2824 inline bool atomic_char::is_lock_free() const volatile
2827 inline void atomic_char::store
2828 ( char __m__, memory_order __x__ ) volatile
2829 { atomic_store_explicit( this, __m__, __x__ ); }
2831 inline char atomic_char::load
2832 ( memory_order __x__ ) volatile
2833 { return atomic_load_explicit( this, __x__ ); }
2835 inline char atomic_char::swap
2836 ( char __m__, memory_order __x__ ) volatile
2837 { return atomic_swap_explicit( this, __m__, __x__ ); }
2839 inline bool atomic_char::compare_swap
2840 ( char& __e__, char __m__,
2841 memory_order __x__, memory_order __y__ ) volatile
2842 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }
2844 inline bool atomic_char::compare_swap
2845 ( char& __e__, char __m__, memory_order __x__ ) volatile
2846 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
2847 __x__ == memory_order_acq_rel ? memory_order_acquire :
2848 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
2850 inline void atomic_char::fence
2851 ( memory_order __x__ ) const volatile
2852 { return atomic_fence( this, __x__ ); }
// atomic_schar
2855 inline bool atomic_schar::is_lock_free() const volatile
2858 inline void atomic_schar::store
2859 ( signed char __m__, memory_order __x__ ) volatile
2860 { atomic_store_explicit( this, __m__, __x__ ); }
2862 inline signed char atomic_schar::load
2863 ( memory_order __x__ ) volatile
2864 { return atomic_load_explicit( this, __x__ ); }
2866 inline signed char atomic_schar::swap
2867 ( signed char __m__, memory_order __x__ ) volatile
2868 { return atomic_swap_explicit( this, __m__, __x__ ); }
2870 inline bool atomic_schar::compare_swap
2871 ( signed char& __e__, signed char __m__,
2872 memory_order __x__, memory_order __y__ ) volatile
2873 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }
2875 inline bool atomic_schar::compare_swap
2876 ( signed char& __e__, signed char __m__, memory_order __x__ ) volatile
2877 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
2878 __x__ == memory_order_acq_rel ? memory_order_acquire :
2879 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
2881 inline void atomic_schar::fence
2882 ( memory_order __x__ ) const volatile
2883 { return atomic_fence( this, __x__ ); }
// atomic_uchar
2886 inline bool atomic_uchar::is_lock_free() const volatile
2889 inline void atomic_uchar::store
2890 ( unsigned char __m__, memory_order __x__ ) volatile
2891 { atomic_store_explicit( this, __m__, __x__ ); }
2893 inline unsigned char atomic_uchar::load
2894 ( memory_order __x__ ) volatile
2895 { return atomic_load_explicit( this, __x__ ); }
2897 inline unsigned char atomic_uchar::swap
2898 ( unsigned char __m__, memory_order __x__ ) volatile
2899 { return atomic_swap_explicit( this, __m__, __x__ ); }
2901 inline bool atomic_uchar::compare_swap
2902 ( unsigned char& __e__, unsigned char __m__,
2903 memory_order __x__, memory_order __y__ ) volatile
2904 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }
2906 inline bool atomic_uchar::compare_swap
2907 ( unsigned char& __e__, unsigned char __m__, memory_order __x__ ) volatile
2908 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
2909 __x__ == memory_order_acq_rel ? memory_order_acquire :
2910 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
2912 inline void atomic_uchar::fence
2913 ( memory_order __x__ ) const volatile
2914 { return atomic_fence( this, __x__ ); }
// atomic_short
2917 inline bool atomic_short::is_lock_free() const volatile
2920 inline void atomic_short::store
2921 ( short __m__, memory_order __x__ ) volatile
2922 { atomic_store_explicit( this, __m__, __x__ ); }
2924 inline short atomic_short::load
2925 ( memory_order __x__ ) volatile
2926 { return atomic_load_explicit( this, __x__ ); }
2928 inline short atomic_short::swap
2929 ( short __m__, memory_order __x__ ) volatile
2930 { return atomic_swap_explicit( this, __m__, __x__ ); }
2932 inline bool atomic_short::compare_swap
2933 ( short& __e__, short __m__,
2934 memory_order __x__, memory_order __y__ ) volatile
2935 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }
2937 inline bool atomic_short::compare_swap
2938 ( short& __e__, short __m__, memory_order __x__ ) volatile
2939 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
2940 __x__ == memory_order_acq_rel ? memory_order_acquire :
2941 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
2943 inline void atomic_short::fence
2944 ( memory_order __x__ ) const volatile
2945 { return atomic_fence( this, __x__ ); }
// atomic_ushort
2948 inline bool atomic_ushort::is_lock_free() const volatile
2951 inline void atomic_ushort::store
2952 ( unsigned short __m__, memory_order __x__ ) volatile
2953 { atomic_store_explicit( this, __m__, __x__ ); }
2955 inline unsigned short atomic_ushort::load
2956 ( memory_order __x__ ) volatile
2957 { return atomic_load_explicit( this, __x__ ); }
2959 inline unsigned short atomic_ushort::swap
2960 ( unsigned short __m__, memory_order __x__ ) volatile
2961 { return atomic_swap_explicit( this, __m__, __x__ ); }
2963 inline bool atomic_ushort::compare_swap
2964 ( unsigned short& __e__, unsigned short __m__,
2965 memory_order __x__, memory_order __y__ ) volatile
2966 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }
2968 inline bool atomic_ushort::compare_swap
2969 ( unsigned short& __e__, unsigned short __m__, memory_order __x__ ) volatile
2970 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
2971 __x__ == memory_order_acq_rel ? memory_order_acquire :
2972 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
2974 inline void atomic_ushort::fence
2975 ( memory_order __x__ ) const volatile
2976 { return atomic_fence( this, __x__ ); }
// atomic_int
2979 inline bool atomic_int::is_lock_free() const volatile
2982 inline void atomic_int::store
2983 ( int __m__, memory_order __x__ ) volatile
2984 { atomic_store_explicit( this, __m__, __x__ ); }
2986 inline int atomic_int::load
2987 ( memory_order __x__ ) volatile
2988 { return atomic_load_explicit( this, __x__ ); }
2990 inline int atomic_int::swap
2991 ( int __m__, memory_order __x__ ) volatile
2992 { return atomic_swap_explicit( this, __m__, __x__ ); }
2994 inline bool atomic_int::compare_swap
2995 ( int& __e__, int __m__,
2996 memory_order __x__, memory_order __y__ ) volatile
2997 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }
2999 inline bool atomic_int::compare_swap
3000 ( int& __e__, int __m__, memory_order __x__ ) volatile
3001 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
3002 __x__ == memory_order_acq_rel ? memory_order_acquire :
3003 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3005 inline void atomic_int::fence
3006 ( memory_order __x__ ) const volatile
3007 { return atomic_fence( this, __x__ ); }
// atomic_uint
3010 inline bool atomic_uint::is_lock_free() const volatile
3013 inline void atomic_uint::store
3014 ( unsigned int __m__, memory_order __x__ ) volatile
3015 { atomic_store_explicit( this, __m__, __x__ ); }
3017 inline unsigned int atomic_uint::load
3018 ( memory_order __x__ ) volatile
3019 { return atomic_load_explicit( this, __x__ ); }
3021 inline unsigned int atomic_uint::swap
3022 ( unsigned int __m__, memory_order __x__ ) volatile
3023 { return atomic_swap_explicit( this, __m__, __x__ ); }
3025 inline bool atomic_uint::compare_swap
3026 ( unsigned int& __e__, unsigned int __m__,
3027 memory_order __x__, memory_order __y__ ) volatile
3028 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }
3030 inline bool atomic_uint::compare_swap
3031 ( unsigned int& __e__, unsigned int __m__, memory_order __x__ ) volatile
3032 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
3033 __x__ == memory_order_acq_rel ? memory_order_acquire :
3034 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3036 inline void atomic_uint::fence
3037 ( memory_order __x__ ) const volatile
3038 { return atomic_fence( this, __x__ ); }
// atomic_long
3041 inline bool atomic_long::is_lock_free() const volatile
3044 inline void atomic_long::store
3045 ( long __m__, memory_order __x__ ) volatile
3046 { atomic_store_explicit( this, __m__, __x__ ); }
3048 inline long atomic_long::load
3049 ( memory_order __x__ ) volatile
3050 { return atomic_load_explicit( this, __x__ ); }
3052 inline long atomic_long::swap
3053 ( long __m__, memory_order __x__ ) volatile
3054 { return atomic_swap_explicit( this, __m__, __x__ ); }
3056 inline bool atomic_long::compare_swap
3057 ( long& __e__, long __m__,
3058 memory_order __x__, memory_order __y__ ) volatile
3059 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }
3061 inline bool atomic_long::compare_swap
3062 ( long& __e__, long __m__, memory_order __x__ ) volatile
3063 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
3064 __x__ == memory_order_acq_rel ? memory_order_acquire :
3065 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3067 inline void atomic_long::fence
3068 ( memory_order __x__ ) const volatile
3069 { return atomic_fence( this, __x__ ); }
// atomic_ulong
3072 inline bool atomic_ulong::is_lock_free() const volatile
3075 inline void atomic_ulong::store
3076 ( unsigned long __m__, memory_order __x__ ) volatile
3077 { atomic_store_explicit( this, __m__, __x__ ); }
3079 inline unsigned long atomic_ulong::load
3080 ( memory_order __x__ ) volatile
3081 { return atomic_load_explicit( this, __x__ ); }
3083 inline unsigned long atomic_ulong::swap
3084 ( unsigned long __m__, memory_order __x__ ) volatile
3085 { return atomic_swap_explicit( this, __m__, __x__ ); }
3087 inline bool atomic_ulong::compare_swap
3088 ( unsigned long& __e__, unsigned long __m__,
3089 memory_order __x__, memory_order __y__ ) volatile
3090 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }
3092 inline bool atomic_ulong::compare_swap
3093 ( unsigned long& __e__, unsigned long __m__, memory_order __x__ ) volatile
3094 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
3095 __x__ == memory_order_acq_rel ? memory_order_acquire :
3096 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3098 inline void atomic_ulong::fence
3099 ( memory_order __x__ ) const volatile
3100 { return atomic_fence( this, __x__ ); }
// atomic_llong
3103 inline bool atomic_llong::is_lock_free() const volatile
3106 inline void atomic_llong::store
3107 ( long long __m__, memory_order __x__ ) volatile
3108 { atomic_store_explicit( this, __m__, __x__ ); }
3110 inline long long atomic_llong::load
3111 ( memory_order __x__ ) volatile
3112 { return atomic_load_explicit( this, __x__ ); }
3114 inline long long atomic_llong::swap
3115 ( long long __m__, memory_order __x__ ) volatile
3116 { return atomic_swap_explicit( this, __m__, __x__ ); }
3118 inline bool atomic_llong::compare_swap
3119 ( long long& __e__, long long __m__,
3120 memory_order __x__, memory_order __y__ ) volatile
3121 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }
3123 inline bool atomic_llong::compare_swap
3124 ( long long& __e__, long long __m__, memory_order __x__ ) volatile
3125 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
3126 __x__ == memory_order_acq_rel ? memory_order_acquire :
3127 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3129 inline void atomic_llong::fence
3130 ( memory_order __x__ ) const volatile
3131 { return atomic_fence( this, __x__ ); }
// atomic_ullong
3134 inline bool atomic_ullong::is_lock_free() const volatile
3137 inline void atomic_ullong::store
3138 ( unsigned long long __m__, memory_order __x__ ) volatile
3139 { atomic_store_explicit( this, __m__, __x__ ); }
3141 inline unsigned long long atomic_ullong::load
3142 ( memory_order __x__ ) volatile
3143 { return atomic_load_explicit( this, __x__ ); }
3145 inline unsigned long long atomic_ullong::swap
3146 ( unsigned long long __m__, memory_order __x__ ) volatile
3147 { return atomic_swap_explicit( this, __m__, __x__ ); }
3149 inline bool atomic_ullong::compare_swap
3150 ( unsigned long long& __e__, unsigned long long __m__,
3151 memory_order __x__, memory_order __y__ ) volatile
3152 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }
3154 inline bool atomic_ullong::compare_swap
3155 ( unsigned long long& __e__, unsigned long long __m__, memory_order __x__ ) volatile
3156 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
3157 __x__ == memory_order_acq_rel ? memory_order_acquire :
3158 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3160 inline void atomic_ullong::fence
3161 ( memory_order __x__ ) const volatile
3162 { return atomic_fence( this, __x__ ); }
// atomic_wchar_t
3165 inline bool atomic_wchar_t::is_lock_free() const volatile
3168 inline void atomic_wchar_t::store
3169 ( wchar_t __m__, memory_order __x__ ) volatile
3170 { atomic_store_explicit( this, __m__, __x__ ); }
3172 inline wchar_t atomic_wchar_t::load
3173 ( memory_order __x__ ) volatile
3174 { return atomic_load_explicit( this, __x__ ); }
3176 inline wchar_t atomic_wchar_t::swap
3177 ( wchar_t __m__, memory_order __x__ ) volatile
3178 { return atomic_swap_explicit( this, __m__, __x__ ); }
3180 inline bool atomic_wchar_t::compare_swap
3181 ( wchar_t& __e__, wchar_t __m__,
3182 memory_order __x__, memory_order __y__ ) volatile
3183 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }
3185 inline bool atomic_wchar_t::compare_swap
3186 ( wchar_t& __e__, wchar_t __m__, memory_order __x__ ) volatile
3187 { return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
3188 __x__ == memory_order_acq_rel ? memory_order_acquire :
3189 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3191 inline void atomic_wchar_t::fence
3192 ( memory_order __x__ ) const volatile
3193 { return atomic_fence( this, __x__ ); }
// Member definitions for the generic atomic<T> template. Unlike the
// concrete types above, these call the _ATOMIC_*_ primitives directly
// (the C-level atomic_* macros are not type-generic enough for arbitrary T).
// NOTE(review): the is_lock_free body appears to be missing from this
// listing (listing numbers jump 3197 -> 3200); confirm against the original.
3196 template< typename T >
3197 inline bool atomic<T>::is_lock_free() const volatile
3200 template< typename T >
3201 inline void atomic<T>::store( T __v__, memory_order __x__ ) volatile
3202 { _ATOMIC_STORE_( this, __v__, __x__ ); }
3204 template< typename T >
3205 inline T atomic<T>::load( memory_order __x__ ) volatile
3206 { return _ATOMIC_LOAD_( this, __x__ ); }
// swap = read-modify-write with plain assignment; returns the prior value.
3208 template< typename T >
3209 inline T atomic<T>::swap( T __v__, memory_order __x__ ) volatile
3210 { return _ATOMIC_MODIFY_( this, =, __v__, __x__ ); }
// BUG?(review): the two-order overload ignores __y__ — _ATOMIC_CMPSWP_ is
// invoked with the success order __x__ only, so the failure ordering is
// silently dropped (same issue as atomic_compare_swap_explicit).
3212 template< typename T >
3213 inline bool atomic<T>::compare_swap
3214 ( T& __r__, T __v__, memory_order __x__, memory_order __y__ ) volatile
3215 { return _ATOMIC_CMPSWP_( this, &__r__, __v__, __x__ ); }
// Single-order overload derives the failure order from the success order:
// acq_rel -> acquire, release -> relaxed, otherwise unchanged (a failed
// CAS performs no store, so any release component is meaningless).
3217 template< typename T >
3218 inline bool atomic<T>::compare_swap
3219 ( T& __r__, T __v__, memory_order __x__ ) volatile
3220 { return compare_swap( __r__, __v__, __x__,
3221 __x__ == memory_order_acq_rel ? memory_order_acquire :
3222 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
// Pointer arithmetic members of atomic_address: the offset __m__ is a raw
// byte count (ptrdiff_t) applied to the stored void*; scaling by an element
// size is done by the typed atomic<T*> wrappers, not here.
3225 inline void* atomic_address::fetch_add
3226 ( ptrdiff_t __m__, memory_order __x__ ) volatile
3227 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3229 inline void* atomic_address::fetch_sub
3230 ( ptrdiff_t __m__, memory_order __x__ ) volatile
3231 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
// fetch_add / fetch_sub / fetch_and / fetch_or / fetch_xor member
// definitions for every integral atomic type. Each is a one-line forward
// to the corresponding type-generic atomic_fetch_*_explicit macro with
// `this`; the value returned is the one held before the modification.
// The same five-operation pattern repeats verbatim for each type below.
3234 inline char atomic_char::fetch_add
3235 ( char __m__, memory_order __x__ ) volatile
3236 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3239 inline char atomic_char::fetch_sub
3240 ( char __m__, memory_order __x__ ) volatile
3241 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3244 inline char atomic_char::fetch_and
3245 ( char __m__, memory_order __x__ ) volatile
3246 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3249 inline char atomic_char::fetch_or
3250 ( char __m__, memory_order __x__ ) volatile
3251 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3254 inline char atomic_char::fetch_xor
3255 ( char __m__, memory_order __x__ ) volatile
3256 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3259 inline signed char atomic_schar::fetch_add
3260 ( signed char __m__, memory_order __x__ ) volatile
3261 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3264 inline signed char atomic_schar::fetch_sub
3265 ( signed char __m__, memory_order __x__ ) volatile
3266 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3269 inline signed char atomic_schar::fetch_and
3270 ( signed char __m__, memory_order __x__ ) volatile
3271 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3274 inline signed char atomic_schar::fetch_or
3275 ( signed char __m__, memory_order __x__ ) volatile
3276 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3279 inline signed char atomic_schar::fetch_xor
3280 ( signed char __m__, memory_order __x__ ) volatile
3281 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3284 inline unsigned char atomic_uchar::fetch_add
3285 ( unsigned char __m__, memory_order __x__ ) volatile
3286 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3289 inline unsigned char atomic_uchar::fetch_sub
3290 ( unsigned char __m__, memory_order __x__ ) volatile
3291 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3294 inline unsigned char atomic_uchar::fetch_and
3295 ( unsigned char __m__, memory_order __x__ ) volatile
3296 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3299 inline unsigned char atomic_uchar::fetch_or
3300 ( unsigned char __m__, memory_order __x__ ) volatile
3301 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3304 inline unsigned char atomic_uchar::fetch_xor
3305 ( unsigned char __m__, memory_order __x__ ) volatile
3306 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3309 inline short atomic_short::fetch_add
3310 ( short __m__, memory_order __x__ ) volatile
3311 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3314 inline short atomic_short::fetch_sub
3315 ( short __m__, memory_order __x__ ) volatile
3316 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3319 inline short atomic_short::fetch_and
3320 ( short __m__, memory_order __x__ ) volatile
3321 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3324 inline short atomic_short::fetch_or
3325 ( short __m__, memory_order __x__ ) volatile
3326 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3329 inline short atomic_short::fetch_xor
3330 ( short __m__, memory_order __x__ ) volatile
3331 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3334 inline unsigned short atomic_ushort::fetch_add
3335 ( unsigned short __m__, memory_order __x__ ) volatile
3336 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3339 inline unsigned short atomic_ushort::fetch_sub
3340 ( unsigned short __m__, memory_order __x__ ) volatile
3341 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3344 inline unsigned short atomic_ushort::fetch_and
3345 ( unsigned short __m__, memory_order __x__ ) volatile
3346 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3349 inline unsigned short atomic_ushort::fetch_or
3350 ( unsigned short __m__, memory_order __x__ ) volatile
3351 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3354 inline unsigned short atomic_ushort::fetch_xor
3355 ( unsigned short __m__, memory_order __x__ ) volatile
3356 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3359 inline int atomic_int::fetch_add
3360 ( int __m__, memory_order __x__ ) volatile
3361 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3364 inline int atomic_int::fetch_sub
3365 ( int __m__, memory_order __x__ ) volatile
3366 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3369 inline int atomic_int::fetch_and
3370 ( int __m__, memory_order __x__ ) volatile
3371 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3374 inline int atomic_int::fetch_or
3375 ( int __m__, memory_order __x__ ) volatile
3376 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3379 inline int atomic_int::fetch_xor
3380 ( int __m__, memory_order __x__ ) volatile
3381 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3384 inline unsigned int atomic_uint::fetch_add
3385 ( unsigned int __m__, memory_order __x__ ) volatile
3386 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3389 inline unsigned int atomic_uint::fetch_sub
3390 ( unsigned int __m__, memory_order __x__ ) volatile
3391 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3394 inline unsigned int atomic_uint::fetch_and
3395 ( unsigned int __m__, memory_order __x__ ) volatile
3396 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3399 inline unsigned int atomic_uint::fetch_or
3400 ( unsigned int __m__, memory_order __x__ ) volatile
3401 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3404 inline unsigned int atomic_uint::fetch_xor
3405 ( unsigned int __m__, memory_order __x__ ) volatile
3406 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3409 inline long atomic_long::fetch_add
3410 ( long __m__, memory_order __x__ ) volatile
3411 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3414 inline long atomic_long::fetch_sub
3415 ( long __m__, memory_order __x__ ) volatile
3416 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3419 inline long atomic_long::fetch_and
3420 ( long __m__, memory_order __x__ ) volatile
3421 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3424 inline long atomic_long::fetch_or
3425 ( long __m__, memory_order __x__ ) volatile
3426 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3429 inline long atomic_long::fetch_xor
3430 ( long __m__, memory_order __x__ ) volatile
3431 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3434 inline unsigned long atomic_ulong::fetch_add
3435 ( unsigned long __m__, memory_order __x__ ) volatile
3436 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3439 inline unsigned long atomic_ulong::fetch_sub
3440 ( unsigned long __m__, memory_order __x__ ) volatile
3441 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3444 inline unsigned long atomic_ulong::fetch_and
3445 ( unsigned long __m__, memory_order __x__ ) volatile
3446 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3449 inline unsigned long atomic_ulong::fetch_or
3450 ( unsigned long __m__, memory_order __x__ ) volatile
3451 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3454 inline unsigned long atomic_ulong::fetch_xor
3455 ( unsigned long __m__, memory_order __x__ ) volatile
3456 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3459 inline long long atomic_llong::fetch_add
3460 ( long long __m__, memory_order __x__ ) volatile
3461 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3464 inline long long atomic_llong::fetch_sub
3465 ( long long __m__, memory_order __x__ ) volatile
3466 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3469 inline long long atomic_llong::fetch_and
3470 ( long long __m__, memory_order __x__ ) volatile
3471 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3474 inline long long atomic_llong::fetch_or
3475 ( long long __m__, memory_order __x__ ) volatile
3476 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3479 inline long long atomic_llong::fetch_xor
3480 ( long long __m__, memory_order __x__ ) volatile
3481 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3484 inline unsigned long long atomic_ullong::fetch_add
3485 ( unsigned long long __m__, memory_order __x__ ) volatile
3486 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3489 inline unsigned long long atomic_ullong::fetch_sub
3490 ( unsigned long long __m__, memory_order __x__ ) volatile
3491 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3494 inline unsigned long long atomic_ullong::fetch_and
3495 ( unsigned long long __m__, memory_order __x__ ) volatile
3496 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3499 inline unsigned long long atomic_ullong::fetch_or
3500 ( unsigned long long __m__, memory_order __x__ ) volatile
3501 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3504 inline unsigned long long atomic_ullong::fetch_xor
3505 ( unsigned long long __m__, memory_order __x__ ) volatile
3506 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3509 inline wchar_t atomic_wchar_t::fetch_add
3510 ( wchar_t __m__, memory_order __x__ ) volatile
3511 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3514 inline wchar_t atomic_wchar_t::fetch_sub
3515 ( wchar_t __m__, memory_order __x__ ) volatile
3516 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3519 inline wchar_t atomic_wchar_t::fetch_and
3520 ( wchar_t __m__, memory_order __x__ ) volatile
3521 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3524 inline wchar_t atomic_wchar_t::fetch_or
3525 ( wchar_t __m__, memory_order __x__ ) volatile
3526 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3529 inline wchar_t atomic_wchar_t::fetch_xor
3530 ( wchar_t __m__, memory_order __x__ ) volatile
3531 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// Member definitions for the atomic<T*> partial specialization. These wrap
// the untyped atomic_address base: loads/swaps static_cast the stored void*
// back to T*, and pointer arithmetic is scaled by sizeof(T) before being
// handed to the byte-offset atomic_address fetch operations.
3534 template< typename T >
3535 T* atomic<T*>::load( memory_order __x__ ) volatile
3536 { return static_cast<T*>( atomic_address::load( __x__ ) ); }
3538 template< typename T >
3539 T* atomic<T*>::swap( T* __v__, memory_order __x__ ) volatile
3540 { return static_cast<T*>( atomic_address::swap( __v__, __x__ ) ); }
// The expected-value reference is reinterpreted as void*& so the base
// class can update it in place on CAS failure.
// NOTE(review): reinterpret_cast<void**>(&__r__) assumes T* and void* have
// identical object representation — true on common platforms, but formally
// an aliasing liberty taken by this example implementation.
3542 template< typename T >
3543 bool atomic<T*>::compare_swap
3544 ( T*& __r__, T* __v__, memory_order __x__, memory_order __y__) volatile
3545 { return atomic_address::compare_swap( *reinterpret_cast<void**>( &__r__ ),
3546 static_cast<void*>( __v__ ), __x__, __y__ ); }
3547 //{ return _ATOMIC_CMPSWP_( this, &__r__, __v__, __x__ ); }
// Single-order overload: derive the failure order from the success order
// (acq_rel -> acquire, release -> relaxed) and delegate to the two-order form.
3549 template< typename T >
3550 bool atomic<T*>::compare_swap
3551 ( T*& __r__, T* __v__, memory_order __x__ ) volatile
3552 { return compare_swap( __r__, __v__, __x__,
3553 __x__ == memory_order_acq_rel ? memory_order_acquire :
3554 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
// fetch_add/fetch_sub take an element count (ptrdiff_t) and convert it to a
// byte offset with sizeof(T) * __v__ for the untyped base operation, giving
// ordinary pointer-arithmetic semantics to typed atomic pointers.
3556 template< typename T >
3557 T* atomic<T*>::fetch_add( ptrdiff_t __v__, memory_order __x__ ) volatile
3558 { return atomic_fetch_add_explicit( this, sizeof(T) * __v__, __x__ ); }
3560 template< typename T >
3561 T* atomic<T*>::fetch_sub( ptrdiff_t __v__, memory_order __x__ ) volatile
3562 { return atomic_fetch_sub_explicit( this, sizeof(T) * __v__, __x__ ); }