1 #include "memoryorder.h"
// CPP0X(feature) expands to nothing here, compiling out C++0x-only
// constructs ("= default", "= delete", constexpr ctors) for pre-C++0x
// compilers.  NOTE(review): presumably a C++0x build defines it to
// expand to its argument instead -- the conditional is not visible in
// this elided listing.
8 #define CPP0X( feature )
// atomic_flag: the minimal atomic type -- a flag supporting
// test-and-set and clear.  NOTE(review): this listing is elided; the
// struct's opening brace, data member, and closing "} atomic_flag;"
// are not visible here.
10 typedef struct atomic_flag
// Atomically sets the flag; forwards to the *_explicit free function
// (see inline definitions below).  Default ordering is seq_cst.
13 bool test_and_set( memory_order = memory_order_seq_cst ) volatile;
// Atomically clears the flag.
14 void clear( memory_order = memory_order_seq_cst ) volatile;
// Per-object fence; ordering argument is mandatory (no default).
15 void fence( memory_order ) const volatile;
// C++0x-only special members; compiled out elsewhere via CPP0X.
17 CPP0X( atomic_flag() = default; )
18 CPP0X( atomic_flag( const atomic_flag& ) = delete; )
// Copy assignment: deleted under C++0x; otherwise declared, never defined.
19 atomic_flag& operator =( const atomic_flag& ) CPP0X(=delete);
// Static initializer for a cleared flag:  atomic_flag f = ATOMIC_FLAG_INIT;
// Brace form implies the (elided) sole data member is a bool.
26 #define ATOMIC_FLAG_INIT { false }
// C-compatible free-function API for atomic_flag.  The *_explicit
// variants take a memory_order argument; the plain forms take none
// (the member functions above default to seq_cst).
32 extern bool atomic_flag_test_and_set( volatile atomic_flag* );
33 extern bool atomic_flag_test_and_set_explicit
34 ( volatile atomic_flag*, memory_order );
35 extern void atomic_flag_clear( volatile atomic_flag* );
36 extern void atomic_flag_clear_explicit
37 ( volatile atomic_flag*, memory_order );
38 extern void atomic_flag_fence
39 ( const volatile atomic_flag*, memory_order );
// Double-underscore helpers: implementation-internal wait primitives.
// NOTE(review): presumably used by the emulation/model-checking layer
// to block until the flag clears -- their definitions are not visible.
40 extern void __atomic_flag_wait__
41 ( volatile atomic_flag* );
42 extern void __atomic_flag_wait_explicit__
43 ( volatile atomic_flag*, memory_order );
51 inline bool atomic_flag::test_and_set( memory_order __x__ ) volatile
52 { return atomic_flag_test_and_set_explicit( this, __x__ ); }
54 inline void atomic_flag::clear( memory_order __x__ ) volatile
55 { atomic_flag_clear_explicit( this, __x__ ); }
57 inline void atomic_flag::fence( memory_order __x__ ) const volatile
58 { atomic_flag_fence( this, __x__ ); }
64 The remainder of the example implementation uses the following
65 macros. These macros exploit GNU extensions for value-returning
66 blocks (AKA statement expressions) and __typeof__.
68 The macros rely on data fields of atomic structs being named __f__.
69 Other symbols used are __a__=atomic, __e__=expected, __f__=field,
70 __g__=flag, __m__=modified, __o__=operation, __r__=result,
71 __p__=pointer to field, __v__=value (for single evaluation),
72 __x__ and __y__ = memory-ordering arguments (two names are used where an operation, such as compare_swap, takes two ordering parameters).
// _ATOMIC_LOAD_: GNU statement-expression performing an atomic load of
// (__a__)->__f__ with ordering __x__ via the model checker's
// model_read_action.  NOTE(review): listing is elided -- the closing
// "__r__; })" that yields the loaded value is not visible here.
// (No comments may be placed between the continuation lines: they
// would be spliced into the macro body.)
75 #define _ATOMIC_LOAD_( __a__, __x__ ) \
76 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
77 __typeof__((__a__)->__f__) __r__ = (__typeof__((__a__)->__f__))model_read_action((void *)__p__, __x__); \
// _ATOMIC_STORE_: atomically stores __m__ into (__a__)->__f__ with
// ordering __x__ via model_write_action.  __v__ evaluates __m__
// exactly once.  NOTE(review): the closing "__v__; })" is elided.
80 #define _ATOMIC_STORE_( __a__, __m__, __x__ ) \
81 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
82 __typeof__(__m__) __v__ = (__m__); \
83 model_write_action((void *) __p__, __x__, (uint64_t) __v__); \
// _ATOMIC_INIT_: non-ordered initialization of (__a__)->__f__ with
// __m__, recorded through model_init_action (no memory_order
// parameter).  NOTE(review): the closing "__v__; })" is elided.
87 #define _ATOMIC_INIT_( __a__, __m__ ) \
88 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
89 __typeof__(__m__) __v__ = (__m__); \
90 model_init_action((void *) __p__, (uint64_t) __v__); \
// _ATOMIC_MODIFY_: generic atomic read-modify-write.  Reads the old
// value with model_rmwr_action, applies "__copy__ __o__ __v__" --
// __o__ is pasted in as a compound-assignment operator token such as
// += or ^= -- then publishes the result with model_rmw_action.
// NOTE(review): the elided tail presumably yields __old__ so the
// expression has fetch-op (return-prior-value) semantics -- confirm
// against the full header.
93 #define _ATOMIC_MODIFY_( __a__, __o__, __m__, __x__ ) \
94 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
95 __typeof__((__a__)->__f__) __old__=(__typeof__((__a__)->__f__)) model_rmwr_action((void *)__p__, __x__); \
96 __typeof__(__m__) __v__ = (__m__); \
97 __typeof__((__a__)->__f__) __copy__= __old__; \
98 __copy__ __o__ __v__; \
99 model_rmw_action((void *)__p__, __x__, (uint64_t) __copy__); \
// _ATOMIC_CMPSWP_: compare-and-swap.  Reads the current value via
// model_rmwr_action; if it equals *__q__ (the expected value), writes
// the modified value __v__ with model_rmw_action and sets __r__ true;
// otherwise model_rmwc_action records the aborted RMW, the observed
// value is written back through __q__, and __r__ is false.
// NOTE(review): the declaration of __r__ and the closing "__r__; })"
// are elided from this listing.
102 #define _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ) \
103 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
104 __typeof__(__e__) __q__ = (__e__); \
105 __typeof__(__m__) __v__ = (__m__); \
107 __typeof__((__a__)->__f__) __t__=(__typeof__((__a__)->__f__)) model_rmwr_action((void *)__p__, __x__);\
108 if (__t__ == * __q__ ) { \
109 model_rmw_action((void *)__p__, __x__, (uint64_t) __v__); __r__ = true; } \
110 else { model_rmwc_action((void *)__p__, __x__); *__q__ = __t__; __r__ = false;} \
// _ATOMIC_FENCE_: per-object fence with ordering __x__.
// NOTE(review): the macro body is elided from this listing.
114 #define _ATOMIC_FENCE_( __a__, __x__ ) \
// Lock-free property macros, one per standard atomic integral type
// plus atomic_address.  NOTE(review): in the C++0x proposal's scheme
// 0 = never lock-free, 1 = sometimes, 2 = always; the value 1 here
// therefore claims only "sometimes lock-free" -- confirm intent
// against the full header.
118 #define ATOMIC_CHAR_LOCK_FREE 1
119 #define ATOMIC_CHAR16_T_LOCK_FREE 1
120 #define ATOMIC_CHAR32_T_LOCK_FREE 1
121 #define ATOMIC_WCHAR_T_LOCK_FREE 1
122 #define ATOMIC_SHORT_LOCK_FREE 1
123 #define ATOMIC_INT_LOCK_FREE 1
124 #define ATOMIC_LONG_LOCK_FREE 1
125 #define ATOMIC_LLONG_LOCK_FREE 1
126 #define ATOMIC_ADDRESS_LOCK_FREE 1
// atomic_bool: atomic wrapper for bool -- load/store/swap, two
// compare_swap overloads (explicit success+failure orderings, or a
// single defaulted ordering), and a per-object fence.  NOTE(review):
// listing is elided; braces and the __f__ data member are not visible.
128 typedef struct atomic_bool
131 bool is_lock_free() const volatile;
132 void store( bool, memory_order = memory_order_seq_cst ) volatile;
133 bool load( memory_order = memory_order_seq_cst ) volatile;
134 bool swap( bool, memory_order = memory_order_seq_cst ) volatile;
135 bool compare_swap ( bool&, bool, memory_order, memory_order ) volatile;
136 bool compare_swap ( bool&, bool,
137 memory_order = memory_order_seq_cst) volatile;
138 void fence( memory_order ) const volatile;
// NOTE(review): default ctor is "= delete" here, unlike atomic_address
// and the integral types which use "= default" -- confirm intentional.
140 CPP0X( atomic_bool() = delete; )
141 CPP0X( constexpr explicit atomic_bool( bool __v__ ) : __f__( __v__ ) { } )
142 CPP0X( atomic_bool( const atomic_bool& ) = delete; )
143 atomic_bool& operator =( const atomic_bool& ) CPP0X(=delete);
// Assignment from a plain bool: atomic store; returns the stored
// value (not *this), matching the proposal's interface.
145 bool operator =( bool __v__ ) volatile
146 { store( __v__ ); return __v__; }
// Friends implementing the C-compatible free-function API (some
// parameter lists are truncated by the elided listing).
148 friend void atomic_store_explicit( volatile atomic_bool*, bool,
150 friend bool atomic_load_explicit( volatile atomic_bool*, memory_order );
151 friend bool atomic_swap_explicit( volatile atomic_bool*, bool,
153 friend bool atomic_compare_swap_explicit( volatile atomic_bool*, bool*, bool,
154 memory_order, memory_order );
155 friend void atomic_fence( const volatile atomic_bool*, memory_order );
// atomic_address: atomic void* with pointer arithmetic via ptrdiff_t
// offsets (fetch_add / fetch_sub).  NOTE(review): listing is elided;
// braces and the __f__ data member are not visible.
163 typedef struct atomic_address
166 bool is_lock_free() const volatile;
167 void store( void*, memory_order = memory_order_seq_cst ) volatile;
168 void* load( memory_order = memory_order_seq_cst ) volatile;
169 void* swap( void*, memory_order = memory_order_seq_cst ) volatile;
170 bool compare_swap( void*&, void*, memory_order, memory_order ) volatile;
171 bool compare_swap( void*&, void*,
172 memory_order = memory_order_seq_cst ) volatile;
173 void fence( memory_order ) const volatile;
174 void* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
175 void* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
177 CPP0X( atomic_address() = default; )
178 CPP0X( constexpr explicit atomic_address( void* __v__ ) : __f__( __v__) { } )
179 CPP0X( atomic_address( const atomic_address& ) = delete; )
180 atomic_address& operator =( const atomic_address & ) CPP0X(=delete);
// Assignment from a raw pointer: atomic store; returns the stored value.
182 void* operator =( void* __v__ ) volatile
183 { store( __v__ ); return __v__; }
// Note: unlike the integral atomics' compound operators, += / -= here
// return fetch_add/fetch_sub's result directly -- i.e. the value
// BEFORE the addition -- since arithmetic on void* is not portable.
185 void* operator +=( ptrdiff_t __v__ ) volatile
186 { return fetch_add( __v__ ); }
188 void* operator -=( ptrdiff_t __v__ ) volatile
189 { return fetch_sub( __v__ ); }
// Friends implementing the free-function API (some parameter lists
// truncated by the elided listing).
191 friend void atomic_store_explicit( volatile atomic_address*, void*,
193 friend void* atomic_load_explicit( volatile atomic_address*, memory_order );
194 friend void* atomic_swap_explicit( volatile atomic_address*, void*,
196 friend bool atomic_compare_swap_explicit( volatile atomic_address*,
197 void**, void*, memory_order, memory_order );
198 friend void atomic_fence( const volatile atomic_address*, memory_order );
199 friend void* atomic_fetch_add_explicit( volatile atomic_address*, ptrdiff_t,
201 friend void* atomic_fetch_sub_explicit( volatile atomic_address*, ptrdiff_t,
// atomic_char: atomic wrapper for char -- the template for all the
// integral atomics below: load/store/swap/compare_swap/fence,
// fetch_{add,sub,and,or,xor}, and the matching operator overloads.
// NOTE(review): listing is elided; braces, the __f__ member and the
// first tokens of some declarations (store/swap/fetch_or) are missing.
210 typedef struct atomic_char
213 bool is_lock_free() const volatile;
215 memory_order = memory_order_seq_cst ) volatile;
216 char load( memory_order = memory_order_seq_cst ) volatile;
218 memory_order = memory_order_seq_cst ) volatile;
219 bool compare_swap( char&, char,
220 memory_order, memory_order ) volatile;
221 bool compare_swap( char&, char,
222 memory_order = memory_order_seq_cst ) volatile;
223 void fence( memory_order ) const volatile;
224 char fetch_add( char,
225 memory_order = memory_order_seq_cst ) volatile;
226 char fetch_sub( char,
227 memory_order = memory_order_seq_cst ) volatile;
228 char fetch_and( char,
229 memory_order = memory_order_seq_cst ) volatile;
231 memory_order = memory_order_seq_cst ) volatile;
232 char fetch_xor( char,
233 memory_order = memory_order_seq_cst ) volatile;
235 CPP0X( atomic_char() = default; )
236 CPP0X( constexpr atomic_char( char __v__ ) : __f__( __v__) { } )
237 CPP0X( atomic_char( const atomic_char& ) = delete; )
238 atomic_char& operator =( const atomic_char& ) CPP0X(=delete);
// Assignment from a plain char: atomic store; returns the stored value.
240 char operator =( char __v__ ) volatile
241 { store( __v__ ); return __v__; }
// Post-increment/-decrement return the OLD value (fetch result);
// pre-increment/-decrement and the compound assignments return the
// NEW value, recomputed by re-applying the operation to the fetched
// old value (e.g. fetch_add(v) + v).
243 char operator ++( int ) volatile
244 { return fetch_add( 1 ); }
246 char operator --( int ) volatile
247 { return fetch_sub( 1 ); }
249 char operator ++() volatile
250 { return fetch_add( 1 ) + 1; }
252 char operator --() volatile
253 { return fetch_sub( 1 ) - 1; }
255 char operator +=( char __v__ ) volatile
256 { return fetch_add( __v__ ) + __v__; }
258 char operator -=( char __v__ ) volatile
259 { return fetch_sub( __v__ ) - __v__; }
261 char operator &=( char __v__ ) volatile
262 { return fetch_and( __v__ ) & __v__; }
264 char operator |=( char __v__ ) volatile
265 { return fetch_or( __v__ ) | __v__; }
267 char operator ^=( char __v__ ) volatile
268 { return fetch_xor( __v__ ) ^ __v__; }
// Friends implementing the free-function API (some parameter lists
// truncated by the elided listing).
270 friend void atomic_store_explicit( volatile atomic_char*, char,
272 friend char atomic_load_explicit( volatile atomic_char*,
274 friend char atomic_swap_explicit( volatile atomic_char*,
275 char, memory_order );
276 friend bool atomic_compare_swap_explicit( volatile atomic_char*,
277 char*, char, memory_order, memory_order );
278 friend void atomic_fence( const volatile atomic_char*, memory_order );
279 friend char atomic_fetch_add_explicit( volatile atomic_char*,
280 char, memory_order );
281 friend char atomic_fetch_sub_explicit( volatile atomic_char*,
282 char, memory_order );
283 friend char atomic_fetch_and_explicit( volatile atomic_char*,
284 char, memory_order );
285 friend char atomic_fetch_or_explicit( volatile atomic_char*,
286 char, memory_order );
287 friend char atomic_fetch_xor_explicit( volatile atomic_char*,
288 char, memory_order );
// atomic_schar: atomic wrapper for signed char.  Interface and
// operator semantics mirror atomic_char exactly.  NOTE(review):
// listing is elided; braces and the __f__ member are not visible.
296 typedef struct atomic_schar
299 bool is_lock_free() const volatile;
300 void store( signed char,
301 memory_order = memory_order_seq_cst ) volatile;
302 signed char load( memory_order = memory_order_seq_cst ) volatile;
303 signed char swap( signed char,
304 memory_order = memory_order_seq_cst ) volatile;
305 bool compare_swap( signed char&, signed char,
306 memory_order, memory_order ) volatile;
307 bool compare_swap( signed char&, signed char,
308 memory_order = memory_order_seq_cst ) volatile;
309 void fence( memory_order ) const volatile;
310 signed char fetch_add( signed char,
311 memory_order = memory_order_seq_cst ) volatile;
312 signed char fetch_sub( signed char,
313 memory_order = memory_order_seq_cst ) volatile;
314 signed char fetch_and( signed char,
315 memory_order = memory_order_seq_cst ) volatile;
316 signed char fetch_or( signed char,
317 memory_order = memory_order_seq_cst ) volatile;
318 signed char fetch_xor( signed char,
319 memory_order = memory_order_seq_cst ) volatile;
321 CPP0X( atomic_schar() = default; )
322 CPP0X( constexpr atomic_schar( signed char __v__ ) : __f__( __v__) { } )
323 CPP0X( atomic_schar( const atomic_schar& ) = delete; )
324 atomic_schar& operator =( const atomic_schar& ) CPP0X(=delete);
326 signed char operator =( signed char __v__ ) volatile
327 { store( __v__ ); return __v__; }
// Post-inc/dec return the old value; pre-inc/dec and compound
// assignments return the new value (old value with op re-applied).
329 signed char operator ++( int ) volatile
330 { return fetch_add( 1 ); }
332 signed char operator --( int ) volatile
333 { return fetch_sub( 1 ); }
335 signed char operator ++() volatile
336 { return fetch_add( 1 ) + 1; }
338 signed char operator --() volatile
339 { return fetch_sub( 1 ) - 1; }
341 signed char operator +=( signed char __v__ ) volatile
342 { return fetch_add( __v__ ) + __v__; }
344 signed char operator -=( signed char __v__ ) volatile
345 { return fetch_sub( __v__ ) - __v__; }
347 signed char operator &=( signed char __v__ ) volatile
348 { return fetch_and( __v__ ) & __v__; }
350 signed char operator |=( signed char __v__ ) volatile
351 { return fetch_or( __v__ ) | __v__; }
353 signed char operator ^=( signed char __v__ ) volatile
354 { return fetch_xor( __v__ ) ^ __v__; }
356 friend void atomic_store_explicit( volatile atomic_schar*, signed char,
358 friend signed char atomic_load_explicit( volatile atomic_schar*,
360 friend signed char atomic_swap_explicit( volatile atomic_schar*,
361 signed char, memory_order );
362 friend bool atomic_compare_swap_explicit( volatile atomic_schar*,
363 signed char*, signed char, memory_order, memory_order );
364 friend void atomic_fence( const volatile atomic_schar*, memory_order );
365 friend signed char atomic_fetch_add_explicit( volatile atomic_schar*,
366 signed char, memory_order );
367 friend signed char atomic_fetch_sub_explicit( volatile atomic_schar*,
368 signed char, memory_order );
369 friend signed char atomic_fetch_and_explicit( volatile atomic_schar*,
370 signed char, memory_order );
371 friend signed char atomic_fetch_or_explicit( volatile atomic_schar*,
372 signed char, memory_order );
373 friend signed char atomic_fetch_xor_explicit( volatile atomic_schar*,
374 signed char, memory_order );
// atomic_uchar: atomic wrapper for unsigned char.  Interface and
// operator semantics mirror atomic_char exactly.  NOTE(review):
// listing is elided; braces and the __f__ member are not visible.
382 typedef struct atomic_uchar
385 bool is_lock_free() const volatile;
386 void store( unsigned char,
387 memory_order = memory_order_seq_cst ) volatile;
388 unsigned char load( memory_order = memory_order_seq_cst ) volatile;
389 unsigned char swap( unsigned char,
390 memory_order = memory_order_seq_cst ) volatile;
391 bool compare_swap( unsigned char&, unsigned char,
392 memory_order, memory_order ) volatile;
393 bool compare_swap( unsigned char&, unsigned char,
394 memory_order = memory_order_seq_cst ) volatile;
395 void fence( memory_order ) const volatile;
396 unsigned char fetch_add( unsigned char,
397 memory_order = memory_order_seq_cst ) volatile;
398 unsigned char fetch_sub( unsigned char,
399 memory_order = memory_order_seq_cst ) volatile;
400 unsigned char fetch_and( unsigned char,
401 memory_order = memory_order_seq_cst ) volatile;
402 unsigned char fetch_or( unsigned char,
403 memory_order = memory_order_seq_cst ) volatile;
404 unsigned char fetch_xor( unsigned char,
405 memory_order = memory_order_seq_cst ) volatile;
407 CPP0X( atomic_uchar() = default; )
408 CPP0X( constexpr atomic_uchar( unsigned char __v__ ) : __f__( __v__) { } )
409 CPP0X( atomic_uchar( const atomic_uchar& ) = delete; )
410 atomic_uchar& operator =( const atomic_uchar& ) CPP0X(=delete);
412 unsigned char operator =( unsigned char __v__ ) volatile
413 { store( __v__ ); return __v__; }
// Post-inc/dec return the old value; pre-inc/dec and compound
// assignments return the new value (old value with op re-applied).
415 unsigned char operator ++( int ) volatile
416 { return fetch_add( 1 ); }
418 unsigned char operator --( int ) volatile
419 { return fetch_sub( 1 ); }
421 unsigned char operator ++() volatile
422 { return fetch_add( 1 ) + 1; }
424 unsigned char operator --() volatile
425 { return fetch_sub( 1 ) - 1; }
427 unsigned char operator +=( unsigned char __v__ ) volatile
428 { return fetch_add( __v__ ) + __v__; }
430 unsigned char operator -=( unsigned char __v__ ) volatile
431 { return fetch_sub( __v__ ) - __v__; }
433 unsigned char operator &=( unsigned char __v__ ) volatile
434 { return fetch_and( __v__ ) & __v__; }
436 unsigned char operator |=( unsigned char __v__ ) volatile
437 { return fetch_or( __v__ ) | __v__; }
439 unsigned char operator ^=( unsigned char __v__ ) volatile
440 { return fetch_xor( __v__ ) ^ __v__; }
442 friend void atomic_store_explicit( volatile atomic_uchar*, unsigned char,
444 friend unsigned char atomic_load_explicit( volatile atomic_uchar*,
446 friend unsigned char atomic_swap_explicit( volatile atomic_uchar*,
447 unsigned char, memory_order );
448 friend bool atomic_compare_swap_explicit( volatile atomic_uchar*,
449 unsigned char*, unsigned char, memory_order, memory_order );
450 friend void atomic_fence( const volatile atomic_uchar*, memory_order );
451 friend unsigned char atomic_fetch_add_explicit( volatile atomic_uchar*,
452 unsigned char, memory_order );
453 friend unsigned char atomic_fetch_sub_explicit( volatile atomic_uchar*,
454 unsigned char, memory_order );
455 friend unsigned char atomic_fetch_and_explicit( volatile atomic_uchar*,
456 unsigned char, memory_order );
457 friend unsigned char atomic_fetch_or_explicit( volatile atomic_uchar*,
458 unsigned char, memory_order );
459 friend unsigned char atomic_fetch_xor_explicit( volatile atomic_uchar*,
460 unsigned char, memory_order );
// atomic_short: atomic wrapper for short.  Interface and operator
// semantics mirror atomic_char exactly.  NOTE(review): listing is
// elided; braces, the __f__ member, and the first tokens of the
// store/swap declarations are not visible.
468 typedef struct atomic_short
471 bool is_lock_free() const volatile;
473 memory_order = memory_order_seq_cst ) volatile;
474 short load( memory_order = memory_order_seq_cst ) volatile;
476 memory_order = memory_order_seq_cst ) volatile;
477 bool compare_swap( short&, short,
478 memory_order, memory_order ) volatile;
479 bool compare_swap( short&, short,
480 memory_order = memory_order_seq_cst ) volatile;
481 void fence( memory_order ) const volatile;
482 short fetch_add( short,
483 memory_order = memory_order_seq_cst ) volatile;
484 short fetch_sub( short,
485 memory_order = memory_order_seq_cst ) volatile;
486 short fetch_and( short,
487 memory_order = memory_order_seq_cst ) volatile;
488 short fetch_or( short,
489 memory_order = memory_order_seq_cst ) volatile;
490 short fetch_xor( short,
491 memory_order = memory_order_seq_cst ) volatile;
493 CPP0X( atomic_short() = default; )
494 CPP0X( constexpr atomic_short( short __v__ ) : __f__( __v__) { } )
495 CPP0X( atomic_short( const atomic_short& ) = delete; )
496 atomic_short& operator =( const atomic_short& ) CPP0X(=delete);
498 short operator =( short __v__ ) volatile
499 { store( __v__ ); return __v__; }
// Post-inc/dec return the old value; pre-inc/dec and compound
// assignments return the new value (old value with op re-applied).
501 short operator ++( int ) volatile
502 { return fetch_add( 1 ); }
504 short operator --( int ) volatile
505 { return fetch_sub( 1 ); }
507 short operator ++() volatile
508 { return fetch_add( 1 ) + 1; }
510 short operator --() volatile
511 { return fetch_sub( 1 ) - 1; }
513 short operator +=( short __v__ ) volatile
514 { return fetch_add( __v__ ) + __v__; }
516 short operator -=( short __v__ ) volatile
517 { return fetch_sub( __v__ ) - __v__; }
519 short operator &=( short __v__ ) volatile
520 { return fetch_and( __v__ ) & __v__; }
522 short operator |=( short __v__ ) volatile
523 { return fetch_or( __v__ ) | __v__; }
525 short operator ^=( short __v__ ) volatile
526 { return fetch_xor( __v__ ) ^ __v__; }
528 friend void atomic_store_explicit( volatile atomic_short*, short,
530 friend short atomic_load_explicit( volatile atomic_short*,
532 friend short atomic_swap_explicit( volatile atomic_short*,
533 short, memory_order );
534 friend bool atomic_compare_swap_explicit( volatile atomic_short*,
535 short*, short, memory_order, memory_order );
536 friend void atomic_fence( const volatile atomic_short*, memory_order );
537 friend short atomic_fetch_add_explicit( volatile atomic_short*,
538 short, memory_order );
539 friend short atomic_fetch_sub_explicit( volatile atomic_short*,
540 short, memory_order );
541 friend short atomic_fetch_and_explicit( volatile atomic_short*,
542 short, memory_order );
543 friend short atomic_fetch_or_explicit( volatile atomic_short*,
544 short, memory_order );
545 friend short atomic_fetch_xor_explicit( volatile atomic_short*,
546 short, memory_order );
// atomic_ushort: atomic wrapper for unsigned short.  Interface and
// operator semantics mirror atomic_char exactly.  This is the one
// struct in the listing whose data member (__f__) is visible, at the
// end.  NOTE(review): braces are still elided.
554 typedef struct atomic_ushort
557 bool is_lock_free() const volatile;
558 void store( unsigned short,
559 memory_order = memory_order_seq_cst ) volatile;
560 unsigned short load( memory_order = memory_order_seq_cst ) volatile;
561 unsigned short swap( unsigned short,
562 memory_order = memory_order_seq_cst ) volatile;
563 bool compare_swap( unsigned short&, unsigned short,
564 memory_order, memory_order ) volatile;
565 bool compare_swap( unsigned short&, unsigned short,
566 memory_order = memory_order_seq_cst ) volatile;
567 void fence( memory_order ) const volatile;
568 unsigned short fetch_add( unsigned short,
569 memory_order = memory_order_seq_cst ) volatile;
570 unsigned short fetch_sub( unsigned short,
571 memory_order = memory_order_seq_cst ) volatile;
572 unsigned short fetch_and( unsigned short,
573 memory_order = memory_order_seq_cst ) volatile;
574 unsigned short fetch_or( unsigned short,
575 memory_order = memory_order_seq_cst ) volatile;
576 unsigned short fetch_xor( unsigned short,
577 memory_order = memory_order_seq_cst ) volatile;
579 CPP0X( atomic_ushort() = default; )
580 CPP0X( constexpr atomic_ushort( unsigned short __v__ ) : __f__( __v__) { } )
581 CPP0X( atomic_ushort( const atomic_ushort& ) = delete; )
582 atomic_ushort& operator =( const atomic_ushort& ) CPP0X(=delete);
584 unsigned short operator =( unsigned short __v__ ) volatile
585 { store( __v__ ); return __v__; }
// Post-inc/dec return the old value; pre-inc/dec and compound
// assignments return the new value (old value with op re-applied).
587 unsigned short operator ++( int ) volatile
588 { return fetch_add( 1 ); }
590 unsigned short operator --( int ) volatile
591 { return fetch_sub( 1 ); }
593 unsigned short operator ++() volatile
594 { return fetch_add( 1 ) + 1; }
596 unsigned short operator --() volatile
597 { return fetch_sub( 1 ) - 1; }
599 unsigned short operator +=( unsigned short __v__ ) volatile
600 { return fetch_add( __v__ ) + __v__; }
602 unsigned short operator -=( unsigned short __v__ ) volatile
603 { return fetch_sub( __v__ ) - __v__; }
605 unsigned short operator &=( unsigned short __v__ ) volatile
606 { return fetch_and( __v__ ) & __v__; }
608 unsigned short operator |=( unsigned short __v__ ) volatile
609 { return fetch_or( __v__ ) | __v__; }
611 unsigned short operator ^=( unsigned short __v__ ) volatile
612 { return fetch_xor( __v__ ) ^ __v__; }
614 friend void atomic_store_explicit( volatile atomic_ushort*, unsigned short,
616 friend unsigned short atomic_load_explicit( volatile atomic_ushort*,
618 friend unsigned short atomic_swap_explicit( volatile atomic_ushort*,
619 unsigned short, memory_order );
620 friend bool atomic_compare_swap_explicit( volatile atomic_ushort*,
621 unsigned short*, unsigned short, memory_order, memory_order );
622 friend void atomic_fence( const volatile atomic_ushort*, memory_order );
623 friend unsigned short atomic_fetch_add_explicit( volatile atomic_ushort*,
624 unsigned short, memory_order );
625 friend unsigned short atomic_fetch_sub_explicit( volatile atomic_ushort*,
626 unsigned short, memory_order );
627 friend unsigned short atomic_fetch_and_explicit( volatile atomic_ushort*,
628 unsigned short, memory_order );
629 friend unsigned short atomic_fetch_or_explicit( volatile atomic_ushort*,
630 unsigned short, memory_order );
631 friend unsigned short atomic_fetch_xor_explicit( volatile atomic_ushort*,
632 unsigned short, memory_order );
// The single data field operated on by the _ATOMIC_* macros above.
636 unsigned short __f__;
// atomic_int: atomic wrapper for int.  Interface and operator
// semantics mirror atomic_char exactly.  NOTE(review): this struct is
// the most heavily elided -- braces, __f__, and the leading tokens of
// store/swap/fetch_* declarations (and several friend parameter
// lists) are not visible here.
640 typedef struct atomic_int
643 bool is_lock_free() const volatile;
645 memory_order = memory_order_seq_cst ) volatile;
646 int load( memory_order = memory_order_seq_cst ) volatile;
648 memory_order = memory_order_seq_cst ) volatile;
649 bool compare_swap( int&, int,
650 memory_order, memory_order ) volatile;
651 bool compare_swap( int&, int,
652 memory_order = memory_order_seq_cst ) volatile;
653 void fence( memory_order ) const volatile;
655 memory_order = memory_order_seq_cst ) volatile;
657 memory_order = memory_order_seq_cst ) volatile;
659 memory_order = memory_order_seq_cst ) volatile;
661 memory_order = memory_order_seq_cst ) volatile;
663 memory_order = memory_order_seq_cst ) volatile;
665 CPP0X( atomic_int() = default; )
666 CPP0X( constexpr atomic_int( int __v__ ) : __f__( __v__) { } )
667 CPP0X( atomic_int( const atomic_int& ) = delete; )
668 atomic_int& operator =( const atomic_int& ) CPP0X(=delete);
670 int operator =( int __v__ ) volatile
671 { store( __v__ ); return __v__; }
// Post-inc/dec return the old value; pre-inc/dec and compound
// assignments return the new value (old value with op re-applied).
673 int operator ++( int ) volatile
674 { return fetch_add( 1 ); }
676 int operator --( int ) volatile
677 { return fetch_sub( 1 ); }
679 int operator ++() volatile
680 { return fetch_add( 1 ) + 1; }
682 int operator --() volatile
683 { return fetch_sub( 1 ) - 1; }
685 int operator +=( int __v__ ) volatile
686 { return fetch_add( __v__ ) + __v__; }
688 int operator -=( int __v__ ) volatile
689 { return fetch_sub( __v__ ) - __v__; }
691 int operator &=( int __v__ ) volatile
692 { return fetch_and( __v__ ) & __v__; }
694 int operator |=( int __v__ ) volatile
695 { return fetch_or( __v__ ) | __v__; }
697 int operator ^=( int __v__ ) volatile
698 { return fetch_xor( __v__ ) ^ __v__; }
700 friend void atomic_store_explicit( volatile atomic_int*, int,
702 friend int atomic_load_explicit( volatile atomic_int*,
704 friend int atomic_swap_explicit( volatile atomic_int*,
706 friend bool atomic_compare_swap_explicit( volatile atomic_int*,
707 int*, int, memory_order, memory_order );
708 friend void atomic_fence( const volatile atomic_int*, memory_order );
709 friend int atomic_fetch_add_explicit( volatile atomic_int*,
711 friend int atomic_fetch_sub_explicit( volatile atomic_int*,
713 friend int atomic_fetch_and_explicit( volatile atomic_int*,
715 friend int atomic_fetch_or_explicit( volatile atomic_int*,
717 friend int atomic_fetch_xor_explicit( volatile atomic_int*,
// atomic_uint: atomic wrapper for unsigned int.  Interface and
// operator semantics mirror atomic_char exactly.  NOTE(review):
// listing is elided; braces and the __f__ member are not visible.
726 typedef struct atomic_uint
729 bool is_lock_free() const volatile;
730 void store( unsigned int,
731 memory_order = memory_order_seq_cst ) volatile;
732 unsigned int load( memory_order = memory_order_seq_cst ) volatile;
733 unsigned int swap( unsigned int,
734 memory_order = memory_order_seq_cst ) volatile;
735 bool compare_swap( unsigned int&, unsigned int,
736 memory_order, memory_order ) volatile;
737 bool compare_swap( unsigned int&, unsigned int,
738 memory_order = memory_order_seq_cst ) volatile;
739 void fence( memory_order ) const volatile;
740 unsigned int fetch_add( unsigned int,
741 memory_order = memory_order_seq_cst ) volatile;
742 unsigned int fetch_sub( unsigned int,
743 memory_order = memory_order_seq_cst ) volatile;
744 unsigned int fetch_and( unsigned int,
745 memory_order = memory_order_seq_cst ) volatile;
746 unsigned int fetch_or( unsigned int,
747 memory_order = memory_order_seq_cst ) volatile;
748 unsigned int fetch_xor( unsigned int,
749 memory_order = memory_order_seq_cst ) volatile;
751 CPP0X( atomic_uint() = default; )
752 CPP0X( constexpr atomic_uint( unsigned int __v__ ) : __f__( __v__) { } )
753 CPP0X( atomic_uint( const atomic_uint& ) = delete; )
754 atomic_uint& operator =( const atomic_uint& ) CPP0X(=delete);
756 unsigned int operator =( unsigned int __v__ ) volatile
757 { store( __v__ ); return __v__; }
// Post-inc/dec return the old value; pre-inc/dec and compound
// assignments return the new value (old value with op re-applied).
759 unsigned int operator ++( int ) volatile
760 { return fetch_add( 1 ); }
762 unsigned int operator --( int ) volatile
763 { return fetch_sub( 1 ); }
765 unsigned int operator ++() volatile
766 { return fetch_add( 1 ) + 1; }
768 unsigned int operator --() volatile
769 { return fetch_sub( 1 ) - 1; }
771 unsigned int operator +=( unsigned int __v__ ) volatile
772 { return fetch_add( __v__ ) + __v__; }
774 unsigned int operator -=( unsigned int __v__ ) volatile
775 { return fetch_sub( __v__ ) - __v__; }
777 unsigned int operator &=( unsigned int __v__ ) volatile
778 { return fetch_and( __v__ ) & __v__; }
780 unsigned int operator |=( unsigned int __v__ ) volatile
781 { return fetch_or( __v__ ) | __v__; }
783 unsigned int operator ^=( unsigned int __v__ ) volatile
784 { return fetch_xor( __v__ ) ^ __v__; }
786 friend void atomic_store_explicit( volatile atomic_uint*, unsigned int,
788 friend unsigned int atomic_load_explicit( volatile atomic_uint*,
790 friend unsigned int atomic_swap_explicit( volatile atomic_uint*,
791 unsigned int, memory_order );
792 friend bool atomic_compare_swap_explicit( volatile atomic_uint*,
793 unsigned int*, unsigned int, memory_order, memory_order );
794 friend void atomic_fence( const volatile atomic_uint*, memory_order );
795 friend unsigned int atomic_fetch_add_explicit( volatile atomic_uint*,
796 unsigned int, memory_order );
797 friend unsigned int atomic_fetch_sub_explicit( volatile atomic_uint*,
798 unsigned int, memory_order );
799 friend unsigned int atomic_fetch_and_explicit( volatile atomic_uint*,
800 unsigned int, memory_order );
801 friend unsigned int atomic_fetch_or_explicit( volatile atomic_uint*,
802 unsigned int, memory_order );
803 friend unsigned int atomic_fetch_xor_explicit( volatile atomic_uint*,
804 unsigned int, memory_order );
// atomic_long: atomic wrapper for long.  Interface and operator
// semantics mirror atomic_char exactly.  NOTE(review): listing is
// elided; braces, __f__, and the leading tokens of the
// store/swap/fetch_or declarations are not visible.
812 typedef struct atomic_long
815 bool is_lock_free() const volatile;
817 memory_order = memory_order_seq_cst ) volatile;
818 long load( memory_order = memory_order_seq_cst ) volatile;
820 memory_order = memory_order_seq_cst ) volatile;
821 bool compare_swap( long&, long,
822 memory_order, memory_order ) volatile;
823 bool compare_swap( long&, long,
824 memory_order = memory_order_seq_cst ) volatile;
825 void fence( memory_order ) const volatile;
826 long fetch_add( long,
827 memory_order = memory_order_seq_cst ) volatile;
828 long fetch_sub( long,
829 memory_order = memory_order_seq_cst ) volatile;
830 long fetch_and( long,
831 memory_order = memory_order_seq_cst ) volatile;
833 memory_order = memory_order_seq_cst ) volatile;
834 long fetch_xor( long,
835 memory_order = memory_order_seq_cst ) volatile;
837 CPP0X( atomic_long() = default; )
838 CPP0X( constexpr atomic_long( long __v__ ) : __f__( __v__) { } )
839 CPP0X( atomic_long( const atomic_long& ) = delete; )
840 atomic_long& operator =( const atomic_long& ) CPP0X(=delete);
842 long operator =( long __v__ ) volatile
843 { store( __v__ ); return __v__; }
// Post-inc/dec return the old value; pre-inc/dec and compound
// assignments return the new value (old value with op re-applied).
845 long operator ++( int ) volatile
846 { return fetch_add( 1 ); }
848 long operator --( int ) volatile
849 { return fetch_sub( 1 ); }
851 long operator ++() volatile
852 { return fetch_add( 1 ) + 1; }
854 long operator --() volatile
855 { return fetch_sub( 1 ) - 1; }
857 long operator +=( long __v__ ) volatile
858 { return fetch_add( __v__ ) + __v__; }
860 long operator -=( long __v__ ) volatile
861 { return fetch_sub( __v__ ) - __v__; }
863 long operator &=( long __v__ ) volatile
864 { return fetch_and( __v__ ) & __v__; }
866 long operator |=( long __v__ ) volatile
867 { return fetch_or( __v__ ) | __v__; }
869 long operator ^=( long __v__ ) volatile
870 { return fetch_xor( __v__ ) ^ __v__; }
872 friend void atomic_store_explicit( volatile atomic_long*, long,
874 friend long atomic_load_explicit( volatile atomic_long*,
876 friend long atomic_swap_explicit( volatile atomic_long*,
877 long, memory_order );
878 friend bool atomic_compare_swap_explicit( volatile atomic_long*,
879 long*, long, memory_order, memory_order );
880 friend void atomic_fence( const volatile atomic_long*, memory_order );
881 friend long atomic_fetch_add_explicit( volatile atomic_long*,
882 long, memory_order );
883 friend long atomic_fetch_sub_explicit( volatile atomic_long*,
884 long, memory_order );
885 friend long atomic_fetch_and_explicit( volatile atomic_long*,
886 long, memory_order );
887 friend long atomic_fetch_or_explicit( volatile atomic_long*,
888 long, memory_order );
889 friend long atomic_fetch_xor_explicit( volatile atomic_long*,
890 long, memory_order );
// atomic_ulong: atomic wrapper for unsigned long.  Interface and
// operator semantics mirror atomic_char exactly.  NOTE(review):
// listing is elided; braces and the __f__ member are not visible.
898 typedef struct atomic_ulong
901 bool is_lock_free() const volatile;
902 void store( unsigned long,
903 memory_order = memory_order_seq_cst ) volatile;
904 unsigned long load( memory_order = memory_order_seq_cst ) volatile;
905 unsigned long swap( unsigned long,
906 memory_order = memory_order_seq_cst ) volatile;
907 bool compare_swap( unsigned long&, unsigned long,
908 memory_order, memory_order ) volatile;
909 bool compare_swap( unsigned long&, unsigned long,
910 memory_order = memory_order_seq_cst ) volatile;
911 void fence( memory_order ) const volatile;
912 unsigned long fetch_add( unsigned long,
913 memory_order = memory_order_seq_cst ) volatile;
914 unsigned long fetch_sub( unsigned long,
915 memory_order = memory_order_seq_cst ) volatile;
916 unsigned long fetch_and( unsigned long,
917 memory_order = memory_order_seq_cst ) volatile;
918 unsigned long fetch_or( unsigned long,
919 memory_order = memory_order_seq_cst ) volatile;
920 unsigned long fetch_xor( unsigned long,
921 memory_order = memory_order_seq_cst ) volatile;
923 CPP0X( atomic_ulong() = default; )
924 CPP0X( constexpr atomic_ulong( unsigned long __v__ ) : __f__( __v__) { } )
925 CPP0X( atomic_ulong( const atomic_ulong& ) = delete; )
926 atomic_ulong& operator =( const atomic_ulong& ) CPP0X(=delete);
928 unsigned long operator =( unsigned long __v__ ) volatile
929 { store( __v__ ); return __v__; }
// Post-inc/dec return the old value; pre-inc/dec and compound
// assignments return the new value (old value with op re-applied).
931 unsigned long operator ++( int ) volatile
932 { return fetch_add( 1 ); }
934 unsigned long operator --( int ) volatile
935 { return fetch_sub( 1 ); }
937 unsigned long operator ++() volatile
938 { return fetch_add( 1 ) + 1; }
940 unsigned long operator --() volatile
941 { return fetch_sub( 1 ) - 1; }
943 unsigned long operator +=( unsigned long __v__ ) volatile
944 { return fetch_add( __v__ ) + __v__; }
946 unsigned long operator -=( unsigned long __v__ ) volatile
947 { return fetch_sub( __v__ ) - __v__; }
949 unsigned long operator &=( unsigned long __v__ ) volatile
950 { return fetch_and( __v__ ) & __v__; }
952 unsigned long operator |=( unsigned long __v__ ) volatile
953 { return fetch_or( __v__ ) | __v__; }
955 unsigned long operator ^=( unsigned long __v__ ) volatile
956 { return fetch_xor( __v__ ) ^ __v__; }
958 friend void atomic_store_explicit( volatile atomic_ulong*, unsigned long,
960 friend unsigned long atomic_load_explicit( volatile atomic_ulong*,
962 friend unsigned long atomic_swap_explicit( volatile atomic_ulong*,
963 unsigned long, memory_order );
964 friend bool atomic_compare_swap_explicit( volatile atomic_ulong*,
965 unsigned long*, unsigned long, memory_order, memory_order );
966 friend void atomic_fence( const volatile atomic_ulong*, memory_order );
967 friend unsigned long atomic_fetch_add_explicit( volatile atomic_ulong*,
968 unsigned long, memory_order );
969 friend unsigned long atomic_fetch_sub_explicit( volatile atomic_ulong*,
970 unsigned long, memory_order );
971 friend unsigned long atomic_fetch_and_explicit( volatile atomic_ulong*,
972 unsigned long, memory_order );
973 friend unsigned long atomic_fetch_or_explicit( volatile atomic_ulong*,
974 unsigned long, memory_order );
975 friend unsigned long atomic_fetch_xor_explicit( volatile atomic_ulong*,
976 unsigned long, memory_order );
/* Atomic long long: same shape as atomic_ulong above -- member operations
   defaulting to memory_order_seq_cst, compiled-out CPP0X declarations,
   compound assignment returning the post-operation value, and friendship
   for the C-style free-function API.  (Opening/closing braces and the
   __f__ field are on lines elided from this excerpt.) */
984 typedef struct atomic_llong
987 bool is_lock_free() const volatile;
988 void store( long long,
989 memory_order = memory_order_seq_cst ) volatile;
990 long long load( memory_order = memory_order_seq_cst ) volatile;
991 long long swap( long long,
992 memory_order = memory_order_seq_cst ) volatile;
993 bool compare_swap( long long&, long long,
994 memory_order, memory_order ) volatile;
995 bool compare_swap( long long&, long long,
996 memory_order = memory_order_seq_cst ) volatile;
997 void fence( memory_order ) const volatile;
998 long long fetch_add( long long,
999 memory_order = memory_order_seq_cst ) volatile;
1000 long long fetch_sub( long long,
1001 memory_order = memory_order_seq_cst ) volatile;
1002 long long fetch_and( long long,
1003 memory_order = memory_order_seq_cst ) volatile;
1004 long long fetch_or( long long,
1005 memory_order = memory_order_seq_cst ) volatile;
1006 long long fetch_xor( long long,
1007 memory_order = memory_order_seq_cst ) volatile;
1009 CPP0X( atomic_llong() = default; )
1010 CPP0X( constexpr atomic_llong( long long __v__ ) : __f__( __v__) { } )
1011 CPP0X( atomic_llong( const atomic_llong& ) = delete; )
1012 atomic_llong& operator =( const atomic_llong& ) CPP0X(=delete);
1014 long long operator =( long long __v__ ) volatile
1015 { store( __v__ ); return __v__; }
1017 long long operator ++( int ) volatile
1018 { return fetch_add( 1 ); }
1020 long long operator --( int ) volatile
1021 { return fetch_sub( 1 ); }
1023 long long operator ++() volatile
1024 { return fetch_add( 1 ) + 1; }
1026 long long operator --() volatile
1027 { return fetch_sub( 1 ) - 1; }
1029 long long operator +=( long long __v__ ) volatile
1030 { return fetch_add( __v__ ) + __v__; }
1032 long long operator -=( long long __v__ ) volatile
1033 { return fetch_sub( __v__ ) - __v__; }
1035 long long operator &=( long long __v__ ) volatile
1036 { return fetch_and( __v__ ) & __v__; }
1038 long long operator |=( long long __v__ ) volatile
1039 { return fetch_or( __v__ ) | __v__; }
1041 long long operator ^=( long long __v__ ) volatile
1042 { return fetch_xor( __v__ ) ^ __v__; }
1044 friend void atomic_store_explicit( volatile atomic_llong*, long long,
1046 friend long long atomic_load_explicit( volatile atomic_llong*,
1048 friend long long atomic_swap_explicit( volatile atomic_llong*,
1049 long long, memory_order );
1050 friend bool atomic_compare_swap_explicit( volatile atomic_llong*,
1051 long long*, long long, memory_order, memory_order );
1052 friend void atomic_fence( const volatile atomic_llong*, memory_order );
1053 friend long long atomic_fetch_add_explicit( volatile atomic_llong*,
1054 long long, memory_order );
1055 friend long long atomic_fetch_sub_explicit( volatile atomic_llong*,
1056 long long, memory_order );
1057 friend long long atomic_fetch_and_explicit( volatile atomic_llong*,
1058 long long, memory_order );
1059 friend long long atomic_fetch_or_explicit( volatile atomic_llong*,
1060 long long, memory_order );
1061 friend long long atomic_fetch_xor_explicit( volatile atomic_llong*,
1062 long long, memory_order );
/* Atomic unsigned long long: same pattern as the preceding integral
   atomic structs.  Here the representation field __f__ is visible at the
   end of the span (the closing brace is on an elided line); the field
   name __f__ is what the _ATOMIC_* macros dereference. */
1070 typedef struct atomic_ullong
1073 bool is_lock_free() const volatile;
1074 void store( unsigned long long,
1075 memory_order = memory_order_seq_cst ) volatile;
1076 unsigned long long load( memory_order = memory_order_seq_cst ) volatile;
1077 unsigned long long swap( unsigned long long,
1078 memory_order = memory_order_seq_cst ) volatile;
1079 bool compare_swap( unsigned long long&, unsigned long long,
1080 memory_order, memory_order ) volatile;
1081 bool compare_swap( unsigned long long&, unsigned long long,
1082 memory_order = memory_order_seq_cst ) volatile;
1083 void fence( memory_order ) const volatile;
1084 unsigned long long fetch_add( unsigned long long,
1085 memory_order = memory_order_seq_cst ) volatile;
1086 unsigned long long fetch_sub( unsigned long long,
1087 memory_order = memory_order_seq_cst ) volatile;
1088 unsigned long long fetch_and( unsigned long long,
1089 memory_order = memory_order_seq_cst ) volatile;
1090 unsigned long long fetch_or( unsigned long long,
1091 memory_order = memory_order_seq_cst ) volatile;
1092 unsigned long long fetch_xor( unsigned long long,
1093 memory_order = memory_order_seq_cst ) volatile;
1095 CPP0X( atomic_ullong() = default; )
1096 CPP0X( constexpr atomic_ullong( unsigned long long __v__ ) : __f__( __v__) { } )
1097 CPP0X( atomic_ullong( const atomic_ullong& ) = delete; )
1098 atomic_ullong& operator =( const atomic_ullong& ) CPP0X(=delete);
1100 unsigned long long operator =( unsigned long long __v__ ) volatile
1101 { store( __v__ ); return __v__; }
1103 unsigned long long operator ++( int ) volatile
1104 { return fetch_add( 1 ); }
1106 unsigned long long operator --( int ) volatile
1107 { return fetch_sub( 1 ); }
1109 unsigned long long operator ++() volatile
1110 { return fetch_add( 1 ) + 1; }
1112 unsigned long long operator --() volatile
1113 { return fetch_sub( 1 ) - 1; }
1115 unsigned long long operator +=( unsigned long long __v__ ) volatile
1116 { return fetch_add( __v__ ) + __v__; }
1118 unsigned long long operator -=( unsigned long long __v__ ) volatile
1119 { return fetch_sub( __v__ ) - __v__; }
1121 unsigned long long operator &=( unsigned long long __v__ ) volatile
1122 { return fetch_and( __v__ ) & __v__; }
1124 unsigned long long operator |=( unsigned long long __v__ ) volatile
1125 { return fetch_or( __v__ ) | __v__; }
1127 unsigned long long operator ^=( unsigned long long __v__ ) volatile
1128 { return fetch_xor( __v__ ) ^ __v__; }
1130 friend void atomic_store_explicit( volatile atomic_ullong*, unsigned long long,
1132 friend unsigned long long atomic_load_explicit( volatile atomic_ullong*,
1134 friend unsigned long long atomic_swap_explicit( volatile atomic_ullong*,
1135 unsigned long long, memory_order );
1136 friend bool atomic_compare_swap_explicit( volatile atomic_ullong*,
1137 unsigned long long*, unsigned long long, memory_order, memory_order );
1138 friend void atomic_fence( const volatile atomic_ullong*, memory_order );
1139 friend unsigned long long atomic_fetch_add_explicit( volatile atomic_ullong*,
1140 unsigned long long, memory_order );
1141 friend unsigned long long atomic_fetch_sub_explicit( volatile atomic_ullong*,
1142 unsigned long long, memory_order );
1143 friend unsigned long long atomic_fetch_and_explicit( volatile atomic_ullong*,
1144 unsigned long long, memory_order );
1145 friend unsigned long long atomic_fetch_or_explicit( volatile atomic_ullong*,
1146 unsigned long long, memory_order );
1147 friend unsigned long long atomic_fetch_xor_explicit( volatile atomic_ullong*,
1148 unsigned long long, memory_order );
/* Representation; named __f__ as required by the _ATOMIC_* macros. */
1152 unsigned long long __f__;
/* <cstdint>-style aliases: least-width, fast, pointer-sized, size/ssize,
   ptrdiff and max-width atomic types mapped onto the fundamental atomic
   structs above.
   NOTE(review): the mappings assume char is 8 bits, short 16, int 32,
   long long 64, and that long is wide enough for intptr_t/ptrdiff_t --
   confirm for the target platform. */
1156 typedef atomic_schar atomic_int_least8_t;
1157 typedef atomic_uchar atomic_uint_least8_t;
1158 typedef atomic_short atomic_int_least16_t;
1159 typedef atomic_ushort atomic_uint_least16_t;
1160 typedef atomic_int atomic_int_least32_t;
1161 typedef atomic_uint atomic_uint_least32_t;
1162 typedef atomic_llong atomic_int_least64_t;
1163 typedef atomic_ullong atomic_uint_least64_t;
1165 typedef atomic_schar atomic_int_fast8_t;
1166 typedef atomic_uchar atomic_uint_fast8_t;
1167 typedef atomic_short atomic_int_fast16_t;
1168 typedef atomic_ushort atomic_uint_fast16_t;
1169 typedef atomic_int atomic_int_fast32_t;
1170 typedef atomic_uint atomic_uint_fast32_t;
1171 typedef atomic_llong atomic_int_fast64_t;
1172 typedef atomic_ullong atomic_uint_fast64_t;
1174 typedef atomic_long atomic_intptr_t;
1175 typedef atomic_ulong atomic_uintptr_t;
1177 typedef atomic_long atomic_ssize_t;
1178 typedef atomic_ulong atomic_size_t;
1180 typedef atomic_long atomic_ptrdiff_t;
1182 typedef atomic_llong atomic_intmax_t;
1183 typedef atomic_ullong atomic_uintmax_t;
/* Atomic wchar_t: same pattern as the integral atomic structs above --
   seq_cst defaults, compiled-out CPP0X declarations, compound assignment
   returning the post-operation value, and friendship for the C-style
   free-function API.  (Braces and the __f__ field are on elided lines.) */
1189 typedef struct atomic_wchar_t
1192 bool is_lock_free() const volatile;
1193 void store( wchar_t, memory_order = memory_order_seq_cst ) volatile;
1194 wchar_t load( memory_order = memory_order_seq_cst ) volatile;
1195 wchar_t swap( wchar_t,
1196 memory_order = memory_order_seq_cst ) volatile;
1197 bool compare_swap( wchar_t&, wchar_t,
1198 memory_order, memory_order ) volatile;
1199 bool compare_swap( wchar_t&, wchar_t,
1200 memory_order = memory_order_seq_cst ) volatile;
1201 void fence( memory_order ) const volatile;
1202 wchar_t fetch_add( wchar_t,
1203 memory_order = memory_order_seq_cst ) volatile;
1204 wchar_t fetch_sub( wchar_t,
1205 memory_order = memory_order_seq_cst ) volatile;
1206 wchar_t fetch_and( wchar_t,
1207 memory_order = memory_order_seq_cst ) volatile;
1208 wchar_t fetch_or( wchar_t,
1209 memory_order = memory_order_seq_cst ) volatile;
1210 wchar_t fetch_xor( wchar_t,
1211 memory_order = memory_order_seq_cst ) volatile;
1213 CPP0X( atomic_wchar_t() = default; )
1214 CPP0X( constexpr atomic_wchar_t( wchar_t __v__ ) : __f__( __v__) { } )
1215 CPP0X( atomic_wchar_t( const atomic_wchar_t& ) = delete; )
1216 atomic_wchar_t& operator =( const atomic_wchar_t& ) CPP0X(=delete);
1218 wchar_t operator =( wchar_t __v__ ) volatile
1219 { store( __v__ ); return __v__; }
1221 wchar_t operator ++( int ) volatile
1222 { return fetch_add( 1 ); }
1224 wchar_t operator --( int ) volatile
1225 { return fetch_sub( 1 ); }
1227 wchar_t operator ++() volatile
1228 { return fetch_add( 1 ) + 1; }
1230 wchar_t operator --() volatile
1231 { return fetch_sub( 1 ) - 1; }
1233 wchar_t operator +=( wchar_t __v__ ) volatile
1234 { return fetch_add( __v__ ) + __v__; }
1236 wchar_t operator -=( wchar_t __v__ ) volatile
1237 { return fetch_sub( __v__ ) - __v__; }
1239 wchar_t operator &=( wchar_t __v__ ) volatile
1240 { return fetch_and( __v__ ) & __v__; }
1242 wchar_t operator |=( wchar_t __v__ ) volatile
1243 { return fetch_or( __v__ ) | __v__; }
1245 wchar_t operator ^=( wchar_t __v__ ) volatile
1246 { return fetch_xor( __v__ ) ^ __v__; }
1248 friend void atomic_store_explicit( volatile atomic_wchar_t*, wchar_t,
1250 friend wchar_t atomic_load_explicit( volatile atomic_wchar_t*,
1252 friend wchar_t atomic_swap_explicit( volatile atomic_wchar_t*,
1253 wchar_t, memory_order );
1254 friend bool atomic_compare_swap_explicit( volatile atomic_wchar_t*,
1255 wchar_t*, wchar_t, memory_order, memory_order );
1256 friend void atomic_fence( const volatile atomic_wchar_t*, memory_order );
1257 friend wchar_t atomic_fetch_add_explicit( volatile atomic_wchar_t*,
1258 wchar_t, memory_order );
1259 friend wchar_t atomic_fetch_sub_explicit( volatile atomic_wchar_t*,
1260 wchar_t, memory_order );
1261 friend wchar_t atomic_fetch_and_explicit( volatile atomic_wchar_t*,
1262 wchar_t, memory_order );
1263 friend wchar_t atomic_fetch_or_explicit( volatile atomic_wchar_t*,
1264 wchar_t, memory_order );
1265 friend wchar_t atomic_fetch_xor_explicit( volatile atomic_wchar_t*,
1266 wchar_t, memory_order );
/* Character-type aliases: char16_t/char32_t map onto the least16/least32
   atomics.  NOTE(review): this span also typedefs atomic_wchar_t to
   atomic_int_least32_t, which would clash with the struct atomic_wchar_t
   defined earlier -- presumably the two definitions sit in different
   preprocessor branches (the guarding #if lines are elided from this
   excerpt); confirm the conditions. */
1276 typedef atomic_int_least16_t atomic_char16_t;
1277 typedef atomic_int_least32_t atomic_char32_t;
1278 typedef atomic_int_least32_t atomic_wchar_t;
/* Primary template atomic<T>: generic store/load/swap/compare_swap/fence
   interface for arbitrary T, defaulting to memory_order_seq_cst.  The
   CPP0X(...) declarations expand to nothing here.  (Opening brace and
   the __f__ member are on lines elided from this excerpt.) */
1285 template< typename T >
1290 bool is_lock_free() const volatile;
1291 void store( T, memory_order = memory_order_seq_cst ) volatile;
1292 T load( memory_order = memory_order_seq_cst ) volatile;
1293 T swap( T __v__, memory_order = memory_order_seq_cst ) volatile;
1294 bool compare_swap( T&, T, memory_order, memory_order ) volatile;
1295 bool compare_swap( T&, T, memory_order = memory_order_seq_cst ) volatile;
1296 void fence( memory_order ) const volatile;
1298 CPP0X( atomic() = default; )
1299 CPP0X( constexpr explicit atomic( T __v__ ) : __f__( __v__ ) { } )
1300 CPP0X( atomic( const atomic& ) = delete; )
1301 atomic& operator =( const atomic& ) CPP0X(=delete);
/* Assignment from T stores seq_cst and returns the stored value. */
1303 T operator =( T __v__ ) volatile
1304 { store( __v__ ); return __v__; }
/* Partial specialization for pointers: derives the machinery from
   atomic_address and re-types the interface as T*.  fetch_add/fetch_sub
   take element counts (ptrdiff_t).
   NOTE(review): the CPP0X constructor takes `T __v__` but forwards it to
   atomic_address -- for this T* specialization the parameter should
   presumably be `T*`; confirm against later drafts of the proposal. */
1315 template<typename T> struct atomic< T* > : atomic_address
1317 T* load( memory_order = memory_order_seq_cst ) volatile;
1318 T* swap( T*, memory_order = memory_order_seq_cst ) volatile;
1319 bool compare_swap( T*&, T*, memory_order, memory_order ) volatile;
1320 bool compare_swap( T*&, T*,
1321 memory_order = memory_order_seq_cst ) volatile;
1322 T* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
1323 T* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
1325 CPP0X( atomic() = default; )
1326 CPP0X( constexpr explicit atomic( T __v__ ) : atomic_address( __v__ ) { } )
1327 CPP0X( atomic( const atomic& ) = delete; )
1328 atomic& operator =( const atomic& ) CPP0X(=delete);
1330 T* operator =( T* __v__ ) volatile
1331 { store( __v__ ); return __v__; }
1333 T* operator ++( int ) volatile
1334 { return fetch_add( 1 ); }
1336 T* operator --( int ) volatile
1337 { return fetch_sub( 1 ); }
1339 T* operator ++() volatile
1340 { return fetch_add( 1 ) + 1; }
1342 T* operator --() volatile
1343 { return fetch_sub( 1 ) - 1; }
/* Advance/retreat the stored pointer by __v__ elements (seq_cst) and
   return the post-operation value, mirroring the integral atomics'
   compound assignment.
   FIX: the parameter type is corrected from T* to ptrdiff_t.  The
   original declaration could not work: fetch_add/fetch_sub (declared
   above) take ptrdiff_t, and `fetch_add(__v__) + __v__` with a T*
   argument would compute T* + T*, which is ill-formed.  ptrdiff_t
   matches the C++0x atomic<T*> interface. */
1345 T* operator +=( ptrdiff_t __v__ ) volatile
1346 { return fetch_add( __v__ ) + __v__; }
1348 T* operator -=( ptrdiff_t __v__ ) volatile
1349 { return fetch_sub( __v__ ) - __v__; }
/* Full specializations of atomic<> for the fundamental types.  Each one
   inherits the operations of the corresponding atomic_X base struct,
   re-exposes assignment from the underlying value (store seq_cst, return
   the stored value), and declares the C++0x special members -- which the
   empty CPP0X macro compiles out here.  (Each specialization's braces
   are on lines elided from this excerpt.) */
1357 template<> struct atomic< bool > : atomic_bool
1359 CPP0X( atomic() = default; )
1360 CPP0X( constexpr explicit atomic( bool __v__ )
1361 : atomic_bool( __v__ ) { } )
1362 CPP0X( atomic( const atomic& ) = delete; )
1363 atomic& operator =( const atomic& ) CPP0X(=delete);
1365 bool operator =( bool __v__ ) volatile
1366 { store( __v__ ); return __v__; }
1370 template<> struct atomic< void* > : atomic_address
1372 CPP0X( atomic() = default; )
1373 CPP0X( constexpr explicit atomic( void* __v__ )
1374 : atomic_address( __v__ ) { } )
1375 CPP0X( atomic( const atomic& ) = delete; )
1376 atomic& operator =( const atomic& ) CPP0X(=delete);
1378 void* operator =( void* __v__ ) volatile
1379 { store( __v__ ); return __v__; }
1383 template<> struct atomic< char > : atomic_char
1385 CPP0X( atomic() = default; )
1386 CPP0X( constexpr explicit atomic( char __v__ )
1387 : atomic_char( __v__ ) { } )
1388 CPP0X( atomic( const atomic& ) = delete; )
1389 atomic& operator =( const atomic& ) CPP0X(=delete);
1391 char operator =( char __v__ ) volatile
1392 { store( __v__ ); return __v__; }
1396 template<> struct atomic< signed char > : atomic_schar
1398 CPP0X( atomic() = default; )
1399 CPP0X( constexpr explicit atomic( signed char __v__ )
1400 : atomic_schar( __v__ ) { } )
1401 CPP0X( atomic( const atomic& ) = delete; )
1402 atomic& operator =( const atomic& ) CPP0X(=delete);
1404 signed char operator =( signed char __v__ ) volatile
1405 { store( __v__ ); return __v__; }
1409 template<> struct atomic< unsigned char > : atomic_uchar
1411 CPP0X( atomic() = default; )
1412 CPP0X( constexpr explicit atomic( unsigned char __v__ )
1413 : atomic_uchar( __v__ ) { } )
1414 CPP0X( atomic( const atomic& ) = delete; )
1415 atomic& operator =( const atomic& ) CPP0X(=delete);
1417 unsigned char operator =( unsigned char __v__ ) volatile
1418 { store( __v__ ); return __v__; }
1422 template<> struct atomic< short > : atomic_short
1424 CPP0X( atomic() = default; )
1425 CPP0X( constexpr explicit atomic( short __v__ )
1426 : atomic_short( __v__ ) { } )
1427 CPP0X( atomic( const atomic& ) = delete; )
1428 atomic& operator =( const atomic& ) CPP0X(=delete);
1430 short operator =( short __v__ ) volatile
1431 { store( __v__ ); return __v__; }
1435 template<> struct atomic< unsigned short > : atomic_ushort
1437 CPP0X( atomic() = default; )
1438 CPP0X( constexpr explicit atomic( unsigned short __v__ )
1439 : atomic_ushort( __v__ ) { } )
1440 CPP0X( atomic( const atomic& ) = delete; )
1441 atomic& operator =( const atomic& ) CPP0X(=delete);
1443 unsigned short operator =( unsigned short __v__ ) volatile
1444 { store( __v__ ); return __v__; }
1448 template<> struct atomic< int > : atomic_int
1450 CPP0X( atomic() = default; )
1451 CPP0X( constexpr explicit atomic( int __v__ )
1452 : atomic_int( __v__ ) { } )
1453 CPP0X( atomic( const atomic& ) = delete; )
1454 atomic& operator =( const atomic& ) CPP0X(=delete);
1456 int operator =( int __v__ ) volatile
1457 { store( __v__ ); return __v__; }
1461 template<> struct atomic< unsigned int > : atomic_uint
1463 CPP0X( atomic() = default; )
1464 CPP0X( constexpr explicit atomic( unsigned int __v__ )
1465 : atomic_uint( __v__ ) { } )
1466 CPP0X( atomic( const atomic& ) = delete; )
1467 atomic& operator =( const atomic& ) CPP0X(=delete);
1469 unsigned int operator =( unsigned int __v__ ) volatile
1470 { store( __v__ ); return __v__; }
1474 template<> struct atomic< long > : atomic_long
1476 CPP0X( atomic() = default; )
1477 CPP0X( constexpr explicit atomic( long __v__ )
1478 : atomic_long( __v__ ) { } )
1479 CPP0X( atomic( const atomic& ) = delete; )
1480 atomic& operator =( const atomic& ) CPP0X(=delete);
1482 long operator =( long __v__ ) volatile
1483 { store( __v__ ); return __v__; }
1487 template<> struct atomic< unsigned long > : atomic_ulong
1489 CPP0X( atomic() = default; )
1490 CPP0X( constexpr explicit atomic( unsigned long __v__ )
1491 : atomic_ulong( __v__ ) { } )
1492 CPP0X( atomic( const atomic& ) = delete; )
1493 atomic& operator =( const atomic& ) CPP0X(=delete);
1495 unsigned long operator =( unsigned long __v__ ) volatile
1496 { store( __v__ ); return __v__; }
1500 template<> struct atomic< long long > : atomic_llong
1502 CPP0X( atomic() = default; )
1503 CPP0X( constexpr explicit atomic( long long __v__ )
1504 : atomic_llong( __v__ ) { } )
1505 CPP0X( atomic( const atomic& ) = delete; )
1506 atomic& operator =( const atomic& ) CPP0X(=delete);
1508 long long operator =( long long __v__ ) volatile
1509 { store( __v__ ); return __v__; }
1513 template<> struct atomic< unsigned long long > : atomic_ullong
1515 CPP0X( atomic() = default; )
1516 CPP0X( constexpr explicit atomic( unsigned long long __v__ )
1517 : atomic_ullong( __v__ ) { } )
1518 CPP0X( atomic( const atomic& ) = delete; )
1519 atomic& operator =( const atomic& ) CPP0X(=delete);
1521 unsigned long long operator =( unsigned long long __v__ ) volatile
1522 { store( __v__ ); return __v__; }
1526 template<> struct atomic< wchar_t > : atomic_wchar_t
1528 CPP0X( atomic() = default; )
1529 CPP0X( constexpr explicit atomic( wchar_t __v__ )
1530 : atomic_wchar_t( __v__ ) { } )
1531 CPP0X( atomic( const atomic& ) = delete; )
1532 atomic& operator =( const atomic& ) CPP0X(=delete);
1534 wchar_t operator =( wchar_t __v__ ) volatile
1535 { store( __v__ ); return __v__; }
/* C-style free functions for atomic_bool.  The *_explicit forms expand
   the _ATOMIC_* macros directly on the object's __f__ field; the
   non-explicit forms delegate with memory_order_seq_cst.
   NOTE: atomic_compare_swap_explicit receives a failure ordering __y__
   but never uses it -- _ATOMIC_CMPSWP_ is invoked with only __x__, so
   the failure ordering is ignored by this example implementation.
   (atomic_is_lock_free's body is on a line elided from this excerpt.) */
1545 inline bool atomic_is_lock_free
1546 ( const volatile atomic_bool* __a__ )
1549 inline bool atomic_load_explicit
1550 ( volatile atomic_bool* __a__, memory_order __x__ )
1551 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1553 inline bool atomic_load
1554 ( volatile atomic_bool* __a__ ) { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1556 inline void atomic_store_explicit
1557 ( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
1558 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1560 inline void atomic_store
1561 ( volatile atomic_bool* __a__, bool __m__ )
1562 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
/* swap is expressed as a read-modify-write with plain assignment. */
1564 inline bool atomic_swap_explicit
1565 ( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
1566 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1568 inline bool atomic_swap
1569 ( volatile atomic_bool* __a__, bool __m__ )
1570 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
1572 inline bool atomic_compare_swap_explicit
1573 ( volatile atomic_bool* __a__, bool* __e__, bool __m__,
1574 memory_order __x__, memory_order __y__ )
1575 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1577 inline bool atomic_compare_swap
1578 ( volatile atomic_bool* __a__, bool* __e__, bool __m__ )
1579 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1580 memory_order_seq_cst, memory_order_seq_cst ); }
1582 inline void atomic_fence
1583 ( const volatile atomic_bool* __a__, memory_order __x__ )
1584 { _ATOMIC_FENCE_( __a__, __x__ ); }
/* C-style free functions for atomic_address (void* payload).  Same
   pattern as the atomic_bool group: *_explicit forms expand the
   _ATOMIC_* macros; non-explicit forms supply memory_order_seq_cst.
   (atomic_is_lock_free's body is on a line elided from this excerpt.) */
1587 inline bool atomic_is_lock_free( const volatile atomic_address* __a__ )
1590 inline void* atomic_load_explicit
1591 ( volatile atomic_address* __a__, memory_order __x__ )
1592 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1594 inline void* atomic_load( volatile atomic_address* __a__ )
1595 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1597 inline void atomic_store_explicit
1598 ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
1599 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1601 inline void atomic_store
1602 ( volatile atomic_address* __a__, void* __m__ )
1603 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1605 inline void* atomic_swap_explicit
1606 ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
1607 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1609 inline void* atomic_swap
1610 ( volatile atomic_address* __a__, void* __m__ )
1611 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
1613 inline bool atomic_compare_swap_explicit
1614 ( volatile atomic_address* __a__, void** __e__, void* __m__,
1615 memory_order __x__, memory_order __y__ )
1616 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1618 inline bool atomic_compare_swap
1619 ( volatile atomic_address* __a__, void** __e__, void* __m__ )
1620 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1621 memory_order_seq_cst, memory_order_seq_cst ); }
1623 inline void atomic_fence
1624 ( const volatile atomic_address* __a__, memory_order __x__ )
1625 { _ATOMIC_FENCE_( __a__, __x__ ); }
/* C-style free functions for atomic_char: load/store/swap/CAS/fence,
   following the same macro-expansion pattern as the groups above. */
1628 inline bool atomic_is_lock_free( const volatile atomic_char* __a__ )
1631 inline char atomic_load_explicit
1632 ( volatile atomic_char* __a__, memory_order __x__ )
1633 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1635 inline char atomic_load( volatile atomic_char* __a__ )
1636 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1638 inline void atomic_store_explicit
1639 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
1640 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1642 inline void atomic_store
1643 ( volatile atomic_char* __a__, char __m__ )
1644 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1646 inline char atomic_swap_explicit
1647 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
1648 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1650 inline char atomic_swap
1651 ( volatile atomic_char* __a__, char __m__ )
1652 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
1654 inline bool atomic_compare_swap_explicit
1655 ( volatile atomic_char* __a__, char* __e__, char __m__,
1656 memory_order __x__, memory_order __y__ )
1657 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1659 inline bool atomic_compare_swap
1660 ( volatile atomic_char* __a__, char* __e__, char __m__ )
1661 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1662 memory_order_seq_cst, memory_order_seq_cst ); }
1664 inline void atomic_fence
1665 ( const volatile atomic_char* __a__, memory_order __x__ )
1666 { _ATOMIC_FENCE_( __a__, __x__ ); }
/* C-style free functions for atomic_schar (signed char payload); same
   macro-expansion pattern as the preceding groups. */
1669 inline bool atomic_is_lock_free( const volatile atomic_schar* __a__ )
1672 inline signed char atomic_load_explicit
1673 ( volatile atomic_schar* __a__, memory_order __x__ )
1674 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1676 inline signed char atomic_load( volatile atomic_schar* __a__ )
1677 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1679 inline void atomic_store_explicit
1680 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
1681 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1683 inline void atomic_store
1684 ( volatile atomic_schar* __a__, signed char __m__ )
1685 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1687 inline signed char atomic_swap_explicit
1688 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
1689 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1691 inline signed char atomic_swap
1692 ( volatile atomic_schar* __a__, signed char __m__ )
1693 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
1695 inline bool atomic_compare_swap_explicit
1696 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__,
1697 memory_order __x__, memory_order __y__ )
1698 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1700 inline bool atomic_compare_swap
1701 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__ )
1702 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1703 memory_order_seq_cst, memory_order_seq_cst ); }
1705 inline void atomic_fence
1706 ( const volatile atomic_schar* __a__, memory_order __x__ )
1707 { _ATOMIC_FENCE_( __a__, __x__ ); }
/* C-style free functions for atomic_uchar (unsigned char payload); same
   macro-expansion pattern as the preceding groups. */
1710 inline bool atomic_is_lock_free( const volatile atomic_uchar* __a__ )
1713 inline unsigned char atomic_load_explicit
1714 ( volatile atomic_uchar* __a__, memory_order __x__ )
1715 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1717 inline unsigned char atomic_load( volatile atomic_uchar* __a__ )
1718 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1720 inline void atomic_store_explicit
1721 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
1722 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1724 inline void atomic_store
1725 ( volatile atomic_uchar* __a__, unsigned char __m__ )
1726 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1728 inline unsigned char atomic_swap_explicit
1729 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
1730 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1732 inline unsigned char atomic_swap
1733 ( volatile atomic_uchar* __a__, unsigned char __m__ )
1734 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
1736 inline bool atomic_compare_swap_explicit
1737 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__,
1738 memory_order __x__, memory_order __y__ )
1739 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1741 inline bool atomic_compare_swap
1742 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__ )
1743 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1744 memory_order_seq_cst, memory_order_seq_cst ); }
1746 inline void atomic_fence
1747 ( const volatile atomic_uchar* __a__, memory_order __x__ )
1748 { _ATOMIC_FENCE_( __a__, __x__ ); }
/* C-style free functions for atomic_short; same macro-expansion pattern
   as the preceding groups. */
1751 inline bool atomic_is_lock_free( const volatile atomic_short* __a__ )
1754 inline short atomic_load_explicit
1755 ( volatile atomic_short* __a__, memory_order __x__ )
1756 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1758 inline short atomic_load( volatile atomic_short* __a__ )
1759 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1761 inline void atomic_store_explicit
1762 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
1763 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1765 inline void atomic_store
1766 ( volatile atomic_short* __a__, short __m__ )
1767 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1769 inline short atomic_swap_explicit
1770 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
1771 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1773 inline short atomic_swap
1774 ( volatile atomic_short* __a__, short __m__ )
1775 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
1777 inline bool atomic_compare_swap_explicit
1778 ( volatile atomic_short* __a__, short* __e__, short __m__,
1779 memory_order __x__, memory_order __y__ )
1780 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1782 inline bool atomic_compare_swap
1783 ( volatile atomic_short* __a__, short* __e__, short __m__ )
1784 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1785 memory_order_seq_cst, memory_order_seq_cst ); }
1787 inline void atomic_fence
1788 ( const volatile atomic_short* __a__, memory_order __x__ )
1789 { _ATOMIC_FENCE_( __a__, __x__ ); }
/* C-style free functions for atomic_ushort; same macro-expansion pattern
   as the preceding groups. */
1792 inline bool atomic_is_lock_free( const volatile atomic_ushort* __a__ )
1795 inline unsigned short atomic_load_explicit
1796 ( volatile atomic_ushort* __a__, memory_order __x__ )
1797 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1799 inline unsigned short atomic_load( volatile atomic_ushort* __a__ )
1800 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1802 inline void atomic_store_explicit
1803 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
1804 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1806 inline void atomic_store
1807 ( volatile atomic_ushort* __a__, unsigned short __m__ )
1808 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1810 inline unsigned short atomic_swap_explicit
1811 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
1812 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1814 inline unsigned short atomic_swap
1815 ( volatile atomic_ushort* __a__, unsigned short __m__ )
1816 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
1818 inline bool atomic_compare_swap_explicit
1819 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__,
1820 memory_order __x__, memory_order __y__ )
1821 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1823 inline bool atomic_compare_swap
1824 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__ )
1825 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1826 memory_order_seq_cst, memory_order_seq_cst ); }
1828 inline void atomic_fence
1829 ( const volatile atomic_ushort* __a__, memory_order __x__ )
1830 { _ATOMIC_FENCE_( __a__, __x__ ); }
/* C-style free functions for atomic_int; same macro-expansion pattern
   as the preceding groups. */
1833 inline bool atomic_is_lock_free( const volatile atomic_int* __a__ )
1836 inline int atomic_load_explicit
1837 ( volatile atomic_int* __a__, memory_order __x__ )
1838 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1840 inline int atomic_load( volatile atomic_int* __a__ )
1841 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1843 inline void atomic_store_explicit
1844 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
1845 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1847 inline void atomic_store
1848 ( volatile atomic_int* __a__, int __m__ )
1849 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1851 inline int atomic_swap_explicit
1852 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
1853 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1855 inline int atomic_swap
1856 ( volatile atomic_int* __a__, int __m__ )
1857 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
1859 inline bool atomic_compare_swap_explicit
1860 ( volatile atomic_int* __a__, int* __e__, int __m__,
1861 memory_order __x__, memory_order __y__ )
1862 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1864 inline bool atomic_compare_swap
1865 ( volatile atomic_int* __a__, int* __e__, int __m__ )
1866 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1867 memory_order_seq_cst, memory_order_seq_cst ); }
1869 inline void atomic_fence
1870 ( const volatile atomic_int* __a__, memory_order __x__ )
1871 { _ATOMIC_FENCE_( __a__, __x__ ); }
// atomic_uint: load / store / swap / compare-swap / fence.
// Same pattern as atomic_int above: plain forms delegate with
// memory_order_seq_cst; __y__ (failure ordering) is ignored by the
// _ATOMIC_CMPSWP_ call.
// NOTE(review): the body of atomic_is_lock_free appears to have been
// lost from this listing -- verify against the original source.
1874 inline bool atomic_is_lock_free( const volatile atomic_uint* __a__ )
1877 inline unsigned int atomic_load_explicit
1878 ( volatile atomic_uint* __a__, memory_order __x__ )
1879 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1881 inline unsigned int atomic_load( volatile atomic_uint* __a__ )
1882 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1884 inline void atomic_store_explicit
1885 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
1886 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1888 inline void atomic_store
1889 ( volatile atomic_uint* __a__, unsigned int __m__ )
1890 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1892 inline unsigned int atomic_swap_explicit
1893 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
1894 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1896 inline unsigned int atomic_swap
1897 ( volatile atomic_uint* __a__, unsigned int __m__ )
1898 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
1900 inline bool atomic_compare_swap_explicit
1901 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__,
1902 memory_order __x__, memory_order __y__ )
1903 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1905 inline bool atomic_compare_swap
1906 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__ )
1907 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1908 memory_order_seq_cst, memory_order_seq_cst ); }
1910 inline void atomic_fence
1911 ( const volatile atomic_uint* __a__, memory_order __x__ )
1912 { _ATOMIC_FENCE_( __a__, __x__ ); }
// atomic_long: load / store / swap / compare-swap / fence.
// Same pattern as the preceding integral types: plain forms delegate
// with memory_order_seq_cst; __y__ is ignored by _ATOMIC_CMPSWP_.
// NOTE(review): the body of atomic_is_lock_free appears to have been
// lost from this listing -- verify against the original source.
1915 inline bool atomic_is_lock_free( const volatile atomic_long* __a__ )
1918 inline long atomic_load_explicit
1919 ( volatile atomic_long* __a__, memory_order __x__ )
1920 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1922 inline long atomic_load( volatile atomic_long* __a__ )
1923 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1925 inline void atomic_store_explicit
1926 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
1927 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1929 inline void atomic_store
1930 ( volatile atomic_long* __a__, long __m__ )
1931 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1933 inline long atomic_swap_explicit
1934 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
1935 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1937 inline long atomic_swap
1938 ( volatile atomic_long* __a__, long __m__ )
1939 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
1941 inline bool atomic_compare_swap_explicit
1942 ( volatile atomic_long* __a__, long* __e__, long __m__,
1943 memory_order __x__, memory_order __y__ )
1944 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1946 inline bool atomic_compare_swap
1947 ( volatile atomic_long* __a__, long* __e__, long __m__ )
1948 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1949 memory_order_seq_cst, memory_order_seq_cst ); }
1951 inline void atomic_fence
1952 ( const volatile atomic_long* __a__, memory_order __x__ )
1953 { _ATOMIC_FENCE_( __a__, __x__ ); }
// atomic_ulong: load / store / swap / compare-swap / fence.
// Same pattern as the preceding integral types: plain forms delegate
// with memory_order_seq_cst; __y__ is ignored by _ATOMIC_CMPSWP_.
// NOTE(review): the body of atomic_is_lock_free appears to have been
// lost from this listing -- verify against the original source.
1956 inline bool atomic_is_lock_free( const volatile atomic_ulong* __a__ )
1959 inline unsigned long atomic_load_explicit
1960 ( volatile atomic_ulong* __a__, memory_order __x__ )
1961 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1963 inline unsigned long atomic_load( volatile atomic_ulong* __a__ )
1964 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1966 inline void atomic_store_explicit
1967 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
1968 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1970 inline void atomic_store
1971 ( volatile atomic_ulong* __a__, unsigned long __m__ )
1972 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1974 inline unsigned long atomic_swap_explicit
1975 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
1976 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1978 inline unsigned long atomic_swap
1979 ( volatile atomic_ulong* __a__, unsigned long __m__ )
1980 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
1982 inline bool atomic_compare_swap_explicit
1983 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__,
1984 memory_order __x__, memory_order __y__ )
1985 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1987 inline bool atomic_compare_swap
1988 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__ )
1989 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1990 memory_order_seq_cst, memory_order_seq_cst ); }
1992 inline void atomic_fence
1993 ( const volatile atomic_ulong* __a__, memory_order __x__ )
1994 { _ATOMIC_FENCE_( __a__, __x__ ); }
// atomic_llong: load / store / swap / compare-swap / fence.
// Same pattern as the preceding integral types: plain forms delegate
// with memory_order_seq_cst; __y__ is ignored by _ATOMIC_CMPSWP_.
// NOTE(review): the body of atomic_is_lock_free appears to have been
// lost from this listing -- verify against the original source.
1997 inline bool atomic_is_lock_free( const volatile atomic_llong* __a__ )
2000 inline long long atomic_load_explicit
2001 ( volatile atomic_llong* __a__, memory_order __x__ )
2002 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2004 inline long long atomic_load( volatile atomic_llong* __a__ )
2005 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2007 inline void atomic_store_explicit
2008 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2009 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2011 inline void atomic_store
2012 ( volatile atomic_llong* __a__, long long __m__ )
2013 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2015 inline long long atomic_swap_explicit
2016 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2017 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2019 inline long long atomic_swap
2020 ( volatile atomic_llong* __a__, long long __m__ )
2021 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
2023 inline bool atomic_compare_swap_explicit
2024 ( volatile atomic_llong* __a__, long long* __e__, long long __m__,
2025 memory_order __x__, memory_order __y__ )
2026 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2028 inline bool atomic_compare_swap
2029 ( volatile atomic_llong* __a__, long long* __e__, long long __m__ )
2030 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
2031 memory_order_seq_cst, memory_order_seq_cst ); }
2033 inline void atomic_fence
2034 ( const volatile atomic_llong* __a__, memory_order __x__ )
2035 { _ATOMIC_FENCE_( __a__, __x__ ); }
// atomic_ullong: load / store / swap / compare-swap / fence.
// Same pattern as the preceding integral types: plain forms delegate
// with memory_order_seq_cst; __y__ is ignored by _ATOMIC_CMPSWP_.
// NOTE(review): the body of atomic_is_lock_free appears to have been
// lost from this listing -- verify against the original source.
2038 inline bool atomic_is_lock_free( const volatile atomic_ullong* __a__ )
2041 inline unsigned long long atomic_load_explicit
2042 ( volatile atomic_ullong* __a__, memory_order __x__ )
2043 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2045 inline unsigned long long atomic_load( volatile atomic_ullong* __a__ )
2046 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2048 inline void atomic_store_explicit
2049 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2050 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2052 inline void atomic_store
2053 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2054 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2056 inline unsigned long long atomic_swap_explicit
2057 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2058 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2060 inline unsigned long long atomic_swap
2061 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2062 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
2064 inline bool atomic_compare_swap_explicit
2065 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__,
2066 memory_order __x__, memory_order __y__ )
2067 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2069 inline bool atomic_compare_swap
2070 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__ )
2071 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
2072 memory_order_seq_cst, memory_order_seq_cst ); }
2074 inline void atomic_fence
2075 ( const volatile atomic_ullong* __a__, memory_order __x__ )
2076 { _ATOMIC_FENCE_( __a__, __x__ ); }
// atomic_wchar_t: load / store / swap / compare-swap / fence.
// Same pattern as the preceding integral types: plain forms delegate
// with memory_order_seq_cst; __y__ is ignored by _ATOMIC_CMPSWP_.
// NOTE(review): the body of atomic_is_lock_free appears to have been
// lost from this listing -- verify against the original source.
2079 inline bool atomic_is_lock_free( const volatile atomic_wchar_t* __a__ )
2082 inline wchar_t atomic_load_explicit
2083 ( volatile atomic_wchar_t* __a__, memory_order __x__ )
2084 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2086 inline wchar_t atomic_load( volatile atomic_wchar_t* __a__ )
2087 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2089 inline void atomic_store_explicit
2090 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2091 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2093 inline void atomic_store
2094 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2095 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2097 inline wchar_t atomic_swap_explicit
2098 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2099 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2101 inline wchar_t atomic_swap
2102 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2103 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
2105 inline bool atomic_compare_swap_explicit
2106 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__,
2107 memory_order __x__, memory_order __y__ )
2108 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2110 inline bool atomic_compare_swap
2111 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__ )
2112 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
2113 memory_order_seq_cst, memory_order_seq_cst ); }
2115 inline void atomic_fence
2116 ( const volatile atomic_wchar_t* __a__, memory_order __x__ )
2117 { _ATOMIC_FENCE_( __a__, __x__ ); }
// atomic_address: fetch_add.  Pointer arithmetic is performed in bytes
// via a char* cast; the read-modify-write is expressed to the model
// checker as a model_rmwr_action (the read half) followed by a
// model_rmw_action (the write half) on the same location.
// NOTE(review): the opening '{' and the trailing 'return __r__;' / '}'
// lines of the _explicit body appear to have been lost from this
// listing (listing lines 2122 and 2126-2127 absent) -- verify against
// the original source.
2120 inline void* atomic_fetch_add_explicit
2121 ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
2123 void* volatile* __p__ = &((__a__)->__f__);
2124 void* __r__ = (void *) model_rmwr_action((void *)__p__, __x__);
2125 model_rmw_action((void *)__p__, __x__, (uint64_t) ((char*)(*__p__) + __m__));
2128 inline void* atomic_fetch_add
2129 ( volatile atomic_address* __a__, ptrdiff_t __m__ )
2130 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
// atomic_address: fetch_sub.  Mirror image of fetch_add above:
// byte-granular pointer arithmetic, split into model_rmwr_action /
// model_rmw_action for the model checker.
// NOTE(review): as with fetch_add_explicit, the '{' and the
// 'return __r__;' / '}' lines appear to have been lost from this
// listing -- verify against the original source.
2133 inline void* atomic_fetch_sub_explicit
2134 ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
2136 void* volatile* __p__ = &((__a__)->__f__);
2137 void* __r__ = (void *) model_rmwr_action((void *)__p__, __x__);
2138 model_rmw_action((void *)__p__, __x__, (uint64_t)((char*)(*__p__) - __m__));
2141 inline void* atomic_fetch_sub
2142 ( volatile atomic_address* __a__, ptrdiff_t __m__ )
2143 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
// atomic_char: fetch-and-modify operations.  Each maps onto
// _ATOMIC_MODIFY_ with the matching compound assignment (+=, -=, &=,
// |=, ^=); per the fetch_* naming these presumably yield the value held
// before the modification (defined by _ATOMIC_MODIFY_ earlier in the
// file -- confirm).  Plain forms use memory_order_seq_cst.
2145 inline char atomic_fetch_add_explicit
2146 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2147 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2149 inline char atomic_fetch_add
2150 ( volatile atomic_char* __a__, char __m__ )
2151 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2154 inline char atomic_fetch_sub_explicit
2155 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2156 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2158 inline char atomic_fetch_sub
2159 ( volatile atomic_char* __a__, char __m__ )
2160 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2163 inline char atomic_fetch_and_explicit
2164 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2165 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2167 inline char atomic_fetch_and
2168 ( volatile atomic_char* __a__, char __m__ )
2169 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2172 inline char atomic_fetch_or_explicit
2173 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2174 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2176 inline char atomic_fetch_or
2177 ( volatile atomic_char* __a__, char __m__ )
2178 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2181 inline char atomic_fetch_xor_explicit
2182 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2183 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2185 inline char atomic_fetch_xor
2186 ( volatile atomic_char* __a__, char __m__ )
2187 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
// atomic_schar: fetch-and-modify operations (same pattern as
// atomic_char: _ATOMIC_MODIFY_ with +=, -=, &=, |=, ^=; plain forms
// use memory_order_seq_cst).
2190 inline signed char atomic_fetch_add_explicit
2191 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2192 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2194 inline signed char atomic_fetch_add
2195 ( volatile atomic_schar* __a__, signed char __m__ )
2196 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2199 inline signed char atomic_fetch_sub_explicit
2200 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2201 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2203 inline signed char atomic_fetch_sub
2204 ( volatile atomic_schar* __a__, signed char __m__ )
2205 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2208 inline signed char atomic_fetch_and_explicit
2209 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2210 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2212 inline signed char atomic_fetch_and
2213 ( volatile atomic_schar* __a__, signed char __m__ )
2214 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2217 inline signed char atomic_fetch_or_explicit
2218 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2219 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2221 inline signed char atomic_fetch_or
2222 ( volatile atomic_schar* __a__, signed char __m__ )
2223 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2226 inline signed char atomic_fetch_xor_explicit
2227 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2228 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2230 inline signed char atomic_fetch_xor
2231 ( volatile atomic_schar* __a__, signed char __m__ )
2232 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
// atomic_uchar: fetch-and-modify operations (same pattern:
// _ATOMIC_MODIFY_ with +=, -=, &=, |=, ^=; plain forms use
// memory_order_seq_cst).
2235 inline unsigned char atomic_fetch_add_explicit
2236 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2237 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2239 inline unsigned char atomic_fetch_add
2240 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2241 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2244 inline unsigned char atomic_fetch_sub_explicit
2245 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2246 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2248 inline unsigned char atomic_fetch_sub
2249 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2250 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2253 inline unsigned char atomic_fetch_and_explicit
2254 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2255 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2257 inline unsigned char atomic_fetch_and
2258 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2259 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2262 inline unsigned char atomic_fetch_or_explicit
2263 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2264 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2266 inline unsigned char atomic_fetch_or
2267 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2268 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2271 inline unsigned char atomic_fetch_xor_explicit
2272 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2273 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2275 inline unsigned char atomic_fetch_xor
2276 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2277 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
// atomic_short: fetch-and-modify operations (same pattern:
// _ATOMIC_MODIFY_ with +=, -=, &=, |=, ^=; plain forms use
// memory_order_seq_cst).
2280 inline short atomic_fetch_add_explicit
2281 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2282 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2284 inline short atomic_fetch_add
2285 ( volatile atomic_short* __a__, short __m__ )
2286 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2289 inline short atomic_fetch_sub_explicit
2290 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2291 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2293 inline short atomic_fetch_sub
2294 ( volatile atomic_short* __a__, short __m__ )
2295 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2298 inline short atomic_fetch_and_explicit
2299 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2300 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2302 inline short atomic_fetch_and
2303 ( volatile atomic_short* __a__, short __m__ )
2304 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2307 inline short atomic_fetch_or_explicit
2308 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2309 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2311 inline short atomic_fetch_or
2312 ( volatile atomic_short* __a__, short __m__ )
2313 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2316 inline short atomic_fetch_xor_explicit
2317 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2318 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2320 inline short atomic_fetch_xor
2321 ( volatile atomic_short* __a__, short __m__ )
2322 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
// atomic_ushort: fetch-and-modify operations (same pattern:
// _ATOMIC_MODIFY_ with +=, -=, &=, |=, ^=; plain forms use
// memory_order_seq_cst).
2325 inline unsigned short atomic_fetch_add_explicit
2326 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2327 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2329 inline unsigned short atomic_fetch_add
2330 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2331 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2334 inline unsigned short atomic_fetch_sub_explicit
2335 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2336 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2338 inline unsigned short atomic_fetch_sub
2339 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2340 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2343 inline unsigned short atomic_fetch_and_explicit
2344 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2345 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2347 inline unsigned short atomic_fetch_and
2348 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2349 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2352 inline unsigned short atomic_fetch_or_explicit
2353 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2354 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2356 inline unsigned short atomic_fetch_or
2357 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2358 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2361 inline unsigned short atomic_fetch_xor_explicit
2362 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2363 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2365 inline unsigned short atomic_fetch_xor
2366 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2367 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
// atomic_int: fetch-and-modify operations (same pattern:
// _ATOMIC_MODIFY_ with +=, -=, &=, |=, ^=; plain forms use
// memory_order_seq_cst).
2370 inline int atomic_fetch_add_explicit
2371 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2372 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2374 inline int atomic_fetch_add
2375 ( volatile atomic_int* __a__, int __m__ )
2376 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2379 inline int atomic_fetch_sub_explicit
2380 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2381 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2383 inline int atomic_fetch_sub
2384 ( volatile atomic_int* __a__, int __m__ )
2385 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2388 inline int atomic_fetch_and_explicit
2389 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2390 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2392 inline int atomic_fetch_and
2393 ( volatile atomic_int* __a__, int __m__ )
2394 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2397 inline int atomic_fetch_or_explicit
2398 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2399 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2401 inline int atomic_fetch_or
2402 ( volatile atomic_int* __a__, int __m__ )
2403 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2406 inline int atomic_fetch_xor_explicit
2407 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2408 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2410 inline int atomic_fetch_xor
2411 ( volatile atomic_int* __a__, int __m__ )
2412 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
// atomic_uint: fetch-and-modify operations (same pattern:
// _ATOMIC_MODIFY_ with +=, -=, &=, |=, ^=; plain forms use
// memory_order_seq_cst).
2415 inline unsigned int atomic_fetch_add_explicit
2416 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2417 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2419 inline unsigned int atomic_fetch_add
2420 ( volatile atomic_uint* __a__, unsigned int __m__ )
2421 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2424 inline unsigned int atomic_fetch_sub_explicit
2425 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2426 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2428 inline unsigned int atomic_fetch_sub
2429 ( volatile atomic_uint* __a__, unsigned int __m__ )
2430 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2433 inline unsigned int atomic_fetch_and_explicit
2434 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2435 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2437 inline unsigned int atomic_fetch_and
2438 ( volatile atomic_uint* __a__, unsigned int __m__ )
2439 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2442 inline unsigned int atomic_fetch_or_explicit
2443 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2444 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2446 inline unsigned int atomic_fetch_or
2447 ( volatile atomic_uint* __a__, unsigned int __m__ )
2448 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2451 inline unsigned int atomic_fetch_xor_explicit
2452 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2453 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2455 inline unsigned int atomic_fetch_xor
2456 ( volatile atomic_uint* __a__, unsigned int __m__ )
2457 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
// atomic_long: fetch-and-modify operations (same pattern:
// _ATOMIC_MODIFY_ with +=, -=, &=, |=, ^=; plain forms use
// memory_order_seq_cst).
2460 inline long atomic_fetch_add_explicit
2461 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2462 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2464 inline long atomic_fetch_add
2465 ( volatile atomic_long* __a__, long __m__ )
2466 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2469 inline long atomic_fetch_sub_explicit
2470 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2471 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2473 inline long atomic_fetch_sub
2474 ( volatile atomic_long* __a__, long __m__ )
2475 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2478 inline long atomic_fetch_and_explicit
2479 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2480 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2482 inline long atomic_fetch_and
2483 ( volatile atomic_long* __a__, long __m__ )
2484 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2487 inline long atomic_fetch_or_explicit
2488 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2489 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2491 inline long atomic_fetch_or
2492 ( volatile atomic_long* __a__, long __m__ )
2493 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2496 inline long atomic_fetch_xor_explicit
2497 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2498 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2500 inline long atomic_fetch_xor
2501 ( volatile atomic_long* __a__, long __m__ )
2502 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
// atomic_ulong: fetch-and-modify operations (same pattern:
// _ATOMIC_MODIFY_ with +=, -=, &=, |=, ^=; plain forms use
// memory_order_seq_cst).
2505 inline unsigned long atomic_fetch_add_explicit
2506 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2507 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2509 inline unsigned long atomic_fetch_add
2510 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2511 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2514 inline unsigned long atomic_fetch_sub_explicit
2515 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2516 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2518 inline unsigned long atomic_fetch_sub
2519 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2520 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2523 inline unsigned long atomic_fetch_and_explicit
2524 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2525 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2527 inline unsigned long atomic_fetch_and
2528 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2529 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2532 inline unsigned long atomic_fetch_or_explicit
2533 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2534 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2536 inline unsigned long atomic_fetch_or
2537 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2538 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2541 inline unsigned long atomic_fetch_xor_explicit
2542 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2543 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2545 inline unsigned long atomic_fetch_xor
2546 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2547 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
// atomic_llong: fetch-and-modify operations (same pattern:
// _ATOMIC_MODIFY_ with +=, -=, &=, |=, ^=; plain forms use
// memory_order_seq_cst).
2550 inline long long atomic_fetch_add_explicit
2551 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2552 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2554 inline long long atomic_fetch_add
2555 ( volatile atomic_llong* __a__, long long __m__ )
2556 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2559 inline long long atomic_fetch_sub_explicit
2560 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2561 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2563 inline long long atomic_fetch_sub
2564 ( volatile atomic_llong* __a__, long long __m__ )
2565 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2568 inline long long atomic_fetch_and_explicit
2569 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2570 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2572 inline long long atomic_fetch_and
2573 ( volatile atomic_llong* __a__, long long __m__ )
2574 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2577 inline long long atomic_fetch_or_explicit
2578 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2579 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2581 inline long long atomic_fetch_or
2582 ( volatile atomic_llong* __a__, long long __m__ )
2583 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2586 inline long long atomic_fetch_xor_explicit
2587 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2588 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2590 inline long long atomic_fetch_xor
2591 ( volatile atomic_llong* __a__, long long __m__ )
2592 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
// atomic_ullong: fetch-and-modify operations (same pattern:
// _ATOMIC_MODIFY_ with +=, -=, &=, |=, ^=; plain forms use
// memory_order_seq_cst).
2595 inline unsigned long long atomic_fetch_add_explicit
2596 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2597 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2599 inline unsigned long long atomic_fetch_add
2600 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2601 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2604 inline unsigned long long atomic_fetch_sub_explicit
2605 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2606 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2608 inline unsigned long long atomic_fetch_sub
2609 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2610 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2613 inline unsigned long long atomic_fetch_and_explicit
2614 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2615 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2617 inline unsigned long long atomic_fetch_and
2618 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2619 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2622 inline unsigned long long atomic_fetch_or_explicit
2623 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2624 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2626 inline unsigned long long atomic_fetch_or
2627 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2628 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2631 inline unsigned long long atomic_fetch_xor_explicit
2632 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2633 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2635 inline unsigned long long atomic_fetch_xor
2636 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2637 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
// atomic_wchar_t: fetch-and-modify operations (same pattern:
// _ATOMIC_MODIFY_ with +=, -=, &=, |=, ^=; plain forms use
// memory_order_seq_cst).
2640 inline wchar_t atomic_fetch_add_explicit
2641 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2642 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2644 inline wchar_t atomic_fetch_add
2645 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2646 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2649 inline wchar_t atomic_fetch_sub_explicit
2650 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2651 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2653 inline wchar_t atomic_fetch_sub
2654 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2655 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2658 inline wchar_t atomic_fetch_and_explicit
2659 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2660 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2662 inline wchar_t atomic_fetch_and
2663 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2664 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2667 inline wchar_t atomic_fetch_or_explicit
2668 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2669 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2671 inline wchar_t atomic_fetch_or
2672 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2673 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2676 inline wchar_t atomic_fetch_xor_explicit
2677 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2678 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2680 inline wchar_t atomic_fetch_xor
2681 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2682 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2688 #define atomic_is_lock_free( __a__ ) \
/* Type-generic operation macros.  Each expands to one of the
   _ATOMIC_* statement-expression macros defined earlier in this
   file; variants without an ordering argument hard-wire
   memory_order_seq_cst. */

#define atomic_load( __a__ ) \
_ATOMIC_LOAD_( __a__, memory_order_seq_cst )

#define atomic_load_explicit( __a__, __x__ ) \
_ATOMIC_LOAD_( __a__, __x__ )

#define atomic_init( __a__, __m__ ) \
_ATOMIC_INIT_( __a__, __m__ )

#define atomic_store( __a__, __m__ ) \
_ATOMIC_STORE_( __a__, __m__, memory_order_seq_cst )

#define atomic_store_explicit( __a__, __m__, __x__ ) \
_ATOMIC_STORE_( __a__, __m__, __x__ )

/* Swap is modelled as a plain assignment (operator =) through the
   generic read-modify-write macro. */
#define atomic_swap( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, =, __m__, memory_order_seq_cst )

#define atomic_swap_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, =, __m__, __x__ )

#define atomic_compare_swap( __a__, __e__, __m__ ) \
_ATOMIC_CMPSWP_( __a__, __e__, __m__, memory_order_seq_cst )

/* NOTE(review): the failure-ordering argument __y__ is accepted but
   never forwarded -- _ATOMIC_CMPSWP_ receives only __x__.  Confirm
   this is an intentional simplification of this example
   implementation. */
#define atomic_compare_swap_explicit( __a__, __e__, __m__, __x__, __y__ ) \
_ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ )

/* Wrapped in a statement expression so an invocation behaves as a
   single (void-ish) expression. */
#define atomic_fence( __a__, __x__ ) \
({ _ATOMIC_FENCE_( __a__, __x__ ); })

/* Arithmetic / bitwise read-modify-write operations, in _explicit
   and seq_cst convenience forms. */
#define atomic_fetch_add_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, +=, __m__, __x__ )

#define atomic_fetch_add( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, +=, __m__, memory_order_seq_cst )

#define atomic_fetch_sub_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, -=, __m__, __x__ )

#define atomic_fetch_sub( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, -=, __m__, memory_order_seq_cst )

#define atomic_fetch_and_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, &=, __m__, __x__ )

#define atomic_fetch_and( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, &=, __m__, memory_order_seq_cst )

#define atomic_fetch_or_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, |=, __m__, __x__ )

#define atomic_fetch_or( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, |=, __m__, memory_order_seq_cst )

#define atomic_fetch_xor_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ )

#define atomic_fetch_xor( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, ^=, __m__, memory_order_seq_cst )
2763 inline bool atomic_bool::is_lock_free() const volatile
// atomic_bool member operations: thin forwarders to the generic
// atomic_* macro forms defined above, passing `this` as the atomic
// object.

inline void atomic_bool::store
( bool __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline bool atomic_bool::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline bool atomic_bool::swap
( bool __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

// Two-order CAS: __x__ is the success ordering, __y__ the failure
// ordering (see atomic_compare_swap_explicit for how __y__ is used).
inline bool atomic_bool::compare_swap
( bool& __e__, bool __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order CAS: derives the failure ordering by weakening __x__
// (acq_rel -> acquire, release -> relaxed, otherwise unchanged).
inline bool atomic_bool::compare_swap
( bool& __e__, bool __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

// `return` of the void atomic_fence expression is legal and kept for
// uniformity with the other forwarders.
inline void atomic_bool::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
2794 inline bool atomic_address::is_lock_free() const volatile
// atomic_address members (void* payload): forwarders to the generic
// atomic_* macros; same pattern as atomic_bool above.  The
// single-order compare_swap weakens __x__ to obtain the failure
// ordering (acq_rel -> acquire, release -> relaxed).

inline void atomic_address::store
( void* __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline void* atomic_address::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline void* atomic_address::swap
( void* __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_address::compare_swap
( void*& __e__, void* __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

inline bool atomic_address::compare_swap
( void*& __e__, void* __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_address::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
2825 inline bool atomic_char::is_lock_free() const volatile
// atomic_char members: same forwarding pattern as atomic_bool above.

inline void atomic_char::store
( char __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline char atomic_char::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline char atomic_char::swap
( char __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_char::compare_swap
( char& __e__, char __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order CAS: failure ordering derived by weakening __x__.
inline bool atomic_char::compare_swap
( char& __e__, char __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_char::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
2856 inline bool atomic_schar::is_lock_free() const volatile
// atomic_schar members: same forwarding pattern as atomic_bool above.

inline void atomic_schar::store
( signed char __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline signed char atomic_schar::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline signed char atomic_schar::swap
( signed char __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_schar::compare_swap
( signed char& __e__, signed char __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order CAS: failure ordering derived by weakening __x__.
inline bool atomic_schar::compare_swap
( signed char& __e__, signed char __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_schar::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
2887 inline bool atomic_uchar::is_lock_free() const volatile
// atomic_uchar members: same forwarding pattern as atomic_bool above.

inline void atomic_uchar::store
( unsigned char __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline unsigned char atomic_uchar::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline unsigned char atomic_uchar::swap
( unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_uchar::compare_swap
( unsigned char& __e__, unsigned char __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order CAS: failure ordering derived by weakening __x__.
inline bool atomic_uchar::compare_swap
( unsigned char& __e__, unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_uchar::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
2918 inline bool atomic_short::is_lock_free() const volatile
// atomic_short members: same forwarding pattern as atomic_bool above.

inline void atomic_short::store
( short __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline short atomic_short::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline short atomic_short::swap
( short __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_short::compare_swap
( short& __e__, short __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order CAS: failure ordering derived by weakening __x__.
inline bool atomic_short::compare_swap
( short& __e__, short __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_short::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
2949 inline bool atomic_ushort::is_lock_free() const volatile
// atomic_ushort members: same forwarding pattern as atomic_bool above.

inline void atomic_ushort::store
( unsigned short __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline unsigned short atomic_ushort::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline unsigned short atomic_ushort::swap
( unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_ushort::compare_swap
( unsigned short& __e__, unsigned short __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order CAS: failure ordering derived by weakening __x__.
inline bool atomic_ushort::compare_swap
( unsigned short& __e__, unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_ushort::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
2980 inline bool atomic_int::is_lock_free() const volatile
// atomic_int members: same forwarding pattern as atomic_bool above.

inline void atomic_int::store
( int __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline int atomic_int::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline int atomic_int::swap
( int __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_int::compare_swap
( int& __e__, int __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order CAS: failure ordering derived by weakening __x__.
inline bool atomic_int::compare_swap
( int& __e__, int __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_int::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3011 inline bool atomic_uint::is_lock_free() const volatile
// atomic_uint members: same forwarding pattern as atomic_bool above.

inline void atomic_uint::store
( unsigned int __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline unsigned int atomic_uint::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline unsigned int atomic_uint::swap
( unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_uint::compare_swap
( unsigned int& __e__, unsigned int __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order CAS: failure ordering derived by weakening __x__.
inline bool atomic_uint::compare_swap
( unsigned int& __e__, unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_uint::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3042 inline bool atomic_long::is_lock_free() const volatile
// atomic_long members: same forwarding pattern as atomic_bool above.

inline void atomic_long::store
( long __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline long atomic_long::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline long atomic_long::swap
( long __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_long::compare_swap
( long& __e__, long __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order CAS: failure ordering derived by weakening __x__.
inline bool atomic_long::compare_swap
( long& __e__, long __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_long::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3073 inline bool atomic_ulong::is_lock_free() const volatile
// atomic_ulong members: same forwarding pattern as atomic_bool above.

inline void atomic_ulong::store
( unsigned long __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline unsigned long atomic_ulong::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline unsigned long atomic_ulong::swap
( unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_ulong::compare_swap
( unsigned long& __e__, unsigned long __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order CAS: failure ordering derived by weakening __x__.
inline bool atomic_ulong::compare_swap
( unsigned long& __e__, unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_ulong::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3104 inline bool atomic_llong::is_lock_free() const volatile
// atomic_llong members: same forwarding pattern as atomic_bool above.

inline void atomic_llong::store
( long long __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline long long atomic_llong::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline long long atomic_llong::swap
( long long __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_llong::compare_swap
( long long& __e__, long long __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order CAS: failure ordering derived by weakening __x__.
inline bool atomic_llong::compare_swap
( long long& __e__, long long __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_llong::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3135 inline bool atomic_ullong::is_lock_free() const volatile
// atomic_ullong members: same forwarding pattern as atomic_bool above.

inline void atomic_ullong::store
( unsigned long long __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline unsigned long long atomic_ullong::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline unsigned long long atomic_ullong::swap
( unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_ullong::compare_swap
( unsigned long long& __e__, unsigned long long __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order CAS: failure ordering derived by weakening __x__.
inline bool atomic_ullong::compare_swap
( unsigned long long& __e__, unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_ullong::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3166 inline bool atomic_wchar_t::is_lock_free() const volatile
// atomic_wchar_t members: same forwarding pattern as atomic_bool above.

inline void atomic_wchar_t::store
( wchar_t __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline wchar_t atomic_wchar_t::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline wchar_t atomic_wchar_t::swap
( wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_wchar_t::compare_swap
( wchar_t& __e__, wchar_t __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order CAS: failure ordering derived by weakening __x__.
inline bool atomic_wchar_t::compare_swap
( wchar_t& __e__, wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_wchar_t::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3197 template< typename T >
3198 inline bool atomic<T>::is_lock_free() const volatile
// Generic atomic<T> template members.  Unlike the per-type members
// above, these expand the _ATOMIC_* macros directly rather than going
// through the type-generic atomic_* wrappers.

template< typename T >
inline void atomic<T>::store( T __v__, memory_order __x__ ) volatile
{ _ATOMIC_STORE_( this, __v__, __x__ ); }

template< typename T >
inline T atomic<T>::load( memory_order __x__ ) volatile
{ return _ATOMIC_LOAD_( this, __x__ ); }

template< typename T >
inline T atomic<T>::swap( T __v__, memory_order __x__ ) volatile
{ return _ATOMIC_MODIFY_( this, =, __v__, __x__ ); }

// NOTE(review): the failure ordering __y__ is not forwarded --
// _ATOMIC_CMPSWP_ receives only __x__.  This mirrors
// atomic_compare_swap_explicit elsewhere in this file; confirm it is
// an intentional simplification.
template< typename T >
inline bool atomic<T>::compare_swap
( T& __r__, T __v__, memory_order __x__, memory_order __y__ ) volatile
{ return _ATOMIC_CMPSWP_( this, &__r__, __v__, __x__ ); }

// Single-order CAS: failure ordering derived by weakening __x__
// (acq_rel -> acquire, release -> relaxed, otherwise unchanged).
template< typename T >
inline bool atomic<T>::compare_swap
( T& __r__, T __v__, memory_order __x__ ) volatile
{ return compare_swap( __r__, __v__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
// Pointer arithmetic on atomic_address.  The ptrdiff_t offset is
// passed straight through to the generic fetch macros; since the
// payload is void*, the offset is presumably in bytes -- confirm
// against the _ATOMIC_MODIFY_ definition.

inline void* atomic_address::fetch_add
( ptrdiff_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline void* atomic_address::fetch_sub
( ptrdiff_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
// atomic_char read-modify-write members: forward to the matching
// atomic_fetch_*_explicit macro forms.

inline char atomic_char::fetch_add
( char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline char atomic_char::fetch_sub
( char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline char atomic_char::fetch_and
( char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline char atomic_char::fetch_or
( char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline char atomic_char::fetch_xor
( char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// atomic_schar read-modify-write members: forward to the matching
// atomic_fetch_*_explicit macro forms.

inline signed char atomic_schar::fetch_add
( signed char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline signed char atomic_schar::fetch_sub
( signed char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline signed char atomic_schar::fetch_and
( signed char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline signed char atomic_schar::fetch_or
( signed char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline signed char atomic_schar::fetch_xor
( signed char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// atomic_uchar read-modify-write members: forward to the matching
// atomic_fetch_*_explicit macro forms.

inline unsigned char atomic_uchar::fetch_add
( unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline unsigned char atomic_uchar::fetch_sub
( unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline unsigned char atomic_uchar::fetch_and
( unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline unsigned char atomic_uchar::fetch_or
( unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline unsigned char atomic_uchar::fetch_xor
( unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// atomic_short read-modify-write members: forward to the matching
// atomic_fetch_*_explicit macro forms.

inline short atomic_short::fetch_add
( short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline short atomic_short::fetch_sub
( short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline short atomic_short::fetch_and
( short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline short atomic_short::fetch_or
( short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline short atomic_short::fetch_xor
( short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// atomic_ushort read-modify-write members: forward to the matching
// atomic_fetch_*_explicit macro forms.

inline unsigned short atomic_ushort::fetch_add
( unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline unsigned short atomic_ushort::fetch_sub
( unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline unsigned short atomic_ushort::fetch_and
( unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline unsigned short atomic_ushort::fetch_or
( unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline unsigned short atomic_ushort::fetch_xor
( unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// atomic_int read-modify-write members: forward to the matching
// atomic_fetch_*_explicit macro forms.

inline int atomic_int::fetch_add
( int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline int atomic_int::fetch_sub
( int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline int atomic_int::fetch_and
( int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline int atomic_int::fetch_or
( int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline int atomic_int::fetch_xor
( int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// atomic_uint read-modify-write members: forward to the matching
// atomic_fetch_*_explicit macro forms.

inline unsigned int atomic_uint::fetch_add
( unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline unsigned int atomic_uint::fetch_sub
( unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline unsigned int atomic_uint::fetch_and
( unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline unsigned int atomic_uint::fetch_or
( unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline unsigned int atomic_uint::fetch_xor
( unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// atomic_long read-modify-write members: forward to the matching
// atomic_fetch_*_explicit macro forms.

inline long atomic_long::fetch_add
( long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline long atomic_long::fetch_sub
( long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline long atomic_long::fetch_and
( long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline long atomic_long::fetch_or
( long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline long atomic_long::fetch_xor
( long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// atomic_ulong read-modify-write members: forward to the matching
// atomic_fetch_*_explicit macro forms.

inline unsigned long atomic_ulong::fetch_add
( unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline unsigned long atomic_ulong::fetch_sub
( unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline unsigned long atomic_ulong::fetch_and
( unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline unsigned long atomic_ulong::fetch_or
( unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline unsigned long atomic_ulong::fetch_xor
( unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// atomic_llong read-modify-write members: forward to the matching
// atomic_fetch_*_explicit macro forms.

inline long long atomic_llong::fetch_add
( long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline long long atomic_llong::fetch_sub
( long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline long long atomic_llong::fetch_and
( long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline long long atomic_llong::fetch_or
( long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline long long atomic_llong::fetch_xor
( long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// atomic_ullong read-modify-write members: forward to the matching
// atomic_fetch_*_explicit macro forms.

inline unsigned long long atomic_ullong::fetch_add
( unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline unsigned long long atomic_ullong::fetch_sub
( unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline unsigned long long atomic_ullong::fetch_and
( unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline unsigned long long atomic_ullong::fetch_or
( unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline unsigned long long atomic_ullong::fetch_xor
( unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// atomic_wchar_t read-modify-write members: forward to the matching
// atomic_fetch_*_explicit macro forms.

inline wchar_t atomic_wchar_t::fetch_add
( wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline wchar_t atomic_wchar_t::fetch_sub
( wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline wchar_t atomic_wchar_t::fetch_and
( wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline wchar_t atomic_wchar_t::fetch_or
( wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline wchar_t atomic_wchar_t::fetch_xor
( wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// atomic<T*> partial-specialisation members, layered on
// atomic_address (whose payload is void*).

template< typename T >
T* atomic<T*>::load( memory_order __x__ ) volatile
{ return static_cast<T*>( atomic_address::load( __x__ ) ); }

template< typename T >
T* atomic<T*>::swap( T* __v__, memory_order __x__ ) volatile
{ return static_cast<T*>( atomic_address::swap( __v__, __x__ ) ); }

// The expected-value reference is reinterpreted as void** so the
// base-class CAS can write the observed pointer back through it.
// NOTE(review): this assumes T* and void* share object
// representation -- true on common platforms but formally
// unspecified; confirm acceptable for this implementation.
template< typename T >
bool atomic<T*>::compare_swap
( T*& __r__, T* __v__, memory_order __x__, memory_order __y__) volatile
{ return atomic_address::compare_swap( *reinterpret_cast<void**>( &__r__ ),
static_cast<void*>( __v__ ), __x__, __y__ ); }
//{ return _ATOMIC_CMPSWP_( this, &__r__, __v__, __x__ ); }

// Single-order CAS: failure ordering derived by weakening __x__
// (acq_rel -> acquire, release -> relaxed, otherwise unchanged).
template< typename T >
bool atomic<T*>::compare_swap
( T*& __r__, T* __v__, memory_order __x__ ) volatile
{ return compare_swap( __r__, __v__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

// Pointer arithmetic: scale the element count by sizeof(T) before
// delegating to the byte-offset fetch operations on the address base.
template< typename T >
T* atomic<T*>::fetch_add( ptrdiff_t __v__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, sizeof(T) * __v__, __x__ ); }

template< typename T >
T* atomic<T*>::fetch_sub( ptrdiff_t __v__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, sizeof(T) * __v__, __x__ ); }