// CPP0X( feature ): expands to nothing.  This header targets pre-C++0x
// compilers, so C++0x-only constructs (= default, = delete, constexpr)
// are wrapped in CPP0X(...) and compiled away here.
11 #define CPP0X( feature )
// Memory-ordering constraints for atomic operations, from weakest
// (relaxed) to strongest (sequentially consistent).
14 typedef enum memory_order {
15 memory_order_relaxed, memory_order_acquire, memory_order_release,
16 memory_order_acq_rel, memory_order_seq_cst
// atomic_flag: the primitive test-and-set atomic type.  All member
// operations default to sequentially consistent ordering.
20 typedef struct atomic_flag
// test_and_set atomically sets the flag and returns its previous value;
// clear atomically resets it.
23 bool test_and_set( memory_order = memory_order_seq_cst ) volatile;
24 void clear( memory_order = memory_order_seq_cst ) volatile;
25 void fence( memory_order ) const volatile;
// Default construction is allowed; copying an atomic_flag is forbidden.
27 CPP0X( atomic_flag() = default; )
28 CPP0X( atomic_flag( const atomic_flag& ) = delete; )
29 atomic_flag& operator =( const atomic_flag& ) CPP0X(=delete);
// Static initializer: the flag starts in the clear (false) state.
36 #define ATOMIC_FLAG_INIT { false }
// C-style free-function interface to atomic_flag.  The double-underscore
// entry points (__atomic_flag_wait__ etc.) are implementation details of
// the emulation/model-checking layer, not part of the public interface.
42 extern bool atomic_flag_test_and_set( volatile atomic_flag* );
43 extern bool atomic_flag_test_and_set_explicit
44 ( volatile atomic_flag*, memory_order );
45 extern void atomic_flag_clear( volatile atomic_flag* );
46 extern void atomic_flag_clear_explicit
47 ( volatile atomic_flag*, memory_order );
48 extern void atomic_flag_fence
49 ( const volatile atomic_flag*, memory_order );
50 extern void __atomic_flag_wait__
51 ( volatile atomic_flag* );
52 extern void __atomic_flag_wait_explicit__
53 ( volatile atomic_flag*, memory_order );
// Maps an arbitrary address to a dedicated flag; __attribute__((const))
// tells GCC the result depends only on the argument value.
54 extern volatile atomic_flag* __atomic_flag_for_address__
55 ( const volatile void* __z__ )
56 __attribute__((const));
// Member functions simply forward to the _explicit C entry points above.
64 inline bool atomic_flag::test_and_set( memory_order __x__ ) volatile
65 { return atomic_flag_test_and_set_explicit( this, __x__ ); }
67 inline void atomic_flag::clear( memory_order __x__ ) volatile
68 { atomic_flag_clear_explicit( this, __x__ ); }
70 inline void atomic_flag::fence( memory_order __x__ ) const volatile
71 { atomic_flag_fence( this, __x__ ); }
77 The remainder of the example implementation uses the following
78 macros. These macros exploit GNU extensions for value-returning
79 blocks (AKA statement expressions) and __typeof__.
81 The macros rely on data fields of atomic structs being named __f__.
82 Other symbols used are __a__=atomic, __e__=expected, __f__=field,
83 __g__=flag, __m__=modified, __o__=operation, __r__=result,
84 __p__=pointer to field, __v__=value (for single evaluation),
85 __x__=memory-ordering (the first, or success, ordering), and
__y__=memory-ordering (the second, or failure, ordering).
// _ATOMIC_LOAD_: GNU statement-expression implementing an atomic load.
// Hands an ATOMIC_READ action to the model checker (switch_to_master)
// and casts the value the scheduler chose to the atomic's field type.
88 #define _ATOMIC_LOAD_( __a__, __x__ ) \
89 ({ model->switch_to_master(new ModelAction(ATOMIC_READ, __x__, __a__)); \
90 ((__typeof__((__a__)->__f__)) (thread_current()->get_return_value())); \
// _ATOMIC_STORE_: evaluates the new value exactly once (into __v__),
// then reports an ATOMIC_WRITE action to the model checker.
94 #define _ATOMIC_STORE_( __a__, __m__, __x__ ) \
95 ({__typeof__(__m__) __v__ = (__m__); \
96 model->switch_to_master(new ModelAction(ATOMIC_WRITE, __x__, __a__, __v__)); \
// _ATOMIC_MODIFY_: atomic read-modify-write.  Reads the old value via an
// ATOMIC_READ action, applies operator __o__ (e.g. +=) to a copy, and
// publishes the modified copy with an ATOMIC_RMW action.
99 #define _ATOMIC_MODIFY_( __a__, __o__, __m__, __x__ ) \
100 ({ model->switch_to_master(new ModelAction(ATOMIC_READ, __x__, __a__)); \
101 __typeof__((__a__)->__f__) __old__=(__typeof__((__a__)->__f__)) thread_current()->get_return_value(); \
102 __typeof__(__m__) __v__ = (__m__); \
103 __typeof__((__a__)->__f__) __copy__= __old__; \
104 __copy__ __o__ __v__; \
105 model->switch_to_master(new ModelAction(ATOMIC_RMW, __x__, __a__, __copy__)); \
// _ATOMIC_CMPSWP_: compare-and-swap.  Reads the current value; on
// success (it equals *expected) performs an ATOMIC_RMW with the new
// value and yields true; on failure writes the observed value back into
// *expected and yields false.
108 #define _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ) \
109 ({ __typeof__(__e__) __q__ = (__e__); \
110 __typeof__(__m__) __v__ = (__m__); \
112 model->switch_to_master(new ModelAction(ATOMIC_READ, __x__, __a__)); \
113 __typeof__((__a__)->__f__) __t__=(__typeof__((__a__)->__f__)) thread_current()->get_return_value(); \
114 if (__t__ == * __q__ ) { \
115 model->switch_to_master(new ModelAction(ATOMIC_RMW, __x__, __a__, __v__)); __r__ = true; } \
116 else { *__q__ = __t__; __r__ = false;} \
// _ATOMIC_FENCE_: memory fence associated with the given atomic object.
119 #define _ATOMIC_FENCE_( __a__, __x__ ) \
// Feature-test macros: integral and address atomics are always
// lock-free in this implementation.
122 #define ATOMIC_INTEGRAL_LOCK_FREE 1
123 #define ATOMIC_ADDRESS_LOCK_FREE 1
// atomic_bool: atomic boolean with store/load/swap/compare_swap.
// NOTE(review): unlike the integral atomics below, the default
// constructor is deleted rather than defaulted, so an atomic_bool must
// be constructed from an explicit bool — confirm this asymmetry is
// intentional.
125 typedef struct atomic_bool
128 bool is_lock_free() const volatile;
129 void store( bool, memory_order = memory_order_seq_cst ) volatile;
130 bool load( memory_order = memory_order_seq_cst ) volatile;
131 bool swap( bool, memory_order = memory_order_seq_cst ) volatile;
132 bool compare_swap ( bool&, bool, memory_order, memory_order ) volatile;
133 bool compare_swap ( bool&, bool,
134 memory_order = memory_order_seq_cst) volatile;
135 void fence( memory_order ) const volatile;
137 CPP0X( atomic_bool() = delete; )
138 CPP0X( constexpr explicit atomic_bool( bool __v__ ) : __f__( __v__ ) { } )
139 CPP0X( atomic_bool( const atomic_bool& ) = delete; )
140 atomic_bool& operator =( const atomic_bool& ) CPP0X(=delete);
// Assignment from a plain bool stores the value and returns it.
142 bool operator =( bool __v__ ) volatile
143 { store( __v__ ); return __v__; }
// The C interface entry points are granted access to the representation.
145 friend void atomic_store_explicit( volatile atomic_bool*, bool,
147 friend bool atomic_load_explicit( volatile atomic_bool*, memory_order );
148 friend bool atomic_swap_explicit( volatile atomic_bool*, bool,
150 friend bool atomic_compare_swap_explicit( volatile atomic_bool*, bool*, bool,
151 memory_order, memory_order );
152 friend void atomic_fence( const volatile atomic_bool*, memory_order );
// atomic_address: atomic void* with pointer arithmetic via ptrdiff_t
// offsets (fetch_add / fetch_sub).
160 typedef struct atomic_address
163 bool is_lock_free() const volatile;
164 void store( void*, memory_order = memory_order_seq_cst ) volatile;
165 void* load( memory_order = memory_order_seq_cst ) volatile;
166 void* swap( void*, memory_order = memory_order_seq_cst ) volatile;
167 bool compare_swap( void*&, void*, memory_order, memory_order ) volatile;
168 bool compare_swap( void*&, void*,
169 memory_order = memory_order_seq_cst ) volatile;
170 void fence( memory_order ) const volatile;
171 void* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
172 void* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
174 CPP0X( atomic_address() = default; )
175 CPP0X( constexpr explicit atomic_address( void* __v__ ) : __f__( __v__) { } )
176 CPP0X( atomic_address( const atomic_address& ) = delete; )
177 atomic_address& operator =( const atomic_address & ) CPP0X(=delete);
// Assignment from a plain pointer stores the value and returns it.
179 void* operator =( void* __v__ ) volatile
180 { store( __v__ ); return __v__; }
// NOTE(review): += and -= return the result of fetch_add/fetch_sub
// directly (the PREVIOUS pointer), whereas the integral atomics below
// return the updated value — confirm this difference is intentional.
182 void* operator +=( ptrdiff_t __v__ ) volatile
183 { return fetch_add( __v__ ); }
185 void* operator -=( ptrdiff_t __v__ ) volatile
186 { return fetch_sub( __v__ ); }
// The C interface entry points are granted access to the representation.
188 friend void atomic_store_explicit( volatile atomic_address*, void*,
190 friend void* atomic_load_explicit( volatile atomic_address*, memory_order );
191 friend void* atomic_swap_explicit( volatile atomic_address*, void*,
193 friend bool atomic_compare_swap_explicit( volatile atomic_address*,
194 void**, void*, memory_order, memory_order );
195 friend void atomic_fence( const volatile atomic_address*, memory_order );
196 friend void* atomic_fetch_add_explicit( volatile atomic_address*, ptrdiff_t,
198 friend void* atomic_fetch_sub_explicit( volatile atomic_address*, ptrdiff_t,
// atomic_char: atomic char with arithmetic/bitwise read-modify-write.
// fetch_* members return the PREVIOUS value; the compound-assignment
// operators below return the updated value.
207 typedef struct atomic_char
210 bool is_lock_free() const volatile;
212 memory_order = memory_order_seq_cst ) volatile;
213 char load( memory_order = memory_order_seq_cst ) volatile;
215 memory_order = memory_order_seq_cst ) volatile;
216 bool compare_swap( char&, char,
217 memory_order, memory_order ) volatile;
218 bool compare_swap( char&, char,
219 memory_order = memory_order_seq_cst ) volatile;
220 void fence( memory_order ) const volatile;
221 char fetch_add( char,
222 memory_order = memory_order_seq_cst ) volatile;
223 char fetch_sub( char,
224 memory_order = memory_order_seq_cst ) volatile;
225 char fetch_and( char,
226 memory_order = memory_order_seq_cst ) volatile;
228 memory_order = memory_order_seq_cst ) volatile;
229 char fetch_xor( char,
230 memory_order = memory_order_seq_cst ) volatile;
232 CPP0X( atomic_char() = default; )
233 CPP0X( constexpr atomic_char( char __v__ ) : __f__( __v__) { } )
234 CPP0X( atomic_char( const atomic_char& ) = delete; )
235 atomic_char& operator =( const atomic_char& ) CPP0X(=delete);
// Convenience operators built on the atomic fetch_* primitives.
237 char operator =( char __v__ ) volatile
238 { store( __v__ ); return __v__; }
240 char operator ++( int ) volatile
241 { return fetch_add( 1 ); }
243 char operator --( int ) volatile
244 { return fetch_sub( 1 ); }
246 char operator ++() volatile
247 { return fetch_add( 1 ) + 1; }
249 char operator --() volatile
250 { return fetch_sub( 1 ) - 1; }
252 char operator +=( char __v__ ) volatile
253 { return fetch_add( __v__ ) + __v__; }
255 char operator -=( char __v__ ) volatile
256 { return fetch_sub( __v__ ) - __v__; }
258 char operator &=( char __v__ ) volatile
259 { return fetch_and( __v__ ) & __v__; }
261 char operator |=( char __v__ ) volatile
262 { return fetch_or( __v__ ) | __v__; }
264 char operator ^=( char __v__ ) volatile
265 { return fetch_xor( __v__ ) ^ __v__; }
// The C interface entry points are granted access to the representation.
267 friend void atomic_store_explicit( volatile atomic_char*, char,
269 friend char atomic_load_explicit( volatile atomic_char*,
271 friend char atomic_swap_explicit( volatile atomic_char*,
272 char, memory_order );
273 friend bool atomic_compare_swap_explicit( volatile atomic_char*,
274 char*, char, memory_order, memory_order );
275 friend void atomic_fence( const volatile atomic_char*, memory_order );
276 friend char atomic_fetch_add_explicit( volatile atomic_char*,
277 char, memory_order );
278 friend char atomic_fetch_sub_explicit( volatile atomic_char*,
279 char, memory_order );
280 friend char atomic_fetch_and_explicit( volatile atomic_char*,
281 char, memory_order );
282 friend char atomic_fetch_or_explicit( volatile atomic_char*,
283 char, memory_order );
284 friend char atomic_fetch_xor_explicit( volatile atomic_char*,
285 char, memory_order );
// atomic_schar: atomic signed char.  Same shape as atomic_char: fetch_*
// return the previous value, compound operators the updated value.
293 typedef struct atomic_schar
296 bool is_lock_free() const volatile;
297 void store( signed char,
298 memory_order = memory_order_seq_cst ) volatile;
299 signed char load( memory_order = memory_order_seq_cst ) volatile;
300 signed char swap( signed char,
301 memory_order = memory_order_seq_cst ) volatile;
302 bool compare_swap( signed char&, signed char,
303 memory_order, memory_order ) volatile;
304 bool compare_swap( signed char&, signed char,
305 memory_order = memory_order_seq_cst ) volatile;
306 void fence( memory_order ) const volatile;
307 signed char fetch_add( signed char,
308 memory_order = memory_order_seq_cst ) volatile;
309 signed char fetch_sub( signed char,
310 memory_order = memory_order_seq_cst ) volatile;
311 signed char fetch_and( signed char,
312 memory_order = memory_order_seq_cst ) volatile;
313 signed char fetch_or( signed char,
314 memory_order = memory_order_seq_cst ) volatile;
315 signed char fetch_xor( signed char,
316 memory_order = memory_order_seq_cst ) volatile;
318 CPP0X( atomic_schar() = default; )
319 CPP0X( constexpr atomic_schar( signed char __v__ ) : __f__( __v__) { } )
320 CPP0X( atomic_schar( const atomic_schar& ) = delete; )
321 atomic_schar& operator =( const atomic_schar& ) CPP0X(=delete);
// Convenience operators built on the atomic fetch_* primitives.
323 signed char operator =( signed char __v__ ) volatile
324 { store( __v__ ); return __v__; }
326 signed char operator ++( int ) volatile
327 { return fetch_add( 1 ); }
329 signed char operator --( int ) volatile
330 { return fetch_sub( 1 ); }
332 signed char operator ++() volatile
333 { return fetch_add( 1 ) + 1; }
335 signed char operator --() volatile
336 { return fetch_sub( 1 ) - 1; }
338 signed char operator +=( signed char __v__ ) volatile
339 { return fetch_add( __v__ ) + __v__; }
341 signed char operator -=( signed char __v__ ) volatile
342 { return fetch_sub( __v__ ) - __v__; }
344 signed char operator &=( signed char __v__ ) volatile
345 { return fetch_and( __v__ ) & __v__; }
347 signed char operator |=( signed char __v__ ) volatile
348 { return fetch_or( __v__ ) | __v__; }
350 signed char operator ^=( signed char __v__ ) volatile
351 { return fetch_xor( __v__ ) ^ __v__; }
// The C interface entry points are granted access to the representation.
353 friend void atomic_store_explicit( volatile atomic_schar*, signed char,
355 friend signed char atomic_load_explicit( volatile atomic_schar*,
357 friend signed char atomic_swap_explicit( volatile atomic_schar*,
358 signed char, memory_order );
359 friend bool atomic_compare_swap_explicit( volatile atomic_schar*,
360 signed char*, signed char, memory_order, memory_order );
361 friend void atomic_fence( const volatile atomic_schar*, memory_order );
362 friend signed char atomic_fetch_add_explicit( volatile atomic_schar*,
363 signed char, memory_order );
364 friend signed char atomic_fetch_sub_explicit( volatile atomic_schar*,
365 signed char, memory_order );
366 friend signed char atomic_fetch_and_explicit( volatile atomic_schar*,
367 signed char, memory_order );
368 friend signed char atomic_fetch_or_explicit( volatile atomic_schar*,
369 signed char, memory_order );
370 friend signed char atomic_fetch_xor_explicit( volatile atomic_schar*,
371 signed char, memory_order );
// atomic_uchar: atomic unsigned char.  Same shape as atomic_char:
// fetch_* return the previous value, compound operators the updated one.
379 typedef struct atomic_uchar
382 bool is_lock_free() const volatile;
383 void store( unsigned char,
384 memory_order = memory_order_seq_cst ) volatile;
385 unsigned char load( memory_order = memory_order_seq_cst ) volatile;
386 unsigned char swap( unsigned char,
387 memory_order = memory_order_seq_cst ) volatile;
388 bool compare_swap( unsigned char&, unsigned char,
389 memory_order, memory_order ) volatile;
390 bool compare_swap( unsigned char&, unsigned char,
391 memory_order = memory_order_seq_cst ) volatile;
392 void fence( memory_order ) const volatile;
393 unsigned char fetch_add( unsigned char,
394 memory_order = memory_order_seq_cst ) volatile;
395 unsigned char fetch_sub( unsigned char,
396 memory_order = memory_order_seq_cst ) volatile;
397 unsigned char fetch_and( unsigned char,
398 memory_order = memory_order_seq_cst ) volatile;
399 unsigned char fetch_or( unsigned char,
400 memory_order = memory_order_seq_cst ) volatile;
401 unsigned char fetch_xor( unsigned char,
402 memory_order = memory_order_seq_cst ) volatile;
404 CPP0X( atomic_uchar() = default; )
405 CPP0X( constexpr atomic_uchar( unsigned char __v__ ) : __f__( __v__) { } )
406 CPP0X( atomic_uchar( const atomic_uchar& ) = delete; )
407 atomic_uchar& operator =( const atomic_uchar& ) CPP0X(=delete);
// Convenience operators built on the atomic fetch_* primitives.
409 unsigned char operator =( unsigned char __v__ ) volatile
410 { store( __v__ ); return __v__; }
412 unsigned char operator ++( int ) volatile
413 { return fetch_add( 1 ); }
415 unsigned char operator --( int ) volatile
416 { return fetch_sub( 1 ); }
418 unsigned char operator ++() volatile
419 { return fetch_add( 1 ) + 1; }
421 unsigned char operator --() volatile
422 { return fetch_sub( 1 ) - 1; }
424 unsigned char operator +=( unsigned char __v__ ) volatile
425 { return fetch_add( __v__ ) + __v__; }
427 unsigned char operator -=( unsigned char __v__ ) volatile
428 { return fetch_sub( __v__ ) - __v__; }
430 unsigned char operator &=( unsigned char __v__ ) volatile
431 { return fetch_and( __v__ ) & __v__; }
433 unsigned char operator |=( unsigned char __v__ ) volatile
434 { return fetch_or( __v__ ) | __v__; }
436 unsigned char operator ^=( unsigned char __v__ ) volatile
437 { return fetch_xor( __v__ ) ^ __v__; }
// The C interface entry points are granted access to the representation.
439 friend void atomic_store_explicit( volatile atomic_uchar*, unsigned char,
441 friend unsigned char atomic_load_explicit( volatile atomic_uchar*,
443 friend unsigned char atomic_swap_explicit( volatile atomic_uchar*,
444 unsigned char, memory_order );
445 friend bool atomic_compare_swap_explicit( volatile atomic_uchar*,
446 unsigned char*, unsigned char, memory_order, memory_order );
447 friend void atomic_fence( const volatile atomic_uchar*, memory_order );
448 friend unsigned char atomic_fetch_add_explicit( volatile atomic_uchar*,
449 unsigned char, memory_order );
450 friend unsigned char atomic_fetch_sub_explicit( volatile atomic_uchar*,
451 unsigned char, memory_order );
452 friend unsigned char atomic_fetch_and_explicit( volatile atomic_uchar*,
453 unsigned char, memory_order );
454 friend unsigned char atomic_fetch_or_explicit( volatile atomic_uchar*,
455 unsigned char, memory_order );
456 friend unsigned char atomic_fetch_xor_explicit( volatile atomic_uchar*,
457 unsigned char, memory_order );
// atomic_short: atomic short.  Same shape as atomic_char: fetch_*
// return the previous value, compound operators the updated value.
465 typedef struct atomic_short
468 bool is_lock_free() const volatile;
470 memory_order = memory_order_seq_cst ) volatile;
471 short load( memory_order = memory_order_seq_cst ) volatile;
473 memory_order = memory_order_seq_cst ) volatile;
474 bool compare_swap( short&, short,
475 memory_order, memory_order ) volatile;
476 bool compare_swap( short&, short,
477 memory_order = memory_order_seq_cst ) volatile;
478 void fence( memory_order ) const volatile;
479 short fetch_add( short,
480 memory_order = memory_order_seq_cst ) volatile;
481 short fetch_sub( short,
482 memory_order = memory_order_seq_cst ) volatile;
483 short fetch_and( short,
484 memory_order = memory_order_seq_cst ) volatile;
485 short fetch_or( short,
486 memory_order = memory_order_seq_cst ) volatile;
487 short fetch_xor( short,
488 memory_order = memory_order_seq_cst ) volatile;
490 CPP0X( atomic_short() = default; )
491 CPP0X( constexpr atomic_short( short __v__ ) : __f__( __v__) { } )
492 CPP0X( atomic_short( const atomic_short& ) = delete; )
493 atomic_short& operator =( const atomic_short& ) CPP0X(=delete);
// Convenience operators built on the atomic fetch_* primitives.
495 short operator =( short __v__ ) volatile
496 { store( __v__ ); return __v__; }
498 short operator ++( int ) volatile
499 { return fetch_add( 1 ); }
501 short operator --( int ) volatile
502 { return fetch_sub( 1 ); }
504 short operator ++() volatile
505 { return fetch_add( 1 ) + 1; }
507 short operator --() volatile
508 { return fetch_sub( 1 ) - 1; }
510 short operator +=( short __v__ ) volatile
511 { return fetch_add( __v__ ) + __v__; }
513 short operator -=( short __v__ ) volatile
514 { return fetch_sub( __v__ ) - __v__; }
516 short operator &=( short __v__ ) volatile
517 { return fetch_and( __v__ ) & __v__; }
519 short operator |=( short __v__ ) volatile
520 { return fetch_or( __v__ ) | __v__; }
522 short operator ^=( short __v__ ) volatile
523 { return fetch_xor( __v__ ) ^ __v__; }
// The C interface entry points are granted access to the representation.
525 friend void atomic_store_explicit( volatile atomic_short*, short,
527 friend short atomic_load_explicit( volatile atomic_short*,
529 friend short atomic_swap_explicit( volatile atomic_short*,
530 short, memory_order );
531 friend bool atomic_compare_swap_explicit( volatile atomic_short*,
532 short*, short, memory_order, memory_order );
533 friend void atomic_fence( const volatile atomic_short*, memory_order );
534 friend short atomic_fetch_add_explicit( volatile atomic_short*,
535 short, memory_order );
536 friend short atomic_fetch_sub_explicit( volatile atomic_short*,
537 short, memory_order );
538 friend short atomic_fetch_and_explicit( volatile atomic_short*,
539 short, memory_order );
540 friend short atomic_fetch_or_explicit( volatile atomic_short*,
541 short, memory_order );
542 friend short atomic_fetch_xor_explicit( volatile atomic_short*,
543 short, memory_order );
// atomic_ushort: atomic unsigned short.  Same shape as atomic_char:
// fetch_* return the previous value, compound operators the updated one.
551 typedef struct atomic_ushort
554 bool is_lock_free() const volatile;
555 void store( unsigned short,
556 memory_order = memory_order_seq_cst ) volatile;
557 unsigned short load( memory_order = memory_order_seq_cst ) volatile;
558 unsigned short swap( unsigned short,
559 memory_order = memory_order_seq_cst ) volatile;
560 bool compare_swap( unsigned short&, unsigned short,
561 memory_order, memory_order ) volatile;
562 bool compare_swap( unsigned short&, unsigned short,
563 memory_order = memory_order_seq_cst ) volatile;
564 void fence( memory_order ) const volatile;
565 unsigned short fetch_add( unsigned short,
566 memory_order = memory_order_seq_cst ) volatile;
567 unsigned short fetch_sub( unsigned short,
568 memory_order = memory_order_seq_cst ) volatile;
569 unsigned short fetch_and( unsigned short,
570 memory_order = memory_order_seq_cst ) volatile;
571 unsigned short fetch_or( unsigned short,
572 memory_order = memory_order_seq_cst ) volatile;
573 unsigned short fetch_xor( unsigned short,
574 memory_order = memory_order_seq_cst ) volatile;
576 CPP0X( atomic_ushort() = default; )
577 CPP0X( constexpr atomic_ushort( unsigned short __v__ ) : __f__( __v__) { } )
578 CPP0X( atomic_ushort( const atomic_ushort& ) = delete; )
579 atomic_ushort& operator =( const atomic_ushort& ) CPP0X(=delete);
// Convenience operators built on the atomic fetch_* primitives.
581 unsigned short operator =( unsigned short __v__ ) volatile
582 { store( __v__ ); return __v__; }
584 unsigned short operator ++( int ) volatile
585 { return fetch_add( 1 ); }
587 unsigned short operator --( int ) volatile
588 { return fetch_sub( 1 ); }
590 unsigned short operator ++() volatile
591 { return fetch_add( 1 ) + 1; }
593 unsigned short operator --() volatile
594 { return fetch_sub( 1 ) - 1; }
596 unsigned short operator +=( unsigned short __v__ ) volatile
597 { return fetch_add( __v__ ) + __v__; }
599 unsigned short operator -=( unsigned short __v__ ) volatile
600 { return fetch_sub( __v__ ) - __v__; }
602 unsigned short operator &=( unsigned short __v__ ) volatile
603 { return fetch_and( __v__ ) & __v__; }
605 unsigned short operator |=( unsigned short __v__ ) volatile
606 { return fetch_or( __v__ ) | __v__; }
608 unsigned short operator ^=( unsigned short __v__ ) volatile
609 { return fetch_xor( __v__ ) ^ __v__; }
// The C interface entry points are granted access to the representation.
611 friend void atomic_store_explicit( volatile atomic_ushort*, unsigned short,
613 friend unsigned short atomic_load_explicit( volatile atomic_ushort*,
615 friend unsigned short atomic_swap_explicit( volatile atomic_ushort*,
616 unsigned short, memory_order );
617 friend bool atomic_compare_swap_explicit( volatile atomic_ushort*,
618 unsigned short*, unsigned short, memory_order, memory_order );
619 friend void atomic_fence( const volatile atomic_ushort*, memory_order );
620 friend unsigned short atomic_fetch_add_explicit( volatile atomic_ushort*,
621 unsigned short, memory_order );
622 friend unsigned short atomic_fetch_sub_explicit( volatile atomic_ushort*,
623 unsigned short, memory_order );
624 friend unsigned short atomic_fetch_and_explicit( volatile atomic_ushort*,
625 unsigned short, memory_order );
626 friend unsigned short atomic_fetch_or_explicit( volatile atomic_ushort*,
627 unsigned short, memory_order );
628 friend unsigned short atomic_fetch_xor_explicit( volatile atomic_ushort*,
629 unsigned short, memory_order );
// Representation: the _ATOMIC_* macros above rely on the data field of
// every atomic struct being named __f__.
633 unsigned short __f__;
// atomic_int: atomic int.  Same shape as atomic_char: fetch_* return
// the previous value, compound operators the updated value.
637 typedef struct atomic_int
640 bool is_lock_free() const volatile;
642 memory_order = memory_order_seq_cst ) volatile;
643 int load( memory_order = memory_order_seq_cst ) volatile;
645 memory_order = memory_order_seq_cst ) volatile;
646 bool compare_swap( int&, int,
647 memory_order, memory_order ) volatile;
648 bool compare_swap( int&, int,
649 memory_order = memory_order_seq_cst ) volatile;
650 void fence( memory_order ) const volatile;
652 memory_order = memory_order_seq_cst ) volatile;
654 memory_order = memory_order_seq_cst ) volatile;
656 memory_order = memory_order_seq_cst ) volatile;
658 memory_order = memory_order_seq_cst ) volatile;
660 memory_order = memory_order_seq_cst ) volatile;
662 CPP0X( atomic_int() = default; )
663 CPP0X( constexpr atomic_int( int __v__ ) : __f__( __v__) { } )
664 CPP0X( atomic_int( const atomic_int& ) = delete; )
665 atomic_int& operator =( const atomic_int& ) CPP0X(=delete);
// Convenience operators built on the atomic fetch_* primitives.
667 int operator =( int __v__ ) volatile
668 { store( __v__ ); return __v__; }
670 int operator ++( int ) volatile
671 { return fetch_add( 1 ); }
673 int operator --( int ) volatile
674 { return fetch_sub( 1 ); }
676 int operator ++() volatile
677 { return fetch_add( 1 ) + 1; }
679 int operator --() volatile
680 { return fetch_sub( 1 ) - 1; }
682 int operator +=( int __v__ ) volatile
683 { return fetch_add( __v__ ) + __v__; }
685 int operator -=( int __v__ ) volatile
686 { return fetch_sub( __v__ ) - __v__; }
688 int operator &=( int __v__ ) volatile
689 { return fetch_and( __v__ ) & __v__; }
691 int operator |=( int __v__ ) volatile
692 { return fetch_or( __v__ ) | __v__; }
694 int operator ^=( int __v__ ) volatile
695 { return fetch_xor( __v__ ) ^ __v__; }
// The C interface entry points are granted access to the representation.
697 friend void atomic_store_explicit( volatile atomic_int*, int,
699 friend int atomic_load_explicit( volatile atomic_int*,
701 friend int atomic_swap_explicit( volatile atomic_int*,
703 friend bool atomic_compare_swap_explicit( volatile atomic_int*,
704 int*, int, memory_order, memory_order );
705 friend void atomic_fence( const volatile atomic_int*, memory_order );
706 friend int atomic_fetch_add_explicit( volatile atomic_int*,
708 friend int atomic_fetch_sub_explicit( volatile atomic_int*,
710 friend int atomic_fetch_and_explicit( volatile atomic_int*,
712 friend int atomic_fetch_or_explicit( volatile atomic_int*,
714 friend int atomic_fetch_xor_explicit( volatile atomic_int*,
// atomic_uint: atomic unsigned int.  Same shape as atomic_char:
// fetch_* return the previous value, compound operators the updated one.
723 typedef struct atomic_uint
726 bool is_lock_free() const volatile;
727 void store( unsigned int,
728 memory_order = memory_order_seq_cst ) volatile;
729 unsigned int load( memory_order = memory_order_seq_cst ) volatile;
730 unsigned int swap( unsigned int,
731 memory_order = memory_order_seq_cst ) volatile;
732 bool compare_swap( unsigned int&, unsigned int,
733 memory_order, memory_order ) volatile;
734 bool compare_swap( unsigned int&, unsigned int,
735 memory_order = memory_order_seq_cst ) volatile;
736 void fence( memory_order ) const volatile;
737 unsigned int fetch_add( unsigned int,
738 memory_order = memory_order_seq_cst ) volatile;
739 unsigned int fetch_sub( unsigned int,
740 memory_order = memory_order_seq_cst ) volatile;
741 unsigned int fetch_and( unsigned int,
742 memory_order = memory_order_seq_cst ) volatile;
743 unsigned int fetch_or( unsigned int,
744 memory_order = memory_order_seq_cst ) volatile;
745 unsigned int fetch_xor( unsigned int,
746 memory_order = memory_order_seq_cst ) volatile;
748 CPP0X( atomic_uint() = default; )
749 CPP0X( constexpr atomic_uint( unsigned int __v__ ) : __f__( __v__) { } )
750 CPP0X( atomic_uint( const atomic_uint& ) = delete; )
751 atomic_uint& operator =( const atomic_uint& ) CPP0X(=delete);
// Convenience operators built on the atomic fetch_* primitives.
753 unsigned int operator =( unsigned int __v__ ) volatile
754 { store( __v__ ); return __v__; }
756 unsigned int operator ++( int ) volatile
757 { return fetch_add( 1 ); }
759 unsigned int operator --( int ) volatile
760 { return fetch_sub( 1 ); }
762 unsigned int operator ++() volatile
763 { return fetch_add( 1 ) + 1; }
765 unsigned int operator --() volatile
766 { return fetch_sub( 1 ) - 1; }
768 unsigned int operator +=( unsigned int __v__ ) volatile
769 { return fetch_add( __v__ ) + __v__; }
771 unsigned int operator -=( unsigned int __v__ ) volatile
772 { return fetch_sub( __v__ ) - __v__; }
774 unsigned int operator &=( unsigned int __v__ ) volatile
775 { return fetch_and( __v__ ) & __v__; }
777 unsigned int operator |=( unsigned int __v__ ) volatile
778 { return fetch_or( __v__ ) | __v__; }
780 unsigned int operator ^=( unsigned int __v__ ) volatile
781 { return fetch_xor( __v__ ) ^ __v__; }
// The C interface entry points are granted access to the representation.
783 friend void atomic_store_explicit( volatile atomic_uint*, unsigned int,
785 friend unsigned int atomic_load_explicit( volatile atomic_uint*,
787 friend unsigned int atomic_swap_explicit( volatile atomic_uint*,
788 unsigned int, memory_order );
789 friend bool atomic_compare_swap_explicit( volatile atomic_uint*,
790 unsigned int*, unsigned int, memory_order, memory_order );
791 friend void atomic_fence( const volatile atomic_uint*, memory_order );
792 friend unsigned int atomic_fetch_add_explicit( volatile atomic_uint*,
793 unsigned int, memory_order );
794 friend unsigned int atomic_fetch_sub_explicit( volatile atomic_uint*,
795 unsigned int, memory_order );
796 friend unsigned int atomic_fetch_and_explicit( volatile atomic_uint*,
797 unsigned int, memory_order );
798 friend unsigned int atomic_fetch_or_explicit( volatile atomic_uint*,
799 unsigned int, memory_order );
800 friend unsigned int atomic_fetch_xor_explicit( volatile atomic_uint*,
801 unsigned int, memory_order );
// atomic_long: atomic long.  Same shape as atomic_char: fetch_* return
// the previous value, compound operators the updated value.
809 typedef struct atomic_long
812 bool is_lock_free() const volatile;
814 memory_order = memory_order_seq_cst ) volatile;
815 long load( memory_order = memory_order_seq_cst ) volatile;
817 memory_order = memory_order_seq_cst ) volatile;
818 bool compare_swap( long&, long,
819 memory_order, memory_order ) volatile;
820 bool compare_swap( long&, long,
821 memory_order = memory_order_seq_cst ) volatile;
822 void fence( memory_order ) const volatile;
823 long fetch_add( long,
824 memory_order = memory_order_seq_cst ) volatile;
825 long fetch_sub( long,
826 memory_order = memory_order_seq_cst ) volatile;
827 long fetch_and( long,
828 memory_order = memory_order_seq_cst ) volatile;
830 memory_order = memory_order_seq_cst ) volatile;
831 long fetch_xor( long,
832 memory_order = memory_order_seq_cst ) volatile;
834 CPP0X( atomic_long() = default; )
835 CPP0X( constexpr atomic_long( long __v__ ) : __f__( __v__) { } )
836 CPP0X( atomic_long( const atomic_long& ) = delete; )
837 atomic_long& operator =( const atomic_long& ) CPP0X(=delete);
// Convenience operators built on the atomic fetch_* primitives.
839 long operator =( long __v__ ) volatile
840 { store( __v__ ); return __v__; }
842 long operator ++( int ) volatile
843 { return fetch_add( 1 ); }
845 long operator --( int ) volatile
846 { return fetch_sub( 1 ); }
848 long operator ++() volatile
849 { return fetch_add( 1 ) + 1; }
851 long operator --() volatile
852 { return fetch_sub( 1 ) - 1; }
854 long operator +=( long __v__ ) volatile
855 { return fetch_add( __v__ ) + __v__; }
857 long operator -=( long __v__ ) volatile
858 { return fetch_sub( __v__ ) - __v__; }
860 long operator &=( long __v__ ) volatile
861 { return fetch_and( __v__ ) & __v__; }
863 long operator |=( long __v__ ) volatile
864 { return fetch_or( __v__ ) | __v__; }
866 long operator ^=( long __v__ ) volatile
867 { return fetch_xor( __v__ ) ^ __v__; }
// The C interface entry points are granted access to the representation.
869 friend void atomic_store_explicit( volatile atomic_long*, long,
871 friend long atomic_load_explicit( volatile atomic_long*,
873 friend long atomic_swap_explicit( volatile atomic_long*,
874 long, memory_order );
875 friend bool atomic_compare_swap_explicit( volatile atomic_long*,
876 long*, long, memory_order, memory_order );
877 friend void atomic_fence( const volatile atomic_long*, memory_order );
878 friend long atomic_fetch_add_explicit( volatile atomic_long*,
879 long, memory_order );
880 friend long atomic_fetch_sub_explicit( volatile atomic_long*,
881 long, memory_order );
882 friend long atomic_fetch_and_explicit( volatile atomic_long*,
883 long, memory_order );
884 friend long atomic_fetch_or_explicit( volatile atomic_long*,
885 long, memory_order );
886 friend long atomic_fetch_xor_explicit( volatile atomic_long*,
887 long, memory_order );
// atomic_ulong: atomic unsigned long.  Same shape as atomic_char:
// fetch_* return the previous value, compound operators the updated one.
895 typedef struct atomic_ulong
898 bool is_lock_free() const volatile;
899 void store( unsigned long,
900 memory_order = memory_order_seq_cst ) volatile;
901 unsigned long load( memory_order = memory_order_seq_cst ) volatile;
902 unsigned long swap( unsigned long,
903 memory_order = memory_order_seq_cst ) volatile;
904 bool compare_swap( unsigned long&, unsigned long,
905 memory_order, memory_order ) volatile;
906 bool compare_swap( unsigned long&, unsigned long,
907 memory_order = memory_order_seq_cst ) volatile;
908 void fence( memory_order ) const volatile;
909 unsigned long fetch_add( unsigned long,
910 memory_order = memory_order_seq_cst ) volatile;
911 unsigned long fetch_sub( unsigned long,
912 memory_order = memory_order_seq_cst ) volatile;
913 unsigned long fetch_and( unsigned long,
914 memory_order = memory_order_seq_cst ) volatile;
915 unsigned long fetch_or( unsigned long,
916 memory_order = memory_order_seq_cst ) volatile;
917 unsigned long fetch_xor( unsigned long,
918 memory_order = memory_order_seq_cst ) volatile;
920 CPP0X( atomic_ulong() = default; )
921 CPP0X( constexpr atomic_ulong( unsigned long __v__ ) : __f__( __v__) { } )
922 CPP0X( atomic_ulong( const atomic_ulong& ) = delete; )
923 atomic_ulong& operator =( const atomic_ulong& ) CPP0X(=delete);
// Convenience operators built on the atomic fetch_* primitives.
925 unsigned long operator =( unsigned long __v__ ) volatile
926 { store( __v__ ); return __v__; }
928 unsigned long operator ++( int ) volatile
929 { return fetch_add( 1 ); }
931 unsigned long operator --( int ) volatile
932 { return fetch_sub( 1 ); }
934 unsigned long operator ++() volatile
935 { return fetch_add( 1 ) + 1; }
937 unsigned long operator --() volatile
938 { return fetch_sub( 1 ) - 1; }
940 unsigned long operator +=( unsigned long __v__ ) volatile
941 { return fetch_add( __v__ ) + __v__; }
943 unsigned long operator -=( unsigned long __v__ ) volatile
944 { return fetch_sub( __v__ ) - __v__; }
946 unsigned long operator &=( unsigned long __v__ ) volatile
947 { return fetch_and( __v__ ) & __v__; }
949 unsigned long operator |=( unsigned long __v__ ) volatile
950 { return fetch_or( __v__ ) | __v__; }
952 unsigned long operator ^=( unsigned long __v__ ) volatile
953 { return fetch_xor( __v__ ) ^ __v__; }
// The C interface entry points are granted access to the representation.
955 friend void atomic_store_explicit( volatile atomic_ulong*, unsigned long,
957 friend unsigned long atomic_load_explicit( volatile atomic_ulong*,
959 friend unsigned long atomic_swap_explicit( volatile atomic_ulong*,
960 unsigned long, memory_order );
961 friend bool atomic_compare_swap_explicit( volatile atomic_ulong*,
962 unsigned long*, unsigned long, memory_order, memory_order );
963 friend void atomic_fence( const volatile atomic_ulong*, memory_order );
964 friend unsigned long atomic_fetch_add_explicit( volatile atomic_ulong*,
965 unsigned long, memory_order );
966 friend unsigned long atomic_fetch_sub_explicit( volatile atomic_ulong*,
967 unsigned long, memory_order );
968 friend unsigned long atomic_fetch_and_explicit( volatile atomic_ulong*,
969 unsigned long, memory_order );
970 friend unsigned long atomic_fetch_or_explicit( volatile atomic_ulong*,
971 unsigned long, memory_order );
972 friend unsigned long atomic_fetch_xor_explicit( volatile atomic_ulong*,
973 unsigned long, memory_order );
// Atomic wrapper for `long long`: same member layout and operator
// conventions as the other integral atomic_* structs in this file.
// NOTE(review): the closing `} atomic_llong;` (and the __f__ data member)
// are not present in this excerpt — listing numbers jump 1059->1067.
981 typedef struct atomic_llong
984 bool is_lock_free() const volatile;
985 void store( long long,
986 memory_order = memory_order_seq_cst ) volatile;
987 long long load( memory_order = memory_order_seq_cst ) volatile;
988 long long swap( long long,
989 memory_order = memory_order_seq_cst ) volatile;
990 bool compare_swap( long long&, long long,
991 memory_order, memory_order ) volatile;
992 bool compare_swap( long long&, long long,
993 memory_order = memory_order_seq_cst ) volatile;
994 void fence( memory_order ) const volatile;
995 long long fetch_add( long long,
996 memory_order = memory_order_seq_cst ) volatile;
997 long long fetch_sub( long long,
998 memory_order = memory_order_seq_cst ) volatile;
999 long long fetch_and( long long,
1000 memory_order = memory_order_seq_cst ) volatile;
1001 long long fetch_or( long long,
1002 memory_order = memory_order_seq_cst ) volatile;
1003 long long fetch_xor( long long,
1004 memory_order = memory_order_seq_cst ) volatile;
// C++0x-only ctors and deleted copy operations.
1006 CPP0X( atomic_llong() = default; )
1007 CPP0X( constexpr atomic_llong( long long __v__ ) : __f__( __v__) { } )
1008 CPP0X( atomic_llong( const atomic_llong& ) = delete; )
1009 atomic_llong& operator =( const atomic_llong& ) CPP0X(=delete);
// Value assignment stores and returns the stored value.
1011 long long operator =( long long __v__ ) volatile
1012 { store( __v__ ); return __v__; }
// Post-forms return the old value; pre-forms return the new value.
1014 long long operator ++( int ) volatile
1015 { return fetch_add( 1 ); }
1017 long long operator --( int ) volatile
1018 { return fetch_sub( 1 ); }
1020 long long operator ++() volatile
1021 { return fetch_add( 1 ) + 1; }
1023 long long operator --() volatile
1024 { return fetch_sub( 1 ) - 1; }
// Compound assignments return the updated value.
1026 long long operator +=( long long __v__ ) volatile
1027 { return fetch_add( __v__ ) + __v__; }
1029 long long operator -=( long long __v__ ) volatile
1030 { return fetch_sub( __v__ ) - __v__; }
1032 long long operator &=( long long __v__ ) volatile
1033 { return fetch_and( __v__ ) & __v__; }
1035 long long operator |=( long long __v__ ) volatile
1036 { return fetch_or( __v__ ) | __v__; }
1038 long long operator ^=( long long __v__ ) volatile
1039 { return fetch_xor( __v__ ) ^ __v__; }
// Friends for the C-style free-function API (some continuation lines are
// missing from this excerpt: 1041 and 1043 are cut mid-declaration).
1041 friend void atomic_store_explicit( volatile atomic_llong*, long long,
1043 friend long long atomic_load_explicit( volatile atomic_llong*,
1045 friend long long atomic_swap_explicit( volatile atomic_llong*,
1046 long long, memory_order );
1047 friend bool atomic_compare_swap_explicit( volatile atomic_llong*,
1048 long long*, long long, memory_order, memory_order );
1049 friend void atomic_fence( const volatile atomic_llong*, memory_order );
1050 friend long long atomic_fetch_add_explicit( volatile atomic_llong*,
1051 long long, memory_order );
1052 friend long long atomic_fetch_sub_explicit( volatile atomic_llong*,
1053 long long, memory_order );
1054 friend long long atomic_fetch_and_explicit( volatile atomic_llong*,
1055 long long, memory_order );
1056 friend long long atomic_fetch_or_explicit( volatile atomic_llong*,
1057 long long, memory_order );
1058 friend long long atomic_fetch_xor_explicit( volatile atomic_llong*,
1059 long long, memory_order );
// Atomic wrapper for `unsigned long long`; parallel to atomic_llong above.
// The __f__ data member (listing 1149) is visible; the closing
// `} atomic_ullong;` is not present in this excerpt.
1067 typedef struct atomic_ullong
1070 bool is_lock_free() const volatile;
1071 void store( unsigned long long,
1072 memory_order = memory_order_seq_cst ) volatile;
1073 unsigned long long load( memory_order = memory_order_seq_cst ) volatile;
1074 unsigned long long swap( unsigned long long,
1075 memory_order = memory_order_seq_cst ) volatile;
1076 bool compare_swap( unsigned long long&, unsigned long long,
1077 memory_order, memory_order ) volatile;
1078 bool compare_swap( unsigned long long&, unsigned long long,
1079 memory_order = memory_order_seq_cst ) volatile;
1080 void fence( memory_order ) const volatile;
1081 unsigned long long fetch_add( unsigned long long,
1082 memory_order = memory_order_seq_cst ) volatile;
1083 unsigned long long fetch_sub( unsigned long long,
1084 memory_order = memory_order_seq_cst ) volatile;
1085 unsigned long long fetch_and( unsigned long long,
1086 memory_order = memory_order_seq_cst ) volatile;
1087 unsigned long long fetch_or( unsigned long long,
1088 memory_order = memory_order_seq_cst ) volatile;
1089 unsigned long long fetch_xor( unsigned long long,
1090 memory_order = memory_order_seq_cst ) volatile;
// C++0x-only ctors and deleted copy operations.
1092 CPP0X( atomic_ullong() = default; )
1093 CPP0X( constexpr atomic_ullong( unsigned long long __v__ ) : __f__( __v__) { } )
1094 CPP0X( atomic_ullong( const atomic_ullong& ) = delete; )
1095 atomic_ullong& operator =( const atomic_ullong& ) CPP0X(=delete);
// Value assignment stores and returns the stored value.
1097 unsigned long long operator =( unsigned long long __v__ ) volatile
1098 { store( __v__ ); return __v__; }
// Post-forms return the old value; pre-forms return the new value.
1100 unsigned long long operator ++( int ) volatile
1101 { return fetch_add( 1 ); }
1103 unsigned long long operator --( int ) volatile
1104 { return fetch_sub( 1 ); }
1106 unsigned long long operator ++() volatile
1107 { return fetch_add( 1 ) + 1; }
1109 unsigned long long operator --() volatile
1110 { return fetch_sub( 1 ) - 1; }
// Compound assignments return the updated value.
1112 unsigned long long operator +=( unsigned long long __v__ ) volatile
1113 { return fetch_add( __v__ ) + __v__; }
1115 unsigned long long operator -=( unsigned long long __v__ ) volatile
1116 { return fetch_sub( __v__ ) - __v__; }
1118 unsigned long long operator &=( unsigned long long __v__ ) volatile
1119 { return fetch_and( __v__ ) & __v__; }
1121 unsigned long long operator |=( unsigned long long __v__ ) volatile
1122 { return fetch_or( __v__ ) | __v__; }
1124 unsigned long long operator ^=( unsigned long long __v__ ) volatile
1125 { return fetch_xor( __v__ ) ^ __v__; }
// Friends for the C-style free-function API (lines 1127 and 1129 are cut
// mid-declaration; their continuations are missing from this excerpt).
1127 friend void atomic_store_explicit( volatile atomic_ullong*, unsigned long long,
1129 friend unsigned long long atomic_load_explicit( volatile atomic_ullong*,
1131 friend unsigned long long atomic_swap_explicit( volatile atomic_ullong*,
1132 unsigned long long, memory_order );
1133 friend bool atomic_compare_swap_explicit( volatile atomic_ullong*,
1134 unsigned long long*, unsigned long long, memory_order, memory_order );
1135 friend void atomic_fence( const volatile atomic_ullong*, memory_order );
1136 friend unsigned long long atomic_fetch_add_explicit( volatile atomic_ullong*,
1137 unsigned long long, memory_order );
1138 friend unsigned long long atomic_fetch_sub_explicit( volatile atomic_ullong*,
1139 unsigned long long, memory_order );
1140 friend unsigned long long atomic_fetch_and_explicit( volatile atomic_ullong*,
1141 unsigned long long, memory_order );
1142 friend unsigned long long atomic_fetch_or_explicit( volatile atomic_ullong*,
1143 unsigned long long, memory_order );
1144 friend unsigned long long atomic_fetch_xor_explicit( volatile atomic_ullong*,
1145 unsigned long long, memory_order );
// The single data member all members and friends operate on.
1149 unsigned long long __f__;
// Aliases mirroring the <cstdint> exact/least/fast width families,
// mapped onto the fundamental atomic_* structs defined above.
1153 typedef atomic_schar atomic_int_least8_t;
1154 typedef atomic_uchar atomic_uint_least8_t;
1155 typedef atomic_short atomic_int_least16_t;
1156 typedef atomic_ushort atomic_uint_least16_t;
1157 typedef atomic_int atomic_int_least32_t;
1158 typedef atomic_uint atomic_uint_least32_t;
1159 typedef atomic_llong atomic_int_least64_t;
1160 typedef atomic_ullong atomic_uint_least64_t;
// "fast" aliases use the same mapping as "least" in this example.
1162 typedef atomic_schar atomic_int_fast8_t;
1163 typedef atomic_uchar atomic_uint_fast8_t;
1164 typedef atomic_short atomic_int_fast16_t;
1165 typedef atomic_ushort atomic_uint_fast16_t;
1166 typedef atomic_int atomic_int_fast32_t;
1167 typedef atomic_uint atomic_uint_fast32_t;
1168 typedef atomic_llong atomic_int_fast64_t;
1169 typedef atomic_ullong atomic_uint_fast64_t;
// Pointer-sized and size-type aliases.
// NOTE(review): these hard-code `long` as the pointer/size type — an
// LP64-style assumption; confirm against the target ABI.
1171 typedef atomic_long atomic_intptr_t;
1172 typedef atomic_ulong atomic_uintptr_t;
1174 typedef atomic_long atomic_ssize_t;
1175 typedef atomic_ulong atomic_size_t;
1177 typedef atomic_long atomic_ptrdiff_t;
// Widest-integer aliases.
1179 typedef atomic_llong atomic_intmax_t;
1180 typedef atomic_ullong atomic_uintmax_t;
// Atomic wrapper for wchar_t; same member/operator conventions as the
// integral atomic_* structs. The closing `} atomic_wchar_t;` and the __f__
// member are not present in this excerpt (numbers jump 1263->1273).
// NOTE(review): listing line 1275 below also typedefs atomic_wchar_t to an
// integer alias — presumably these are alternative configurations guarded
// by preprocessor conditionals omitted from this excerpt.
1186 typedef struct atomic_wchar_t
1189 bool is_lock_free() const volatile;
1190 void store( wchar_t, memory_order = memory_order_seq_cst ) volatile;
1191 wchar_t load( memory_order = memory_order_seq_cst ) volatile;
1192 wchar_t swap( wchar_t,
1193 memory_order = memory_order_seq_cst ) volatile;
1194 bool compare_swap( wchar_t&, wchar_t,
1195 memory_order, memory_order ) volatile;
1196 bool compare_swap( wchar_t&, wchar_t,
1197 memory_order = memory_order_seq_cst ) volatile;
1198 void fence( memory_order ) const volatile;
1199 wchar_t fetch_add( wchar_t,
1200 memory_order = memory_order_seq_cst ) volatile;
1201 wchar_t fetch_sub( wchar_t,
1202 memory_order = memory_order_seq_cst ) volatile;
1203 wchar_t fetch_and( wchar_t,
1204 memory_order = memory_order_seq_cst ) volatile;
1205 wchar_t fetch_or( wchar_t,
1206 memory_order = memory_order_seq_cst ) volatile;
1207 wchar_t fetch_xor( wchar_t,
1208 memory_order = memory_order_seq_cst ) volatile;
// C++0x-only ctors and deleted copy operations.
1210 CPP0X( atomic_wchar_t() = default; )
1211 CPP0X( constexpr atomic_wchar_t( wchar_t __v__ ) : __f__( __v__) { } )
1212 CPP0X( atomic_wchar_t( const atomic_wchar_t& ) = delete; )
1213 atomic_wchar_t& operator =( const atomic_wchar_t& ) CPP0X(=delete);
// Value assignment stores and returns the stored value.
1215 wchar_t operator =( wchar_t __v__ ) volatile
1216 { store( __v__ ); return __v__; }
// Post-forms return the old value; pre-forms return the new value.
1218 wchar_t operator ++( int ) volatile
1219 { return fetch_add( 1 ); }
1221 wchar_t operator --( int ) volatile
1222 { return fetch_sub( 1 ); }
1224 wchar_t operator ++() volatile
1225 { return fetch_add( 1 ) + 1; }
1227 wchar_t operator --() volatile
1228 { return fetch_sub( 1 ) - 1; }
// Compound assignments return the updated value.
1230 wchar_t operator +=( wchar_t __v__ ) volatile
1231 { return fetch_add( __v__ ) + __v__; }
1233 wchar_t operator -=( wchar_t __v__ ) volatile
1234 { return fetch_sub( __v__ ) - __v__; }
1236 wchar_t operator &=( wchar_t __v__ ) volatile
1237 { return fetch_and( __v__ ) & __v__; }
1239 wchar_t operator |=( wchar_t __v__ ) volatile
1240 { return fetch_or( __v__ ) | __v__; }
1242 wchar_t operator ^=( wchar_t __v__ ) volatile
1243 { return fetch_xor( __v__ ) ^ __v__; }
// Friends for the C-style free-function API (lines 1245 and 1247 are cut
// mid-declaration; their continuations are missing from this excerpt).
1245 friend void atomic_store_explicit( volatile atomic_wchar_t*, wchar_t,
1247 friend wchar_t atomic_load_explicit( volatile atomic_wchar_t*,
1249 friend wchar_t atomic_swap_explicit( volatile atomic_wchar_t*,
1250 wchar_t, memory_order );
1251 friend bool atomic_compare_swap_explicit( volatile atomic_wchar_t*,
1252 wchar_t*, wchar_t, memory_order, memory_order );
1253 friend void atomic_fence( const volatile atomic_wchar_t*, memory_order );
1254 friend wchar_t atomic_fetch_add_explicit( volatile atomic_wchar_t*,
1255 wchar_t, memory_order );
1256 friend wchar_t atomic_fetch_sub_explicit( volatile atomic_wchar_t*,
1257 wchar_t, memory_order );
1258 friend wchar_t atomic_fetch_and_explicit( volatile atomic_wchar_t*,
1259 wchar_t, memory_order );
1260 friend wchar_t atomic_fetch_or_explicit( volatile atomic_wchar_t*,
1261 wchar_t, memory_order );
1262 friend wchar_t atomic_fetch_xor_explicit( volatile atomic_wchar_t*,
1263 wchar_t, memory_order );
// Character-type atomics expressed as aliases of the least-width atomics.
// NOTE(review): atomic_wchar_t is also defined as a struct above (listing
// 1186); these lines are presumably an alternative branch of preprocessor
// conditionals that this excerpt omits (numbers jump 1263->1273).
1273 typedef atomic_int_least16_t atomic_char16_t;
1274 typedef atomic_int_least32_t atomic_char32_t;
1275 typedef atomic_int_least32_t atomic_wchar_t;
// Primary `atomic<T>` template: load/store/swap/compare_swap/fence only —
// no arithmetic members (those exist only on the integral/pointer forms).
// NOTE(review): the `struct atomic { ... }` opening line(s) (listing
// 1283-1286) are missing from this excerpt; numbers jump 1282->1287.
1282 template< typename T >
1287 bool is_lock_free() const volatile;
1288 void store( T, memory_order = memory_order_seq_cst ) volatile;
1289 T load( memory_order = memory_order_seq_cst ) volatile;
1290 T swap( T __v__, memory_order = memory_order_seq_cst ) volatile;
1291 bool compare_swap( T&, T, memory_order, memory_order ) volatile;
1292 bool compare_swap( T&, T, memory_order = memory_order_seq_cst ) volatile;
1293 void fence( memory_order ) const volatile;
// C++0x-only ctors (note: value ctor is explicit here, unlike the
// C-compatible atomic_* structs) and deleted copy operations.
1295 CPP0X( atomic() = default; )
1296 CPP0X( constexpr explicit atomic( T __v__ ) : __f__( __v__ ) { } )
1297 CPP0X( atomic( const atomic& ) = delete; )
1298 atomic& operator =( const atomic& ) CPP0X(=delete);
// Value assignment stores and returns the stored value.
1300 T operator =( T __v__ ) volatile
1301 { store( __v__ ); return __v__; }
// Partial specialization for pointers, layered on atomic_address; re-declares
// the access members with T* types and adds ptrdiff_t-based fetch_add/sub.
1312 template<typename T> struct atomic< T* > : atomic_address
1314 T* load( memory_order = memory_order_seq_cst ) volatile;
1315 T* swap( T*, memory_order = memory_order_seq_cst ) volatile;
1316 bool compare_swap( T*&, T*, memory_order, memory_order ) volatile;
1317 bool compare_swap( T*&, T*,
1318 memory_order = memory_order_seq_cst ) volatile;
1319 T* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
1320 T* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
1322 CPP0X( atomic() = default; )
// NOTE(review): this constexpr ctor takes `T __v__` where `T* __v__` looks
// intended (it forwards to the atomic_address base) — likely a listing defect.
1323 CPP0X( constexpr explicit atomic( T __v__ ) : atomic_address( __v__ ) { } )
1324 CPP0X( atomic( const atomic& ) = delete; )
1325 atomic& operator =( const atomic& ) CPP0X(=delete);
// Pointer assignment stores and returns the stored pointer.
1327 T* operator =( T* __v__ ) volatile
1328 { store( __v__ ); return __v__; }
// Increment/decrement follow the usual old-value (post) / new-value (pre)
// convention, in units of T elements via fetch_add/fetch_sub(ptrdiff_t).
1330 T* operator ++( int ) volatile
1331 { return fetch_add( 1 ); }
1333 T* operator --( int ) volatile
1334 { return fetch_sub( 1 ); }
1336 T* operator ++() volatile
1337 { return fetch_add( 1 ) + 1; }
1339 T* operator --() volatile
1340 { return fetch_sub( 1 ) - 1; }
// NOTE(review): these compound assignments take `T* __v__` and pass it to
// fetch_add/fetch_sub, which are declared with ptrdiff_t parameters —
// `ptrdiff_t __v__` appears intended (pointer + pointer is not meaningful);
// likely a defect in the listing, flagged rather than changed here.
1342 T* operator +=( T* __v__ ) volatile
1343 { return fetch_add( __v__ ) + __v__; }
1345 T* operator -=( T* __v__ ) volatile
1346 { return fetch_sub( __v__ ) - __v__; }
// Full specializations of atomic<> for the builtin types, each deriving
// from the corresponding C-compatible atomic_* struct and adding only the
// C++0x constructors, deleted copy operations, and value assignment.
// NOTE(review): each struct's closing `};` (e.g. listing 1364) is missing
// from this excerpt; the bodies shown are otherwise complete and uniform.
1354 template<> struct atomic< bool > : atomic_bool
1356 CPP0X( atomic() = default; )
1357 CPP0X( constexpr explicit atomic( bool __v__ )
1358 : atomic_bool( __v__ ) { } )
1359 CPP0X( atomic( const atomic& ) = delete; )
1360 atomic& operator =( const atomic& ) CPP0X(=delete);
1362 bool operator =( bool __v__ ) volatile
1363 { store( __v__ ); return __v__; }
1367 template<> struct atomic< void* > : atomic_address
1369 CPP0X( atomic() = default; )
1370 CPP0X( constexpr explicit atomic( void* __v__ )
1371 : atomic_address( __v__ ) { } )
1372 CPP0X( atomic( const atomic& ) = delete; )
1373 atomic& operator =( const atomic& ) CPP0X(=delete);
1375 void* operator =( void* __v__ ) volatile
1376 { store( __v__ ); return __v__; }
1380 template<> struct atomic< char > : atomic_char
1382 CPP0X( atomic() = default; )
1383 CPP0X( constexpr explicit atomic( char __v__ )
1384 : atomic_char( __v__ ) { } )
1385 CPP0X( atomic( const atomic& ) = delete; )
1386 atomic& operator =( const atomic& ) CPP0X(=delete);
1388 char operator =( char __v__ ) volatile
1389 { store( __v__ ); return __v__; }
1393 template<> struct atomic< signed char > : atomic_schar
1395 CPP0X( atomic() = default; )
1396 CPP0X( constexpr explicit atomic( signed char __v__ )
1397 : atomic_schar( __v__ ) { } )
1398 CPP0X( atomic( const atomic& ) = delete; )
1399 atomic& operator =( const atomic& ) CPP0X(=delete);
1401 signed char operator =( signed char __v__ ) volatile
1402 { store( __v__ ); return __v__; }
1406 template<> struct atomic< unsigned char > : atomic_uchar
1408 CPP0X( atomic() = default; )
1409 CPP0X( constexpr explicit atomic( unsigned char __v__ )
1410 : atomic_uchar( __v__ ) { } )
1411 CPP0X( atomic( const atomic& ) = delete; )
1412 atomic& operator =( const atomic& ) CPP0X(=delete);
1414 unsigned char operator =( unsigned char __v__ ) volatile
1415 { store( __v__ ); return __v__; }
1419 template<> struct atomic< short > : atomic_short
1421 CPP0X( atomic() = default; )
1422 CPP0X( constexpr explicit atomic( short __v__ )
1423 : atomic_short( __v__ ) { } )
1424 CPP0X( atomic( const atomic& ) = delete; )
1425 atomic& operator =( const atomic& ) CPP0X(=delete);
1427 short operator =( short __v__ ) volatile
1428 { store( __v__ ); return __v__; }
1432 template<> struct atomic< unsigned short > : atomic_ushort
1434 CPP0X( atomic() = default; )
1435 CPP0X( constexpr explicit atomic( unsigned short __v__ )
1436 : atomic_ushort( __v__ ) { } )
1437 CPP0X( atomic( const atomic& ) = delete; )
1438 atomic& operator =( const atomic& ) CPP0X(=delete);
1440 unsigned short operator =( unsigned short __v__ ) volatile
1441 { store( __v__ ); return __v__; }
1445 template<> struct atomic< int > : atomic_int
1447 CPP0X( atomic() = default; )
1448 CPP0X( constexpr explicit atomic( int __v__ )
1449 : atomic_int( __v__ ) { } )
1450 CPP0X( atomic( const atomic& ) = delete; )
1451 atomic& operator =( const atomic& ) CPP0X(=delete);
1453 int operator =( int __v__ ) volatile
1454 { store( __v__ ); return __v__; }
1458 template<> struct atomic< unsigned int > : atomic_uint
1460 CPP0X( atomic() = default; )
1461 CPP0X( constexpr explicit atomic( unsigned int __v__ )
1462 : atomic_uint( __v__ ) { } )
1463 CPP0X( atomic( const atomic& ) = delete; )
1464 atomic& operator =( const atomic& ) CPP0X(=delete);
1466 unsigned int operator =( unsigned int __v__ ) volatile
1467 { store( __v__ ); return __v__; }
1471 template<> struct atomic< long > : atomic_long
1473 CPP0X( atomic() = default; )
1474 CPP0X( constexpr explicit atomic( long __v__ )
1475 : atomic_long( __v__ ) { } )
1476 CPP0X( atomic( const atomic& ) = delete; )
1477 atomic& operator =( const atomic& ) CPP0X(=delete);
1479 long operator =( long __v__ ) volatile
1480 { store( __v__ ); return __v__; }
1484 template<> struct atomic< unsigned long > : atomic_ulong
1486 CPP0X( atomic() = default; )
1487 CPP0X( constexpr explicit atomic( unsigned long __v__ )
1488 : atomic_ulong( __v__ ) { } )
1489 CPP0X( atomic( const atomic& ) = delete; )
1490 atomic& operator =( const atomic& ) CPP0X(=delete);
1492 unsigned long operator =( unsigned long __v__ ) volatile
1493 { store( __v__ ); return __v__; }
1497 template<> struct atomic< long long > : atomic_llong
1499 CPP0X( atomic() = default; )
1500 CPP0X( constexpr explicit atomic( long long __v__ )
1501 : atomic_llong( __v__ ) { } )
1502 CPP0X( atomic( const atomic& ) = delete; )
1503 atomic& operator =( const atomic& ) CPP0X(=delete);
1505 long long operator =( long long __v__ ) volatile
1506 { store( __v__ ); return __v__; }
1510 template<> struct atomic< unsigned long long > : atomic_ullong
1512 CPP0X( atomic() = default; )
1513 CPP0X( constexpr explicit atomic( unsigned long long __v__ )
1514 : atomic_ullong( __v__ ) { } )
1515 CPP0X( atomic( const atomic& ) = delete; )
1516 atomic& operator =( const atomic& ) CPP0X(=delete);
1518 unsigned long long operator =( unsigned long long __v__ ) volatile
1519 { store( __v__ ); return __v__; }
1523 template<> struct atomic< wchar_t > : atomic_wchar_t
1525 CPP0X( atomic() = default; )
1526 CPP0X( constexpr explicit atomic( wchar_t __v__ )
1527 : atomic_wchar_t( __v__ ) { } )
1528 CPP0X( atomic( const atomic& ) = delete; )
1529 atomic& operator =( const atomic& ) CPP0X(=delete);
1531 wchar_t operator =( wchar_t __v__ ) volatile
1532 { store( __v__ ); return __v__; }
// ---- atomic_bool free-function API ----
// Implemented with the _ATOMIC_*_ statement-expression macros described
// earlier in the file; the non-_explicit forms default to seq_cst.
// NOTE(review): the body of atomic_is_lock_free (listing 1543) is missing
// from this excerpt.
1542 inline bool atomic_is_lock_free( const volatile atomic_bool* __a__ )
1545 inline bool atomic_load_explicit
1546 ( volatile atomic_bool* __a__, memory_order __x__ )
1547 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1549 inline bool atomic_load( volatile atomic_bool* __a__ )
1550 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1552 inline void atomic_store_explicit
1553 ( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
1554 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1556 inline void atomic_store
1557 ( volatile atomic_bool* __a__, bool __m__ )
1558 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
// Unconditional exchange: _ATOMIC_MODIFY_ with plain assignment as the op.
1560 inline bool atomic_swap_explicit
1561 ( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
1562 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1564 inline bool atomic_swap
1565 ( volatile atomic_bool* __a__, bool __m__ )
1566 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
// NOTE(review): the failure ordering __y__ is accepted but never forwarded —
// _ATOMIC_CMPSWP_ receives only the success order __x__.
1568 inline bool atomic_compare_swap_explicit
1569 ( volatile atomic_bool* __a__, bool* __e__, bool __m__,
1570 memory_order __x__, memory_order __y__ )
1571 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1573 inline bool atomic_compare_swap
1574 ( volatile atomic_bool* __a__, bool* __e__, bool __m__ )
1575 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1576 memory_order_seq_cst, memory_order_seq_cst ); }
1578 inline void atomic_fence
1579 ( const volatile atomic_bool* __a__, memory_order __x__ )
1580 { _ATOMIC_FENCE_( __a__, __x__ ); }
// ---- atomic_address free-function API (void* payload) ----
// NOTE(review): the body of atomic_is_lock_free (listing 1584) is missing
// from this excerpt.
1583 inline bool atomic_is_lock_free( const volatile atomic_address* __a__ )
1586 inline void* atomic_load_explicit
1587 ( volatile atomic_address* __a__, memory_order __x__ )
1588 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1590 inline void* atomic_load( volatile atomic_address* __a__ )
1591 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1593 inline void atomic_store_explicit
1594 ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
1595 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1597 inline void atomic_store
1598 ( volatile atomic_address* __a__, void* __m__ )
1599 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1601 inline void* atomic_swap_explicit
1602 ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
1603 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1605 inline void* atomic_swap
1606 ( volatile atomic_address* __a__, void* __m__ )
1607 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
// NOTE(review): failure order __y__ is not forwarded to _ATOMIC_CMPSWP_.
1609 inline bool atomic_compare_swap_explicit
1610 ( volatile atomic_address* __a__, void** __e__, void* __m__,
1611 memory_order __x__, memory_order __y__ )
1612 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1614 inline bool atomic_compare_swap
1615 ( volatile atomic_address* __a__, void** __e__, void* __m__ )
1616 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1617 memory_order_seq_cst, memory_order_seq_cst ); }
1619 inline void atomic_fence
1620 ( const volatile atomic_address* __a__, memory_order __x__ )
1621 { _ATOMIC_FENCE_( __a__, __x__ ); }
// ---- atomic_char free-function API (same pattern as atomic_bool above;
// is_lock_free body missing from excerpt; __y__ unused in CAS) ----
1624 inline bool atomic_is_lock_free( const volatile atomic_char* __a__ )
1627 inline char atomic_load_explicit
1628 ( volatile atomic_char* __a__, memory_order __x__ )
1629 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1631 inline char atomic_load( volatile atomic_char* __a__ )
1632 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1634 inline void atomic_store_explicit
1635 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
1636 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1638 inline void atomic_store
1639 ( volatile atomic_char* __a__, char __m__ )
1640 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1642 inline char atomic_swap_explicit
1643 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
1644 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1646 inline char atomic_swap
1647 ( volatile atomic_char* __a__, char __m__ )
1648 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
1650 inline bool atomic_compare_swap_explicit
1651 ( volatile atomic_char* __a__, char* __e__, char __m__,
1652 memory_order __x__, memory_order __y__ )
1653 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1655 inline bool atomic_compare_swap
1656 ( volatile atomic_char* __a__, char* __e__, char __m__ )
1657 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1658 memory_order_seq_cst, memory_order_seq_cst ); }
1660 inline void atomic_fence
1661 ( const volatile atomic_char* __a__, memory_order __x__ )
1662 { _ATOMIC_FENCE_( __a__, __x__ ); }
// ---- atomic_schar free-function API (same pattern as atomic_bool above;
// is_lock_free body missing from excerpt; __y__ unused in CAS) ----
1665 inline bool atomic_is_lock_free( const volatile atomic_schar* __a__ )
1668 inline signed char atomic_load_explicit
1669 ( volatile atomic_schar* __a__, memory_order __x__ )
1670 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1672 inline signed char atomic_load( volatile atomic_schar* __a__ )
1673 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1675 inline void atomic_store_explicit
1676 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
1677 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1679 inline void atomic_store
1680 ( volatile atomic_schar* __a__, signed char __m__ )
1681 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1683 inline signed char atomic_swap_explicit
1684 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
1685 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1687 inline signed char atomic_swap
1688 ( volatile atomic_schar* __a__, signed char __m__ )
1689 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
1691 inline bool atomic_compare_swap_explicit
1692 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__,
1693 memory_order __x__, memory_order __y__ )
1694 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1696 inline bool atomic_compare_swap
1697 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__ )
1698 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1699 memory_order_seq_cst, memory_order_seq_cst ); }
1701 inline void atomic_fence
1702 ( const volatile atomic_schar* __a__, memory_order __x__ )
1703 { _ATOMIC_FENCE_( __a__, __x__ ); }
// ---- atomic_uchar free-function API (same pattern as atomic_bool above;
// is_lock_free body missing from excerpt; __y__ unused in CAS) ----
1706 inline bool atomic_is_lock_free( const volatile atomic_uchar* __a__ )
1709 inline unsigned char atomic_load_explicit
1710 ( volatile atomic_uchar* __a__, memory_order __x__ )
1711 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1713 inline unsigned char atomic_load( volatile atomic_uchar* __a__ )
1714 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1716 inline void atomic_store_explicit
1717 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
1718 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1720 inline void atomic_store
1721 ( volatile atomic_uchar* __a__, unsigned char __m__ )
1722 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1724 inline unsigned char atomic_swap_explicit
1725 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
1726 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1728 inline unsigned char atomic_swap
1729 ( volatile atomic_uchar* __a__, unsigned char __m__ )
1730 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
1732 inline bool atomic_compare_swap_explicit
1733 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__,
1734 memory_order __x__, memory_order __y__ )
1735 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1737 inline bool atomic_compare_swap
1738 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__ )
1739 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1740 memory_order_seq_cst, memory_order_seq_cst ); }
1742 inline void atomic_fence
1743 ( const volatile atomic_uchar* __a__, memory_order __x__ )
1744 { _ATOMIC_FENCE_( __a__, __x__ ); }
// ---- atomic_short free-function API (same pattern as atomic_bool above;
// is_lock_free body missing from excerpt; __y__ unused in CAS) ----
1747 inline bool atomic_is_lock_free( const volatile atomic_short* __a__ )
1750 inline short atomic_load_explicit
1751 ( volatile atomic_short* __a__, memory_order __x__ )
1752 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1754 inline short atomic_load( volatile atomic_short* __a__ )
1755 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1757 inline void atomic_store_explicit
1758 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
1759 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1761 inline void atomic_store
1762 ( volatile atomic_short* __a__, short __m__ )
1763 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1765 inline short atomic_swap_explicit
1766 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
1767 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1769 inline short atomic_swap
1770 ( volatile atomic_short* __a__, short __m__ )
1771 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
1773 inline bool atomic_compare_swap_explicit
1774 ( volatile atomic_short* __a__, short* __e__, short __m__,
1775 memory_order __x__, memory_order __y__ )
1776 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1778 inline bool atomic_compare_swap
1779 ( volatile atomic_short* __a__, short* __e__, short __m__ )
1780 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1781 memory_order_seq_cst, memory_order_seq_cst ); }
1783 inline void atomic_fence
1784 ( const volatile atomic_short* __a__, memory_order __x__ )
1785 { _ATOMIC_FENCE_( __a__, __x__ ); }
// ---- atomic_ushort free-function API (same pattern as atomic_bool above;
// is_lock_free body missing from excerpt; __y__ unused in CAS) ----
1788 inline bool atomic_is_lock_free( const volatile atomic_ushort* __a__ )
1791 inline unsigned short atomic_load_explicit
1792 ( volatile atomic_ushort* __a__, memory_order __x__ )
1793 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1795 inline unsigned short atomic_load( volatile atomic_ushort* __a__ )
1796 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1798 inline void atomic_store_explicit
1799 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
1800 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1802 inline void atomic_store
1803 ( volatile atomic_ushort* __a__, unsigned short __m__ )
1804 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1806 inline unsigned short atomic_swap_explicit
1807 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
1808 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1810 inline unsigned short atomic_swap
1811 ( volatile atomic_ushort* __a__, unsigned short __m__ )
1812 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
1814 inline bool atomic_compare_swap_explicit
1815 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__,
1816 memory_order __x__, memory_order __y__ )
1817 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1819 inline bool atomic_compare_swap
1820 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__ )
1821 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1822 memory_order_seq_cst, memory_order_seq_cst ); }
1824 inline void atomic_fence
1825 ( const volatile atomic_ushort* __a__, memory_order __x__ )
1826 { _ATOMIC_FENCE_( __a__, __x__ ); }
// ---- atomic_int free-function API (same pattern as atomic_bool above;
// is_lock_free body missing from excerpt; __y__ unused in CAS) ----
1829 inline bool atomic_is_lock_free( const volatile atomic_int* __a__ )
1832 inline int atomic_load_explicit
1833 ( volatile atomic_int* __a__, memory_order __x__ )
1834 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1836 inline int atomic_load( volatile atomic_int* __a__ )
1837 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1839 inline void atomic_store_explicit
1840 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
1841 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1843 inline void atomic_store
1844 ( volatile atomic_int* __a__, int __m__ )
1845 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1847 inline int atomic_swap_explicit
1848 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
1849 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1851 inline int atomic_swap
1852 ( volatile atomic_int* __a__, int __m__ )
1853 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
1855 inline bool atomic_compare_swap_explicit
1856 ( volatile atomic_int* __a__, int* __e__, int __m__,
1857 memory_order __x__, memory_order __y__ )
1858 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1860 inline bool atomic_compare_swap
1861 ( volatile atomic_int* __a__, int* __e__, int __m__ )
1862 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1863 memory_order_seq_cst, memory_order_seq_cst ); }
1865 inline void atomic_fence
1866 ( const volatile atomic_int* __a__, memory_order __x__ )
1867 { _ATOMIC_FENCE_( __a__, __x__ ); }
1870 inline bool atomic_is_lock_free( const volatile atomic_uint* __a__ )
1873 inline unsigned int atomic_load_explicit
1874 ( volatile atomic_uint* __a__, memory_order __x__ )
1875 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1877 inline unsigned int atomic_load( volatile atomic_uint* __a__ )
1878 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1880 inline void atomic_store_explicit
1881 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
1882 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1884 inline void atomic_store
1885 ( volatile atomic_uint* __a__, unsigned int __m__ )
1886 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1888 inline unsigned int atomic_swap_explicit
1889 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
1890 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1892 inline unsigned int atomic_swap
1893 ( volatile atomic_uint* __a__, unsigned int __m__ )
1894 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
1896 inline bool atomic_compare_swap_explicit
1897 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__,
1898 memory_order __x__, memory_order __y__ )
1899 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1901 inline bool atomic_compare_swap
1902 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__ )
1903 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1904 memory_order_seq_cst, memory_order_seq_cst ); }
1906 inline void atomic_fence
1907 ( const volatile atomic_uint* __a__, memory_order __x__ )
1908 { _ATOMIC_FENCE_( __a__, __x__ ); }
1911 inline bool atomic_is_lock_free( const volatile atomic_long* __a__ )
1914 inline long atomic_load_explicit
1915 ( volatile atomic_long* __a__, memory_order __x__ )
1916 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1918 inline long atomic_load( volatile atomic_long* __a__ )
1919 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1921 inline void atomic_store_explicit
1922 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
1923 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1925 inline void atomic_store
1926 ( volatile atomic_long* __a__, long __m__ )
1927 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1929 inline long atomic_swap_explicit
1930 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
1931 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1933 inline long atomic_swap
1934 ( volatile atomic_long* __a__, long __m__ )
1935 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
1937 inline bool atomic_compare_swap_explicit
1938 ( volatile atomic_long* __a__, long* __e__, long __m__,
1939 memory_order __x__, memory_order __y__ )
1940 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1942 inline bool atomic_compare_swap
1943 ( volatile atomic_long* __a__, long* __e__, long __m__ )
1944 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1945 memory_order_seq_cst, memory_order_seq_cst ); }
1947 inline void atomic_fence
1948 ( const volatile atomic_long* __a__, memory_order __x__ )
1949 { _ATOMIC_FENCE_( __a__, __x__ ); }
1952 inline bool atomic_is_lock_free( const volatile atomic_ulong* __a__ )
1955 inline unsigned long atomic_load_explicit
1956 ( volatile atomic_ulong* __a__, memory_order __x__ )
1957 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1959 inline unsigned long atomic_load( volatile atomic_ulong* __a__ )
1960 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1962 inline void atomic_store_explicit
1963 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
1964 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1966 inline void atomic_store
1967 ( volatile atomic_ulong* __a__, unsigned long __m__ )
1968 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1970 inline unsigned long atomic_swap_explicit
1971 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
1972 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1974 inline unsigned long atomic_swap
1975 ( volatile atomic_ulong* __a__, unsigned long __m__ )
1976 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
1978 inline bool atomic_compare_swap_explicit
1979 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__,
1980 memory_order __x__, memory_order __y__ )
1981 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1983 inline bool atomic_compare_swap
1984 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__ )
1985 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1986 memory_order_seq_cst, memory_order_seq_cst ); }
1988 inline void atomic_fence
1989 ( const volatile atomic_ulong* __a__, memory_order __x__ )
1990 { _ATOMIC_FENCE_( __a__, __x__ ); }
1993 inline bool atomic_is_lock_free( const volatile atomic_llong* __a__ )
1996 inline long long atomic_load_explicit
1997 ( volatile atomic_llong* __a__, memory_order __x__ )
1998 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2000 inline long long atomic_load( volatile atomic_llong* __a__ )
2001 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2003 inline void atomic_store_explicit
2004 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2005 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2007 inline void atomic_store
2008 ( volatile atomic_llong* __a__, long long __m__ )
2009 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2011 inline long long atomic_swap_explicit
2012 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2013 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2015 inline long long atomic_swap
2016 ( volatile atomic_llong* __a__, long long __m__ )
2017 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
2019 inline bool atomic_compare_swap_explicit
2020 ( volatile atomic_llong* __a__, long long* __e__, long long __m__,
2021 memory_order __x__, memory_order __y__ )
2022 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2024 inline bool atomic_compare_swap
2025 ( volatile atomic_llong* __a__, long long* __e__, long long __m__ )
2026 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
2027 memory_order_seq_cst, memory_order_seq_cst ); }
2029 inline void atomic_fence
2030 ( const volatile atomic_llong* __a__, memory_order __x__ )
2031 { _ATOMIC_FENCE_( __a__, __x__ ); }
2034 inline bool atomic_is_lock_free( const volatile atomic_ullong* __a__ )
2037 inline unsigned long long atomic_load_explicit
2038 ( volatile atomic_ullong* __a__, memory_order __x__ )
2039 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2041 inline unsigned long long atomic_load( volatile atomic_ullong* __a__ )
2042 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2044 inline void atomic_store_explicit
2045 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2046 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2048 inline void atomic_store
2049 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2050 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2052 inline unsigned long long atomic_swap_explicit
2053 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2054 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2056 inline unsigned long long atomic_swap
2057 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2058 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
2060 inline bool atomic_compare_swap_explicit
2061 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__,
2062 memory_order __x__, memory_order __y__ )
2063 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2065 inline bool atomic_compare_swap
2066 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__ )
2067 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
2068 memory_order_seq_cst, memory_order_seq_cst ); }
2070 inline void atomic_fence
2071 ( const volatile atomic_ullong* __a__, memory_order __x__ )
2072 { _ATOMIC_FENCE_( __a__, __x__ ); }
2075 inline bool atomic_is_lock_free( const volatile atomic_wchar_t* __a__ )
2078 inline wchar_t atomic_load_explicit
2079 ( volatile atomic_wchar_t* __a__, memory_order __x__ )
2080 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2082 inline wchar_t atomic_load( volatile atomic_wchar_t* __a__ )
2083 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2085 inline void atomic_store_explicit
2086 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2087 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2089 inline void atomic_store
2090 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2091 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2093 inline wchar_t atomic_swap_explicit
2094 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2095 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2097 inline wchar_t atomic_swap
2098 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2099 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
2101 inline bool atomic_compare_swap_explicit
2102 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__,
2103 memory_order __x__, memory_order __y__ )
2104 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2106 inline bool atomic_compare_swap
2107 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__ )
2108 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
2109 memory_order_seq_cst, memory_order_seq_cst ); }
2111 inline void atomic_fence
2112 ( const volatile atomic_wchar_t* __a__, memory_order __x__ )
2113 { _ATOMIC_FENCE_( __a__, __x__ ); }
2116 inline void* atomic_fetch_add_explicit
2117 ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
2118 { void* volatile* __p__ = &((__a__)->__f__);
2119 volatile atomic_flag* __g__ = __atomic_flag_for_address__( __p__ );
2120 __atomic_flag_wait_explicit__( __g__, __x__ );
2121 void* __r__ = *__p__;
2122 *__p__ = (void*)((char*)(*__p__) + __m__);
2123 atomic_flag_clear_explicit( __g__, __x__ );
2126 inline void* atomic_fetch_add
2127 ( volatile atomic_address* __a__, ptrdiff_t __m__ )
2128 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2131 inline void* atomic_fetch_sub_explicit
2132 ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
2133 { void* volatile* __p__ = &((__a__)->__f__);
2134 volatile atomic_flag* __g__ = __atomic_flag_for_address__( __p__ );
2135 __atomic_flag_wait_explicit__( __g__, __x__ );
2136 void* __r__ = *__p__;
2137 *__p__ = (void*)((char*)(*__p__) - __m__);
2138 atomic_flag_clear_explicit( __g__, __x__ );
2141 inline void* atomic_fetch_sub
2142 ( volatile atomic_address* __a__, ptrdiff_t __m__ )
2143 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2146 inline char atomic_fetch_add_explicit
2147 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2148 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2150 inline char atomic_fetch_add
2151 ( volatile atomic_char* __a__, char __m__ )
2152 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2155 inline char atomic_fetch_sub_explicit
2156 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2157 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2159 inline char atomic_fetch_sub
2160 ( volatile atomic_char* __a__, char __m__ )
2161 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2164 inline char atomic_fetch_and_explicit
2165 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2166 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2168 inline char atomic_fetch_and
2169 ( volatile atomic_char* __a__, char __m__ )
2170 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2173 inline char atomic_fetch_or_explicit
2174 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2175 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2177 inline char atomic_fetch_or
2178 ( volatile atomic_char* __a__, char __m__ )
2179 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2182 inline char atomic_fetch_xor_explicit
2183 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2184 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2186 inline char atomic_fetch_xor
2187 ( volatile atomic_char* __a__, char __m__ )
2188 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2191 inline signed char atomic_fetch_add_explicit
2192 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2193 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2195 inline signed char atomic_fetch_add
2196 ( volatile atomic_schar* __a__, signed char __m__ )
2197 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2200 inline signed char atomic_fetch_sub_explicit
2201 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2202 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2204 inline signed char atomic_fetch_sub
2205 ( volatile atomic_schar* __a__, signed char __m__ )
2206 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2209 inline signed char atomic_fetch_and_explicit
2210 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2211 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2213 inline signed char atomic_fetch_and
2214 ( volatile atomic_schar* __a__, signed char __m__ )
2215 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2218 inline signed char atomic_fetch_or_explicit
2219 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2220 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2222 inline signed char atomic_fetch_or
2223 ( volatile atomic_schar* __a__, signed char __m__ )
2224 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2227 inline signed char atomic_fetch_xor_explicit
2228 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2229 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2231 inline signed char atomic_fetch_xor
2232 ( volatile atomic_schar* __a__, signed char __m__ )
2233 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2236 inline unsigned char atomic_fetch_add_explicit
2237 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2238 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2240 inline unsigned char atomic_fetch_add
2241 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2242 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2245 inline unsigned char atomic_fetch_sub_explicit
2246 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2247 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2249 inline unsigned char atomic_fetch_sub
2250 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2251 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2254 inline unsigned char atomic_fetch_and_explicit
2255 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2256 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2258 inline unsigned char atomic_fetch_and
2259 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2260 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2263 inline unsigned char atomic_fetch_or_explicit
2264 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2265 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2267 inline unsigned char atomic_fetch_or
2268 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2269 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2272 inline unsigned char atomic_fetch_xor_explicit
2273 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2274 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2276 inline unsigned char atomic_fetch_xor
2277 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2278 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2281 inline short atomic_fetch_add_explicit
2282 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2283 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2285 inline short atomic_fetch_add
2286 ( volatile atomic_short* __a__, short __m__ )
2287 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2290 inline short atomic_fetch_sub_explicit
2291 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2292 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2294 inline short atomic_fetch_sub
2295 ( volatile atomic_short* __a__, short __m__ )
2296 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2299 inline short atomic_fetch_and_explicit
2300 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2301 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2303 inline short atomic_fetch_and
2304 ( volatile atomic_short* __a__, short __m__ )
2305 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2308 inline short atomic_fetch_or_explicit
2309 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2310 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2312 inline short atomic_fetch_or
2313 ( volatile atomic_short* __a__, short __m__ )
2314 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2317 inline short atomic_fetch_xor_explicit
2318 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2319 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2321 inline short atomic_fetch_xor
2322 ( volatile atomic_short* __a__, short __m__ )
2323 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2326 inline unsigned short atomic_fetch_add_explicit
2327 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2328 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2330 inline unsigned short atomic_fetch_add
2331 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2332 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2335 inline unsigned short atomic_fetch_sub_explicit
2336 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2337 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2339 inline unsigned short atomic_fetch_sub
2340 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2341 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2344 inline unsigned short atomic_fetch_and_explicit
2345 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2346 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2348 inline unsigned short atomic_fetch_and
2349 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2350 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2353 inline unsigned short atomic_fetch_or_explicit
2354 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2355 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2357 inline unsigned short atomic_fetch_or
2358 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2359 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2362 inline unsigned short atomic_fetch_xor_explicit
2363 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2364 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2366 inline unsigned short atomic_fetch_xor
2367 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2368 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2371 inline int atomic_fetch_add_explicit
2372 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2373 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2375 inline int atomic_fetch_add
2376 ( volatile atomic_int* __a__, int __m__ )
2377 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2380 inline int atomic_fetch_sub_explicit
2381 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2382 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2384 inline int atomic_fetch_sub
2385 ( volatile atomic_int* __a__, int __m__ )
2386 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2389 inline int atomic_fetch_and_explicit
2390 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2391 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2393 inline int atomic_fetch_and
2394 ( volatile atomic_int* __a__, int __m__ )
2395 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2398 inline int atomic_fetch_or_explicit
2399 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2400 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2402 inline int atomic_fetch_or
2403 ( volatile atomic_int* __a__, int __m__ )
2404 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2407 inline int atomic_fetch_xor_explicit
2408 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2409 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2411 inline int atomic_fetch_xor
2412 ( volatile atomic_int* __a__, int __m__ )
2413 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2416 inline unsigned int atomic_fetch_add_explicit
2417 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2418 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2420 inline unsigned int atomic_fetch_add
2421 ( volatile atomic_uint* __a__, unsigned int __m__ )
2422 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2425 inline unsigned int atomic_fetch_sub_explicit
2426 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2427 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2429 inline unsigned int atomic_fetch_sub
2430 ( volatile atomic_uint* __a__, unsigned int __m__ )
2431 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2434 inline unsigned int atomic_fetch_and_explicit
2435 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2436 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2438 inline unsigned int atomic_fetch_and
2439 ( volatile atomic_uint* __a__, unsigned int __m__ )
2440 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2443 inline unsigned int atomic_fetch_or_explicit
2444 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2445 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2447 inline unsigned int atomic_fetch_or
2448 ( volatile atomic_uint* __a__, unsigned int __m__ )
2449 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2452 inline unsigned int atomic_fetch_xor_explicit
2453 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2454 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2456 inline unsigned int atomic_fetch_xor
2457 ( volatile atomic_uint* __a__, unsigned int __m__ )
2458 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2461 inline long atomic_fetch_add_explicit
2462 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2463 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2465 inline long atomic_fetch_add
2466 ( volatile atomic_long* __a__, long __m__ )
2467 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2470 inline long atomic_fetch_sub_explicit
2471 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2472 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2474 inline long atomic_fetch_sub
2475 ( volatile atomic_long* __a__, long __m__ )
2476 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2479 inline long atomic_fetch_and_explicit
2480 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2481 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2483 inline long atomic_fetch_and
2484 ( volatile atomic_long* __a__, long __m__ )
2485 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2488 inline long atomic_fetch_or_explicit
2489 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2490 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2492 inline long atomic_fetch_or
2493 ( volatile atomic_long* __a__, long __m__ )
2494 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2497 inline long atomic_fetch_xor_explicit
2498 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2499 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2501 inline long atomic_fetch_xor
2502 ( volatile atomic_long* __a__, long __m__ )
2503 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2506 inline unsigned long atomic_fetch_add_explicit
2507 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2508 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2510 inline unsigned long atomic_fetch_add
2511 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2512 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2515 inline unsigned long atomic_fetch_sub_explicit
2516 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2517 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2519 inline unsigned long atomic_fetch_sub
2520 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2521 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2524 inline unsigned long atomic_fetch_and_explicit
2525 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2526 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2528 inline unsigned long atomic_fetch_and
2529 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2530 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2533 inline unsigned long atomic_fetch_or_explicit
2534 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2535 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2537 inline unsigned long atomic_fetch_or
2538 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2539 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2542 inline unsigned long atomic_fetch_xor_explicit
2543 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2544 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2546 inline unsigned long atomic_fetch_xor
2547 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2548 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2551 inline long long atomic_fetch_add_explicit
2552 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2553 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2555 inline long long atomic_fetch_add
2556 ( volatile atomic_llong* __a__, long long __m__ )
2557 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2560 inline long long atomic_fetch_sub_explicit
2561 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2562 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2564 inline long long atomic_fetch_sub
2565 ( volatile atomic_llong* __a__, long long __m__ )
2566 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2569 inline long long atomic_fetch_and_explicit
2570 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2571 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2573 inline long long atomic_fetch_and
2574 ( volatile atomic_llong* __a__, long long __m__ )
2575 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2578 inline long long atomic_fetch_or_explicit
2579 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2580 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2582 inline long long atomic_fetch_or
2583 ( volatile atomic_llong* __a__, long long __m__ )
2584 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2587 inline long long atomic_fetch_xor_explicit
2588 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2589 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2591 inline long long atomic_fetch_xor
2592 ( volatile atomic_llong* __a__, long long __m__ )
2593 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2596 inline unsigned long long atomic_fetch_add_explicit
2597 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2598 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2600 inline unsigned long long atomic_fetch_add
2601 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2602 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2605 inline unsigned long long atomic_fetch_sub_explicit
2606 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2607 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2609 inline unsigned long long atomic_fetch_sub
2610 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2611 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2614 inline unsigned long long atomic_fetch_and_explicit
2615 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2616 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2618 inline unsigned long long atomic_fetch_and
2619 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2620 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2623 inline unsigned long long atomic_fetch_or_explicit
2624 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2625 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2627 inline unsigned long long atomic_fetch_or
2628 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2629 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2632 inline unsigned long long atomic_fetch_xor_explicit
2633 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2634 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2636 inline unsigned long long atomic_fetch_xor
2637 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2638 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2641 inline wchar_t atomic_fetch_add_explicit
2642 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2643 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2645 inline wchar_t atomic_fetch_add
2646 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2647 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2650 inline wchar_t atomic_fetch_sub_explicit
2651 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2652 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2654 inline wchar_t atomic_fetch_sub
2655 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2656 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2659 inline wchar_t atomic_fetch_and_explicit
2660 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2661 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2663 inline wchar_t atomic_fetch_and
2664 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2665 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2668 inline wchar_t atomic_fetch_or_explicit
2669 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2670 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2672 inline wchar_t atomic_fetch_or
2673 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2674 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2677 inline wchar_t atomic_fetch_xor_explicit
2678 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2679 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2681 inline wchar_t atomic_fetch_xor
2682 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2683 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2689 #define atomic_is_lock_free( __a__ ) \
/* Generic (type-oblivious) operation macros.  Each forwards to one of
   the _ATOMIC_*_ implementation macros defined earlier in this file,
   which rely on GNU statement expressions and __typeof__.  The forms
   without the _explicit suffix default the ordering to
   memory_order_seq_cst. */

#define atomic_load( __a__ ) \
_ATOMIC_LOAD_( __a__, memory_order_seq_cst )

#define atomic_load_explicit( __a__, __x__ ) \
_ATOMIC_LOAD_( __a__, __x__ )

#define atomic_store( __a__, __m__ ) \
_ATOMIC_STORE_( __a__, __m__, memory_order_seq_cst )

#define atomic_store_explicit( __a__, __m__, __x__ ) \
_ATOMIC_STORE_( __a__, __m__, __x__ )

/* swap is expressed as a plain assignment inside _ATOMIC_MODIFY_. */
#define atomic_swap( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, =, __m__, memory_order_seq_cst )

#define atomic_swap_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, =, __m__, __x__ )

#define atomic_compare_swap( __a__, __e__, __m__ ) \
_ATOMIC_CMPSWP_( __a__, __e__, __m__, memory_order_seq_cst )

/* NOTE(review): the failure ordering __y__ is accepted but ignored --
   _ATOMIC_CMPSWP_ takes only the success ordering in this example
   implementation. */
#define atomic_compare_swap_explicit( __a__, __e__, __m__, __x__, __y__ ) \
_ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ )

/* Wrapped in a statement expression so the expansion is a single
   void-valued expression. */
#define atomic_fence( __a__, __x__ ) \
({ _ATOMIC_FENCE_( __a__, __x__ ); })

/* Arithmetic / bitwise read-modify-write operations; each yields the
   result of _ATOMIC_MODIFY_. */

#define atomic_fetch_add_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, +=, __m__, __x__ )

#define atomic_fetch_add( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, +=, __m__, memory_order_seq_cst )

#define atomic_fetch_sub_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, -=, __m__, __x__ )

#define atomic_fetch_sub( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, -=, __m__, memory_order_seq_cst )

#define atomic_fetch_and_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, &=, __m__, __x__ )

#define atomic_fetch_and( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, &=, __m__, memory_order_seq_cst )

#define atomic_fetch_or_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, |=, __m__, __x__ )

#define atomic_fetch_or( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, |=, __m__, memory_order_seq_cst )

#define atomic_fetch_xor_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ )

#define atomic_fetch_xor( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, ^=, __m__, memory_order_seq_cst )
2761 inline bool atomic_bool::is_lock_free() const volatile
/* atomic_bool members: thin forwarders to the generic operation
   macros, passing `this` as the atomic object. */

inline void atomic_bool::store
( bool __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline bool atomic_bool::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline bool atomic_bool::swap
( bool __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_bool::compare_swap
( bool& __e__, bool __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order form: the failure ordering is derived from __x__
   (acq_rel -> acquire, release -> relaxed, otherwise __x__). */
inline bool atomic_bool::compare_swap
( bool& __e__, bool __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

/* `return` of a void expression: atomic_fence expands to a void
   statement expression, which is valid in a void function in C++. */
inline void atomic_bool::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
2792 inline bool atomic_address::is_lock_free() const volatile
/* atomic_address members (value type void*): forwarders to the
   generic operation macros, passing `this`. */

inline void atomic_address::store
( void* __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline void* atomic_address::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline void* atomic_address::swap
( void* __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_address::compare_swap
( void*& __e__, void* __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order form: failure ordering derived from __x__
   (acq_rel -> acquire, release -> relaxed, otherwise __x__). */
inline bool atomic_address::compare_swap
( void*& __e__, void* __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_address::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
2823 inline bool atomic_char::is_lock_free() const volatile
/* atomic_char members: forwarders to the generic operation macros. */

inline void atomic_char::store
( char __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline char atomic_char::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline char atomic_char::swap
( char __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_char::compare_swap
( char& __e__, char __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order form: failure ordering derived from __x__
   (acq_rel -> acquire, release -> relaxed, otherwise __x__). */
inline bool atomic_char::compare_swap
( char& __e__, char __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_char::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
2854 inline bool atomic_schar::is_lock_free() const volatile
/* atomic_schar members: forwarders to the generic operation macros. */

inline void atomic_schar::store
( signed char __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline signed char atomic_schar::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline signed char atomic_schar::swap
( signed char __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_schar::compare_swap
( signed char& __e__, signed char __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order form: failure ordering derived from __x__
   (acq_rel -> acquire, release -> relaxed, otherwise __x__). */
inline bool atomic_schar::compare_swap
( signed char& __e__, signed char __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_schar::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
2885 inline bool atomic_uchar::is_lock_free() const volatile
/* atomic_uchar members: forwarders to the generic operation macros. */

inline void atomic_uchar::store
( unsigned char __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline unsigned char atomic_uchar::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline unsigned char atomic_uchar::swap
( unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_uchar::compare_swap
( unsigned char& __e__, unsigned char __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order form: failure ordering derived from __x__
   (acq_rel -> acquire, release -> relaxed, otherwise __x__). */
inline bool atomic_uchar::compare_swap
( unsigned char& __e__, unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_uchar::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
2916 inline bool atomic_short::is_lock_free() const volatile
/* atomic_short members: forwarders to the generic operation macros. */

inline void atomic_short::store
( short __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline short atomic_short::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline short atomic_short::swap
( short __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_short::compare_swap
( short& __e__, short __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order form: failure ordering derived from __x__
   (acq_rel -> acquire, release -> relaxed, otherwise __x__). */
inline bool atomic_short::compare_swap
( short& __e__, short __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_short::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
2947 inline bool atomic_ushort::is_lock_free() const volatile
/* atomic_ushort members: forwarders to the generic operation macros. */

inline void atomic_ushort::store
( unsigned short __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline unsigned short atomic_ushort::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline unsigned short atomic_ushort::swap
( unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_ushort::compare_swap
( unsigned short& __e__, unsigned short __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order form: failure ordering derived from __x__
   (acq_rel -> acquire, release -> relaxed, otherwise __x__). */
inline bool atomic_ushort::compare_swap
( unsigned short& __e__, unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_ushort::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
2978 inline bool atomic_int::is_lock_free() const volatile
/* atomic_int members: forwarders to the generic operation macros. */

inline void atomic_int::store
( int __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline int atomic_int::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline int atomic_int::swap
( int __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_int::compare_swap
( int& __e__, int __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order form: failure ordering derived from __x__
   (acq_rel -> acquire, release -> relaxed, otherwise __x__). */
inline bool atomic_int::compare_swap
( int& __e__, int __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_int::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3009 inline bool atomic_uint::is_lock_free() const volatile
/* atomic_uint members: forwarders to the generic operation macros. */

inline void atomic_uint::store
( unsigned int __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline unsigned int atomic_uint::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline unsigned int atomic_uint::swap
( unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_uint::compare_swap
( unsigned int& __e__, unsigned int __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order form: failure ordering derived from __x__
   (acq_rel -> acquire, release -> relaxed, otherwise __x__). */
inline bool atomic_uint::compare_swap
( unsigned int& __e__, unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_uint::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3040 inline bool atomic_long::is_lock_free() const volatile
/* atomic_long members: forwarders to the generic operation macros. */

inline void atomic_long::store
( long __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline long atomic_long::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline long atomic_long::swap
( long __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_long::compare_swap
( long& __e__, long __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order form: failure ordering derived from __x__
   (acq_rel -> acquire, release -> relaxed, otherwise __x__). */
inline bool atomic_long::compare_swap
( long& __e__, long __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_long::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3071 inline bool atomic_ulong::is_lock_free() const volatile
/* atomic_ulong members: forwarders to the generic operation macros. */

inline void atomic_ulong::store
( unsigned long __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline unsigned long atomic_ulong::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline unsigned long atomic_ulong::swap
( unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_ulong::compare_swap
( unsigned long& __e__, unsigned long __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order form: failure ordering derived from __x__
   (acq_rel -> acquire, release -> relaxed, otherwise __x__). */
inline bool atomic_ulong::compare_swap
( unsigned long& __e__, unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_ulong::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3102 inline bool atomic_llong::is_lock_free() const volatile
/* atomic_llong members: forwarders to the generic operation macros. */

inline void atomic_llong::store
( long long __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline long long atomic_llong::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline long long atomic_llong::swap
( long long __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_llong::compare_swap
( long long& __e__, long long __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order form: failure ordering derived from __x__
   (acq_rel -> acquire, release -> relaxed, otherwise __x__). */
inline bool atomic_llong::compare_swap
( long long& __e__, long long __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_llong::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3133 inline bool atomic_ullong::is_lock_free() const volatile
/* atomic_ullong members: forwarders to the generic operation macros. */

inline void atomic_ullong::store
( unsigned long long __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline unsigned long long atomic_ullong::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline unsigned long long atomic_ullong::swap
( unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_ullong::compare_swap
( unsigned long long& __e__, unsigned long long __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order form: failure ordering derived from __x__
   (acq_rel -> acquire, release -> relaxed, otherwise __x__). */
inline bool atomic_ullong::compare_swap
( unsigned long long& __e__, unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_ullong::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3164 inline bool atomic_wchar_t::is_lock_free() const volatile
/* atomic_wchar_t members: forwarders to the generic operation macros. */

inline void atomic_wchar_t::store
( wchar_t __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline wchar_t atomic_wchar_t::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline wchar_t atomic_wchar_t::swap
( wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_wchar_t::compare_swap
( wchar_t& __e__, wchar_t __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order form: failure ordering derived from __x__
   (acq_rel -> acquire, release -> relaxed, otherwise __x__). */
inline bool atomic_wchar_t::compare_swap
( wchar_t& __e__, wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_wchar_t::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3195 template< typename T >
3196 inline bool atomic<T>::is_lock_free() const volatile
/* Generic atomic<T> members: these call the _ATOMIC_*_ implementation
   macros directly (not the atomic_* wrappers). */

template< typename T >
inline void atomic<T>::store( T __v__, memory_order __x__ ) volatile
{ _ATOMIC_STORE_( this, __v__, __x__ ); }

template< typename T >
inline T atomic<T>::load( memory_order __x__ ) volatile
{ return _ATOMIC_LOAD_( this, __x__ ); }

/* swap expressed as plain assignment inside _ATOMIC_MODIFY_. */
template< typename T >
inline T atomic<T>::swap( T __v__, memory_order __x__ ) volatile
{ return _ATOMIC_MODIFY_( this, =, __v__, __x__ ); }

/* NOTE(review): the failure ordering __y__ is accepted but unused --
   _ATOMIC_CMPSWP_ takes only the success ordering in this example
   implementation. */
template< typename T >
inline bool atomic<T>::compare_swap
( T& __r__, T __v__, memory_order __x__, memory_order __y__ ) volatile
{ return _ATOMIC_CMPSWP_( this, &__r__, __v__, __x__ ); }

/* Single-order form: failure ordering derived from __x__
   (acq_rel -> acquire, release -> relaxed, otherwise __x__). */
template< typename T >
inline bool atomic<T>::compare_swap
( T& __r__, T __v__, memory_order __x__ ) volatile
{ return compare_swap( __r__, __v__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
/* Out-of-line definitions of the fetch_* members.  Each forwards to
   the corresponding atomic_fetch_*_explicit operation (by this point
   a macro wrapping _ATOMIC_MODIFY_), passing `this`, and returns its
   result -- the fetch-* contract: the value held before the
   modification. */

/* atomic_address: the ptrdiff_t offset is applied to the stored
   void* value.  NOTE(review): += / -= on a void* relies on the GNU
   arithmetic-on-void* extension (byte-sized offsets) -- confirm the
   expansion of _ATOMIC_MODIFY_. */
inline void* atomic_address::fetch_add
( ptrdiff_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline void* atomic_address::fetch_sub
( ptrdiff_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

/* atomic_char */
inline char atomic_char::fetch_add
( char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline char atomic_char::fetch_sub
( char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline char atomic_char::fetch_and
( char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline char atomic_char::fetch_or
( char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline char atomic_char::fetch_xor
( char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

/* atomic_schar */
inline signed char atomic_schar::fetch_add
( signed char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline signed char atomic_schar::fetch_sub
( signed char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline signed char atomic_schar::fetch_and
( signed char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline signed char atomic_schar::fetch_or
( signed char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline signed char atomic_schar::fetch_xor
( signed char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

/* atomic_uchar */
inline unsigned char atomic_uchar::fetch_add
( unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline unsigned char atomic_uchar::fetch_sub
( unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline unsigned char atomic_uchar::fetch_and
( unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline unsigned char atomic_uchar::fetch_or
( unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline unsigned char atomic_uchar::fetch_xor
( unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

/* atomic_short */
inline short atomic_short::fetch_add
( short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline short atomic_short::fetch_sub
( short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline short atomic_short::fetch_and
( short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline short atomic_short::fetch_or
( short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline short atomic_short::fetch_xor
( short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

/* atomic_ushort */
inline unsigned short atomic_ushort::fetch_add
( unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline unsigned short atomic_ushort::fetch_sub
( unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline unsigned short atomic_ushort::fetch_and
( unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline unsigned short atomic_ushort::fetch_or
( unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline unsigned short atomic_ushort::fetch_xor
( unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

/* atomic_int */
inline int atomic_int::fetch_add
( int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline int atomic_int::fetch_sub
( int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline int atomic_int::fetch_and
( int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline int atomic_int::fetch_or
( int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline int atomic_int::fetch_xor
( int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

/* atomic_uint */
inline unsigned int atomic_uint::fetch_add
( unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline unsigned int atomic_uint::fetch_sub
( unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline unsigned int atomic_uint::fetch_and
( unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline unsigned int atomic_uint::fetch_or
( unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline unsigned int atomic_uint::fetch_xor
( unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

/* atomic_long */
inline long atomic_long::fetch_add
( long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline long atomic_long::fetch_sub
( long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline long atomic_long::fetch_and
( long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline long atomic_long::fetch_or
( long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline long atomic_long::fetch_xor
( long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

/* atomic_ulong */
inline unsigned long atomic_ulong::fetch_add
( unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline unsigned long atomic_ulong::fetch_sub
( unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline unsigned long atomic_ulong::fetch_and
( unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline unsigned long atomic_ulong::fetch_or
( unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline unsigned long atomic_ulong::fetch_xor
( unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

/* atomic_llong */
inline long long atomic_llong::fetch_add
( long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline long long atomic_llong::fetch_sub
( long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline long long atomic_llong::fetch_and
( long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline long long atomic_llong::fetch_or
( long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline long long atomic_llong::fetch_xor
( long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

/* atomic_ullong */
inline unsigned long long atomic_ullong::fetch_add
( unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline unsigned long long atomic_ullong::fetch_sub
( unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline unsigned long long atomic_ullong::fetch_and
( unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline unsigned long long atomic_ullong::fetch_or
( unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline unsigned long long atomic_ullong::fetch_xor
( unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

/* atomic_wchar_t */
inline wchar_t atomic_wchar_t::fetch_add
( wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline wchar_t atomic_wchar_t::fetch_sub
( wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline wchar_t atomic_wchar_t::fetch_and
( wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline wchar_t atomic_wchar_t::fetch_or
( wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline wchar_t atomic_wchar_t::fetch_xor
( wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
/* atomic<T*> members: implemented on top of atomic_address (which
   stores a void*), casting between void* and T* at the boundary. */

template< typename T >
T* atomic<T*>::load( memory_order __x__ ) volatile
{ return static_cast<T*>( atomic_address::load( __x__ ) ); }

template< typename T >
T* atomic<T*>::swap( T* __v__, memory_order __x__ ) volatile
{ return static_cast<T*>( atomic_address::swap( __v__, __x__ ) ); }

/* NOTE(review): the expected value __r__ is reinterpreted in place as
   a void*& so atomic_address can update it on failure -- this assumes
   T* and void* share representation; confirm for the target ABI. */
template< typename T >
bool atomic<T*>::compare_swap
( T*& __r__, T* __v__, memory_order __x__, memory_order __y__) volatile
{ return atomic_address::compare_swap( *reinterpret_cast<void**>( &__r__ ),
               static_cast<void*>( __v__ ), __x__, __y__ ); }
//{ return _ATOMIC_CMPSWP_( this, &__r__, __v__, __x__ ); }

/* Single-order form: failure ordering derived from __x__
   (acq_rel -> acquire, release -> relaxed, otherwise __x__). */
template< typename T >
bool atomic<T*>::compare_swap
( T*& __r__, T* __v__, memory_order __x__ ) volatile
{ return compare_swap( __r__, __v__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

/* Pointer arithmetic semantics: the element count __v__ is scaled by
   sizeof(T) before being applied to the underlying void* value. */
template< typename T >
T* atomic<T*>::fetch_add( ptrdiff_t __v__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, sizeof(T) * __v__, __x__ ); }

template< typename T >
T* atomic<T*>::fetch_sub( ptrdiff_t __v__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, sizeof(T) * __v__, __x__ ); }