1 #ifndef __IMPATOMIC_H__
2 #define __IMPATOMIC_H__
4 #include "memoryorder.h"
/* CPP0X(feature) expands to nothing, so declarations wrapped in it
 * ("= default", "= delete", constexpr constructors, etc.) are compiled
 * out when this header is used with a pre-C++0x compiler. */
#define CPP0X( feature )
/* atomic_flag: the minimal atomic type — a boolean flag supporting only
 * test-and-set, clear, and fence.  Member functions default to
 * memory_order_seq_cst and forward to the free _explicit functions below. */
typedef struct atomic_flag
/* Atomically sets the flag and returns its previous value. */
bool test_and_set( memory_order = memory_order_seq_cst ) volatile;
/* Atomically clears the flag. */
void clear( memory_order = memory_order_seq_cst ) volatile;
/* Per-object fence with the given ordering. */
void fence( memory_order ) const volatile;
/* C++0x-only construction/copy control; copies are deleted. */
CPP0X( atomic_flag() = default; )
CPP0X( atomic_flag( const atomic_flag& ) = delete; )
atomic_flag& operator =( const atomic_flag& ) CPP0X(=delete);
/* Static initializer for atomic_flag: starts in the clear (false) state. */
#define ATOMIC_FLAG_INIT { false }
/* C-compatible free-function interface for atomic_flag.  The _explicit
 * variants take an explicit memory_order; the plain variants use the
 * member-function defaults. */
extern bool atomic_flag_test_and_set( volatile atomic_flag* );
extern bool atomic_flag_test_and_set_explicit
( volatile atomic_flag*, memory_order );
extern void atomic_flag_clear( volatile atomic_flag* );
extern void atomic_flag_clear_explicit
( volatile atomic_flag*, memory_order );
extern void atomic_flag_fence
( const volatile atomic_flag*, memory_order );
/* Reserved-name helpers that block until the flag is clear — internal
 * support, presumably for the model-checker runtime; see the
 * implementation file for their exact semantics (TODO confirm). */
extern void __atomic_flag_wait__
( volatile atomic_flag* );
extern void __atomic_flag_wait_explicit__
( volatile atomic_flag*, memory_order );
54 inline bool atomic_flag::test_and_set( memory_order __x__ ) volatile
55 { return atomic_flag_test_and_set_explicit( this, __x__ ); }
57 inline void atomic_flag::clear( memory_order __x__ ) volatile
58 { atomic_flag_clear_explicit( this, __x__ ); }
60 inline void atomic_flag::fence( memory_order __x__ ) const volatile
61 { atomic_flag_fence( this, __x__ ); }
67 The remainder of the example implementation uses the following
68 macros. These macros exploit GNU extensions for value-returning
69 blocks (AKA statement expressions) and __typeof__.
71 The macros rely on data fields of atomic structs being named __f__.
72 Other symbols used are __a__=atomic, __e__=expected, __f__=field,
73 __g__=flag, __m__=modified, __o__=operation, __r__=result,
74 __p__=pointer to field, __v__=value (for single evaluation),
75 __x__=memory-ordering, and __y__=a second memory-ordering (for
operations, such as compare-exchange, that take two ordering arguments).
/* Atomic load: reads __a__->__f__ through the model checker
 * (model_read_action) with ordering __x__, yielding the read value as the
 * result of the statement expression. */
#define _ATOMIC_LOAD_( __a__, __x__ ) \
({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
__typeof__((__a__)->__f__) __r__ = (__typeof__((__a__)->__f__))model_read_action((void *)__p__, __x__); \
/* Atomic store: evaluates __m__ once into __v__, then writes it to
 * __a__->__f__ through the model checker (model_write_action) with
 * ordering __x__. */
#define _ATOMIC_STORE_( __a__, __m__, __x__ ) \
({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
__typeof__(__m__) __v__ = (__m__); \
model_write_action((void *) __p__, __x__, (uint64_t) __v__); \
/* Non-atomic initialization: evaluates __m__ once and records the initial
 * value of __a__->__f__ with the model checker (model_init_action); no
 * memory-order argument is involved. */
#define _ATOMIC_INIT_( __a__, __m__ ) \
({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
__typeof__(__m__) __v__ = (__m__); \
model_init_action((void *) __p__, (uint64_t) __v__); \
/* Atomic read-modify-write: reads the old value via model_rmwr_action,
 * applies compound operator __o__ (e.g. +=) with operand __m__ to a copy,
 * and commits the result via model_rmw_action — both with ordering __x__.
 * Used to implement the fetch_add/fetch_sub/fetch_and/or/xor members. */
#define _ATOMIC_MODIFY_( __a__, __o__, __m__, __x__ ) \
({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
__typeof__((__a__)->__f__) __old__=(__typeof__((__a__)->__f__)) model_rmwr_action((void *)__p__, __x__); \
__typeof__(__m__) __v__ = (__m__); \
__typeof__((__a__)->__f__) __copy__= __old__; \
__copy__ __o__ __v__; \
model_rmw_action((void *)__p__, __x__, (uint64_t) __copy__); \
/* Weak compare-and-swap is modeled without spurious failure for now, so it
 * is simply an alias for the strong version. */
#define _ATOMIC_CMPSWP_WEAK_ _ATOMIC_CMPSWP_
/* Atomic compare-and-swap: reads the current value via model_rmwr_action;
 * if it equals *__e__ (the expected value), commits __v__ via
 * model_rmw_action and yields true; otherwise records the unmodified RMW
 * via model_rmwc_action, stores the observed value back into *__e__, and
 * yields false. */
#define _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ) \
({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
__typeof__(__e__) __q__ = (__e__); \
__typeof__(__m__) __v__ = (__m__); \
__typeof__((__a__)->__f__) __t__=(__typeof__((__a__)->__f__)) model_rmwr_action((void *)__p__, __x__); \
if (__t__ == * __q__ ) { \
model_rmw_action((void *)__p__, __x__, (uint64_t) __v__); __r__ = true; } \
else { model_rmwc_action((void *)__p__, __x__); *__q__ = __t__; __r__ = false;} \
/* Fence: forwards the ordering to the model checker; the object argument
 * __a__ is intentionally unused. */
#define _ATOMIC_FENCE_( __a__, __x__ ) \
({ model_fence_action(__x__);})
/* Lock-free property macros: every atomic type in this implementation is
 * reported as (sometimes) lock-free (value 1). */
#define ATOMIC_CHAR_LOCK_FREE 1
#define ATOMIC_CHAR16_T_LOCK_FREE 1
#define ATOMIC_CHAR32_T_LOCK_FREE 1
#define ATOMIC_WCHAR_T_LOCK_FREE 1
#define ATOMIC_SHORT_LOCK_FREE 1
#define ATOMIC_INT_LOCK_FREE 1
#define ATOMIC_LONG_LOCK_FREE 1
#define ATOMIC_LLONG_LOCK_FREE 1
#define ATOMIC_ADDRESS_LOCK_FREE 1
/* Atomic boolean: load/store/exchange plus weak/strong compare-exchange
 * (two-order and defaulted single-order overloads) and a per-object fence.
 * Assignment from bool stores and returns the assigned value. */
typedef struct atomic_bool
bool is_lock_free() const volatile;
void store( bool, memory_order = memory_order_seq_cst ) volatile;
bool load( memory_order = memory_order_seq_cst ) volatile;
bool exchange( bool, memory_order = memory_order_seq_cst ) volatile;
bool compare_exchange_weak ( bool&, bool, memory_order, memory_order ) volatile;
bool compare_exchange_strong ( bool&, bool, memory_order, memory_order ) volatile;
bool compare_exchange_weak ( bool&, bool,
memory_order = memory_order_seq_cst) volatile;
bool compare_exchange_strong ( bool&, bool,
memory_order = memory_order_seq_cst) volatile;
void fence( memory_order ) const volatile;
// C++0x-only construction/copy control (compiled out via CPP0X):
// note the default constructor is deleted, unlike the integral types.
CPP0X( atomic_bool() = delete; )
CPP0X( constexpr explicit atomic_bool( bool __v__ ) : __f__( __v__ ) { } )
CPP0X( atomic_bool( const atomic_bool& ) = delete; )
atomic_bool& operator =( const atomic_bool& ) CPP0X(=delete);
bool operator =( bool __v__ ) volatile
{ store( __v__ ); return __v__; }
// Free-function friends implementing the member operations.
friend void atomic_store_explicit( volatile atomic_bool*, bool,
friend bool atomic_load_explicit( volatile atomic_bool*, memory_order );
friend bool atomic_exchange_explicit( volatile atomic_bool*, bool,
friend bool atomic_compare_exchange_weak_explicit( volatile atomic_bool*, bool*, bool,
memory_order, memory_order );
friend bool atomic_compare_exchange_strong_explicit( volatile atomic_bool*, bool*, bool,
memory_order, memory_order );
friend void atomic_fence( const volatile atomic_bool*, memory_order );
/* Atomic void* address: load/store/exchange, weak/strong compare-exchange,
 * fence, and pointer arithmetic via fetch_add/fetch_sub taking ptrdiff_t
 * byte offsets.  += and -= forward to the fetch_* members (returning the
 * pre-modification pointer). */
typedef struct atomic_address
bool is_lock_free() const volatile;
void store( void*, memory_order = memory_order_seq_cst ) volatile;
void* load( memory_order = memory_order_seq_cst ) volatile;
void* exchange( void*, memory_order = memory_order_seq_cst ) volatile;
bool compare_exchange_weak( void*&, void*, memory_order, memory_order ) volatile;
bool compare_exchange_strong( void*&, void*, memory_order, memory_order ) volatile;
bool compare_exchange_weak( void*&, void*,
memory_order = memory_order_seq_cst ) volatile;
bool compare_exchange_strong( void*&, void*,
memory_order = memory_order_seq_cst ) volatile;
void fence( memory_order ) const volatile;
void* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
void* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
// C++0x-only construction/copy control (compiled out via CPP0X).
CPP0X( atomic_address() = default; )
CPP0X( constexpr explicit atomic_address( void* __v__ ) : __f__( __v__) { } )
CPP0X( atomic_address( const atomic_address& ) = delete; )
atomic_address& operator =( const atomic_address & ) CPP0X(=delete);
// Assignment stores and returns the assigned pointer.
void* operator =( void* __v__ ) volatile
{ store( __v__ ); return __v__; }
void* operator +=( ptrdiff_t __v__ ) volatile
{ return fetch_add( __v__ ); }
void* operator -=( ptrdiff_t __v__ ) volatile
{ return fetch_sub( __v__ ); }
// Free-function friends implementing the member operations.
friend void atomic_store_explicit( volatile atomic_address*, void*,
friend void* atomic_load_explicit( volatile atomic_address*, memory_order );
friend void* atomic_exchange_explicit( volatile atomic_address*, void*,
friend bool atomic_compare_exchange_weak_explicit( volatile atomic_address*,
void**, void*, memory_order, memory_order );
friend bool atomic_compare_exchange_strong_explicit( volatile atomic_address*,
void**, void*, memory_order, memory_order );
friend void atomic_fence( const volatile atomic_address*, memory_order );
friend void* atomic_fetch_add_explicit( volatile atomic_address*, ptrdiff_t,
friend void* atomic_fetch_sub_explicit( volatile atomic_address*, ptrdiff_t,
/* Atomic char: full integral-atomic interface — load/store/exchange,
 * weak/strong compare-exchange, fence, and fetch_add/sub/and/or/xor.
 * Operator sugar forwards to the fetch_* members: post-inc/dec return the
 * old value; pre-inc/dec and compound assignments return the new value
 * (fetch result combined with the operand). */
typedef struct atomic_char
bool is_lock_free() const volatile;
memory_order = memory_order_seq_cst ) volatile;
char load( memory_order = memory_order_seq_cst ) volatile;
memory_order = memory_order_seq_cst ) volatile;
bool compare_exchange_weak( char&, char,
memory_order, memory_order ) volatile;
bool compare_exchange_strong( char&, char,
memory_order, memory_order ) volatile;
bool compare_exchange_weak( char&, char,
memory_order = memory_order_seq_cst ) volatile;
bool compare_exchange_strong( char&, char,
memory_order = memory_order_seq_cst ) volatile;
void fence( memory_order ) const volatile;
char fetch_add( char,
memory_order = memory_order_seq_cst ) volatile;
char fetch_sub( char,
memory_order = memory_order_seq_cst ) volatile;
char fetch_and( char,
memory_order = memory_order_seq_cst ) volatile;
memory_order = memory_order_seq_cst ) volatile;
char fetch_xor( char,
memory_order = memory_order_seq_cst ) volatile;
// C++0x-only construction/copy control (compiled out via CPP0X).
CPP0X( atomic_char() = default; )
CPP0X( constexpr atomic_char( char __v__ ) : __f__( __v__) { } )
CPP0X( atomic_char( const atomic_char& ) = delete; )
atomic_char& operator =( const atomic_char& ) CPP0X(=delete);
char operator =( char __v__ ) volatile
{ store( __v__ ); return __v__; }
char operator ++( int ) volatile
{ return fetch_add( 1 ); }
char operator --( int ) volatile
{ return fetch_sub( 1 ); }
char operator ++() volatile
{ return fetch_add( 1 ) + 1; }
char operator --() volatile
{ return fetch_sub( 1 ) - 1; }
char operator +=( char __v__ ) volatile
{ return fetch_add( __v__ ) + __v__; }
char operator -=( char __v__ ) volatile
{ return fetch_sub( __v__ ) - __v__; }
char operator &=( char __v__ ) volatile
{ return fetch_and( __v__ ) & __v__; }
char operator |=( char __v__ ) volatile
{ return fetch_or( __v__ ) | __v__; }
char operator ^=( char __v__ ) volatile
{ return fetch_xor( __v__ ) ^ __v__; }
// Free-function friends implementing the member operations.
friend void atomic_store_explicit( volatile atomic_char*, char,
friend char atomic_load_explicit( volatile atomic_char*,
friend char atomic_exchange_explicit( volatile atomic_char*,
char, memory_order );
friend bool atomic_compare_exchange_weak_explicit( volatile atomic_char*,
char*, char, memory_order, memory_order );
friend bool atomic_compare_exchange_strong_explicit( volatile atomic_char*,
char*, char, memory_order, memory_order );
friend void atomic_fence( const volatile atomic_char*, memory_order );
friend char atomic_fetch_add_explicit( volatile atomic_char*,
char, memory_order );
friend char atomic_fetch_sub_explicit( volatile atomic_char*,
char, memory_order );
friend char atomic_fetch_and_explicit( volatile atomic_char*,
char, memory_order );
friend char atomic_fetch_or_explicit( volatile atomic_char*,
char, memory_order );
friend char atomic_fetch_xor_explicit( volatile atomic_char*,
char, memory_order );
/* Atomic signed char: same integral-atomic interface as atomic_char —
 * load/store/exchange, weak/strong compare-exchange, fence, and
 * fetch_add/sub/and/or/xor, with operator sugar forwarding to the
 * fetch_* members (post-inc/dec return the old value; pre-inc/dec and
 * compound assignments return the new value). */
typedef struct atomic_schar
bool is_lock_free() const volatile;
void store( signed char,
memory_order = memory_order_seq_cst ) volatile;
signed char load( memory_order = memory_order_seq_cst ) volatile;
signed char exchange( signed char,
memory_order = memory_order_seq_cst ) volatile;
bool compare_exchange_weak( signed char&, signed char,
memory_order, memory_order ) volatile;
bool compare_exchange_strong( signed char&, signed char,
memory_order, memory_order ) volatile;
bool compare_exchange_weak( signed char&, signed char,
memory_order = memory_order_seq_cst ) volatile;
bool compare_exchange_strong( signed char&, signed char,
memory_order = memory_order_seq_cst ) volatile;
void fence( memory_order ) const volatile;
signed char fetch_add( signed char,
memory_order = memory_order_seq_cst ) volatile;
signed char fetch_sub( signed char,
memory_order = memory_order_seq_cst ) volatile;
signed char fetch_and( signed char,
memory_order = memory_order_seq_cst ) volatile;
signed char fetch_or( signed char,
memory_order = memory_order_seq_cst ) volatile;
signed char fetch_xor( signed char,
memory_order = memory_order_seq_cst ) volatile;
// C++0x-only construction/copy control (compiled out via CPP0X).
CPP0X( atomic_schar() = default; )
CPP0X( constexpr atomic_schar( signed char __v__ ) : __f__( __v__) { } )
CPP0X( atomic_schar( const atomic_schar& ) = delete; )
atomic_schar& operator =( const atomic_schar& ) CPP0X(=delete);
signed char operator =( signed char __v__ ) volatile
{ store( __v__ ); return __v__; }
signed char operator ++( int ) volatile
{ return fetch_add( 1 ); }
signed char operator --( int ) volatile
{ return fetch_sub( 1 ); }
signed char operator ++() volatile
{ return fetch_add( 1 ) + 1; }
signed char operator --() volatile
{ return fetch_sub( 1 ) - 1; }
signed char operator +=( signed char __v__ ) volatile
{ return fetch_add( __v__ ) + __v__; }
signed char operator -=( signed char __v__ ) volatile
{ return fetch_sub( __v__ ) - __v__; }
signed char operator &=( signed char __v__ ) volatile
{ return fetch_and( __v__ ) & __v__; }
signed char operator |=( signed char __v__ ) volatile
{ return fetch_or( __v__ ) | __v__; }
signed char operator ^=( signed char __v__ ) volatile
{ return fetch_xor( __v__ ) ^ __v__; }
// Free-function friends implementing the member operations.
friend void atomic_store_explicit( volatile atomic_schar*, signed char,
friend signed char atomic_load_explicit( volatile atomic_schar*,
friend signed char atomic_exchange_explicit( volatile atomic_schar*,
signed char, memory_order );
friend bool atomic_compare_exchange_weak_explicit( volatile atomic_schar*,
signed char*, signed char, memory_order, memory_order );
friend bool atomic_compare_exchange_strong_explicit( volatile atomic_schar*,
signed char*, signed char, memory_order, memory_order );
friend void atomic_fence( const volatile atomic_schar*, memory_order );
friend signed char atomic_fetch_add_explicit( volatile atomic_schar*,
signed char, memory_order );
friend signed char atomic_fetch_sub_explicit( volatile atomic_schar*,
signed char, memory_order );
friend signed char atomic_fetch_and_explicit( volatile atomic_schar*,
signed char, memory_order );
friend signed char atomic_fetch_or_explicit( volatile atomic_schar*,
signed char, memory_order );
friend signed char atomic_fetch_xor_explicit( volatile atomic_schar*,
signed char, memory_order );
/* Atomic unsigned char: same integral-atomic interface as atomic_char —
 * load/store/exchange, weak/strong compare-exchange, fence, and
 * fetch_add/sub/and/or/xor, with operator sugar forwarding to the
 * fetch_* members (post-inc/dec return the old value; pre-inc/dec and
 * compound assignments return the new value). */
typedef struct atomic_uchar
bool is_lock_free() const volatile;
void store( unsigned char,
memory_order = memory_order_seq_cst ) volatile;
unsigned char load( memory_order = memory_order_seq_cst ) volatile;
unsigned char exchange( unsigned char,
memory_order = memory_order_seq_cst ) volatile;
bool compare_exchange_weak( unsigned char&, unsigned char,
memory_order, memory_order ) volatile;
bool compare_exchange_strong( unsigned char&, unsigned char,
memory_order, memory_order ) volatile;
bool compare_exchange_weak( unsigned char&, unsigned char,
memory_order = memory_order_seq_cst ) volatile;
bool compare_exchange_strong( unsigned char&, unsigned char,
memory_order = memory_order_seq_cst ) volatile;
void fence( memory_order ) const volatile;
unsigned char fetch_add( unsigned char,
memory_order = memory_order_seq_cst ) volatile;
unsigned char fetch_sub( unsigned char,
memory_order = memory_order_seq_cst ) volatile;
unsigned char fetch_and( unsigned char,
memory_order = memory_order_seq_cst ) volatile;
unsigned char fetch_or( unsigned char,
memory_order = memory_order_seq_cst ) volatile;
unsigned char fetch_xor( unsigned char,
memory_order = memory_order_seq_cst ) volatile;
// C++0x-only construction/copy control (compiled out via CPP0X).
CPP0X( atomic_uchar() = default; )
CPP0X( constexpr atomic_uchar( unsigned char __v__ ) : __f__( __v__) { } )
CPP0X( atomic_uchar( const atomic_uchar& ) = delete; )
atomic_uchar& operator =( const atomic_uchar& ) CPP0X(=delete);
unsigned char operator =( unsigned char __v__ ) volatile
{ store( __v__ ); return __v__; }
unsigned char operator ++( int ) volatile
{ return fetch_add( 1 ); }
unsigned char operator --( int ) volatile
{ return fetch_sub( 1 ); }
unsigned char operator ++() volatile
{ return fetch_add( 1 ) + 1; }
unsigned char operator --() volatile
{ return fetch_sub( 1 ) - 1; }
unsigned char operator +=( unsigned char __v__ ) volatile
{ return fetch_add( __v__ ) + __v__; }
unsigned char operator -=( unsigned char __v__ ) volatile
{ return fetch_sub( __v__ ) - __v__; }
unsigned char operator &=( unsigned char __v__ ) volatile
{ return fetch_and( __v__ ) & __v__; }
unsigned char operator |=( unsigned char __v__ ) volatile
{ return fetch_or( __v__ ) | __v__; }
unsigned char operator ^=( unsigned char __v__ ) volatile
{ return fetch_xor( __v__ ) ^ __v__; }
// Free-function friends implementing the member operations.
friend void atomic_store_explicit( volatile atomic_uchar*, unsigned char,
friend unsigned char atomic_load_explicit( volatile atomic_uchar*,
friend unsigned char atomic_exchange_explicit( volatile atomic_uchar*,
unsigned char, memory_order );
friend bool atomic_compare_exchange_weak_explicit( volatile atomic_uchar*,
unsigned char*, unsigned char, memory_order, memory_order );
friend bool atomic_compare_exchange_strong_explicit( volatile atomic_uchar*,
unsigned char*, unsigned char, memory_order, memory_order );
friend void atomic_fence( const volatile atomic_uchar*, memory_order );
friend unsigned char atomic_fetch_add_explicit( volatile atomic_uchar*,
unsigned char, memory_order );
friend unsigned char atomic_fetch_sub_explicit( volatile atomic_uchar*,
unsigned char, memory_order );
friend unsigned char atomic_fetch_and_explicit( volatile atomic_uchar*,
unsigned char, memory_order );
friend unsigned char atomic_fetch_or_explicit( volatile atomic_uchar*,
unsigned char, memory_order );
friend unsigned char atomic_fetch_xor_explicit( volatile atomic_uchar*,
unsigned char, memory_order );
/* Atomic short: same integral-atomic interface as atomic_char —
 * load/store/exchange, weak/strong compare-exchange, fence, and
 * fetch_add/sub/and/or/xor, with operator sugar forwarding to the
 * fetch_* members (post-inc/dec return the old value; pre-inc/dec and
 * compound assignments return the new value). */
typedef struct atomic_short
bool is_lock_free() const volatile;
memory_order = memory_order_seq_cst ) volatile;
short load( memory_order = memory_order_seq_cst ) volatile;
short exchange( short,
memory_order = memory_order_seq_cst ) volatile;
bool compare_exchange_weak( short&, short,
memory_order, memory_order ) volatile;
bool compare_exchange_strong( short&, short,
memory_order, memory_order ) volatile;
bool compare_exchange_weak( short&, short,
memory_order = memory_order_seq_cst ) volatile;
bool compare_exchange_strong( short&, short,
memory_order = memory_order_seq_cst ) volatile;
void fence( memory_order ) const volatile;
short fetch_add( short,
memory_order = memory_order_seq_cst ) volatile;
short fetch_sub( short,
memory_order = memory_order_seq_cst ) volatile;
short fetch_and( short,
memory_order = memory_order_seq_cst ) volatile;
short fetch_or( short,
memory_order = memory_order_seq_cst ) volatile;
short fetch_xor( short,
memory_order = memory_order_seq_cst ) volatile;
// C++0x-only construction/copy control (compiled out via CPP0X).
CPP0X( atomic_short() = default; )
CPP0X( constexpr atomic_short( short __v__ ) : __f__( __v__) { } )
CPP0X( atomic_short( const atomic_short& ) = delete; )
atomic_short& operator =( const atomic_short& ) CPP0X(=delete);
short operator =( short __v__ ) volatile
{ store( __v__ ); return __v__; }
short operator ++( int ) volatile
{ return fetch_add( 1 ); }
short operator --( int ) volatile
{ return fetch_sub( 1 ); }
short operator ++() volatile
{ return fetch_add( 1 ) + 1; }
short operator --() volatile
{ return fetch_sub( 1 ) - 1; }
short operator +=( short __v__ ) volatile
{ return fetch_add( __v__ ) + __v__; }
short operator -=( short __v__ ) volatile
{ return fetch_sub( __v__ ) - __v__; }
short operator &=( short __v__ ) volatile
{ return fetch_and( __v__ ) & __v__; }
short operator |=( short __v__ ) volatile
{ return fetch_or( __v__ ) | __v__; }
short operator ^=( short __v__ ) volatile
{ return fetch_xor( __v__ ) ^ __v__; }
// Free-function friends implementing the member operations.
friend void atomic_store_explicit( volatile atomic_short*, short,
friend short atomic_load_explicit( volatile atomic_short*,
friend short atomic_exchange_explicit( volatile atomic_short*,
short, memory_order );
friend bool atomic_compare_exchange_weak_explicit( volatile atomic_short*,
short*, short, memory_order, memory_order );
friend bool atomic_compare_exchange_strong_explicit( volatile atomic_short*,
short*, short, memory_order, memory_order );
friend void atomic_fence( const volatile atomic_short*, memory_order );
friend short atomic_fetch_add_explicit( volatile atomic_short*,
short, memory_order );
friend short atomic_fetch_sub_explicit( volatile atomic_short*,
short, memory_order );
friend short atomic_fetch_and_explicit( volatile atomic_short*,
short, memory_order );
friend short atomic_fetch_or_explicit( volatile atomic_short*,
short, memory_order );
friend short atomic_fetch_xor_explicit( volatile atomic_short*,
short, memory_order );
/* Atomic unsigned short: same integral-atomic interface as atomic_char —
 * load/store/exchange, weak/strong compare-exchange, fence, and
 * fetch_add/sub/and/or/xor, with operator sugar forwarding to the
 * fetch_* members (post-inc/dec return the old value; pre-inc/dec and
 * compound assignments return the new value). */
typedef struct atomic_ushort
bool is_lock_free() const volatile;
void store( unsigned short,
memory_order = memory_order_seq_cst ) volatile;
unsigned short load( memory_order = memory_order_seq_cst ) volatile;
unsigned short exchange( unsigned short,
memory_order = memory_order_seq_cst ) volatile;
bool compare_exchange_weak( unsigned short&, unsigned short,
memory_order, memory_order ) volatile;
bool compare_exchange_strong( unsigned short&, unsigned short,
memory_order, memory_order ) volatile;
bool compare_exchange_weak( unsigned short&, unsigned short,
memory_order = memory_order_seq_cst ) volatile;
bool compare_exchange_strong( unsigned short&, unsigned short,
memory_order = memory_order_seq_cst ) volatile;
void fence( memory_order ) const volatile;
unsigned short fetch_add( unsigned short,
memory_order = memory_order_seq_cst ) volatile;
unsigned short fetch_sub( unsigned short,
memory_order = memory_order_seq_cst ) volatile;
unsigned short fetch_and( unsigned short,
memory_order = memory_order_seq_cst ) volatile;
unsigned short fetch_or( unsigned short,
memory_order = memory_order_seq_cst ) volatile;
unsigned short fetch_xor( unsigned short,
memory_order = memory_order_seq_cst ) volatile;
// C++0x-only construction/copy control (compiled out via CPP0X).
CPP0X( atomic_ushort() = default; )
CPP0X( constexpr atomic_ushort( unsigned short __v__ ) : __f__( __v__) { } )
CPP0X( atomic_ushort( const atomic_ushort& ) = delete; )
atomic_ushort& operator =( const atomic_ushort& ) CPP0X(=delete);
unsigned short operator =( unsigned short __v__ ) volatile
{ store( __v__ ); return __v__; }
unsigned short operator ++( int ) volatile
{ return fetch_add( 1 ); }
unsigned short operator --( int ) volatile
{ return fetch_sub( 1 ); }
unsigned short operator ++() volatile
{ return fetch_add( 1 ) + 1; }
unsigned short operator --() volatile
{ return fetch_sub( 1 ) - 1; }
unsigned short operator +=( unsigned short __v__ ) volatile
{ return fetch_add( __v__ ) + __v__; }
unsigned short operator -=( unsigned short __v__ ) volatile
{ return fetch_sub( __v__ ) - __v__; }
unsigned short operator &=( unsigned short __v__ ) volatile
{ return fetch_and( __v__ ) & __v__; }
unsigned short operator |=( unsigned short __v__ ) volatile
{ return fetch_or( __v__ ) | __v__; }
unsigned short operator ^=( unsigned short __v__ ) volatile
{ return fetch_xor( __v__ ) ^ __v__; }
// Free-function friends implementing the member operations.
friend void atomic_store_explicit( volatile atomic_ushort*, unsigned short,
friend unsigned short atomic_load_explicit( volatile atomic_ushort*,
friend unsigned short atomic_exchange_explicit( volatile atomic_ushort*,
unsigned short, memory_order );
friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ushort*,
unsigned short*, unsigned short, memory_order, memory_order );
friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ushort*,
unsigned short*, unsigned short, memory_order, memory_order );
friend void atomic_fence( const volatile atomic_ushort*, memory_order );
friend unsigned short atomic_fetch_add_explicit( volatile atomic_ushort*,
unsigned short, memory_order );
friend unsigned short atomic_fetch_sub_explicit( volatile atomic_ushort*,
unsigned short, memory_order );
friend unsigned short atomic_fetch_and_explicit( volatile atomic_ushort*,
unsigned short, memory_order );
friend unsigned short atomic_fetch_or_explicit( volatile atomic_ushort*,
unsigned short, memory_order );
friend unsigned short atomic_fetch_xor_explicit( volatile atomic_ushort*,
unsigned short, memory_order );
// Underlying value; the _ATOMIC_* macros access this field by name (__f__).
unsigned short __f__;
/* Atomic int: same integral-atomic interface as atomic_char —
 * load/store/exchange, weak/strong compare-exchange, fence, and
 * fetch_add/sub/and/or/xor, with operator sugar forwarding to the
 * fetch_* members (post-inc/dec return the old value; pre-inc/dec and
 * compound assignments return the new value). */
typedef struct atomic_int
bool is_lock_free() const volatile;
memory_order = memory_order_seq_cst ) volatile;
int load( memory_order = memory_order_seq_cst ) volatile;
memory_order = memory_order_seq_cst ) volatile;
bool compare_exchange_weak( int&, int,
memory_order, memory_order ) volatile;
bool compare_exchange_strong( int&, int,
memory_order, memory_order ) volatile;
bool compare_exchange_weak( int&, int,
memory_order = memory_order_seq_cst ) volatile;
bool compare_exchange_strong( int&, int,
memory_order = memory_order_seq_cst ) volatile;
void fence( memory_order ) const volatile;
memory_order = memory_order_seq_cst ) volatile;
memory_order = memory_order_seq_cst ) volatile;
memory_order = memory_order_seq_cst ) volatile;
memory_order = memory_order_seq_cst ) volatile;
memory_order = memory_order_seq_cst ) volatile;
// C++0x-only construction/copy control (compiled out via CPP0X).
CPP0X( atomic_int() = default; )
CPP0X( constexpr atomic_int( int __v__ ) : __f__( __v__) { } )
CPP0X( atomic_int( const atomic_int& ) = delete; )
atomic_int& operator =( const atomic_int& ) CPP0X(=delete);
int operator =( int __v__ ) volatile
{ store( __v__ ); return __v__; }
int operator ++( int ) volatile
{ return fetch_add( 1 ); }
int operator --( int ) volatile
{ return fetch_sub( 1 ); }
int operator ++() volatile
{ return fetch_add( 1 ) + 1; }
int operator --() volatile
{ return fetch_sub( 1 ) - 1; }
int operator +=( int __v__ ) volatile
{ return fetch_add( __v__ ) + __v__; }
int operator -=( int __v__ ) volatile
{ return fetch_sub( __v__ ) - __v__; }
int operator &=( int __v__ ) volatile
{ return fetch_and( __v__ ) & __v__; }
int operator |=( int __v__ ) volatile
{ return fetch_or( __v__ ) | __v__; }
int operator ^=( int __v__ ) volatile
{ return fetch_xor( __v__ ) ^ __v__; }
// Free-function friends implementing the member operations.
friend void atomic_store_explicit( volatile atomic_int*, int,
friend int atomic_load_explicit( volatile atomic_int*,
friend int atomic_exchange_explicit( volatile atomic_int*,
friend bool atomic_compare_exchange_weak_explicit( volatile atomic_int*,
int*, int, memory_order, memory_order );
friend bool atomic_compare_exchange_strong_explicit( volatile atomic_int*,
int*, int, memory_order, memory_order );
friend void atomic_fence( const volatile atomic_int*, memory_order );
friend int atomic_fetch_add_explicit( volatile atomic_int*,
friend int atomic_fetch_sub_explicit( volatile atomic_int*,
friend int atomic_fetch_and_explicit( volatile atomic_int*,
friend int atomic_fetch_or_explicit( volatile atomic_int*,
friend int atomic_fetch_xor_explicit( volatile atomic_int*,
/* Atomic unsigned int: same integral-atomic interface as atomic_char —
 * load/store/exchange, weak/strong compare-exchange, fence, and
 * fetch_add/sub/and/or/xor, with operator sugar forwarding to the
 * fetch_* members (post-inc/dec return the old value; pre-inc/dec and
 * compound assignments return the new value). */
typedef struct atomic_uint
bool is_lock_free() const volatile;
void store( unsigned int,
memory_order = memory_order_seq_cst ) volatile;
unsigned int load( memory_order = memory_order_seq_cst ) volatile;
unsigned int exchange( unsigned int,
memory_order = memory_order_seq_cst ) volatile;
bool compare_exchange_weak( unsigned int&, unsigned int,
memory_order, memory_order ) volatile;
bool compare_exchange_strong( unsigned int&, unsigned int,
memory_order, memory_order ) volatile;
bool compare_exchange_weak( unsigned int&, unsigned int,
memory_order = memory_order_seq_cst ) volatile;
bool compare_exchange_strong( unsigned int&, unsigned int,
memory_order = memory_order_seq_cst ) volatile;
void fence( memory_order ) const volatile;
unsigned int fetch_add( unsigned int,
memory_order = memory_order_seq_cst ) volatile;
unsigned int fetch_sub( unsigned int,
memory_order = memory_order_seq_cst ) volatile;
unsigned int fetch_and( unsigned int,
memory_order = memory_order_seq_cst ) volatile;
unsigned int fetch_or( unsigned int,
memory_order = memory_order_seq_cst ) volatile;
unsigned int fetch_xor( unsigned int,
memory_order = memory_order_seq_cst ) volatile;
// C++0x-only construction/copy control (compiled out via CPP0X).
CPP0X( atomic_uint() = default; )
CPP0X( constexpr atomic_uint( unsigned int __v__ ) : __f__( __v__) { } )
CPP0X( atomic_uint( const atomic_uint& ) = delete; )
atomic_uint& operator =( const atomic_uint& ) CPP0X(=delete);
unsigned int operator =( unsigned int __v__ ) volatile
{ store( __v__ ); return __v__; }
unsigned int operator ++( int ) volatile
{ return fetch_add( 1 ); }
unsigned int operator --( int ) volatile
{ return fetch_sub( 1 ); }
unsigned int operator ++() volatile
{ return fetch_add( 1 ) + 1; }
unsigned int operator --() volatile
{ return fetch_sub( 1 ) - 1; }
unsigned int operator +=( unsigned int __v__ ) volatile
{ return fetch_add( __v__ ) + __v__; }
unsigned int operator -=( unsigned int __v__ ) volatile
{ return fetch_sub( __v__ ) - __v__; }
unsigned int operator &=( unsigned int __v__ ) volatile
{ return fetch_and( __v__ ) & __v__; }
unsigned int operator |=( unsigned int __v__ ) volatile
{ return fetch_or( __v__ ) | __v__; }
unsigned int operator ^=( unsigned int __v__ ) volatile
{ return fetch_xor( __v__ ) ^ __v__; }
// Free-function friends implementing the member operations.
friend void atomic_store_explicit( volatile atomic_uint*, unsigned int,
friend unsigned int atomic_load_explicit( volatile atomic_uint*,
friend unsigned int atomic_exchange_explicit( volatile atomic_uint*,
unsigned int, memory_order );
friend bool atomic_compare_exchange_weak_explicit( volatile atomic_uint*,
unsigned int*, unsigned int, memory_order, memory_order );
friend bool atomic_compare_exchange_strong_explicit( volatile atomic_uint*,
unsigned int*, unsigned int, memory_order, memory_order );
friend void atomic_fence( const volatile atomic_uint*, memory_order );
friend unsigned int atomic_fetch_add_explicit( volatile atomic_uint*,
unsigned int, memory_order );
friend unsigned int atomic_fetch_sub_explicit( volatile atomic_uint*,
unsigned int, memory_order );
friend unsigned int atomic_fetch_and_explicit( volatile atomic_uint*,
unsigned int, memory_order );
friend unsigned int atomic_fetch_or_explicit( volatile atomic_uint*,
unsigned int, memory_order );
friend unsigned int atomic_fetch_xor_explicit( volatile atomic_uint*,
unsigned int, memory_order );
// Atomic `long` from the C++0x draft interface: lock-free query,
// load/store/exchange/CAS, per-object fence, fetch-ops, plus operator
// sugar that maps assignment / ++ / -- / compound-assign onto the
// fetch_* members.  NOTE(review): this extraction omits some physical
// lines (opening brace, first halves of store/exchange declarations);
// code text below is left byte-identical.
869 typedef struct atomic_long
872 bool is_lock_free() const volatile;
874 memory_order = memory_order_seq_cst ) volatile;
875 long load( memory_order = memory_order_seq_cst ) volatile;
877 memory_order = memory_order_seq_cst ) volatile;
// 4-arg overloads take separate success/failure orderings; the 3-arg
// overloads default the ordering to memory_order_seq_cst.
878 bool compare_exchange_weak( long&, long,
879 memory_order, memory_order ) volatile;
880 bool compare_exchange_strong( long&, long,
881 memory_order, memory_order ) volatile;
882 bool compare_exchange_weak( long&, long,
883 memory_order = memory_order_seq_cst ) volatile;
884 bool compare_exchange_strong( long&, long,
885 memory_order = memory_order_seq_cst ) volatile;
886 void fence( memory_order ) const volatile;
887 long fetch_add( long,
888 memory_order = memory_order_seq_cst ) volatile;
889 long fetch_sub( long,
890 memory_order = memory_order_seq_cst ) volatile;
891 long fetch_and( long,
892 memory_order = memory_order_seq_cst ) volatile;
894 memory_order = memory_order_seq_cst ) volatile;
895 long fetch_xor( long,
896 memory_order = memory_order_seq_cst ) volatile;
// CPP0X(...) expands to nothing unless C++0x features are enabled
// (see the CPP0X macro near the top of the header).
898 CPP0X( atomic_long() = default; )
899 CPP0X( constexpr atomic_long( long __v__ ) : __f__( __v__) { } )
900 CPP0X( atomic_long( const atomic_long& ) = delete; )
901 atomic_long& operator =( const atomic_long& ) CPP0X(=delete);
903 long operator =( long __v__ ) volatile
904 { store( __v__ ); return __v__; }
// Post-inc/dec return the old value; pre-inc/dec return the new one.
906 long operator ++( int ) volatile
907 { return fetch_add( 1 ); }
909 long operator --( int ) volatile
910 { return fetch_sub( 1 ); }
912 long operator ++() volatile
913 { return fetch_add( 1 ) + 1; }
915 long operator --() volatile
916 { return fetch_sub( 1 ) - 1; }
// Compound assignment returns the post-operation value, computed by
// re-applying the operation to the fetched (old) value.
918 long operator +=( long __v__ ) volatile
919 { return fetch_add( __v__ ) + __v__; }
921 long operator -=( long __v__ ) volatile
922 { return fetch_sub( __v__ ) - __v__; }
924 long operator &=( long __v__ ) volatile
925 { return fetch_and( __v__ ) & __v__; }
927 long operator |=( long __v__ ) volatile
928 { return fetch_or( __v__ ) | __v__; }
930 long operator ^=( long __v__ ) volatile
931 { return fetch_xor( __v__ ) ^ __v__; }
// Free-function C-style API declared as friends so the inline
// definitions later in the header can reach the private field.
933 friend void atomic_store_explicit( volatile atomic_long*, long,
935 friend long atomic_load_explicit( volatile atomic_long*,
937 friend long atomic_exchange_explicit( volatile atomic_long*,
938 long, memory_order );
939 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_long*,
940 long*, long, memory_order, memory_order );
941 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_long*,
942 long*, long, memory_order, memory_order );
943 friend void atomic_fence( const volatile atomic_long*, memory_order );
944 friend long atomic_fetch_add_explicit( volatile atomic_long*,
945 long, memory_order );
946 friend long atomic_fetch_sub_explicit( volatile atomic_long*,
947 long, memory_order );
948 friend long atomic_fetch_and_explicit( volatile atomic_long*,
949 long, memory_order );
950 friend long atomic_fetch_or_explicit( volatile atomic_long*,
951 long, memory_order );
952 friend long atomic_fetch_xor_explicit( volatile atomic_long*,
953 long, memory_order );
// Atomic `unsigned long`.  Same shape as atomic_long above: member
// operations, operator sugar over fetch_*, and friend declarations for
// the free-function API.  NOTE(review): some physical lines (opening
// brace, struct-closing `} atomic_ulong;`) are elided in this
// extraction; code text left byte-identical.
961 typedef struct atomic_ulong
964 bool is_lock_free() const volatile;
965 void store( unsigned long,
966 memory_order = memory_order_seq_cst ) volatile;
967 unsigned long load( memory_order = memory_order_seq_cst ) volatile;
968 unsigned long exchange( unsigned long,
969 memory_order = memory_order_seq_cst ) volatile;
970 bool compare_exchange_weak( unsigned long&, unsigned long,
971 memory_order, memory_order ) volatile;
972 bool compare_exchange_strong( unsigned long&, unsigned long,
973 memory_order, memory_order ) volatile;
974 bool compare_exchange_weak( unsigned long&, unsigned long,
975 memory_order = memory_order_seq_cst ) volatile;
976 bool compare_exchange_strong( unsigned long&, unsigned long,
977 memory_order = memory_order_seq_cst ) volatile;
978 void fence( memory_order ) const volatile;
979 unsigned long fetch_add( unsigned long,
980 memory_order = memory_order_seq_cst ) volatile;
981 unsigned long fetch_sub( unsigned long,
982 memory_order = memory_order_seq_cst ) volatile;
983 unsigned long fetch_and( unsigned long,
984 memory_order = memory_order_seq_cst ) volatile;
985 unsigned long fetch_or( unsigned long,
986 memory_order = memory_order_seq_cst ) volatile;
987 unsigned long fetch_xor( unsigned long,
988 memory_order = memory_order_seq_cst ) volatile;
990 CPP0X( atomic_ulong() = default; )
991 CPP0X( constexpr atomic_ulong( unsigned long __v__ ) : __f__( __v__) { } )
992 CPP0X( atomic_ulong( const atomic_ulong& ) = delete; )
993 atomic_ulong& operator =( const atomic_ulong& ) CPP0X(=delete);
995 unsigned long operator =( unsigned long __v__ ) volatile
996 { store( __v__ ); return __v__; }
// Post-forms return the fetched (old) value; pre-forms the new value.
998 unsigned long operator ++( int ) volatile
999 { return fetch_add( 1 ); }
1001 unsigned long operator --( int ) volatile
1002 { return fetch_sub( 1 ); }
1004 unsigned long operator ++() volatile
1005 { return fetch_add( 1 ) + 1; }
1007 unsigned long operator --() volatile
1008 { return fetch_sub( 1 ) - 1; }
1010 unsigned long operator +=( unsigned long __v__ ) volatile
1011 { return fetch_add( __v__ ) + __v__; }
1013 unsigned long operator -=( unsigned long __v__ ) volatile
1014 { return fetch_sub( __v__ ) - __v__; }
1016 unsigned long operator &=( unsigned long __v__ ) volatile
1017 { return fetch_and( __v__ ) & __v__; }
1019 unsigned long operator |=( unsigned long __v__ ) volatile
1020 { return fetch_or( __v__ ) | __v__; }
1022 unsigned long operator ^=( unsigned long __v__ ) volatile
1023 { return fetch_xor( __v__ ) ^ __v__; }
1025 friend void atomic_store_explicit( volatile atomic_ulong*, unsigned long,
1027 friend unsigned long atomic_load_explicit( volatile atomic_ulong*,
1029 friend unsigned long atomic_exchange_explicit( volatile atomic_ulong*,
1030 unsigned long, memory_order );
1031 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ulong*,
1032 unsigned long*, unsigned long, memory_order, memory_order );
1033 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ulong*,
1034 unsigned long*, unsigned long, memory_order, memory_order );
1035 friend void atomic_fence( const volatile atomic_ulong*, memory_order );
1036 friend unsigned long atomic_fetch_add_explicit( volatile atomic_ulong*,
1037 unsigned long, memory_order );
1038 friend unsigned long atomic_fetch_sub_explicit( volatile atomic_ulong*,
1039 unsigned long, memory_order );
1040 friend unsigned long atomic_fetch_and_explicit( volatile atomic_ulong*,
1041 unsigned long, memory_order );
1042 friend unsigned long atomic_fetch_or_explicit( volatile atomic_ulong*,
1043 unsigned long, memory_order );
1044 friend unsigned long atomic_fetch_xor_explicit( volatile atomic_ulong*,
1045 unsigned long, memory_order );
// __f__ is the stored value -- the field name the _ATOMIC_* macros
// (see the header preamble) rely on.
1049 unsigned long __f__;
// Atomic `long long`.  Same pattern as the other integral atomics:
// member ops, operator sugar over fetch_*, friend free-function API.
// NOTE(review): opening brace / closing `} atomic_llong;` lines are
// elided in this extraction; code text left byte-identical.
1053 typedef struct atomic_llong
1056 bool is_lock_free() const volatile;
1057 void store( long long,
1058 memory_order = memory_order_seq_cst ) volatile;
1059 long long load( memory_order = memory_order_seq_cst ) volatile;
1060 long long exchange( long long,
1061 memory_order = memory_order_seq_cst ) volatile;
1062 bool compare_exchange_weak( long long&, long long,
1063 memory_order, memory_order ) volatile;
1064 bool compare_exchange_strong( long long&, long long,
1065 memory_order, memory_order ) volatile;
1066 bool compare_exchange_weak( long long&, long long,
1067 memory_order = memory_order_seq_cst ) volatile;
1068 bool compare_exchange_strong( long long&, long long,
1069 memory_order = memory_order_seq_cst ) volatile;
1070 void fence( memory_order ) const volatile;
1071 long long fetch_add( long long,
1072 memory_order = memory_order_seq_cst ) volatile;
1073 long long fetch_sub( long long,
1074 memory_order = memory_order_seq_cst ) volatile;
1075 long long fetch_and( long long,
1076 memory_order = memory_order_seq_cst ) volatile;
1077 long long fetch_or( long long,
1078 memory_order = memory_order_seq_cst ) volatile;
1079 long long fetch_xor( long long,
1080 memory_order = memory_order_seq_cst ) volatile;
1082 CPP0X( atomic_llong() = default; )
1083 CPP0X( constexpr atomic_llong( long long __v__ ) : __f__( __v__) { } )
1084 CPP0X( atomic_llong( const atomic_llong& ) = delete; )
1085 atomic_llong& operator =( const atomic_llong& ) CPP0X(=delete);
1087 long long operator =( long long __v__ ) volatile
1088 { store( __v__ ); return __v__; }
// Post-forms return the old value; pre-forms the updated value.
1090 long long operator ++( int ) volatile
1091 { return fetch_add( 1 ); }
1093 long long operator --( int ) volatile
1094 { return fetch_sub( 1 ); }
1096 long long operator ++() volatile
1097 { return fetch_add( 1 ) + 1; }
1099 long long operator --() volatile
1100 { return fetch_sub( 1 ) - 1; }
1102 long long operator +=( long long __v__ ) volatile
1103 { return fetch_add( __v__ ) + __v__; }
1105 long long operator -=( long long __v__ ) volatile
1106 { return fetch_sub( __v__ ) - __v__; }
1108 long long operator &=( long long __v__ ) volatile
1109 { return fetch_and( __v__ ) & __v__; }
1111 long long operator |=( long long __v__ ) volatile
1112 { return fetch_or( __v__ ) | __v__; }
1114 long long operator ^=( long long __v__ ) volatile
1115 { return fetch_xor( __v__ ) ^ __v__; }
1117 friend void atomic_store_explicit( volatile atomic_llong*, long long,
1119 friend long long atomic_load_explicit( volatile atomic_llong*,
1121 friend long long atomic_exchange_explicit( volatile atomic_llong*,
1122 long long, memory_order );
1123 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_llong*,
1124 long long*, long long, memory_order, memory_order );
1125 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_llong*,
1126 long long*, long long, memory_order, memory_order );
1127 friend void atomic_fence( const volatile atomic_llong*, memory_order );
1128 friend long long atomic_fetch_add_explicit( volatile atomic_llong*,
1129 long long, memory_order );
1130 friend long long atomic_fetch_sub_explicit( volatile atomic_llong*,
1131 long long, memory_order );
1132 friend long long atomic_fetch_and_explicit( volatile atomic_llong*,
1133 long long, memory_order );
1134 friend long long atomic_fetch_or_explicit( volatile atomic_llong*,
1135 long long, memory_order );
1136 friend long long atomic_fetch_xor_explicit( volatile atomic_llong*,
1137 long long, memory_order );
// Atomic `unsigned long long` -- last of the hand-expanded integral
// atomic structs.  NOTE(review): opening brace / struct-closing line
// are elided in this extraction; code text left byte-identical.
1145 typedef struct atomic_ullong
1148 bool is_lock_free() const volatile;
1149 void store( unsigned long long,
1150 memory_order = memory_order_seq_cst ) volatile;
1151 unsigned long long load( memory_order = memory_order_seq_cst ) volatile;
1152 unsigned long long exchange( unsigned long long,
1153 memory_order = memory_order_seq_cst ) volatile;
1154 bool compare_exchange_weak( unsigned long long&, unsigned long long,
1155 memory_order, memory_order ) volatile;
1156 bool compare_exchange_strong( unsigned long long&, unsigned long long,
1157 memory_order, memory_order ) volatile;
1158 bool compare_exchange_weak( unsigned long long&, unsigned long long,
1159 memory_order = memory_order_seq_cst ) volatile;
1160 bool compare_exchange_strong( unsigned long long&, unsigned long long,
1161 memory_order = memory_order_seq_cst ) volatile;
1162 void fence( memory_order ) const volatile;
1163 unsigned long long fetch_add( unsigned long long,
1164 memory_order = memory_order_seq_cst ) volatile;
1165 unsigned long long fetch_sub( unsigned long long,
1166 memory_order = memory_order_seq_cst ) volatile;
1167 unsigned long long fetch_and( unsigned long long,
1168 memory_order = memory_order_seq_cst ) volatile;
1169 unsigned long long fetch_or( unsigned long long,
1170 memory_order = memory_order_seq_cst ) volatile;
1171 unsigned long long fetch_xor( unsigned long long,
1172 memory_order = memory_order_seq_cst ) volatile;
1174 CPP0X( atomic_ullong() = default; )
1175 CPP0X( constexpr atomic_ullong( unsigned long long __v__ ) : __f__( __v__) { } )
1176 CPP0X( atomic_ullong( const atomic_ullong& ) = delete; )
1177 atomic_ullong& operator =( const atomic_ullong& ) CPP0X(=delete);
1179 unsigned long long operator =( unsigned long long __v__ ) volatile
1180 { store( __v__ ); return __v__; }
// Post-forms return the old value; pre-forms the updated value.
1182 unsigned long long operator ++( int ) volatile
1183 { return fetch_add( 1 ); }
1185 unsigned long long operator --( int ) volatile
1186 { return fetch_sub( 1 ); }
1188 unsigned long long operator ++() volatile
1189 { return fetch_add( 1 ) + 1; }
1191 unsigned long long operator --() volatile
1192 { return fetch_sub( 1 ) - 1; }
1194 unsigned long long operator +=( unsigned long long __v__ ) volatile
1195 { return fetch_add( __v__ ) + __v__; }
1197 unsigned long long operator -=( unsigned long long __v__ ) volatile
1198 { return fetch_sub( __v__ ) - __v__; }
1200 unsigned long long operator &=( unsigned long long __v__ ) volatile
1201 { return fetch_and( __v__ ) & __v__; }
1203 unsigned long long operator |=( unsigned long long __v__ ) volatile
1204 { return fetch_or( __v__ ) | __v__; }
1206 unsigned long long operator ^=( unsigned long long __v__ ) volatile
1207 { return fetch_xor( __v__ ) ^ __v__; }
1209 friend void atomic_store_explicit( volatile atomic_ullong*, unsigned long long,
1211 friend unsigned long long atomic_load_explicit( volatile atomic_ullong*,
1213 friend unsigned long long atomic_exchange_explicit( volatile atomic_ullong*,
1214 unsigned long long, memory_order );
1215 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ullong*,
1216 unsigned long long*, unsigned long long, memory_order, memory_order );
1217 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ullong*,
1218 unsigned long long*, unsigned long long, memory_order, memory_order );
1219 friend void atomic_fence( const volatile atomic_ullong*, memory_order );
1220 friend unsigned long long atomic_fetch_add_explicit( volatile atomic_ullong*,
1221 unsigned long long, memory_order );
1222 friend unsigned long long atomic_fetch_sub_explicit( volatile atomic_ullong*,
1223 unsigned long long, memory_order );
1224 friend unsigned long long atomic_fetch_and_explicit( volatile atomic_ullong*,
1225 unsigned long long, memory_order );
1226 friend unsigned long long atomic_fetch_or_explicit( volatile atomic_ullong*,
1227 unsigned long long, memory_order );
1228 friend unsigned long long atomic_fetch_xor_explicit( volatile atomic_ullong*,
1229 unsigned long long, memory_order );
// Stored value; field name required by the _ATOMIC_* macros.
1233 unsigned long long __f__;
// <stdint.h>-style atomic aliases mapped onto the builtin-type atomics
// declared above.  NOTE(review): the 64-bit least/fast aliases use
// atomic_llong/atomic_ullong, and intptr/size/ssize/ptrdiff map to
// long/unsigned long -- these width assumptions are platform-dependent
// (e.g. not true on LLP64); confirm against the target ABI.
1237 typedef atomic_schar atomic_int_least8_t;
1238 typedef atomic_uchar atomic_uint_least8_t;
1239 typedef atomic_short atomic_int_least16_t;
1240 typedef atomic_ushort atomic_uint_least16_t;
1241 typedef atomic_int atomic_int_least32_t;
1242 typedef atomic_uint atomic_uint_least32_t;
1243 typedef atomic_llong atomic_int_least64_t;
1244 typedef atomic_ullong atomic_uint_least64_t;
1246 typedef atomic_schar atomic_int_fast8_t;
1247 typedef atomic_uchar atomic_uint_fast8_t;
1248 typedef atomic_short atomic_int_fast16_t;
1249 typedef atomic_ushort atomic_uint_fast16_t;
1250 typedef atomic_int atomic_int_fast32_t;
1251 typedef atomic_uint atomic_uint_fast32_t;
1252 typedef atomic_llong atomic_int_fast64_t;
1253 typedef atomic_ullong atomic_uint_fast64_t;
1255 typedef atomic_long atomic_intptr_t;
1256 typedef atomic_ulong atomic_uintptr_t;
1258 typedef atomic_long atomic_ssize_t;
1259 typedef atomic_ulong atomic_size_t;
1261 typedef atomic_long atomic_ptrdiff_t;
1263 typedef atomic_llong atomic_intmax_t;
1264 typedef atomic_ullong atomic_uintmax_t;
// Atomic `wchar_t`, expanded with the same member/operator/friend
// pattern as the integral atomics above.  NOTE(review): a later
// `typedef ... atomic_wchar_t;` also appears in this file; presumably
// the two live in mutually exclusive preprocessor branches -- confirm
// the surrounding #if, which is not visible in this extraction.
1270 typedef struct atomic_wchar_t
1273 bool is_lock_free() const volatile;
1274 void store( wchar_t, memory_order = memory_order_seq_cst ) volatile;
1275 wchar_t load( memory_order = memory_order_seq_cst ) volatile;
1276 wchar_t exchange( wchar_t,
1277 memory_order = memory_order_seq_cst ) volatile;
1278 bool compare_exchange_weak( wchar_t&, wchar_t,
1279 memory_order, memory_order ) volatile;
1280 bool compare_exchange_strong( wchar_t&, wchar_t,
1281 memory_order, memory_order ) volatile;
1282 bool compare_exchange_weak( wchar_t&, wchar_t,
1283 memory_order = memory_order_seq_cst ) volatile;
1284 bool compare_exchange_strong( wchar_t&, wchar_t,
1285 memory_order = memory_order_seq_cst ) volatile;
1286 void fence( memory_order ) const volatile;
1287 wchar_t fetch_add( wchar_t,
1288 memory_order = memory_order_seq_cst ) volatile;
1289 wchar_t fetch_sub( wchar_t,
1290 memory_order = memory_order_seq_cst ) volatile;
1291 wchar_t fetch_and( wchar_t,
1292 memory_order = memory_order_seq_cst ) volatile;
1293 wchar_t fetch_or( wchar_t,
1294 memory_order = memory_order_seq_cst ) volatile;
1295 wchar_t fetch_xor( wchar_t,
1296 memory_order = memory_order_seq_cst ) volatile;
1298 CPP0X( atomic_wchar_t() = default; )
1299 CPP0X( constexpr atomic_wchar_t( wchar_t __v__ ) : __f__( __v__) { } )
1300 CPP0X( atomic_wchar_t( const atomic_wchar_t& ) = delete; )
1301 atomic_wchar_t& operator =( const atomic_wchar_t& ) CPP0X(=delete);
1303 wchar_t operator =( wchar_t __v__ ) volatile
1304 { store( __v__ ); return __v__; }
// Post-forms return the old value; pre-forms the updated value.
1306 wchar_t operator ++( int ) volatile
1307 { return fetch_add( 1 ); }
1309 wchar_t operator --( int ) volatile
1310 { return fetch_sub( 1 ); }
1312 wchar_t operator ++() volatile
1313 { return fetch_add( 1 ) + 1; }
1315 wchar_t operator --() volatile
1316 { return fetch_sub( 1 ) - 1; }
1318 wchar_t operator +=( wchar_t __v__ ) volatile
1319 { return fetch_add( __v__ ) + __v__; }
1321 wchar_t operator -=( wchar_t __v__ ) volatile
1322 { return fetch_sub( __v__ ) - __v__; }
1324 wchar_t operator &=( wchar_t __v__ ) volatile
1325 { return fetch_and( __v__ ) & __v__; }
1327 wchar_t operator |=( wchar_t __v__ ) volatile
1328 { return fetch_or( __v__ ) | __v__; }
1330 wchar_t operator ^=( wchar_t __v__ ) volatile
1331 { return fetch_xor( __v__ ) ^ __v__; }
1333 friend void atomic_store_explicit( volatile atomic_wchar_t*, wchar_t,
1335 friend wchar_t atomic_load_explicit( volatile atomic_wchar_t*,
1337 friend wchar_t atomic_exchange_explicit( volatile atomic_wchar_t*,
1338 wchar_t, memory_order );
1339 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_wchar_t*,
1340 wchar_t*, wchar_t, memory_order, memory_order );
1341 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_wchar_t*,
1342 wchar_t*, wchar_t, memory_order, memory_order );
1343 friend void atomic_fence( const volatile atomic_wchar_t*, memory_order );
1344 friend wchar_t atomic_fetch_add_explicit( volatile atomic_wchar_t*,
1345 wchar_t, memory_order );
1346 friend wchar_t atomic_fetch_sub_explicit( volatile atomic_wchar_t*,
1347 wchar_t, memory_order );
1348 friend wchar_t atomic_fetch_and_explicit( volatile atomic_wchar_t*,
1349 wchar_t, memory_order );
1350 friend wchar_t atomic_fetch_or_explicit( volatile atomic_wchar_t*,
1351 wchar_t, memory_order );
1352 friend wchar_t atomic_fetch_xor_explicit( volatile atomic_wchar_t*,
1353 wchar_t, memory_order );
// Character-type aliases built on the least-width atomics.
// NOTE(review): this `atomic_wchar_t` typedef conflicts with the
// struct of the same name declared earlier; in the draft header these
// are alternative preprocessor branches -- verify the enclosing
// #if/#else (not visible in this extraction).
1363 typedef atomic_int_least16_t atomic_char16_t;
1364 typedef atomic_int_least32_t atomic_char32_t;
1365 typedef atomic_int_least32_t atomic_wchar_t;
// Primary `atomic<T>` template: generic load/store/exchange/CAS and
// per-object fence, plus assignment sugar.  No fetch-arithmetic here;
// that lives in the integral/pointer specializations.  NOTE(review):
// the `struct atomic` header/brace lines between the template
// introducer and the first member are elided in this extraction.
1372 template< typename T >
1377 bool is_lock_free() const volatile;
1378 void store( T, memory_order = memory_order_seq_cst ) volatile;
1379 T load( memory_order = memory_order_seq_cst ) volatile;
1380 T exchange( T __v__, memory_order = memory_order_seq_cst ) volatile;
1381 bool compare_exchange_weak( T&, T, memory_order, memory_order ) volatile;
1382 bool compare_exchange_strong( T&, T, memory_order, memory_order ) volatile;
1383 bool compare_exchange_weak( T&, T, memory_order = memory_order_seq_cst ) volatile;
1384 bool compare_exchange_strong( T&, T, memory_order = memory_order_seq_cst ) volatile;
1385 void fence( memory_order ) const volatile;
1387 CPP0X( atomic() = default; )
1388 CPP0X( constexpr explicit atomic( T __v__ ) : __f__( __v__ ) { } )
1389 CPP0X( atomic( const atomic& ) = delete; )
1390 atomic& operator =( const atomic& ) CPP0X(=delete);
// Assignment stores and returns the assigned value.
1392 T operator =( T __v__ ) volatile
1393 { store( __v__ ); return __v__; }
// Partial specialization of atomic<> for pointer types.  Inherits the
// type-erased (void*) machinery from atomic_address and re-exposes it
// with T*-typed signatures; fetch_add/fetch_sub offset by ptrdiff_t.
//
// FIX(review): operator+= / operator-= previously took a `T*` operand,
// which is ill-formed on instantiation: fetch_add/fetch_sub (declared
// below) take ptrdiff_t (no T* -> ptrdiff_t conversion), and the
// return expression computed `T* + T*`.  They now take ptrdiff_t,
// matching fetch_add/fetch_sub and std::atomic<T*>.  The CPP0X
// value constructor likewise took `T` where the stored/forwarded value
// is a pointer; it now takes `T*`.
1404 template<typename T> struct atomic< T* > : atomic_address
1406     T* load( memory_order = memory_order_seq_cst ) volatile;
1407     T* exchange( T*, memory_order = memory_order_seq_cst ) volatile;
1408     bool compare_exchange_weak( T*&, T*, memory_order, memory_order ) volatile;
1409     bool compare_exchange_strong( T*&, T*, memory_order, memory_order ) volatile;
1410     bool compare_exchange_weak( T*&, T*,
1411                        memory_order = memory_order_seq_cst ) volatile;
1412     bool compare_exchange_strong( T*&, T*,
1413                        memory_order = memory_order_seq_cst ) volatile;
1414     T* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
1415     T* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
1417     CPP0X( atomic() = default; )
1418     CPP0X( constexpr explicit atomic( T* __v__ ) : atomic_address( __v__ ) { } )
1419     CPP0X( atomic( const atomic& ) = delete; )
1420     atomic& operator =( const atomic& ) CPP0X(=delete);
1422     T* operator =( T* __v__ ) volatile
1423     { store( __v__ ); return __v__; }
1425     T* operator ++( int ) volatile
1426     { return fetch_add( 1 ); }
1428     T* operator --( int ) volatile
1429     { return fetch_sub( 1 ); }
1431     T* operator ++() volatile
1432     { return fetch_add( 1 ) + 1; }
1434     T* operator --() volatile
1435     { return fetch_sub( 1 ) - 1; }
1437     T* operator +=( ptrdiff_t __v__ ) volatile
1438     { return fetch_add( __v__ ) + __v__; }
1440     T* operator -=( ptrdiff_t __v__ ) volatile
1441     { return fetch_sub( __v__ ) - __v__; }
// Explicit specializations forwarding atomic<bool> / atomic<void*> to
// the hand-written atomic_bool / atomic_address structs, adding only
// the constructor set and value-assignment sugar.  NOTE(review): the
// closing `};` lines of each specialization are elided in this
// extraction; code text left byte-identical.
1449 template<> struct atomic< bool > : atomic_bool
1451 CPP0X( atomic() = default; )
1452 CPP0X( constexpr explicit atomic( bool __v__ )
1453 : atomic_bool( __v__ ) { } )
1454 CPP0X( atomic( const atomic& ) = delete; )
1455 atomic& operator =( const atomic& ) CPP0X(=delete);
1457 bool operator =( bool __v__ ) volatile
1458 { store( __v__ ); return __v__; }
1462 template<> struct atomic< void* > : atomic_address
1464 CPP0X( atomic() = default; )
1465 CPP0X( constexpr explicit atomic( void* __v__ )
1466 : atomic_address( __v__ ) { } )
1467 CPP0X( atomic( const atomic& ) = delete; )
1468 atomic& operator =( const atomic& ) CPP0X(=delete);
1470 void* operator =( void* __v__ ) volatile
1471 { store( __v__ ); return __v__; }
// atomic<char/signed char/unsigned char/short/unsigned short>:
// each forwards to its hand-written atomic_* base and adds only the
// constructor set and assignment sugar (identical boilerplate per
// type).  NOTE(review): closing `};` lines are elided in this
// extraction; code text left byte-identical.
1475 template<> struct atomic< char > : atomic_char
1477 CPP0X( atomic() = default; )
1478 CPP0X( constexpr explicit atomic( char __v__ )
1479 : atomic_char( __v__ ) { } )
1480 CPP0X( atomic( const atomic& ) = delete; )
1481 atomic& operator =( const atomic& ) CPP0X(=delete);
1483 char operator =( char __v__ ) volatile
1484 { store( __v__ ); return __v__; }
1488 template<> struct atomic< signed char > : atomic_schar
1490 CPP0X( atomic() = default; )
1491 CPP0X( constexpr explicit atomic( signed char __v__ )
1492 : atomic_schar( __v__ ) { } )
1493 CPP0X( atomic( const atomic& ) = delete; )
1494 atomic& operator =( const atomic& ) CPP0X(=delete);
1496 signed char operator =( signed char __v__ ) volatile
1497 { store( __v__ ); return __v__; }
1501 template<> struct atomic< unsigned char > : atomic_uchar
1503 CPP0X( atomic() = default; )
1504 CPP0X( constexpr explicit atomic( unsigned char __v__ )
1505 : atomic_uchar( __v__ ) { } )
1506 CPP0X( atomic( const atomic& ) = delete; )
1507 atomic& operator =( const atomic& ) CPP0X(=delete);
1509 unsigned char operator =( unsigned char __v__ ) volatile
1510 { store( __v__ ); return __v__; }
1514 template<> struct atomic< short > : atomic_short
1516 CPP0X( atomic() = default; )
1517 CPP0X( constexpr explicit atomic( short __v__ )
1518 : atomic_short( __v__ ) { } )
1519 CPP0X( atomic( const atomic& ) = delete; )
1520 atomic& operator =( const atomic& ) CPP0X(=delete);
1522 short operator =( short __v__ ) volatile
1523 { store( __v__ ); return __v__; }
1527 template<> struct atomic< unsigned short > : atomic_ushort
1529 CPP0X( atomic() = default; )
1530 CPP0X( constexpr explicit atomic( unsigned short __v__ )
1531 : atomic_ushort( __v__ ) { } )
1532 CPP0X( atomic( const atomic& ) = delete; )
1533 atomic& operator =( const atomic& ) CPP0X(=delete);
1535 unsigned short operator =( unsigned short __v__ ) volatile
1536 { store( __v__ ); return __v__; }
// atomic<int/unsigned/long/unsigned long/long long/unsigned long long>:
// same boilerplate pattern -- forward to the hand-written base, add
// constructors and assignment sugar.  NOTE(review): closing `};`
// lines are elided in this extraction; code text left byte-identical.
1540 template<> struct atomic< int > : atomic_int
1542 CPP0X( atomic() = default; )
1543 CPP0X( constexpr explicit atomic( int __v__ )
1544 : atomic_int( __v__ ) { } )
1545 CPP0X( atomic( const atomic& ) = delete; )
1546 atomic& operator =( const atomic& ) CPP0X(=delete);
1548 int operator =( int __v__ ) volatile
1549 { store( __v__ ); return __v__; }
1553 template<> struct atomic< unsigned int > : atomic_uint
1555 CPP0X( atomic() = default; )
1556 CPP0X( constexpr explicit atomic( unsigned int __v__ )
1557 : atomic_uint( __v__ ) { } )
1558 CPP0X( atomic( const atomic& ) = delete; )
1559 atomic& operator =( const atomic& ) CPP0X(=delete);
1561 unsigned int operator =( unsigned int __v__ ) volatile
1562 { store( __v__ ); return __v__; }
1566 template<> struct atomic< long > : atomic_long
1568 CPP0X( atomic() = default; )
1569 CPP0X( constexpr explicit atomic( long __v__ )
1570 : atomic_long( __v__ ) { } )
1571 CPP0X( atomic( const atomic& ) = delete; )
1572 atomic& operator =( const atomic& ) CPP0X(=delete);
1574 long operator =( long __v__ ) volatile
1575 { store( __v__ ); return __v__; }
1579 template<> struct atomic< unsigned long > : atomic_ulong
1581 CPP0X( atomic() = default; )
1582 CPP0X( constexpr explicit atomic( unsigned long __v__ )
1583 : atomic_ulong( __v__ ) { } )
1584 CPP0X( atomic( const atomic& ) = delete; )
1585 atomic& operator =( const atomic& ) CPP0X(=delete);
1587 unsigned long operator =( unsigned long __v__ ) volatile
1588 { store( __v__ ); return __v__; }
1592 template<> struct atomic< long long > : atomic_llong
1594 CPP0X( atomic() = default; )
1595 CPP0X( constexpr explicit atomic( long long __v__ )
1596 : atomic_llong( __v__ ) { } )
1597 CPP0X( atomic( const atomic& ) = delete; )
1598 atomic& operator =( const atomic& ) CPP0X(=delete);
1600 long long operator =( long long __v__ ) volatile
1601 { store( __v__ ); return __v__; }
1605 template<> struct atomic< unsigned long long > : atomic_ullong
1607 CPP0X( atomic() = default; )
1608 CPP0X( constexpr explicit atomic( unsigned long long __v__ )
1609 : atomic_ullong( __v__ ) { } )
1610 CPP0X( atomic( const atomic& ) = delete; )
1611 atomic& operator =( const atomic& ) CPP0X(=delete);
1613 unsigned long long operator =( unsigned long long __v__ ) volatile
1614 { store( __v__ ); return __v__; }
// atomic<wchar_t>: forwards to the atomic_wchar_t struct; same
// constructor/assignment boilerplate as the other specializations.
// NOTE(review): closing `};` elided in this extraction.
1618 template<> struct atomic< wchar_t > : atomic_wchar_t
1620 CPP0X( atomic() = default; )
1621 CPP0X( constexpr explicit atomic( wchar_t __v__ )
1622 : atomic_wchar_t( __v__ ) { } )
1623 CPP0X( atomic( const atomic& ) = delete; )
1624 atomic& operator =( const atomic& ) CPP0X(=delete);
1626 wchar_t operator =( wchar_t __v__ ) volatile
1627 { store( __v__ ); return __v__; }
// ---- atomic_bool free-function API ---------------------------------
// Each operation comes in an _explicit form taking a memory_order and
// a convenience form that forwards with memory_order_seq_cst.  The
// _explicit forms expand the _ATOMIC_* statement-expression macros
// defined earlier in this header.  NOTE(review): the body line of
// atomic_is_lock_free is elided in this extraction.
1637 inline bool atomic_is_lock_free
1638 ( const volatile atomic_bool* __a__ )
1641 inline bool atomic_load_explicit
1642 ( volatile atomic_bool* __a__, memory_order __x__ )
1643 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1645 inline bool atomic_load
1646 ( volatile atomic_bool* __a__ ) { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1648 inline void atomic_store_explicit
1649 ( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
1650 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1652 inline void atomic_store
1653 ( volatile atomic_bool* __a__, bool __m__ )
1654 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
// Exchange is plain assignment routed through the modify macro.
1656 inline bool atomic_exchange_explicit
1657 ( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
1658 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1660 inline bool atomic_exchange
1661 ( volatile atomic_bool* __a__, bool __m__ )
1662 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
// NOTE(review): the failure ordering __y__ is accepted but not passed
// to the CMPSWP macros -- only the success order __x__ is used.
1664 inline bool atomic_compare_exchange_weak_explicit
1665 ( volatile atomic_bool* __a__, bool* __e__, bool __m__,
1666 memory_order __x__, memory_order __y__ )
1667 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1669 inline bool atomic_compare_exchange_strong_explicit
1670 ( volatile atomic_bool* __a__, bool* __e__, bool __m__,
1671 memory_order __x__, memory_order __y__ )
1672 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1674 inline bool atomic_compare_exchange_weak
1675 ( volatile atomic_bool* __a__, bool* __e__, bool __m__ )
1676 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1677 memory_order_seq_cst, memory_order_seq_cst ); }
1679 inline bool atomic_compare_exchange_strong
1680 ( volatile atomic_bool* __a__, bool* __e__, bool __m__ )
1681 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1682 memory_order_seq_cst, memory_order_seq_cst ); }
1684 inline void atomic_fence
1685 ( const volatile atomic_bool* __a__, memory_order __x__ )
1686 { _ATOMIC_FENCE_( __a__, __x__ ); }
// ---- atomic_address (void*) free-function API ----------------------
// Same _explicit / seq_cst-default pairing as the atomic_bool set.
// NOTE(review): the body line of atomic_is_lock_free is elided in
// this extraction.
1689 inline bool atomic_is_lock_free( const volatile atomic_address* __a__ )
1692 inline void* atomic_load_explicit
1693 ( volatile atomic_address* __a__, memory_order __x__ )
1694 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1696 inline void* atomic_load( volatile atomic_address* __a__ )
1697 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1699 inline void atomic_store_explicit
1700 ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
1701 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1703 inline void atomic_store
1704 ( volatile atomic_address* __a__, void* __m__ )
1705 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1707 inline void* atomic_exchange_explicit
1708 ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
1709 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1711 inline void* atomic_exchange
1712 ( volatile atomic_address* __a__, void* __m__ )
1713 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
// NOTE(review): failure ordering __y__ is ignored by the macros.
1715 inline bool atomic_compare_exchange_weak_explicit
1716 ( volatile atomic_address* __a__, void** __e__, void* __m__,
1717 memory_order __x__, memory_order __y__ )
1718 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1720 inline bool atomic_compare_exchange_strong_explicit
1721 ( volatile atomic_address* __a__, void** __e__, void* __m__,
1722 memory_order __x__, memory_order __y__ )
1723 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1725 inline bool atomic_compare_exchange_weak
1726 ( volatile atomic_address* __a__, void** __e__, void* __m__ )
1727 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1728 memory_order_seq_cst, memory_order_seq_cst ); }
1730 inline bool atomic_compare_exchange_strong
1731 ( volatile atomic_address* __a__, void** __e__, void* __m__ )
1732 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1733 memory_order_seq_cst, memory_order_seq_cst ); }
1735 inline void atomic_fence
1736 ( const volatile atomic_address* __a__, memory_order __x__ )
1737 { _ATOMIC_FENCE_( __a__, __x__ ); }
// ---- atomic_char free-function API ---------------------------------
// Identical pattern to the atomic_bool/atomic_address sets above.
// NOTE(review): the body line of atomic_is_lock_free is elided in
// this extraction.
1740 inline bool atomic_is_lock_free( const volatile atomic_char* __a__ )
1743 inline char atomic_load_explicit
1744 ( volatile atomic_char* __a__, memory_order __x__ )
1745 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1747 inline char atomic_load( volatile atomic_char* __a__ )
1748 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1750 inline void atomic_store_explicit
1751 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
1752 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1754 inline void atomic_store
1755 ( volatile atomic_char* __a__, char __m__ )
1756 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1758 inline char atomic_exchange_explicit
1759 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
1760 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1762 inline char atomic_exchange
1763 ( volatile atomic_char* __a__, char __m__ )
1764 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
// NOTE(review): failure ordering __y__ is ignored by the macros.
1766 inline bool atomic_compare_exchange_weak_explicit
1767 ( volatile atomic_char* __a__, char* __e__, char __m__,
1768 memory_order __x__, memory_order __y__ )
1769 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1771 inline bool atomic_compare_exchange_strong_explicit
1772 ( volatile atomic_char* __a__, char* __e__, char __m__,
1773 memory_order __x__, memory_order __y__ )
1774 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1776 inline bool atomic_compare_exchange_weak
1777 ( volatile atomic_char* __a__, char* __e__, char __m__ )
1778 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1779 memory_order_seq_cst, memory_order_seq_cst ); }
1781 inline bool atomic_compare_exchange_strong
1782 ( volatile atomic_char* __a__, char* __e__, char __m__ )
1783 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1784 memory_order_seq_cst, memory_order_seq_cst ); }
1786 inline void atomic_fence
1787 ( const volatile atomic_char* __a__, memory_order __x__ )
1788 { _ATOMIC_FENCE_( __a__, __x__ ); }
1791 inline bool atomic_is_lock_free( const volatile atomic_schar* __a__ )
/* atomic_schar core operations (same generated pattern as the atomic_char
   set above).  *_explicit functions expand the _ATOMIC_* helper macros;
   non-_explicit overloads forward with memory_order_seq_cst. */
inline signed char atomic_load_explicit
( volatile atomic_schar* __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_( __a__, __x__ ); }

inline signed char atomic_load( volatile atomic_schar* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

inline void atomic_store_explicit
( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_schar* __a__, signed char __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

/* Exchange is a plain assignment routed through _ATOMIC_MODIFY_. */
inline signed char atomic_exchange_explicit
( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }

inline signed char atomic_exchange
( volatile atomic_schar* __a__, signed char __m__ )
{ return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }

/* NOTE(review): the failure order __y__ is accepted but never forwarded;
   only the success order __x__ reaches the _ATOMIC_CMPSWP_* macros. */
inline bool atomic_compare_exchange_weak_explicit
( volatile atomic_schar* __a__, signed char* __e__, signed char __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_strong_explicit
( volatile atomic_schar* __a__, signed char* __e__, signed char __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_weak
( volatile atomic_schar* __a__, signed char* __e__, signed char __m__ )
{ return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

inline bool atomic_compare_exchange_strong
( volatile atomic_schar* __a__, signed char* __e__, signed char __m__ )
{ return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

/* Per-object fence; expands _ATOMIC_FENCE_ with ordering __x__. */
inline void atomic_fence
( const volatile atomic_schar* __a__, memory_order __x__ )
{ _ATOMIC_FENCE_( __a__, __x__ ); }
1842 inline bool atomic_is_lock_free( const volatile atomic_uchar* __a__ )
/* atomic_uchar core operations (same generated pattern as the atomic_char
   set above).  *_explicit functions expand the _ATOMIC_* helper macros;
   non-_explicit overloads forward with memory_order_seq_cst. */
inline unsigned char atomic_load_explicit
( volatile atomic_uchar* __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_( __a__, __x__ ); }

inline unsigned char atomic_load( volatile atomic_uchar* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

inline void atomic_store_explicit
( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_uchar* __a__, unsigned char __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

/* Exchange is a plain assignment routed through _ATOMIC_MODIFY_. */
inline unsigned char atomic_exchange_explicit
( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }

inline unsigned char atomic_exchange
( volatile atomic_uchar* __a__, unsigned char __m__ )
{ return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }

/* NOTE(review): the failure order __y__ is accepted but never forwarded;
   only the success order __x__ reaches the _ATOMIC_CMPSWP_* macros. */
inline bool atomic_compare_exchange_weak_explicit
( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_strong_explicit
( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_weak
( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__ )
{ return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

inline bool atomic_compare_exchange_strong
( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__ )
{ return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

/* Per-object fence; expands _ATOMIC_FENCE_ with ordering __x__. */
inline void atomic_fence
( const volatile atomic_uchar* __a__, memory_order __x__ )
{ _ATOMIC_FENCE_( __a__, __x__ ); }
1893 inline bool atomic_is_lock_free( const volatile atomic_short* __a__ )
/* atomic_short core operations (same generated pattern as the atomic_char
   set above).  *_explicit functions expand the _ATOMIC_* helper macros;
   non-_explicit overloads forward with memory_order_seq_cst. */
inline short atomic_load_explicit
( volatile atomic_short* __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_( __a__, __x__ ); }

inline short atomic_load( volatile atomic_short* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

inline void atomic_store_explicit
( volatile atomic_short* __a__, short __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_short* __a__, short __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

/* Exchange is a plain assignment routed through _ATOMIC_MODIFY_. */
inline short atomic_exchange_explicit
( volatile atomic_short* __a__, short __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }

inline short atomic_exchange
( volatile atomic_short* __a__, short __m__ )
{ return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }

/* NOTE(review): the failure order __y__ is accepted but never forwarded;
   only the success order __x__ reaches the _ATOMIC_CMPSWP_* macros. */
inline bool atomic_compare_exchange_weak_explicit
( volatile atomic_short* __a__, short* __e__, short __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_strong_explicit
( volatile atomic_short* __a__, short* __e__, short __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_weak
( volatile atomic_short* __a__, short* __e__, short __m__ )
{ return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

inline bool atomic_compare_exchange_strong
( volatile atomic_short* __a__, short* __e__, short __m__ )
{ return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

/* Per-object fence; expands _ATOMIC_FENCE_ with ordering __x__. */
inline void atomic_fence
( const volatile atomic_short* __a__, memory_order __x__ )
{ _ATOMIC_FENCE_( __a__, __x__ ); }
1944 inline bool atomic_is_lock_free( const volatile atomic_ushort* __a__ )
/* atomic_ushort core operations (same generated pattern as the atomic_char
   set above).  *_explicit functions expand the _ATOMIC_* helper macros;
   non-_explicit overloads forward with memory_order_seq_cst. */
inline unsigned short atomic_load_explicit
( volatile atomic_ushort* __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_( __a__, __x__ ); }

inline unsigned short atomic_load( volatile atomic_ushort* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

inline void atomic_store_explicit
( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_ushort* __a__, unsigned short __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

/* Exchange is a plain assignment routed through _ATOMIC_MODIFY_. */
inline unsigned short atomic_exchange_explicit
( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }

inline unsigned short atomic_exchange
( volatile atomic_ushort* __a__, unsigned short __m__ )
{ return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }

/* NOTE(review): the failure order __y__ is accepted but never forwarded;
   only the success order __x__ reaches the _ATOMIC_CMPSWP_* macros. */
inline bool atomic_compare_exchange_weak_explicit
( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_strong_explicit
( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_weak
( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__ )
{ return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

inline bool atomic_compare_exchange_strong
( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__ )
{ return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

/* Per-object fence; expands _ATOMIC_FENCE_ with ordering __x__. */
inline void atomic_fence
( const volatile atomic_ushort* __a__, memory_order __x__ )
{ _ATOMIC_FENCE_( __a__, __x__ ); }
1995 inline bool atomic_is_lock_free( const volatile atomic_int* __a__ )
/* atomic_int core operations (same generated pattern as the atomic_char
   set above).  *_explicit functions expand the _ATOMIC_* helper macros;
   non-_explicit overloads forward with memory_order_seq_cst. */
inline int atomic_load_explicit
( volatile atomic_int* __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_( __a__, __x__ ); }

inline int atomic_load( volatile atomic_int* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

inline void atomic_store_explicit
( volatile atomic_int* __a__, int __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_int* __a__, int __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

/* Exchange is a plain assignment routed through _ATOMIC_MODIFY_. */
inline int atomic_exchange_explicit
( volatile atomic_int* __a__, int __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }

inline int atomic_exchange
( volatile atomic_int* __a__, int __m__ )
{ return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }

/* NOTE(review): the failure order __y__ is accepted but never forwarded;
   only the success order __x__ reaches the _ATOMIC_CMPSWP_* macros. */
inline bool atomic_compare_exchange_weak_explicit
( volatile atomic_int* __a__, int* __e__, int __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_strong_explicit
( volatile atomic_int* __a__, int* __e__, int __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_weak
( volatile atomic_int* __a__, int* __e__, int __m__ )
{ return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

inline bool atomic_compare_exchange_strong
( volatile atomic_int* __a__, int* __e__, int __m__ )
{ return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

/* Per-object fence; expands _ATOMIC_FENCE_ with ordering __x__. */
inline void atomic_fence
( const volatile atomic_int* __a__, memory_order __x__ )
{ _ATOMIC_FENCE_( __a__, __x__ ); }
2046 inline bool atomic_is_lock_free( const volatile atomic_uint* __a__ )
/* atomic_uint core operations (same generated pattern as the atomic_char
   set above).  *_explicit functions expand the _ATOMIC_* helper macros;
   non-_explicit overloads forward with memory_order_seq_cst. */
inline unsigned int atomic_load_explicit
( volatile atomic_uint* __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_( __a__, __x__ ); }

inline unsigned int atomic_load( volatile atomic_uint* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

inline void atomic_store_explicit
( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_uint* __a__, unsigned int __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

/* Exchange is a plain assignment routed through _ATOMIC_MODIFY_. */
inline unsigned int atomic_exchange_explicit
( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }

inline unsigned int atomic_exchange
( volatile atomic_uint* __a__, unsigned int __m__ )
{ return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }

/* NOTE(review): the failure order __y__ is accepted but never forwarded;
   only the success order __x__ reaches the _ATOMIC_CMPSWP_* macros. */
inline bool atomic_compare_exchange_weak_explicit
( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_strong_explicit
( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_weak
( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__ )
{ return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

inline bool atomic_compare_exchange_strong
( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__ )
{ return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

/* Per-object fence; expands _ATOMIC_FENCE_ with ordering __x__. */
inline void atomic_fence
( const volatile atomic_uint* __a__, memory_order __x__ )
{ _ATOMIC_FENCE_( __a__, __x__ ); }
2097 inline bool atomic_is_lock_free( const volatile atomic_long* __a__ )
/* atomic_long core operations (same generated pattern as the atomic_char
   set above).  *_explicit functions expand the _ATOMIC_* helper macros;
   non-_explicit overloads forward with memory_order_seq_cst. */
inline long atomic_load_explicit
( volatile atomic_long* __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_( __a__, __x__ ); }

inline long atomic_load( volatile atomic_long* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

inline void atomic_store_explicit
( volatile atomic_long* __a__, long __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_long* __a__, long __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

/* Exchange is a plain assignment routed through _ATOMIC_MODIFY_. */
inline long atomic_exchange_explicit
( volatile atomic_long* __a__, long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }

inline long atomic_exchange
( volatile atomic_long* __a__, long __m__ )
{ return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }

/* NOTE(review): the failure order __y__ is accepted but never forwarded;
   only the success order __x__ reaches the _ATOMIC_CMPSWP_* macros. */
inline bool atomic_compare_exchange_weak_explicit
( volatile atomic_long* __a__, long* __e__, long __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_strong_explicit
( volatile atomic_long* __a__, long* __e__, long __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_weak
( volatile atomic_long* __a__, long* __e__, long __m__ )
{ return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

inline bool atomic_compare_exchange_strong
( volatile atomic_long* __a__, long* __e__, long __m__ )
{ return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

/* Per-object fence; expands _ATOMIC_FENCE_ with ordering __x__. */
inline void atomic_fence
( const volatile atomic_long* __a__, memory_order __x__ )
{ _ATOMIC_FENCE_( __a__, __x__ ); }
2148 inline bool atomic_is_lock_free( const volatile atomic_ulong* __a__ )
/* atomic_ulong core operations (same generated pattern as the atomic_char
   set above).  *_explicit functions expand the _ATOMIC_* helper macros;
   non-_explicit overloads forward with memory_order_seq_cst. */
inline unsigned long atomic_load_explicit
( volatile atomic_ulong* __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_( __a__, __x__ ); }

inline unsigned long atomic_load( volatile atomic_ulong* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

inline void atomic_store_explicit
( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_ulong* __a__, unsigned long __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

/* Exchange is a plain assignment routed through _ATOMIC_MODIFY_. */
inline unsigned long atomic_exchange_explicit
( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }

inline unsigned long atomic_exchange
( volatile atomic_ulong* __a__, unsigned long __m__ )
{ return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }

/* NOTE(review): the failure order __y__ is accepted but never forwarded;
   only the success order __x__ reaches the _ATOMIC_CMPSWP_* macros. */
inline bool atomic_compare_exchange_weak_explicit
( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_strong_explicit
( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_weak
( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__ )
{ return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

inline bool atomic_compare_exchange_strong
( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__ )
{ return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

/* Per-object fence; expands _ATOMIC_FENCE_ with ordering __x__. */
inline void atomic_fence
( const volatile atomic_ulong* __a__, memory_order __x__ )
{ _ATOMIC_FENCE_( __a__, __x__ ); }
2199 inline bool atomic_is_lock_free( const volatile atomic_llong* __a__ )
/* atomic_llong core operations (same generated pattern as the atomic_char
   set above).  *_explicit functions expand the _ATOMIC_* helper macros;
   non-_explicit overloads forward with memory_order_seq_cst. */
inline long long atomic_load_explicit
( volatile atomic_llong* __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_( __a__, __x__ ); }

inline long long atomic_load( volatile atomic_llong* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

inline void atomic_store_explicit
( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_llong* __a__, long long __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

/* Exchange is a plain assignment routed through _ATOMIC_MODIFY_. */
inline long long atomic_exchange_explicit
( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }

inline long long atomic_exchange
( volatile atomic_llong* __a__, long long __m__ )
{ return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }

/* NOTE(review): the failure order __y__ is accepted but never forwarded;
   only the success order __x__ reaches the _ATOMIC_CMPSWP_* macros. */
inline bool atomic_compare_exchange_weak_explicit
( volatile atomic_llong* __a__, long long* __e__, long long __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_strong_explicit
( volatile atomic_llong* __a__, long long* __e__, long long __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_weak
( volatile atomic_llong* __a__, long long* __e__, long long __m__ )
{ return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

inline bool atomic_compare_exchange_strong
( volatile atomic_llong* __a__, long long* __e__, long long __m__ )
{ return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

/* Per-object fence; expands _ATOMIC_FENCE_ with ordering __x__. */
inline void atomic_fence
( const volatile atomic_llong* __a__, memory_order __x__ )
{ _ATOMIC_FENCE_( __a__, __x__ ); }
2250 inline bool atomic_is_lock_free( const volatile atomic_ullong* __a__ )
/* atomic_ullong core operations (same generated pattern as the atomic_char
   set above).  *_explicit functions expand the _ATOMIC_* helper macros;
   non-_explicit overloads forward with memory_order_seq_cst. */
inline unsigned long long atomic_load_explicit
( volatile atomic_ullong* __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_( __a__, __x__ ); }

inline unsigned long long atomic_load( volatile atomic_ullong* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

inline void atomic_store_explicit
( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_ullong* __a__, unsigned long long __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

/* Exchange is a plain assignment routed through _ATOMIC_MODIFY_. */
inline unsigned long long atomic_exchange_explicit
( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }

inline unsigned long long atomic_exchange
( volatile atomic_ullong* __a__, unsigned long long __m__ )
{ return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }

/* NOTE(review): the failure order __y__ is accepted but never forwarded;
   only the success order __x__ reaches the _ATOMIC_CMPSWP_* macros. */
inline bool atomic_compare_exchange_weak_explicit
( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_strong_explicit
( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_weak
( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__ )
{ return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

inline bool atomic_compare_exchange_strong
( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__ )
{ return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

/* Per-object fence; expands _ATOMIC_FENCE_ with ordering __x__. */
inline void atomic_fence
( const volatile atomic_ullong* __a__, memory_order __x__ )
{ _ATOMIC_FENCE_( __a__, __x__ ); }
2301 inline bool atomic_is_lock_free( const volatile atomic_wchar_t* __a__ )
/* atomic_wchar_t core operations (same generated pattern as the
   atomic_char set above).  *_explicit functions expand the _ATOMIC_*
   helper macros; non-_explicit overloads forward with
   memory_order_seq_cst. */
inline wchar_t atomic_load_explicit
( volatile atomic_wchar_t* __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_( __a__, __x__ ); }

inline wchar_t atomic_load( volatile atomic_wchar_t* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

inline void atomic_store_explicit
( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_wchar_t* __a__, wchar_t __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

/* Exchange is a plain assignment routed through _ATOMIC_MODIFY_. */
inline wchar_t atomic_exchange_explicit
( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }

inline wchar_t atomic_exchange
( volatile atomic_wchar_t* __a__, wchar_t __m__ )
{ return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }

/* NOTE(review): the failure order __y__ is accepted but never forwarded;
   only the success order __x__ reaches the _ATOMIC_CMPSWP_* macros. */
inline bool atomic_compare_exchange_weak_explicit
( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_strong_explicit
( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_exchange_weak
( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__ )
{ return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

inline bool atomic_compare_exchange_strong
( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__ )
{ return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
    memory_order_seq_cst, memory_order_seq_cst ); }

/* Per-object fence; expands _ATOMIC_FENCE_ with ordering __x__. */
inline void atomic_fence
( const volatile atomic_wchar_t* __a__, memory_order __x__ )
{ _ATOMIC_FENCE_( __a__, __x__ ); }
2352 inline void* atomic_fetch_add_explicit
2353 ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
2355 void* volatile* __p__ = &((__a__)->__f__);
2356 void* __r__ = (void *) model_rmwr_action((void *)__p__, __x__);
2357 model_rmw_action((void *)__p__, __x__, (uint64_t) ((char*)(*__p__) + __m__));
2360 inline void* atomic_fetch_add
2361 ( volatile atomic_address* __a__, ptrdiff_t __m__ )
2362 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2365 inline void* atomic_fetch_sub_explicit
2366 ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
2368 void* volatile* __p__ = &((__a__)->__f__);
2369 void* __r__ = (void *) model_rmwr_action((void *)__p__, __x__);
2370 model_rmw_action((void *)__p__, __x__, (uint64_t)((char*)(*__p__) - __m__));
2373 inline void* atomic_fetch_sub
2374 ( volatile atomic_address* __a__, ptrdiff_t __m__ )
2375 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_char fetch-and-op operations (generated pattern; one such set
   exists per atomic integral type).  Each op expands _ATOMIC_MODIFY_ with
   the matching compound assignment (+=, -=, &=, |=, ^=); presumably the
   macro yields the value held before the modification (the fetch_*
   contract) -- macro body is defined earlier in the file, confirm there.
   Non-_explicit forms forward with memory_order_seq_cst. */
inline char atomic_fetch_add_explicit
( volatile atomic_char* __a__, char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }

inline char atomic_fetch_add
( volatile atomic_char* __a__, char __m__ )
{ return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }

inline char atomic_fetch_sub_explicit
( volatile atomic_char* __a__, char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }

inline char atomic_fetch_sub
( volatile atomic_char* __a__, char __m__ )
{ return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }

inline char atomic_fetch_and_explicit
( volatile atomic_char* __a__, char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }

inline char atomic_fetch_and
( volatile atomic_char* __a__, char __m__ )
{ return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }

inline char atomic_fetch_or_explicit
( volatile atomic_char* __a__, char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }

inline char atomic_fetch_or
( volatile atomic_char* __a__, char __m__ )
{ return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }

inline char atomic_fetch_xor_explicit
( volatile atomic_char* __a__, char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }

inline char atomic_fetch_xor
( volatile atomic_char* __a__, char __m__ )
{ return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_schar fetch-and-op operations (same generated pattern as the
   atomic_char fetch set above).  Each op expands _ATOMIC_MODIFY_ with the
   matching compound assignment; non-_explicit forms forward with
   memory_order_seq_cst. */
inline signed char atomic_fetch_add_explicit
( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }

inline signed char atomic_fetch_add
( volatile atomic_schar* __a__, signed char __m__ )
{ return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }

inline signed char atomic_fetch_sub_explicit
( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }

inline signed char atomic_fetch_sub
( volatile atomic_schar* __a__, signed char __m__ )
{ return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }

inline signed char atomic_fetch_and_explicit
( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }

inline signed char atomic_fetch_and
( volatile atomic_schar* __a__, signed char __m__ )
{ return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }

inline signed char atomic_fetch_or_explicit
( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }

inline signed char atomic_fetch_or
( volatile atomic_schar* __a__, signed char __m__ )
{ return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }

inline signed char atomic_fetch_xor_explicit
( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }

inline signed char atomic_fetch_xor
( volatile atomic_schar* __a__, signed char __m__ )
{ return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_uchar fetch-and-op operations (same generated pattern as the
   atomic_char fetch set above).  Each op expands _ATOMIC_MODIFY_ with the
   matching compound assignment; non-_explicit forms forward with
   memory_order_seq_cst. */
inline unsigned char atomic_fetch_add_explicit
( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }

inline unsigned char atomic_fetch_add
( volatile atomic_uchar* __a__, unsigned char __m__ )
{ return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned char atomic_fetch_sub_explicit
( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }

inline unsigned char atomic_fetch_sub
( volatile atomic_uchar* __a__, unsigned char __m__ )
{ return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned char atomic_fetch_and_explicit
( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }

inline unsigned char atomic_fetch_and
( volatile atomic_uchar* __a__, unsigned char __m__ )
{ return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned char atomic_fetch_or_explicit
( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }

inline unsigned char atomic_fetch_or
( volatile atomic_uchar* __a__, unsigned char __m__ )
{ return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned char atomic_fetch_xor_explicit
( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }

inline unsigned char atomic_fetch_xor
( volatile atomic_uchar* __a__, unsigned char __m__ )
{ return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_short fetch-and-op operations (same generated pattern as the
   atomic_char fetch set above).  Each op expands _ATOMIC_MODIFY_ with the
   matching compound assignment; non-_explicit forms forward with
   memory_order_seq_cst. */
inline short atomic_fetch_add_explicit
( volatile atomic_short* __a__, short __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }

inline short atomic_fetch_add
( volatile atomic_short* __a__, short __m__ )
{ return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }

inline short atomic_fetch_sub_explicit
( volatile atomic_short* __a__, short __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }

inline short atomic_fetch_sub
( volatile atomic_short* __a__, short __m__ )
{ return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }

inline short atomic_fetch_and_explicit
( volatile atomic_short* __a__, short __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }

inline short atomic_fetch_and
( volatile atomic_short* __a__, short __m__ )
{ return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }

inline short atomic_fetch_or_explicit
( volatile atomic_short* __a__, short __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }

inline short atomic_fetch_or
( volatile atomic_short* __a__, short __m__ )
{ return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }

inline short atomic_fetch_xor_explicit
( volatile atomic_short* __a__, short __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }

inline short atomic_fetch_xor
( volatile atomic_short* __a__, short __m__ )
{ return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/* atomic_ushort fetch-and-op operations (same generated pattern as the
   atomic_char fetch set above).  Each op expands _ATOMIC_MODIFY_ with the
   matching compound assignment; non-_explicit forms forward with
   memory_order_seq_cst. */
inline unsigned short atomic_fetch_add_explicit
( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }

inline unsigned short atomic_fetch_add
( volatile atomic_ushort* __a__, unsigned short __m__ )
{ return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned short atomic_fetch_sub_explicit
( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }

inline unsigned short atomic_fetch_sub
( volatile atomic_ushort* __a__, unsigned short __m__ )
{ return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned short atomic_fetch_and_explicit
( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }

inline unsigned short atomic_fetch_and
( volatile atomic_ushort* __a__, unsigned short __m__ )
{ return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned short atomic_fetch_or_explicit
( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }

inline unsigned short atomic_fetch_or
( volatile atomic_ushort* __a__, unsigned short __m__ )
{ return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned short atomic_fetch_xor_explicit
( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }

inline unsigned short atomic_fetch_xor
( volatile atomic_ushort* __a__, unsigned short __m__ )
{ return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2602 inline int atomic_fetch_add_explicit
2603 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2604 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2606 inline int atomic_fetch_add
2607 ( volatile atomic_int* __a__, int __m__ )
2608 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2611 inline int atomic_fetch_sub_explicit
2612 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2613 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2615 inline int atomic_fetch_sub
2616 ( volatile atomic_int* __a__, int __m__ )
2617 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2620 inline int atomic_fetch_and_explicit
2621 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2622 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2624 inline int atomic_fetch_and
2625 ( volatile atomic_int* __a__, int __m__ )
2626 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2629 inline int atomic_fetch_or_explicit
2630 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2631 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2633 inline int atomic_fetch_or
2634 ( volatile atomic_int* __a__, int __m__ )
2635 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2638 inline int atomic_fetch_xor_explicit
2639 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2640 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2642 inline int atomic_fetch_xor
2643 ( volatile atomic_int* __a__, int __m__ )
2644 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2647 inline unsigned int atomic_fetch_add_explicit
2648 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2649 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2651 inline unsigned int atomic_fetch_add
2652 ( volatile atomic_uint* __a__, unsigned int __m__ )
2653 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2656 inline unsigned int atomic_fetch_sub_explicit
2657 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2658 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2660 inline unsigned int atomic_fetch_sub
2661 ( volatile atomic_uint* __a__, unsigned int __m__ )
2662 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2665 inline unsigned int atomic_fetch_and_explicit
2666 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2667 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2669 inline unsigned int atomic_fetch_and
2670 ( volatile atomic_uint* __a__, unsigned int __m__ )
2671 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2674 inline unsigned int atomic_fetch_or_explicit
2675 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2676 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2678 inline unsigned int atomic_fetch_or
2679 ( volatile atomic_uint* __a__, unsigned int __m__ )
2680 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2683 inline unsigned int atomic_fetch_xor_explicit
2684 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2685 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2687 inline unsigned int atomic_fetch_xor
2688 ( volatile atomic_uint* __a__, unsigned int __m__ )
2689 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2692 inline long atomic_fetch_add_explicit
2693 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2694 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2696 inline long atomic_fetch_add
2697 ( volatile atomic_long* __a__, long __m__ )
2698 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2701 inline long atomic_fetch_sub_explicit
2702 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2703 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2705 inline long atomic_fetch_sub
2706 ( volatile atomic_long* __a__, long __m__ )
2707 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2710 inline long atomic_fetch_and_explicit
2711 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2712 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2714 inline long atomic_fetch_and
2715 ( volatile atomic_long* __a__, long __m__ )
2716 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2719 inline long atomic_fetch_or_explicit
2720 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2721 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2723 inline long atomic_fetch_or
2724 ( volatile atomic_long* __a__, long __m__ )
2725 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2728 inline long atomic_fetch_xor_explicit
2729 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2730 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2732 inline long atomic_fetch_xor
2733 ( volatile atomic_long* __a__, long __m__ )
2734 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2737 inline unsigned long atomic_fetch_add_explicit
2738 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2739 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2741 inline unsigned long atomic_fetch_add
2742 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2743 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2746 inline unsigned long atomic_fetch_sub_explicit
2747 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2748 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2750 inline unsigned long atomic_fetch_sub
2751 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2752 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2755 inline unsigned long atomic_fetch_and_explicit
2756 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2757 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2759 inline unsigned long atomic_fetch_and
2760 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2761 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2764 inline unsigned long atomic_fetch_or_explicit
2765 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2766 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2768 inline unsigned long atomic_fetch_or
2769 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2770 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2773 inline unsigned long atomic_fetch_xor_explicit
2774 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2775 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2777 inline unsigned long atomic_fetch_xor
2778 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2779 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2782 inline long long atomic_fetch_add_explicit
2783 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2784 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2786 inline long long atomic_fetch_add
2787 ( volatile atomic_llong* __a__, long long __m__ )
2788 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2791 inline long long atomic_fetch_sub_explicit
2792 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2793 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2795 inline long long atomic_fetch_sub
2796 ( volatile atomic_llong* __a__, long long __m__ )
2797 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2800 inline long long atomic_fetch_and_explicit
2801 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2802 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2804 inline long long atomic_fetch_and
2805 ( volatile atomic_llong* __a__, long long __m__ )
2806 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2809 inline long long atomic_fetch_or_explicit
2810 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2811 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2813 inline long long atomic_fetch_or
2814 ( volatile atomic_llong* __a__, long long __m__ )
2815 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2818 inline long long atomic_fetch_xor_explicit
2819 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2820 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2822 inline long long atomic_fetch_xor
2823 ( volatile atomic_llong* __a__, long long __m__ )
2824 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2827 inline unsigned long long atomic_fetch_add_explicit
2828 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2829 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2831 inline unsigned long long atomic_fetch_add
2832 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2833 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2836 inline unsigned long long atomic_fetch_sub_explicit
2837 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2838 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2840 inline unsigned long long atomic_fetch_sub
2841 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2842 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2845 inline unsigned long long atomic_fetch_and_explicit
2846 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2847 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2849 inline unsigned long long atomic_fetch_and
2850 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2851 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2854 inline unsigned long long atomic_fetch_or_explicit
2855 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2856 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2858 inline unsigned long long atomic_fetch_or
2859 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2860 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2863 inline unsigned long long atomic_fetch_xor_explicit
2864 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2865 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2867 inline unsigned long long atomic_fetch_xor
2868 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2869 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2872 inline wchar_t atomic_fetch_add_explicit
2873 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2874 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2876 inline wchar_t atomic_fetch_add
2877 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2878 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2881 inline wchar_t atomic_fetch_sub_explicit
2882 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2883 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2885 inline wchar_t atomic_fetch_sub
2886 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2887 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2890 inline wchar_t atomic_fetch_and_explicit
2891 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2892 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2894 inline wchar_t atomic_fetch_and
2895 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2896 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2899 inline wchar_t atomic_fetch_or_explicit
2900 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2901 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2903 inline wchar_t atomic_fetch_or
2904 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2905 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2908 inline wchar_t atomic_fetch_xor_explicit
2909 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2910 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2912 inline wchar_t atomic_fetch_xor
2913 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2914 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2920 #define atomic_is_lock_free( __a__ ) \
/* Generic atomic operations, implemented as macros over the _ATOMIC_*_
   primitives (defined near the top of this header) so that one spelling
   works for every atomic type.  Each unsuffixed operation is simply the
   _explicit form fixed to memory_order_seq_cst.  NOTE: because these are
   function-like macros, from this point on they textually replace any
   use of the same-named functions defined above. */

#define atomic_load( __a__ ) \
_ATOMIC_LOAD_( __a__, memory_order_seq_cst )

#define atomic_load_explicit( __a__, __x__ ) \
_ATOMIC_LOAD_( __a__, __x__ )

#define atomic_init( __a__, __m__ ) \
_ATOMIC_INIT_( __a__, __m__ )

#define atomic_store( __a__, __m__ ) \
_ATOMIC_STORE_( __a__, __m__, memory_order_seq_cst )

#define atomic_store_explicit( __a__, __m__, __x__ ) \
_ATOMIC_STORE_( __a__, __m__, __x__ )

/* Exchange is fetch-and-modify with plain assignment as the operator. */
#define atomic_exchange( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, =, __m__, memory_order_seq_cst )

#define atomic_exchange_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, =, __m__, __x__ )

#define atomic_compare_exchange_weak( __a__, __e__, __m__ ) \
_ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, memory_order_seq_cst )

#define atomic_compare_exchange_strong( __a__, __e__, __m__ ) \
_ATOMIC_CMPSWP_( __a__, __e__, __m__, memory_order_seq_cst )

/* The failure ordering __y__ is accepted for interface compatibility but
   is discarded: the underlying primitives take only the success order. */
#define atomic_compare_exchange_weak_explicit( __a__, __e__, __m__, __x__, __y__ ) \
_ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ )

#define atomic_compare_exchange_strong_explicit( __a__, __e__, __m__, __x__, __y__ ) \
_ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ )

/* Wrapped in a GNU statement expression (the extension this header already
   relies on), so the invocation can appear in expression position. */
#define atomic_fence( __a__, __x__ ) \
({ _ATOMIC_FENCE_( __a__, __x__ ); })

/* Generic fetch-and-<op>: the operator token is passed straight through
   to _ATOMIC_MODIFY_. */
#define atomic_fetch_add_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, +=, __m__, __x__ )

#define atomic_fetch_add( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, +=, __m__, memory_order_seq_cst )

#define atomic_fetch_sub_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, -=, __m__, __x__ )

#define atomic_fetch_sub( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, -=, __m__, memory_order_seq_cst )

#define atomic_fetch_and_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, &=, __m__, __x__ )

#define atomic_fetch_and( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, &=, __m__, memory_order_seq_cst )

#define atomic_fetch_or_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, |=, __m__, __x__ )

#define atomic_fetch_or( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, |=, __m__, memory_order_seq_cst )

#define atomic_fetch_xor_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ )

#define atomic_fetch_xor( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, ^=, __m__, memory_order_seq_cst )
3001 inline bool atomic_bool::is_lock_free() const volatile
/* atomic_bool member operations: each forwards to the corresponding
   generic atomic_* macro defined above, with `this` as the object. */

inline void atomic_bool::store
( bool __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline bool atomic_bool::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline bool atomic_bool::exchange
( bool __m__, memory_order __x__ ) volatile
{ return atomic_exchange_explicit( this, __m__, __x__ ); }

/* Two-order forms: __x__ orders success, __y__ failure (note the
   *_explicit macros above currently drop __y__). */
inline bool atomic_bool::compare_exchange_weak
( bool& __e__, bool __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }

inline bool atomic_bool::compare_exchange_strong
( bool& __e__, bool __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order forms derive the failure order from the success order:
   acq_rel -> acquire, release -> relaxed (a failed exchange stores
   nothing, so no release ordering is possible). */
inline bool atomic_bool::compare_exchange_weak
( bool& __e__, bool __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline bool atomic_bool::compare_exchange_strong
( bool& __e__, bool __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

/* `return` of the atomic_fence statement-expression in a void function
   relies on the GNU extension used throughout this header. */
inline void atomic_bool::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3043 inline bool atomic_address::is_lock_free() const volatile
/* atomic_address member operations (value type void*): each forwards to
   the corresponding generic atomic_* macro defined above. */

inline void atomic_address::store
( void* __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline void* atomic_address::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline void* atomic_address::exchange
( void* __m__, memory_order __x__ ) volatile
{ return atomic_exchange_explicit( this, __m__, __x__ ); }

/* Two-order forms: __x__ orders success, __y__ failure (note the
   *_explicit macros above currently drop __y__). */
inline bool atomic_address::compare_exchange_weak
( void*& __e__, void* __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }

inline bool atomic_address::compare_exchange_strong
( void*& __e__, void* __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order forms derive the failure order from the success order:
   acq_rel -> acquire, release -> relaxed. */
inline bool atomic_address::compare_exchange_weak
( void*& __e__, void* __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline bool atomic_address::compare_exchange_strong
( void*& __e__, void* __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

/* `return` of the atomic_fence statement-expression in a void function
   relies on the GNU extension used throughout this header. */
inline void atomic_address::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3085 inline bool atomic_char::is_lock_free() const volatile
/* atomic_char member operations: each forwards to the corresponding
   generic atomic_* macro defined above. */

inline void atomic_char::store
( char __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline char atomic_char::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline char atomic_char::exchange
( char __m__, memory_order __x__ ) volatile
{ return atomic_exchange_explicit( this, __m__, __x__ ); }

/* Two-order forms: __x__ orders success, __y__ failure (note the
   *_explicit macros above currently drop __y__). */
inline bool atomic_char::compare_exchange_weak
( char& __e__, char __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }

inline bool atomic_char::compare_exchange_strong
( char& __e__, char __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order forms derive the failure order from the success order:
   acq_rel -> acquire, release -> relaxed. */
inline bool atomic_char::compare_exchange_weak
( char& __e__, char __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline bool atomic_char::compare_exchange_strong
( char& __e__, char __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

/* `return` of the atomic_fence statement-expression in a void function
   relies on the GNU extension used throughout this header. */
inline void atomic_char::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3127 inline bool atomic_schar::is_lock_free() const volatile
/* atomic_schar member operations: each forwards to the corresponding
   generic atomic_* macro defined above. */

inline void atomic_schar::store
( signed char __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline signed char atomic_schar::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline signed char atomic_schar::exchange
( signed char __m__, memory_order __x__ ) volatile
{ return atomic_exchange_explicit( this, __m__, __x__ ); }

/* Two-order forms: __x__ orders success, __y__ failure (note the
   *_explicit macros above currently drop __y__). */
inline bool atomic_schar::compare_exchange_weak
( signed char& __e__, signed char __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }

inline bool atomic_schar::compare_exchange_strong
( signed char& __e__, signed char __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order forms derive the failure order from the success order:
   acq_rel -> acquire, release -> relaxed. */
inline bool atomic_schar::compare_exchange_weak
( signed char& __e__, signed char __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline bool atomic_schar::compare_exchange_strong
( signed char& __e__, signed char __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

/* `return` of the atomic_fence statement-expression in a void function
   relies on the GNU extension used throughout this header. */
inline void atomic_schar::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3169 inline bool atomic_uchar::is_lock_free() const volatile
/* atomic_uchar member operations: each forwards to the corresponding
   generic atomic_* macro defined above. */

inline void atomic_uchar::store
( unsigned char __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline unsigned char atomic_uchar::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline unsigned char atomic_uchar::exchange
( unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_exchange_explicit( this, __m__, __x__ ); }

/* Two-order forms: __x__ orders success, __y__ failure (note the
   *_explicit macros above currently drop __y__). */
inline bool atomic_uchar::compare_exchange_weak
( unsigned char& __e__, unsigned char __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }

inline bool atomic_uchar::compare_exchange_strong
( unsigned char& __e__, unsigned char __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order forms derive the failure order from the success order:
   acq_rel -> acquire, release -> relaxed. */
inline bool atomic_uchar::compare_exchange_weak
( unsigned char& __e__, unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline bool atomic_uchar::compare_exchange_strong
( unsigned char& __e__, unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

/* `return` of the atomic_fence statement-expression in a void function
   relies on the GNU extension used throughout this header. */
inline void atomic_uchar::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3211 inline bool atomic_short::is_lock_free() const volatile
/* atomic_short member operations: each forwards to the corresponding
   generic atomic_* macro defined above. */

inline void atomic_short::store
( short __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline short atomic_short::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline short atomic_short::exchange
( short __m__, memory_order __x__ ) volatile
{ return atomic_exchange_explicit( this, __m__, __x__ ); }

/* Two-order forms: __x__ orders success, __y__ failure (note the
   *_explicit macros above currently drop __y__). */
inline bool atomic_short::compare_exchange_weak
( short& __e__, short __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }

inline bool atomic_short::compare_exchange_strong
( short& __e__, short __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order forms derive the failure order from the success order:
   acq_rel -> acquire, release -> relaxed. */
inline bool atomic_short::compare_exchange_weak
( short& __e__, short __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline bool atomic_short::compare_exchange_strong
( short& __e__, short __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

/* `return` of the atomic_fence statement-expression in a void function
   relies on the GNU extension used throughout this header. */
inline void atomic_short::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3253 inline bool atomic_ushort::is_lock_free() const volatile
/* atomic_ushort member operations: each forwards to the corresponding
   generic atomic_* macro defined above. */

inline void atomic_ushort::store
( unsigned short __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline unsigned short atomic_ushort::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline unsigned short atomic_ushort::exchange
( unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_exchange_explicit( this, __m__, __x__ ); }

/* Two-order forms: __x__ orders success, __y__ failure (note the
   *_explicit macros above currently drop __y__). */
inline bool atomic_ushort::compare_exchange_weak
( unsigned short& __e__, unsigned short __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }

inline bool atomic_ushort::compare_exchange_strong
( unsigned short& __e__, unsigned short __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order forms derive the failure order from the success order:
   acq_rel -> acquire, release -> relaxed. */
inline bool atomic_ushort::compare_exchange_weak
( unsigned short& __e__, unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline bool atomic_ushort::compare_exchange_strong
( unsigned short& __e__, unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

/* `return` of the atomic_fence statement-expression in a void function
   relies on the GNU extension used throughout this header. */
inline void atomic_ushort::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3295 inline bool atomic_int::is_lock_free() const volatile
/* atomic_int member operations: each forwards to the corresponding
   generic atomic_* macro defined above. */

inline void atomic_int::store
( int __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline int atomic_int::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline int atomic_int::exchange
( int __m__, memory_order __x__ ) volatile
{ return atomic_exchange_explicit( this, __m__, __x__ ); }

/* Two-order forms: __x__ orders success, __y__ failure (note the
   *_explicit macros above currently drop __y__). */
inline bool atomic_int::compare_exchange_weak
( int& __e__, int __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }

inline bool atomic_int::compare_exchange_strong
( int& __e__, int __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order forms derive the failure order from the success order:
   acq_rel -> acquire, release -> relaxed. */
inline bool atomic_int::compare_exchange_weak
( int& __e__, int __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline bool atomic_int::compare_exchange_strong
( int& __e__, int __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

/* `return` of the atomic_fence statement-expression in a void function
   relies on the GNU extension used throughout this header. */
inline void atomic_int::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3337 inline bool atomic_uint::is_lock_free() const volatile
/* atomic_uint member operations: each forwards to the corresponding
   generic atomic_* macro defined above. */

inline void atomic_uint::store
( unsigned int __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline unsigned int atomic_uint::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline unsigned int atomic_uint::exchange
( unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_exchange_explicit( this, __m__, __x__ ); }

/* Two-order forms: __x__ orders success, __y__ failure (note the
   *_explicit macros above currently drop __y__). */
inline bool atomic_uint::compare_exchange_weak
( unsigned int& __e__, unsigned int __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }

inline bool atomic_uint::compare_exchange_strong
( unsigned int& __e__, unsigned int __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order forms derive the failure order from the success order:
   acq_rel -> acquire, release -> relaxed. */
inline bool atomic_uint::compare_exchange_weak
( unsigned int& __e__, unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline bool atomic_uint::compare_exchange_strong
( unsigned int& __e__, unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
      __x__ == memory_order_acq_rel ? memory_order_acquire :
      __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

/* `return` of the atomic_fence statement-expression in a void function
   relies on the GNU extension used throughout this header. */
inline void atomic_uint::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3379 inline bool atomic_long::is_lock_free() const volatile
// atomic_long: out-of-class member definitions; each forwards to the
// matching free atomic_*_explicit function, passing `this` as the object.
inline void atomic_long::store
( long __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline long atomic_long::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline long atomic_long::exchange
( long __m__, memory_order __x__ ) volatile
{ return atomic_exchange_explicit( this, __m__, __x__ ); }

inline bool atomic_long::compare_exchange_weak
( long& __e__, long __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }

inline bool atomic_long::compare_exchange_strong
( long& __e__, long __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order overloads: failure ordering derived from __x__
// (acq_rel -> acquire, release -> relaxed, otherwise __x__).
inline bool atomic_long::compare_exchange_weak
( long& __e__, long __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
    __x__ == memory_order_acq_rel ? memory_order_acquire :
    __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline bool atomic_long::compare_exchange_strong
( long& __e__, long __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
    __x__ == memory_order_acq_rel ? memory_order_acquire :
    __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

// Forwards to the free atomic_fence; `return` of a void call is well-formed.
inline void atomic_long::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3421 inline bool atomic_ulong::is_lock_free() const volatile
// atomic_ulong: out-of-class member definitions; each forwards to the
// matching free atomic_*_explicit function, passing `this` as the object.
inline void atomic_ulong::store
( unsigned long __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline unsigned long atomic_ulong::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline unsigned long atomic_ulong::exchange
( unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_exchange_explicit( this, __m__, __x__ ); }

inline bool atomic_ulong::compare_exchange_weak
( unsigned long& __e__, unsigned long __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }

inline bool atomic_ulong::compare_exchange_strong
( unsigned long& __e__, unsigned long __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order overloads: failure ordering derived from __x__
// (acq_rel -> acquire, release -> relaxed, otherwise __x__).
inline bool atomic_ulong::compare_exchange_weak
( unsigned long& __e__, unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
    __x__ == memory_order_acq_rel ? memory_order_acquire :
    __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline bool atomic_ulong::compare_exchange_strong
( unsigned long& __e__, unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
    __x__ == memory_order_acq_rel ? memory_order_acquire :
    __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

// Forwards to the free atomic_fence; `return` of a void call is well-formed.
inline void atomic_ulong::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3463 inline bool atomic_llong::is_lock_free() const volatile
// atomic_llong: out-of-class member definitions; each forwards to the
// matching free atomic_*_explicit function, passing `this` as the object.
inline void atomic_llong::store
( long long __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline long long atomic_llong::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline long long atomic_llong::exchange
( long long __m__, memory_order __x__ ) volatile
{ return atomic_exchange_explicit( this, __m__, __x__ ); }

inline bool atomic_llong::compare_exchange_weak
( long long& __e__, long long __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }

inline bool atomic_llong::compare_exchange_strong
( long long& __e__, long long __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order overloads: failure ordering derived from __x__
// (acq_rel -> acquire, release -> relaxed, otherwise __x__).
inline bool atomic_llong::compare_exchange_weak
( long long& __e__, long long __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
    __x__ == memory_order_acq_rel ? memory_order_acquire :
    __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline bool atomic_llong::compare_exchange_strong
( long long& __e__, long long __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
    __x__ == memory_order_acq_rel ? memory_order_acquire :
    __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

// Forwards to the free atomic_fence; `return` of a void call is well-formed.
inline void atomic_llong::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3505 inline bool atomic_ullong::is_lock_free() const volatile
// atomic_ullong: out-of-class member definitions; each forwards to the
// matching free atomic_*_explicit function, passing `this` as the object.
inline void atomic_ullong::store
( unsigned long long __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline unsigned long long atomic_ullong::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline unsigned long long atomic_ullong::exchange
( unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_exchange_explicit( this, __m__, __x__ ); }

inline bool atomic_ullong::compare_exchange_weak
( unsigned long long& __e__, unsigned long long __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }

inline bool atomic_ullong::compare_exchange_strong
( unsigned long long& __e__, unsigned long long __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order overloads: failure ordering derived from __x__
// (acq_rel -> acquire, release -> relaxed, otherwise __x__).
inline bool atomic_ullong::compare_exchange_weak
( unsigned long long& __e__, unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
    __x__ == memory_order_acq_rel ? memory_order_acquire :
    __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline bool atomic_ullong::compare_exchange_strong
( unsigned long long& __e__, unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
    __x__ == memory_order_acq_rel ? memory_order_acquire :
    __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

// Forwards to the free atomic_fence; `return` of a void call is well-formed.
inline void atomic_ullong::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3547 inline bool atomic_wchar_t::is_lock_free() const volatile
// atomic_wchar_t: out-of-class member definitions; each forwards to the
// matching free atomic_*_explicit function, passing `this` as the object.
inline void atomic_wchar_t::store
( wchar_t __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline wchar_t atomic_wchar_t::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline wchar_t atomic_wchar_t::exchange
( wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_exchange_explicit( this, __m__, __x__ ); }

inline bool atomic_wchar_t::compare_exchange_weak
( wchar_t& __e__, wchar_t __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }

inline bool atomic_wchar_t::compare_exchange_strong
( wchar_t& __e__, wchar_t __m__,
  memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order overloads: failure ordering derived from __x__
// (acq_rel -> acquire, release -> relaxed, otherwise __x__).
inline bool atomic_wchar_t::compare_exchange_weak
( wchar_t& __e__, wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
    __x__ == memory_order_acq_rel ? memory_order_acquire :
    __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline bool atomic_wchar_t::compare_exchange_strong
( wchar_t& __e__, wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
    __x__ == memory_order_acq_rel ? memory_order_acquire :
    __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

// Forwards to the free atomic_fence; `return` of a void call is well-formed.
inline void atomic_wchar_t::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3589 template< typename T >
3590 inline bool atomic<T>::is_lock_free() const volatile
// atomic<T>: generic template members implemented via the GNU
// statement-expression macros documented near the top of this header.
template< typename T >
inline void atomic<T>::store( T __v__, memory_order __x__ ) volatile
{ _ATOMIC_STORE_( this, __v__, __x__ ); }

template< typename T >
inline T atomic<T>::load( memory_order __x__ ) volatile
{ return _ATOMIC_LOAD_( this, __x__ ); }

// Exchange is expressed as the generic modify operation with plain
// assignment (`=`) as the operator.
template< typename T >
inline T atomic<T>::exchange( T __v__, memory_order __x__ ) volatile
{ return _ATOMIC_MODIFY_( this, =, __v__, __x__ ); }

// NOTE(review): the failure ordering __y__ is accepted but never passed on —
// _ATOMIC_CMPSWP_WEAK_/_ATOMIC_CMPSWP_ take a single ordering, so failure
// effectively uses __x__. Confirm this approximation is intentional.
template< typename T >
inline bool atomic<T>::compare_exchange_weak
( T& __r__, T __v__, memory_order __x__, memory_order __y__ ) volatile
{ return _ATOMIC_CMPSWP_WEAK_( this, &__r__, __v__, __x__ ); }

template< typename T >
inline bool atomic<T>::compare_exchange_strong
( T& __r__, T __v__, memory_order __x__, memory_order __y__ ) volatile
{ return _ATOMIC_CMPSWP_( this, &__r__, __v__, __x__ ); }

// Single-order overloads: failure ordering derived from __x__
// (acq_rel -> acquire, release -> relaxed, otherwise __x__).
template< typename T >
inline bool atomic<T>::compare_exchange_weak
( T& __r__, T __v__, memory_order __x__ ) volatile
{ return compare_exchange_weak( __r__, __v__, __x__,
    __x__ == memory_order_acq_rel ? memory_order_acquire :
    __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

template< typename T >
inline bool atomic<T>::compare_exchange_strong
( T& __r__, T __v__, memory_order __x__ ) volatile
{ return compare_exchange_strong( __r__, __v__, __x__,
    __x__ == memory_order_acq_rel ? memory_order_acquire :
    __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
// atomic_address: offset arithmetic on the stored pointer. The offset is a
// ptrdiff_t; forwarding goes to the free atomic_fetch_*_explicit functions.
inline void* atomic_address::fetch_add
( ptrdiff_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline void* atomic_address::fetch_sub
( ptrdiff_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
// atomic_char: arithmetic/bitwise fetch operations; each forwards to the
// matching free atomic_fetch_*_explicit function.
inline char atomic_char::fetch_add
( char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline char atomic_char::fetch_sub
( char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline char atomic_char::fetch_and
( char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline char atomic_char::fetch_or
( char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline char atomic_char::fetch_xor
( char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// atomic_schar: arithmetic/bitwise fetch operations; each forwards to the
// matching free atomic_fetch_*_explicit function.
inline signed char atomic_schar::fetch_add
( signed char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline signed char atomic_schar::fetch_sub
( signed char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline signed char atomic_schar::fetch_and
( signed char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline signed char atomic_schar::fetch_or
( signed char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline signed char atomic_schar::fetch_xor
( signed char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// atomic_uchar: arithmetic/bitwise fetch operations; each forwards to the
// matching free atomic_fetch_*_explicit function.
inline unsigned char atomic_uchar::fetch_add
( unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline unsigned char atomic_uchar::fetch_sub
( unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline unsigned char atomic_uchar::fetch_and
( unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline unsigned char atomic_uchar::fetch_or
( unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline unsigned char atomic_uchar::fetch_xor
( unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// atomic_short: arithmetic/bitwise fetch operations; each forwards to the
// matching free atomic_fetch_*_explicit function.
inline short atomic_short::fetch_add
( short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline short atomic_short::fetch_sub
( short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline short atomic_short::fetch_and
( short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline short atomic_short::fetch_or
( short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline short atomic_short::fetch_xor
( short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// atomic_ushort: arithmetic/bitwise fetch operations; each forwards to the
// matching free atomic_fetch_*_explicit function.
inline unsigned short atomic_ushort::fetch_add
( unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline unsigned short atomic_ushort::fetch_sub
( unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline unsigned short atomic_ushort::fetch_and
( unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline unsigned short atomic_ushort::fetch_or
( unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline unsigned short atomic_ushort::fetch_xor
( unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// atomic_int: arithmetic/bitwise fetch operations; each forwards to the
// matching free atomic_fetch_*_explicit function.
inline int atomic_int::fetch_add
( int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline int atomic_int::fetch_sub
( int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline int atomic_int::fetch_and
( int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline int atomic_int::fetch_or
( int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline int atomic_int::fetch_xor
( int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// atomic_uint: arithmetic/bitwise fetch operations; each forwards to the
// matching free atomic_fetch_*_explicit function.
inline unsigned int atomic_uint::fetch_add
( unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline unsigned int atomic_uint::fetch_sub
( unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline unsigned int atomic_uint::fetch_and
( unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline unsigned int atomic_uint::fetch_or
( unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline unsigned int atomic_uint::fetch_xor
( unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// atomic_long: arithmetic/bitwise fetch operations; each forwards to the
// matching free atomic_fetch_*_explicit function.
inline long atomic_long::fetch_add
( long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline long atomic_long::fetch_sub
( long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline long atomic_long::fetch_and
( long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline long atomic_long::fetch_or
( long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline long atomic_long::fetch_xor
( long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// atomic_ulong: arithmetic/bitwise fetch operations; each forwards to the
// matching free atomic_fetch_*_explicit function.
inline unsigned long atomic_ulong::fetch_add
( unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline unsigned long atomic_ulong::fetch_sub
( unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline unsigned long atomic_ulong::fetch_and
( unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline unsigned long atomic_ulong::fetch_or
( unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline unsigned long atomic_ulong::fetch_xor
( unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// atomic_llong: arithmetic/bitwise fetch operations; each forwards to the
// matching free atomic_fetch_*_explicit function.
inline long long atomic_llong::fetch_add
( long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline long long atomic_llong::fetch_sub
( long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline long long atomic_llong::fetch_and
( long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline long long atomic_llong::fetch_or
( long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline long long atomic_llong::fetch_xor
( long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// atomic_ullong: arithmetic/bitwise fetch operations; each forwards to the
// matching free atomic_fetch_*_explicit function.
inline unsigned long long atomic_ullong::fetch_add
( unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline unsigned long long atomic_ullong::fetch_sub
( unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline unsigned long long atomic_ullong::fetch_and
( unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline unsigned long long atomic_ullong::fetch_or
( unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline unsigned long long atomic_ullong::fetch_xor
( unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// atomic_wchar_t: arithmetic/bitwise fetch operations; each forwards to the
// matching free atomic_fetch_*_explicit function.
inline wchar_t atomic_wchar_t::fetch_add
( wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline wchar_t atomic_wchar_t::fetch_sub
( wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline wchar_t atomic_wchar_t::fetch_and
( wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline wchar_t atomic_wchar_t::fetch_or
( wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline wchar_t atomic_wchar_t::fetch_xor
( wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3939 template< typename T >
3940 T* atomic<T*>::load( memory_order __x__ ) volatile
3941 { return static_cast<T*>( atomic_address::load( __x__ ) ); }
3943 template< typename T >
3944 T* atomic<T*>::exchange( T* __v__, memory_order __x__ ) volatile
3945 { return static_cast<T*>( atomic_address::exchange( __v__, __x__ ) ); }
3947 template< typename T >
3948 bool atomic<T*>::compare_exchange_weak
3949 ( T*& __r__, T* __v__, memory_order __x__, memory_order __y__) volatile
3950 { return atomic_address::compare_exchange_weak( *reinterpret_cast<void**>( &__r__ ),
3951 static_cast<void*>( __v__ ), __x__, __y__ ); }
3952 //{ return _ATOMIC_CMPSWP_WEAK_( this, &__r__, __v__, __x__ ); }
3954 template< typename T >
3955 bool atomic<T*>::compare_exchange_strong
3956 ( T*& __r__, T* __v__, memory_order __x__, memory_order __y__) volatile
3957 { return atomic_address::compare_exchange_strong( *reinterpret_cast<void**>( &__r__ ),
3958 static_cast<void*>( __v__ ), __x__, __y__ ); }
3959 //{ return _ATOMIC_CMPSWP_( this, &__r__, __v__, __x__ ); }
3961 template< typename T >
3962 bool atomic<T*>::compare_exchange_weak
3963 ( T*& __r__, T* __v__, memory_order __x__ ) volatile
3964 { return compare_exchange_weak( __r__, __v__, __x__,
3965 __x__ == memory_order_acq_rel ? memory_order_acquire :
3966 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3968 template< typename T >
3969 bool atomic<T*>::compare_exchange_strong
3970 ( T*& __r__, T* __v__, memory_order __x__ ) volatile
3971 { return compare_exchange_strong( __r__, __v__, __x__,
3972 __x__ == memory_order_acq_rel ? memory_order_acquire :
3973 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3975 template< typename T >
3976 T* atomic<T*>::fetch_add( ptrdiff_t __v__, memory_order __x__ ) volatile
3977 { return atomic_fetch_add_explicit( this, sizeof(T) * __v__, __x__ ); }
3979 template< typename T >
3980 T* atomic<T*>::fetch_sub( ptrdiff_t __v__, memory_order __x__ ) volatile
3981 { return atomic_fetch_sub_explicit( this, sizeof(T) * __v__, __x__ ); }
3991 #endif /* __IMPATOMIC_H__ */