libcds: cds/compiler/cxx11_atomic.h
1 //$$CDS-header$$
2
3 #ifndef __CDS_COMPILER_CXX11_ATOMIC_H
4 #define __CDS_COMPILER_CXX11_ATOMIC_H
5 //@cond
6
7 #include <cds/details/defs.h>
8 #include <cds/details/aligned_type.h>
9
10 namespace cds { namespace cxx11_atomic {
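    // Memory ordering constants; names and semantics mirror C++11 std::memory_order.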
11     typedef enum memory_order {
12         memory_order_relaxed,
13         memory_order_consume,
14         memory_order_acquire,
15         memory_order_release,
16         memory_order_acq_rel,
17         memory_order_seq_cst
18     } memory_order;
19
20 }}  // namespace cds::cxx11_atomic
21
22
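// Pull in the platform backend that supplies the raw primitives used by the generic operations
// below: loadN/storeN/exchangeN, casN_weak/casN_strong and, where available, the fetchN_xxx helpers.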
23 #if CDS_COMPILER == CDS_COMPILER_MSVC || (CDS_COMPILER == CDS_COMPILER_INTEL && CDS_OS_INTERFACE == CDS_OSI_WINDOWS)
24 #   if CDS_PROCESSOR_ARCH == CDS_PROCESSOR_X86
25 #       include <cds/compiler/vc/x86/cxx11_atomic.h>
26 #   elif CDS_PROCESSOR_ARCH == CDS_PROCESSOR_AMD64
27 #       include <cds/compiler/vc/amd64/cxx11_atomic.h>
28 #   else
29 #       error "MS VC++ compiler: unsupported processor architecture"
30 #   endif
31 #elif CDS_COMPILER == CDS_COMPILER_GCC || CDS_COMPILER == CDS_COMPILER_CLANG || CDS_COMPILER == CDS_COMPILER_INTEL
32 #   if CDS_PROCESSOR_ARCH == CDS_PROCESSOR_X86
33 #       include <cds/compiler/gcc/x86/cxx11_atomic.h>
34 #   elif CDS_PROCESSOR_ARCH == CDS_PROCESSOR_AMD64
35 #       include <cds/compiler/gcc/amd64/cxx11_atomic.h>
36 #   elif CDS_PROCESSOR_ARCH == CDS_PROCESSOR_IA64
37 #       include <cds/compiler/gcc/ia64/cxx11_atomic.h>
38 #   elif CDS_PROCESSOR_ARCH == CDS_PROCESSOR_SPARC
39 #       include <cds/compiler/gcc/sparc/cxx11_atomic.h>
40 #   elif CDS_PROCESSOR_ARCH == CDS_PROCESSOR_PPC64
41 #       include <cds/compiler/gcc/ppc64/cxx11_atomic.h>
42 //#   elif CDS_PROCESSOR_ARCH == CDS_PROCESSOR_ARM7
43 //#       include <cds/compiler/gcc/arm7/cxx11_atomic.h>
44 #   else
45 #       error "GCC-compatible compiler: unsupported processor architecture. Use native C++11 atomics or boost.atomic instead"
46 #   endif
47 #else
48 #   error "Unsupported compiler"
49 #endif
50
51 // In C++11, std::make_unsigned is declared in <type_traits>; this pre-C++11 emulation layer uses boost::make_unsigned instead
52 #include <boost/type_traits/make_unsigned.hpp>  // for make_unsigned
53
54 namespace cds { namespace cxx11_atomic {
55
56     // forward declarations
57     template <class T>
58     struct atomic;
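    // atomic<T> mirrors the std::atomic<T> interface and is implemented in terms of the
    // per-size operation structs defined in namespace details below.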
59
60     namespace details {
61
62         template <typename T, size_t Size, typename Primary = T >
63         struct atomic_generic_ops;
64
65         template <typename T, size_t Size>
66         struct atomic_integral_ops;
67
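        // primary_type<Size>::type maps a size in bytes to the unsigned integer of that width;
        // it is the machine-level representation used for any T of the same size.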
68         template <size_t TypeSize>
69         struct primary_type;
70
71         template <>
72         struct primary_type<1>
73         {
74             typedef cds::uint8_t type;
75         };
76         template <>
77         struct primary_type<2>
78         {
79             typedef cds::uint16_t type;
80         };
81         template <>
82         struct primary_type<4>
83         {
84             typedef cds::uint32_t type;
85         };
86         template <>
87         struct primary_type<8>
88         {
89             typedef cds::uint64_t type;
90         };
91
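        // make_atomic_primary<T, Primary> converts between a user type T and its same-size
        // primary integral representation expected by the platform primitives; the conversions
        // are bit-level reinterpretations, so T is assumed to be trivially copyable.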
92         template <typename T, typename Primary>
93         struct make_atomic_primary
94         {
95             typedef T       source_type;
96             typedef Primary primary_type;
97
98             static primary_type volatile * ptr( source_type volatile * p ) CDS_NOEXCEPT
99             {
100                 return reinterpret_cast<primary_type volatile *>(p);
101             }
102             static primary_type const volatile * ptr( source_type const volatile * p ) CDS_NOEXCEPT
103             {
104                 return reinterpret_cast<primary_type const volatile *>(p);
105             }
106
107             static primary_type val( source_type v ) CDS_NOEXCEPT
108             {
109                 return *reinterpret_cast<primary_type*>(&v);
110             }
111
112             static primary_type& ref( source_type& v ) CDS_NOEXCEPT
113             {
114                 return reinterpret_cast<primary_type&>(v);
115             }
116
117             static primary_type const& ref( source_type const& v ) CDS_NOEXCEPT
118             {
119                 return reinterpret_cast<primary_type const&>(v);
120             }
121
122             static source_type ret( primary_type r ) CDS_NOEXCEPT
123             {
124                 return *reinterpret_cast<source_type *>(&r);
125             }
126         };
127
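        // When T already is the primary type, every conversion collapses to the identity.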
128         template <typename T>
129         struct make_atomic_primary<T, T>
130         {
131             typedef T source_type;
132             typedef T primary_type;
133
134             static primary_type volatile * ptr( source_type volatile * p ) CDS_NOEXCEPT
135             {
136                 return p;
137             }
138             static primary_type const volatile * ptr( source_type const volatile * p ) CDS_NOEXCEPT
139             {
140                 return p;
141             }
142
143             static primary_type val( source_type v ) CDS_NOEXCEPT
144             {
145                 return v;
146             }
147
148             static primary_type& ref( source_type& v ) CDS_NOEXCEPT
149             {
150                 return v;
151             }
152
153             static source_type ret( primary_type r ) CDS_NOEXCEPT
154             {
155                 return r;
156             }
157         };
158
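        // CAS-loop fallbacks for the bitwise fetch operations, working on the unsigned
        // representation of T; used when the platform backend lacks native fetch_and/or/xor.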
159         template <typename T>
160         struct atomic_integral_bitwise_ops
161         {
162         public:
163             typedef typename boost::make_unsigned<T>::type unsigned_type;
164             typedef atomic_generic_ops<unsigned_type, sizeof(unsigned_type)> atomic_ops;
165
166             static T fetch_and(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
167             {
168                 unsigned_type cur = atomic_ops::atomic_load_explicit( reinterpret_cast<unsigned_type volatile *>(pDest), memory_order_relaxed );
169                 do {} while ( !atomic_ops::atomic_compare_exchange_weak_explicit(
170                     reinterpret_cast<unsigned_type volatile *>(pDest), &cur, cur & unsigned_type(val), order, memory_order_relaxed ));
171                 return T(cur);
172             }
173
174             static T fetch_or(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
175             {
176                 unsigned_type cur = atomic_ops::atomic_load_explicit( reinterpret_cast<unsigned_type volatile *>(pDest), memory_order_relaxed );
177                 do {} while ( !atomic_ops::atomic_compare_exchange_weak_explicit(
178                     reinterpret_cast<unsigned_type volatile *>(pDest), &cur, cur | unsigned_type(val), order, memory_order_relaxed ));
179                 return T(cur);
180             }
181
182             static T fetch_xor(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
183             {
184                 unsigned_type cur = atomic_ops::atomic_load_explicit( reinterpret_cast<unsigned_type volatile *>(pDest), memory_order_relaxed );
185                 do {} while ( !atomic_ops::atomic_compare_exchange_weak_explicit(
186                     reinterpret_cast<unsigned_type volatile *>(pDest), &cur, cur ^ unsigned_type(val), order, memory_order_relaxed ));
187                 return T(cur);
188             }
189         };
190
191
192         // 8-bit atomic operations
193
194         template <typename T, typename Primary>
195         struct atomic_generic_ops< T, 1, Primary >
196         {
197             typedef make_atomic_primary<T, Primary> primary;
198
199             // store
200             static void atomic_store_explicit( T volatile * pDest, T v, memory_order order ) CDS_NOEXCEPT
201             {
202                 platform::store8( primary::ptr(pDest), primary::val(v), order );
203             }
204             static void atomic_store_explicit( T * pDest, T v, memory_order order ) CDS_NOEXCEPT
205             {
206                 platform::store8( primary::ptr(pDest), primary::val(v), order );
207             }
208             static void atomic_store( T volatile * pDest, T v ) CDS_NOEXCEPT
209             {
210                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
211             }
212             static void atomic_store( T * pDest, T v ) CDS_NOEXCEPT
213             {
214                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
215             }
216
217             // load
218             static T atomic_load_explicit( T volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
219             {
220                 return primary::ret( platform::load8( primary::ptr(pSrc), order ));
221             }
222             static T atomic_load_explicit( T const * pSrc, memory_order order ) CDS_NOEXCEPT
223             {
224                 return primary::ret( platform::load8( primary::ptr(pSrc), order ));
225             }
226             static T atomic_load( T volatile const * pSrc ) CDS_NOEXCEPT
227             {
228                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
229             }
230             static T atomic_load( T const * pSrc ) CDS_NOEXCEPT
231             {
232                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
233             }
234
235             // exchange
236             static T atomic_exchange_explicit( T volatile * pDest, T val, memory_order order ) CDS_NOEXCEPT
237             {
238                 return primary::ret( platform::exchange8( primary::ptr(pDest), primary::val(val), order ));
239             }
240             static T atomic_exchange_explicit( T * pDest, T val, memory_order order ) CDS_NOEXCEPT
241             {
242                 return primary::ret( platform::exchange8( primary::ptr(pDest), primary::val(val), order ));
243             }
244             static T atomic_exchange( T volatile * pDest, T val ) CDS_NOEXCEPT
245             {
246                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
247             }
248             static T atomic_exchange( T * pDest, T val ) CDS_NOEXCEPT
249             {
250                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
251             }
252
253             // cas
254             static bool atomic_compare_exchange_weak_explicit( T volatile * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
255             {
256                 assert( expected );
257                 return platform::cas8_weak( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
258             }
259             static bool atomic_compare_exchange_weak_explicit( T * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
260             {
261                 assert( expected );
262                 return platform::cas8_weak( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
263             }
264             static bool atomic_compare_exchange_weak( T volatile * pDest, T * expected, T desired ) CDS_NOEXCEPT
265             {
266                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
267             }
268             static bool atomic_compare_exchange_weak( T * pDest, T * expected, T desired ) CDS_NOEXCEPT
269             {
270                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
271             }
272             static bool atomic_compare_exchange_strong_explicit( T volatile * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
273             {
274                 assert( expected );
275                 return platform::cas8_strong( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
276             }
277             static bool atomic_compare_exchange_strong_explicit( T * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
278             {
279                 assert( expected );
280                 return platform::cas8_strong( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
281             }
282             static bool atomic_compare_exchange_strong( T volatile * pDest, T * expected, T desired ) CDS_NOEXCEPT
283             {
284                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
285             }
286             static bool atomic_compare_exchange_strong( T * pDest, T * expected, T desired ) CDS_NOEXCEPT
287             {
288                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
289             }
290         };
291
292         template <typename T>
293         struct atomic_integral_ops< T, 1 >
294             : atomic_generic_ops<T, 1, T >
295             , atomic_integral_bitwise_ops<T>
296         {
297             typedef atomic_integral_bitwise_ops<T> bitwise_ops;
298
299             // fetch_add
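            // Use the native fetch-and-add when the backend provides one; otherwise emulate it
            // with a relaxed load followed by a compare-exchange loop that publishes the result
            // with the requested memory order.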
300             static T atomic_fetch_add_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
301             {
302 #           ifdef CDS_ATOMIC_fetch8_add_defined
303                 return platform::fetch8_add( pDest, val, order );
304 #           else
305                 T cur = atomic_generic_ops<T, 1, T>::atomic_load_explicit( pDest, memory_order_relaxed );
306                 do {} while ( !atomic_generic_ops<T, 1, T>::atomic_compare_exchange_weak_explicit( pDest, &cur, cur + val, order, memory_order_relaxed ));
307                 return cur;
308 #           endif
309             }
310             static T atomic_fetch_add_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
311             {
312                 return atomic_fetch_add_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
313             }
314             static T atomic_fetch_add( T volatile * pDest, T val ) CDS_NOEXCEPT
315             {
316                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
317             }
318             static T atomic_fetch_add( T * pDest, T val ) CDS_NOEXCEPT
319             {
320                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
321             }
322
323             // fetch_sub
324             static T atomic_fetch_sub_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
325             {
326 #           ifdef CDS_ATOMIC_fetch8_sub_defined
327                 return platform::fetch8_sub( pDest, val, order );
328 #           else
329                 T cur = atomic_generic_ops<T, 1, T>::atomic_load_explicit( pDest, memory_order_relaxed );
330                 do {} while ( !atomic_generic_ops<T, 1, T>::atomic_compare_exchange_weak_explicit( pDest, &cur, cur - val, order, memory_order_relaxed ));
331                 return cur;
332 #           endif
333             }
334             static T atomic_fetch_sub_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
335             {
336                 return atomic_fetch_sub_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
337             }
338             static T atomic_fetch_sub( T volatile * pDest, T val ) CDS_NOEXCEPT
339             {
340                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
341             }
342             static T atomic_fetch_sub( T * pDest, T val ) CDS_NOEXCEPT
343             {
344                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
345             }
346
347             // fetch_and
348             static T atomic_fetch_and_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
349             {
350 #           ifdef CDS_ATOMIC_fetch8_and_defined
351                 return platform::fetch8_and( pDest, val, order );
352 #           else
353                 return bitwise_ops::fetch_and( pDest, val, order );
354 #           endif
355             }
356             static T atomic_fetch_and_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
357             {
358                 return atomic_fetch_and_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
359             }
360             static T atomic_fetch_and( T volatile * pDest, T val ) CDS_NOEXCEPT
361             {
362                 return atomic_fetch_and_explicit( pDest, val, memory_order_seq_cst );
363             }
364             static T atomic_fetch_and( T * pDest, T val ) CDS_NOEXCEPT
365             {
366                 return atomic_fetch_and_explicit( pDest, val, memory_order_seq_cst );
367             }
368
369             // fetch_or
370             static T atomic_fetch_or_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
371             {
372 #           ifdef CDS_ATOMIC_fetch8_or_defined
373                 return platform::fetch8_or( pDest, val, order );
374 #           else
375                 return bitwise_ops::fetch_or( pDest, val, order );
376 #           endif
377             }
378             static T atomic_fetch_or_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
379             {
380                 return atomic_fetch_or_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
381             }
382             static T atomic_fetch_or( T volatile * pDest, T val ) CDS_NOEXCEPT
383             {
384                 return atomic_fetch_or_explicit( pDest, val, memory_order_seq_cst );
385             }
386             static T atomic_fetch_or( T * pDest, T val ) CDS_NOEXCEPT
387             {
388                 return atomic_fetch_or_explicit( pDest, val, memory_order_seq_cst );
389             }
390
391             // fetch_xor
392             static T atomic_fetch_xor_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
393             {
394 #           ifdef CDS_ATOMIC_fetch8_xor_defined
395                 return platform::fetch8_xor( pDest, val, order );
396 #           else
397                 return bitwise_ops::fetch_xor( pDest, val, order );
398 #           endif
399             }
400             static T atomic_fetch_xor_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
401             {
402                 return atomic_fetch_xor_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
403             }
404             static T atomic_fetch_xor( T volatile * pDest, T val ) CDS_NOEXCEPT
405             {
406                 return atomic_fetch_xor_explicit( pDest, val, memory_order_seq_cst );
407             }
408             static T atomic_fetch_xor( T * pDest, T val ) CDS_NOEXCEPT
409             {
410                 return atomic_fetch_xor_explicit( pDest, val, memory_order_seq_cst );
411             }
412         };
413
414         // 16-bit atomic operations
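        // The 16-, 32- and 64-bit specializations below follow the same pattern as the 8-bit
        // one, dispatching to the correspondingly sized platform primitives.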
415
416         template <typename T, typename Primary>
417         struct atomic_generic_ops< T, 2, Primary >
418         {
419             typedef make_atomic_primary<T, Primary> primary;
420
421             // store
422             static void atomic_store_explicit( T volatile * pDest, T v, memory_order order ) CDS_NOEXCEPT
423             {
424                 platform::store16( primary::ptr(pDest), primary::val(v), order );
425             }
426             static void atomic_store_explicit( T * pDest, T v, memory_order order ) CDS_NOEXCEPT
427             {
428                 platform::store16( primary::ptr(pDest), primary::val(v), order );
429             }
430             static void atomic_store( T volatile * pDest, T v ) CDS_NOEXCEPT
431             {
432                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
433             }
434             static void atomic_store( T * pDest, T v ) CDS_NOEXCEPT
435             {
436                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
437             }
438
439             // load
440             static T atomic_load_explicit( T volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
441             {
442                 return primary::ret( platform::load16( primary::ptr(pSrc), order ));
443             }
444             static T atomic_load_explicit( T const * pSrc, memory_order order ) CDS_NOEXCEPT
445             {
446                 return primary::ret( platform::load16( primary::ptr(pSrc), order ));
447             }
448             static T atomic_load( T volatile const * pSrc ) CDS_NOEXCEPT
449             {
450                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
451             }
452             static T atomic_load( T const * pSrc ) CDS_NOEXCEPT
453             {
454                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
455             }
456
457             // exchange
458             static T atomic_exchange_explicit( T volatile * pDest, T val, memory_order order ) CDS_NOEXCEPT
459             {
460                 return primary::ret( platform::exchange16( primary::ptr(pDest), primary::val(val), order ));
461             }
462             static T atomic_exchange_explicit( T * pDest, T val, memory_order order ) CDS_NOEXCEPT
463             {
464                 return primary::ret( platform::exchange16( primary::ptr(pDest), primary::val(val), order ));
465             }
466             static T atomic_exchange( T volatile * pDest, T val ) CDS_NOEXCEPT
467             {
468                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
469             }
470             static T atomic_exchange( T * pDest, T val ) CDS_NOEXCEPT
471             {
472                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
473             }
474
475             // cas
476             static bool atomic_compare_exchange_weak_explicit( T volatile * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
477             {
478                 assert( expected );
479                 return platform::cas16_weak( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
480             }
481             static bool atomic_compare_exchange_weak_explicit( T * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
482             {
483                 assert( expected );
484                 return platform::cas16_weak( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
485             }
486             static bool atomic_compare_exchange_weak( T volatile * pDest, T * expected, T desired ) CDS_NOEXCEPT
487             {
488                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
489             }
490             static bool atomic_compare_exchange_weak( T * pDest, T * expected, T desired ) CDS_NOEXCEPT
491             {
492                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
493             }
494             static bool atomic_compare_exchange_strong_explicit( T volatile * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
495             {
496                 assert( expected );
497                 return platform::cas16_strong( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
498             }
499             static bool atomic_compare_exchange_strong_explicit( T * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
500             {
501                 assert( expected );
502                 return platform::cas16_strong( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
503             }
504             static bool atomic_compare_exchange_strong( T volatile * pDest, T * expected, T desired ) CDS_NOEXCEPT
505             {
506                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
507             }
508             static bool atomic_compare_exchange_strong( T * pDest, T * expected, T desired ) CDS_NOEXCEPT
509             {
510                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
511             }
512         };
513
514         template <typename T>
515         struct atomic_integral_ops< T, 2 >
516             : atomic_generic_ops< T, 2, T >
517             , atomic_integral_bitwise_ops<T>
518         {
519             typedef atomic_integral_bitwise_ops<T> bitwise_ops;
520
521             // fetch_add
522             static T atomic_fetch_add_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
523             {
524 #           ifdef CDS_ATOMIC_fetch16_add_defined
525                 return platform::fetch16_add( pDest, val, order );
526 #           else
527                 T cur = atomic_generic_ops<T, 2, T>::atomic_load_explicit( pDest, memory_order_relaxed );
528                 do {} while ( !atomic_generic_ops<T, 2, T>::atomic_compare_exchange_weak_explicit( pDest, &cur, cur + val, order, memory_order_relaxed ));
529                 return cur;
530 #           endif
531             }
532             static T atomic_fetch_add_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
533             {
534                 return atomic_fetch_add_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
535             }
536             static T atomic_fetch_add( T volatile * pDest, T val ) CDS_NOEXCEPT
537             {
538                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
539             }
540             static T atomic_fetch_add( T * pDest, T val ) CDS_NOEXCEPT
541             {
542                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
543             }
544
545             // fetch_sub
546             static T atomic_fetch_sub_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
547             {
548 #           ifdef CDS_ATOMIC_fetch16_sub_defined
549                 return platform::fetch16_sub( pDest, val, order );
550 #           else
551                 T cur = atomic_generic_ops<T, 2, T>::atomic_load_explicit( pDest, memory_order_relaxed );
552                 do {} while ( !atomic_generic_ops<T, 2, T>::atomic_compare_exchange_weak_explicit( pDest, &cur, cur - val, order, memory_order_relaxed ));
553                 return cur;
554 #           endif
555             }
556             static T atomic_fetch_sub_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
557             {
558                 return atomic_fetch_sub_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
559             }
560             static T atomic_fetch_sub( T volatile * pDest, T val ) CDS_NOEXCEPT
561             {
562                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
563             }
564             static T atomic_fetch_sub( T * pDest, T val ) CDS_NOEXCEPT
565             {
566                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
567             }
568
569             // fetch_and
570             static T atomic_fetch_and_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
571             {
572 #           ifdef CDS_ATOMIC_fetch16_and_defined
573                 return platform::fetch16_and( pDest, val, order );
574 #           else
575                 return bitwise_ops::fetch_and( pDest, val, order );
576 #           endif
577             }
578             static T atomic_fetch_and_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
579             {
580                 return atomic_fetch_and_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
581             }
582             static T atomic_fetch_and( T volatile * pDest, T val ) CDS_NOEXCEPT
583             {
584                 return atomic_fetch_and_explicit( pDest, val, memory_order_seq_cst );
585             }
586             static T atomic_fetch_and( T * pDest, T val ) CDS_NOEXCEPT
587             {
588                 return atomic_fetch_and_explicit( pDest, val, memory_order_seq_cst );
589             }
590
591             // fetch_or
592             static T atomic_fetch_or_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
593             {
594 #           ifdef CDS_ATOMIC_fetch16_or_defined
595                 return platform::fetch16_or( pDest, val, order );
596 #           else
597                 return bitwise_ops::fetch_or( pDest, val, order );
598 #           endif
599             }
600             static T atomic_fetch_or_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
601             {
602                 return atomic_fetch_or_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
603             }
604             static T atomic_fetch_or( T volatile * pDest, T val ) CDS_NOEXCEPT
605             {
606                 return atomic_fetch_or_explicit( pDest, val, memory_order_seq_cst );
607             }
608             static T atomic_fetch_or( T * pDest, T val ) CDS_NOEXCEPT
609             {
610                 return atomic_fetch_or_explicit( pDest, val, memory_order_seq_cst );
611             }
612
613             // fetch_xor
614             static T atomic_fetch_xor_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
615             {
616 #           ifdef CDS_ATOMIC_fetch16_xor_defined
617                 return platform::fetch16_xor( pDest, val, order );
618 #           else
619                 return bitwise_ops::fetch_xor( pDest, val, order );
620 #           endif
621             }
622             static T atomic_fetch_xor_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
623             {
624                 return atomic_fetch_xor_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
625             }
626             static T atomic_fetch_xor( T volatile * pDest, T val ) CDS_NOEXCEPT
627             {
628                 return atomic_fetch_xor_explicit( pDest, val, memory_order_seq_cst );
629             }
630             static T atomic_fetch_xor( T * pDest, T val ) CDS_NOEXCEPT
631             {
632                 return atomic_fetch_xor_explicit( pDest, val, memory_order_seq_cst );
633             }
634         };
635
636         // 32-bit atomic operations
637
638         template <typename T, typename Primary>
639         struct atomic_generic_ops< T, 4, Primary >
640         {
641             typedef make_atomic_primary<T, Primary> primary;
642
643             // store
644             static void atomic_store_explicit( T volatile * pDest, T v, memory_order order ) CDS_NOEXCEPT
645             {
646                 platform::store32( primary::ptr(pDest), primary::val(v), order );
647             }
648             static void atomic_store_explicit( T * pDest, T v, memory_order order ) CDS_NOEXCEPT
649             {
650                 platform::store32( primary::ptr(pDest), primary::val(v), order );
651             }
652             static void atomic_store( T volatile * pDest, T v ) CDS_NOEXCEPT
653             {
654                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
655             }
656             static void atomic_store( T * pDest, T v ) CDS_NOEXCEPT
657             {
658                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
659             }
660
661             // load
662             static T atomic_load_explicit( T volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
663             {
664                 return primary::ret( platform::load32( primary::ptr(pSrc), order ));
665             }
666             static T atomic_load_explicit( T const * pSrc, memory_order order ) CDS_NOEXCEPT
667             {
668                 return primary::ret( platform::load32( primary::ptr(pSrc), order ));
669             }
670             static T atomic_load( T volatile const * pSrc ) CDS_NOEXCEPT
671             {
672                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
673             }
674             static T atomic_load( T const * pSrc ) CDS_NOEXCEPT
675             {
676                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
677             }
678
679             // exchange
680             static T atomic_exchange_explicit( T volatile * pDest, T val, memory_order order ) CDS_NOEXCEPT
681             {
682                 return primary::ret( platform::exchange32( primary::ptr(pDest), primary::val(val), order ));
683             }
684             static T atomic_exchange_explicit( T * pDest, T val, memory_order order ) CDS_NOEXCEPT
685             {
686                 return primary::ret( platform::exchange32( primary::ptr(pDest), primary::val(val), order ));
687             }
688             static T atomic_exchange( T volatile * pDest, T val ) CDS_NOEXCEPT
689             {
690                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
691             }
692             static T atomic_exchange( T * pDest, T val ) CDS_NOEXCEPT
693             {
694                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
695             }
696
697             // cas
698             static bool atomic_compare_exchange_weak_explicit( T volatile * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
699             {
700                 assert( expected );
701                 return platform::cas32_weak( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
702             }
703             static bool atomic_compare_exchange_weak_explicit( T * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
704             {
705                 assert( expected );
706                 return platform::cas32_weak( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
707             }
708             static bool atomic_compare_exchange_weak( T volatile * pDest, T * expected, T desired ) CDS_NOEXCEPT
709             {
710                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
711             }
712             static bool atomic_compare_exchange_weak( T * pDest, T * expected, T desired ) CDS_NOEXCEPT
713             {
714                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
715             }
716             static bool atomic_compare_exchange_strong_explicit( T volatile * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
717             {
718                 assert( expected );
719                 return platform::cas32_strong( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
720             }
721             static bool atomic_compare_exchange_strong_explicit( T * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
722             {
723                 assert( expected );
724                 return platform::cas32_strong( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
725             }
726             static bool atomic_compare_exchange_strong( T volatile * pDest, T * expected, T desired ) CDS_NOEXCEPT
727             {
728                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
729             }
730             static bool atomic_compare_exchange_strong( T * pDest, T * expected, T desired ) CDS_NOEXCEPT
731             {
732                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
733             }
734         };
735
736         template <typename T>
737         struct atomic_integral_ops< T, 4 >
738             : atomic_generic_ops< T, 4, T >
739             , atomic_integral_bitwise_ops<T>
740         {
741             typedef atomic_integral_bitwise_ops<T> bitwise_ops;
742             // fetch_add
743             static T atomic_fetch_add_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
744             {
745 #           ifdef CDS_ATOMIC_fetch32_add_defined
746                 return platform::fetch32_add( pDest, val, order );
747 #           else
748                 T cur = atomic_generic_ops<T, 4, T>::atomic_load_explicit( pDest, memory_order_relaxed );
749                 do {} while ( !atomic_generic_ops<T, 4, T>::atomic_compare_exchange_weak_explicit( pDest, &cur, cur + val, order, memory_order_relaxed ));
750                 return cur;
751 #           endif
752             }
753             static T atomic_fetch_add_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
754             {
755                 return atomic_fetch_add_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
756             }
757             static T atomic_fetch_add( T volatile * pDest, T val ) CDS_NOEXCEPT
758             {
759                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
760             }
761             static T atomic_fetch_add( T * pDest, T val ) CDS_NOEXCEPT
762             {
763                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
764             }
765
766             // fetch_sub
767             static T atomic_fetch_sub_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
768             {
769 #           ifdef CDS_ATOMIC_fetch32_sub_defined
770                 return platform::fetch32_sub( pDest, val, order );
771 #           else
772                 T cur = atomic_generic_ops<T, 4, T>::atomic_load_explicit( pDest, memory_order_relaxed );
773                 do {} while ( !atomic_generic_ops<T, 4, T>::atomic_compare_exchange_weak_explicit( pDest, &cur, cur - val, order, memory_order_relaxed ));
774                 return cur;
775 #           endif
776             }
777             static T atomic_fetch_sub_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
778             {
779                 return atomic_fetch_sub_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
780             }
781             static T atomic_fetch_sub( T volatile * pDest, T val ) CDS_NOEXCEPT
782             {
783                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
784             }
785             static T atomic_fetch_sub( T * pDest, T val ) CDS_NOEXCEPT
786             {
787                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
788             }
789
790             // fetch_and
791             static T atomic_fetch_and_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
792             {
793 #           ifdef CDS_ATOMIC_fetch32_and_defined
794                 return platform::fetch32_and( pDest, val, order );
795 #           else
796                 return bitwise_ops::fetch_and( pDest, val, order );
797 #           endif
798             }
799             static T atomic_fetch_and_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
800             {
801                 return atomic_fetch_and_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
802             }
803             static T atomic_fetch_and( T volatile * pDest, T val ) CDS_NOEXCEPT
804             {
805                 return atomic_fetch_and_explicit( pDest, val, memory_order_seq_cst );
806             }
807             static T atomic_fetch_and( T * pDest, T val ) CDS_NOEXCEPT
808             {
809                 return atomic_fetch_and_explicit( pDest, val, memory_order_seq_cst );
810             }
811
812             // fetch_or
813             static T atomic_fetch_or_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
814             {
815 #           ifdef CDS_ATOMIC_fetch32_or_defined
816                 return platform::fetch32_or( pDest, val, order );
817 #           else
818                 return bitwise_ops::fetch_or( pDest, val, order );
819 #           endif
820             }
821             static T atomic_fetch_or_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
822             {
823                 return atomic_fetch_or_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
824             }
825             static T atomic_fetch_or( T volatile * pDest, T val ) CDS_NOEXCEPT
826             {
827                 return atomic_fetch_or_explicit( pDest, val, memory_order_seq_cst );
828             }
829             static T atomic_fetch_or( T * pDest, T val ) CDS_NOEXCEPT
830             {
831                 return atomic_fetch_or_explicit( pDest, val, memory_order_seq_cst );
832             }
833
834             // fetch_xor
835             static T atomic_fetch_xor_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
836             {
837 #           ifdef CDS_ATOMIC_fetch32_xor_defined
838                 return platform::fetch32_xor( pDest, val, order );
839 #           else
840                 return bitwise_ops::fetch_xor( pDest, val, order );
841 #           endif
842             }
843             static T atomic_fetch_xor_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
844             {
845                 return atomic_fetch_xor_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
846             }
847             static T atomic_fetch_xor( T volatile * pDest, T val ) CDS_NOEXCEPT
848             {
849                 return atomic_fetch_xor_explicit( pDest, val, memory_order_seq_cst );
850             }
851             static T atomic_fetch_xor( T * pDest, T val ) CDS_NOEXCEPT
852             {
853                 return atomic_fetch_xor_explicit( pDest, val, memory_order_seq_cst );
854             }
855         };
856
857
858         // 64-bit atomic operations
859
860         template <typename T, typename Primary>
861         struct atomic_generic_ops< T, 8, Primary >
862         {
863             typedef make_atomic_primary<T, Primary> primary;
864
865             // store
866             static void atomic_store_explicit( T volatile * pDest, T v, memory_order order ) CDS_NOEXCEPT
867             {
868                 platform::store64( primary::ptr(pDest), primary::val(v), order );
869             }
870             static void atomic_store_explicit( T * pDest, T v, memory_order order ) CDS_NOEXCEPT
871             {
872                 platform::store64( primary::ptr(pDest), primary::val(v), order );
873             }
874             static void atomic_store( T volatile * pDest, T v ) CDS_NOEXCEPT
875             {
876                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
877             }
878             static void atomic_store( T * pDest, T v ) CDS_NOEXCEPT
879             {
880                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
881             }
882
883             // load
884             static T atomic_load_explicit( T volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
885             {
886                 return primary::ret( platform::load64( primary::ptr(pSrc), order ));
887             }
888             static T atomic_load_explicit( T const * pSrc, memory_order order ) CDS_NOEXCEPT
889             {
890                 return primary::ret( platform::load64( primary::ptr(pSrc), order ));
891             }
892             static T atomic_load( T volatile const * pSrc ) CDS_NOEXCEPT
893             {
894                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
895             }
896             static T atomic_load( T const * pSrc ) CDS_NOEXCEPT
897             {
898                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
899             }
900
901             // exchange
902             static T atomic_exchange_explicit( T volatile * pDest, T val, memory_order order ) CDS_NOEXCEPT
903             {
904                 return primary::ret( platform::exchange64( primary::ptr(pDest), primary::val(val), order ));
905             }
906             static T atomic_exchange_explicit( T * pDest, T val, memory_order order ) CDS_NOEXCEPT
907             {
908                 return primary::ret( platform::exchange64( primary::ptr(pDest), primary::val(val), order ));
909             }
910             static T atomic_exchange( T volatile * pDest, T val ) CDS_NOEXCEPT
911             {
912                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
913             }
914             static T atomic_exchange( T * pDest, T val ) CDS_NOEXCEPT
915             {
916                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
917             }
918
919             // cas
920             static bool atomic_compare_exchange_weak_explicit( T volatile * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
921             {
922                 assert( expected );
923                 return platform::cas64_weak( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
924             }
925             static bool atomic_compare_exchange_weak_explicit( T * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
926             {
927                 assert( expected );
928                 return platform::cas64_weak( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
929             }
930             static bool atomic_compare_exchange_weak( T volatile * pDest, T * expected, T desired ) CDS_NOEXCEPT
931             {
932                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
933             }
934             static bool atomic_compare_exchange_weak( T * pDest, T * expected, T desired ) CDS_NOEXCEPT
935             {
936                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
937             }
938             static bool atomic_compare_exchange_strong_explicit( T volatile * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
939             {
940                 assert( expected );
941                 return platform::cas64_strong( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
942             }
943             static bool atomic_compare_exchange_strong_explicit( T * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
944             {
945                 assert( expected );
946                 return platform::cas64_strong( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
947             }
948             static bool atomic_compare_exchange_strong( T volatile * pDest, T * expected, T desired ) CDS_NOEXCEPT
949             {
950                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
951             }
952             static bool atomic_compare_exchange_strong( T * pDest, T * expected, T desired ) CDS_NOEXCEPT
953             {
954                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
955             }
956         };
957
958
959         template <typename T>
960         struct atomic_integral_ops< T, 8 >
961             : atomic_generic_ops< T, 8, T >
962             , atomic_integral_bitwise_ops<T>
963         {
964             typedef atomic_integral_bitwise_ops<T>  bitwise_ops;
965             typedef atomic_generic_ops<T, 8, T>     general_ops;
966
967             // fetch_add
968             static T atomic_fetch_add_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
969             {
970 #           ifdef CDS_ATOMIC_fetch64_add_defined
971                 return platform::fetch64_add( pDest, val, order );
972 #           else
973                 T cur = general_ops::atomic_load_explicit( pDest, memory_order_relaxed );
974                 do {} while ( !general_ops::atomic_compare_exchange_weak_explicit( pDest, &cur, cur + val, order, memory_order_relaxed ));
975                 return cur;
976 #           endif
977             }
978             static T atomic_fetch_add_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
979             {
980                 return atomic_fetch_add_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
981             }
982             static T atomic_fetch_add( T volatile * pDest, T val ) CDS_NOEXCEPT
983             {
984                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
985             }
986             static T atomic_fetch_add( T * pDest, T val ) CDS_NOEXCEPT
987             {
988                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
989             }
990
991             // fetch_sub
992             static T atomic_fetch_sub_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
993             {
994 #           ifdef CDS_ATOMIC_fetch64_sub_defined
995                 return platform::fetch64_sub( pDest, val, order );
996 #           else
997                 T cur = general_ops::atomic_load_explicit( pDest, memory_order_relaxed );
998                 do {} while ( !general_ops::atomic_compare_exchange_weak_explicit( pDest, &cur, cur - val, order, memory_order_relaxed ));
999                 return cur;
1000 #           endif
1001             }
1002             static T atomic_fetch_sub_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
1003             {
1004                 return atomic_fetch_sub_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
1005             }
1006             static T atomic_fetch_sub( T volatile * pDest, T val ) CDS_NOEXCEPT
1007             {
1008                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
1009             }
1010             static T atomic_fetch_sub( T * pDest, T val ) CDS_NOEXCEPT
1011             {
1012                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
1013             }
1014
1015             // fetch_and
1016             static T atomic_fetch_and_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
1017             {
1018 #           ifdef CDS_ATOMIC_fetch64_and_defined
1019                 return platform::fetch64_and( pDest, val, order );
1020 #           else
1021                 return bitwise_ops::fetch_and( pDest, val, order );
1022 #           endif
1023             }
1024             static T atomic_fetch_and_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
1025             {
1026                 return atomic_fetch_and_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
1027             }
1028             static T atomic_fetch_and( T volatile * pDest, T val ) CDS_NOEXCEPT
1029             {
1030                 return atomic_fetch_and_explicit( pDest, val, memory_order_seq_cst );
1031             }
1032             static T atomic_fetch_and( T * pDest, T val ) CDS_NOEXCEPT
1033             {
1034                 return atomic_fetch_and_explicit( pDest, val, memory_order_seq_cst );
1035             }
1036
1037             // fetch_or
1038             static T atomic_fetch_or_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
1039             {
1040 #           ifdef CDS_ATOMIC_fetch64_or_defined
1041                 return platform::fetch64_or( pDest, val, order );
1042 #           else
1043                 return bitwise_ops::fetch_or( pDest, val, order );
1044 #           endif
1045             }
1046             static T atomic_fetch_or_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
1047             {
1048                 return atomic_fetch_or_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
1049             }
1050             static T atomic_fetch_or( T volatile * pDest, T val ) CDS_NOEXCEPT
1051             {
1052                 return atomic_fetch_or_explicit( pDest, val, memory_order_seq_cst );
1053             }
1054             static T atomic_fetch_or( T * pDest, T val ) CDS_NOEXCEPT
1055             {
1056                 return atomic_fetch_or_explicit( pDest, val, memory_order_seq_cst );
1057             }
1058
1059             // fetch_xor
1060             static T atomic_fetch_xor_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
1061             {
1062 #           ifdef CDS_ATOMIC_fetch64_xor_defined
1063                 return platform::fetch64_xor( pDest, val, order );
1064 #           else
1065                 return bitwise_ops::fetch_xor( pDest, val, order );
1066 #           endif
1067             }
1068             static T atomic_fetch_xor_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
1069             {
1070                 return atomic_fetch_xor_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
1071             }
1072             static T atomic_fetch_xor( T volatile * pDest, T val ) CDS_NOEXCEPT
1073             {
1074                 return atomic_fetch_xor_explicit( pDest, val, memory_order_seq_cst );
1075             }
1076             static T atomic_fetch_xor( T * pDest, T val ) CDS_NOEXCEPT
1077             {
1078                 return atomic_fetch_xor_explicit( pDest, val, memory_order_seq_cst );
1079             }
1080         };
1081
1082
1083         // atomic pointer operations
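        // Load/store/exchange/CAS for pointer types; pointer arithmetic (fetch_add etc.)
        // is layered on top by atomic_pointer below.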
1084         template <typename T>
1085         struct atomic_pointer_base
1086         {
1087             // store
1088             static void atomic_store_explicit( T * volatile * pDest, T * v, memory_order order ) CDS_NOEXCEPT
1089             {
1090                 platform::store_ptr( pDest, v, order );
1091             }
1092             static void atomic_store_explicit( T * * pDest, T * v, memory_order order ) CDS_NOEXCEPT
1093             {
1094                 platform::store_ptr( pDest, v, order );
1095             }
1096             static void atomic_store( T * volatile * pDest, T * v ) CDS_NOEXCEPT
1097             {
1098                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
1099             }
1100             static void atomic_store( T * * pDest, T * v ) CDS_NOEXCEPT
1101             {
1102                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
1103             }
1104
1105             // load
1106             static T * atomic_load_explicit( T * volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
1107             {
1108                 return platform::load_ptr( pSrc, order );
1109             }
1110             static T * atomic_load_explicit( T * const * pSrc, memory_order order ) CDS_NOEXCEPT
1111             {
1112                 return platform::load_ptr( pSrc, order );
1113             }
1114             static T * atomic_load( T * volatile const * pSrc ) CDS_NOEXCEPT
1115             {
1116                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
1117             }
1118             static T * atomic_load( T * const * pSrc ) CDS_NOEXCEPT
1119             {
1120                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
1121             }
1122
1123             // exchange
1124             static T * atomic_exchange_explicit( T * volatile * pDest, T * val, memory_order order ) CDS_NOEXCEPT
1125             {
1126                 return platform::exchange_ptr( pDest, val, order );
1127             }
1128             static T * atomic_exchange_explicit( T * * pDest, T * val, memory_order order ) CDS_NOEXCEPT
1129             {
1130                 return platform::exchange_ptr( pDest, val, order );
1131             }
1132             static T * atomic_exchange( T * volatile * pDest, T * val ) CDS_NOEXCEPT
1133             {
1134                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
1135             }
1136             static T * atomic_exchange( T * * pDest, T * val ) CDS_NOEXCEPT
1137             {
1138                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
1139             }
1140
1141             // cas
1142             static bool atomic_compare_exchange_weak_explicit( T * volatile * pDest, T * * expected, T * desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
1143             {
1144                 assert( expected );
1145                 return platform::cas_ptr_weak( pDest, *expected, desired, mo_success, mo_fail );
1146             }
1147             static bool atomic_compare_exchange_weak_explicit( T * * pDest, T * * expected, T * desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
1148             {
1149                 assert( expected );
1150                 return platform::cas_ptr_weak( pDest, *expected, desired, mo_success, mo_fail );
1151             }
1152             static bool atomic_compare_exchange_weak( T * volatile * pDest, T ** expected, T * desired ) CDS_NOEXCEPT
1153             {
1154                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
1155             }
1156             static bool atomic_compare_exchange_weak( T ** pDest, T ** expected, T * desired ) CDS_NOEXCEPT
1157             {
1158                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
1159             }
1160             static bool atomic_compare_exchange_strong_explicit( T * volatile * pDest, T ** expected, T * desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
1161             {
1162                 assert( expected );
1163                 return platform::cas_ptr_strong( pDest, *expected, desired, mo_success, mo_fail );
1164             }
1165             static bool atomic_compare_exchange_strong_explicit( T ** pDest, T ** expected, T * desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
1166             {
1167                 assert( expected );
1168                 return platform::cas_ptr_strong( pDest, *expected, desired, mo_success, mo_fail );
1169             }
1170             static bool atomic_compare_exchange_strong( T * volatile * pDest, T ** expected, T * desired ) CDS_NOEXCEPT
1171             {
1172                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
1173             }
1174             static bool atomic_compare_exchange_strong( T ** pDest, T ** expected, T * desired ) CDS_NOEXCEPT
1175             {
1176                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
1177             }
1178         };
1179
1180         template <typename T>
1181         struct atomic_pointer: public atomic_pointer_base<T>
1182         {
1183             typedef atomic_pointer_base<T> base_class;
1184             // fetch_add
1185             static T * atomic_fetch_add_explicit(T * volatile * pDest, ptrdiff_t val, memory_order order) CDS_NOEXCEPT
1186             {
1187 #           ifdef CDS_ATOMIC_fetch_ptr_add_defined
1188                 return platform::fetch_ptr_add( pDest, val, order );
1189 #           else
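                     // No native pointer fetch-and-add on this platform:
                     // emulate it with a relaxed load and a CAS retry loop.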
1190                 T * cur = base_class::atomic_load_explicit( pDest, memory_order_relaxed );
1191                 do {} while ( !base_class::atomic_compare_exchange_weak_explicit( pDest, &cur, cur + val, order, memory_order_relaxed ));
1192                 return cur;
1193 #           endif
1194             }
1195             static T * atomic_fetch_add_explicit(T * * pDest, ptrdiff_t val , memory_order order) CDS_NOEXCEPT
1196             {
1197                 return atomic_fetch_add_explicit( reinterpret_cast<T * volatile *>( pDest ), val, order );
1198             }
1199             static T * atomic_fetch_add( T * volatile * pDest, ptrdiff_t val ) CDS_NOEXCEPT
1200             {
1201                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
1202             }
1203             static T * atomic_fetch_add( T ** pDest, ptrdiff_t val ) CDS_NOEXCEPT
1204             {
1205                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
1206             }
1207
1208             // fetch_sub
1209             static T * atomic_fetch_sub_explicit(T * volatile * pDest, ptrdiff_t val, memory_order order) CDS_NOEXCEPT
1210             {
1211 #           ifdef CDS_ATOMIC_fetch_ptr_sub_defined
1212                 return platform::fetch_ptr_sub( pDest, val, order );
1213 #           else
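                     // Same CAS-loop fallback as fetch_add above, subtracting the offset instead.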
1214                 T * cur = base_class::atomic_load_explicit( pDest, memory_order_relaxed );
1215                 do {} while ( !base_class::atomic_compare_exchange_weak_explicit( pDest, &cur, cur - val, order, memory_order_relaxed ));
1216                 return cur;
1217 #           endif
1218             }
1219             static T * atomic_fetch_sub_explicit(T ** pDest, ptrdiff_t val , memory_order order) CDS_NOEXCEPT
1220             {
1221                 return atomic_fetch_sub_explicit( reinterpret_cast<T * volatile *>( pDest ), val, order );
1222             }
1223             static T * atomic_fetch_sub( T * volatile * pDest, ptrdiff_t val ) CDS_NOEXCEPT
1224             {
1225                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
1226             }
1227             static T * atomic_fetch_sub( T ** pDest, ptrdiff_t val ) CDS_NOEXCEPT
1228             {
1229                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
1230             }
1231         };
1232
1233         template <>
1234         struct atomic_pointer<void>: public atomic_pointer_base<void>
1235         {
1236             typedef atomic_pointer_base<void>   base_class;
1237
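                 // void * has no pointer arithmetic, so fetch_add/fetch_sub are always
                 // emulated with a CAS loop that steps the address in bytes via char *.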
1238             // fetch_add
1239             static void * atomic_fetch_add_explicit(void * volatile * pDest, ptrdiff_t val, memory_order order) CDS_NOEXCEPT
1240             {
1241                 void * cur = base_class::atomic_load_explicit( pDest, memory_order_relaxed );
1242                 do {} while ( !base_class::atomic_compare_exchange_weak_explicit( pDest, &cur, reinterpret_cast<char *>(cur) + val, order, memory_order_relaxed ));
1243                 return cur;
1244             }
1245             static void * atomic_fetch_add_explicit(void * * pDest, ptrdiff_t val , memory_order order) CDS_NOEXCEPT
1246             {
1247                 return atomic_fetch_add_explicit( reinterpret_cast<void * volatile *>( pDest ), val, order );
1248             }
1249             static void * atomic_fetch_add( void * volatile * pDest, ptrdiff_t val ) CDS_NOEXCEPT
1250             {
1251                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
1252             }
1253             static void * atomic_fetch_add( void ** pDest, ptrdiff_t val ) CDS_NOEXCEPT
1254             {
1255                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
1256             }
1257
1258             // fetch_sub
1259             static void * atomic_fetch_sub_explicit(void * volatile * pDest, ptrdiff_t val, memory_order order) CDS_NOEXCEPT
1260             {
1261                 void * cur = base_class::atomic_load_explicit( pDest, memory_order_relaxed );
1262                 do {} while ( !base_class::atomic_compare_exchange_weak_explicit( pDest, &cur, reinterpret_cast<char *>(cur) - val, order, memory_order_relaxed ));
1263                 return cur;
1264             }
1265             static void * atomic_fetch_sub_explicit(void ** pDest, ptrdiff_t val , memory_order order) CDS_NOEXCEPT
1266             {
1267                 return atomic_fetch_sub_explicit( reinterpret_cast<void * volatile *>( pDest ), val, order );
1268             }
1269             static void * atomic_fetch_sub( void * volatile * pDest, ptrdiff_t val ) CDS_NOEXCEPT
1270             {
1271                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
1272             }
1273             static void * atomic_fetch_sub( void ** pDest, ptrdiff_t val ) CDS_NOEXCEPT
1274             {
1275                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
1276             }
1277         };
1278
1279 #ifndef CDS_CXX11_DELETE_DEFINITION_SUPPORT
1280         class atomic_noncopyable
1281         {
1282         private:
1283             atomic_noncopyable(const atomic_noncopyable&);
1284             atomic_noncopyable& operator=(const atomic_noncopyable&);
1285             //atomic_noncopyable& operator=(const atomic_noncopyable&) volatile;
1286         protected:
1287             atomic_noncopyable() = default;
1288         };
1289 #endif
1290
1291         template <typename T>
1292         struct atomic_integral
1293 #ifndef CDS_CXX11_DELETE_DEFINITION_SUPPORT
1294             : atomic_noncopyable
1295 #endif
1296         {
1297         private:
1298             typename cds::details::aligned_type<T, sizeof(T)>::type volatile m_val;
1299             //T volatile  m_val;
1300             typedef atomic_integral_ops<T, sizeof(T)>   atomic_ops;
1301         public:
1302             typedef T   atomic_type;
1303         public:
1304             bool is_lock_free() const volatile CDS_NOEXCEPT
1305             {
1306                 return true;
1307             }
1308             bool is_lock_free() const CDS_NOEXCEPT
1309             {
1310                 return true;
1311             }
1312             void store(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1313             {
1314                 atomic_ops::atomic_store_explicit( &m_val, val, order );
1315             }
1316             void store(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1317             {
1318                 atomic_ops::atomic_store_explicit( &m_val, val, order );
1319             }
1320
1321             T load(memory_order order = memory_order_seq_cst) const volatile CDS_NOEXCEPT
1322             {
1323                 return atomic_ops::atomic_load_explicit( &m_val, order );
1324             }
1325             T load(memory_order order  = memory_order_seq_cst) const CDS_NOEXCEPT
1326             {
1327                 return atomic_ops::atomic_load_explicit( &m_val, order );
1328             }
1329
1330             operator T() const volatile CDS_NOEXCEPT
1331             {
1332                 return load();
1333             }
1334             operator T() const CDS_NOEXCEPT
1335             {
1336                 return load();
1337             }
1338
1339             T exchange(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1340             {
1341                 return atomic_ops::atomic_exchange_explicit( &m_val, val, order );
1342             }
1343             T exchange(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1344             {
1345                 return atomic_ops::atomic_exchange_explicit( &m_val, val, order );
1346             }
1347
1348             bool compare_exchange_weak(T& expected, T desired , memory_order success_order, memory_order failure_order) volatile CDS_NOEXCEPT
1349             {
1350                 return atomic_ops::atomic_compare_exchange_weak_explicit( &m_val, &expected, desired, success_order, failure_order );
1351             }
1352             bool compare_exchange_weak(T& expected, T desired , memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1353             {
1354                 return atomic_ops::atomic_compare_exchange_weak_explicit( &m_val, &expected, desired, success_order, failure_order );
1355             }
1356             bool compare_exchange_strong(T& expected, T desired , memory_order success_order, memory_order failure_order) volatile CDS_NOEXCEPT
1357             {
1358                 return atomic_ops::atomic_compare_exchange_strong_explicit( &m_val, &expected, desired, success_order, failure_order );
1359             }
1360             bool compare_exchange_strong(T& expected, T desired , memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1361             {
1362                 return atomic_ops::atomic_compare_exchange_strong_explicit( &m_val, &expected, desired, success_order, failure_order );
1363             }
1364             bool compare_exchange_weak(T& expected, T desired , memory_order success_order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1365             {
1366                 return compare_exchange_weak( expected, desired, success_order, memory_order_relaxed );
1367             }
1368             bool compare_exchange_weak(T& expected, T desired , memory_order success_order = memory_order_seq_cst) CDS_NOEXCEPT
1369             {
1370                 return compare_exchange_weak( expected, desired, success_order, memory_order_relaxed );
1371             }
1372             bool compare_exchange_strong(T& expected, T desired , memory_order success_order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1373             {
1374                 return compare_exchange_strong( expected, desired, success_order, memory_order_relaxed );
1375             }
1376             bool compare_exchange_strong(T& expected, T desired , memory_order success_order = memory_order_seq_cst) CDS_NOEXCEPT
1377             {
1378                 return compare_exchange_strong( expected, desired, success_order, memory_order_relaxed );
1379             }
1380
1381             T fetch_add(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1382             {
1383                 return atomic_ops::atomic_fetch_add_explicit( &m_val, val, order );
1384             }
1385             T fetch_add(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1386             {
1387                 return atomic_ops::atomic_fetch_add_explicit( &m_val, val, order );
1388             }
1389             T fetch_sub(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1390             {
1391                 return atomic_ops::atomic_fetch_sub_explicit( &m_val, val, order );
1392             }
1393             T fetch_sub(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1394             {
1395                 return atomic_ops::atomic_fetch_sub_explicit( &m_val, val, order );
1396             }
1397             T fetch_and(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1398             {
1399                 return atomic_ops::atomic_fetch_and_explicit( &m_val, val, order );
1400             }
1401             T fetch_and(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1402             {
1403                 return atomic_ops::atomic_fetch_and_explicit( &m_val, val, order );
1404             }
1405
1406             T fetch_or(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1407             {
1408                 return atomic_ops::atomic_fetch_or_explicit( &m_val, val, order );
1409             }
1410             T fetch_or(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1411             {
1412                 return atomic_ops::atomic_fetch_or_explicit( &m_val, val, order );
1413             }
1414             T fetch_xor(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1415             {
1416                 return atomic_ops::atomic_fetch_xor_explicit( &m_val, val, order );
1417             }
1418             T fetch_xor(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1419             {
1420                 return atomic_ops::atomic_fetch_xor_explicit( &m_val, val, order );
1421             }
1422
1423             atomic_integral() = default;
1424             CDS_CONSTEXPR atomic_integral(T val) CDS_NOEXCEPT
1425                 : m_val(val)
1426                 {}
1427
1428 #ifdef CDS_CXX11_DELETE_DEFINITION_SUPPORT
1429             atomic_integral(const atomic_integral&) = delete;
1430             atomic_integral& operator=(const atomic_integral&) = delete;
1431             atomic_integral& operator=(const atomic_integral&) volatile = delete;
1432 #endif
1433             T operator=(T val) volatile CDS_NOEXCEPT
1434             {
1435                 store(val);
1436                 return val;
1437             }
1438             T operator=(T val) CDS_NOEXCEPT
1439             {
1440                 store(val);
1441                 return val;
1442             }
1443
1444             // Post inc/dec
1445             T operator++(int) volatile CDS_NOEXCEPT
1446             {
1447                 return fetch_add( 1 );
1448             }
1449             T operator++(int) CDS_NOEXCEPT
1450             {
1451                 return fetch_add( 1 );
1452             }
1453             T operator--(int) volatile CDS_NOEXCEPT
1454             {
1455                 return fetch_sub( 1 );
1456             }
1457             T operator--(int) CDS_NOEXCEPT
1458             {
1459                 return fetch_sub( 1 );
1460             }
1461
1462             // Pre inc/dec
1463             T operator++() volatile CDS_NOEXCEPT
1464             {
1465                 return fetch_add( 1 ) + 1;
1466             }
1467             T operator++() CDS_NOEXCEPT
1468             {
1469                 return fetch_add( 1 ) + 1;
1470             }
1471             T operator--() volatile CDS_NOEXCEPT
1472             {
1473                 return fetch_sub( 1 ) - 1;
1474             }
1475             T operator--() CDS_NOEXCEPT
1476             {
1477                 return fetch_sub( 1 ) - 1;
1478             }
1479
1480             // op=
1481             T operator+=(T val) volatile CDS_NOEXCEPT
1482             {
1483                 return fetch_add( val ) + val;
1484             }
1485             T operator+=(T val) CDS_NOEXCEPT
1486             {
1487                 return fetch_add( val ) + val;
1488             }
1489             T operator-=(T val) volatile CDS_NOEXCEPT
1490             {
1491                 return fetch_sub( val ) - val;
1492             }
1493             T operator-=(T val) CDS_NOEXCEPT
1494             {
1495                 return fetch_sub( val ) - val;
1496             }
1497             T operator&=(T val) volatile CDS_NOEXCEPT
1498             {
1499                 return fetch_and( val ) & val;
1500             }
1501             T operator&=(T val) CDS_NOEXCEPT
1502             {
1503                 return fetch_and( val ) & val;
1504             }
1505             T operator|=(T val) volatile CDS_NOEXCEPT
1506             {
1507                 return fetch_or( val ) | val;
1508             }
1509             T operator|=(T val) CDS_NOEXCEPT
1510             {
1511                 return fetch_or( val ) | val;
1512             }
1513             T operator^=(T val) volatile CDS_NOEXCEPT
1514             {
1515                 return fetch_xor( val ) ^ val;
1516             }
1517             T operator^=(T val) CDS_NOEXCEPT
1518             {
1519                 return fetch_xor( val ) ^ val;
1520             }
1521         };
1522
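             // Maps a value type onto the unsigned integral type of the same size
             // (see primary_type<> above) so that the platform primitives can operate
             // on the raw bits; bool keeps its own representation.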
1523         template <typename Type>
1524         struct select_primary_type {
1525             typedef typename details::primary_type<sizeof(Type)>::type type;
1526         };
1527         template <>
1528         struct select_primary_type<bool> {
1529             typedef bool type;
1530         };
1531
1532     }   // namespace details
1533
1534     template <class T>
1535     struct atomic
1536 #ifndef CDS_CXX11_DELETE_DEFINITION_SUPPORT
1537         : details::atomic_noncopyable
1538 #endif
1539     {
1540     private:
1541         typedef details::atomic_generic_ops<T, sizeof(T), typename details::select_primary_type<T>::type >  atomic_ops;
1542
1543         T volatile m_data;
1544     public:
1545         bool is_lock_free() const volatile CDS_NOEXCEPT
1546         {
1547             return true;
1548         }
1549         bool is_lock_free() const CDS_NOEXCEPT
1550         {
1551             return true;
1552         }
1553
1554         void store(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1555         {
1556             atomic_ops::atomic_store_explicit( &m_data, val, order );
1557         }
1558         void store(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1559         {
1560             atomic_ops::atomic_store_explicit( &m_data, val, order );
1561         }
1562
1563         T load(memory_order order = memory_order_seq_cst) const volatile CDS_NOEXCEPT
1564         {
1565             return atomic_ops::atomic_load_explicit( &m_data, order );
1566         }
1567         T load(memory_order order = memory_order_seq_cst) const CDS_NOEXCEPT
1568         {
1569             return atomic_ops::atomic_load_explicit( &m_data, order );
1570         }
1571
1572         operator T() const volatile CDS_NOEXCEPT
1573         {
1574             return load();
1575         }
1576         operator T() const CDS_NOEXCEPT
1577         {
1578             return load();
1579         }
1580
1581         T exchange(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1582         {
1583             return atomic_ops::atomic_exchange_explicit( &m_data, val, order );
1584         }
1585         T exchange(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1586         {
1587             return atomic_ops::atomic_exchange_explicit( &m_data, val, order );
1588         }
1589
1590         bool compare_exchange_weak(T& expected, T desired, memory_order success_order, memory_order failure_order) volatile CDS_NOEXCEPT
1591         {
1592             return atomic_ops::atomic_compare_exchange_weak_explicit( &m_data, &expected, desired, success_order, failure_order );
1593         }
1594         bool compare_exchange_weak(T& expected, T desired, memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1595         {
1596             return atomic_ops::atomic_compare_exchange_weak_explicit( &m_data, &expected, desired, success_order, failure_order );
1597         }
1598         bool compare_exchange_strong(T& expected, T desired, memory_order success_order, memory_order failure_order) volatile CDS_NOEXCEPT
1599         {
1600             return atomic_ops::atomic_compare_exchange_strong_explicit( &m_data, &expected, desired, success_order, failure_order );
1601         }
1602         bool compare_exchange_strong(T& expected, T desired, memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1603         {
1604             return atomic_ops::atomic_compare_exchange_strong_explicit( &m_data, &expected, desired, success_order, failure_order );
1605         }
1606         bool compare_exchange_weak(T& expected, T desired, memory_order success_order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1607         {
1608             return compare_exchange_weak( expected, desired, success_order, memory_order_relaxed );
1609         }
1610         bool compare_exchange_weak(T& expected, T desired, memory_order success_order = memory_order_seq_cst) CDS_NOEXCEPT
1611         {
1612             return compare_exchange_weak( expected, desired, success_order, memory_order_relaxed );
1613         }
1614         bool compare_exchange_strong(T& expected, T desired, memory_order success_order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1615         {
1616             return compare_exchange_strong( expected, desired, success_order, memory_order_relaxed );
1617         }
1618         bool compare_exchange_strong(T& expected, T desired, memory_order success_order = memory_order_seq_cst) CDS_NOEXCEPT
1619         {
1620             return compare_exchange_strong( expected, desired, success_order, memory_order_relaxed );
1621         }
1622
1623         atomic() = default;
1624         CDS_CONSTEXPR atomic(T val) CDS_NOEXCEPT
1625             : m_data( val )
1626             {}
1627
1628 #ifdef CDS_CXX11_DELETE_DEFINITION_SUPPORT
1629         atomic(const atomic&) = delete;
1630         atomic& operator=(const atomic&) = delete;
1631         atomic& operator=(const atomic&) volatile = delete;
1632 #endif
1633
1634         T operator=(T val) volatile CDS_NOEXCEPT
1635         {
1636             store( val );
1637             return val;
1638         }
1639         T operator=(T val) CDS_NOEXCEPT
1640         {
1641             store( val );
1642             return val;
1643         }
1644     };
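         // A minimal usage sketch of the generic atomic<T> template (illustration only,
         // not part of the library; the type and variable names are examples): a CAS
         // retry loop over a small trivially-copyable struct.
         //
         //      struct Point { int x; int y; };
         //      Point zero = { 0, 0 };
         //      cds::cxx11_atomic::atomic<Point> pt( zero );
         //      Point cur = pt.load( cds::cxx11_atomic::memory_order_acquire );
         //      Point upd;
         //      do {
         //          upd = cur;
         //          upd.x += 1;
         //      } while ( !pt.compare_exchange_weak( cur, upd,
         //          cds::cxx11_atomic::memory_order_release,
         //          cds::cxx11_atomic::memory_order_relaxed ));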
1645
1646 #if defined(CDS_CXX11_DELETE_DEFINITION_SUPPORT)
1647 #   define CDS_DECLARE_ATOMIC_INTEGRAL( _type ) \
1648     template <> \
1649     struct atomic<_type>: public details::atomic_integral<_type> \
1650     { \
1651     private: \
1652         typedef details::atomic_integral<_type>   base_class  ; \
1653     public: \
1654         atomic() = default; \
1655         atomic(_type val) CDS_NOEXCEPT : base_class(val) {} \
1656         atomic(const atomic&) = delete; \
1657         atomic& operator=(const atomic&) = delete; \
1658         atomic& operator=(const atomic&) volatile = delete; \
1659         _type operator=(_type val) volatile CDS_NOEXCEPT { return base_class::operator=(val); } \
1660         _type operator=(_type val) CDS_NOEXCEPT { return base_class::operator=(val); } \
1661     };
1662 #else
1663 #   define CDS_DECLARE_ATOMIC_INTEGRAL( _type ) \
1664     template <> \
1665     struct atomic<_type>: public details::atomic_integral<_type> \
1666     { \
1667     private: \
1668         typedef details::atomic_integral<_type>   base_class  ; \
1669     public: \
1670         atomic() {} \
1671         atomic(_type val) CDS_NOEXCEPT : base_class(val) {} \
1672         _type operator=(_type val) volatile CDS_NOEXCEPT { return base_class::operator=(val); } \
1673         _type operator=(_type val) CDS_NOEXCEPT { return base_class::operator=(val); } \
1674     };
1675 #endif
1676
1677     CDS_DECLARE_ATOMIC_INTEGRAL(char)
1678     CDS_DECLARE_ATOMIC_INTEGRAL(signed char)
1679     CDS_DECLARE_ATOMIC_INTEGRAL(unsigned char)
1680     CDS_DECLARE_ATOMIC_INTEGRAL(short)
1681     CDS_DECLARE_ATOMIC_INTEGRAL(unsigned short)
1682     CDS_DECLARE_ATOMIC_INTEGRAL(int)
1683     CDS_DECLARE_ATOMIC_INTEGRAL(unsigned int)
1684     CDS_DECLARE_ATOMIC_INTEGRAL(long)
1685     CDS_DECLARE_ATOMIC_INTEGRAL(unsigned long)
1686     CDS_DECLARE_ATOMIC_INTEGRAL(long long)
1687     CDS_DECLARE_ATOMIC_INTEGRAL(unsigned long long)
1688 //#if CDS_COMPILER == CDS_COMPILER_GCC && CDS_COMPILER_VERSION >= 40400
1689 //    CDS_DECLARE_ATOMIC_INTEGRAL(char16_t)
1690 //    CDS_DECLARE_ATOMIC_INTEGRAL(char32_t)
1691 //#endif
1692 //    CDS_DECLARE_ATOMIC_INTEGRAL(wchar_t)
1693
1694 #   undef CDS_DECLARE_ATOMIC_INTEGRAL
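         // A minimal usage sketch of the integral specializations declared above
         // (illustration only, not part of the library; names are examples):
         //
         //      cds::cxx11_atomic::atomic<int> nCounter( 0 );
         //      nCounter.fetch_add( 1, cds::cxx11_atomic::memory_order_relaxed );
         //      int nExpected = 1;
         //      nCounter.compare_exchange_strong( nExpected, 5,
         //          cds::cxx11_atomic::memory_order_acq_rel,
         //          cds::cxx11_atomic::memory_order_relaxed );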
1695
1696
1697     template <typename T>
1698     class atomic<T *>
1699 #ifndef CDS_CXX11_DELETE_DEFINITION_SUPPORT
1700         : details::atomic_noncopyable
1701 #endif
1702     {
1703     private:
1704         T * volatile m_ptr;
1705         typedef details::atomic_pointer<T>  atomic_ops;
1706     public:
1707         bool is_lock_free() const volatile CDS_NOEXCEPT
1708         {
1709             return true;
1710         }
1711         bool is_lock_free() const CDS_NOEXCEPT
1712         {
1713             return true;
1714         }
1715
1716         void store(T * val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1717         {
1718             atomic_ops::atomic_store_explicit( &m_ptr, val, order );
1719         }
1720         void store(T * val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1721         {
1722             atomic_ops::atomic_store_explicit( &m_ptr, val, order );
1723         }
1724
1725         T * load(memory_order order = memory_order_seq_cst) const volatile CDS_NOEXCEPT
1726         {
1727             return atomic_ops::atomic_load_explicit( &m_ptr, order );
1728         }
1729         T * load(memory_order order = memory_order_seq_cst) const CDS_NOEXCEPT
1730         {
1731             return atomic_ops::atomic_load_explicit( &m_ptr, order );
1732         }
1733
1734         operator T *() const volatile CDS_NOEXCEPT
1735         {
1736             return load();
1737         }
1738         operator T *() const CDS_NOEXCEPT
1739         {
1740             return load();
1741         }
1742
1743         T * exchange(T * val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1744         {
1745             return atomic_ops::atomic_exchange_explicit( &m_ptr, val, order );
1746         }
1747         T * exchange(T * val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1748         {
1749             return atomic_ops::atomic_exchange_explicit( &m_ptr, val, order );
1750         }
1751
1752         bool compare_exchange_weak(T *& expected, T * desired, memory_order success_order, memory_order failure_order) volatile CDS_NOEXCEPT
1753         {
1754             return atomic_ops::atomic_compare_exchange_weak_explicit( &m_ptr, &expected, desired, success_order, failure_order );
1755         }
1756         bool compare_exchange_weak(T *& expected, T * desired, memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1757         {
1758             return atomic_ops::atomic_compare_exchange_weak_explicit( &m_ptr, &expected, desired, success_order, failure_order );
1759         }
1760         bool compare_exchange_strong(T *& expected, T * desired, memory_order success_order, memory_order failure_order) volatile CDS_NOEXCEPT
1761         {
1762             return atomic_ops::atomic_compare_exchange_strong_explicit( &m_ptr, &expected, desired, success_order, failure_order );
1763         }
1764         bool compare_exchange_strong(T *& expected, T * desired, memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1765         {
1766             return atomic_ops::atomic_compare_exchange_strong_explicit( &m_ptr, &expected, desired, success_order, failure_order );
1767         }
1768         bool compare_exchange_weak(T *& expected, T * desired, memory_order success_order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1769         {
1770             return compare_exchange_weak( expected, desired, success_order, memory_order_relaxed );
1771         }
1772         bool compare_exchange_weak(T *& expected, T * desired, memory_order success_order = memory_order_seq_cst) CDS_NOEXCEPT
1773         {
1774             return compare_exchange_weak( expected, desired, success_order, memory_order_relaxed );
1775         }
1776         bool compare_exchange_strong(T *& expected, T * desired, memory_order success_order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1777         {
1778             return compare_exchange_strong( expected, desired, success_order, memory_order_relaxed );
1779         }
1780         bool compare_exchange_strong(T *& expected, T * desired, memory_order success_order = memory_order_seq_cst) CDS_NOEXCEPT
1781         {
1782             return compare_exchange_strong( expected, desired, success_order, memory_order_relaxed );
1783         }
1784
1785         T * fetch_add(ptrdiff_t offset, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1786         {
1787             return atomic_ops::atomic_fetch_add_explicit( &m_ptr, offset, order );
1788         }
1789         T * fetch_add(ptrdiff_t offset, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1790         {
1791             return atomic_ops::atomic_fetch_add_explicit( &m_ptr, offset, order );
1792         }
1793
1794         T * fetch_sub(ptrdiff_t offset, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1795         {
1796             return atomic_ops::atomic_fetch_sub_explicit( &m_ptr, offset, order );
1797         }
1798         T * fetch_sub(ptrdiff_t offset, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1799         {
1800             return atomic_ops::atomic_fetch_sub_explicit( &m_ptr, offset, order );
1801         }
1802
1803         atomic() = default;
1804         CDS_CONSTEXPR atomic(T * val) CDS_NOEXCEPT
1805             : m_ptr( val )
1806         {}
1807
1808 #ifdef CDS_CXX11_DELETE_DEFINITION_SUPPORT
1809         atomic(const atomic&) = delete;
1810         atomic& operator=(const atomic&) = delete;
1811         atomic& operator=(const atomic&) volatile = delete;
1812 #endif
1813
1814         T * operator=(T * val) volatile CDS_NOEXCEPT
1815         {
1816             store( val );
1817             return val;
1818         }
1819         T * operator=(T * val) CDS_NOEXCEPT
1820         {
1821             store( val );
1822             return val;
1823         }
1824     };
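         // A minimal usage sketch of the atomic<T *> specialization (illustration only,
         // not part of the library; names are examples). As with std::atomic<T *>,
         // fetch_add/fetch_sub step the pointer in units of T:
         //
         //      int arr[8] = {};
         //      cds::cxx11_atomic::atomic<int *> pCell( arr );
         //      int * pOld = pCell.fetch_add( 1 );   // pOld == arr, pCell now holds arr + 1
         //      pCell.store( arr, cds::cxx11_atomic::memory_order_release );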
1825
1826     // Atomic typedefs
1827     typedef atomic<bool>            atomic_bool;
1828     typedef atomic<char>            atomic_char;
1829     typedef atomic<signed char>     atomic_schar;
1830     typedef atomic<unsigned char>   atomic_uchar;
1831     typedef atomic<short>           atomic_short;
1832     typedef atomic<unsigned short>  atomic_ushort;
1833     typedef atomic<int>             atomic_int;
1834     typedef atomic<unsigned int>    atomic_uint;
1835     typedef atomic<long>            atomic_long;
1836     typedef atomic<unsigned long>   atomic_ulong;
1837     typedef atomic<long long>       atomic_llong;
1838     typedef atomic<unsigned long long> atomic_ullong;
1839 #if ( CDS_COMPILER == CDS_COMPILER_GCC && CDS_COMPILER_VERSION >= 40400 ) || CDS_COMPILER == CDS_COMPILER_CLANG
1840     typedef atomic<char16_t>        atomic_char16_t;
1841     typedef atomic<char32_t>        atomic_char32_t;
1842 #endif
1843     typedef atomic<wchar_t>         atomic_wchar_t;
1844
1845
1846     typedef atomic<cds::int_least8_t>    atomic_int_least8_t;
1847     typedef atomic<cds::uint_least8_t>   atomic_uint_least8_t;
1848     typedef atomic<cds::int_least16_t>   atomic_int_least16_t;
1849     typedef atomic<cds::uint_least16_t>  atomic_uint_least16_t;
1850     typedef atomic<cds::int_least32_t>   atomic_int_least32_t;
1851     typedef atomic<cds::uint_least32_t>  atomic_uint_least32_t;
1852     typedef atomic<cds::int_least64_t>   atomic_int_least64_t;
1853     typedef atomic<cds::uint_least64_t>  atomic_uint_least64_t;
1854     typedef atomic<cds::int_fast8_t>     atomic_int_fast8_t;
1855     typedef atomic<cds::uint_fast8_t>    atomic_uint_fast8_t;
1856     typedef atomic<cds::int_fast16_t>    atomic_int_fast16_t;
1857     typedef atomic<cds::uint_fast16_t>   atomic_uint_fast16_t;
1858     typedef atomic<cds::int_fast32_t>    atomic_int_fast32_t;
1859     typedef atomic<cds::uint_fast32_t>   atomic_uint_fast32_t;
1860     typedef atomic<cds::int_fast64_t>    atomic_int_fast64_t;
1861     typedef atomic<cds::uint_fast64_t>   atomic_uint_fast64_t;
1862     typedef atomic<intptr_t>             atomic_intptr_t;
1863     typedef atomic<uintptr_t>            atomic_uintptr_t;
1864     typedef atomic<size_t>               atomic_size_t;
1865     typedef atomic<ptrdiff_t>            atomic_ptrdiff_t;
1866     typedef atomic<cds::intmax_t>        atomic_intmax_t;
1867     typedef atomic<cds::uintmax_t>       atomic_uintmax_t;
1868
1869     template <class T>
1870     static inline bool atomic_is_lock_free(const volatile atomic<T> * p) CDS_NOEXCEPT
1871     {
1872         return p->is_lock_free();
1873     }
1874
1875     template <class T>
1876     static inline bool atomic_is_lock_free(const atomic<T> * p ) CDS_NOEXCEPT
1877     {
1878         return p->is_lock_free();
1879     }
1880
1881     /*
1882     template <class T>
1883     static inline void atomic_init(volatile atomic<T> * p, T val) CDS_NOEXCEPT
1884     {
1885         p->init( val );
1886     }
1887
1888     template <class T>
1889     static inline void atomic_init( atomic<T> * p, T val) CDS_NOEXCEPT
1890     {
1891         p->init( val );
1892     }
1893     */
1894
1895     template <class T>
1896     static inline void atomic_store(volatile atomic<T>* p, T val) CDS_NOEXCEPT
1897     {
1898         p->store(val);
1899     }
1900     template <class T>
1901     static inline void atomic_store(atomic<T>* p, T val) CDS_NOEXCEPT
1902     {
1903         p->store( val );
1904     }
1905
1906     template <class T>
1907     static inline void atomic_store_explicit(volatile atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
1908     {
1909         p->store( val, order );
1910     }
1911     template <class T>
1912     static inline void atomic_store_explicit(atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
1913     {
1914         p->store( val, order );
1915     }
1916
1917     template <class T>
1918     static inline T atomic_load(const volatile atomic<T>* p) CDS_NOEXCEPT
1919     {
1920         return p->load();
1921     }
1922     template <class T>
1923     static inline T atomic_load(const atomic<T>* p) CDS_NOEXCEPT
1924     {
1925         return p->load();
1926     }
1927
1928     template <class T>
1929     static inline T atomic_load_explicit(const volatile atomic<T>* p, memory_order order) CDS_NOEXCEPT
1930     {
1931         return p->load( order );
1932     }
1933     template <class T>
1934     static inline T atomic_load_explicit(const atomic<T>* p, memory_order order) CDS_NOEXCEPT
1935     {
1936         return p->load( order );
1937     }
1938
1939     template <class T>
1940     static inline T atomic_exchange(volatile atomic<T>* p, T val) CDS_NOEXCEPT
1941     {
1942         return p->exchange( val );
1943     }
1944     template <class T>
1945     static inline T atomic_exchange(atomic<T>* p, T val ) CDS_NOEXCEPT
1946     {
1947         return p->exchange( val );
1948     }
1949
1950     template <class T>
1951     static inline T atomic_exchange_explicit(volatile atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
1952     {
1953         return p->exchange( val, order );
1954     }
1955     template <class T>
1956     static inline T atomic_exchange_explicit(atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
1957     {
1958         return p->exchange( val, order );
1959     }
1960
1961     template <class T>
1962     static inline bool atomic_compare_exchange_weak(volatile atomic<T>* p, T* expected, T desired) CDS_NOEXCEPT
1963     {
1964         return p->compare_exchange_weak( *expected, desired );
1965     }
1966     template <class T>
1967     static inline bool atomic_compare_exchange_weak(atomic<T>* p, T* expected, T desired) CDS_NOEXCEPT
1968     {
1969         return p->compare_exchange_weak( *expected, desired );
1970     }
1971
1972     template <class T>
1973     static inline bool atomic_compare_exchange_strong(volatile atomic<T>* p, T* expected, T desired) CDS_NOEXCEPT
1974     {
1975         return p->compare_exchange_strong( *expected, desired );
1976     }
1977     template <class T>
1978     static inline bool atomic_compare_exchange_strong(atomic<T>* p, T* expected, T desired) CDS_NOEXCEPT
1979     {
1980         return p->compare_exchange_strong( *expected, desired );
1981     }
1982
1983     template <class T>
1984     static inline bool atomic_compare_exchange_weak_explicit(volatile atomic<T>* p, T* expected, T desired, memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1985     {
1986         return p->compare_exchange_weak( *expected, desired, success_order, failure_order );
1987     }
1988     template <class T>
1989     static inline bool atomic_compare_exchange_weak_explicit(atomic<T>* p, T* expected, T desired, memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1990     {
1991         return p->compare_exchange_weak( *expected, desired, success_order, failure_order );
1992     }
1993
1994     template <class T>
1995     static inline bool atomic_compare_exchange_strong_explicit(volatile atomic<T>* p, T* expected, T desired, memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1996     {
1997         return p->compare_exchange_strong( *expected, desired, success_order, failure_order );
1998     }
1999     template <class T>
2000     static inline bool atomic_compare_exchange_strong_explicit(atomic<T>* p, T* expected, T desired, memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
2001     {
2002         return p->compare_exchange_strong( *expected, desired, success_order, failure_order );
2003     }
2004
2005     template <class T>
2006     static inline T atomic_fetch_add(volatile atomic<T>* p, T val) CDS_NOEXCEPT
2007     {
2008         return p->fetch_add( val );
2009     }
2010     template <class T>
2011     static inline T atomic_fetch_add(atomic<T>* p, T val) CDS_NOEXCEPT
2012     {
2013         return p->fetch_add( val );
2014     }
2015     template <class T>
2016     static inline T * atomic_fetch_add(volatile atomic<T *>* p, ptrdiff_t offset) CDS_NOEXCEPT
2017     {
2018         return p->fetch_add( offset );
2019     }
2020     template <class T>
2021     static inline T * atomic_fetch_add(atomic<T *>* p, ptrdiff_t offset) CDS_NOEXCEPT
2022     {
2023         return p->fetch_add( offset );
2024     }
2025
2026     template <class T>
2027     static inline T atomic_fetch_add_explicit(volatile atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2028     {
2029         return p->fetch_add( val, order );
2030     }
2031     template <class T>
2032     static inline T atomic_fetch_add_explicit(atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2033     {
2034         return p->fetch_add( val, order );
2035     }
2036     template <class T>
2037     static inline T * atomic_fetch_add_explicit(volatile atomic<T *>* p, ptrdiff_t offset, memory_order order) CDS_NOEXCEPT
2038     {
2039         return p->fetch_add( offset, order );
2040     }
2041     template <class T>
2042     static inline T * atomic_fetch_add_explicit(atomic<T *>* p, ptrdiff_t offset, memory_order order) CDS_NOEXCEPT
2043     {
2044         return p->fetch_add( offset, order );
2045     }
2046
2047     template <class T>
2048     static inline T atomic_fetch_sub(volatile atomic<T>* p, T val) CDS_NOEXCEPT
2049     {
2050         return p->fetch_sub( val );
2051     }
2052     template <class T>
2053     static inline T atomic_fetch_sub(atomic<T>* p, T val) CDS_NOEXCEPT
2054     {
2055         return p->fetch_sub( val );
2056     }
2057     template <class T>
2058     static inline T * atomic_fetch_sub(volatile atomic<T *>* p, ptrdiff_t offset) CDS_NOEXCEPT
2059     {
2060         return p->fetch_sub( offset );
2061     }
2062     template <class T>
2063     static inline T * atomic_fetch_sub(atomic<T *>* p, ptrdiff_t offset) CDS_NOEXCEPT
2064     {
2065         return p->fetch_sub( offset );
2066     }
2067
2068     template <class T>
2069     static inline T atomic_fetch_sub_explicit(volatile atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2070     {
2071         return p->fetch_sub( val, order );
2072     }
2073     template <class T>
2074     static inline T atomic_fetch_sub_explicit(atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2075     {
2076         return p->fetch_sub( val, order );
2077     }
2078     template <class T>
2079     static inline T * atomic_fetch_sub_explicit(volatile atomic<T *>* p, ptrdiff_t offset, memory_order order) CDS_NOEXCEPT
2080     {
2081         return p->fetch_sub( offset, order );
2082     }
2083     template <class T>
2084     static inline T * atomic_fetch_sub_explicit(atomic<T *>* p, ptrdiff_t offset, memory_order order) CDS_NOEXCEPT
2085     {
2086         return p->fetch_sub( offset, order );
2087     }
2088
2089     template <class T>
2090     static inline T atomic_fetch_and(volatile atomic<T>* p, T val) CDS_NOEXCEPT
2091     {
2092         return p->fetch_and( val );
2093     }
2094     template <class T>
2095     static inline T atomic_fetch_and(atomic<T>* p, T val) CDS_NOEXCEPT
2096     {
2097         return p->fetch_and( val );
2098     }
2099
2100     template <class T>
2101     static inline T atomic_fetch_and_explicit(volatile atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2102     {
2103         return p->fetch_and( val, order );
2104     }
2105     template <class T>
2106     static inline T atomic_fetch_and_explicit(atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2107     {
2108         return p->fetch_and( val, order );
2109     }
2110
2111     template <class T>
2112     static inline T atomic_fetch_or(volatile atomic<T>* p, T val) CDS_NOEXCEPT
2113     {
2114         return p->fetch_or( val );
2115     }
2116     template <class T>
2117     static inline T atomic_fetch_or(atomic<T>* p, T val) CDS_NOEXCEPT
2118     {
2119         return p->fetch_or( val );
2120     }
2121
2122     template <class T>
2123     static inline T atomic_fetch_or_explicit(volatile atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2124     {
2125         return p->fetch_or( val, order );
2126     }
2127     template <class T>
2128     static inline T atomic_fetch_or_explicit(atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2129     {
2130         return p->fetch_or( val, order );
2131     }
2132
2133     template <class T>
2134     static inline T atomic_fetch_xor(volatile atomic<T>* p, T val) CDS_NOEXCEPT
2135     {
2136         return p->fetch_xor( val );
2137     }
2138     template <class T>
2139     static inline T atomic_fetch_xor(atomic<T>* p, T val) CDS_NOEXCEPT
2140     {
2141         return p->fetch_xor( val );
2142     }
2143
2144     template <class T>
2145     static inline T atomic_fetch_xor_explicit(volatile atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2146     {
2147         return p->fetch_xor( val, order );
2148     }
2149     template <class T>
2150     static inline T atomic_fetch_xor_explicit(atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2151     {
2152         return p->fetch_xor( val, order );
2153     }
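     // The free functions above mirror the C++11 <atomic> non-member interface, so the
     // same operation can be written either way. A minimal sketch (illustration only;
     // nFlags is an example name):
     //
     //      cds::cxx11_atomic::atomic_uint nFlags( 0 );
     //      cds::cxx11_atomic::atomic_fetch_or_explicit( &nFlags, 0x04u,
     //          cds::cxx11_atomic::memory_order_release );
     //      unsigned int n = cds::cxx11_atomic::atomic_load( &nFlags );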
2154
2155     // Atomic flag type
2156     typedef struct atomic_flag
2157     {
2158         void clear( memory_order order = memory_order_seq_cst ) volatile CDS_NOEXCEPT
2159         {
2160             assert( order != memory_order_acquire
2161                 && order != memory_order_acq_rel
2162                 && order != memory_order_consume
2163                 );
2164             platform::atomic_flag_clear( &m_Flag, order );
2165         }
2166         void clear( memory_order order = memory_order_seq_cst ) CDS_NOEXCEPT
2167         {
2168             assert( order != memory_order_acquire
2169                 && order != memory_order_acq_rel
2170                 && order != memory_order_consume
2171                 );
2172             platform::atomic_flag_clear( &m_Flag, order );
2173         }
2174
2175         bool test_and_set( memory_order order = memory_order_seq_cst ) volatile CDS_NOEXCEPT
2176         {
2177             return platform::atomic_flag_tas( &m_Flag, order );
2178         }
2179         bool test_and_set( memory_order order = memory_order_seq_cst ) CDS_NOEXCEPT
2180         {
2181             return platform::atomic_flag_tas( &m_Flag, order );
2182         }
2183
2184         atomic_flag() = default;
2185
2186 #ifdef CDS_CXX11_DELETE_DEFINITION_SUPPORT
2187         atomic_flag(const atomic_flag&) = delete;
2188         atomic_flag& operator=(const atomic_flag&) = delete;
2189         atomic_flag& operator=(const atomic_flag&) volatile = delete;
2190 #elif CDS_COMPILER != CDS_COMPILER_MSVC
2191     // MS VC++ generates error C2552 "non-aggregates cannot be initialized with initializer list"
2192     // when atomic_flag is initialized with ATOMIC_FLAG_INIT
2193     private:
2194         atomic_flag(const atomic_flag&);
2195         atomic_flag& operator=(const atomic_flag&);
2196         atomic_flag& operator=(const atomic_flag&) volatile;
2197     public:
2198 #endif
2199
2200         platform::atomic_flag_type volatile m_Flag;
2201     } atomic_flag;
2202
2203     static inline bool atomic_flag_test_and_set(volatile atomic_flag* p) CDS_NOEXCEPT
2204     {
2205         return p->test_and_set();
2206     }
2207     static inline bool atomic_flag_test_and_set(atomic_flag * p) CDS_NOEXCEPT
2208     {
2209         return p->test_and_set();
2210     }
2211     static inline bool atomic_flag_test_and_set_explicit(volatile atomic_flag* p, memory_order order) CDS_NOEXCEPT
2212     {
2213         return p->test_and_set( order );
2214     }
2215     static inline bool atomic_flag_test_and_set_explicit(atomic_flag* p, memory_order order) CDS_NOEXCEPT
2216     {
2217         return p->test_and_set( order );
2218     }
2219     static inline void atomic_flag_clear(volatile atomic_flag* p) CDS_NOEXCEPT
2220     {
2221         return p->clear();
2222     }
2223     static inline void atomic_flag_clear(atomic_flag* p) CDS_NOEXCEPT
2224     {
2225         return p->clear();
2226     }
2227     static inline void atomic_flag_clear_explicit(volatile atomic_flag* p, memory_order order) CDS_NOEXCEPT
2228     {
2229         return p->clear( order );
2230     }
2231     static inline void atomic_flag_clear_explicit(atomic_flag* p, memory_order order) CDS_NOEXCEPT
2232     {
2233         return p->clear( order );
2234     }
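     // A minimal spin-lock sketch built on atomic_flag (illustration only, not part of
     // the library; names are examples). Note that the default constructor leaves the
     // flag uninitialized, so it is cleared explicitly before first use:
     //
     //      cds::cxx11_atomic::atomic_flag lock;
     //      lock.clear();   // start unlocked
     //      while ( lock.test_and_set( cds::cxx11_atomic::memory_order_acquire ))
     //          ;           // spin until the owner releases the lock
     //      // ... critical section ...
     //      lock.clear( cds::cxx11_atomic::memory_order_release );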
2235
2236     // Fences
2237     static inline void atomic_thread_fence(memory_order order) CDS_NOEXCEPT
2238     {
2239         platform::thread_fence( order );
2240         CDS_COMPILER_RW_BARRIER;
2241     }
2242     static inline void atomic_signal_fence(memory_order order) CDS_NOEXCEPT
2243     {
2244         platform::signal_fence( order );
2245     }
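     // The fences are the usual stand-alone barriers: a release fence before a relaxed
     // store pairs with an acquire fence after a relaxed load of the same variable.
     // A minimal sketch (illustration only; g_data and g_ready are example names):
     //
     //      // producer
     //      g_data = 42;
     //      cds::cxx11_atomic::atomic_thread_fence( cds::cxx11_atomic::memory_order_release );
     //      g_ready.store( true, cds::cxx11_atomic::memory_order_relaxed );
     //
     //      // consumer
     //      while ( !g_ready.load( cds::cxx11_atomic::memory_order_relaxed ))
     //          ;
     //      cds::cxx11_atomic::atomic_thread_fence( cds::cxx11_atomic::memory_order_acquire );
     //      assert( g_data == 42 );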
2246
2247 }}  // namespace cds::cxx11_atomic
2248
2249 //@endcond
2250 #endif // #ifndef __CDS_COMPILER_CXX11_ATOMIC_H