libstdc++

atomic_base.h

Go to the documentation of this file.
00001 // -*- C++ -*- header.
00002 
00003 // Copyright (C) 2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
00004 //
00005 // This file is part of the GNU ISO C++ Library.  This library is free
00006 // software; you can redistribute it and/or modify it under the
00007 // terms of the GNU General Public License as published by the
00008 // Free Software Foundation; either version 3, or (at your option)
00009 // any later version.
00010 
00011 // This library is distributed in the hope that it will be useful,
00012 // but WITHOUT ANY WARRANTY; without even the implied warranty of
00013 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
00014 // GNU General Public License for more details.
00015 
00016 // Under Section 7 of GPL version 3, you are granted additional
00017 // permissions described in the GCC Runtime Library Exception, version
00018 // 3.1, as published by the Free Software Foundation.
00019 
00020 // You should have received a copy of the GNU General Public License and
00021 // a copy of the GCC Runtime Library Exception along with this program;
00022 // see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
00023 // <http://www.gnu.org/licenses/>.
00024 
00025 /** @file bits/atomic_base.h
00026  *  This is an internal header file, included by other library headers.
00027  *  Do not attempt to use it directly. @headername{atomic}
00028  */
00029 
00030 #ifndef _GLIBCXX_ATOMIC_BASE_H
00031 #define _GLIBCXX_ATOMIC_BASE_H 1
00032 
00033 #pragma GCC system_header
00034 
00035 #include <bits/c++config.h>
00036 #include <stdbool.h>
00037 #include <stdint.h>
00038 #include <bits/atomic_lockfree_defines.h>
00039 
00040 namespace std _GLIBCXX_VISIBILITY(default)
00041 {
00042 _GLIBCXX_BEGIN_NAMESPACE_VERSION
00043 
00044   /**
00045    * @defgroup atomics Atomics
00046    *
00047    * Components for performing atomic operations.
00048    * @{
00049    */
00050 
  /// Enumeration for memory_order
  //
  // The six C++11 memory-ordering constraints.  The enumerators are
  // declared in this exact order because objects of this type are passed
  // straight through to the GCC __atomic_* built-ins below, which
  // interpret the values directly.
  typedef enum memory_order
    {
      memory_order_relaxed,   // atomicity only, no ordering constraint
      memory_order_consume,   // dependency-ordered load
      memory_order_acquire,   // acquire ordering on a load
      memory_order_release,   // release ordering on a store
      memory_order_acq_rel,   // acquire + release (read-modify-write)
      memory_order_seq_cst    // single total order over all seq_cst ops
    } memory_order;
00061 
00062   // Drop release ordering as per [atomics.types.operations.req]/21
00063   constexpr memory_order
00064   __cmpexch_failure_order(memory_order __m) noexcept
00065   {
00066     return __m == memory_order_acq_rel ? memory_order_acquire
00067       : __m == memory_order_release ? memory_order_relaxed : __m;
00068   }
00069 
  /// Establish memory synchronization ordering __m between threads,
  /// without an associated atomic operation (forwards to the built-in).
  inline void
  atomic_thread_fence(memory_order __m) noexcept
  { __atomic_thread_fence(__m); }
00073 
  /// Fence between a thread and a signal handler executed in the same
  /// thread; a compiler barrier only, no hardware fence is emitted.
  inline void
  atomic_signal_fence(memory_order __m) noexcept
  { __atomic_signal_fence(__m); }
00077 
00078   /// kill_dependency
00079   template<typename _Tp>
00080     inline _Tp
00081     kill_dependency(_Tp __y) noexcept
00082     {
00083       _Tp __ret(__y);
00084       return __ret;
00085     }
00086 
00087 
00088   // Base types for atomics.
00089   template<typename _IntTp>
00090     struct __atomic_base;
00091 
00092   /// atomic_char
00093   typedef __atomic_base<char>               atomic_char;
00094 
00095   /// atomic_schar
00096   typedef __atomic_base<signed char>            atomic_schar;
00097 
00098   /// atomic_uchar
00099   typedef __atomic_base<unsigned char>      atomic_uchar;
00100 
00101   /// atomic_short
00102   typedef __atomic_base<short>          atomic_short;
00103 
00104   /// atomic_ushort
00105   typedef __atomic_base<unsigned short>     atomic_ushort;
00106 
00107   /// atomic_int
00108   typedef __atomic_base<int>                atomic_int;
00109 
00110   /// atomic_uint
00111   typedef __atomic_base<unsigned int>           atomic_uint;
00112 
00113   /// atomic_long
00114   typedef __atomic_base<long>               atomic_long;
00115 
00116   /// atomic_ulong
00117   typedef __atomic_base<unsigned long>      atomic_ulong;
00118 
00119   /// atomic_llong
00120   typedef __atomic_base<long long>          atomic_llong;
00121 
00122   /// atomic_ullong
00123   typedef __atomic_base<unsigned long long>     atomic_ullong;
00124 
00125   /// atomic_wchar_t
00126   typedef __atomic_base<wchar_t>        atomic_wchar_t;
00127 
00128   /// atomic_char16_t
00129   typedef __atomic_base<char16_t>       atomic_char16_t;
00130 
00131   /// atomic_char32_t
00132   typedef __atomic_base<char32_t>       atomic_char32_t;
00133 
00134   /// atomic_char32_t
00135   typedef __atomic_base<char32_t>       atomic_char32_t;
00136 
00137 
  // Typedefs for the atomic forms of the <stdint.h> least-width
  // integer types.

  /// atomic_int_least8_t
  typedef __atomic_base<int_least8_t>		atomic_int_least8_t;

  /// atomic_uint_least8_t
  typedef __atomic_base<uint_least8_t>		atomic_uint_least8_t;

  /// atomic_int_least16_t
  typedef __atomic_base<int_least16_t>		atomic_int_least16_t;

  /// atomic_uint_least16_t
  typedef __atomic_base<uint_least16_t>		atomic_uint_least16_t;

  /// atomic_int_least32_t
  typedef __atomic_base<int_least32_t>		atomic_int_least32_t;

  /// atomic_uint_least32_t
  typedef __atomic_base<uint_least32_t>		atomic_uint_least32_t;

  /// atomic_int_least64_t
  typedef __atomic_base<int_least64_t>		atomic_int_least64_t;

  /// atomic_uint_least64_t
  typedef __atomic_base<uint_least64_t>		atomic_uint_least64_t;


  // Typedefs for the atomic forms of the <stdint.h> fastest-width
  // integer types.

  /// atomic_int_fast8_t
  typedef __atomic_base<int_fast8_t>		atomic_int_fast8_t;

  /// atomic_uint_fast8_t
  typedef __atomic_base<uint_fast8_t>		atomic_uint_fast8_t;

  /// atomic_int_fast16_t
  typedef __atomic_base<int_fast16_t>		atomic_int_fast16_t;

  /// atomic_uint_fast16_t
  typedef __atomic_base<uint_fast16_t>		atomic_uint_fast16_t;

  /// atomic_int_fast32_t
  typedef __atomic_base<int_fast32_t>		atomic_int_fast32_t;

  /// atomic_uint_fast32_t
  typedef __atomic_base<uint_fast32_t>		atomic_uint_fast32_t;

  /// atomic_int_fast64_t
  typedef __atomic_base<int_fast64_t>		atomic_int_fast64_t;

  /// atomic_uint_fast64_t
  typedef __atomic_base<uint_fast64_t>		atomic_uint_fast64_t;


  // Typedefs for the atomic forms of the pointer-sized and
  // maximum-width integer types.

  /// atomic_intptr_t
  typedef __atomic_base<intptr_t>		atomic_intptr_t;

  /// atomic_uintptr_t
  typedef __atomic_base<uintptr_t>		atomic_uintptr_t;

  /// atomic_size_t
  typedef __atomic_base<size_t>			atomic_size_t;

  /// atomic_intmax_t
  typedef __atomic_base<intmax_t>		atomic_intmax_t;

  /// atomic_uintmax_t
  typedef __atomic_base<uintmax_t>		atomic_uintmax_t;

  /// atomic_ptrdiff_t
  typedef __atomic_base<ptrdiff_t>		atomic_ptrdiff_t;
00205 
00206 
  /// Initializer macro: brace-initializes an atomic with value _VI.
#define ATOMIC_VAR_INIT(_VI) { _VI }

  // Forward declarations of the generic atomic template and its
  // partial specialization for pointers (defined in <atomic>).
  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;

    /* The target's "set" value for test-and-set may not be exactly 1.  */
#if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
    typedef bool __atomic_flag_data_type;
#else
    typedef unsigned char __atomic_flag_data_type;
#endif
00221 
  /**
   *  @brief Base type for atomic_flag.
   *
   *  Base type is POD with data, allowing atomic_flag to derive from
   *  it and meet the standard layout type requirement. In addition to
   *  compatibility with a C interface, this allows different
   *  implementations of atomic_flag to use the same atomic operation
   *  functions, via a standard conversion to the __atomic_flag_base
   *  argument.
  */
  _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    // Raw flag storage; bool or unsigned char depending on the target's
    // test-and-set "true" value (see __atomic_flag_data_type above).
    __atomic_flag_data_type _M_i;
  };

  _GLIBCXX_END_EXTERN_C

  /// Initializer for atomic_flag: the flag starts in the clear state.
#define ATOMIC_FLAG_INIT { 0 }
00242 
00243   /// atomic_flag
00244   struct atomic_flag : public __atomic_flag_base
00245   {
00246     atomic_flag() noexcept = default;
00247     ~atomic_flag() noexcept = default;
00248     atomic_flag(const atomic_flag&) = delete;
00249     atomic_flag& operator=(const atomic_flag&) = delete;
00250     atomic_flag& operator=(const atomic_flag&) volatile = delete;
00251 
00252     // Conversion to ATOMIC_FLAG_INIT.
00253     constexpr atomic_flag(bool __i) noexcept
00254       : __atomic_flag_base{ _S_init(__i) }
00255     { }
00256 
00257     bool
00258     test_and_set(memory_order __m = memory_order_seq_cst) noexcept
00259     {
00260       return __atomic_test_and_set (&_M_i, __m);
00261     }
00262 
00263     bool
00264     test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
00265     {
00266       return __atomic_test_and_set (&_M_i, __m);
00267     }
00268 
00269     void
00270     clear(memory_order __m = memory_order_seq_cst) noexcept
00271     {
00272       __glibcxx_assert(__m != memory_order_consume);
00273       __glibcxx_assert(__m != memory_order_acquire);
00274       __glibcxx_assert(__m != memory_order_acq_rel);
00275 
00276       __atomic_clear (&_M_i, __m);
00277     }
00278 
00279     void
00280     clear(memory_order __m = memory_order_seq_cst) volatile noexcept
00281     {
00282       __glibcxx_assert(__m != memory_order_consume);
00283       __glibcxx_assert(__m != memory_order_acquire);
00284       __glibcxx_assert(__m != memory_order_acq_rel);
00285 
00286       __atomic_clear (&_M_i, __m);
00287     }
00288 
00289   private:
00290     static constexpr __atomic_flag_data_type
00291     _S_init(bool __i)
00292     { return __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0; }
00293   };
00294 
00295 
  /// Base class for atomic integrals.
  //
  // For each of the integral types, define atomic_[integral type] struct
  //
  // atomic_bool     bool
  // atomic_char     char
  // atomic_schar    signed char
  // atomic_uchar    unsigned char
  // atomic_short    short
  // atomic_ushort   unsigned short
  // atomic_int      int
  // atomic_uint     unsigned int
  // atomic_long     long
  // atomic_ulong    unsigned long
  // atomic_llong    long long
  // atomic_ullong   unsigned long long
  // atomic_char16_t char16_t
  // atomic_char32_t char32_t
  // atomic_wchar_t  wchar_t
  //
  // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
  // 8 bytes, since that is what GCC built-in functions for atomic
  // memory access expect.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp	__int_type;

      // Sole data member.  Runtime access always goes through the GCC
      // __atomic_* built-ins; the member is touched directly only for
      // constant initialization in the constexpr constructor.
      __int_type	_M_i;

    public:
      // Trivially default-constructible/destructible, but non-copyable:
      // there is no single sensible ordering for copying an atomic.
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }

      // Implicit conversion: a seq_cst load of the current value.
      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      // Assignment: a seq_cst store.  Returns the assigned value (not a
      // reference) so no second, racy read of _M_i is needed.
      __int_type
      operator=(__int_type __i) noexcept
      {
	store(__i);
	return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
	store(__i);
	return __i;
      }

      // Post-increment/decrement: seq_cst; returns the OLD value.
      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      // Pre-increment/decrement: seq_cst; returns the NEW value.
      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      // Compound assignment: atomic read-modify-write with seq_cst
      // ordering; each returns the NEW value (op_fetch built-ins).
      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      // True iff the compiler/runtime implements operations on an
      // object of this size and address without a lock.
      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }

      // Store: acquire-flavoured orders are invalid for a pure store
      // and are rejected in debug mode.
      void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
	__glibcxx_assert(__m != memory_order_acquire);
	__glibcxx_assert(__m != memory_order_acq_rel);
	__glibcxx_assert(__m != memory_order_consume);

	__atomic_store_n(&_M_i, __i, __m);
      }

      void
      store(__int_type __i,
	    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	__glibcxx_assert(__m != memory_order_acquire);
	__glibcxx_assert(__m != memory_order_acq_rel);
	__glibcxx_assert(__m != memory_order_consume);

	__atomic_store_n(&_M_i, __i, __m);
      }

      // Load: release-flavoured orders are invalid for a pure load and
      // are rejected in debug mode.
      __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
	__glibcxx_assert(__m != memory_order_release);
	__glibcxx_assert(__m != memory_order_acq_rel);

	return __atomic_load_n(&_M_i, __m);
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
	__glibcxx_assert(__m != memory_order_release);
	__glibcxx_assert(__m != memory_order_acq_rel);

	return __atomic_load_n(&_M_i, __m);
      }

      // Exchange: any memory order is valid; returns the OLD value.
      __int_type
      exchange(__int_type __i,
	       memory_order __m = memory_order_seq_cst) noexcept
      {
	return __atomic_exchange_n(&_M_i, __i, __m);
      }


      __int_type
      exchange(__int_type __i,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return __atomic_exchange_n(&_M_i, __i, __m);
      }

      // Compare-exchange (weak: may fail spuriously; the final built-in
      // argument 1 selects the weak form).  On failure the expected
      // value __i1 is updated with the observed value.  The failure
      // order __m2 may not include release semantics and may not be
      // stronger than the success order __m1 (debug-mode checks).
      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
			    memory_order __m1, memory_order __m2) noexcept
      {
	__glibcxx_assert(__m2 != memory_order_release);
	__glibcxx_assert(__m2 != memory_order_acq_rel);
	__glibcxx_assert(__m2 <= __m1);

	return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
			    memory_order __m1,
			    memory_order __m2) volatile noexcept
      {
	__glibcxx_assert(__m2 != memory_order_release);
	__glibcxx_assert(__m2 != memory_order_acq_rel);
	__glibcxx_assert(__m2 <= __m1);

	return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      // Single-order overloads derive the failure order from the
      // success order via __cmpexch_failure_order.
      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
			    memory_order __m = memory_order_seq_cst) noexcept
      {
	return compare_exchange_weak(__i1, __i2, __m,
				     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
		   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return compare_exchange_weak(__i1, __i2, __m,
				     __cmpexch_failure_order(__m));
      }

      // Compare-exchange (strong: never fails spuriously; built-in weak
      // argument is 0).  Same failure-order constraints as above.
      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
			      memory_order __m1, memory_order __m2) noexcept
      {
	__glibcxx_assert(__m2 != memory_order_release);
	__glibcxx_assert(__m2 != memory_order_acq_rel);
	__glibcxx_assert(__m2 <= __m1);

	return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
			      memory_order __m1,
			      memory_order __m2) volatile noexcept
      {
	__glibcxx_assert(__m2 != memory_order_release);
	__glibcxx_assert(__m2 != memory_order_acq_rel);
	__glibcxx_assert(__m2 <= __m1);

	return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
			      memory_order __m = memory_order_seq_cst) noexcept
      {
	return compare_exchange_strong(__i1, __i2, __m,
				       __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
		 memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return compare_exchange_strong(__i1, __i2, __m,
				       __cmpexch_failure_order(__m));
      }

      // fetch_* operations: atomic read-modify-write returning the OLD
      // value; any memory order is valid.
      __int_type
      fetch_add(__int_type __i,
		memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __int_type
      fetch_add(__int_type __i,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
		memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
		memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __int_type
      fetch_or(__int_type __i,
	       memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __int_type
      fetch_or(__int_type __i,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
		memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
    };
00619 
00620 
  /// Partial specialization for pointer types.
  //
  // Same shape as the integral primary template, with arithmetic scaled
  // by sizeof(_PTp) so operator++/fetch_add move by whole elements, as
  // ordinary pointer arithmetic does.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp* 	__pointer_type;

      __pointer_type 	_M_p;

      // Factored out to facilitate explicit specialization.
      // Scales an element count into a byte offset for the built-ins.
      // NOTE(review): non-static constexpr member functions — in C++11
      // these are implicitly const; presumably deliberate for this
      // GCC vintage.
      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) { return __d * sizeof(_PTp); }

      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) volatile { return __d * sizeof(_PTp); }

    public:
      // Trivially default-constructible/destructible, non-copyable.
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __pointer_type convertible to _M_p.
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }

      // Implicit conversion: a seq_cst load.
      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      // Assignment: a seq_cst store; returns the stored pointer.
      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
	store(__p);
	return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
	store(__p);
	return __p;
      }

      // Post-increment/decrement: seq_cst; returns the OLD pointer.
      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      // Pre-increment/decrement: seq_cst; returns the NEW pointer.
      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
				  memory_order_seq_cst); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
				  memory_order_seq_cst); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
				  memory_order_seq_cst); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
				  memory_order_seq_cst); }

      // Compound assignment: seq_cst read-modify-write by __d elements;
      // returns the NEW pointer.
      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
				  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
				  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
				  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
				  memory_order_seq_cst); }

      // True iff pointer-sized atomics are lock-free at this address.
      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(_M_type_size(1), &_M_p); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(_M_type_size(1), &_M_p); }

      // Store: acquire-flavoured orders are rejected in debug mode.
      void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) noexcept
      {
	__glibcxx_assert(__m != memory_order_acquire);
	__glibcxx_assert(__m != memory_order_acq_rel);
	__glibcxx_assert(__m != memory_order_consume);

	__atomic_store_n(&_M_p, __p, __m);
      }

      void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	__glibcxx_assert(__m != memory_order_acquire);
	__glibcxx_assert(__m != memory_order_acq_rel);
	__glibcxx_assert(__m != memory_order_consume);

	__atomic_store_n(&_M_p, __p, __m);
      }

      // Load: release-flavoured orders are rejected in debug mode.
      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
	__glibcxx_assert(__m != memory_order_release);
	__glibcxx_assert(__m != memory_order_acq_rel);

	return __atomic_load_n(&_M_p, __m);
      }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
	__glibcxx_assert(__m != memory_order_release);
	__glibcxx_assert(__m != memory_order_acq_rel);

	return __atomic_load_n(&_M_p, __m);
      }

      // Exchange: any order is valid; returns the OLD pointer.
      __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) noexcept
      {
	return __atomic_exchange_n(&_M_p, __p, __m);
      }


      __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return __atomic_exchange_n(&_M_p, __p, __m);
      }

      // Strong compare-exchange (built-in weak argument 0).  On failure
      // __p1 is updated with the observed pointer.  The failure order
      // may not include release semantics nor exceed the success order
      // (debug-mode checks).
      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1,
			      memory_order __m2) noexcept
      {
	__glibcxx_assert(__m2 != memory_order_release);
	__glibcxx_assert(__m2 != memory_order_acq_rel);
	__glibcxx_assert(__m2 <= __m1);

	return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1,
			      memory_order __m2) volatile noexcept
      {
	__glibcxx_assert(__m2 != memory_order_release);
	__glibcxx_assert(__m2 != memory_order_acq_rel);
	__glibcxx_assert(__m2 <= __m1);

	return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      // fetch_add/fetch_sub: read-modify-write by __d elements; return
      // the OLD pointer; any memory order is valid.
      __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
    };
00830 
00831   // @} group atomics
00832 
00833 _GLIBCXX_END_NAMESPACE_VERSION
00834 } // namespace std
00835 
00836 #endif