Advertisement
Guest User

Untitled

a guest
Sep 15th, 2019
717
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
C++ 34.72 KB | None | 0 0
  1. // atomic standard header
  2. #pragma once
  3. #ifndef _ATOMIC_
  4. #define _ATOMIC_
  5. #ifndef RC_INVOKED
  6. #include <yvals.h>
  7.  
  8.  #ifdef _M_CEE_PURE
  9.   #error <atomic> is not supported when compiling with /clr:pure.
  10.  #endif /* _M_CEE_PURE */
  11.  
  12.  #include <type_traits>
  13.  
  14.  
  15. #include <stddef.h>
  16. #include <stdint.h>
  17. #include <stdlib.h>
  18. #include <limits.h>
  19. #include <xatomic.h>
  20.  
  21.  #pragma pack(push,_CRT_PACKING)
  22.  #pragma warning(push,_STL_WARNING_LEVEL)
  23.  #pragma warning(disable: _STL_DISABLED_WARNINGS)
  24.  #pragma push_macro("new")
  25.  #undef new
  26.  
  27.  #pragma warning(disable: 4522) // multiple assignment operators specified
  28.  
  29. // Every occurrence of __declspec(nothrow) in this file is TRANSITION, VSO#174686
  30.  
  31.         // LOCK-FREE PROPERTY
  32. #define ATOMIC_BOOL_LOCK_FREE       2
  33. #define ATOMIC_CHAR_LOCK_FREE       2
  34. #define ATOMIC_CHAR16_T_LOCK_FREE   2
  35. #define ATOMIC_CHAR32_T_LOCK_FREE   2
  36. #define ATOMIC_WCHAR_T_LOCK_FREE    2
  37. #define ATOMIC_SHORT_LOCK_FREE      2
  38. #define ATOMIC_INT_LOCK_FREE        2
  39. #define ATOMIC_LONG_LOCK_FREE       2
  40. #define ATOMIC_LLONG_LOCK_FREE      2
  41. #define ATOMIC_POINTER_LOCK_FREE    2
  42.  
  43. _STD_BEGIN
  44. inline memory_order _Get_memory_order(memory_order _Order)
  45.     {   // get second memory_order argument for cas functions
  46.         // that take only one memory_order argument
  47.     return (_Order == memory_order_acq_rel ? memory_order_acquire
  48.         : _Order == memory_order_release ? memory_order_relaxed
  49.         : _Order);
  50.     }
  51.  
  52.         // FUNCTION TEMPLATE kill_dependency
  53. template<class _Ty>
  54.     _Ty kill_dependency(_Ty _Arg) _NOEXCEPT
  55.     {   // magic template that kills dependency ordering when called
  56.     return (_Arg);
  57.     }
  58.  
  59.         // GENERAL OPERATIONS ON ATOMIC TYPES (FORWARD DECLARATIONS)
// primary template, defined later in this header
template <class _Ty>
    struct atomic;
// Non-member forms of the atomic<_Ty> member operations. Each comes in a
// volatile and a non-volatile overload; the *_explicit variants take the
// memory_order argument(s) explicitly instead of defaulting to seq_cst.
template <class _Ty>
    _NODISCARD __declspec(nothrow) bool atomic_is_lock_free(const volatile atomic<_Ty> *);
template <class _Ty>
    _NODISCARD __declspec(nothrow) bool atomic_is_lock_free(const atomic<_Ty> *);
template <class _Ty>
    __declspec(nothrow) void atomic_init(volatile atomic<_Ty> *, _Ty);
template <class _Ty>
    __declspec(nothrow) void atomic_init(atomic<_Ty> *, _Ty);
template <class _Ty>
    __declspec(nothrow) void atomic_store(volatile atomic<_Ty> *, _Ty);
template <class _Ty>
    __declspec(nothrow) void atomic_store(atomic<_Ty> *, _Ty);
template <class _Ty>
    __declspec(nothrow) void atomic_store_explicit(volatile atomic<_Ty> *, _Ty, memory_order);
template <class _Ty>
    __declspec(nothrow) void atomic_store_explicit(atomic<_Ty> *, _Ty, memory_order);
template <class _Ty>
    _NODISCARD __declspec(nothrow) _Ty atomic_load(const volatile atomic<_Ty> *);
template <class _Ty>
    _NODISCARD __declspec(nothrow) _Ty atomic_load(const atomic<_Ty> *);
template <class _Ty>
    _NODISCARD __declspec(nothrow) _Ty atomic_load_explicit(const volatile atomic<_Ty> *, memory_order);
template <class _Ty>
    _NODISCARD __declspec(nothrow) _Ty atomic_load_explicit(const atomic<_Ty> *, memory_order);
template <class _Ty>
    __declspec(nothrow) _Ty atomic_exchange(volatile atomic<_Ty> *, _Ty);
template <class _Ty>
    __declspec(nothrow) _Ty atomic_exchange(atomic<_Ty> *, _Ty);
template <class _Ty>
    __declspec(nothrow) _Ty atomic_exchange_explicit(volatile atomic<_Ty> *, _Ty, memory_order);
template <class _Ty>
    __declspec(nothrow) _Ty atomic_exchange_explicit(atomic<_Ty> *, _Ty, memory_order);
// compare-exchange: second parameter is the expected value (in/out),
// third is the desired value; the _explicit forms take separate
// success and failure memory orders
template <class _Ty>
    __declspec(nothrow) bool atomic_compare_exchange_weak(volatile atomic<_Ty> *, _Ty *, _Ty);
template <class _Ty>
    __declspec(nothrow) bool atomic_compare_exchange_weak(atomic<_Ty> *, _Ty *, _Ty);
template <class _Ty>
    __declspec(nothrow) bool atomic_compare_exchange_weak_explicit(volatile atomic<_Ty> *, _Ty *, _Ty,
        memory_order, memory_order);
template <class _Ty>
    __declspec(nothrow) bool atomic_compare_exchange_weak_explicit(atomic<_Ty> *, _Ty *, _Ty,
        memory_order, memory_order);
template <class _Ty>
    __declspec(nothrow) bool atomic_compare_exchange_strong(volatile atomic<_Ty> *, _Ty *, _Ty);
template <class _Ty>
    __declspec(nothrow) bool atomic_compare_exchange_strong(atomic<_Ty> *, _Ty *, _Ty);
template <class _Ty>
    __declspec(nothrow) bool atomic_compare_exchange_strong_explicit(volatile atomic<_Ty> *, _Ty *, _Ty,
        memory_order, memory_order);
template <class _Ty>
    __declspec(nothrow) bool atomic_compare_exchange_strong_explicit(atomic<_Ty> *, _Ty *, _Ty,
        memory_order, memory_order);
  114.  
  115.         // TEMPLATED OPERATIONS ON ATOMIC TYPES (DECLARED BUT NOT DEFINED)
// These general templates are declared but deliberately never defined
// (see the section comment above): a call only compiles when a concrete
// overload for the given atomic type exists elsewhere — presumably the
// <xxatomic> expansions below; TODO confirm — so arithmetic/bitwise
// fetch operations on unsupported types are rejected.
template <class _Ty>
    __declspec(nothrow) _Ty atomic_fetch_add(volatile atomic<_Ty>*, _Ty);
template <class _Ty>
    __declspec(nothrow) _Ty atomic_fetch_add(atomic<_Ty>*, _Ty);
template <class _Ty>
    __declspec(nothrow) _Ty atomic_fetch_add_explicit(volatile atomic<_Ty>*, _Ty, memory_order);
template <class _Ty>
    __declspec(nothrow) _Ty atomic_fetch_add_explicit(atomic<_Ty>*, _Ty, memory_order);
template <class _Ty>
    __declspec(nothrow) _Ty atomic_fetch_sub(volatile atomic<_Ty>*, _Ty);
template <class _Ty>
    __declspec(nothrow) _Ty atomic_fetch_sub(atomic<_Ty>*, _Ty);
template <class _Ty>
    __declspec(nothrow) _Ty atomic_fetch_sub_explicit(volatile atomic<_Ty>*, _Ty, memory_order);
template <class _Ty>
    __declspec(nothrow) _Ty atomic_fetch_sub_explicit(atomic<_Ty>*, _Ty, memory_order);
template <class _Ty>
    __declspec(nothrow) _Ty atomic_fetch_and(volatile atomic<_Ty>*, _Ty);
template <class _Ty>
    __declspec(nothrow) _Ty atomic_fetch_and(atomic<_Ty>*, _Ty);
template <class _Ty>
    __declspec(nothrow) _Ty atomic_fetch_and_explicit(volatile atomic<_Ty>*, _Ty, memory_order);
template <class _Ty>
    __declspec(nothrow) _Ty atomic_fetch_and_explicit(atomic<_Ty>*, _Ty, memory_order);
template <class _Ty>
    __declspec(nothrow) _Ty atomic_fetch_or(volatile atomic<_Ty>*, _Ty);
template <class _Ty>
    __declspec(nothrow) _Ty atomic_fetch_or(atomic<_Ty>*, _Ty);
template <class _Ty>
    __declspec(nothrow) _Ty atomic_fetch_or_explicit(volatile atomic<_Ty>*, _Ty, memory_order);
template <class _Ty>
    __declspec(nothrow) _Ty atomic_fetch_or_explicit(atomic<_Ty>*, _Ty, memory_order);
template <class _Ty>
    __declspec(nothrow) _Ty atomic_fetch_xor(volatile atomic<_Ty>*, _Ty);
template <class _Ty>
    __declspec(nothrow) _Ty atomic_fetch_xor(atomic<_Ty>*, _Ty);
template <class _Ty>
    __declspec(nothrow) _Ty atomic_fetch_xor_explicit(volatile atomic<_Ty>*, _Ty, memory_order);
template <class _Ty>
    __declspec(nothrow) _Ty atomic_fetch_xor_explicit(atomic<_Ty>*, _Ty, memory_order);
  156.  
  157.         // STRUCT atomic_flag
  158. #define ATOMIC_FLAG_INIT    {0}
// C-style typedef-struct so the name works identically in C-compatible
// contexts; initialize with ATOMIC_FLAG_INIT ({0}) for a clear flag.
typedef struct atomic_flag
    {   // structure for managing flag with test-and-set semantics
    // test_and_set: atomically set the flag, return its previous state.
    // clear: atomically reset the flag. Both default to seq_cst and are
    // defined out-of-line just below; volatile overloads mirror each.
    __declspec(nothrow) bool test_and_set(memory_order _Order = memory_order_seq_cst) volatile;
    __declspec(nothrow) bool test_and_set(memory_order _Order = memory_order_seq_cst);
    __declspec(nothrow) void clear(memory_order _Order = memory_order_seq_cst) volatile;
    __declspec(nothrow) void clear(memory_order _Order = memory_order_seq_cst);

    // underlying flag storage, operated on by the _Atomic_flag_* helpers;
    // public so the non-member atomic_flag_* functions can reach it
    _Atomic_flag_t _My_flag;

    // default-constructible, but not copyable or copy-assignable
    atomic_flag() _NOEXCEPT = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;
    } atomic_flag;
  173.  
// out-of-line definitions of atomic_flag::test_and_set; both overloads
// forward to the _Atomic_flag_test_and_set helper with the caller's order
__declspec(nothrow) inline bool atomic_flag::test_and_set(memory_order _Order) volatile
    {   // atomically set *this to true and return previous value
    return (_Atomic_flag_test_and_set(&_My_flag, _Order));
    }

__declspec(nothrow) inline bool atomic_flag::test_and_set(memory_order _Order)
    {   // atomically set *this to true and return previous value
    return (_Atomic_flag_test_and_set(&_My_flag, _Order));
    }
  183.  
// out-of-line definitions of atomic_flag::clear; both overloads forward
// to the _Atomic_flag_clear helper with the caller's memory order
__declspec(nothrow) inline void atomic_flag::clear(memory_order _Order) volatile
    {   // atomically clear *this
    _Atomic_flag_clear(&_My_flag, _Order);
    }

__declspec(nothrow) inline void atomic_flag::clear(memory_order _Order)
    {   // atomically clear *this
    _Atomic_flag_clear(&_My_flag, _Order);
    }
  193.  
// non-member test-and-set: the plain forms hard-code seq_cst, the
// _explicit forms pass the caller-supplied order; volatile/non-volatile
// overloads are otherwise identical
__declspec(nothrow) inline bool atomic_flag_test_and_set(volatile atomic_flag *_Flag)
    {   // atomically set *_Flag to true and return previous value
    return (_Atomic_flag_test_and_set(&_Flag->_My_flag, memory_order_seq_cst));
    }

__declspec(nothrow) inline bool atomic_flag_test_and_set(atomic_flag *_Flag)
    {   // atomically set *_Flag to true and return previous value
    return (_Atomic_flag_test_and_set(&_Flag->_My_flag, memory_order_seq_cst));
    }

__declspec(nothrow) inline bool atomic_flag_test_and_set_explicit(volatile atomic_flag *_Flag, memory_order _Order)
    {   // atomically set *_Flag to true and return previous value
    return (_Atomic_flag_test_and_set(&_Flag->_My_flag, _Order));
    }

__declspec(nothrow) inline bool atomic_flag_test_and_set_explicit(atomic_flag *_Flag, memory_order _Order)
    {   // atomically set *_Flag to true and return previous value
    return (_Atomic_flag_test_and_set(&_Flag->_My_flag, _Order));
    }
  213.  
// non-member clear: plain forms hard-code seq_cst, the _explicit forms
// pass the caller-supplied order; volatile/non-volatile overloads match
__declspec(nothrow) inline void atomic_flag_clear(volatile atomic_flag *_Flag)
    {   // atomically clear *_Flag
    _Atomic_flag_clear(&_Flag->_My_flag, memory_order_seq_cst);
    }

__declspec(nothrow) inline void atomic_flag_clear(atomic_flag *_Flag)
    {   // atomically clear *_Flag
    _Atomic_flag_clear(&_Flag->_My_flag, memory_order_seq_cst);
    }

__declspec(nothrow) inline void atomic_flag_clear_explicit(volatile atomic_flag *_Flag, memory_order _Order)
    {   // atomically clear *_Flag
    _Atomic_flag_clear(&_Flag->_My_flag, _Order);
    }

__declspec(nothrow) inline void atomic_flag_clear_explicit(atomic_flag *_Flag, memory_order _Order)
    {   // atomically clear *_Flag
    _Atomic_flag_clear(&_Flag->_My_flag, _Order);
    }
  233.  
  234.         // STRUCT TEMPLATE _Atomic_impl
// Primary template: the locking fallback used for object sizes that have
// no lock-free specialization below. All operations funnel through the
// _Atomic_* helpers together with _My_flag, which acts as the per-object
// lock; hence _Is_lock_free() is false here.
template<unsigned _Bytes>
    struct _Atomic_impl
    {   // struct for managing locks around operations on atomic types
    typedef _Uint1_t _My_int;   // "1 byte" means "no alignment required"

#if defined(__clang__) || defined(__EDG__)
    constexpr _Atomic_impl() noexcept // TRANSITION, LLVM#33235 and TRANSITION, VSO#493135
#else /* defined(__clang__) || defined(__EDG__) */
    __declspec(nothrow) constexpr _Atomic_impl()
#endif /* defined(__clang__) || defined(__EDG__) */
        : _My_flag(0)
        {   // default constructor
        }

    bool _Is_lock_free() const volatile
        {   // operations that use locks are not lock-free
        return (false);
        }

    void _Store(void *_Tgt, const void *_Src, memory_order _Order) volatile
        {   // lock and store
        _Atomic_copy(&_My_flag, _Bytes, _Tgt, _Src, _Order);
        }

    // load shares _Atomic_copy with store: it copies _Bytes from _Src
    // (the atomic object) into _Tgt (the caller's buffer) under the lock
    void _Load(void *_Tgt, const void *_Src,
        memory_order _Order) const volatile
        {   // lock and load
        _Atomic_copy(&_My_flag, _Bytes, _Tgt, _Src, _Order);
        }

    void _Exchange(void *_Left, void *_Right, memory_order _Order) volatile
        {   // lock and exchange
        _Atomic_exchange(&_My_flag, _Bytes, _Left, _Right, _Order);
        }

    bool _Compare_exchange_weak(
        void *_Tgt, void *_Exp, const void *_Value,
        memory_order _Order1, memory_order _Order2) volatile
        {   // lock and compare/exchange
        return (_Atomic_compare_exchange_weak(
            &_My_flag, _Bytes, _Tgt, _Exp, _Value, _Order1, _Order2));
        }

    bool _Compare_exchange_strong(
        void *_Tgt, void *_Exp, const void *_Value,
        memory_order _Order1, memory_order _Order2) volatile
        {   // lock and compare/exchange
        return (_Atomic_compare_exchange_strong(
            &_My_flag, _Bytes, _Tgt, _Exp, _Value, _Order1, _Order2));
        }

private:
    // mutable so the const-qualified _Load path can still take the lock
    mutable _Atomic_flag_t _My_flag;
    };
  289.  
  290.         // SPECIALIZATIONS OF _Atomic_impl
  291.  
// 1-byte specialization: every operation maps directly onto a lock-free
// _Atomic_*_1 primitive, so no lock member is needed and the type stays
// empty apart from its operations.
template<>
    struct _Atomic_impl<1U>
    {   // struct for managing lock-free operations on 1-byte atomic types
    typedef _Uint1_t _My_int;

    bool _Is_lock_free() const volatile
        {   // all operations are lock-free
        return (true);
        }

    void _Store(void *_Tgt, const void *_Src, memory_order _Order) volatile
        {   // store
        _Atomic_store_1((_My_int*)_Tgt, *(_My_int*)_Src, _Order);
        }

    void _Load(void *_Tgt, const void *_Src,
        memory_order _Order) const volatile
        {   // load
        *(_My_int*)_Tgt = _Atomic_load_1((_My_int*)_Src, _Order);
        }

    // note: writes the previous value of *_Left back into *_Right
    void _Exchange(void *_Left, void *_Right, memory_order _Order) volatile
        {   // exchange
        *(_My_int*)_Right =
            _Atomic_exchange_1((_My_int*)_Left, *(_My_int*)_Right, _Order);
        }

    bool _Compare_exchange_weak(
        void *_Tgt, void *_Exp, const void *_Value,
        memory_order _Order1, memory_order _Order2) volatile
        {   // compare/exchange
        return (_Atomic_compare_exchange_weak_1(
            (_My_int*)_Tgt, (_My_int*)_Exp, *(_My_int*)_Value,
            _Order1, _Order2));
        }

    bool _Compare_exchange_strong(
        void *_Tgt, void *_Exp, const void *_Value,
        memory_order _Order1, memory_order _Order2) volatile
        {   // compare/exchange
        return (_Atomic_compare_exchange_strong_1(
            (_My_int*)_Tgt, (_My_int*)_Exp, *(_My_int*)_Value,
            _Order1, _Order2));
        }
    };
  337.  
// 2-byte specialization: identical shape to the 1-byte one, forwarding
// to the lock-free _Atomic_*_2 primitives.
template<>
    struct _Atomic_impl<2U>
    {   // struct for managing lock-free operations on 2-byte atomic types
    typedef _Uint2_t _My_int;

    bool _Is_lock_free() const volatile
        {   // all operations are lock-free
        return (true);
        }

    void _Store(void *_Tgt, const void *_Src, memory_order _Order) volatile
        {   // store
        _Atomic_store_2((_My_int*)_Tgt, *(_My_int*)_Src, _Order);
        }

    void _Load(void *_Tgt, const void *_Src,
        memory_order _Order) const volatile
        {   // load
        *(_My_int*)_Tgt = _Atomic_load_2((_My_int*)_Src, _Order);
        }

    // note: writes the previous value of *_Left back into *_Right
    void _Exchange(void *_Left, void *_Right, memory_order _Order) volatile
        {   // exchange
        *(_My_int*)_Right =
            _Atomic_exchange_2((_My_int*)_Left, *(_My_int*)_Right, _Order);
        }

    bool _Compare_exchange_weak(
        void *_Tgt, void *_Exp, const void *_Value,
        memory_order _Order1, memory_order _Order2) volatile
        {   // compare/exchange
        return (_Atomic_compare_exchange_weak_2(
            (_My_int*)_Tgt, (_My_int*)_Exp, *(_My_int*)_Value,
            _Order1, _Order2));
        }

    bool _Compare_exchange_strong(
        void *_Tgt, void *_Exp, const void *_Value,
        memory_order _Order1, memory_order _Order2) volatile
        {   // compare/exchange
        return (_Atomic_compare_exchange_strong_2(
            (_My_int*)_Tgt, (_My_int*)_Exp, *(_My_int*)_Value,
            _Order1, _Order2));
        }
    };
  383.  
// 4-byte specialization: identical shape to the 1-byte one, forwarding
// to the lock-free _Atomic_*_4 primitives.
template<>
    struct _Atomic_impl<4U>
    {   // struct for managing lock-free operations on 4-byte atomic types
    typedef _Uint4_t _My_int;

    bool _Is_lock_free() const volatile
        {   // all operations are lock-free
        return (true);
        }

    void _Store(void *_Tgt, const void *_Src, memory_order _Order) volatile
        {   // store
        _Atomic_store_4((_My_int*)_Tgt, *(_My_int*)_Src, _Order);
        }

    void _Load(void *_Tgt, const void *_Src,
        memory_order _Order) const volatile
        {   // load
        *(_My_int*)_Tgt = _Atomic_load_4((_My_int*)_Src, _Order);
        }

    // note: writes the previous value of *_Left back into *_Right
    void _Exchange(void *_Left, void *_Right, memory_order _Order) volatile
        {   // exchange
        *(_My_int*)_Right =
            _Atomic_exchange_4((_My_int*)_Left, *(_My_int*)_Right, _Order);
        }

    bool _Compare_exchange_weak(
        void *_Tgt, void *_Exp, const void *_Value,
        memory_order _Order1, memory_order _Order2) volatile
        {   // compare/exchange
        return (_Atomic_compare_exchange_weak_4(
            (_My_int*)_Tgt, (_My_int*)_Exp, *(_My_int*)_Value,
            _Order1, _Order2));
        }

    bool _Compare_exchange_strong(
        void *_Tgt, void *_Exp, const void *_Value,
        memory_order _Order1, memory_order _Order2) volatile
        {   // compare/exchange
        return (_Atomic_compare_exchange_strong_4(
            (_My_int*)_Tgt, (_My_int*)_Exp, *(_My_int*)_Value,
            _Order1, _Order2));
        }
    };
  429.  
// 8-byte specialization: identical shape to the 1-byte one, forwarding
// to the lock-free _Atomic_*_8 primitives.
template<>
    struct _Atomic_impl<8U>
    {   // struct for managing lock-free operations on 8-byte atomic types
    typedef _Uint8_t _My_int;

    bool _Is_lock_free() const volatile
        {   // all operations are lock-free
        return (true);
        }

    void _Store(void *_Tgt, const void *_Src, memory_order _Order) volatile
        {   // store
        _Atomic_store_8((_My_int*)_Tgt, *(_My_int*)_Src, _Order);
        }

    void _Load(void *_Tgt, const void *_Src,
        memory_order _Order) const volatile
        {   // load
        *(_My_int*)_Tgt = _Atomic_load_8((_My_int*)_Src, _Order);
        }

    // note: writes the previous value of *_Left back into *_Right
    void _Exchange(void *_Left, void *_Right, memory_order _Order) volatile
        {   // exchange
        *(_My_int*)_Right =
            _Atomic_exchange_8((_My_int*)_Left, *(_My_int*)_Right, _Order);
        }

    bool _Compare_exchange_weak(
        void *_Tgt, void *_Exp, const void *_Value,
        memory_order _Order1, memory_order _Order2) volatile
        {   // compare/exchange
        return (_Atomic_compare_exchange_weak_8(
            (_My_int*)_Tgt, (_My_int*)_Exp, *(_My_int*)_Value,
            _Order1, _Order2));
        }

    bool _Compare_exchange_strong(
        void *_Tgt, void *_Exp, const void *_Value,
        memory_order _Order1, memory_order _Order2) volatile
        {   // compare/exchange
        return (_Atomic_compare_exchange_strong_8(
            (_My_int*)_Tgt, (_My_int*)_Exp, *(_My_int*)_Value,
            _Order1, _Order2));
        }
    };
  475.  
  476.         // STRUCT TEMPLATE _Atomic_base
// Implements the bulk of std::atomic<_Ty>'s interface on top of the
// size-selected _Atomic_impl<_Bytes> base. Every member comes in a
// volatile and a non-volatile overload; in the volatile overloads the
// C-style (void *)/(const void *) casts strip the volatile qualification
// that _My_val picks up inside a volatile member function.
template<class _Ty,
    unsigned _Bytes>
    struct _Atomic_base
        : _Atomic_impl<_Bytes>
    {   // struct that defines most member functions of std::atomic
    typedef _Atomic_impl<_Bytes> _Mybase;
    typedef typename _Mybase::_My_int _My_int;

    __declspec(nothrow) constexpr _Atomic_base(_Ty _Val)
        : _My_val(_Val)
        {   // construct from _Val, initialization is not an atomic operation
        }

    // default-constructible, but not copyable or copy-assignable
    _Atomic_base() _NOEXCEPT = default;
    _Atomic_base(const _Atomic_base&) = delete;
    _Atomic_base& operator=(const _Atomic_base&) = delete;
    _Atomic_base& operator=(const _Atomic_base&) volatile = delete;

    // assignment stores with seq_cst and returns the assigned value
    // (not the previous contents)
    __declspec(nothrow) _Ty operator=(_Ty _Right) volatile
        {   // assign from _Right
        this->_Store((void *)_STD addressof(_My_val), _STD addressof(_Right), memory_order_seq_cst);
        return (_Right);
        }

    __declspec(nothrow) _Ty operator=(_Ty _Right)
        {   // assign from _Right
        this->_Store(_STD addressof(_My_val), _STD addressof(_Right), memory_order_seq_cst);
        return (_Right);
        }

    _NODISCARD __declspec(nothrow) bool is_lock_free() const volatile
        {   // return true if operations are lock-free
        return (this->_Is_lock_free());
        }

    _NODISCARD __declspec(nothrow) bool is_lock_free() const
        {   // return true if operations are lock-free
        return (this->_Is_lock_free());
        }

    __declspec(nothrow) void store(_Ty _Value, memory_order _Order = memory_order_seq_cst) volatile
        {   // store _Value into *this
        this->_Store((void *)_STD addressof(_My_val), _STD addressof(_Value), _Order);
        }

    __declspec(nothrow) void store(_Ty _Value, memory_order _Order = memory_order_seq_cst)
        {   // store _Value into *this
        this->_Store(_STD addressof(_My_val), _STD addressof(_Value), _Order);
        }

    // load reads into a suitably-aligned local buffer and reinterprets,
    // since _Load traffics in untyped storage
    _NODISCARD __declspec(nothrow) _Ty load(memory_order _Order = memory_order_seq_cst) const volatile
        {   // return value stored in *this
        aligned_union_t<1, _Ty> _Result; // TRANSITION, VSO#456881
        this->_Load(&_Result, (const void *)_STD addressof(_My_val), _Order);
        return (reinterpret_cast<_Ty&>(_Result));
        }

    _NODISCARD __declspec(nothrow) _Ty load(memory_order _Order = memory_order_seq_cst) const
        {   // return value stored in *this
        aligned_union_t<1, _Ty> _Result; // TRANSITION, VSO#456881
        this->_Load(&_Result, _STD addressof(_My_val), _Order);
        return (reinterpret_cast<_Ty&>(_Result));
        }

    // implicit conversion performs a seq_cst load
    __declspec(nothrow) operator _Ty() const volatile
        {   // return value stored in *this
        return (load());
        }

    __declspec(nothrow) operator _Ty() const
        {   // return value stored in *this
        return (load());
        }

    // _Exchange writes the previous contents back into _Value's storage
    // (see _Atomic_impl), so returning _Value yields the old value
    __declspec(nothrow) _Ty exchange(_Ty _Value, memory_order _Order = memory_order_seq_cst) volatile
        {   // exchange value stored in *this with _Value
        this->_Exchange((void *)_STD addressof(_My_val), _STD addressof(_Value), _Order);
        return (_Value);
        }

    __declspec(nothrow) _Ty exchange(_Ty _Value, memory_order _Order = memory_order_seq_cst)
        {   // exchange value stored in *this with _Value
        this->_Exchange(_STD addressof(_My_val), _STD addressof(_Value), _Order);
        return (_Value);
        }

    __declspec(nothrow) bool compare_exchange_weak(_Ty& _Exp, _Ty _Value,
        memory_order _Order1, memory_order _Order2) volatile
        {   // compare and exchange value stored in *this with *_Exp, _Value
        return (this->_Compare_exchange_weak(
            (void *)_STD addressof(_My_val), _STD addressof(_Exp), _STD addressof(_Value),
                _Order1, _Order2));
        }

    __declspec(nothrow) bool compare_exchange_weak(_Ty& _Exp, _Ty _Value,
        memory_order _Order1, memory_order _Order2)
        {   // compare and exchange value stored in *this with *_Exp, _Value
        return (this->_Compare_exchange_weak(
            _STD addressof(_My_val), _STD addressof(_Exp), _STD addressof(_Value),
                _Order1, _Order2));
        }

    // single-order overloads derive the failure order with
    // _Get_memory_order and delegate to the two-order overloads
    __declspec(nothrow) bool compare_exchange_weak(_Ty& _Exp, _Ty _Value,
        memory_order _Order = memory_order_seq_cst) volatile
        {   // compare and exchange value stored in *this with *_Exp, _Value
        return (compare_exchange_weak(
            _Exp, _Value, _Order, _Get_memory_order(_Order)));
        }

    __declspec(nothrow) bool compare_exchange_weak(_Ty& _Exp, _Ty _Value,
        memory_order _Order = memory_order_seq_cst)
        {   // compare and exchange value stored in *this with *_Exp, _Value
        return (compare_exchange_weak(
            _Exp, _Value, _Order, _Get_memory_order(_Order)));
        }

    __declspec(nothrow) bool compare_exchange_strong(_Ty& _Exp, _Ty _Value,
        memory_order _Order1, memory_order _Order2) volatile
        {   // compare and exchange value stored in *this with *_Exp, _Value
        return (this->_Compare_exchange_strong(
            (void *)_STD addressof(_My_val), _STD addressof(_Exp), _STD addressof(_Value),
                _Order1, _Order2));
        }

    __declspec(nothrow) bool compare_exchange_strong(_Ty& _Exp, _Ty _Value,
        memory_order _Order1, memory_order _Order2)
        {   // compare and exchange value stored in *this with *_Exp, _Value
        return (this->_Compare_exchange_strong(
            _STD addressof(_My_val), _STD addressof(_Exp), _STD addressof(_Value),
                _Order1, _Order2));
        }

    __declspec(nothrow) bool compare_exchange_strong(_Ty& _Exp, _Ty _Value,
        memory_order _Order = memory_order_seq_cst) volatile
        {   // compare and exchange value stored in *this with *_Exp, _Value
        return (compare_exchange_strong(
            _Exp, _Value, _Order, _Get_memory_order(_Order)));
        }

    __declspec(nothrow) bool compare_exchange_strong(_Ty& _Exp, _Ty _Value,
        memory_order _Order = memory_order_seq_cst)
        {   // compare and exchange value stored in *this with *_Exp, _Value
        return (compare_exchange_strong(
            _Exp, _Value, _Order, _Get_memory_order(_Order)));
        }

#ifndef _ENABLE_ATOMIC_ALIGNMENT_FIX
    static_assert(alignof(_Ty) >= sizeof(_My_int),
        "You've instantiated std::atomic<T> with sizeof(T) equal to 2/4/8 and alignof(T) < sizeof(T). "
        "Before VS 2015 Update 2, this would have misbehaved at runtime. "
        "VS 2015 Update 2 was fixed to handle this correctly, "
        "but the fix inherently changes layout and breaks binary compatibility. "
        "Please define _ENABLE_ATOMIC_ALIGNMENT_FIX to acknowledge that you understand this, "
        "and that everything you're linking has been compiled with VS 2015 Update 2 (or later).");
#endif /* _ENABLE_ATOMIC_ALIGNMENT_FIX */

    // over-aligned to the operation width so the lock-free intrinsics can
    // be used on it; note this affects layout (see static_assert above)
    alignas(sizeof(_My_int)) alignas(_Ty) _Ty _My_val;
    };
  635.  
  636.         // STRUCT TEMPLATE atomic
// Primary std::atomic template: inherits the full operation set from
// _Atomic_base, selected by sizeof(_Ty); adds the trivially-copyable
// requirement and the C++17 lock-free traits.
template<class _Ty>
    struct atomic
        : _Atomic_base<_Ty, sizeof (_Ty)>
    {   // template that manages values of _Ty atomically
    static_assert(is_trivially_copyable_v<_Ty>,
        "atomic<T> requires T to be trivially copyable.");

    typedef _Atomic_base<_Ty, sizeof (_Ty)> _My_base;

    using value_type = _Ty;

#if _HAS_CXX17
    // lock-free exactly when sizeof(_Ty) matches one of the lock-free
    // _Atomic_impl specializations (1/2/4/8 bytes)
    static constexpr bool is_always_lock_free =
        sizeof(_Ty) == 1
        || sizeof(_Ty) == 2
        || sizeof(_Ty) == 4
        || sizeof(_Ty) == 8;

    static constexpr bool _Is_usually_lock_free = is_always_lock_free;
#endif /* _HAS_CXX17 */

    // default-constructible, but not copyable or copy-assignable
    atomic() _NOEXCEPT = default;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;

    __declspec(nothrow) constexpr atomic(_Ty _Val)
        : _My_base(_Val)
        {   // construct from _Val
        }

    // value assignment delegates to the base, which performs a seq_cst
    // store and returns the assigned value
    __declspec(nothrow) _Ty operator=(_Ty _Right) volatile
        {   // assign from _Right
        return (_My_base::operator=(_Right));
        }

    __declspec(nothrow) _Ty operator=(_Ty _Right)
        {   // assign from _Right
        return (_My_base::operator=(_Right));
        }
    };
#define ATOMIC_VAR_INIT(_Val)   { _Val }

// Each group below parameterizes <xxatomic>, which is used as a textual
// template: it is re-included once per type.  _ATOMIC_ITYPE names the
// internal atomic struct to generate, _ITYPE is the plain type it wraps,
// and _ISIZE its size in bytes.  _ATOMIC_HAS_ARITHMETIC_OPS additionally
// enables fetch_add/fetch_sub and related operators for integral types.
// (The control macros are evidently #undef'd inside <xxatomic>, since each
// group re-#defines them — confirm in <xxatomic>.)

// bool: no arithmetic operations
#define _ATOMIC_ITYPE   _Atomic_bool
#define _ITYPE  bool
#define _ISIZE  1
#include <xxatomic>

// character types
#define _ATOMIC_ITYPE   _Atomic_char
#define _ITYPE  char
#define _ISIZE  1
#define _ATOMIC_HAS_ARITHMETIC_OPS
#include <xxatomic>

#define _ATOMIC_ITYPE   _Atomic_schar
#define _ITYPE signed char
#define _ISIZE 1
#define _ATOMIC_HAS_ARITHMETIC_OPS
#include <xxatomic>

#define _ATOMIC_ITYPE   _Atomic_uchar
#define _ITYPE unsigned char
#define _ISIZE 1
#define _ATOMIC_HAS_ARITHMETIC_OPS
#include <xxatomic>

#define _ATOMIC_ITYPE   _Atomic_char16_t
#define _ITYPE char16_t
#define _ISIZE 2
#define _ATOMIC_HAS_ARITHMETIC_OPS
#include <xxatomic>

#define _ATOMIC_ITYPE   _Atomic_char32_t
#define _ITYPE char32_t
#define _ISIZE 4
#define _ATOMIC_HAS_ARITHMETIC_OPS
#include <xxatomic>

// wchar_t: when it is not a distinct native type (/Zc:wchar_t-), it aliases
// unsigned short, so the atomic<wchar_t> specialization must be suppressed
// to avoid colliding with atomic<unsigned short>
#define _ATOMIC_ITYPE   _Atomic_wchar_t
#define _ITYPE wchar_t
#define _ISIZE _WCHAR_T_SIZE
#define _ATOMIC_HAS_ARITHMETIC_OPS

 #ifndef _NATIVE_WCHAR_T_DEFINED
 #define _ATOMIC_HAS_NO_SPECIALIZATION
 #endif /* _NATIVE_WCHAR_T_DEFINED */

#include <xxatomic>

// standard integer types
#define _ATOMIC_ITYPE   _Atomic_short
#define _ITYPE short
#define _ISIZE _SHORT_SIZE
#define _ATOMIC_HAS_ARITHMETIC_OPS
#include <xxatomic>

#define _ATOMIC_ITYPE   _Atomic_ushort
#define _ITYPE unsigned short
#define _ISIZE _SHORT_SIZE
#define _ATOMIC_HAS_ARITHMETIC_OPS
#include <xxatomic>

#define _ATOMIC_ITYPE   _Atomic_int
#define _ITYPE int
#define _ISIZE _INT_SIZE
#define _ATOMIC_HAS_ARITHMETIC_OPS
#include <xxatomic>

#define _ATOMIC_ITYPE   _Atomic_uint
#define _ITYPE unsigned int
#define _ISIZE _INT_SIZE
#define _ATOMIC_HAS_ARITHMETIC_OPS
#include <xxatomic>

#define _ATOMIC_ITYPE   _Atomic_long
#define _ITYPE long
#define _ISIZE _LONG_SIZE
#define _ATOMIC_HAS_ARITHMETIC_OPS
#include <xxatomic>

#define _ATOMIC_ITYPE   _Atomic_ulong
#define _ITYPE unsigned long
#define _ISIZE _LONG_SIZE
#define _ATOMIC_HAS_ARITHMETIC_OPS
#include <xxatomic>

#define _ATOMIC_ITYPE   _Atomic_llong
#define _ITYPE long long
#define _ISIZE _LONGLONG_SIZE
#define _ATOMIC_HAS_ARITHMETIC_OPS
#include <xxatomic>

#define _ATOMIC_ITYPE   _Atomic_ullong
#define _ITYPE unsigned long long
#define _ISIZE _LONGLONG_SIZE
#define _ATOMIC_HAS_ARITHMETIC_OPS
#include <xxatomic>

// void *: internal address type only; no atomic<void *> specialization is
// generated (_ATOMIC_HAS_NO_SPECIALIZATION)
#define _ATOMIC_ITYPE   _Atomic_address
#define _ITYPE void *
#define _ISIZE _ADDR_SIZE
#define _ATOMIC_IS_ADDRESS_TYPE
#define _ATOMIC_HAS_NO_SPECIALIZATION
#include <xxatomic>
  779.  
  780.         // GENERAL OPERATIONS ON ATOMIC TYPES
  781. template <class _Ty>
  782.     _NODISCARD __declspec(nothrow) inline bool atomic_is_lock_free(const volatile atomic<_Ty> *_Atom)
  783.     {
  784.     return (_Atom->is_lock_free());
  785.     }
  786.  
  787. template <class _Ty>
  788.     _NODISCARD __declspec(nothrow) inline bool atomic_is_lock_free(const atomic<_Ty> *_Atom)
  789.     {
  790.     return (_Atom->is_lock_free());
  791.     }
  792.  
  793. template <class _Ty>
  794.     __declspec(nothrow) inline void atomic_init(volatile atomic<_Ty> *_Atom, _Ty _Value)
  795.     {
  796.     *_Atom = _Value;
  797.     }
  798.  
  799. template <class _Ty>
  800.     __declspec(nothrow) inline void atomic_init(atomic<_Ty> *_Atom, _Ty _Value)
  801.     {
  802.     *_Atom = _Value;
  803.     }
  804.  
  805. template <class _Ty>
  806.     __declspec(nothrow) inline void atomic_store(volatile atomic<_Ty> *_Atom, _Ty _Value)
  807.     {
  808.     _Atom->store(_Value);
  809.     }
  810.  
  811. template <class _Ty>
  812.     __declspec(nothrow) inline void atomic_store(atomic<_Ty> *_Atom, _Ty _Value)
  813.     {
  814.     _Atom->store(_Value);
  815.     }
  816.  
  817. template <class _Ty>
  818.     __declspec(nothrow) inline void atomic_store_explicit(volatile atomic<_Ty> *_Atom, _Ty _Value,
  819.         memory_order _Order)
  820.     {
  821.     _Atom->store(_Value, _Order);
  822.     }
  823.  
  824. template <class _Ty>
  825.     __declspec(nothrow) inline void atomic_store_explicit(atomic<_Ty> *_Atom, _Ty _Value, memory_order _Order)
  826.     {
  827.     _Atom->store(_Value, _Order);
  828.     }
  829.  
  830. template <class _Ty>
  831.     _NODISCARD __declspec(nothrow) inline _Ty atomic_load(const volatile atomic<_Ty> *_Atom)
  832.     {
  833.     return (_Atom->load());
  834.     }
  835.  
  836. template <class _Ty>
  837.     _NODISCARD __declspec(nothrow) inline _Ty atomic_load(const atomic<_Ty> *_Atom)
  838.     {
  839.     return (_Atom->load());
  840.     }
  841.  
  842. template <class _Ty>
  843.     _NODISCARD __declspec(nothrow) inline _Ty atomic_load_explicit(const volatile atomic<_Ty> *_Atom,
  844.         memory_order _Order)
  845.     {
  846.     return (_Atom->load(_Order));
  847.     }
  848.  
  849. template <class _Ty>
  850.     _NODISCARD __declspec(nothrow) inline _Ty atomic_load_explicit(const atomic<_Ty> *_Atom, memory_order _Order)
  851.     {
  852.     return (_Atom->load(_Order));
  853.     }
  854.  
  855. template <class _Ty>
  856.     __declspec(nothrow) inline _Ty atomic_exchange(volatile atomic<_Ty> *_Atom, _Ty _Value)
  857.     {
  858.     return (_Atom->exchange(_Value));
  859.     }
  860.  
  861. template <class _Ty>
  862.     __declspec(nothrow) inline _Ty atomic_exchange(atomic<_Ty> *_Atom, _Ty _Value)
  863.     {
  864.     return (_Atom->exchange(_Value));
  865.     }
  866.  
  867. template <class _Ty>
  868.     __declspec(nothrow) inline _Ty atomic_exchange_explicit(volatile atomic<_Ty> *_Atom, _Ty _Value,
  869.         memory_order _Order)
  870.     {
  871.     return (_Atom->exchange(_Value, _Order));
  872.     }
  873.  
  874. template <class _Ty>
  875.     __declspec(nothrow) inline _Ty atomic_exchange_explicit(atomic<_Ty> *_Atom, _Ty _Value,
  876.         memory_order _Order)
  877.     {
  878.     return (_Atom->exchange(_Value, _Order));
  879.     }
  880.  
  881. template <class _Ty>
  882.     __declspec(nothrow) inline bool atomic_compare_exchange_weak(
  883.         volatile atomic<_Ty> *_Atom, _Ty *_Exp, _Ty _Value)
  884.     {
  885.     return (_Atom->compare_exchange_weak(*_Exp, _Value));
  886.     }
  887.  
  888. template <class _Ty>
  889.     __declspec(nothrow) inline bool atomic_compare_exchange_weak(
  890.         atomic<_Ty> *_Atom, _Ty *_Exp, _Ty _Value)
  891.     {
  892.     return (_Atom->compare_exchange_weak(*_Exp, _Value));
  893.     }
  894.  
  895. template <class _Ty>
  896.     __declspec(nothrow) inline bool atomic_compare_exchange_weak_explicit(
  897.         volatile atomic<_Ty> *_Atom, _Ty *_Exp, _Ty _Value,
  898.         memory_order _Order1, memory_order _Order2)
  899.     {
  900.     return (_Atom->compare_exchange_weak(*_Exp, _Value, _Order1, _Order2));
  901.     }
  902.  
  903. template <class _Ty>
  904.     __declspec(nothrow) inline bool atomic_compare_exchange_weak_explicit(
  905.         atomic<_Ty> *_Atom, _Ty *_Exp, _Ty _Value,
  906.         memory_order _Order1, memory_order _Order2)
  907.     {
  908.     return (_Atom->compare_exchange_weak(*_Exp, _Value, _Order1, _Order2));
  909.     }
  910.  
  911. template <class _Ty>
  912.     __declspec(nothrow) inline bool atomic_compare_exchange_strong(
  913.         volatile atomic<_Ty> *_Atom, _Ty *_Exp, _Ty _Value)
  914.     {
  915.     return (_Atom->compare_exchange_strong(*_Exp, _Value));
  916.     }
  917.  
  918. template <class _Ty>
  919.     __declspec(nothrow) inline bool atomic_compare_exchange_strong(
  920.         atomic<_Ty> *_Atom, _Ty *_Exp, _Ty _Value)
  921.     {
  922.     return (_Atom->compare_exchange_strong(*_Exp, _Value));
  923.     }
  924.  
  925. template <class _Ty>
  926.     __declspec(nothrow) inline bool atomic_compare_exchange_strong_explicit(
  927.         volatile atomic<_Ty> *_Atom, _Ty *_Exp, _Ty _Value,
  928.         memory_order _Order1, memory_order _Order2)
  929.     {
  930.     return (_Atom->compare_exchange_strong(*_Exp, _Value, _Order1, _Order2));
  931.     }
  932.  
  933. template <class _Ty>
  934.     __declspec(nothrow) inline bool atomic_compare_exchange_strong_explicit(
  935.         atomic<_Ty> *_Atom, _Ty *_Exp, _Ty _Value,
  936.         memory_order _Order1, memory_order _Order2)
  937.     {
  938.     return (_Atom->compare_exchange_strong(*_Exp, _Value, _Order1, _Order2));
  939.     }
  940.  
  941.         // OVERLOADS FOR POINTERS
  942. template <class _Ty>
  943.     __declspec(nothrow) inline _Ty *atomic_fetch_add(volatile atomic<_Ty*> *_Atom, ptrdiff_t _Value)
  944.     {
  945.     return (_Atom->fetch_add(_Value));
  946.     }
  947.  
  948. template <class _Ty>
  949.     __declspec(nothrow) inline _Ty *atomic_fetch_add(atomic<_Ty*> *_Atom, ptrdiff_t _Value)
  950.     {
  951.     return (_Atom->fetch_add(_Value));
  952.     }
  953.  
  954. template <class _Ty>
  955.     __declspec(nothrow) inline _Ty *atomic_fetch_add_explicit(volatile atomic<_Ty*> *_Atom, ptrdiff_t _Value,
  956.         memory_order _Order)
  957.     {
  958.     return (_Atom->fetch_add(_Value, _Order));
  959.     }
  960.  
  961. template <class _Ty>
  962.     __declspec(nothrow) inline _Ty *atomic_fetch_add_explicit(atomic<_Ty*> *_Atom, ptrdiff_t _Value,
  963.         memory_order _Order)
  964.     {
  965.     return (_Atom->fetch_add(_Value, _Order));
  966.     }
  967.  
  968. template <class _Ty>
  969.     __declspec(nothrow) inline _Ty *atomic_fetch_sub(volatile atomic<_Ty*> *_Atom, ptrdiff_t _Value)
  970.     {
  971.     return (_Atom->fetch_sub(_Value));
  972.     }
  973.  
  974. template <class _Ty>
  975.     __declspec(nothrow) inline _Ty *atomic_fetch_sub(atomic<_Ty*> *_Atom, ptrdiff_t _Value)
  976.     {
  977.     return (_Atom->fetch_sub(_Value));
  978.     }
  979.  
  980. template <class _Ty>
  981.     __declspec(nothrow) inline _Ty *atomic_fetch_sub_explicit(volatile atomic<_Ty*> *_Atom, ptrdiff_t _Value,
  982.         memory_order _Order)
  983.     {
  984.     return (_Atom->fetch_sub(_Value, _Order));
  985.     }
  986.  
  987. template <class _Ty>
  988.     __declspec(nothrow) inline _Ty *atomic_fetch_sub_explicit(atomic<_Ty*> *_Atom, ptrdiff_t _Value,
  989.         memory_order _Order)
  990.     {
  991.     return (_Atom->fetch_sub(_Value, _Order));
  992.     }
  993.  
  994.         // FENCES
  995. extern "C" __declspec(nothrow) inline void atomic_thread_fence(memory_order _Order)
  996.     {   // force memory visibility and inhibit compiler reordering
  997.     return (_Atomic_thread_fence(_Order));
  998.     }
  999.  
  1000. extern "C" __declspec(nothrow) inline void atomic_signal_fence(memory_order _Order)
  1001.     {   // force memory visibility and inhibit compiler reordering
  1002.     return (_Atomic_signal_fence(_Order));
  1003.     }
  1004.  
        // ATOMIC TYPEDEFS
// Convenience aliases for atomic<T> over the builtin, character, and
// <stdint.h> integer types (C11/C++11 atomic_* names).
typedef atomic<bool> atomic_bool;

// builtin integral types
typedef atomic<char> atomic_char;
typedef atomic<signed char> atomic_schar;
typedef atomic<unsigned char> atomic_uchar;
typedef atomic<short> atomic_short;
typedef atomic<unsigned short> atomic_ushort;
typedef atomic<int> atomic_int;
typedef atomic<unsigned int> atomic_uint;
typedef atomic<long> atomic_long;
typedef atomic<unsigned long> atomic_ulong;
typedef atomic<long long> atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;

// Unicode character types
typedef atomic<char16_t> atomic_char16_t;
typedef atomic<char32_t> atomic_char32_t;

typedef atomic<wchar_t> atomic_wchar_t;

// exact-width integer types
typedef atomic<int8_t> atomic_int8_t;
typedef atomic<uint8_t> atomic_uint8_t;
typedef atomic<int16_t> atomic_int16_t;
typedef atomic<uint16_t> atomic_uint16_t;
typedef atomic<int32_t> atomic_int32_t;
typedef atomic<uint32_t> atomic_uint32_t;
typedef atomic<int64_t> atomic_int64_t;
typedef atomic<uint64_t> atomic_uint64_t;

// minimum-width integer types
typedef atomic<int_least8_t> atomic_int_least8_t;
typedef atomic<uint_least8_t> atomic_uint_least8_t;
typedef atomic<int_least16_t> atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t> atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t> atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

// fastest minimum-width integer types
typedef atomic<int_fast8_t> atomic_int_fast8_t;
typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
typedef atomic<int_fast16_t> atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t> atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t> atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

// pointer-sized and miscellaneous integer types
typedef atomic<intptr_t> atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t> atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t> atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;
  1058. _STD_END
  1059.  #pragma pop_macro("new")
  1060.  #pragma warning(pop)
  1061.  #pragma pack(pop)
  1062. #endif /* RC_INVOKED */
  1063. #endif /* _ATOMIC_ */
  1064.  
  1065. /*
  1066.  * Copyright (c) by P.J. Plauger. All rights reserved.
  1067.  * Consult your license regarding permissions and restrictions.
  1068. V6.50:0009 */
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement