/* Copyright (c) 2019-2024 Griefer@Work                                       *
 *                                                                            *
 * This software is provided 'as-is', without any express or implied          *
 * warranty. In no event will the authors be held liable for any damages      *
 * arising from the use of this software.                                     *
 *                                                                            *
 * Permission is granted to anyone to use this software for any purpose,      *
 * including commercial applications, and to alter it and redistribute it     *
 * freely, subject to the following restrictions:                             *
 *                                                                            *
 * 1. The origin of this software must not be misrepresented; you must not    *
 *    claim that you wrote the original software. If you use this software    *
 *    in a product, an acknowledgement (see the following) in the product     *
 *    documentation is required:                                              *
 *    Portions Copyright (c) 2019-2024 Griefer@Work                           *
 * 2. Altered source versions must be plainly marked as such, and must not be *
 *    misrepresented as being the original software.                          *
 * 3. This notice may not be removed or altered from any source distribution. *
 */
/* (>) Standard: ISO C++11 (ISO/IEC 14882:2011) */
/* (#) Portability: MSVC      (/include/atomic) */
/* (#) Portability: libstdc++ (/include/atomic) */
#ifndef _CXX_ATOMIC
#define _CXX_ATOMIC 1

#include <__stdcxx.h>

#include <hybrid/__atomic.h>
#include <hybrid/typecore.h>

#ifdef __COMPILER_HAVE_PRAGMA_PUSHMACRO
#pragma push_macro("atomic")
#endif /* __COMPILER_HAVE_PRAGMA_PUSHMACRO */
#undef atomic
__CXXDECL_BEGIN

#ifndef __char16_t_defined
#define __char16_t_defined
/* Fallback definitions for compilers that don't provide the
 * C++11 built-in `char16_t' / `char32_t' character types. */
typedef __CHAR16_TYPE__ char16_t; /* 16-bit (UTF-16) character type. */
typedef __CHAR32_TYPE__ char32_t; /* 32-bit (UTF-32) character type. */
#endif /* !__char16_t_defined */

/* Define the C++11-conforming `compare_exchange_weak' / `compare_exchange_strong'
 * member functions for an atomic container of `T' (each in a regular and a
 * `volatile'-qualified flavor, with explicit- and single-order overloads).
 *
 * NOTE: the `__hybrid_atomic_cmpxch*' primitives take the /expected/ (old)
 *       value before the /desired/ (new) value (s.a. the KOS extension
 *       `cmpxch(T __oldv, T __newv, ...)' below, which forwards its arguments
 *       in declaration order). `__exp' must therefore be passed before `__val';
 *       passing them the other way around inverts the compare-exchange.
 *
 * On weak-CAS failure, `__exp' is refreshed with a separate acquire load
 * (weak compare-exchange may fail spuriously, so an approximation of the
 * observed value is sufficient here). */
#define __PRIVATE_CXX_DEFINE_COMPARE_EXCHANGE_STD(T)                                                                        \
	__CXX_CLASSMEMBER bool                                                                                                  \
	compare_exchange_weak(T &__exp, T __val,                                                                                \
	                      __NAMESPACE_STD_SYM memory_order __succ,                                                          \
	                      __NAMESPACE_STD_SYM memory_order __fail) __CXX_NOEXCEPT {                                         \
		bool __result = __hybrid_atomic_cmpxch_weak(&__m_val, __exp, __val, __succ, __fail);                                \
		if (!__result)                                                                                                      \
			__exp = __hybrid_atomic_load(&__m_val, __ATOMIC_ACQUIRE);                                                       \
		return __result;                                                                                                    \
	}                                                                                                                       \
	__CXX_CLASSMEMBER bool                                                                                                  \
	compare_exchange_weak(T &__exp, T __val,                                                                                \
	                      __NAMESPACE_STD_SYM memory_order __succ,                                                          \
	                      __NAMESPACE_STD_SYM memory_order __fail) volatile __CXX_NOEXCEPT {                                \
		bool __result = __hybrid_atomic_cmpxch_weak(&__m_val, __exp, __val, __succ, __fail);                                \
		if (!__result)                                                                                                      \
			__exp = __hybrid_atomic_load(&__m_val, __ATOMIC_ACQUIRE);                                                       \
		return __result;                                                                                                    \
	}                                                                                                                       \
	__CXX_CLASSMEMBER bool                                                                                                  \
	compare_exchange_weak(T &__exp, T __val,                                                                                \
	                      __NAMESPACE_STD_SYM memory_order __order                                                          \
	                      __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) __CXX_NOEXCEPT {                                 \
		return compare_exchange_weak(__exp, __val, __order,                                                                 \
		                             __NAMESPACE_INT_SYM __cmpxch_failure_order(__order));                                  \
	}                                                                                                                       \
	__CXX_CLASSMEMBER bool                                                                                                  \
	compare_exchange_weak(T &__exp, T __val,                                                                                \
	                      __NAMESPACE_STD_SYM memory_order __order                                                          \
	                      __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) volatile __CXX_NOEXCEPT {                        \
		return compare_exchange_weak(__exp, __val, __order,                                                                 \
		                             __NAMESPACE_INT_SYM __cmpxch_failure_order(__order));                                  \
	}                                                                                                                       \
	__CXX_CLASSMEMBER bool                                                                                                  \
	compare_exchange_strong(T &__exp, T __val,                                                                              \
	                        __NAMESPACE_STD_SYM memory_order __succ,                                                        \
	                        __NAMESPACE_STD_SYM memory_order __fail) __CXX_NOEXCEPT {                                       \
		T __oldval;                                                                                                         \
		bool __result;                                                                                                      \
		__oldval = __hybrid_atomic_cmpxch_val(&__m_val,                                                                     \
		                                      __exp, __val,                                                                 \
		                                      __succ, __fail);                                                              \
		__result = __exp == __oldval;                                                                                       \
		__exp    = __oldval;                                                                                                \
		return __result;                                                                                                    \
	}                                                                                                                       \
	__CXX_CLASSMEMBER bool                                                                                                  \
	compare_exchange_strong(T &__exp, T __val,                                                                              \
	                        __NAMESPACE_STD_SYM memory_order __succ,                                                        \
	                        __NAMESPACE_STD_SYM memory_order __fail) volatile __CXX_NOEXCEPT {                              \
		T __oldval;                                                                                                         \
		bool __result;                                                                                                      \
		__oldval = __hybrid_atomic_cmpxch_val(&__m_val,                                                                     \
		                                      __exp, __val,                                                                 \
		                                      __succ, __fail);                                                              \
		__result = __exp == __oldval;                                                                                       \
		__exp    = __oldval;                                                                                                \
		return __result;                                                                                                    \
	}                                                                                                                       \
	__CXX_CLASSMEMBER bool                                                                                                  \
	compare_exchange_strong(T &__exp, T __val,                                                                              \
	                        __NAMESPACE_STD_SYM memory_order __order                                                        \
	                        __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) __CXX_NOEXCEPT {                               \
		return compare_exchange_strong(__exp, __val, __order,                                                               \
		                               __NAMESPACE_INT_SYM __cmpxch_failure_order(__order));                                \
	}                                                                                                                       \
	__CXX_CLASSMEMBER bool                                                                                                  \
	compare_exchange_strong(T &__exp, T __val,                                                                              \
	                        __NAMESPACE_STD_SYM memory_order __order                                                        \
	                        __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) volatile __CXX_NOEXCEPT {                      \
		return compare_exchange_strong(__exp, __val, __order,                                                               \
		                               __NAMESPACE_INT_SYM __cmpxch_failure_order(__order));                                \
	}

/* KOS Extensions. */
#ifdef __USE_KOS
/* Define the standard `compare_exchange_{weak,strong}' members, plus the KOS
 * extension members `cmpxch', `cmpxch_val', `cmpxch_weak' and `cmpxch_val_weak'.
 * The extensions take the expected (old) value first and the desired (new)
 * value second, and come in explicit-(succ,fail) and single-order overloads,
 * each regular and `volatile'-qualified.
 * FIX: the single-order `cmpxch_val_weak' overloads used to call themselves
 *      with 4 arguments even though no (succ, fail) base overload existed
 *      (ill-formed on instantiation); the missing base overloads, backed by
 *      `__hybrid_atomic_cmpxch_val_weak', are now defined. */
#define __PRIVATE_CXX_DEFINE_COMPARE_EXCHANGE(T)                                                                    \
	__PRIVATE_CXX_DEFINE_COMPARE_EXCHANGE_STD(T)                                                                    \
	__CXX_CLASSMEMBER bool                                                                                          \
	cmpxch(T __oldv, T __newv,                                                                                      \
	       __NAMESPACE_STD_SYM memory_order __succ,                                                                 \
	       __NAMESPACE_STD_SYM memory_order __fail) __CXX_NOEXCEPT {                                                \
		return __hybrid_atomic_cmpxch(&__m_val,                                                                     \
		                              __oldv, __newv,                                                               \
		                              __succ, __fail);                                                              \
	}                                                                                                               \
	__CXX_CLASSMEMBER bool                                                                                          \
	cmpxch(T __oldv, T __newv,                                                                                      \
	       __NAMESPACE_STD_SYM memory_order __succ,                                                                 \
	       __NAMESPACE_STD_SYM memory_order __fail) volatile __CXX_NOEXCEPT {                                       \
		return __hybrid_atomic_cmpxch(&__m_val,                                                                     \
		                              __oldv, __newv,                                                               \
		                              __succ, __fail);                                                              \
	}                                                                                                               \
	__CXX_CLASSMEMBER bool                                                                                          \
	cmpxch(T __oldv, T __newv,                                                                                      \
	       __NAMESPACE_STD_SYM memory_order __order                                                                 \
	       __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) __CXX_NOEXCEPT {                                        \
		return cmpxch(__oldv, __newv, __order,                                                                      \
		              __NAMESPACE_INT_SYM __cmpxch_failure_order(__order));                                         \
	}                                                                                                               \
	__CXX_CLASSMEMBER bool                                                                                          \
	cmpxch(T __oldv, T __newv,                                                                                      \
	       __NAMESPACE_STD_SYM memory_order __order                                                                 \
	       __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) volatile __CXX_NOEXCEPT {                               \
		return cmpxch(__oldv, __newv, __order,                                                                      \
		              __NAMESPACE_INT_SYM __cmpxch_failure_order(__order));                                         \
	}                                                                                                               \
	__CXX_CLASSMEMBER T                                                                                             \
	cmpxch_val(T __oldv, T __newv,                                                                                  \
	           __NAMESPACE_STD_SYM memory_order __succ,                                                             \
	           __NAMESPACE_STD_SYM memory_order __fail) __CXX_NOEXCEPT {                                            \
		return __hybrid_atomic_cmpxch_val(&__m_val,                                                                 \
		                                  __oldv, __newv,                                                           \
		                                  __succ, __fail);                                                          \
	}                                                                                                               \
	__CXX_CLASSMEMBER T                                                                                             \
	cmpxch_val(T __oldv, T __newv,                                                                                  \
	           __NAMESPACE_STD_SYM memory_order __succ,                                                             \
	           __NAMESPACE_STD_SYM memory_order __fail) volatile __CXX_NOEXCEPT {                                   \
		return __hybrid_atomic_cmpxch_val(&__m_val,                                                                 \
		                                  __oldv, __newv,                                                           \
		                                  __succ, __fail);                                                          \
	}                                                                                                               \
	__CXX_CLASSMEMBER T                                                                                             \
	cmpxch_val(T __oldv, T __newv,                                                                                  \
	           __NAMESPACE_STD_SYM memory_order __order                                                             \
	           __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) __CXX_NOEXCEPT {                                    \
		return cmpxch_val(__oldv, __newv, __order,                                                                  \
		                  __NAMESPACE_INT_SYM __cmpxch_failure_order(__order));                                     \
	}                                                                                                               \
	__CXX_CLASSMEMBER T                                                                                             \
	cmpxch_val(T __oldv, T __newv,                                                                                  \
	           __NAMESPACE_STD_SYM memory_order __order                                                             \
	           __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) volatile __CXX_NOEXCEPT {                           \
		return cmpxch_val(__oldv, __newv, __order,                                                                  \
		                  __NAMESPACE_INT_SYM __cmpxch_failure_order(__order));                                     \
	}                                                                                                               \
	__CXX_CLASSMEMBER bool                                                                                          \
	cmpxch_weak(T __oldv, T __newv,                                                                                 \
	            __NAMESPACE_STD_SYM memory_order __succ,                                                            \
	            __NAMESPACE_STD_SYM memory_order __fail) __CXX_NOEXCEPT {                                           \
		return __hybrid_atomic_cmpxch_weak(&__m_val, __oldv, __newv, __succ, __fail);                               \
	}                                                                                                               \
	__CXX_CLASSMEMBER bool                                                                                          \
	cmpxch_weak(T __oldv, T __newv,                                                                                 \
	            __NAMESPACE_STD_SYM memory_order __succ,                                                            \
	            __NAMESPACE_STD_SYM memory_order __fail) volatile __CXX_NOEXCEPT {                                  \
		return __hybrid_atomic_cmpxch_weak(&__m_val, __oldv, __newv, __succ, __fail);                               \
	}                                                                                                               \
	__CXX_CLASSMEMBER bool                                                                                          \
	cmpxch_weak(T __oldv, T __newv,                                                                                 \
	            __NAMESPACE_STD_SYM memory_order __order                                                            \
	            __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) __CXX_NOEXCEPT {                                   \
		return cmpxch_weak(__oldv, __newv, __order,                                                                 \
		                   __NAMESPACE_INT_SYM __cmpxch_failure_order(__order));                                    \
	}                                                                                                               \
	__CXX_CLASSMEMBER bool                                                                                          \
	cmpxch_weak(T __oldv, T __newv,                                                                                 \
	            __NAMESPACE_STD_SYM memory_order __order                                                            \
	            __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) volatile __CXX_NOEXCEPT {                          \
		return cmpxch_weak(__oldv, __newv, __order,                                                                 \
		                   __NAMESPACE_INT_SYM __cmpxch_failure_order(__order));                                    \
	}                                                                                                               \
	__CXX_CLASSMEMBER T                                                                                             \
	cmpxch_val_weak(T __oldv, T __newv,                                                                             \
	                __NAMESPACE_STD_SYM memory_order __succ,                                                        \
	                __NAMESPACE_STD_SYM memory_order __fail) __CXX_NOEXCEPT {                                       \
		return __hybrid_atomic_cmpxch_val_weak(&__m_val,                                                            \
		                                       __oldv, __newv,                                                      \
		                                       __succ, __fail);                                                     \
	}                                                                                                               \
	__CXX_CLASSMEMBER T                                                                                             \
	cmpxch_val_weak(T __oldv, T __newv,                                                                             \
	                __NAMESPACE_STD_SYM memory_order __succ,                                                        \
	                __NAMESPACE_STD_SYM memory_order __fail) volatile __CXX_NOEXCEPT {                              \
		return __hybrid_atomic_cmpxch_val_weak(&__m_val,                                                            \
		                                       __oldv, __newv,                                                      \
		                                       __succ, __fail);                                                     \
	}                                                                                                               \
	__CXX_CLASSMEMBER T                                                                                             \
	cmpxch_val_weak(T __oldv, T __newv,                                                                             \
	                __NAMESPACE_STD_SYM memory_order __order                                                        \
	                __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) __CXX_NOEXCEPT {                               \
		return cmpxch_val_weak(__oldv, __newv, __order,                                                             \
		                       __NAMESPACE_INT_SYM __cmpxch_failure_order(__order));                                \
	}                                                                                                               \
	__CXX_CLASSMEMBER T                                                                                             \
	cmpxch_val_weak(T __oldv, T __newv,                                                                             \
	                __NAMESPACE_STD_SYM memory_order __order                                                        \
	                __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) volatile __CXX_NOEXCEPT {                      \
		return cmpxch_val_weak(__oldv, __newv, __order,                                                             \
		                       __NAMESPACE_INT_SYM __cmpxch_failure_order(__order));                                \
	}
#else /* __USE_KOS */
#define __PRIVATE_CXX_DEFINE_COMPARE_EXCHANGE(T) \
	__PRIVATE_CXX_DEFINE_COMPARE_EXCHANGE_STD(T)
#endif /* !__USE_KOS */

#ifndef __std_memory_order_defined
#define __std_memory_order_defined
__NAMESPACE_STD_BEGIN
/* C++11 `std::memory_order'. Enumerators mirror the compiler's
 * `__ATOMIC_*' constants, so values can be forwarded unchanged to
 * the underlying `__hybrid_atomic_*' primitives. */
typedef enum {
	memory_order_relaxed = __ATOMIC_RELAXED, /* No inter-thread ordering constraints. */
	memory_order_consume = __ATOMIC_CONSUME, /* Consume (dependency-ordered) load. */
	memory_order_acquire = __ATOMIC_ACQUIRE, /* Acquire load. */
	memory_order_release = __ATOMIC_RELEASE, /* Release store. */
	memory_order_acq_rel = __ATOMIC_ACQ_REL, /* Acquire + release (for read-modify-write). */
	memory_order_seq_cst = __ATOMIC_SEQ_CST  /* Sequentially consistent (strongest; the default). */
} memory_order;
__NAMESPACE_STD_END
#endif /* !__std_memory_order_defined */

__NAMESPACE_INT_BEGIN
/* Map a compare-exchange success ordering onto a legal failure ordering:
 * a failed CAS performs no store, so the release component is dropped
 * (`acq_rel' -> `acquire', `release' -> `relaxed'); every other order
 * is passed through unchanged. */
__CXX_CLASSMEMBER __CXX11_CONSTEXPR __NAMESPACE_STD_SYM memory_order
__cmpxch_failure_order2(__NAMESPACE_STD_SYM memory_order __order) __CXX_NOEXCEPT {
	return __order == __NAMESPACE_STD_SYM memory_order_release
	       ? __NAMESPACE_STD_SYM memory_order_relaxed
	       : __order == __NAMESPACE_STD_SYM memory_order_acq_rel
	         ? __NAMESPACE_STD_SYM memory_order_acquire
	         : __order;
}

/* Compute the implicit failure ordering used by the single-order
 * compare_exchange overloads. Only the low 16 bits are treated as the
 * actual ordering code (mapped via `__cmpxch_failure_order2'); any bits
 * above that are passed through unchanged.
 * NOTE(review): the high bits are presumably KOS-specific ordering flags
 * encoded alongside the `memory_order' value - confirm in <hybrid/__atomic.h>. */
__CXX_CLASSMEMBER __CXX11_CONSTEXPR __NAMESPACE_STD_SYM memory_order
__cmpxch_failure_order(__NAMESPACE_STD_SYM memory_order __order) __CXX_NOEXCEPT {
	return __NAMESPACE_STD_SYM memory_order(__cmpxch_failure_order2(__NAMESPACE_STD_SYM memory_order(__order & 0xffff)) |
	                                        (__order & 0xffff0000));
}


/* Backend container implementing atomic operations for integral types.
 * All members map directly onto the `__hybrid_atomic_*' primitives from
 * <hybrid/__atomic.h>, and every operation is provided both as a regular
 * and as a `volatile'-qualified overload. */
template<class __T> struct __atomic_int {
private:
	__T __m_val; /* The contained value (only ever accessed atomically after construction). */

public:
	__CXX_DEFAULT_CTOR_NOEXCEPT(__atomic_int);
	__CXX_DEFAULT_DTOR_NOEXCEPT(__atomic_int);
	__CXX_DELETE_COPY(__atomic_int);
	__CXX_DELETE_COPY_ASSIGN_X(__atomic_int);
	__CXX_DELETE_VOLATILE_COPY_ASSIGN_X(__atomic_int);

public:
	/* Value initialization (non-atomic; the object isn't shared yet during construction). */
	__CXX_CLASSMEMBER __CXX11_CONSTEXPR __atomic_int(__T __val) __CXX_NOEXCEPT
	    : __m_val(__val) { }

	/* Implicit conversion: atomically read the contained value (equivalent to `load()'). */
	__CXX_CLASSMEMBER operator __T() const __CXX_NOEXCEPT {
		return load();
	}

	__CXX_CLASSMEMBER operator __T() const volatile __CXX_NOEXCEPT {
		return load();
	}

	/* Atomic assignment; returns the assigned value `__val' (not a re-load). */
	__CXX_CLASSMEMBER __T
	operator=(__T __val) __CXX_NOEXCEPT {
		store(__val);
		return __val;
	}

	__CXX_CLASSMEMBER __T
	operator=(__T __val) volatile __CXX_NOEXCEPT {
		store(__val);
		return __val;
	}

	/* Post-increment: atomically increment; returns the OLD value (seq_cst). */
	__CXX_CLASSMEMBER __T
	operator++(int) __CXX_NOEXCEPT {
		return __hybrid_atomic_fetchinc(&__m_val, __ATOMIC_SEQ_CST);
	}

	__CXX_CLASSMEMBER __T
	operator++(int) volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_fetchinc(&__m_val, __ATOMIC_SEQ_CST);
	}

	/* Post-decrement: atomically decrement; returns the OLD value (seq_cst). */
	__CXX_CLASSMEMBER __T
	operator--(int) __CXX_NOEXCEPT {
		return __hybrid_atomic_fetchdec(&__m_val, __ATOMIC_SEQ_CST);
	}

	__CXX_CLASSMEMBER __T
	operator--(int) volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_fetchdec(&__m_val, __ATOMIC_SEQ_CST);
	}

	/* Pre-increment: atomically increment; returns the NEW value (seq_cst). */
	__CXX_CLASSMEMBER __T
	operator++() __CXX_NOEXCEPT {
		return __hybrid_atomic_incfetch(&__m_val, __ATOMIC_SEQ_CST);
	}

	__CXX_CLASSMEMBER __T
	operator++() volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_incfetch(&__m_val, __ATOMIC_SEQ_CST);
	}

	/* Pre-decrement: atomically decrement; returns the NEW value (seq_cst). */
	__CXX_CLASSMEMBER __T
	operator--() __CXX_NOEXCEPT {
		return __hybrid_atomic_decfetch(&__m_val, __ATOMIC_SEQ_CST);
	}

	__CXX_CLASSMEMBER __T
	operator--() volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_decfetch(&__m_val, __ATOMIC_SEQ_CST);
	}

	/* Atomic in-place add; returns the NEW value (seq_cst). */
	__CXX_CLASSMEMBER __T
	operator+=(__T __val) __CXX_NOEXCEPT {
		return __hybrid_atomic_addfetch(&__m_val, __val, __ATOMIC_SEQ_CST);
	}

	__CXX_CLASSMEMBER __T
	operator+=(__T __val) volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_addfetch(&__m_val, __val, __ATOMIC_SEQ_CST);
	}

	/* Atomic in-place subtract; returns the NEW value (seq_cst). */
	__CXX_CLASSMEMBER __T
	operator-=(__T __val) __CXX_NOEXCEPT {
		return __hybrid_atomic_subfetch(&__m_val, __val, __ATOMIC_SEQ_CST);
	}

	__CXX_CLASSMEMBER __T
	operator-=(__T __val) volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_subfetch(&__m_val, __val, __ATOMIC_SEQ_CST);
	}

	/* Atomic in-place bitwise-and; returns the NEW value (seq_cst). */
	__CXX_CLASSMEMBER __T
	operator&=(__T __val) __CXX_NOEXCEPT {
		return __hybrid_atomic_andfetch(&__m_val, __val, __ATOMIC_SEQ_CST);
	}

	__CXX_CLASSMEMBER __T
	operator&=(__T __val) volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_andfetch(&__m_val, __val, __ATOMIC_SEQ_CST);
	}

	/* Atomic in-place bitwise-or; returns the NEW value (seq_cst). */
	__CXX_CLASSMEMBER __T
	operator|=(__T __val) __CXX_NOEXCEPT {
		return __hybrid_atomic_orfetch(&__m_val, __val, __ATOMIC_SEQ_CST);
	}

	__CXX_CLASSMEMBER __T
	operator|=(__T __val) volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_orfetch(&__m_val, __val, __ATOMIC_SEQ_CST);
	}

	/* Atomic in-place bitwise-xor; returns the NEW value (seq_cst). */
	__CXX_CLASSMEMBER __T
	operator^=(__T __val) __CXX_NOEXCEPT {
		return __hybrid_atomic_xorfetch(&__m_val, __val, __ATOMIC_SEQ_CST);
	}

	__CXX_CLASSMEMBER __T
	operator^=(__T __val) volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_xorfetch(&__m_val, __val, __ATOMIC_SEQ_CST);
	}

	/* Check if operations on this object are implemented without locks. */
	__CXX_CLASSMEMBER bool
	is_lock_free() const __CXX_NOEXCEPT {
		return __hybrid_atomic_lockfree(&__m_val);
	}

	__CXX_CLASSMEMBER bool
	is_lock_free() const volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_lockfree(&__m_val);
	}

	/* Atomically write `__val' with the given ordering (default: seq_cst). */
	__CXX_CLASSMEMBER void
	store(__T __val, __NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) __CXX_NOEXCEPT {
		__hybrid_atomic_store(&__m_val, __val, __order);
	}

	__CXX_CLASSMEMBER void
	store(__T __val, __NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) volatile __CXX_NOEXCEPT {
		__hybrid_atomic_store(&__m_val, __val, __order);
	}

	/* Atomically read the contained value with the given ordering (default: seq_cst). */
	__CXX_CLASSMEMBER __T
	load(__NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) const __CXX_NOEXCEPT {
		return __hybrid_atomic_load(&__m_val, __order);
	}

	__CXX_CLASSMEMBER __T
	load(__NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) const volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_load(&__m_val, __order);
	}

	/* Atomically replace the contained value with `__val'; returns the OLD value. */
	__CXX_CLASSMEMBER __T
	exchange(__T __val, __NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) __CXX_NOEXCEPT {
		return __hybrid_atomic_xch(&__m_val, __val, __order);
	}

	__CXX_CLASSMEMBER __T
	exchange(__T __val, __NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_xch(&__m_val, __val, __order);
	}

	/* Atomically add `__val'; returns the OLD value. */
	__CXX_CLASSMEMBER __T
	fetch_add(__T __val, __NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) __CXX_NOEXCEPT {
		return __hybrid_atomic_fetchadd(&__m_val, __val, __order);
	}

	__CXX_CLASSMEMBER __T
	fetch_add(__T __val, __NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_fetchadd(&__m_val, __val, __order);
	}

	/* Atomically subtract `__val'; returns the OLD value. */
	__CXX_CLASSMEMBER __T
	fetch_sub(__T __val, __NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) __CXX_NOEXCEPT {
		return __hybrid_atomic_fetchsub(&__m_val, __val, __order);
	}

	__CXX_CLASSMEMBER __T
	fetch_sub(__T __val, __NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_fetchsub(&__m_val, __val, __order);
	}

	/* Atomically bitwise-and with `__val'; returns the OLD value. */
	__CXX_CLASSMEMBER __T
	fetch_and(__T __val, __NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) __CXX_NOEXCEPT {
		return __hybrid_atomic_fetchand(&__m_val, __val, __order);
	}

	__CXX_CLASSMEMBER __T
	fetch_and(__T __val, __NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_fetchand(&__m_val, __val, __order);
	}

	/* Atomically bitwise-or with `__val'; returns the OLD value. */
	__CXX_CLASSMEMBER __T
	fetch_or(__T __val, __NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) __CXX_NOEXCEPT {
		return __hybrid_atomic_fetchor(&__m_val, __val, __order);
	}

	__CXX_CLASSMEMBER __T
	fetch_or(__T __val, __NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_fetchor(&__m_val, __val, __order);
	}

	/* Atomically bitwise-xor with `__val'; returns the OLD value. */
	__CXX_CLASSMEMBER __T
	fetch_xor(__T __val, __NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) __CXX_NOEXCEPT {
		return __hybrid_atomic_fetchxor(&__m_val, __val, __order);
	}

	__CXX_CLASSMEMBER __T
	fetch_xor(__T __val, __NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_fetchxor(&__m_val, __val, __order);
	}

	/* compare_exchange_weak / compare_exchange_strong (+ KOS `cmpxch*' extensions). */
	__PRIVATE_CXX_DEFINE_COMPARE_EXCHANGE(__T)

#ifdef __USE_KOS
	/* KOS extension: atomically increment; returns the OLD value. */
	__CXX_CLASSMEMBER __T
	fetch_inc(__NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) __CXX_NOEXCEPT {
		return __hybrid_atomic_fetchinc(&__m_val, __order);
	}

	__CXX_CLASSMEMBER __T
	fetch_inc(__NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_fetchinc(&__m_val, __order);
	}

	/* KOS extension: atomically increment; returns the NEW value. */
	__CXX_CLASSMEMBER __T
	inc_fetch(__NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) __CXX_NOEXCEPT {
		return __hybrid_atomic_incfetch(&__m_val, __order);
	}

	__CXX_CLASSMEMBER __T
	inc_fetch(__NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_incfetch(&__m_val, __order);
	}

	/* KOS extension: atomically decrement; returns the OLD value. */
	__CXX_CLASSMEMBER __T
	fetch_dec(__NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) __CXX_NOEXCEPT {
		return __hybrid_atomic_fetchdec(&__m_val, __order);
	}

	__CXX_CLASSMEMBER __T
	fetch_dec(__NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_fetchdec(&__m_val, __order);
	}

	/* KOS extension: atomically decrement; returns the NEW value. */
	__CXX_CLASSMEMBER __T
	dec_fetch(__NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) __CXX_NOEXCEPT {
		return __hybrid_atomic_decfetch(&__m_val, __order);
	}

	__CXX_CLASSMEMBER __T
	dec_fetch(__NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_decfetch(&__m_val, __order);
	}

	/* KOS extension: atomic nand with `__val'; returns the OLD value.
	 * NOTE(review): presumably `~(old & __val)' like GCC's `__atomic_fetch_nand'
	 *               - confirm against <hybrid/__atomic.h>. */
	__CXX_CLASSMEMBER __T
	fetch_nand(__T __val, __NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) __CXX_NOEXCEPT {
		return __hybrid_atomic_fetchnand(&__m_val, __val, __order);
	}

	__CXX_CLASSMEMBER __T
	fetch_nand(__T __val, __NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_fetchnand(&__m_val, __val, __order);
	}

	/* KOS extension: atomic nand with `__val'; returns the NEW value. */
	__CXX_CLASSMEMBER __T
	nand_fetch(__T __val, __NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) __CXX_NOEXCEPT {
		return __hybrid_atomic_nandfetch(&__m_val, __val, __order);
	}

	__CXX_CLASSMEMBER __T
	nand_fetch(__T __val, __NAMESPACE_STD_SYM memory_order __order __DFL(__NAMESPACE_STD_SYM memory_order_seq_cst)) volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_nandfetch(&__m_val, __val, __order);
	}
#endif /* __USE_KOS */
};
__NAMESPACE_INT_END
__NAMESPACE_STD_BEGIN
/* Static initializer for `atomic' objects (s.a. ISO C++11 `ATOMIC_VAR_INIT') */
#define ATOMIC_VAR_INIT(x) { x }

/* Primary `atomic' template: wraps a value of type `__T' and provides
 * atomic load/store/exchange (plus compare-exchange, injected below via
 * `__PRIVATE_CXX_DEFINE_COMPARE_EXCHANGE'), implemented on top of the
 * `__hybrid_atomic_*' primitives from <hybrid/__atomic.h>. */
template<class __T> struct atomic {
private:
	__T __m_val; /* The wrapped value (only ever accessed atomically) */

public:
	__CXX_DEFAULT_CTOR_NOEXCEPT(atomic);
	__CXX_DEFAULT_DTOR_NOEXCEPT(atomic);
	__CXX_DELETE_COPY(atomic);
	__CXX_DELETE_COPY_ASSIGN_X(atomic);
	__CXX_DELETE_VOLATILE_COPY_ASSIGN_X(atomic);

public:
	/* Initialize with `__val' (non-atomic; construction isn't racy) */
	__CXX_CLASSMEMBER __CXX11_CONSTEXPR atomic(__T __val) __CXX_NOEXCEPT
	    : __m_val(__val) { }

	/* Implicit read access (same as `load()' with default ordering) */
	__CXX_CLASSMEMBER operator __T() const __CXX_NOEXCEPT {
		return load();
	}

	__CXX_CLASSMEMBER operator __T() const volatile __CXX_NOEXCEPT {
		return load();
	}

	/* Atomic assignment (same as `store()'); returns the assigned value */
	__CXX_CLASSMEMBER __T
	operator=(__T __val) __CXX_NOEXCEPT {
		store(__val);
		return __val;
	}

	__CXX_CLASSMEMBER __T
	operator=(__T __val) volatile __CXX_NOEXCEPT {
		store(__val);
		return __val;
	}

	/* Check if operations on this atomic type are lock-free */
	__CXX_CLASSMEMBER bool
	is_lock_free() const __CXX_NOEXCEPT {
		return __hybrid_atomic_lockfree(&__m_val);
	}

	__CXX_CLASSMEMBER bool
	is_lock_free() const volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_lockfree(&__m_val);
	}

	/* Atomically store `__val' with the given memory ordering */
	__CXX_CLASSMEMBER void
	store(__T __val, memory_order __order __DFL(memory_order_seq_cst)) __CXX_NOEXCEPT {
		__hybrid_atomic_store(&__m_val, __val, __order);
	}

	__CXX_CLASSMEMBER void
	store(__T __val, memory_order __order __DFL(memory_order_seq_cst)) volatile __CXX_NOEXCEPT {
		__hybrid_atomic_store(&__m_val, __val, __order);
	}

	/* Atomically read the stored value with the given memory ordering */
	__CXX_CLASSMEMBER __T
	load(memory_order __order __DFL(memory_order_seq_cst)) const __CXX_NOEXCEPT {
		return __hybrid_atomic_load(&__m_val, __order);
	}

	__CXX_CLASSMEMBER __T
	load(memory_order __order __DFL(memory_order_seq_cst)) const volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_load(&__m_val, __order);
	}

	/* Atomically replace the stored value with `__val'; return the OLD value */
	__CXX_CLASSMEMBER __T
	exchange(__T __val, memory_order __order __DFL(memory_order_seq_cst)) __CXX_NOEXCEPT {
		return __hybrid_atomic_xch(&__m_val, __val, __order);
	}

	__CXX_CLASSMEMBER __T
	exchange(__T __val, memory_order __order __DFL(memory_order_seq_cst)) volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_xch(&__m_val, __val, __order);
	}

	/* Inject `compare_exchange_weak' / `compare_exchange_strong' (and
	 * related) member functions for the stored type. */
	__PRIVATE_CXX_DEFINE_COMPARE_EXCHANGE(__T)
};

/* Partial specialization of `atomic' for pointer types: arithmetic
 * operations take element counts and are scaled by `sizeof(__T)'.
 * The `sizeof(__T) == 1' special cases below simply select the cheaper
 * dedicated inc/dec primitive (same net effect when elements are 1 byte). */
template<class __T> struct atomic<__T *> {
	typedef __T *__pointer_type;

private:
	__pointer_type __m_val; /* The wrapped pointer (only ever accessed atomically) */

public:
	__CXX_DEFAULT_CTOR_NOEXCEPT(atomic);
	__CXX_DEFAULT_DTOR_NOEXCEPT(atomic);
	__CXX_DELETE_COPY(atomic);
	__CXX_DELETE_COPY_ASSIGN_X(atomic);
	__CXX_DELETE_VOLATILE_COPY_ASSIGN_X(atomic);

public:
	/* Initialize with `__val' (non-atomic; construction isn't racy) */
	__CXX_CLASSMEMBER __CXX11_CONSTEXPR atomic(__pointer_type __val) __CXX_NOEXCEPT
	    : __m_val(__val) { }

	/* Implicit read access (same as `load()' with default ordering) */
	__CXX_CLASSMEMBER operator __pointer_type() const __CXX_NOEXCEPT {
		return load();
	}

	__CXX_CLASSMEMBER operator __pointer_type() const volatile __CXX_NOEXCEPT {
		return load();
	}

	/* Atomic assignment (same as `store()'); returns the assigned pointer */
	__CXX_CLASSMEMBER __pointer_type
	operator=(__pointer_type __val) __CXX_NOEXCEPT {
		store(__val);
		return __val;
	}

	__CXX_CLASSMEMBER __pointer_type
	operator=(__pointer_type __val) volatile __CXX_NOEXCEPT {
		store(__val);
		return __val;
	}

	/* Post-increment: atomically advance by one element; return the OLD pointer */
	__CXX_CLASSMEMBER __pointer_type
	operator++(int) __CXX_NOEXCEPT {
		return (__pointer_type)(sizeof(__T) == 1
		                        ? __hybrid_atomic_fetchinc(&__m_val, __ATOMIC_SEQ_CST)
		                        : __hybrid_atomic_fetchadd(&__m_val, sizeof(__T), __ATOMIC_SEQ_CST));
	}

	__CXX_CLASSMEMBER __pointer_type
	operator++(int) volatile __CXX_NOEXCEPT {
		return (__pointer_type)(sizeof(__T) == 1
		                        ? __hybrid_atomic_fetchinc(&__m_val, __ATOMIC_SEQ_CST)
		                        : __hybrid_atomic_fetchadd(&__m_val, sizeof(__T), __ATOMIC_SEQ_CST));
	}

	/* Post-decrement: atomically back up by one element; return the OLD pointer */
	__CXX_CLASSMEMBER __pointer_type
	operator--(int) __CXX_NOEXCEPT {
		return (__pointer_type)(sizeof(__T) == 1
		                        ? __hybrid_atomic_fetchdec(&__m_val, __ATOMIC_SEQ_CST)
		                        : __hybrid_atomic_fetchsub(&__m_val, sizeof(__T), __ATOMIC_SEQ_CST));
	}

	__CXX_CLASSMEMBER __pointer_type
	operator--(int) volatile __CXX_NOEXCEPT {
		return (__pointer_type)(sizeof(__T) == 1
		                        ? __hybrid_atomic_fetchdec(&__m_val, __ATOMIC_SEQ_CST)
		                        : __hybrid_atomic_fetchsub(&__m_val, sizeof(__T), __ATOMIC_SEQ_CST));
	}

	/* Pre-increment: atomically advance by one element; return the NEW pointer */
	__CXX_CLASSMEMBER __pointer_type
	operator++() __CXX_NOEXCEPT {
		return (__pointer_type)(sizeof(__T) == 1
		                        ? __hybrid_atomic_incfetch(&__m_val, __ATOMIC_SEQ_CST)
		                        : __hybrid_atomic_addfetch(&__m_val, sizeof(__T), __ATOMIC_SEQ_CST));
	}

	__CXX_CLASSMEMBER __pointer_type
	operator++() volatile __CXX_NOEXCEPT {
		return (__pointer_type)(sizeof(__T) == 1
		                        ? __hybrid_atomic_incfetch(&__m_val, __ATOMIC_SEQ_CST)
		                        : __hybrid_atomic_addfetch(&__m_val, sizeof(__T), __ATOMIC_SEQ_CST));
	}

	/* Pre-decrement: atomically back up by one element; return the NEW pointer */
	__CXX_CLASSMEMBER __pointer_type
	operator--() __CXX_NOEXCEPT {
		return (__pointer_type)(sizeof(__T) == 1
		                        ? __hybrid_atomic_decfetch(&__m_val, __ATOMIC_SEQ_CST)
		                        : __hybrid_atomic_subfetch(&__m_val, sizeof(__T), __ATOMIC_SEQ_CST));
	}

	__CXX_CLASSMEMBER __pointer_type
	operator--() volatile __CXX_NOEXCEPT {
		return (__pointer_type)(sizeof(__T) == 1
		                        ? __hybrid_atomic_decfetch(&__m_val, __ATOMIC_SEQ_CST)
		                        : __hybrid_atomic_subfetch(&__m_val, sizeof(__T), __ATOMIC_SEQ_CST));
	}

	/* Atomically advance by `__val' elements; return the NEW pointer */
	__CXX_CLASSMEMBER __pointer_type
	operator+=(__PTRDIFF_TYPE__ __val) __CXX_NOEXCEPT {
		return (__pointer_type)__hybrid_atomic_addfetch(&__m_val, __val * sizeof(__T), __ATOMIC_SEQ_CST);
	}

	__CXX_CLASSMEMBER __pointer_type
	operator+=(__PTRDIFF_TYPE__ __val) volatile __CXX_NOEXCEPT {
		return (__pointer_type)__hybrid_atomic_addfetch(&__m_val, __val * sizeof(__T), __ATOMIC_SEQ_CST);
	}

	/* Atomically back up by `__val' elements; return the NEW pointer */
	__CXX_CLASSMEMBER __pointer_type
	operator-=(__PTRDIFF_TYPE__ __val) __CXX_NOEXCEPT {
		return (__pointer_type)__hybrid_atomic_subfetch(&__m_val, __val * sizeof(__T), __ATOMIC_SEQ_CST);
	}

	__CXX_CLASSMEMBER __pointer_type
	operator-=(__PTRDIFF_TYPE__ __val) volatile __CXX_NOEXCEPT {
		return (__pointer_type)__hybrid_atomic_subfetch(&__m_val, __val * sizeof(__T), __ATOMIC_SEQ_CST);
	}

	/* Atomically advance by `__val' elements; return the OLD pointer */
	__CXX_CLASSMEMBER __pointer_type
	fetch_add(__PTRDIFF_TYPE__ __val, memory_order __order __DFL(memory_order_seq_cst)) __CXX_NOEXCEPT {
		return (__pointer_type)__hybrid_atomic_fetchadd(&__m_val, __val * sizeof(__T), __order);
	}

	__CXX_CLASSMEMBER __pointer_type
	fetch_add(__PTRDIFF_TYPE__ __val, memory_order __order __DFL(memory_order_seq_cst)) volatile __CXX_NOEXCEPT {
		return (__pointer_type)__hybrid_atomic_fetchadd(&__m_val, __val * sizeof(__T), __order);
	}

	/* Atomically back up by `__val' elements; return the OLD pointer */
	__CXX_CLASSMEMBER __pointer_type
	fetch_sub(__PTRDIFF_TYPE__ __val, memory_order __order __DFL(memory_order_seq_cst)) __CXX_NOEXCEPT {
		return (__pointer_type)__hybrid_atomic_fetchsub(&__m_val, __val * sizeof(__T), __order);
	}

	__CXX_CLASSMEMBER __pointer_type
	fetch_sub(__PTRDIFF_TYPE__ __val, memory_order __order __DFL(memory_order_seq_cst)) volatile __CXX_NOEXCEPT {
		return (__pointer_type)__hybrid_atomic_fetchsub(&__m_val, __val * sizeof(__T), __order);
	}

	/* Check if operations on this atomic type are lock-free */
	__CXX_CLASSMEMBER bool
	is_lock_free() const __CXX_NOEXCEPT {
		return __hybrid_atomic_lockfree(&__m_val);
	}

	__CXX_CLASSMEMBER bool
	is_lock_free() const volatile __CXX_NOEXCEPT {
		return __hybrid_atomic_lockfree(&__m_val);
	}

	/* Atomically store `__val' with the given memory ordering */
	__CXX_CLASSMEMBER void
	store(__pointer_type __val, memory_order __order __DFL(memory_order_seq_cst)) __CXX_NOEXCEPT {
		__hybrid_atomic_store(&__m_val, __val, __order);
	}

	__CXX_CLASSMEMBER void
	store(__pointer_type __val, memory_order __order __DFL(memory_order_seq_cst)) volatile __CXX_NOEXCEPT {
		__hybrid_atomic_store(&__m_val, __val, __order);
	}

	/* Atomically read the stored pointer with the given memory ordering */
	__CXX_CLASSMEMBER __pointer_type
	load(memory_order __order __DFL(memory_order_seq_cst)) const __CXX_NOEXCEPT {
		return (__pointer_type)__hybrid_atomic_load(&__m_val, __order);
	}

	__CXX_CLASSMEMBER __pointer_type
	load(memory_order __order __DFL(memory_order_seq_cst)) const volatile __CXX_NOEXCEPT {
		return (__pointer_type)__hybrid_atomic_load(&__m_val, __order);
	}

	/* Atomically replace the stored pointer with `__val'; return the OLD pointer */
	__CXX_CLASSMEMBER __pointer_type
	exchange(__pointer_type __val, memory_order __order __DFL(memory_order_seq_cst)) __CXX_NOEXCEPT {
		return (__pointer_type)__hybrid_atomic_xch(&__m_val, __val, __order);
	}

	__CXX_CLASSMEMBER __pointer_type
	exchange(__pointer_type __val, memory_order __order __DFL(memory_order_seq_cst)) volatile __CXX_NOEXCEPT {
		return (__pointer_type)__hybrid_atomic_xch(&__m_val, __val, __order);
	}

	/* Inject `compare_exchange_weak' / `compare_exchange_strong' (and
	 * related) member functions for the stored type. */
	__PRIVATE_CXX_DEFINE_COMPARE_EXCHANGE(__T)

#ifdef __USE_KOS
	/* KOS extension: atomically advance by one element; return the OLD pointer */
	__CXX_CLASSMEMBER __pointer_type fetch_inc(memory_order __order __DFL(memory_order_seq_cst)) __CXX_NOEXCEPT {
		return (__pointer_type)(sizeof(__T) == 1
		                        ? __hybrid_atomic_fetchinc(&__m_val, __order)
		                        : __hybrid_atomic_fetchadd(&__m_val, sizeof(__T), __order));
	}

	/* KOS extension: atomically advance by one element; return the NEW pointer */
	__CXX_CLASSMEMBER __pointer_type inc_fetch(memory_order __order __DFL(memory_order_seq_cst)) __CXX_NOEXCEPT {
		return (__pointer_type)(sizeof(__T) == 1
		                        ? __hybrid_atomic_incfetch(&__m_val, __order)
		                        : __hybrid_atomic_addfetch(&__m_val, sizeof(__T), __order));
	}

	/* KOS extension: atomically back up by one element; return the OLD pointer */
	__CXX_CLASSMEMBER __pointer_type fetch_dec(memory_order __order __DFL(memory_order_seq_cst)) __CXX_NOEXCEPT {
		return (__pointer_type)(sizeof(__T) == 1
		                        ? __hybrid_atomic_fetchdec(&__m_val, __order)
		                        : __hybrid_atomic_fetchsub(&__m_val, sizeof(__T), __order));
	}

	/* KOS extension: atomically back up by one element; return the NEW pointer */
	__CXX_CLASSMEMBER __pointer_type dec_fetch(memory_order __order __DFL(memory_order_seq_cst)) __CXX_NOEXCEPT {
		return (__pointer_type)(sizeof(__T) == 1
		                        ? __hybrid_atomic_decfetch(&__m_val, __order)
		                        : __hybrid_atomic_subfetch(&__m_val, sizeof(__T), __order));
	}
#endif /* __USE_KOS */
};

/* The compare-exchange helper macros are no longer needed below this point. */
#undef __PRIVATE_CXX_DEFINE_COMPARE_EXCHANGE_STD
#undef __PRIVATE_CXX_DEFINE_COMPARE_EXCHANGE

/* Specialize `atomic' for an integral type `T' by deriving from the internal
 * `__atomic_int<T>' base (which supplies the integer-specific operations);
 * only constructors, assignment, and the conversion operator are declared
 * here, with everything else re-exported from the base type. */
#define __PRIVATE_CXX_ATOMIC_SPECIALIZE_INTEGRAL(T)                                      \
	template<> struct atomic<T>: public __NAMESPACE_INT_SYM __atomic_int<T> {            \
		typedef T __integral_type;                                                       \
		typedef __NAMESPACE_INT_SYM __atomic_int<T> __base_type;                         \
	public:                                                                              \
		__CXX_DEFAULT_CTOR_NOEXCEPT(atomic);                                             \
		__CXX_DEFAULT_DTOR_NOEXCEPT(atomic);                                             \
		__CXX_DELETE_COPY(atomic);                                                       \
		__CXX_DELETE_COPY_ASSIGN(atomic);                                                \
		__CXX_DELETE_VOLATILE_COPY_ASSIGN(atomic);                                       \
	public:                                                                              \
		__CXX_CLASSMEMBER __CXX11_CONSTEXPR atomic(__integral_type __val) __CXX_NOEXCEPT \
			: __base_type(__val) { }                                                     \
		using __base_type::operator __integral_type;                                     \
		using __base_type::operator=;                                                    \
	};
/* Instantiate the specialization for every builtin integral type. */
__PRIVATE_CXX_ATOMIC_SPECIALIZE_INTEGRAL(char)
__PRIVATE_CXX_ATOMIC_SPECIALIZE_INTEGRAL(signed char)
__PRIVATE_CXX_ATOMIC_SPECIALIZE_INTEGRAL(unsigned char)
__PRIVATE_CXX_ATOMIC_SPECIALIZE_INTEGRAL(short)
__PRIVATE_CXX_ATOMIC_SPECIALIZE_INTEGRAL(unsigned short)
__PRIVATE_CXX_ATOMIC_SPECIALIZE_INTEGRAL(int)
__PRIVATE_CXX_ATOMIC_SPECIALIZE_INTEGRAL(unsigned int)
__PRIVATE_CXX_ATOMIC_SPECIALIZE_INTEGRAL(long)
__PRIVATE_CXX_ATOMIC_SPECIALIZE_INTEGRAL(unsigned long)
#ifdef __COMPILER_HAVE_LONGLONG
__PRIVATE_CXX_ATOMIC_SPECIALIZE_INTEGRAL(long long)
__PRIVATE_CXX_ATOMIC_SPECIALIZE_INTEGRAL(unsigned long long)
#endif /* __COMPILER_HAVE_LONGLONG */
/* Character types only when they are distinct builtin types (not typedefs). */
#ifdef __native_wchar_t_defined
__PRIVATE_CXX_ATOMIC_SPECIALIZE_INTEGRAL(wchar_t)
#endif /* __native_wchar_t_defined */
#ifdef __native_char16_t_defined
__PRIVATE_CXX_ATOMIC_SPECIALIZE_INTEGRAL(char16_t)
__PRIVATE_CXX_ATOMIC_SPECIALIZE_INTEGRAL(char32_t)
#endif /* __native_char16_t_defined */
/* MSVC-style fixed-size `__intN' types when they are unique builtin types. */
#ifdef __COMPILER_INT8_IS_UNIQUE_TYPE
__PRIVATE_CXX_ATOMIC_SPECIALIZE_INTEGRAL(__int8)
__PRIVATE_CXX_ATOMIC_SPECIALIZE_INTEGRAL(unsigned __int8)
#endif /* __COMPILER_INT8_IS_UNIQUE_TYPE */
#ifdef __COMPILER_INT16_IS_UNIQUE_TYPE
__PRIVATE_CXX_ATOMIC_SPECIALIZE_INTEGRAL(__int16)
__PRIVATE_CXX_ATOMIC_SPECIALIZE_INTEGRAL(unsigned __int16)
#endif /* __COMPILER_INT16_IS_UNIQUE_TYPE */
#ifdef __COMPILER_INT32_IS_UNIQUE_TYPE
__PRIVATE_CXX_ATOMIC_SPECIALIZE_INTEGRAL(__int32)
__PRIVATE_CXX_ATOMIC_SPECIALIZE_INTEGRAL(unsigned __int32)
#endif /* __COMPILER_INT32_IS_UNIQUE_TYPE */
#ifdef __COMPILER_INT64_IS_UNIQUE_TYPE
__PRIVATE_CXX_ATOMIC_SPECIALIZE_INTEGRAL(__int64)
__PRIVATE_CXX_ATOMIC_SPECIALIZE_INTEGRAL(unsigned __int64)
#endif /* __COMPILER_INT64_IS_UNIQUE_TYPE */
#undef __PRIVATE_CXX_ATOMIC_SPECIALIZE_INTEGRAL

/* Standard typedef aliases for atomic integral types (s.a. ISO C++11 <atomic>) */
typedef atomic<__BOOL> atomic_bool;
typedef atomic<char> atomic_char;
typedef atomic<signed char> atomic_schar;
typedef atomic<unsigned char> atomic_uchar;
typedef atomic<short> atomic_short;
typedef atomic<unsigned short> atomic_ushort;
typedef atomic<int> atomic_int;
typedef atomic<unsigned int> atomic_uint;
typedef atomic<long> atomic_long;
typedef atomic<unsigned long> atomic_ulong;
#ifdef __COMPILER_HAVE_LONGLONG
typedef atomic<__LONGLONG> atomic_llong;
typedef atomic<__ULONGLONG> atomic_ullong;
#endif /* __COMPILER_HAVE_LONGLONG */
/* Character-type aliases (use the compiler-provided underlying types) */
typedef atomic<__WCHAR_TYPE__> atomic_wchar_t;
typedef atomic<__CHAR16_TYPE__> atomic_char16_t;
typedef atomic<__CHAR32_TYPE__> atomic_char32_t;
/* Aliases for the <stdint.h>-style fixed/least/fast integer types */
typedef atomic<__INTMAX_TYPE__> atomic_intmax_t;
typedef atomic<__UINTMAX_TYPE__> atomic_uintmax_t;
typedef atomic<__INT_LEAST8_TYPE__> atomic_int_least8_t;
typedef atomic<__UINT_LEAST8_TYPE__> atomic_uint_least8_t;
typedef atomic<__INT_LEAST16_TYPE__> atomic_int_least16_t;
typedef atomic<__UINT_LEAST16_TYPE__> atomic_uint_least16_t;
typedef atomic<__INT_LEAST32_TYPE__> atomic_int_least32_t;
typedef atomic<__UINT_LEAST32_TYPE__> atomic_uint_least32_t;
#ifdef __UINT_LEAST64_TYPE__
typedef atomic<__INT_LEAST64_TYPE__> atomic_int_least64_t;
typedef atomic<__UINT_LEAST64_TYPE__> atomic_uint_least64_t;
#endif /* __UINT_LEAST64_TYPE__ */
typedef atomic<__INT_FAST8_TYPE__> atomic_int_fast8_t;
typedef atomic<__UINT_FAST8_TYPE__> atomic_uint_fast8_t;
typedef atomic<__INT_FAST16_TYPE__> atomic_int_fast16_t;
typedef atomic<__UINT_FAST16_TYPE__> atomic_uint_fast16_t;
typedef atomic<__INT_FAST32_TYPE__> atomic_int_fast32_t;
typedef atomic<__UINT_FAST32_TYPE__> atomic_uint_fast32_t;
#ifdef __UINT_FAST64_TYPE__
typedef atomic<__INT_FAST64_TYPE__> atomic_int_fast64_t;
typedef atomic<__UINT_FAST64_TYPE__> atomic_uint_fast64_t;
#endif /* __UINT_FAST64_TYPE__ */
typedef atomic<__INTPTR_TYPE__> atomic_intptr_t;
typedef atomic<__UINTPTR_TYPE__> atomic_uintptr_t;
typedef atomic<__SIZE_TYPE__> atomic_size_t;
typedef atomic<__PTRDIFF_TYPE__> atomic_ptrdiff_t;

/* Static initializer for `atomic_flag' (initializes to the clear state) */
#define ATOMIC_FLAG_INIT { 0 }
/* Atomic boolean flag with test-and-set / clear operations
 * (s.a. ISO C++11 `std::atomic_flag') */
struct atomic_flag {
private:
	__BYTE_TYPE__ __m_val; /* Flag state: 0 = clear, non-zero = set */

public:
	__CXX_DEFAULT_CTOR_NOEXCEPT(atomic_flag);
	__CXX_DEFAULT_DTOR_NOEXCEPT(atomic_flag);
	__CXX_DELETE_COPY(atomic_flag);
	__CXX_DELETE_COPY_ASSIGN(atomic_flag);
	__CXX_DELETE_VOLATILE_COPY_ASSIGN(atomic_flag);

public:
	/* Extension: construct with an explicit initial state `__val' */
	__CXX_CLASSMEMBER __ATTR_ARTIFICIAL __CXX11_CONSTEXPR atomic_flag(bool __val) __CXX_NOEXCEPT
		: __m_val((__BYTE_TYPE__)__val) { }

	/* Atomically set the flag; return its PREVIOUS state */
	__CXX_CLASSMEMBER __ATTR_ARTIFICIAL bool
	(test_and_set)(memory_order __order __DFL(memory_order_seq_cst)) __CXX_NOEXCEPT {
		return !!__hybrid_atomic_xch(&__m_val, 1, __order);
	}

	__CXX_CLASSMEMBER __ATTR_ARTIFICIAL bool
	(test_and_set)(memory_order __order __DFL(memory_order_seq_cst)) volatile __CXX_NOEXCEPT {
		return !!__hybrid_atomic_xch(&__m_val, 1, __order);
	}

	/* Atomically clear the flag */
	__CXX_CLASSMEMBER __ATTR_ARTIFICIAL void
	(clear)(memory_order __order __DFL(memory_order_seq_cst)) __CXX_NOEXCEPT {
		__hybrid_atomic_store(&__m_val, 0, __order);
	}

	__CXX_CLASSMEMBER __ATTR_ARTIFICIAL void
	(clear)(memory_order __order __DFL(memory_order_seq_cst)) volatile __CXX_NOEXCEPT {
		__hybrid_atomic_store(&__m_val, 0, __order);
	}
};


/* Establish memory synchronization ordering `__order' between threads,
 * without being tied to any particular atomic object
 * (s.a. ISO C++11 `std::atomic_thread_fence').
 * Fixed: take `memory_order' (not `int') and add `__CXX_NOEXCEPT',
 * matching both the standard signature and every other function in
 * this header. */
__CXX_CLASSMEMBER __ATTR_ARTIFICIAL void
(atomic_thread_fence)(memory_order __order) __CXX_NOEXCEPT {
	__hybrid_atomic_thread_fence(__order);
}

/* Establish memory synchronization ordering `__order' between a thread
 * and a signal handler executing on that same thread (compiler fence only;
 * s.a. ISO C++11 `std::atomic_signal_fence').
 * Fixed: take `memory_order' (not `int') and add `__CXX_NOEXCEPT',
 * matching both the standard signature and every other function in
 * this header. */
__CXX_CLASSMEMBER __ATTR_ARTIFICIAL void
(atomic_signal_fence)(memory_order __order) __CXX_NOEXCEPT {
	__hybrid_atomic_signal_fence(__order);
}

/* C-compatible free-function interface for `atomic_flag': the `*_explicit'
 * variants take an explicit memory order; the plain variants use the member
 * functions' default (seq_cst) ordering. */
__CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) bool
(atomic_flag_test_and_set_explicit)(atomic_flag *__ato,
                                    memory_order __order) __CXX_NOEXCEPT {
	return __ato->test_and_set(__order);
}

__CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) bool
(atomic_flag_test_and_set_explicit)(atomic_flag volatile *__ato,
                                    memory_order __order) __CXX_NOEXCEPT {
	return __ato->test_and_set(__order);
}

__CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) void
(atomic_flag_clear_explicit)(atomic_flag *__ato,
                             memory_order __order) __CXX_NOEXCEPT {
	__ato->clear(__order);
}

__CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) void
(atomic_flag_clear_explicit)(atomic_flag volatile *__ato,
                             memory_order __order) __CXX_NOEXCEPT {
	__ato->clear(__order);
}

__CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) bool
(atomic_flag_test_and_set)(atomic_flag *__ato) __CXX_NOEXCEPT {
	return __ato->test_and_set();
}

__CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) bool
(atomic_flag_test_and_set)(atomic_flag volatile *__ato) __CXX_NOEXCEPT {
	return __ato->test_and_set();
}

__CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) void
(atomic_flag_clear)(atomic_flag *__ato) __CXX_NOEXCEPT {
	__ato->clear();
}

__CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) void
(atomic_flag_clear)(atomic_flag volatile *__ato) __CXX_NOEXCEPT {
	__ato->clear();
}

/* Free-function interface: check if operations on `*__ato' are lock-free */
template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) bool
(atomic_is_lock_free)(atomic<__T> const *__ato) __CXX_NOEXCEPT {
	return __ato->is_lock_free();
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) bool
(atomic_is_lock_free)(atomic<__T> const volatile *__ato) __CXX_NOEXCEPT {
	return __ato->is_lock_free();
}

/* Initialize `*__ato' with `__val' (implemented as a relaxed atomic store) */
template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) void
(atomic_init)(atomic<__T> *__ato, __T __val) __CXX_NOEXCEPT {
	__ato->store(__val, memory_order_relaxed);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) void
(atomic_init)(atomic<__T> volatile *__ato, __T __val) __CXX_NOEXCEPT {
	__ato->store(__val, memory_order_relaxed);
}

/* Explicit-memory-order free functions for `atomic<__T>': each simply
 * forwards to the corresponding member function (s.a. ISO C++11). */
template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) void
(atomic_store_explicit)(atomic<__T> *__ato, __T __val,
                        memory_order __order) __CXX_NOEXCEPT {
	__ato->store(__val, __order);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) void
(atomic_store_explicit)(atomic<__T> volatile *__ato, __T __val,
                        memory_order __order) __CXX_NOEXCEPT {
	__ato->store(__val, __order);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_load_explicit)(atomic<__T> const *__ato,
                       memory_order __order) __CXX_NOEXCEPT {
	return __ato->load(__order);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_load_explicit)(atomic<__T> const volatile *__ato,
                       memory_order __order) __CXX_NOEXCEPT {
	return __ato->load(__order);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_exchange_explicit)(atomic<__T> *__ato, __T __val,
                           memory_order __order) __CXX_NOEXCEPT {
	return __ato->exchange(__val, __order);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_exchange_explicit)(atomic<__T> volatile *__ato, __T __val,
                           memory_order __order) __CXX_NOEXCEPT {
	return __ato->exchange(__val, __order);
}

/* Compare-exchange: on failure, `*__exp' is updated with the actual value. */
template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) bool
(atomic_compare_exchange_weak_explicit)(atomic<__T> *__ato, __T *__exp, __T __replace,
                                        memory_order __succ, memory_order __fail) __CXX_NOEXCEPT {
	return __ato->compare_exchange_weak(*__exp, __replace, __succ, __fail);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) bool
(atomic_compare_exchange_weak_explicit)(atomic<__T> volatile *__ato, __T *__exp, __T __replace,
                                        memory_order __succ, memory_order __fail) __CXX_NOEXCEPT {
	return __ato->compare_exchange_weak(*__exp, __replace, __succ, __fail);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) bool
(atomic_compare_exchange_strong_explicit)(atomic<__T> *__ato, __T *__exp, __T __replace,
                                          memory_order __succ, memory_order __fail) __CXX_NOEXCEPT {
	return __ato->compare_exchange_strong(*__exp, __replace, __succ, __fail);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) bool
(atomic_compare_exchange_strong_explicit)(atomic<__T> volatile *__ato, __T *__exp, __T __replace,
                                          memory_order __succ, memory_order __fail) __CXX_NOEXCEPT {
	return __ato->compare_exchange_strong(*__exp, __replace, __succ, __fail);
}

/* Sequentially-consistent convenience wrappers: same as the `*_explicit'
 * functions, but always using `__ATOMIC_SEQ_CST' ordering. */
template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) void
(atomic_store)(atomic<__T> *__ato, __T __val) __CXX_NOEXCEPT {
	atomic_store_explicit(__ato, __val, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) void
(atomic_store)(atomic<__T> volatile *__ato, __T __val) __CXX_NOEXCEPT {
	atomic_store_explicit(__ato, __val, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_load)(atomic<__T> const *__ato) __CXX_NOEXCEPT {
	return atomic_load_explicit(__ato, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_load)(atomic<__T> const volatile *__ato) __CXX_NOEXCEPT {
	return atomic_load_explicit(__ato, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_exchange)(atomic<__T> *__ato, __T __val) __CXX_NOEXCEPT {
	return atomic_exchange_explicit(__ato, __val, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_exchange)(atomic<__T> volatile *__ato, __T __val) __CXX_NOEXCEPT {
	return atomic_exchange_explicit(__ato, __val, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) bool
(atomic_compare_exchange_weak)(atomic<__T> *__ato, __T *__exp, __T __replace) __CXX_NOEXCEPT {
	return atomic_compare_exchange_weak_explicit(__ato, __exp, __replace, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) bool
(atomic_compare_exchange_weak)(atomic<__T> volatile *__ato, __T *__exp, __T __replace) __CXX_NOEXCEPT {
	return atomic_compare_exchange_weak_explicit(__ato, __exp, __replace, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) bool
(atomic_compare_exchange_strong)(atomic<__T> *__ato, __T *__exp, __T __replace) __CXX_NOEXCEPT {
	return atomic_compare_exchange_strong_explicit(__ato, __exp, __replace, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) bool
(atomic_compare_exchange_strong)(atomic<__T> volatile *__ato, __T *__exp, __T __replace) __CXX_NOEXCEPT {
	return atomic_compare_exchange_strong_explicit(__ato, __exp, __replace, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}

/* Explicit-memory-order arithmetic/bitwise fetch-operations: each applies
 * the operation atomically and returns the OLD value (forwarding to the
 * corresponding member function). */
template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_fetch_add_explicit)(atomic<__T> *__ato, __T __val, memory_order __order) __CXX_NOEXCEPT {
	return __ato->fetch_add(__val, __order);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_fetch_add_explicit)(atomic<__T> volatile *__ato, __T __val,
                            memory_order __order) __CXX_NOEXCEPT {
	return __ato->fetch_add(__val, __order);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_fetch_sub_explicit)(atomic<__T> *__ato, __T __val,
                            memory_order __order) __CXX_NOEXCEPT {
	return __ato->fetch_sub(__val, __order);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_fetch_sub_explicit)(atomic<__T> volatile *__ato, __T __val,
                            memory_order __order) __CXX_NOEXCEPT {
	return __ato->fetch_sub(__val, __order);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_fetch_and_explicit)(atomic<__T> *__ato, __T __val,
                            memory_order __order) __CXX_NOEXCEPT {
	return __ato->fetch_and(__val, __order);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_fetch_and_explicit)(atomic<__T> volatile *__ato, __T __val,
                            memory_order __order) __CXX_NOEXCEPT {
	return __ato->fetch_and(__val, __order);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_fetch_or_explicit)(atomic<__T> *__ato, __T __val,
                           memory_order __order) __CXX_NOEXCEPT {
	return __ato->fetch_or(__val, __order);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_fetch_or_explicit)(atomic<__T> volatile *__ato, __T __val,
                           memory_order __order) __CXX_NOEXCEPT {
	return __ato->fetch_or(__val, __order);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_fetch_xor_explicit)(atomic<__T> *__ato, __T __val,
                            memory_order __order) __CXX_NOEXCEPT {
	return __ato->fetch_xor(__val, __order);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_fetch_xor_explicit)(atomic<__T> volatile *__ato, __T __val,
                            memory_order __order) __CXX_NOEXCEPT {
	return __ato->fetch_xor(__val, __order);
}

/* Sequentially-consistent fetch-operation wrappers: same as the
 * `atomic_fetch_*_explicit' functions, but always using seq_cst ordering;
 * each returns the OLD value. */
template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_fetch_add)(atomic<__T> *__ato, __T __val) __CXX_NOEXCEPT {
	return atomic_fetch_add_explicit(__ato, __val, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_fetch_add)(atomic<__T> volatile *__ato, __T __val) __CXX_NOEXCEPT {
	return atomic_fetch_add_explicit(__ato, __val, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_fetch_sub)(atomic<__T> *__ato, __T __val) __CXX_NOEXCEPT {
	return atomic_fetch_sub_explicit(__ato, __val, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_fetch_sub)(atomic<__T> volatile *__ato, __T __val) __CXX_NOEXCEPT {
	return atomic_fetch_sub_explicit(__ato, __val, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_fetch_and)(atomic<__T> *__ato, __T __val) __CXX_NOEXCEPT {
	return atomic_fetch_and_explicit(__ato, __val, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_fetch_and)(atomic<__T> volatile *__ato, __T __val) __CXX_NOEXCEPT {
	return atomic_fetch_and_explicit(__ato, __val, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_fetch_or)(atomic<__T> *__ato, __T __val) __CXX_NOEXCEPT {
	return atomic_fetch_or_explicit(__ato, __val, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_fetch_or)(atomic<__T> volatile *__ato, __T __val) __CXX_NOEXCEPT {
	return atomic_fetch_or_explicit(__ato, __val, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_fetch_xor)(atomic<__T> *__ato, __T __val) __CXX_NOEXCEPT {
	return atomic_fetch_xor_explicit(__ato, __val, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_fetch_xor)(atomic<__T> volatile *__ato, __T __val) __CXX_NOEXCEPT {
	return atomic_fetch_xor_explicit(__ato, __val, __ATOMIC_SEQ_CST);
}

/* Pointer overloads of the fetch-add/fetch-sub free functions: `__val' is
 * an ELEMENT count; scaling by `sizeof(__T)' happens inside the member
 * functions of `atomic<__T *>'. Each returns the OLD pointer. */
template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T *
(atomic_fetch_add_explicit)(atomic<__T *> *__ato, __PTRDIFF_TYPE__ __val,
                            memory_order __order) __CXX_NOEXCEPT {
	return __ato->fetch_add(__val, __order);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T *
(atomic_fetch_add_explicit)(atomic<__T *> volatile *__ato, __PTRDIFF_TYPE__ __val,
                            memory_order __order) __CXX_NOEXCEPT {
	return __ato->fetch_add(__val, __order);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T *
(atomic_fetch_add)(atomic<__T *> volatile *__ato, __PTRDIFF_TYPE__ __val) __CXX_NOEXCEPT {
	return __ato->fetch_add(__val);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T *
(atomic_fetch_add)(atomic<__T *> *__ato, __PTRDIFF_TYPE__ __val) __CXX_NOEXCEPT {
	return __ato->fetch_add(__val);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T *
(atomic_fetch_sub_explicit)(atomic<__T *> volatile *__ato, __PTRDIFF_TYPE__ __val,
                            memory_order __order) __CXX_NOEXCEPT {
	return __ato->fetch_sub(__val, __order);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T *
(atomic_fetch_sub_explicit)(atomic<__T *> *__ato, __PTRDIFF_TYPE__ __val,
                            memory_order __order) __CXX_NOEXCEPT {
	return __ato->fetch_sub(__val, __order);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T *
(atomic_fetch_sub)(atomic<__T *> volatile *__ato, __PTRDIFF_TYPE__ __val) __CXX_NOEXCEPT {
	return __ato->fetch_sub(__val);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T *
(atomic_fetch_sub)(atomic<__T *> *__ato, __PTRDIFF_TYPE__ __val) __CXX_NOEXCEPT {
	return __ato->fetch_sub(__val);
}

#ifdef __USE_KOS
template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) bool
(atomic_cmpxch_explicit)(atomic<__T> *__ato, __T __oldv, __T __newv,
                         memory_order __succ, memory_order __fail) __CXX_NOEXCEPT {
	return __ato->cmpxch(__oldv, __newv, __succ, __fail);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) bool
(atomic_cmpxch_explicit)(atomic<__T> volatile *__ato, __T __oldv, __T __newv,
                         memory_order __succ, memory_order __fail) __CXX_NOEXCEPT {
	return __ato->cmpxch(__oldv, __newv, __succ, __fail);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) bool
(atomic_cmpxch)(atomic<__T> *__ato, __T __oldv, __T __newv) __CXX_NOEXCEPT {
	return __ato->cmpxch(__oldv, __newv, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) bool
(atomic_cmpxch)(atomic<__T> volatile *__ato, __T __oldv, __T __newv) __CXX_NOEXCEPT {
	return __ato->cmpxch(__oldv, __newv, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_cmpxch_val_explicit)(atomic<__T> *__ato, __T __oldv, __T __newv,
                             memory_order __succ, memory_order __fail) __CXX_NOEXCEPT {
	return __ato->cmpxch_val(__oldv, __newv, __succ, __fail);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_cmpxch_val_explicit)(atomic<__T> volatile *__ato, __T __oldv, __T __newv,
                             memory_order __succ, memory_order __fail) __CXX_NOEXCEPT {
	/* Volatile variant: forward to the `cmpxch_val' member with explicit
	 * success/failure orderings, returning the previously contained value. */
	__T __prev = __ato->cmpxch_val(__oldv, __newv, __succ, __fail);
	return __prev;
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_cmpxch_val)(atomic<__T> *__ato, __T __oldv, __T __newv) __CXX_NOEXCEPT {
	/* Same as `atomic_cmpxch_val_explicit()' with seq_cst ordering for both paths. */
	return atomic_cmpxch_val_explicit(__ato, __oldv, __newv, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_cmpxch_val)(atomic<__T> volatile *__ato, __T __oldv, __T __newv) __CXX_NOEXCEPT {
	/* Same as `atomic_cmpxch_val_explicit()' with seq_cst ordering for both paths. */
	return atomic_cmpxch_val_explicit(__ato, __oldv, __newv, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) bool
(atomic_cmpxch_weak_explicit)(atomic<__T> *__ato, __T __oldv, __T __newv,
                              memory_order __succ, memory_order __fail) __CXX_NOEXCEPT {
	/* KOS extension: forward to the `cmpxch_weak' member with explicit
	 * success/failure orderings; `true' indicates the exchange happened. */
	bool __ok = __ato->cmpxch_weak(__oldv, __newv, __succ, __fail);
	return __ok;
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) bool
(atomic_cmpxch_weak_explicit)(atomic<__T> volatile *__ato, __T __oldv, __T __newv,
                              memory_order __succ, memory_order __fail) __CXX_NOEXCEPT {
	/* Volatile variant: forward to the `cmpxch_weak' member with explicit
	 * success/failure orderings; `true' indicates the exchange happened. */
	bool __ok = __ato->cmpxch_weak(__oldv, __newv, __succ, __fail);
	return __ok;
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) bool
(atomic_cmpxch_weak)(atomic<__T> *__ato, __T __oldv, __T __newv) __CXX_NOEXCEPT {
	/* Same as `atomic_cmpxch_weak_explicit()' with seq_cst ordering for both paths. */
	return atomic_cmpxch_weak_explicit(__ato, __oldv, __newv, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) bool
(atomic_cmpxch_weak)(atomic<__T> volatile *__ato, __T __oldv, __T __newv) __CXX_NOEXCEPT {
	/* Same as `atomic_cmpxch_weak_explicit()' with seq_cst ordering for both paths. */
	return atomic_cmpxch_weak_explicit(__ato, __oldv, __newv, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_cmpxch_val_weak_explicit)(atomic<__T> *__ato, __T __oldv, __T __newv,
                                  memory_order __succ, memory_order __fail) __CXX_NOEXCEPT {
	/* KOS extension: forward to the `cmpxch_val_weak' member with explicit
	 * success/failure orderings, returning the previously contained value. */
	__T __prev = __ato->cmpxch_val_weak(__oldv, __newv, __succ, __fail);
	return __prev;
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_cmpxch_val_weak_explicit)(atomic<__T> volatile *__ato, __T __oldv, __T __newv,
                                  memory_order __succ, memory_order __fail) __CXX_NOEXCEPT {
	/* Volatile variant: forward to the `cmpxch_val_weak' member with explicit
	 * success/failure orderings, returning the previously contained value. */
	__T __prev = __ato->cmpxch_val_weak(__oldv, __newv, __succ, __fail);
	return __prev;
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_cmpxch_val_weak)(atomic<__T> *__ato, __T __oldv, __T __newv) __CXX_NOEXCEPT {
	/* Same as `atomic_cmpxch_val_weak_explicit()' with seq_cst ordering for both paths. */
	return atomic_cmpxch_val_weak_explicit(__ato, __oldv, __newv, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_cmpxch_val_weak)(atomic<__T> volatile *__ato, __T __oldv, __T __newv) __CXX_NOEXCEPT {
	/* Same as `atomic_cmpxch_val_weak_explicit()' with seq_cst ordering for both paths. */
	return atomic_cmpxch_val_weak_explicit(__ato, __oldv, __newv, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_fetch_nand_explicit)(atomic<__T> *__ato, __T __val,
                             memory_order __order) __CXX_NOEXCEPT {
	/* Forward to the `fetch_nand' member with the caller-given ordering,
	 * returning the previously contained value. */
	__T __prev = __ato->fetch_nand(__val, __order);
	return __prev;
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_fetch_nand_explicit)(atomic<__T> volatile *__ato, __T __val,
                             memory_order __order) __CXX_NOEXCEPT {
	/* Volatile variant: forward to the `fetch_nand' member with the
	 * caller-given ordering, returning the previously contained value. */
	__T __prev = __ato->fetch_nand(__val, __order);
	return __prev;
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_fetch_nand)(atomic<__T> *__ato, __T __val) __CXX_NOEXCEPT {
	/* Same as `atomic_fetch_nand_explicit()' with seq_cst ordering:
	 * invoke the `fetch_nand' member directly. */
	return __ato->fetch_nand(__val, __ATOMIC_SEQ_CST);
}

template<class __T> __CXX_CLASSMEMBER __ATTR_ARTIFICIAL __ATTR_NONNULL((1)) __T
(atomic_fetch_nand)(atomic<__T> volatile *__ato, __T __val) __CXX_NOEXCEPT {
	/* Same as `atomic_fetch_nand_explicit()' with seq_cst ordering:
	 * invoke the `fetch_nand' member directly. */
	return __ato->fetch_nand(__val, __ATOMIC_SEQ_CST);
}
#endif /* __USE_KOS */

__NAMESPACE_STD_END
__CXXDECL_END

#ifdef __COMPILER_HAVE_PRAGMA_PUSHMACRO
#pragma pop_macro("atomic")
#endif /* __COMPILER_HAVE_PRAGMA_PUSHMACRO */

#endif /* !_CXX_ATOMIC */
