Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
71 changes: 61 additions & 10 deletions stl/inc/atomic
Original file line number Diff line number Diff line change
Expand Up @@ -1158,6 +1158,24 @@ struct _Atomic_integral<_Ty, 8> : _Atomic_storage<_Ty> { // atomic integral oper
#endif // _M_IX86
};

#if 1 // TRANSITION, ABI
// Lock-free iff the object size is a power of two no larger than 8 bytes.
template <size_t _TypeSize>
_INLINE_VAR constexpr bool _Is_always_lock_free = _TypeSize <= 8 && (_TypeSize & (_TypeSize - 1)) == 0;
#else // ^^^ don't break ABI / break ABI vvv
#if _ATOMIC_HAS_DCAS
// With a double-width CAS available, anything up to two pointers wide is lock-free.
template <size_t _TypeSize>
_INLINE_VAR constexpr bool _Is_always_lock_free = _TypeSize <= 2 * sizeof(void*);
#else // ^^^ _ATOMIC_HAS_DCAS / !_ATOMIC_HAS_DCAS vvv
template <size_t _TypeSize>
_INLINE_VAR constexpr bool _Is_always_lock_free = _TypeSize <= sizeof(void*);
#endif // _ATOMIC_HAS_DCAS
#endif // break ABI

// _Deprecate_non_lock_free_volatile<_Ty> is always true; its value is never interesting.
// Its purpose is that when _Ty is NOT always lock-free, the partial specialization below is
// selected, and referencing it (e.g. from a static_assert in a volatile overload) emits the
// warning attached by _CXX20_DEPRECATE_VOLATILE.
template <class _Ty, bool _Is_lock_free = _Is_always_lock_free<sizeof(_Ty)>>
_INLINE_VAR constexpr bool _Deprecate_non_lock_free_volatile = true;

template <class _Ty>
_CXX20_DEPRECATE_VOLATILE _INLINE_VAR constexpr bool _Deprecate_non_lock_free_volatile<_Ty, false> = true;

// STRUCT TEMPLATE _Atomic_integral_facade
template <class _Ty>
Expand All @@ -1173,6 +1191,8 @@ struct _Atomic_integral_facade : _Atomic_integral<_Ty> {
using _Base::_Base;
#endif // ^^^ no workaround ^^^

// _Deprecate_non_lock_free_volatile is unnecessary here.

// note: const_cast-ing away volatile is safe because all our intrinsics add volatile back on.
// We make the primary functions non-volatile for better debug codegen, as non-volatile atomics
// are far more common than volatile ones.
Expand Down Expand Up @@ -1313,6 +1333,8 @@ struct _Atomic_floating : _Atomic_storage<_Ty> {
return _Temp;
}

// _Deprecate_non_lock_free_volatile is unnecessary here.

// note: const_cast-ing away volatile is safe because all our intrinsics add volatile back on.
// We make the primary functions non-volatile for better debug codegen, as non-volatile atomics
// are far more common than volatile ones.
Expand Down Expand Up @@ -1377,6 +1399,8 @@ struct _Atomic_pointer : _Atomic_storage<_Ty> {
return reinterpret_cast<_Ty>(_Result);
}

// _Deprecate_non_lock_free_volatile is unnecessary here.

_Ty fetch_add(const ptrdiff_t _Diff) volatile noexcept {
return const_cast<_Atomic_pointer*>(this)->fetch_add(_Diff);
}
Expand Down Expand Up @@ -1495,11 +1519,11 @@ public:
atomic(const atomic&) = delete;
atomic& operator=(const atomic&) = delete;

#if 1 // TRANSITION, ABI
#if _HAS_CXX17
static constexpr bool is_always_lock_free = sizeof(_Ty) <= 8 && (sizeof(_Ty) & sizeof(_Ty) - 1) == 0;
static constexpr bool is_always_lock_free = _Is_always_lock_free<sizeof(_Ty)>;
#endif // _HAS_CXX17

#if 1 // TRANSITION, ABI
_NODISCARD bool is_lock_free() const volatile noexcept {
constexpr bool _Result = sizeof(_Ty) <= 8 && (sizeof(_Ty) & sizeof(_Ty) - 1) == 0;
return _Result;
Expand All @@ -1517,14 +1541,6 @@ public:

#else // ^^^ don't break ABI / break ABI vvv

#if _HAS_CXX17
#if _ATOMIC_HAS_DCAS
static constexpr bool is_always_lock_free = sizeof(_Ty) <= 2 * sizeof(void*);
#else // ^^^ _ATOMIC_HAS_DCAS / !_ATOMIC_HAS_DCAS vvv
static constexpr bool is_always_lock_free = sizeof(_Ty) <= sizeof(void*);
#endif // _ATOMIC_HAS_DCAS
#endif // _HAS_CXX17

_NODISCARD bool is_lock_free() const volatile noexcept {
#if _ATOMIC_HAS_DCAS
return sizeof(_Ty) <= 2 * sizeof(void*);
Expand All @@ -1539,6 +1555,7 @@ public:
}

_Ty operator=(const _Ty _Value) volatile noexcept {
static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
this->store(_Value);
return _Value;
}
Expand All @@ -1555,42 +1572,51 @@ public:
// non-volatile should result in better debug codegen.
using _Base::store;
void store(const _Ty _Value) volatile noexcept {
static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
const_cast<atomic*>(this)->_Base::store(_Value);
}

void store(const _Ty _Value, const memory_order _Order) volatile noexcept {
static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
const_cast<atomic*>(this)->_Base::store(_Value, _Order);
}

using _Base::load;
_NODISCARD _Ty load() const volatile noexcept {
static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
return const_cast<const atomic*>(this)->_Base::load();
}

_NODISCARD _Ty load(const memory_order _Order) const volatile noexcept {
static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
return const_cast<const atomic*>(this)->_Base::load(_Order);
}

using _Base::exchange;
_Ty exchange(const _Ty _Value) volatile noexcept {
static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
return const_cast<atomic*>(this)->_Base::exchange(_Value);
}

_Ty exchange(const _Ty _Value, const memory_order _Order) volatile noexcept {
static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
return const_cast<atomic*>(this)->_Base::exchange(_Value, _Order);
}

using _Base::compare_exchange_strong;
bool compare_exchange_strong(_Ty& _Expected, const _Ty _Desired) volatile noexcept {
static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
return const_cast<atomic*>(this)->_Base::compare_exchange_strong(_Expected, _Desired);
}

bool compare_exchange_strong(_Ty& _Expected, const _Ty _Desired, const memory_order _Order) volatile noexcept {
static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
return const_cast<atomic*>(this)->_Base::compare_exchange_strong(_Expected, _Desired, _Order);
}

bool compare_exchange_strong(_Ty& _Expected, const _Ty _Desired, const memory_order _Success,
const memory_order _Failure) volatile noexcept {
static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
return this->compare_exchange_strong(_Expected, _Desired, _Combine_cas_memory_orders(_Success, _Failure));
}

Expand All @@ -1601,6 +1627,7 @@ public:

bool compare_exchange_weak(_Ty& _Expected, const _Ty _Desired) volatile noexcept {
// we have no weak CAS intrinsics, even on ARM32/ARM64, so fall back to strong
static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
return this->compare_exchange_strong(_Expected, _Desired);
}

Expand All @@ -1609,6 +1636,7 @@ public:
}

bool compare_exchange_weak(_Ty& _Expected, const _Ty _Desired, const memory_order _Order) volatile noexcept {
static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
return this->compare_exchange_strong(_Expected, _Desired, _Order);
}

Expand All @@ -1618,6 +1646,7 @@ public:

bool compare_exchange_weak(_Ty& _Expected, const _Ty _Desired, const memory_order _Success,
const memory_order _Failure) volatile noexcept {
static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
return this->compare_exchange_strong(_Expected, _Desired, _Combine_cas_memory_orders(_Success, _Failure));
}

Expand All @@ -1627,6 +1656,7 @@ public:
}

operator _Ty() const volatile noexcept {
static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
return this->load();
}

Expand Down Expand Up @@ -1666,11 +1696,13 @@ template <class _Ty>
_CXX20_DEPRECATE_ATOMIC_INIT void atomic_init(
volatile atomic<_Ty>* const _Mem, const typename atomic<_Ty>::value_type _Value) noexcept {
// NB: respecting volatility here appears unimplementable
static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
_STD atomic_init(const_cast<atomic<_Ty>*>(_Mem), _Value);
}

// Nonmember store into a volatile atomic (seq_cst ordering).
// The static_assert always passes; instantiating it warns if atomic<_Ty> is not lock-free.
// _Identity_t keeps _Value out of template argument deduction — TODO confirm against its definition.
template <class _Ty>
void atomic_store(volatile atomic<_Ty>* const _Mem, const _Identity_t<_Ty> _Value) noexcept {
    static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
    _Mem->store(_Value);
}

Expand All @@ -1682,6 +1714,7 @@ void atomic_store(atomic<_Ty>* const _Mem, const _Identity_t<_Ty> _Value) noexce
// Nonmember store into a volatile atomic with an explicit memory order.
// The static_assert always passes; instantiating it warns if atomic<_Ty> is not lock-free.
template <class _Ty>
void atomic_store_explicit(
    volatile atomic<_Ty>* const _Mem, const _Identity_t<_Ty> _Value, const memory_order _Order) noexcept {
    static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
    _Mem->store(_Value, _Order);
}

Expand All @@ -1692,6 +1725,7 @@ void atomic_store_explicit(atomic<_Ty>* const _Mem, const _Identity_t<_Ty> _Valu

// Nonmember load from a volatile atomic (seq_cst ordering).
// The static_assert always passes; instantiating it warns if atomic<_Ty> is not lock-free.
template <class _Ty>
_NODISCARD _Ty atomic_load(const volatile atomic<_Ty>* const _Mem) noexcept {
    static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
    return _Mem->load();
}

Expand All @@ -1702,6 +1736,7 @@ _NODISCARD _Ty atomic_load(const atomic<_Ty>* const _Mem) noexcept {

// Nonmember load from a volatile atomic with an explicit memory order.
// The static_assert always passes; instantiating it warns if atomic<_Ty> is not lock-free.
template <class _Ty>
_NODISCARD _Ty atomic_load_explicit(const volatile atomic<_Ty>* const _Mem, const memory_order _Order) noexcept {
    static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
    return _Mem->load(_Order);
}

Expand All @@ -1712,6 +1747,7 @@ _NODISCARD _Ty atomic_load_explicit(const atomic<_Ty>* const _Mem, const memory_

// Nonmember exchange on a volatile atomic; returns the previous value.
// The static_assert always passes; instantiating it warns if atomic<_Ty> is not lock-free.
template <class _Ty>
_Ty atomic_exchange(volatile atomic<_Ty>* const _Mem, const _Identity_t<_Ty> _Value) noexcept {
    static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
    return _Mem->exchange(_Value);
}

Expand All @@ -1723,6 +1759,7 @@ _Ty atomic_exchange(atomic<_Ty>* const _Mem, const _Identity_t<_Ty> _Value) noex
// Nonmember exchange on a volatile atomic with an explicit memory order; returns the previous value.
// The static_assert always passes; instantiating it warns if atomic<_Ty> is not lock-free.
template <class _Ty>
_Ty atomic_exchange_explicit(
    volatile atomic<_Ty>* const _Mem, const _Identity_t<_Ty> _Value, const memory_order _Order) noexcept {
    static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
    return _Mem->exchange(_Value, _Order);
}

Expand All @@ -1735,6 +1772,7 @@ _Ty atomic_exchange_explicit(
// Nonmember strong CAS on a volatile atomic; on failure *_Expected is updated with the observed value.
// The static_assert always passes; instantiating it warns if atomic<_Ty> is not lock-free.
template <class _Ty>
bool atomic_compare_exchange_strong(
    volatile atomic<_Ty>* const _Mem, _Identity_t<_Ty>* const _Expected, const _Identity_t<_Ty> _Desired) noexcept {
    static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
    return _Mem->compare_exchange_strong(*_Expected, _Desired);
}

Expand All @@ -1747,6 +1785,7 @@ bool atomic_compare_exchange_strong(
// Nonmember strong CAS with separate success/failure orders, folded into one order
// via _Combine_cas_memory_orders before dispatching to the member function.
// The static_assert always passes; instantiating it warns if atomic<_Ty> is not lock-free.
template <class _Ty>
bool atomic_compare_exchange_strong_explicit(volatile atomic<_Ty>* const _Mem, _Identity_t<_Ty>* const _Expected,
    const _Identity_t<_Ty> _Desired, const memory_order _Success, const memory_order _Failure) noexcept {
    static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
    return _Mem->compare_exchange_strong(*_Expected, _Desired, _Combine_cas_memory_orders(_Success, _Failure));
}

Expand All @@ -1759,6 +1798,7 @@ bool atomic_compare_exchange_strong_explicit(atomic<_Ty>* const _Mem, _Identity_
// Nonmember weak CAS on a volatile atomic; this implementation has no weak CAS
// intrinsics, so it forwards to the strong member CAS (spurious failure never occurs).
// The static_assert always passes; instantiating it warns if atomic<_Ty> is not lock-free.
template <class _Ty>
bool atomic_compare_exchange_weak(
    volatile atomic<_Ty>* const _Mem, _Identity_t<_Ty>* const _Expected, const _Identity_t<_Ty> _Desired) noexcept {
    static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
    return _Mem->compare_exchange_strong(*_Expected, _Desired);
}

Expand All @@ -1771,6 +1811,7 @@ bool atomic_compare_exchange_weak(
// Nonmember weak CAS with separate success/failure orders; forwards to the strong
// member CAS (no weak CAS intrinsics) with the orders combined.
// The static_assert always passes; instantiating it warns if atomic<_Ty> is not lock-free.
template <class _Ty>
bool atomic_compare_exchange_weak_explicit(volatile atomic<_Ty>* const _Mem, _Identity_t<_Ty>* const _Expected,
    const _Identity_t<_Ty> _Desired, const memory_order _Success, const memory_order _Failure) noexcept {
    static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
    return _Mem->compare_exchange_strong(*_Expected, _Desired, _Combine_cas_memory_orders(_Success, _Failure));
}

Expand All @@ -1782,6 +1823,7 @@ bool atomic_compare_exchange_weak_explicit(atomic<_Ty>* const _Mem, _Identity_t<

// Nonmember fetch_add on a volatile atomic; returns the previous value.
// The static_assert always passes; instantiating it warns if atomic<_Ty> is not lock-free.
template <class _Ty>
_Ty atomic_fetch_add(volatile atomic<_Ty>* _Mem, const typename atomic<_Ty>::difference_type _Value) noexcept {
    static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
    return _Mem->fetch_add(_Value);
}

Expand All @@ -1793,6 +1835,7 @@ _Ty atomic_fetch_add(atomic<_Ty>* _Mem, const typename atomic<_Ty>::difference_t
// Nonmember fetch_add on a volatile atomic with an explicit memory order; returns the previous value.
// The static_assert always passes; instantiating it warns if atomic<_Ty> is not lock-free.
template <class _Ty>
_Ty atomic_fetch_add_explicit(volatile atomic<_Ty>* _Mem, const typename atomic<_Ty>::difference_type _Value,
    const memory_order _Order) noexcept {
    static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
    return _Mem->fetch_add(_Value, _Order);
}

Expand All @@ -1804,6 +1847,7 @@ _Ty atomic_fetch_add_explicit(

// Nonmember fetch_sub on a volatile atomic; returns the previous value.
// The static_assert always passes; instantiating it warns if atomic<_Ty> is not lock-free.
template <class _Ty>
_Ty atomic_fetch_sub(volatile atomic<_Ty>* _Mem, const typename atomic<_Ty>::difference_type _Value) noexcept {
    static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
    return _Mem->fetch_sub(_Value);
}

Expand All @@ -1815,6 +1859,7 @@ _Ty atomic_fetch_sub(atomic<_Ty>* _Mem, const typename atomic<_Ty>::difference_t
// Nonmember fetch_sub on a volatile atomic with an explicit memory order; returns the previous value.
// The static_assert always passes; instantiating it warns if atomic<_Ty> is not lock-free.
template <class _Ty>
_Ty atomic_fetch_sub_explicit(volatile atomic<_Ty>* _Mem, const typename atomic<_Ty>::difference_type _Value,
    const memory_order _Order) noexcept {
    static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
    return _Mem->fetch_sub(_Value, _Order);
}

Expand All @@ -1826,6 +1871,7 @@ _Ty atomic_fetch_sub_explicit(

// Nonmember fetch_and on a volatile atomic; returns the previous value.
// The static_assert always passes; instantiating it warns if atomic<_Ty> is not lock-free.
template <class _Ty>
_Ty atomic_fetch_and(volatile atomic<_Ty>* _Mem, const typename atomic<_Ty>::value_type _Value) noexcept {
    static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
    return _Mem->fetch_and(_Value);
}

Expand All @@ -1837,6 +1883,7 @@ _Ty atomic_fetch_and(atomic<_Ty>* _Mem, const typename atomic<_Ty>::value_type _
// Nonmember fetch_and on a volatile atomic with an explicit memory order; returns the previous value.
// The static_assert always passes; instantiating it warns if atomic<_Ty> is not lock-free.
template <class _Ty>
_Ty atomic_fetch_and_explicit(
    volatile atomic<_Ty>* _Mem, const typename atomic<_Ty>::value_type _Value, const memory_order _Order) noexcept {
    static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
    return _Mem->fetch_and(_Value, _Order);
}

Expand All @@ -1848,6 +1895,7 @@ _Ty atomic_fetch_and_explicit(

// Nonmember fetch_or on a volatile atomic; returns the previous value.
// The static_assert always passes; instantiating it warns if atomic<_Ty> is not lock-free.
template <class _Ty>
_Ty atomic_fetch_or(volatile atomic<_Ty>* _Mem, const typename atomic<_Ty>::value_type _Value) noexcept {
    static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
    return _Mem->fetch_or(_Value);
}

Expand All @@ -1859,6 +1907,7 @@ _Ty atomic_fetch_or(atomic<_Ty>* _Mem, const typename atomic<_Ty>::value_type _V
// Nonmember fetch_or on a volatile atomic with an explicit memory order; returns the previous value.
// The static_assert always passes; instantiating it warns if atomic<_Ty> is not lock-free.
template <class _Ty>
_Ty atomic_fetch_or_explicit(
    volatile atomic<_Ty>* _Mem, const typename atomic<_Ty>::value_type _Value, const memory_order _Order) noexcept {
    static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
    return _Mem->fetch_or(_Value, _Order);
}

Expand All @@ -1870,6 +1919,7 @@ _Ty atomic_fetch_or_explicit(

// Nonmember fetch_xor on a volatile atomic; returns the previous value.
// The static_assert always passes; instantiating it warns if atomic<_Ty> is not lock-free.
template <class _Ty>
_Ty atomic_fetch_xor(volatile atomic<_Ty>* _Mem, const typename atomic<_Ty>::value_type _Value) noexcept {
    static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
    return _Mem->fetch_xor(_Value);
}

Expand All @@ -1881,6 +1931,7 @@ _Ty atomic_fetch_xor(atomic<_Ty>* _Mem, const typename atomic<_Ty>::value_type _
// Nonmember fetch_xor on a volatile atomic with an explicit memory order; returns the previous value.
// The static_assert always passes; instantiating it warns if atomic<_Ty> is not lock-free.
template <class _Ty>
_Ty atomic_fetch_xor_explicit(
    volatile atomic<_Ty>* _Mem, const typename atomic<_Ty>::value_type _Value, const memory_order _Order) noexcept {
    static_assert(_Deprecate_non_lock_free_volatile<_Ty>, "Never fails");
    return _Mem->fetch_xor(_Value, _Order);
}

Expand Down
11 changes: 7 additions & 4 deletions stl/inc/utility
Original file line number Diff line number Diff line change
Expand Up @@ -436,11 +436,12 @@ struct tuple_size<const _Tuple> : _Tuple_size_sfinae<_Tuple> { // size of const
};

template <class _Tuple>
struct tuple_size<volatile _Tuple> : _Tuple_size_sfinae<_Tuple> { // size of volatile tuple
struct _CXX20_DEPRECATE_VOLATILE tuple_size<volatile _Tuple> : _Tuple_size_sfinae<_Tuple> { // size of volatile tuple
};

template <class _Tuple>
struct tuple_size<const volatile _Tuple> : _Tuple_size_sfinae<_Tuple> { // size of const volatile tuple
struct _CXX20_DEPRECATE_VOLATILE tuple_size<const volatile _Tuple>
: _Tuple_size_sfinae<_Tuple> { // size of const volatile tuple
};

template <class _Ty>
Expand All @@ -456,13 +457,15 @@ struct _MSVC_KNOWN_SEMANTICS tuple_element<_Index, const _Tuple> : tuple_element
};

template <size_t _Index, class _Tuple>
struct _MSVC_KNOWN_SEMANTICS tuple_element<_Index, volatile _Tuple> : tuple_element<_Index, _Tuple> {
struct _CXX20_DEPRECATE_VOLATILE _MSVC_KNOWN_SEMANTICS tuple_element<_Index, volatile _Tuple>
: tuple_element<_Index, _Tuple> {
using _Mybase = tuple_element<_Index, _Tuple>;
using type = add_volatile_t<typename _Mybase::type>;
};

template <size_t _Index, class _Tuple>
struct _MSVC_KNOWN_SEMANTICS tuple_element<_Index, const volatile _Tuple> : tuple_element<_Index, _Tuple> {
struct _CXX20_DEPRECATE_VOLATILE _MSVC_KNOWN_SEMANTICS tuple_element<_Index, const volatile _Tuple>
: tuple_element<_Index, _Tuple> {
using _Mybase = tuple_element<_Index, _Tuple>;
using type = add_cv_t<typename _Mybase::type>;
};
Expand Down
8 changes: 4 additions & 4 deletions stl/inc/variant
Original file line number Diff line number Diff line change
Expand Up @@ -381,9 +381,9 @@ struct variant_size; // undefined
template <class _Ty>
struct variant_size<const _Ty> : variant_size<_Ty>::type {};
template <class _Ty>
struct variant_size<volatile _Ty> : variant_size<_Ty>::type {};
struct _CXX20_DEPRECATE_VOLATILE variant_size<volatile _Ty> : variant_size<_Ty>::type {};
template <class _Ty>
struct variant_size<const volatile _Ty> : variant_size<_Ty>::type {};
struct _CXX20_DEPRECATE_VOLATILE variant_size<const volatile _Ty> : variant_size<_Ty>::type {};
template <class _Ty>
inline constexpr size_t variant_size_v = variant_size<_Ty>::value;

Expand All @@ -399,11 +399,11 @@ struct variant_alternative<_Idx, const _Ty> {
using type = add_const_t<variant_alternative_t<_Idx, _Ty>>;
};
template <size_t _Idx, class _Ty>
struct variant_alternative<_Idx, volatile _Ty> {
struct _CXX20_DEPRECATE_VOLATILE variant_alternative<_Idx, volatile _Ty> {
using type = add_volatile_t<variant_alternative_t<_Idx, _Ty>>;
};
template <size_t _Idx, class _Ty>
struct variant_alternative<_Idx, const volatile _Ty> {
struct _CXX20_DEPRECATE_VOLATILE variant_alternative<_Idx, const volatile _Ty> {
using type = add_cv_t<variant_alternative_t<_Idx, _Ty>>;
};
template <size_t _Idx, class... _Types>
Expand Down
Loading