Greetings,

This patch makes memory_order in <atomic> a scoped enum, *only* for C++20 and later.
It was built and tested on x86_64-linux. OK to commit?
Index: include/bits/atomic_base.h =================================================================== --- include/bits/atomic_base.h (revision 268246) +++ include/bits/atomic_base.h (working copy) @@ -52,6 +52,24 @@ */ /// Enumeration for memory_order +#if __cplusplus > 201703L + enum class memory_order : int + { + relaxed, + consume, + acquire, + release, + acq_rel, + seq_cst + }; + + inline constexpr memory_order memory_order_relaxed = memory_order::relaxed; + inline constexpr memory_order memory_order_consume = memory_order::consume; + inline constexpr memory_order memory_order_acquire = memory_order::acquire; + inline constexpr memory_order memory_order_release = memory_order::release; + inline constexpr memory_order memory_order_acq_rel = memory_order::acq_rel; + inline constexpr memory_order memory_order_seq_cst = memory_order::seq_cst; +#else typedef enum memory_order { memory_order_relaxed, @@ -61,6 +79,7 @@ memory_order_acq_rel, memory_order_seq_cst } memory_order; +#endif enum __memory_order_modifier { @@ -73,13 +92,13 @@ constexpr memory_order operator|(memory_order __m, __memory_order_modifier __mod) { - return memory_order(__m | int(__mod)); + return memory_order(int(__m) | int(__mod)); } constexpr memory_order operator&(memory_order __m, __memory_order_modifier __mod) { - return memory_order(__m & int(__mod)); + return memory_order(int(__m) & int(__mod)); } // Drop release ordering as per [atomics.types.operations.req]/21 @@ -94,16 +113,16 @@ __cmpexch_failure_order(memory_order __m) noexcept { return memory_order(__cmpexch_failure_order2(__m & __memory_order_mask) - | (__m & __memory_order_modifier_mask)); + | __memory_order_modifier(__m & __memory_order_modifier_mask)); } _GLIBCXX_ALWAYS_INLINE void atomic_thread_fence(memory_order __m) noexcept - { __atomic_thread_fence(__m); } + { __atomic_thread_fence(int(__m)); } _GLIBCXX_ALWAYS_INLINE void atomic_signal_fence(memory_order __m) noexcept - { __atomic_signal_fence(__m); } + { 
__atomic_signal_fence(int(__m)); } /// kill_dependency template<typename _Tp> @@ -173,13 +192,13 @@ _GLIBCXX_ALWAYS_INLINE bool test_and_set(memory_order __m = memory_order_seq_cst) noexcept { - return __atomic_test_and_set (&_M_i, __m); + return __atomic_test_and_set (&_M_i, int(__m)); } _GLIBCXX_ALWAYS_INLINE bool test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept { - return __atomic_test_and_set (&_M_i, __m); + return __atomic_test_and_set (&_M_i, int(__m)); } _GLIBCXX_ALWAYS_INLINE void @@ -190,7 +209,7 @@ __glibcxx_assert(__b != memory_order_acquire); __glibcxx_assert(__b != memory_order_acq_rel); - __atomic_clear (&_M_i, __m); + __atomic_clear (&_M_i, int(__m)); } _GLIBCXX_ALWAYS_INLINE void @@ -201,7 +220,7 @@ __glibcxx_assert(__b != memory_order_acquire); __glibcxx_assert(__b != memory_order_acq_rel); - __atomic_clear (&_M_i, __m); + __atomic_clear (&_M_i, int(__m)); } private: @@ -374,7 +393,7 @@ __glibcxx_assert(__b != memory_order_acq_rel); __glibcxx_assert(__b != memory_order_consume); - __atomic_store_n(&_M_i, __i, __m); + __atomic_store_n(&_M_i, __i, int(__m)); } _GLIBCXX_ALWAYS_INLINE void @@ -386,7 +405,7 @@ __glibcxx_assert(__b != memory_order_acq_rel); __glibcxx_assert(__b != memory_order_consume); - __atomic_store_n(&_M_i, __i, __m); + __atomic_store_n(&_M_i, __i, int(__m)); } _GLIBCXX_ALWAYS_INLINE __int_type @@ -396,7 +415,7 @@ __glibcxx_assert(__b != memory_order_release); __glibcxx_assert(__b != memory_order_acq_rel); - return __atomic_load_n(&_M_i, __m); + return __atomic_load_n(&_M_i, int(__m)); } _GLIBCXX_ALWAYS_INLINE __int_type @@ -406,7 +425,7 @@ __glibcxx_assert(__b != memory_order_release); __glibcxx_assert(__b != memory_order_acq_rel); - return __atomic_load_n(&_M_i, __m); + return __atomic_load_n(&_M_i, int(__m)); } _GLIBCXX_ALWAYS_INLINE __int_type @@ -413,7 +432,7 @@ exchange(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept { - return __atomic_exchange_n(&_M_i, __i, __m); + return 
__atomic_exchange_n(&_M_i, __i, int(__m)); } @@ -421,7 +440,7 @@ exchange(__int_type __i, memory_order __m = memory_order_seq_cst) volatile noexcept { - return __atomic_exchange_n(&_M_i, __i, __m); + return __atomic_exchange_n(&_M_i, __i, int(__m)); } _GLIBCXX_ALWAYS_INLINE bool @@ -434,7 +453,8 @@ __glibcxx_assert(__b2 != memory_order_acq_rel); __glibcxx_assert(__b2 <= __b1); - return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2); + return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, + int(__m1), int(__m2)); } _GLIBCXX_ALWAYS_INLINE bool @@ -448,7 +468,8 @@ __glibcxx_assert(__b2 != memory_order_acq_rel); __glibcxx_assert(__b2 <= __b1); - return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2); + return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, + int(__m1), int(__m2)); } _GLIBCXX_ALWAYS_INLINE bool @@ -477,7 +498,8 @@ __glibcxx_assert(__b2 != memory_order_acq_rel); __glibcxx_assert(__b2 <= __b1); - return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2); + return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, + int(__m1), int(__m2)); } _GLIBCXX_ALWAYS_INLINE bool @@ -492,7 +514,8 @@ __glibcxx_assert(__b2 != memory_order_acq_rel); __glibcxx_assert(__b2 <= __b1); - return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2); + return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, + int(__m1), int(__m2)); } _GLIBCXX_ALWAYS_INLINE bool @@ -514,52 +537,52 @@ _GLIBCXX_ALWAYS_INLINE __int_type fetch_add(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept - { return __atomic_fetch_add(&_M_i, __i, __m); } + { return __atomic_fetch_add(&_M_i, __i, int(__m)); } _GLIBCXX_ALWAYS_INLINE __int_type fetch_add(__int_type __i, memory_order __m = memory_order_seq_cst) volatile noexcept - { return __atomic_fetch_add(&_M_i, __i, __m); } + { return __atomic_fetch_add(&_M_i, __i, int(__m)); } _GLIBCXX_ALWAYS_INLINE __int_type fetch_sub(__int_type __i, memory_order __m = memory_order_seq_cst) 
noexcept - { return __atomic_fetch_sub(&_M_i, __i, __m); } + { return __atomic_fetch_sub(&_M_i, __i, int(__m)); } _GLIBCXX_ALWAYS_INLINE __int_type fetch_sub(__int_type __i, memory_order __m = memory_order_seq_cst) volatile noexcept - { return __atomic_fetch_sub(&_M_i, __i, __m); } + { return __atomic_fetch_sub(&_M_i, __i, int(__m)); } _GLIBCXX_ALWAYS_INLINE __int_type fetch_and(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept - { return __atomic_fetch_and(&_M_i, __i, __m); } + { return __atomic_fetch_and(&_M_i, __i, int(__m)); } _GLIBCXX_ALWAYS_INLINE __int_type fetch_and(__int_type __i, memory_order __m = memory_order_seq_cst) volatile noexcept - { return __atomic_fetch_and(&_M_i, __i, __m); } + { return __atomic_fetch_and(&_M_i, __i, int(__m)); } _GLIBCXX_ALWAYS_INLINE __int_type fetch_or(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept - { return __atomic_fetch_or(&_M_i, __i, __m); } + { return __atomic_fetch_or(&_M_i, __i, int(__m)); } _GLIBCXX_ALWAYS_INLINE __int_type fetch_or(__int_type __i, memory_order __m = memory_order_seq_cst) volatile noexcept - { return __atomic_fetch_or(&_M_i, __i, __m); } + { return __atomic_fetch_or(&_M_i, __i, int(__m)); } _GLIBCXX_ALWAYS_INLINE __int_type fetch_xor(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept - { return __atomic_fetch_xor(&_M_i, __i, __m); } + { return __atomic_fetch_xor(&_M_i, __i, int(__m)); } _GLIBCXX_ALWAYS_INLINE __int_type fetch_xor(__int_type __i, memory_order __m = memory_order_seq_cst) volatile noexcept - { return __atomic_fetch_xor(&_M_i, __i, __m); } + { return __atomic_fetch_xor(&_M_i, __i, int(__m)); } }; @@ -691,7 +714,7 @@ __glibcxx_assert(__b != memory_order_acq_rel); __glibcxx_assert(__b != memory_order_consume); - __atomic_store_n(&_M_p, __p, __m); + __atomic_store_n(&_M_p, __p, int(__m)); } _GLIBCXX_ALWAYS_INLINE void @@ -703,7 +726,7 @@ __glibcxx_assert(__b != memory_order_acq_rel); __glibcxx_assert(__b != memory_order_consume); - 
__atomic_store_n(&_M_p, __p, __m); + __atomic_store_n(&_M_p, __p, int(__m)); } _GLIBCXX_ALWAYS_INLINE __pointer_type @@ -713,7 +736,7 @@ __glibcxx_assert(__b != memory_order_release); __glibcxx_assert(__b != memory_order_acq_rel); - return __atomic_load_n(&_M_p, __m); + return __atomic_load_n(&_M_p, int(__m)); } _GLIBCXX_ALWAYS_INLINE __pointer_type @@ -723,7 +746,7 @@ __glibcxx_assert(__b != memory_order_release); __glibcxx_assert(__b != memory_order_acq_rel); - return __atomic_load_n(&_M_p, __m); + return __atomic_load_n(&_M_p, int(__m)); } _GLIBCXX_ALWAYS_INLINE __pointer_type @@ -730,7 +753,7 @@ exchange(__pointer_type __p, memory_order __m = memory_order_seq_cst) noexcept { - return __atomic_exchange_n(&_M_p, __p, __m); + return __atomic_exchange_n(&_M_p, __p, int(__m)); } @@ -738,7 +761,7 @@ exchange(__pointer_type __p, memory_order __m = memory_order_seq_cst) volatile noexcept { - return __atomic_exchange_n(&_M_p, __p, __m); + return __atomic_exchange_n(&_M_p, __p, int(__m)); } _GLIBCXX_ALWAYS_INLINE bool @@ -752,7 +775,8 @@ __glibcxx_assert(__b2 != memory_order_acq_rel); __glibcxx_assert(__b2 <= __b1); - return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2); + return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, + int(__m1), int(__m2)); } _GLIBCXX_ALWAYS_INLINE bool @@ -767,28 +791,29 @@ __glibcxx_assert(__b2 != memory_order_acq_rel); __glibcxx_assert(__b2 <= __b1); - return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2); + return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, + int(__m1), int(__m2)); } _GLIBCXX_ALWAYS_INLINE __pointer_type fetch_add(ptrdiff_t __d, memory_order __m = memory_order_seq_cst) noexcept - { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); } + { return __atomic_fetch_add(&_M_p, _M_type_size(__d), int(__m)); } _GLIBCXX_ALWAYS_INLINE __pointer_type fetch_add(ptrdiff_t __d, memory_order __m = memory_order_seq_cst) volatile noexcept - { return __atomic_fetch_add(&_M_p, 
_M_type_size(__d), __m); } + { return __atomic_fetch_add(&_M_p, _M_type_size(__d), int(__m)); } _GLIBCXX_ALWAYS_INLINE __pointer_type fetch_sub(ptrdiff_t __d, memory_order __m = memory_order_seq_cst) noexcept - { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); } + { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), int(__m)); } _GLIBCXX_ALWAYS_INLINE __pointer_type fetch_sub(ptrdiff_t __d, memory_order __m = memory_order_seq_cst) volatile noexcept - { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); } + { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), int(__m)); } }; // @} group atomics Index: testsuite/29_atomics/headers/atomic/types_std_c++2a.cc =================================================================== --- testsuite/29_atomics/headers/atomic/types_std_c++2a.cc (nonexistent) +++ testsuite/29_atomics/headers/atomic/types_std_c++2a.cc (working copy) @@ -0,0 +1,32 @@ +// { dg-options "-std=gnu++2a" } +// { dg-do compile { target c++2a } } + +// Copyright (C) 2019 Free Software Foundation, Inc. +// +// This file is part of the GNU ISO C++ Library. This library is free +// software; you can redistribute it and/or modify it under the +// terms of the GNU General Public License as published by the +// Free Software Foundation; either version 3, or (at your option) +// any later version. +// +// This library is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License along +// with this library; see the file COPYING3. If not see +// <http://www.gnu.org/licenses/>. 
+ +#include <atomic> + +void test01() +{ + using std::memory_order; + constexpr auto relaxed = memory_order::relaxed; + constexpr auto consume = memory_order::consume; + constexpr auto acquire = memory_order::acquire; + constexpr auto release = memory_order::release; + constexpr auto acq_rel = memory_order::acq_rel; + constexpr auto seq_cst = memory_order::seq_cst; +} Index: testsuite/29_atomics/headers/atomic/types_std_c++2a_neg.cc =================================================================== --- testsuite/29_atomics/headers/atomic/types_std_c++2a_neg.cc (nonexistent) +++ testsuite/29_atomics/headers/atomic/types_std_c++2a_neg.cc (working copy) @@ -0,0 +1,33 @@ +// { dg-options "-std=gnu++2a" } +// { dg-do compile { target c++2a } } + +// Copyright (C) 2019 Free Software Foundation, Inc. +// +// This file is part of the GNU ISO C++ Library. This library is free +// software; you can redistribute it and/or modify it under the +// terms of the GNU General Public License as published by the +// Free Software Foundation; either version 3, or (at your option) +// any later version. +// +// This library is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License along +// with this library; see the file COPYING3. If not see +// <http://www.gnu.org/licenses/>. + +#include <atomic> + +void test01() +{ + // Not global scoped, only namespace std. 
+ using memory_order; // { dg-error "expected nested-name-specifier" } + constexpr auto relaxed = memory_order::relaxed; // { dg-error "has not been declared" } + constexpr auto consume = memory_order::consume; // { dg-error "has not been declared" } + constexpr auto acquire = memory_order::acquire; // { dg-error "has not been declared" } + constexpr auto release = memory_order::release; // { dg-error "has not been declared" } + constexpr auto acq_rel = memory_order::acq_rel; // { dg-error "has not been declared" } + constexpr auto seq_cst = memory_order::seq_cst; // { dg-error "has not been declared" } +}
2019-01-24  Edward Smith-Rowland  <3dw...@verizon.net>

	PR libstdc++/88996
	Implement P0439R0 Make std::memory_order a scoped enumeration.
	* include/bits/atomic_base.h: For C++20 make memory_order a scoped
	enum; define inline constexpr variables of that type for the old
	memory_order_* constants.  Adjust calls to the __atomic builtins to
	pass int(__m) explicitly.
	* testsuite/29_atomics/headers/atomic/types_std_c++2a.cc: New test.
	* testsuite/29_atomics/headers/atomic/types_std_c++2a_neg.cc: New test.