Same as before, with one slight change: __sync_mem_flag_test_and_set()
and __sync_mem_flag_clear() were removed in a separate consolidation
patch, so this patch changes those calls to __sync_mem_exchange() and
__sync_mem_store(), which subsume all the behaviour of those routines.
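Concretely, the flag operations end up as (sketch only; I haven't shown
the removed __sync_mem_flag_* signatures since they no longer exist):

    // test_and_set: was __sync_mem_flag_test_and_set, now:
    return __sync_mem_exchange (&_M_i, 1, __m);
    // clear: was __sync_mem_flag_clear, now:
    __sync_mem_store (&_M_i, 0, __m);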
Otherwise this is a straight-up translation: each operation now calls
the new routine with a memory model parameter instead of the old __sync
routine wrapped in various explicit barriers.
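For example, the load path goes from explicit barriers around a plain
access to a single call carrying the memory model:

    // before:
    __sync_synchronize ();
    __int_type __ret = _M_i;
    __sync_synchronize ();
    return __ret;

    // after:
    return __sync_mem_load (&_M_i, __m);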
Bootstrapped on x86_64-unknown-linux-gnu with no new regressions.
OK for branch?
* libstdc++-v3/include/bits/atomic_2.h (__atomic2): Use new
__sync_mem routines.
Index: include/bits/atomic_2.h
===================================================================
*** include/bits/atomic_2.h (revision 178710)
--- include/bits/atomic_2.h (working copy)
*************** namespace __atomic2
*** 60,78 ****
bool
test_and_set(memory_order __m = memory_order_seq_cst) noexcept
{
! // Redundant synchronize if built-in for lock is a full barrier.
! if (__m != memory_order_acquire && __m != memory_order_acq_rel)
! __sync_synchronize();
! return __sync_lock_test_and_set(&_M_i, 1);
}
bool
test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
{
! // Redundant synchronize if built-in for lock is a full barrier.
! if (__m != memory_order_acquire && __m != memory_order_acq_rel)
! __sync_synchronize();
! return __sync_lock_test_and_set(&_M_i, 1);
}
void
--- 60,72 ----
bool
test_and_set(memory_order __m = memory_order_seq_cst) noexcept
{
! return __sync_mem_exchange (&_M_i, 1, __m);
}
bool
test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
{
! return __sync_mem_exchange (&_M_i, 1, __m);
}
void
*************** namespace __atomic2
*** 82,90 ****
__glibcxx_assert(__m != memory_order_acquire);
__glibcxx_assert(__m != memory_order_acq_rel);
! __sync_lock_release(&_M_i);
! if (__m != memory_order_acquire && __m != memory_order_acq_rel)
! __sync_synchronize();
}
void
--- 76,82 ----
__glibcxx_assert(__m != memory_order_acquire);
__glibcxx_assert(__m != memory_order_acq_rel);
! __sync_mem_store (&_M_i, 0, __m);
}
void
*************** namespace __atomic2
*** 94,102 ****
__glibcxx_assert(__m != memory_order_acquire);
__glibcxx_assert(__m != memory_order_acq_rel);
! __sync_lock_release(&_M_i);
! if (__m != memory_order_acquire && __m != memory_order_acq_rel)
! __sync_synchronize();
}
};
--- 86,92 ----
__glibcxx_assert(__m != memory_order_acquire);
__glibcxx_assert(__m != memory_order_acq_rel);
! __sync_mem_store (&_M_i, 0, __m);
}
};
*************** namespace __atomic2
*** 180,238 ****
__int_type
operator++() noexcept
! { return __sync_add_and_fetch(&_M_i, 1); }
__int_type
operator++() volatile noexcept
! { return __sync_add_and_fetch(&_M_i, 1); }
__int_type
operator--() noexcept
! { return __sync_sub_and_fetch(&_M_i, 1); }
__int_type
operator--() volatile noexcept
! { return __sync_sub_and_fetch(&_M_i, 1); }
__int_type
operator+=(__int_type __i) noexcept
! { return __sync_add_and_fetch(&_M_i, __i); }
__int_type
operator+=(__int_type __i) volatile noexcept
! { return __sync_add_and_fetch(&_M_i, __i); }
__int_type
operator-=(__int_type __i) noexcept
! { return __sync_sub_and_fetch(&_M_i, __i); }
__int_type
operator-=(__int_type __i) volatile noexcept
! { return __sync_sub_and_fetch(&_M_i, __i); }
__int_type
operator&=(__int_type __i) noexcept
! { return __sync_and_and_fetch(&_M_i, __i); }
__int_type
operator&=(__int_type __i) volatile noexcept
! { return __sync_and_and_fetch(&_M_i, __i); }
__int_type
operator|=(__int_type __i) noexcept
! { return __sync_or_and_fetch(&_M_i, __i); }
__int_type
operator|=(__int_type __i) volatile noexcept
! { return __sync_or_and_fetch(&_M_i, __i); }
__int_type
operator^=(__int_type __i) noexcept
! { return __sync_xor_and_fetch(&_M_i, __i); }
__int_type
operator^=(__int_type __i) volatile noexcept
! { return __sync_xor_and_fetch(&_M_i, __i); }
bool
is_lock_free() const noexcept
--- 170,228 ----
__int_type
operator++() noexcept
! { return __sync_mem_add_fetch(&_M_i, 1, memory_order_seq_cst); }
__int_type
operator++() volatile noexcept
! { return __sync_mem_add_fetch(&_M_i, 1, memory_order_seq_cst); }
__int_type
operator--() noexcept
! { return __sync_mem_sub_fetch(&_M_i, 1, memory_order_seq_cst); }
__int_type
operator--() volatile noexcept
! { return __sync_mem_sub_fetch(&_M_i, 1, memory_order_seq_cst); }
__int_type
operator+=(__int_type __i) noexcept
! { return __sync_mem_add_fetch(&_M_i, __i, memory_order_seq_cst); }
__int_type
operator+=(__int_type __i) volatile noexcept
! { return __sync_mem_add_fetch(&_M_i, __i, memory_order_seq_cst); }
__int_type
operator-=(__int_type __i) noexcept
! { return __sync_mem_sub_fetch(&_M_i, __i, memory_order_seq_cst); }
__int_type
operator-=(__int_type __i) volatile noexcept
! { return __sync_mem_sub_fetch(&_M_i, __i, memory_order_seq_cst); }
__int_type
operator&=(__int_type __i) noexcept
! { return __sync_mem_and_fetch(&_M_i, __i, memory_order_seq_cst); }
__int_type
operator&=(__int_type __i) volatile noexcept
! { return __sync_mem_and_fetch(&_M_i, __i, memory_order_seq_cst); }
__int_type
operator|=(__int_type __i) noexcept
! { return __sync_mem_or_fetch(&_M_i, __i, memory_order_seq_cst); }
__int_type
operator|=(__int_type __i) volatile noexcept
! { return __sync_mem_or_fetch(&_M_i, __i, memory_order_seq_cst); }
__int_type
operator^=(__int_type __i) noexcept
! { return __sync_mem_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
__int_type
operator^=(__int_type __i) volatile noexcept
! { return __sync_mem_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
bool
is_lock_free() const noexcept
*************** namespace __atomic2
*** 249,263 ****
__glibcxx_assert(__m != memory_order_acq_rel);
__glibcxx_assert(__m != memory_order_consume);
! if (__m == memory_order_relaxed)
! _M_i = __i;
! else
! {
! // write_mem_barrier();
! _M_i = __i;
! if (__m == memory_order_seq_cst)
! __sync_synchronize();
! }
}
void
--- 239,245 ----
__glibcxx_assert(__m != memory_order_acq_rel);
__glibcxx_assert(__m != memory_order_consume);
! __sync_mem_store (&_M_i, __i, __m);
}
void
*************** namespace __atomic2
*** 268,282 ****
__glibcxx_assert(__m != memory_order_acq_rel);
__glibcxx_assert(__m != memory_order_consume);
! if (__m == memory_order_relaxed)
! _M_i = __i;
! else
! {
! // write_mem_barrier();
! _M_i = __i;
! if (__m == memory_order_seq_cst)
! __sync_synchronize();
! }
}
__int_type
--- 250,256 ----
__glibcxx_assert(__m != memory_order_acq_rel);
__glibcxx_assert(__m != memory_order_consume);
! __sync_mem_store (&_M_i, __i, __m);
}
__int_type
*************** namespace __atomic2
*** 285,294 ****
__glibcxx_assert(__m != memory_order_release);
__glibcxx_assert(__m != memory_order_acq_rel);
! __sync_synchronize();
! __int_type __ret = _M_i;
! __sync_synchronize();
! return __ret;
}
__int_type
--- 259,265 ----
__glibcxx_assert(__m != memory_order_release);
__glibcxx_assert(__m != memory_order_acq_rel);
! return __sync_mem_load (&_M_i, __m);
}
__int_type
*************** namespace __atomic2
*** 297,314 ****
__glibcxx_assert(__m != memory_order_release);
__glibcxx_assert(__m != memory_order_acq_rel);
! __sync_synchronize();
! __int_type __ret = _M_i;
! __sync_synchronize();
! return __ret;
}
__int_type
exchange(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{
! // XXX built-in assumes memory_order_acquire.
! return __sync_lock_test_and_set(&_M_i, __i);
}
--- 268,281 ----
__glibcxx_assert(__m != memory_order_release);
__glibcxx_assert(__m != memory_order_acq_rel);
! return __sync_mem_load (&_M_i, __m);
}
__int_type
exchange(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{
! return __sync_mem_exchange (&_M_i, __i, __m);
}
*************** namespace __atomic2
*** 316,323 ****
exchange(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
! // XXX built-in assumes memory_order_acquire.
! return __sync_lock_test_and_set(&_M_i, __i);
}
bool
--- 283,289 ----
exchange(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
! return __sync_mem_exchange (&_M_i, __i, __m);
}
bool
*************** namespace __atomic2
*** 356,361 ****
--- 322,328 ----
__glibcxx_assert(__m2 <= __m1);
__int_type __i1o = __i1;
+ // Compare_and_swap is a full barrier already.
__int_type __i1n = __sync_val_compare_and_swap(&_M_i, __i1o, __i2);
// Assume extra stores (of same value) allowed in true case.
*************** namespace __atomic2
*** 373,378 ****
--- 340,346 ----
__glibcxx_assert(__m2 <= __m1);
__int_type __i1o = __i1;
+ // Compare_and_swap is a full barrier already.
__int_type __i1n = __sync_val_compare_and_swap(&_M_i, __i1o, __i2);
// Assume extra stores (of same value) allowed in true case.
*************** namespace __atomic2
*** 399,450 ****
__int_type
fetch_add(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
! { return __sync_fetch_and_add(&_M_i, __i); }
__int_type
fetch_add(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
! { return __sync_fetch_and_add(&_M_i, __i); }
__int_type
fetch_sub(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
! { return __sync_fetch_and_sub(&_M_i, __i); }
__int_type
fetch_sub(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
! { return __sync_fetch_and_sub(&_M_i, __i); }
__int_type
fetch_and(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
! { return __sync_fetch_and_and(&_M_i, __i); }
__int_type
fetch_and(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
! { return __sync_fetch_and_and(&_M_i, __i); }
__int_type
fetch_or(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
! { return __sync_fetch_and_or(&_M_i, __i); }
__int_type
fetch_or(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
! { return __sync_fetch_and_or(&_M_i, __i); }
__int_type
fetch_xor(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
! { return __sync_fetch_and_xor(&_M_i, __i); }
__int_type
fetch_xor(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
! { return __sync_fetch_and_xor(&_M_i, __i); }
};
--- 367,418 ----
__int_type
fetch_add(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
! { return __sync_mem_fetch_add(&_M_i, __i, __m); }
__int_type
fetch_add(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
! { return __sync_mem_fetch_add(&_M_i, __i, __m); }
__int_type
fetch_sub(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
! { return __sync_mem_fetch_sub(&_M_i, __i, __m); }
__int_type
fetch_sub(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
! { return __sync_mem_fetch_sub(&_M_i, __i, __m); }
__int_type
fetch_and(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
! { return __sync_mem_fetch_and(&_M_i, __i, __m); }
__int_type
fetch_and(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
! { return __sync_mem_fetch_and(&_M_i, __i, __m); }
__int_type
fetch_or(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
! { return __sync_mem_fetch_or(&_M_i, __i, __m); }
__int_type
fetch_or(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
! { return __sync_mem_fetch_or(&_M_i, __i, __m); }
__int_type
fetch_xor(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
! { return __sync_mem_fetch_xor(&_M_i, __i, __m); }
__int_type
fetch_xor(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
! { return __sync_mem_fetch_xor(&_M_i, __i, __m); }
};
*************** namespace __atomic2
*** 505,539 ****
__pointer_type
operator++() noexcept
! { return fetch_add(1) + 1; }
__pointer_type
operator++() volatile noexcept
! { return fetch_add(1) + 1; }
__pointer_type
operator--() noexcept
! { return fetch_sub(1) -1; }
__pointer_type
operator--() volatile noexcept
! { return fetch_sub(1) -1; }
__pointer_type
operator+=(ptrdiff_t __d) noexcept
! { return fetch_add(__d) + __d; }
__pointer_type
operator+=(ptrdiff_t __d) volatile noexcept
! { return fetch_add(__d) + __d; }
__pointer_type
operator-=(ptrdiff_t __d) noexcept
! { return fetch_sub(__d) - __d; }
__pointer_type
operator-=(ptrdiff_t __d) volatile noexcept
! { return fetch_sub(__d) - __d; }
bool
is_lock_free() const noexcept
--- 473,507 ----
__pointer_type
operator++() noexcept
! { return __sync_mem_add_fetch(&_M_p, 1, memory_order_seq_cst); }
__pointer_type
operator++() volatile noexcept
! { return __sync_mem_add_fetch(&_M_p, 1, memory_order_seq_cst); }
__pointer_type
operator--() noexcept
! { return __sync_mem_sub_fetch(&_M_p, 1, memory_order_seq_cst); }
__pointer_type
operator--() volatile noexcept
! { return __sync_mem_sub_fetch(&_M_p, 1, memory_order_seq_cst); }
__pointer_type
operator+=(ptrdiff_t __d) noexcept
! { return __sync_mem_add_fetch(&_M_p, __d, memory_order_seq_cst); }
__pointer_type
operator+=(ptrdiff_t __d) volatile noexcept
! { return __sync_mem_add_fetch(&_M_p, __d, memory_order_seq_cst); }
__pointer_type
operator-=(ptrdiff_t __d) noexcept
! { return __sync_mem_sub_fetch(&_M_p, __d, memory_order_seq_cst); }
__pointer_type
operator-=(ptrdiff_t __d) volatile noexcept
! { return __sync_mem_sub_fetch(&_M_p, __d, memory_order_seq_cst); }
bool
is_lock_free() const noexcept
*************** namespace __atomic2
*** 551,565 ****
__glibcxx_assert(__m != memory_order_acq_rel);
__glibcxx_assert(__m != memory_order_consume);
! if (__m == memory_order_relaxed)
! _M_p = __p;
! else
! {
! // write_mem_barrier();
! _M_p = __p;
! if (__m == memory_order_seq_cst)
! __sync_synchronize();
! }
}
void
--- 519,525 ----
__glibcxx_assert(__m != memory_order_acq_rel);
__glibcxx_assert(__m != memory_order_consume);
! __sync_mem_store (&_M_p, __p, __m);
}
void
*************** namespace __atomic2
*** 570,584 ****
__glibcxx_assert(__m != memory_order_acq_rel);
__glibcxx_assert(__m != memory_order_consume);
! if (__m == memory_order_relaxed)
! _M_p = __p;
! else
! {
! // write_mem_barrier();
! _M_p = __p;
! if (__m == memory_order_seq_cst)
! __sync_synchronize();
! }
}
__pointer_type
--- 530,536 ----
__glibcxx_assert(__m != memory_order_acq_rel);
__glibcxx_assert(__m != memory_order_consume);
! __sync_mem_store (&_M_p, __p, __m);
}
__pointer_type
*************** namespace __atomic2
*** 587,596 ****
__glibcxx_assert(__m != memory_order_release);
__glibcxx_assert(__m != memory_order_acq_rel);
! __sync_synchronize();
! __pointer_type __ret = _M_p;
! __sync_synchronize();
! return __ret;
}
__pointer_type
--- 539,545 ----
__glibcxx_assert(__m != memory_order_release);
__glibcxx_assert(__m != memory_order_acq_rel);
! return __sync_mem_load (&_M_p, __m);
}
__pointer_type
*************** namespace __atomic2
*** 599,616 ****
__glibcxx_assert(__m != memory_order_release);
__glibcxx_assert(__m != memory_order_acq_rel);
! __sync_synchronize();
! __pointer_type __ret = _M_p;
! __sync_synchronize();
! return __ret;
}
__pointer_type
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) noexcept
{
! // XXX built-in assumes memory_order_acquire.
! return __sync_lock_test_and_set(&_M_p, __p);
}
--- 548,561 ----
__glibcxx_assert(__m != memory_order_release);
__glibcxx_assert(__m != memory_order_acq_rel);
! return __sync_mem_load (&_M_p, __m);
}
__pointer_type
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) noexcept
{
! return __sync_mem_exchange (&_M_p, __p, __m);
}
*************** namespace __atomic2
*** 618,625 ****
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
! // XXX built-in assumes memory_order_acquire.
! return __sync_lock_test_and_set(&_M_p, __p);
}
bool
--- 563,569 ----
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
! return __sync_mem_exchange (&_M_p, __p, __m);
}
bool
*************** namespace __atomic2
*** 632,637 ****
--- 576,582 ----
__glibcxx_assert(__m2 <= __m1);
__pointer_type __p1o = __p1;
+ // Compare_and_swap is a full barrier already.
__pointer_type __p1n = __sync_val_compare_and_swap(&_M_p, __p1o, __p2);
// Assume extra stores (of same value) allowed in true case.
*************** namespace __atomic2
*** 649,654 ****
--- 594,600 ----
__glibcxx_assert(__m2 <= __m1);
__pointer_type __p1o = __p1;
+ // Compare_and_swap is a full barrier already.
__pointer_type __p1n = __sync_val_compare_and_swap(&_M_p, __p1o, __p2);
// Assume extra stores (of same value) allowed in true case.
*************** namespace __atomic2
*** 659,680 ****
__pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
! { return __sync_fetch_and_add(&_M_p, __d); }
__pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile noexcept
! { return __sync_fetch_and_add(&_M_p, __d); }
__pointer_type
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
! { return __sync_fetch_and_sub(&_M_p, __d); }
__pointer_type
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile noexcept
! { return __sync_fetch_and_sub(&_M_p, __d); }
};
} // namespace __atomic2
--- 605,626 ----
__pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
! { return __sync_mem_fetch_add(&_M_p, __d, __m); }
__pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile noexcept
! { return __sync_mem_fetch_add(&_M_p, __d, __m); }
__pointer_type
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
! { return __sync_mem_fetch_sub(&_M_p, __d, __m); }
__pointer_type
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile noexcept
! { return __sync_mem_fetch_sub(&_M_p, __d, __m); }
};
} // namespace __atomic2