From patchwork Thu Aug 4 19:58:54 2011
X-Patchwork-Submitter: Paolo Carlini
X-Patchwork-Id: 108563
Message-ID: <4E3AF9FE.8040501@oracle.com>
Date: Thu, 04 Aug 2011 21:58:54 +0200
From: Paolo Carlini
To: "gcc-patches@gcc.gnu.org"
CC: libstdc++
Subject: [v3] Use noexcept in <atomic>

Hi,

tested x86_64-linux, committed to mainline.

Paolo.

///////////////////////////////
2011-08-04  Paolo Carlini

	* src/atomic.cc: Use noexcept.
	* include/std/atomic: Likewise.
	* include/bits/atomic_0.h: Likewise.
	* include/bits/atomic_2.h: Likewise.
	* include/bits/atomic_base.h: Likewise.

Index: src/atomic.cc
===================================================================
--- src/atomic.cc (revision 177411)
+++ src/atomic.cc (working copy)
@@ -1,6 +1,6 @@
 // Support for atomic operations -*- C++ -*-
 
-// Copyright (C) 2008, 2009, 2010
+// Copyright (C) 2008, 2009, 2010, 2011
 // Free Software Foundation, Inc.
 //
 // This file is part of the GNU ISO C++ Library.
This library is free @@ -56,7 +56,7 @@ namespace __atomic0 { bool - atomic_flag::test_and_set(memory_order) + atomic_flag::test_and_set(memory_order) noexcept { #if defined(_GLIBCXX_HAS_GTHREADS) && defined(_GLIBCXX_USE_C99_STDINT_TR1) lock_guard __lock(get_atomic_mutex()); @@ -67,7 +67,7 @@ } void - atomic_flag::clear(memory_order) + atomic_flag::clear(memory_order) noexcept { #if defined(_GLIBCXX_HAS_GTHREADS) && defined(_GLIBCXX_USE_C99_STDINT_TR1) lock_guard __lock(get_atomic_mutex()); Index: include/std/atomic =================================================================== --- include/std/atomic (revision 177411) +++ include/std/atomic (working copy) @@ -59,92 +59,93 @@ __atomic_base _M_base; public: - atomic_bool() = default; - ~atomic_bool() = default; + atomic_bool() noexcept = default; + ~atomic_bool() noexcept = default; atomic_bool(const atomic_bool&) = delete; atomic_bool& operator=(const atomic_bool&) = delete; atomic_bool& operator=(const atomic_bool&) volatile = delete; - constexpr atomic_bool(bool __i) : _M_base(__i) { } + constexpr atomic_bool(bool __i) noexcept : _M_base(__i) { } bool - operator=(bool __i) + operator=(bool __i) noexcept { return _M_base.operator=(__i); } - operator bool() const + operator bool() const noexcept { return _M_base.load(); } - operator bool() const volatile + operator bool() const volatile noexcept { return _M_base.load(); } bool - is_lock_free() const { return _M_base.is_lock_free(); } + is_lock_free() const noexcept { return _M_base.is_lock_free(); } bool - is_lock_free() const volatile { return _M_base.is_lock_free(); } + is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); } void - store(bool __i, memory_order __m = memory_order_seq_cst) + store(bool __i, memory_order __m = memory_order_seq_cst) noexcept { _M_base.store(__i, __m); } void - store(bool __i, memory_order __m = memory_order_seq_cst) volatile + store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept { _M_base.store(__i, __m); } bool - load(memory_order __m = memory_order_seq_cst) const + load(memory_order __m = memory_order_seq_cst) const noexcept { return _M_base.load(__m); } bool - load(memory_order __m = memory_order_seq_cst) const volatile + load(memory_order __m = memory_order_seq_cst) const volatile noexcept { return _M_base.load(__m); } bool - exchange(bool __i, memory_order __m = memory_order_seq_cst) + exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept { return _M_base.exchange(__i, __m); } bool - exchange(bool __i, memory_order __m = memory_order_seq_cst) volatile + exchange(bool __i, + memory_order __m = memory_order_seq_cst) volatile noexcept { return _M_base.exchange(__i, __m); } bool compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1, - memory_order __m2) + memory_order __m2) noexcept { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); } bool compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1, - memory_order __m2) volatile + memory_order __m2) volatile noexcept { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); } bool compare_exchange_weak(bool& __i1, bool __i2, - memory_order __m = memory_order_seq_cst) + memory_order __m = memory_order_seq_cst) noexcept { return _M_base.compare_exchange_weak(__i1, __i2, __m); } bool compare_exchange_weak(bool& __i1, bool __i2, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return _M_base.compare_exchange_weak(__i1, __i2, __m); } bool 
compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1, - memory_order __m2) + memory_order __m2) noexcept { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); } bool compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1, - memory_order __m2) volatile + memory_order __m2) volatile noexcept { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); } bool compare_exchange_strong(bool& __i1, bool __i2, - memory_order __m = memory_order_seq_cst) + memory_order __m = memory_order_seq_cst) noexcept { return _M_base.compare_exchange_strong(__i1, __i2, __m); } bool compare_exchange_strong(bool& __i1, bool __i2, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return _M_base.compare_exchange_strong(__i1, __i2, __m); } }; @@ -158,73 +159,77 @@ _Tp _M_i; public: - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(_Tp __i) : _M_i(__i) { } + constexpr atomic(_Tp __i) noexcept : _M_i(__i) { } - operator _Tp() const; + operator _Tp() const noexcept; - operator _Tp() const volatile; + operator _Tp() const volatile noexcept; _Tp - operator=(_Tp __i) { store(__i); return __i; } + operator=(_Tp __i) noexcept { store(__i); return __i; } _Tp - operator=(_Tp __i) volatile { store(__i); return __i; } + operator=(_Tp __i) volatile noexcept { store(__i); return __i; } bool - is_lock_free() const; + is_lock_free() const noexcept; bool - is_lock_free() const volatile; + is_lock_free() const volatile noexcept; void - store(_Tp, memory_order = memory_order_seq_cst); + store(_Tp, memory_order = memory_order_seq_cst) noexcept; void - store(_Tp, memory_order = memory_order_seq_cst) volatile; + store(_Tp, memory_order = memory_order_seq_cst) volatile noexcept; _Tp - load(memory_order = memory_order_seq_cst) const; + load(memory_order = memory_order_seq_cst) const noexcept; _Tp - load(memory_order = memory_order_seq_cst) const volatile; + load(memory_order = memory_order_seq_cst) const volatile noexcept; _Tp - exchange(_Tp __i, memory_order = memory_order_seq_cst); + exchange(_Tp __i, memory_order = memory_order_seq_cst) noexcept; _Tp - exchange(_Tp __i, memory_order = memory_order_seq_cst) volatile; + exchange(_Tp __i, memory_order = memory_order_seq_cst) volatile noexcept; bool - compare_exchange_weak(_Tp&, _Tp, memory_order, memory_order); + compare_exchange_weak(_Tp&, _Tp, memory_order, memory_order) noexcept; bool - compare_exchange_weak(_Tp&, _Tp, memory_order, memory_order) volatile; + compare_exchange_weak(_Tp&, _Tp, memory_order, + memory_order) volatile noexcept; bool - compare_exchange_weak(_Tp&, _Tp, memory_order = memory_order_seq_cst); + compare_exchange_weak(_Tp&, _Tp, + memory_order = memory_order_seq_cst) noexcept; bool compare_exchange_weak(_Tp&, _Tp, - memory_order = memory_order_seq_cst) volatile; + memory_order = memory_order_seq_cst) volatile noexcept; bool - compare_exchange_strong(_Tp&, _Tp, memory_order, memory_order); + compare_exchange_strong(_Tp&, _Tp, memory_order, memory_order) noexcept; bool - compare_exchange_strong(_Tp&, _Tp, memory_order, memory_order) volatile; + compare_exchange_strong(_Tp&, _Tp, memory_order, + memory_order) volatile noexcept; bool - compare_exchange_strong(_Tp&, _Tp, memory_order = memory_order_seq_cst); + compare_exchange_strong(_Tp&, _Tp, + memory_order = 
memory_order_seq_cst) noexcept; bool compare_exchange_strong(_Tp&, _Tp, - memory_order = memory_order_seq_cst) volatile; + memory_order = memory_order_seq_cst) volatile noexcept; }; @@ -236,123 +241,126 @@ typedef __atomic_base<_Tp*> __base_type; __base_type _M_b; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__pointer_type __p) : _M_b(__p) { } + constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { } - operator __pointer_type() const + operator __pointer_type() const noexcept { return __pointer_type(_M_b); } - operator __pointer_type() const volatile + operator __pointer_type() const volatile noexcept { return __pointer_type(_M_b); } __pointer_type - operator=(__pointer_type __p) + operator=(__pointer_type __p) noexcept { return _M_b.operator=(__p); } __pointer_type - operator=(__pointer_type __p) volatile + operator=(__pointer_type __p) volatile noexcept { return _M_b.operator=(__p); } __pointer_type - operator++(int) + operator++(int) noexcept { return _M_b++; } __pointer_type - operator++(int) volatile + operator++(int) volatile noexcept { return _M_b++; } __pointer_type - operator--(int) + operator--(int) noexcept { return _M_b--; } __pointer_type - operator--(int) volatile + operator--(int) volatile noexcept { return _M_b--; } __pointer_type - operator++() + operator++() noexcept { return ++_M_b; } __pointer_type - operator++() volatile + operator++() volatile noexcept { return ++_M_b; } __pointer_type - operator--() + operator--() noexcept { return --_M_b; } __pointer_type - operator--() volatile + operator--() volatile noexcept { return --_M_b; } __pointer_type - operator+=(ptrdiff_t __d) + operator+=(ptrdiff_t __d) noexcept { return _M_b.operator+=(__d); } __pointer_type - operator+=(ptrdiff_t __d) volatile + operator+=(ptrdiff_t __d) volatile noexcept { return _M_b.operator+=(__d); } __pointer_type - operator-=(ptrdiff_t __d) + operator-=(ptrdiff_t __d) noexcept { return _M_b.operator-=(__d); } __pointer_type - operator-=(ptrdiff_t __d) volatile + operator-=(ptrdiff_t __d) volatile noexcept { return _M_b.operator-=(__d); } bool - is_lock_free() const + is_lock_free() const noexcept { return _M_b.is_lock_free(); } bool - is_lock_free() const volatile + is_lock_free() const volatile noexcept { return _M_b.is_lock_free(); } void - store(__pointer_type __p, memory_order __m = memory_order_seq_cst) + store(__pointer_type __p, + memory_order __m = memory_order_seq_cst) noexcept { return _M_b.store(__p, __m); } void store(__pointer_type __p, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return _M_b.store(__p, __m); } __pointer_type - load(memory_order __m = memory_order_seq_cst) const + load(memory_order __m = memory_order_seq_cst) const noexcept { return _M_b.load(__m); } __pointer_type - load(memory_order __m = memory_order_seq_cst) const volatile + load(memory_order __m = memory_order_seq_cst) const volatile noexcept { return _M_b.load(__m); } __pointer_type - exchange(__pointer_type __p, memory_order __m = memory_order_seq_cst) + exchange(__pointer_type __p, + memory_order __m = memory_order_seq_cst) noexcept { return _M_b.exchange(__p, __m); } __pointer_type exchange(__pointer_type __p, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { 
return _M_b.exchange(__p, __m); } bool compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2, - memory_order __m1, memory_order __m2) + memory_order __m1, memory_order __m2) noexcept { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); } bool compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2, - memory_order __m1, memory_order __m2) volatile + memory_order __m1, + memory_order __m2) volatile noexcept { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); } bool compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2, - memory_order __m = memory_order_seq_cst) + memory_order __m = memory_order_seq_cst) noexcept { return compare_exchange_weak(__p1, __p2, __m, __calculate_memory_order(__m)); @@ -360,7 +368,7 @@ bool compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return compare_exchange_weak(__p1, __p2, __m, __calculate_memory_order(__m)); @@ -368,17 +376,18 @@ bool compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2, - memory_order __m1, memory_order __m2) + memory_order __m1, memory_order __m2) noexcept { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); } bool compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2, - memory_order __m1, memory_order __m2) volatile + memory_order __m1, + memory_order __m2) volatile noexcept { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); } bool compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2, - memory_order __m = memory_order_seq_cst) + memory_order __m = memory_order_seq_cst) noexcept { return _M_b.compare_exchange_strong(__p1, __p2, __m, __calculate_memory_order(__m)); @@ -386,28 +395,30 @@ bool compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return _M_b.compare_exchange_strong(__p1, __p2, __m, __calculate_memory_order(__m)); } __pointer_type - fetch_add(ptrdiff_t __d, memory_order __m = memory_order_seq_cst) + fetch_add(ptrdiff_t __d, + memory_order __m = memory_order_seq_cst) noexcept { return _M_b.fetch_add(__d, __m); } __pointer_type fetch_add(ptrdiff_t __d, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return _M_b.fetch_add(__d, __m); } __pointer_type - fetch_sub(ptrdiff_t __d, memory_order __m = memory_order_seq_cst) + fetch_sub(ptrdiff_t __d, + memory_order __m = memory_order_seq_cst) noexcept { return _M_b.fetch_sub(__d, __m); } __pointer_type fetch_sub(ptrdiff_t __d, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return _M_b.fetch_sub(__d, __m); } }; @@ -419,13 +430,13 @@ typedef bool __integral_type; typedef atomic_bool __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -438,13 +449,13 @@ typedef char __integral_type; typedef atomic_char __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + 
~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -457,13 +468,13 @@ typedef signed char __integral_type; typedef atomic_schar __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept= default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -476,13 +487,13 @@ typedef unsigned char __integral_type; typedef atomic_uchar __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept= default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -495,13 +506,13 @@ typedef short __integral_type; typedef atomic_short __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -514,13 +525,13 @@ typedef unsigned short __integral_type; typedef atomic_ushort __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -533,13 +544,13 @@ typedef int __integral_type; typedef atomic_int __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -552,13 +563,13 @@ typedef unsigned int __integral_type; typedef atomic_uint __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; 
using __base_type::operator=; @@ -571,13 +582,13 @@ typedef long __integral_type; typedef atomic_long __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -590,13 +601,13 @@ typedef unsigned long __integral_type; typedef atomic_ulong __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -609,13 +620,13 @@ typedef long long __integral_type; typedef atomic_llong __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -628,13 +639,13 @@ typedef unsigned long long __integral_type; typedef atomic_ullong __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -647,13 +658,13 @@ typedef wchar_t __integral_type; typedef atomic_wchar_t __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -666,13 +677,13 @@ typedef char16_t __integral_type; typedef atomic_char16_t __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -685,13 +696,13 @@ typedef char32_t __integral_type; typedef atomic_char32_t __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& 
operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -700,104 +711,109 @@ // Function definitions, atomic_flag operations. inline bool - atomic_flag_test_and_set_explicit(atomic_flag* __a, memory_order __m) + atomic_flag_test_and_set_explicit(atomic_flag* __a, + memory_order __m) noexcept { return __a->test_and_set(__m); } inline bool atomic_flag_test_and_set_explicit(volatile atomic_flag* __a, - memory_order __m) + memory_order __m) noexcept { return __a->test_and_set(__m); } inline void - atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) + atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept { __a->clear(__m); } inline void - atomic_flag_clear_explicit(volatile atomic_flag* __a, memory_order __m) + atomic_flag_clear_explicit(volatile atomic_flag* __a, + memory_order __m) noexcept { __a->clear(__m); } inline bool - atomic_flag_test_and_set(atomic_flag* __a) + atomic_flag_test_and_set(atomic_flag* __a) noexcept { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); } inline bool - atomic_flag_test_and_set(volatile atomic_flag* __a) + atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); } inline void - atomic_flag_clear(atomic_flag* __a) + atomic_flag_clear(atomic_flag* __a) noexcept { atomic_flag_clear_explicit(__a, memory_order_seq_cst); } inline void - atomic_flag_clear(volatile atomic_flag* __a) + atomic_flag_clear(volatile atomic_flag* __a) noexcept { atomic_flag_clear_explicit(__a, memory_order_seq_cst); } // Function templates generally applicable to atomic types. 
template inline bool - atomic_is_lock_free(const atomic<_ITp>* __a) + atomic_is_lock_free(const atomic<_ITp>* __a) noexcept { return __a->is_lock_free(); } template inline bool - atomic_is_lock_free(const volatile atomic<_ITp>* __a) + atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept { return __a->is_lock_free(); } template inline void - atomic_init(atomic<_ITp>* __a, _ITp __i); + atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept; template inline void - atomic_init(volatile atomic<_ITp>* __a, _ITp __i); + atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept; template inline void - atomic_store_explicit(atomic<_ITp>* __a, _ITp __i, memory_order __m) + atomic_store_explicit(atomic<_ITp>* __a, _ITp __i, + memory_order __m) noexcept { __a->store(__i, __m); } template inline void atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i, - memory_order __m) + memory_order __m) noexcept { __a->store(__i, __m); } template inline _ITp - atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) + atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept { return __a->load(__m); } template inline _ITp atomic_load_explicit(const volatile atomic<_ITp>* __a, - memory_order __m) + memory_order __m) noexcept { return __a->load(__m); } template inline _ITp atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i, - memory_order __m) + memory_order __m) noexcept { return __a->exchange(__i, __m); } template inline _ITp atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i, - memory_order __m) + memory_order __m) noexcept { return __a->exchange(__i, __m); } template inline bool atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a, _ITp* __i1, _ITp __i2, - memory_order __m1, memory_order __m2) + memory_order __m1, + memory_order __m2) noexcept { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); } template inline bool atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a, _ITp* __i1, _ITp __i2, - memory_order __m1, memory_order __m2) + memory_order __m1, + memory_order __m2) noexcept { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); } template @@ -805,7 +821,7 @@ atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a, _ITp* __i1, _ITp __i2, memory_order __m1, - memory_order __m2) + memory_order __m2) noexcept { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); } template @@ -813,44 +829,44 @@ atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a, _ITp* __i1, _ITp __i2, memory_order __m1, - memory_order __m2) + memory_order __m2) noexcept { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); } template inline void - atomic_store(atomic<_ITp>* __a, _ITp __i) + atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept { atomic_store_explicit(__a, __i, memory_order_seq_cst); } template inline void - atomic_store(volatile atomic<_ITp>* __a, _ITp __i) + atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept { atomic_store_explicit(__a, __i, memory_order_seq_cst); } template inline _ITp - atomic_load(const atomic<_ITp>* __a) + atomic_load(const atomic<_ITp>* __a) noexcept { return atomic_load_explicit(__a, memory_order_seq_cst); } template inline _ITp - atomic_load(const volatile atomic<_ITp>* __a) + atomic_load(const volatile atomic<_ITp>* __a) noexcept { return atomic_load_explicit(__a, memory_order_seq_cst); } template inline _ITp - atomic_exchange(atomic<_ITp>* __a, _ITp __i) + atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); } 
template inline _ITp - atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) + atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); } template inline bool atomic_compare_exchange_weak(atomic<_ITp>* __a, - _ITp* __i1, _ITp __i2) + _ITp* __i1, _ITp __i2) noexcept { return atomic_compare_exchange_weak_explicit(__a, __i1, __i2, memory_order_seq_cst, @@ -860,7 +876,7 @@ template inline bool atomic_compare_exchange_weak(volatile atomic<_ITp>* __a, - _ITp* __i1, _ITp __i2) + _ITp* __i1, _ITp __i2) noexcept { return atomic_compare_exchange_weak_explicit(__a, __i1, __i2, memory_order_seq_cst, @@ -870,7 +886,7 @@ template inline bool atomic_compare_exchange_strong(atomic<_ITp>* __a, - _ITp* __i1, _ITp __i2) + _ITp* __i1, _ITp __i2) noexcept { return atomic_compare_exchange_strong_explicit(__a, __i1, __i2, memory_order_seq_cst, @@ -880,7 +896,7 @@ template inline bool atomic_compare_exchange_strong(volatile atomic<_ITp>* __a, - _ITp* __i1, _ITp __i2) + _ITp* __i1, _ITp __i2) noexcept { return atomic_compare_exchange_strong_explicit(__a, __i1, __i2, memory_order_seq_cst, @@ -894,111 +910,111 @@ template inline _ITp atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i, - memory_order __m) + memory_order __m) noexcept { return __a->fetch_add(__i, __m); } template inline _ITp atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i, - memory_order __m) + memory_order __m) noexcept { return __a->fetch_add(__i, __m); } template inline _ITp atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i, - memory_order __m) + memory_order __m) noexcept { return __a->fetch_sub(__i, __m); } template inline _ITp atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i, - memory_order __m) + memory_order __m) noexcept { return __a->fetch_sub(__i, __m); } template inline _ITp atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i, - memory_order __m) + memory_order __m) noexcept { return __a->fetch_and(__i, __m); } template inline _ITp atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i, - memory_order __m) + memory_order __m) noexcept { return __a->fetch_and(__i, __m); } template inline _ITp atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i, - memory_order __m) + memory_order __m) noexcept { return __a->fetch_or(__i, __m); } template inline _ITp atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i, - memory_order __m) + memory_order __m) noexcept { return __a->fetch_or(__i, __m); } template inline _ITp atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i, - memory_order __m) + memory_order __m) noexcept { return __a->fetch_xor(__i, __m); } template inline _ITp atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i, - memory_order __m) + memory_order __m) noexcept { return __a->fetch_xor(__i, __m); } template inline _ITp - atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) + atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); } template inline _ITp - atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) + atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); } template inline _ITp - atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) + atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); 
} template inline _ITp - atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) + atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); } template inline _ITp - atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) + atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); } template inline _ITp - atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) + atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); } template inline _ITp - atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) + atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); } template inline _ITp - atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) + atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); } template inline _ITp - atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) + atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); } template inline _ITp - atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) + atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); } @@ -1006,45 +1022,45 @@ template inline _ITp* atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d, - memory_order __m) + memory_order __m) noexcept { return __a->fetch_add(__d, __m); } template inline _ITp* atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d, - memory_order __m) + memory_order __m) noexcept { return __a->fetch_add(__d, __m); } template inline _ITp* - atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) + atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept { return __a->fetch_add(__d); } template inline _ITp* - atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) + atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept { return __a->fetch_add(__d); } template inline _ITp* atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a, - ptrdiff_t __d, memory_order __m) + ptrdiff_t __d, memory_order __m) noexcept { return __a->fetch_sub(__d, __m); } template inline _ITp* atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d, - memory_order __m) + memory_order __m) noexcept { return __a->fetch_sub(__d, __m); } template inline _ITp* - atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) + atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept { return __a->fetch_sub(__d); } template inline _ITp* - atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) + atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept { return __a->fetch_sub(__d); } // @} group atomics Index: include/bits/atomic_0.h =================================================================== --- include/bits/atomic_0.h (revision 177411) +++ include/bits/atomic_0.h (working copy) @@ -111,26 +111,26 @@ /// atomic_flag struct atomic_flag : public __atomic_flag_base { - atomic_flag() = default; - ~atomic_flag() = default; + atomic_flag() noexcept = default; + ~atomic_flag() noexcept = default; atomic_flag(const atomic_flag&) = delete; atomic_flag& operator=(const atomic_flag&) = delete; atomic_flag& operator=(const atomic_flag&) 
volatile = delete; // Conversion to ATOMIC_FLAG_INIT. - atomic_flag(bool __i): __atomic_flag_base({ __i }) { } + atomic_flag(bool __i) noexcept : __atomic_flag_base({ __i }) { } bool - test_and_set(memory_order __m = memory_order_seq_cst); + test_and_set(memory_order __m = memory_order_seq_cst) noexcept; bool - test_and_set(memory_order __m = memory_order_seq_cst) volatile; + test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept; void - clear(memory_order __m = memory_order_seq_cst); + clear(memory_order __m = memory_order_seq_cst) noexcept; void - clear(memory_order __m = memory_order_seq_cst) volatile; + clear(memory_order __m = memory_order_seq_cst) volatile noexcept; }; @@ -166,117 +166,117 @@ __int_type _M_i; public: - __atomic_base() = default; - ~__atomic_base() = default; + __atomic_base() noexcept = default; + ~__atomic_base() noexcept = default; __atomic_base(const __atomic_base&) = delete; __atomic_base& operator=(const __atomic_base&) = delete; __atomic_base& operator=(const __atomic_base&) volatile = delete; // Requires __int_type convertible to _M_base._M_i. - constexpr __atomic_base(__int_type __i): _M_i (__i) { } + constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { } - operator __int_type() const + operator __int_type() const noexcept { return load(); } - operator __int_type() const volatile + operator __int_type() const volatile noexcept { return load(); } __int_type - operator=(__int_type __i) + operator=(__int_type __i) noexcept { store(__i); return __i; } __int_type - operator=(__int_type __i) volatile + operator=(__int_type __i) volatile noexcept { store(__i); return __i; } __int_type - operator++(int) + operator++(int) noexcept { return fetch_add(1); } __int_type - operator++(int) volatile + operator++(int) volatile noexcept { return fetch_add(1); } __int_type - operator--(int) + operator--(int) noexcept { return fetch_sub(1); } __int_type - operator--(int) volatile + operator--(int) volatile noexcept { return fetch_sub(1); } __int_type - operator++() + operator++() noexcept { return fetch_add(1) + 1; } __int_type - operator++() volatile + operator++() volatile noexcept { return fetch_add(1) + 1; } __int_type - operator--() + operator--() noexcept { return fetch_sub(1) - 1; } __int_type - operator--() volatile + operator--() volatile noexcept { return fetch_sub(1) - 1; } __int_type - operator+=(__int_type __i) + operator+=(__int_type __i) noexcept { return fetch_add(__i) + __i; } __int_type - operator+=(__int_type __i) volatile + operator+=(__int_type __i) volatile noexcept { return fetch_add(__i) + __i; } __int_type - operator-=(__int_type __i) + operator-=(__int_type __i) noexcept { return fetch_sub(__i) - __i; } __int_type - operator-=(__int_type __i) volatile + operator-=(__int_type __i) volatile noexcept { return fetch_sub(__i) - __i; } __int_type - operator&=(__int_type __i) + operator&=(__int_type __i) noexcept { return fetch_and(__i) & __i; } __int_type - operator&=(__int_type __i) volatile + operator&=(__int_type __i) volatile noexcept { return fetch_and(__i) & __i; } __int_type - operator|=(__int_type __i) + operator|=(__int_type __i) noexcept { return fetch_or(__i) | __i; } __int_type - operator|=(__int_type __i) volatile + operator|=(__int_type __i) volatile noexcept { return fetch_or(__i) | __i; } __int_type - operator^=(__int_type __i) + operator^=(__int_type __i) noexcept { return fetch_xor(__i) ^ __i; } __int_type - operator^=(__int_type __i) volatile + operator^=(__int_type __i) volatile noexcept { return fetch_xor(__i) 
^ __i; } bool - is_lock_free() const + is_lock_free() const noexcept { return false; } bool - is_lock_free() const volatile + is_lock_free() const volatile noexcept { return false; } void - store(__int_type __i, memory_order __m = memory_order_seq_cst) + store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept { __glibcxx_assert(__m != memory_order_acquire); __glibcxx_assert(__m != memory_order_acq_rel); @@ -285,7 +285,8 @@ } void - store(__int_type __i, memory_order __m = memory_order_seq_cst) volatile + store(__int_type __i, + memory_order __m = memory_order_seq_cst) volatile noexcept { __glibcxx_assert(__m != memory_order_acquire); __glibcxx_assert(__m != memory_order_acq_rel); @@ -294,7 +295,7 @@ } __int_type - load(memory_order __m = memory_order_seq_cst) const + load(memory_order __m = memory_order_seq_cst) const noexcept { __glibcxx_assert(__m != memory_order_release); __glibcxx_assert(__m != memory_order_acq_rel); @@ -302,7 +303,7 @@ } __int_type - load(memory_order __m = memory_order_seq_cst) const volatile + load(memory_order __m = memory_order_seq_cst) const volatile noexcept { __glibcxx_assert(__m != memory_order_release); __glibcxx_assert(__m != memory_order_acq_rel); @@ -310,16 +311,18 @@ } __int_type - exchange(__int_type __i, memory_order __m = memory_order_seq_cst) + exchange(__int_type __i, + memory_order __m = memory_order_seq_cst) noexcept { return _ATOMIC_MODIFY_(this, =, __i, __m); } __int_type - exchange(__int_type __i, memory_order __m = memory_order_seq_cst) volatile + exchange(__int_type __i, + memory_order __m = memory_order_seq_cst) volatile noexcept { return _ATOMIC_MODIFY_(this, =, __i, __m); } bool compare_exchange_weak(__int_type& __i1, __int_type __i2, - memory_order __m1, memory_order __m2) + memory_order __m1, memory_order __m2) noexcept { __glibcxx_assert(__m2 != memory_order_release); __glibcxx_assert(__m2 != memory_order_acq_rel); @@ -329,7 +332,8 @@ bool compare_exchange_weak(__int_type& __i1, __int_type __i2, - memory_order __m1, memory_order __m2) volatile + memory_order __m1, + memory_order __m2) volatile noexcept { __glibcxx_assert(__m2 != memory_order_release); __glibcxx_assert(__m2 != memory_order_acq_rel); @@ -339,7 +343,7 @@ bool compare_exchange_weak(__int_type& __i1, __int_type __i2, - memory_order __m = memory_order_seq_cst) + memory_order __m = memory_order_seq_cst) noexcept { return compare_exchange_weak(__i1, __i2, __m, __calculate_memory_order(__m)); @@ -347,7 +351,7 @@ bool compare_exchange_weak(__int_type& __i1, __int_type __i2, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return compare_exchange_weak(__i1, __i2, __m, __calculate_memory_order(__m)); @@ -355,7 +359,7 @@ bool compare_exchange_strong(__int_type& __i1, __int_type __i2, - memory_order __m1, memory_order __m2) + memory_order __m1, memory_order __m2) noexcept { __glibcxx_assert(__m2 != memory_order_release); __glibcxx_assert(__m2 != memory_order_acq_rel); @@ -365,7 +369,8 @@ bool compare_exchange_strong(__int_type& __i1, __int_type __i2, - memory_order __m1, memory_order __m2) volatile + memory_order __m1, + memory_order __m2) volatile noexcept { __glibcxx_assert(__m2 != memory_order_release); __glibcxx_assert(__m2 != memory_order_acq_rel); @@ -375,7 +380,7 @@ bool compare_exchange_strong(__int_type& __i1, __int_type __i2, - memory_order __m = memory_order_seq_cst) + memory_order __m = memory_order_seq_cst) noexcept { return compare_exchange_strong(__i1, __i2, __m, 
__calculate_memory_order(__m)); @@ -383,54 +388,60 @@ bool compare_exchange_strong(__int_type& __i1, __int_type __i2, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return compare_exchange_strong(__i1, __i2, __m, __calculate_memory_order(__m)); } __int_type - fetch_add(__int_type __i, memory_order __m = memory_order_seq_cst) + fetch_add(__int_type __i, + memory_order __m = memory_order_seq_cst) noexcept { return _ATOMIC_MODIFY_(this, +=, __i, __m); } __int_type fetch_add(__int_type __i, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return _ATOMIC_MODIFY_(this, +=, __i, __m); } __int_type - fetch_sub(__int_type __i, memory_order __m = memory_order_seq_cst) + fetch_sub(__int_type __i, + memory_order __m = memory_order_seq_cst) noexcept { return _ATOMIC_MODIFY_(this, -=, __i, __m); } __int_type fetch_sub(__int_type __i, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return _ATOMIC_MODIFY_(this, -=, __i, __m); } __int_type - fetch_and(__int_type __i, memory_order __m = memory_order_seq_cst) + fetch_and(__int_type __i, + memory_order __m = memory_order_seq_cst) noexcept { return _ATOMIC_MODIFY_(this, &=, __i, __m); } __int_type fetch_and(__int_type __i, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return _ATOMIC_MODIFY_(this, &=, __i, __m); } __int_type - fetch_or(__int_type __i, memory_order __m = memory_order_seq_cst) + fetch_or(__int_type __i, + memory_order __m = memory_order_seq_cst) noexcept { return _ATOMIC_MODIFY_(this, |=, __i, __m); } __int_type - fetch_or(__int_type __i, memory_order __m = memory_order_seq_cst) volatile + fetch_or(__int_type __i, + memory_order __m = memory_order_seq_cst) volatile noexcept { return _ATOMIC_MODIFY_(this, |=, __i, __m); } __int_type - fetch_xor(__int_type __i, memory_order __m = memory_order_seq_cst) + fetch_xor(__int_type __i, + memory_order __m = memory_order_seq_cst) noexcept { return _ATOMIC_MODIFY_(this, ^=, __i, __m); } __int_type fetch_xor(__int_type __i, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return _ATOMIC_MODIFY_(this, ^=, __i, __m); } }; @@ -445,93 +456,95 @@ __pointer_type _M_i; public: - __atomic_base() = default; - ~__atomic_base() = default; + __atomic_base() noexcept = default; + ~__atomic_base() noexcept = default; __atomic_base(const __atomic_base&) = delete; __atomic_base& operator=(const __atomic_base&) = delete; __atomic_base& operator=(const __atomic_base&) volatile = delete; // Requires __pointer_type convertible to _M_i. 
- constexpr __atomic_base(__return_pointer_type __p): _M_i (__p) { } + constexpr __atomic_base(__return_pointer_type __p) noexcept + : _M_i (__p) { } - operator __return_pointer_type() const + operator __return_pointer_type() const noexcept { return reinterpret_cast<__return_pointer_type>(load()); } - operator __return_pointer_type() const volatile + operator __return_pointer_type() const volatile noexcept { return reinterpret_cast<__return_pointer_type>(load()); } __return_pointer_type - operator=(__pointer_type __p) + operator=(__pointer_type __p) noexcept { store(__p); return reinterpret_cast<__return_pointer_type>(__p); } __return_pointer_type - operator=(__pointer_type __p) volatile + operator=(__pointer_type __p) volatile noexcept { store(__p); return reinterpret_cast<__return_pointer_type>(__p); } __return_pointer_type - operator++(int) + operator++(int) noexcept { return reinterpret_cast<__return_pointer_type>(fetch_add(1)); } __return_pointer_type - operator++(int) volatile + operator++(int) volatile noexcept { return reinterpret_cast<__return_pointer_type>(fetch_add(1)); } __return_pointer_type - operator--(int) + operator--(int) noexcept { return reinterpret_cast<__return_pointer_type>(fetch_sub(1)); } __return_pointer_type - operator--(int) volatile + operator--(int) volatile noexcept { return reinterpret_cast<__return_pointer_type>(fetch_sub(1)); } __return_pointer_type - operator++() + operator++() noexcept { return reinterpret_cast<__return_pointer_type>(fetch_add(1) + 1); } __return_pointer_type - operator++() volatile + operator++() volatile noexcept { return reinterpret_cast<__return_pointer_type>(fetch_add(1) + 1); } __return_pointer_type - operator--() + operator--() noexcept { return reinterpret_cast<__return_pointer_type>(fetch_sub(1) - 1); } __return_pointer_type - operator--() volatile + operator--() volatile noexcept { return reinterpret_cast<__return_pointer_type>(fetch_sub(1) - 1); } __return_pointer_type - operator+=(ptrdiff_t __d) + operator+=(ptrdiff_t __d) noexcept { return reinterpret_cast<__return_pointer_type>(fetch_add(__d) + __d); } __return_pointer_type - operator+=(ptrdiff_t __d) volatile + operator+=(ptrdiff_t __d) volatile noexcept { return reinterpret_cast<__return_pointer_type>(fetch_add(__d) + __d); } __return_pointer_type - operator-=(ptrdiff_t __d) + operator-=(ptrdiff_t __d) noexcept { return reinterpret_cast<__return_pointer_type>(fetch_sub(__d) - __d); } __return_pointer_type - operator-=(ptrdiff_t __d) volatile + operator-=(ptrdiff_t __d) volatile noexcept { return reinterpret_cast<__return_pointer_type>(fetch_sub(__d) - __d); } bool - is_lock_free() const + is_lock_free() const noexcept { return true; } bool - is_lock_free() const volatile + is_lock_free() const volatile noexcept { return true; } void - store(__pointer_type __p, memory_order __m = memory_order_seq_cst) + store(__pointer_type __p, + memory_order __m = memory_order_seq_cst) noexcept { __glibcxx_assert(__m != memory_order_acquire); __glibcxx_assert(__m != memory_order_acq_rel); @@ -541,7 +554,7 @@ void store(__pointer_type __p, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { __glibcxx_assert(__m != memory_order_acquire); __glibcxx_assert(__m != memory_order_acq_rel); @@ -556,7 +569,7 @@ } __return_pointer_type - load(memory_order __m = memory_order_seq_cst) const + load(memory_order __m = memory_order_seq_cst) const noexcept { __glibcxx_assert(__m != memory_order_release); __glibcxx_assert(__m != 
memory_order_acq_rel); @@ -565,7 +578,7 @@ } __return_pointer_type - load(memory_order __m = memory_order_seq_cst) const volatile + load(memory_order __m = memory_order_seq_cst) const volatile noexcept { __glibcxx_assert(__m != memory_order_release); __glibcxx_assert(__m != memory_order_acq_rel); @@ -574,7 +587,8 @@ } __return_pointer_type - exchange(__pointer_type __p, memory_order __m = memory_order_seq_cst) + exchange(__pointer_type __p, + memory_order __m = memory_order_seq_cst) noexcept { void* __v = _ATOMIC_MODIFY_(this, =, __p, __m); return reinterpret_cast<__return_pointer_type>(__v); @@ -582,7 +596,7 @@ __return_pointer_type exchange(__pointer_type __p, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { volatile __pointer_type* __p2 = &_M_i; __typeof__(__p) __w = (__p); @@ -597,7 +611,7 @@ bool compare_exchange_strong(__return_pointer_type& __rp1, __pointer_type __p2, - memory_order __m1, memory_order __m2) + memory_order __m1, memory_order __m2) noexcept { __glibcxx_assert(__m2 != memory_order_release); __glibcxx_assert(__m2 != memory_order_acq_rel); @@ -608,7 +622,8 @@ bool compare_exchange_strong(__return_pointer_type& __rp1, __pointer_type __p2, - memory_order __m1, memory_order __m2) volatile + memory_order __m1, + memory_order __m2) volatile noexcept { __glibcxx_assert(__m2 != memory_order_release); __glibcxx_assert(__m2 != memory_order_acq_rel); @@ -618,7 +633,8 @@ } __return_pointer_type - fetch_add(ptrdiff_t __d, memory_order __m = memory_order_seq_cst) + fetch_add(ptrdiff_t __d, + memory_order __m = memory_order_seq_cst) noexcept { void* __v = _ATOMIC_MODIFY_(this, +=, __d, __m); return reinterpret_cast<__return_pointer_type>(__v); @@ -626,14 +642,15 @@ __return_pointer_type fetch_add(ptrdiff_t __d, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { void* __v = _ATOMIC_MODIFY_(this, +=, __d, __m); return reinterpret_cast<__return_pointer_type>(__v); } __return_pointer_type - fetch_sub(ptrdiff_t __d, memory_order __m = memory_order_seq_cst) + fetch_sub(ptrdiff_t __d, + memory_order __m = memory_order_seq_cst) noexcept { void* __v = _ATOMIC_MODIFY_(this, -=, __d, __m); return reinterpret_cast<__return_pointer_type>(__v); @@ -641,7 +658,7 @@ __return_pointer_type fetch_sub(ptrdiff_t __d, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { void* __v = _ATOMIC_MODIFY_(this, -=, __d, __m); return reinterpret_cast<__return_pointer_type>(__v); Index: include/bits/atomic_2.h =================================================================== --- include/bits/atomic_2.h (revision 177411) +++ include/bits/atomic_2.h (working copy) @@ -48,17 +48,17 @@ /// atomic_flag struct atomic_flag : public __atomic_flag_base { - atomic_flag() = default; - ~atomic_flag() = default; + atomic_flag() noexcept = default; + ~atomic_flag() noexcept = default; atomic_flag(const atomic_flag&) = delete; atomic_flag& operator=(const atomic_flag&) = delete; atomic_flag& operator=(const atomic_flag&) volatile = delete; // Conversion to ATOMIC_FLAG_INIT. - atomic_flag(bool __i): __atomic_flag_base({ __i }) { } + atomic_flag(bool __i) noexcept : __atomic_flag_base({ __i }) { } bool - test_and_set(memory_order __m = memory_order_seq_cst) + test_and_set(memory_order __m = memory_order_seq_cst) noexcept { // Redundant synchronize if built-in for lock is a full barrier. 
if (__m != memory_order_acquire && __m != memory_order_acq_rel) @@ -67,7 +67,7 @@ } bool - test_and_set(memory_order __m = memory_order_seq_cst) volatile + test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept { // Redundant synchronize if built-in for lock is a full barrier. if (__m != memory_order_acquire && __m != memory_order_acq_rel) @@ -76,7 +76,7 @@ } void - clear(memory_order __m = memory_order_seq_cst) + clear(memory_order __m = memory_order_seq_cst) noexcept { __glibcxx_assert(__m != memory_order_consume); __glibcxx_assert(__m != memory_order_acquire); @@ -88,7 +88,7 @@ } void - clear(memory_order __m = memory_order_seq_cst) volatile + clear(memory_order __m = memory_order_seq_cst) volatile noexcept { __glibcxx_assert(__m != memory_order_consume); __glibcxx_assert(__m != memory_order_acquire); @@ -133,117 +133,117 @@ __int_type _M_i; public: - __atomic_base() = default; - ~__atomic_base() = default; + __atomic_base() noexcept = default; + ~__atomic_base() noexcept = default; __atomic_base(const __atomic_base&) = delete; __atomic_base& operator=(const __atomic_base&) = delete; __atomic_base& operator=(const __atomic_base&) volatile = delete; // Requires __int_type convertible to _M_i. - constexpr __atomic_base(__int_type __i): _M_i (__i) { } + constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { } - operator __int_type() const + operator __int_type() const noexcept { return load(); } - operator __int_type() const volatile + operator __int_type() const volatile noexcept { return load(); } __int_type - operator=(__int_type __i) + operator=(__int_type __i) noexcept { store(__i); return __i; } __int_type - operator=(__int_type __i) volatile + operator=(__int_type __i) volatile noexcept { store(__i); return __i; } __int_type - operator++(int) + operator++(int) noexcept { return fetch_add(1); } __int_type - operator++(int) volatile + operator++(int) volatile noexcept { return fetch_add(1); } __int_type - operator--(int) + operator--(int) noexcept { return fetch_sub(1); } __int_type - operator--(int) volatile + operator--(int) volatile noexcept { return fetch_sub(1); } __int_type - operator++() + operator++() noexcept { return __sync_add_and_fetch(&_M_i, 1); } __int_type - operator++() volatile + operator++() volatile noexcept { return __sync_add_and_fetch(&_M_i, 1); } __int_type - operator--() + operator--() noexcept { return __sync_sub_and_fetch(&_M_i, 1); } __int_type - operator--() volatile + operator--() volatile noexcept { return __sync_sub_and_fetch(&_M_i, 1); } __int_type - operator+=(__int_type __i) + operator+=(__int_type __i) noexcept { return __sync_add_and_fetch(&_M_i, __i); } __int_type - operator+=(__int_type __i) volatile + operator+=(__int_type __i) volatile noexcept { return __sync_add_and_fetch(&_M_i, __i); } __int_type - operator-=(__int_type __i) + operator-=(__int_type __i) noexcept { return __sync_sub_and_fetch(&_M_i, __i); } __int_type - operator-=(__int_type __i) volatile + operator-=(__int_type __i) volatile noexcept { return __sync_sub_and_fetch(&_M_i, __i); } __int_type - operator&=(__int_type __i) + operator&=(__int_type __i) noexcept { return __sync_and_and_fetch(&_M_i, __i); } __int_type - operator&=(__int_type __i) volatile + operator&=(__int_type __i) volatile noexcept { return __sync_and_and_fetch(&_M_i, __i); } __int_type - operator|=(__int_type __i) + operator|=(__int_type __i) noexcept { return __sync_or_and_fetch(&_M_i, __i); } __int_type - operator|=(__int_type __i) volatile + operator|=(__int_type __i) volatile noexcept 
       { return __sync_or_and_fetch(&_M_i, __i); }
 
       __int_type
-      operator^=(__int_type __i)
+      operator^=(__int_type __i) noexcept
       { return __sync_xor_and_fetch(&_M_i, __i); }
 
       __int_type
-      operator^=(__int_type __i) volatile
+      operator^=(__int_type __i) volatile noexcept
       { return __sync_xor_and_fetch(&_M_i, __i); }
 
       bool
-      is_lock_free() const
+      is_lock_free() const noexcept
       { return true; }
 
       bool
-      is_lock_free() const volatile
+      is_lock_free() const volatile noexcept
       { return true; }
 
       void
-      store(__int_type __i, memory_order __m = memory_order_seq_cst)
+      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
       {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
@@ -261,7 +261,8 @@
       }
 
       void
-      store(__int_type __i, memory_order __m = memory_order_seq_cst) volatile
+      store(__int_type __i,
+            memory_order __m = memory_order_seq_cst) volatile noexcept
       {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
@@ -279,7 +280,7 @@
       }
 
       __int_type
-      load(memory_order __m = memory_order_seq_cst) const
+      load(memory_order __m = memory_order_seq_cst) const noexcept
       {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);
@@ -291,7 +292,7 @@
       }
 
       __int_type
-      load(memory_order __m = memory_order_seq_cst) const volatile
+      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
       {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);
@@ -303,7 +304,8 @@
       }
 
       __int_type
-      exchange(__int_type __i, memory_order __m = memory_order_seq_cst)
+      exchange(__int_type __i,
+               memory_order __m = memory_order_seq_cst) noexcept
       {
        // XXX built-in assumes memory_order_acquire.
        return __sync_lock_test_and_set(&_M_i, __i);
@@ -311,7 +313,8 @@
 
       __int_type
-      exchange(__int_type __i, memory_order __m = memory_order_seq_cst) volatile
+      exchange(__int_type __i,
+               memory_order __m = memory_order_seq_cst) volatile noexcept
       {
        // XXX built-in assumes memory_order_acquire.
        return __sync_lock_test_and_set(&_M_i, __i);
@@ -319,17 +322,18 @@
 
       bool
       compare_exchange_weak(__int_type& __i1, __int_type __i2,
-                            memory_order __m1, memory_order __m2)
+                            memory_order __m1, memory_order __m2) noexcept
       { return compare_exchange_strong(__i1, __i2, __m1, __m2); }
 
       bool
       compare_exchange_weak(__int_type& __i1, __int_type __i2,
-                            memory_order __m1, memory_order __m2) volatile
+                            memory_order __m1,
+                            memory_order __m2) volatile noexcept
       { return compare_exchange_strong(__i1, __i2, __m1, __m2); }
 
       bool
       compare_exchange_weak(__int_type& __i1, __int_type __i2,
-                            memory_order __m = memory_order_seq_cst)
+                            memory_order __m = memory_order_seq_cst) noexcept
       {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __calculate_memory_order(__m));
@@ -337,7 +341,7 @@
 
       bool
       compare_exchange_weak(__int_type& __i1, __int_type __i2,
-                            memory_order __m = memory_order_seq_cst) volatile
+                            memory_order __m = memory_order_seq_cst) volatile noexcept
       {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __calculate_memory_order(__m));
@@ -345,7 +349,7 @@
 
       bool
       compare_exchange_strong(__int_type& __i1, __int_type __i2,
-                              memory_order __m1, memory_order __m2)
+                              memory_order __m1, memory_order __m2) noexcept
       {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
@@ -361,7 +365,8 @@
 
       bool
       compare_exchange_strong(__int_type& __i1, __int_type __i2,
-                              memory_order __m1, memory_order __m2) volatile
+                              memory_order __m1,
+                              memory_order __m2) volatile noexcept
       {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
@@ -377,7 +382,7 @@
 
       bool
       compare_exchange_strong(__int_type& __i1, __int_type __i2,
-                              memory_order __m = memory_order_seq_cst)
+                              memory_order __m = memory_order_seq_cst) noexcept
       {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __calculate_memory_order(__m));
@@ -385,55 +390,60 @@
 
       bool
       compare_exchange_strong(__int_type& __i1, __int_type __i2,
-                              memory_order __m = memory_order_seq_cst) volatile
+                              memory_order __m = memory_order_seq_cst) volatile noexcept
       {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __calculate_memory_order(__m));
       }
 
       __int_type
-      fetch_add(__int_type __i, memory_order __m = memory_order_seq_cst)
+      fetch_add(__int_type __i,
+                memory_order __m = memory_order_seq_cst) noexcept
       { return __sync_fetch_and_add(&_M_i, __i); }
 
       __int_type
       fetch_add(__int_type __i,
-                memory_order __m = memory_order_seq_cst) volatile
+                memory_order __m = memory_order_seq_cst) volatile noexcept
       { return __sync_fetch_and_add(&_M_i, __i); }
 
       __int_type
-      fetch_sub(__int_type __i, memory_order __m = memory_order_seq_cst)
+      fetch_sub(__int_type __i,
+                memory_order __m = memory_order_seq_cst) noexcept
       { return __sync_fetch_and_sub(&_M_i, __i); }
 
       __int_type
       fetch_sub(__int_type __i,
-                memory_order __m = memory_order_seq_cst) volatile
+                memory_order __m = memory_order_seq_cst) volatile noexcept
       { return __sync_fetch_and_sub(&_M_i, __i); }
 
       __int_type
-      fetch_and(__int_type __i, memory_order __m = memory_order_seq_cst)
+      fetch_and(__int_type __i,
+                memory_order __m = memory_order_seq_cst) noexcept
       { return __sync_fetch_and_and(&_M_i, __i); }
 
       __int_type
       fetch_and(__int_type __i,
-                memory_order __m = memory_order_seq_cst) volatile
+                memory_order __m = memory_order_seq_cst) volatile noexcept
       { return __sync_fetch_and_and(&_M_i, __i); }
 
       __int_type
-      fetch_or(__int_type __i, memory_order __m = memory_order_seq_cst)
+      fetch_or(__int_type __i,
+               memory_order __m = memory_order_seq_cst) noexcept
       { return __sync_fetch_and_or(&_M_i, __i); }
 
       __int_type
       fetch_or(__int_type __i,
-               memory_order __m = memory_order_seq_cst) volatile
+               memory_order __m = memory_order_seq_cst) volatile noexcept
       { return __sync_fetch_and_or(&_M_i, __i); }
 
       __int_type
-      fetch_xor(__int_type __i, memory_order __m = memory_order_seq_cst)
+      fetch_xor(__int_type __i,
+                memory_order __m = memory_order_seq_cst) noexcept
       { return __sync_fetch_and_xor(&_M_i, __i); }
 
       __int_type
       fetch_xor(__int_type __i,
-                memory_order __m = memory_order_seq_cst) volatile
+                memory_order __m = memory_order_seq_cst) volatile noexcept
       { return __sync_fetch_and_xor(&_M_i, __i); }
     };
@@ -448,93 +458,94 @@
       __pointer_type	_M_p;
 
     public:
-      __atomic_base() = default;
-      ~__atomic_base() = default;
+      __atomic_base() noexcept = default;
+      ~__atomic_base() noexcept = default;
       __atomic_base(const __atomic_base&) = delete;
       __atomic_base& operator=(const __atomic_base&) = delete;
       __atomic_base& operator=(const __atomic_base&) volatile = delete;
 
       // Requires __pointer_type convertible to _M_p.
-      constexpr __atomic_base(__pointer_type __p): _M_p (__p) { }
+      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }
 
-      operator __pointer_type() const
+      operator __pointer_type() const noexcept
      { return load(); }
 
-      operator __pointer_type() const volatile
+      operator __pointer_type() const volatile noexcept
       { return load(); }
 
       __pointer_type
-      operator=(__pointer_type __p)
+      operator=(__pointer_type __p) noexcept
       {
        store(__p);
        return __p;
       }
 
       __pointer_type
-      operator=(__pointer_type __p) volatile
+      operator=(__pointer_type __p) volatile noexcept
       {
        store(__p);
        return __p;
       }
 
       __pointer_type
-      operator++(int)
+      operator++(int) noexcept
      { return fetch_add(1); }
 
       __pointer_type
-      operator++(int) volatile
+      operator++(int) volatile noexcept
       { return fetch_add(1); }
 
       __pointer_type
-      operator--(int)
+      operator--(int) noexcept
       { return fetch_sub(1); }
 
       __pointer_type
-      operator--(int) volatile
+      operator--(int) volatile noexcept
       { return fetch_sub(1); }
 
       __pointer_type
-      operator++()
+      operator++() noexcept
       { return fetch_add(1) + 1; }
 
       __pointer_type
-      operator++() volatile
+      operator++() volatile noexcept
       { return fetch_add(1) + 1; }
 
       __pointer_type
-      operator--()
+      operator--() noexcept
       { return fetch_sub(1) -1; }
 
       __pointer_type
-      operator--() volatile
+      operator--() volatile noexcept
       { return fetch_sub(1) -1; }
 
       __pointer_type
-      operator+=(ptrdiff_t __d)
+      operator+=(ptrdiff_t __d) noexcept
       { return fetch_add(__d) + __d; }
 
       __pointer_type
-      operator+=(ptrdiff_t __d) volatile
+      operator+=(ptrdiff_t __d) volatile noexcept
       { return fetch_add(__d) + __d; }
 
       __pointer_type
-      operator-=(ptrdiff_t __d)
+      operator-=(ptrdiff_t __d) noexcept
      { return fetch_sub(__d) - __d; }
 
       __pointer_type
-      operator-=(ptrdiff_t __d) volatile
+      operator-=(ptrdiff_t __d) volatile noexcept
       { return fetch_sub(__d) - __d; }
 
       bool
-      is_lock_free() const
+      is_lock_free() const noexcept
       { return true; }
 
       bool
-      is_lock_free() const volatile
+      is_lock_free() const volatile noexcept
       { return true; }
 
       void
-      store(__pointer_type __p, memory_order __m = memory_order_seq_cst)
+      store(__pointer_type __p,
+            memory_order __m = memory_order_seq_cst) noexcept
       {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
@@ -553,7 +564,7 @@
 
       void
       store(__pointer_type __p,
-            memory_order __m = memory_order_seq_cst) volatile
+            memory_order __m = memory_order_seq_cst) volatile noexcept
       {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
@@ -571,7 +582,7 @@
       }
 
       __pointer_type
-      load(memory_order __m = memory_order_seq_cst) const
+      load(memory_order __m = memory_order_seq_cst) const noexcept
       {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);
@@ -583,7 +594,7 @@
       }
 
       __pointer_type
-      load(memory_order __m = memory_order_seq_cst) const volatile
+      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
       {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);
@@ -595,7 +606,8 @@
       }
 
       __pointer_type
-      exchange(__pointer_type __p, memory_order __m = memory_order_seq_cst)
+      exchange(__pointer_type __p,
+               memory_order __m = memory_order_seq_cst) noexcept
       {
        // XXX built-in assumes memory_order_acquire.
        return __sync_lock_test_and_set(&_M_p, __p);
@@ -604,7 +616,7 @@
 
       __pointer_type
       exchange(__pointer_type __p,
-               memory_order __m = memory_order_seq_cst) volatile
+               memory_order __m = memory_order_seq_cst) volatile noexcept
       {
        // XXX built-in assumes memory_order_acquire.
        return __sync_lock_test_and_set(&_M_p, __p);
@@ -612,7 +624,8 @@
 
       bool
       compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
-                              memory_order __m1, memory_order __m2)
+                              memory_order __m1,
+                              memory_order __m2) noexcept
       {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
@@ -628,7 +641,8 @@
 
       bool
       compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
-                              memory_order __m1, memory_order __m2) volatile
+                              memory_order __m1,
+                              memory_order __m2) volatile noexcept
       {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
@@ -643,21 +657,23 @@
       }
 
       __pointer_type
-      fetch_add(ptrdiff_t __d, memory_order __m = memory_order_seq_cst)
+      fetch_add(ptrdiff_t __d,
+                memory_order __m = memory_order_seq_cst) noexcept
       { return __sync_fetch_and_add(&_M_p, __d); }
 
       __pointer_type
       fetch_add(ptrdiff_t __d,
-                memory_order __m = memory_order_seq_cst) volatile
+                memory_order __m = memory_order_seq_cst) volatile noexcept
       { return __sync_fetch_and_add(&_M_p, __d); }
 
       __pointer_type
-      fetch_sub(ptrdiff_t __d, memory_order __m = memory_order_seq_cst)
+      fetch_sub(ptrdiff_t __d,
+                memory_order __m = memory_order_seq_cst) noexcept
      { return __sync_fetch_and_sub(&_M_p, __d); }
 
       __pointer_type
       fetch_sub(ptrdiff_t __d,
-                memory_order __m = memory_order_seq_cst) volatile
+                memory_order __m = memory_order_seq_cst) volatile noexcept
       { return __sync_fetch_and_sub(&_M_p, __d); }
     };
Index: include/bits/atomic_base.h
===================================================================
--- include/bits/atomic_base.h	(revision 177411)
+++ include/bits/atomic_base.h	(working copy)
@@ -59,7 +59,7 @@
     } memory_order;
 
   inline memory_order
-  __calculate_memory_order(memory_order __m)
+  __calculate_memory_order(memory_order __m) noexcept
   {
     const bool __cond1 = __m == memory_order_release;
     const bool __cond2 = __m == memory_order_acq_rel;
@@ -69,15 +69,15 @@
   }
 
   void
-  atomic_thread_fence(memory_order);
+  atomic_thread_fence(memory_order) noexcept;
 
   void
-  atomic_signal_fence(memory_order);
+  atomic_signal_fence(memory_order) noexcept;
 
   /// kill_dependency
   template<typename _Tp>
    inline _Tp
-    kill_dependency(_Tp __y)
+    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;