// -*- C++ -*- header.

// Copyright (C) 2008-2015 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file bits/atomic_base.h
 *  This is an internal header file, included by other library headers.
 *  Do not attempt to use it directly. @headername{atomic}
 */

#ifndef _GLIBCXX_ATOMIC_BASE_H
#define _GLIBCXX_ATOMIC_BASE_H 1

#pragma GCC system_header

#include <bits/c++config.h>
#include <stdbool.h>
#include <stdint.h>

#ifndef _GLIBCXX_ALWAYS_INLINE
#define _GLIBCXX_ALWAYS_INLINE inline __attribute__((__always_inline__))
#endif

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @defgroup atomics Atomics
   *
   * Components for performing atomic operations.
   * @{
   */

  /// Enumeration for memory_order
  typedef enum memory_order
    {
      memory_order_relaxed,
      memory_order_consume,
      memory_order_acquire,
      memory_order_release,
      memory_order_acq_rel,
      memory_order_seq_cst
    } memory_order;

  enum __memory_order_modifier
    {
      __memory_order_mask          = 0x0ffff,
      __memory_order_modifier_mask = 0xffff0000,
      __memory_order_hle_acquire   = 0x10000,
      __memory_order_hle_release   = 0x20000
    };

  constexpr memory_order
  operator|(memory_order __m, __memory_order_modifier __mod)
  {
    return memory_order(__m | int(__mod));
  }

  constexpr memory_order
  operator&(memory_order __m, __memory_order_modifier __mod)
  {
    return memory_order(__m & int(__mod));
  }
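
  // Illustrative sketch (not part of the original header): the modifier
  // bits can be OR-ed onto a memory_order via the operators above to
  // request Intel TSX hardware lock elision on targets that support it.
  // The lock/critical-section shape below is an assumption for the example:
  //
  //   std::atomic<int> lock(0);
  //   while (lock.exchange(1, std::memory_order_acquire
  //                             | std::__memory_order_hle_acquire))
  //     ;                                   // spin until the lock was free
  //   // ...critical section...
  //   lock.store(0, std::memory_order_release
  //                   | std::__memory_order_hle_release);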

  // Drop release ordering as per [atomics.types.operations.req]/21
  constexpr memory_order
  __cmpexch_failure_order2(memory_order __m) noexcept
  {
    return __m == memory_order_acq_rel ? memory_order_acquire
      : __m == memory_order_release ? memory_order_relaxed : __m;
  }

  constexpr memory_order
  __cmpexch_failure_order(memory_order __m) noexcept
  {
    return memory_order(__cmpexch_failure_order2(__m & __memory_order_mask)
      | (__m & __memory_order_modifier_mask));
  }
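
  // Illustrative sketch (not part of the original header): the mapping
  // implemented above, e.g.
  //
  //   __cmpexch_failure_order(memory_order_acq_rel) == memory_order_acquire
  //   __cmpexch_failure_order(memory_order_release) == memory_order_relaxed
  //   __cmpexch_failure_order(memory_order_seq_cst) == memory_order_seq_cst
  //
  // A failed compare-and-exchange performs no store, so any release
  // component of the requested ordering is dropped; the HLE modifier
  // bits are passed through unchanged.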

  _GLIBCXX_ALWAYS_INLINE void
  atomic_thread_fence(memory_order __m) noexcept
  { __atomic_thread_fence(__m); }

  _GLIBCXX_ALWAYS_INLINE void
  atomic_signal_fence(memory_order __m) noexcept
  { __atomic_signal_fence(__m); }
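
  // Illustrative sketch (not part of the original header): pairing
  // relaxed atomic accesses with explicit fences, e.g.
  //
  //   std::atomic<int> data(0), ready(0);
  //   // producer:
  //   data.store(42, std::memory_order_relaxed);
  //   std::atomic_thread_fence(std::memory_order_release);
  //   ready.store(1, std::memory_order_relaxed);
  //   // consumer:
  //   while (!ready.load(std::memory_order_relaxed)) { }
  //   std::atomic_thread_fence(std::memory_order_acquire);
  //   int value = data.load(std::memory_order_relaxed);  // guaranteed 42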

  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }
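
  // Illustrative sketch (not part of the original header): kill_dependency
  // terminates a memory_order_consume dependency chain, e.g.
  //
  //   std::atomic<int*> p;
  //   int* ptr = p.load(std::memory_order_consume);
  //   int a = *ptr;                        // carries a dependency on the load
  //   int b = *std::kill_dependency(ptr);  // dependency explicitly broken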


  // Base types for atomics.
  template<typename _ITp>
    struct __atomic_base;


#define ATOMIC_VAR_INIT(_VI) { _VI }
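
  // Illustrative sketch (not part of the original header): C-compatible
  // static initialization of an atomic object, e.g.
  //
  //   std::atomic<int> counter = ATOMIC_VAR_INIT(0);  // i.e. counter = { 0 }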

  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;

  /* The target's "set" value for test-and-set may not be exactly 1.  */
#if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
  typedef bool __atomic_flag_data_type;
#else
  typedef unsigned char __atomic_flag_data_type;
#endif

  /**
   *  @brief Base type for atomic_flag.
   *
   *  Base type is POD with data, allowing atomic_flag to derive from
   *  it and meet the standard layout type requirement. In addition to
   *  compatibility with a C interface, this allows different
   *  implementations of atomic_flag to use the same atomic operation
   *  functions, via a standard conversion to the __atomic_flag_base
   *  argument.
   */
  _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    __atomic_flag_data_type _M_i;
  };

  _GLIBCXX_END_EXTERN_C

#define ATOMIC_FLAG_INIT { 0 }

  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() noexcept = default;
    ~atomic_flag() noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Conversion to ATOMIC_FLAG_INIT.
    constexpr atomic_flag(bool __i) noexcept
      : __atomic_flag_base{ _S_init(__i) }
    { }

    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

  private:
    static constexpr __atomic_flag_data_type
    _S_init(bool __i)
    { return __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0; }
  };
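
  // Illustrative sketch (not part of the original header): atomic_flag
  // as a minimal spin lock, e.g.
  //
  //   std::atomic_flag lock = ATOMIC_FLAG_INIT;
  //   while (lock.test_and_set(std::memory_order_acquire))
  //     ;                                    // spin until the flag was clear
  //   // ...critical section...
  //   lock.clear(std::memory_order_release); // a releasing clear; acquire,
  //                                          // consume and acq_rel are
  //                                          // rejected by the asserts above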


  /// Base class for atomic integrals.
  //
  // For each of the integral types, define atomic_[integral type] struct
  //
  // atomic_bool     bool
  // atomic_char     char
  // atomic_schar    signed char
  // atomic_uchar    unsigned char
  // atomic_short    short
  // atomic_ushort   unsigned short
  // atomic_int      int
  // atomic_uint     unsigned int
  // atomic_long     long
  // atomic_ulong    unsigned long
  // atomic_llong    long long
  // atomic_ullong   unsigned long long
  // atomic_char16_t char16_t
  // atomic_char32_t char32_t
  // atomic_wchar_t  wchar_t
  //
  // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
  // 8 bytes, since that is what GCC built-in functions for atomic
  // memory access expect.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp __int_type;

      static constexpr int _S_alignment =
        sizeof(_ITp) > alignof(_ITp) ? sizeof(_ITp) : alignof(_ITp);

      alignas(_S_alignment) __int_type _M_i;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }

      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
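
      // Illustrative sketch (not part of the original header): the
      // compound-assignment operators above return the *new* value
      // (__atomic_add_fetch and friends), while the fetch_* members
      // below return the *old* one, e.g.
      //
      //   std::atomic<int> n(5);
      //   int a = (n += 2);        // a == 7, n == 7
      //   int b = n.fetch_add(2);  // b == 7, n == 9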

      bool
      is_lock_free() const noexcept
      {
        // Use a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-__alignof(_M_i)));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        // Use a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-__alignof(_M_i)));
      }
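
      // Illustrative sketch (not part of the original header): the value
      // -__alignof(_M_i), reinterpreted as an address, is aligned to
      // exactly __alignof(_M_i) and no more, so the builtin answers for
      // the worst-aligned object of this type rather than for one
      // concrete object, e.g.
      //
      //   std::atomic<long long> x(0);
      //   if (x.is_lock_free())
      //     { /* fast path: no mutex behind the atomic ops */ }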

      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }
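
      // Illustrative sketch (not part of the original header): the
      // assertions above encode that a store may only be relaxed,
      // release or seq_cst, and a load only relaxed, consume, acquire
      // or seq_cst, e.g.
      //
      //   std::atomic<int> flag(0);
      //   flag.store(1, std::memory_order_release);     // OK
      //   int v = flag.load(std::memory_order_acquire); // OK
      //   // flag.store(1, std::memory_order_acquire);  // fires the assert
      //   //   when libstdc++ debug assertions are enabled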

      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }


      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;

        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }
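
      // Illustrative sketch (not part of the original header): the weak
      // form may fail spuriously even when the values match, so it is
      // normally used in a retry loop, e.g.
      //
      //   std::atomic<int> n(0);
      //   int old = n.load(std::memory_order_relaxed);
      //   while (!n.compare_exchange_weak(old, old * 2 + 1,
      //                                   std::memory_order_acq_rel,
      //                                   std::memory_order_relaxed))
      //     ;   // on failure, old is reloaded with the current value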

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
    };
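
  // Illustrative sketch (not part of the original header): the fetch_*
  // operations make lock-free bit manipulation straightforward; the
  // flag names are assumptions for the example:
  //
  //   enum { DIRTY = 1u, LOCKED = 2u };
  //   std::atomic<unsigned> state(0);
  //   unsigned prev = state.fetch_or(DIRTY, std::memory_order_acq_rel);
  //   bool was_dirty = prev & DIRTY;       // the old value is returned
  //   state.fetch_and(~unsigned(DIRTY), std::memory_order_release);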


  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp* __pointer_type;

      __pointer_type _M_p;

      // Factored out to facilitate explicit specialization.
      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const { return __d * sizeof(_PTp); }

      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const volatile { return __d * sizeof(_PTp); }
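
      // Illustrative sketch (not part of the original header): the GCC
      // builtins operate on raw byte offsets, so pointer arithmetic is
      // scaled by the element size here, e.g.
      //
      //   int buf[8];                        // assumed for the example
      //   std::atomic<int*> p(buf);
      //   p.fetch_add(2);                    // advances 2 * sizeof(int) bytes,
      //                                      // i.e. p.load() == buf + 2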

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __pointer_type convertible to _M_p.
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }

      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_p),
            reinterpret_cast<void *>(-__alignof(_M_p)));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_p),
            reinterpret_cast<void *>(-__alignof(_M_p)));
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;

        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }


      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;

        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
    };
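
  // Illustrative sketch (not part of the original header): an atomic
  // pointer as a bump cursor over a fixed buffer, e.g.
  //
  //   static char buf[1024];                 // assumed for the example
  //   std::atomic<char*> cursor(buf);
  //   char* mine = cursor.fetch_add(16);     // claim 16 bytes; returns the
  //                                          // old cursor, scaled by sizeof(char)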

  // @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif