00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012
00013
00014
00015
00016
00017
00018
00019
00020
00021
00022
00023
00024
00025
00026
00027
00028
00029
00030
00031 #ifndef _GLIBCXX_ATOMIC_0_H
00032 #define _GLIBCXX_ATOMIC_0_H 1
00033
00034 #pragma GCC system_header
00035
00036 namespace std _GLIBCXX_VISIBILITY(default)
00037 {
00038 _GLIBCXX_BEGIN_NAMESPACE_VERSION
00039
00040
00041 namespace __atomic0
00042 {
00043 _GLIBCXX_BEGIN_EXTERN_C
00044
  // Lock primitives backing the __atomic0 (lock-based) implementation.
  // Definitions live in the library, not in this header.

  // Release the flag __g, with the semantics of the given memory_order.
  void
  atomic_flag_clear_explicit(__atomic_flag_base*, memory_order)
  _GLIBCXX_NOTHROW;

  // Spin until the flag __g is acquired; pairs with
  // atomic_flag_clear_explicit to bracket each locked operation.
  void
  __atomic_flag_wait_explicit(__atomic_flag_base*, memory_order)
  _GLIBCXX_NOTHROW;

  // Map an object address to the flag guarding it (a fixed side table
  // of flags, hence _GLIBCXX_CONST: same address -> same flag).
  _GLIBCXX_CONST __atomic_flag_base*
  __atomic_flag_for_address(const volatile void* __z) _GLIBCXX_NOTHROW;
00055
00056 _GLIBCXX_END_EXTERN_C
00057
00058
// Name of the data member the _ATOMIC_* macros below operate on.  The
// macros are only expanded inside member functions of the __atomic_base
// classes, where _M_i is in scope.
#define _ATOMIC_MEMBER_ _M_i

// Locked load (GNU statement expression): acquire the side-table flag
// guarding &_M_i, copy the value, release the flag, yield the copy.
// __a (the object) is unused; __x is the memory_order forwarded to the
// flag primitives.
#define _ATOMIC_LOAD_(__a, __x)                                            \
  ({typedef __typeof__(_ATOMIC_MEMBER_) __i_type;                          \
    __i_type* __p = &_ATOMIC_MEMBER_;                                      \
    __atomic_flag_base* __g = __atomic_flag_for_address(__p);              \
    __atomic_flag_wait_explicit(__g, __x);                                 \
    __i_type __r = *__p;                                                   \
    atomic_flag_clear_explicit(__g, __x);                                  \
    __r; })

// Locked store of __n into _M_i; yields the stored value.  __n is
// evaluated exactly once, before the lock is taken.
#define _ATOMIC_STORE_(__a, __n, __x)                                      \
  ({typedef __typeof__(_ATOMIC_MEMBER_) __i_type;                          \
    __i_type* __p = &_ATOMIC_MEMBER_;                                      \
    __typeof__(__n) __w = (__n);                                           \
    __atomic_flag_base* __g = __atomic_flag_for_address(__p);              \
    __atomic_flag_wait_explicit(__g, __x);                                 \
    *__p = __w;                                                            \
    atomic_flag_clear_explicit(__g, __x);                                  \
    __w; })

// Locked read-modify-write: __o is a compound-assignment operator token
// (+=, -=, &=, |=, ^=, or = for exchange) spliced between *__p and __w.
// Yields the value read BEFORE the modification (fetch_* semantics).
#define _ATOMIC_MODIFY_(__a, __o, __n, __x)                                \
  ({typedef __typeof__(_ATOMIC_MEMBER_) __i_type;                          \
    __i_type* __p = &_ATOMIC_MEMBER_;                                      \
    __typeof__(__n) __w = (__n);                                           \
    __atomic_flag_base* __g = __atomic_flag_for_address(__p);              \
    __atomic_flag_wait_explicit(__g, __x);                                 \
    __i_type __r = *__p;                                                   \
    *__p __o __w;                                                          \
    atomic_flag_clear_explicit(__g, __x);                                  \
    __r; })

// Locked compare-and-exchange: __e points at the expected value.  On
// success stores __n and yields true; on failure writes the observed
// value back through __e and yields false.  Under the lock this is
// always a strong CAS (no spurious failure).
#define _ATOMIC_CMPEXCHNG_(__a, __e, __n, __x)                             \
  ({typedef __typeof__(_ATOMIC_MEMBER_) __i_type;                          \
    __i_type* __p = &_ATOMIC_MEMBER_;                                      \
    __typeof__(__e) __q = (__e);                                           \
    __typeof__(__n) __w = (__n);                                           \
    bool __r;                                                              \
    __atomic_flag_base* __g = __atomic_flag_for_address(__p);              \
    __atomic_flag_wait_explicit(__g, __x);                                 \
    __i_type __t = *__p;                                                   \
    if (*__q == __t)                                                       \
      {                                                                    \
	*__p = (__i_type)__w;                                              \
	__r = true;                                                        \
      }                                                                    \
    else { *__q = __t; __r = false; }                                      \
    atomic_flag_clear_explicit(__g, __x);                                  \
    __r; })
00109
00110
00111
  /// atomic_flag, lock-based (__atomic0) flavor.  Inherits the flag
  /// storage from __atomic_flag_base; the member functions declared here
  /// are defined out of line in the library.
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() = default;
    ~atomic_flag() = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Conversion constructor used by ATOMIC_FLAG_INIT-style
    // aggregate initialization of the base.
    atomic_flag(bool __i): __atomic_flag_base({ __i }) { }

    // Atomically set the flag and return its previous value.
    bool
    test_and_set(memory_order __m = memory_order_seq_cst);

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile;

    // Atomically clear the flag.
    void
    clear(memory_order __m = memory_order_seq_cst);

    void
    clear(memory_order __m = memory_order_seq_cst) volatile;
  };
00135
00136
00137
00138
00139
00140
00141
00142
00143
00144
00145
00146
00147
00148
00149
00150
00151
00152
00153
00154
00155
00156
00157
00158
00159
  /// Base class for atomic integrals, lock-based (__atomic0) version.
  ///
  /// Every operation locks the spin flag that __atomic_flag_for_address
  /// associates with &_M_i, performs a plain read/modify/write, and
  /// unlocks -- see the _ATOMIC_* macros above.  Consequently
  /// is_lock_free() reports false.  The memory_order arguments are
  /// forwarded to the flag primitives and validated with
  /// __glibcxx_assert per the constraints in [atomics.types.operations].
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp __int_type;

      // The value; guarded by the external flag table, never accessed
      // directly by users of this class.
      __int_type _M_i;

    public:
      __atomic_base() = default;
      ~__atomic_base() = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Non-atomic initialization; constexpr so namespace-scope atomics
      // can be constant-initialized.
      constexpr __atomic_base(__int_type __i): _M_i (__i) { }

      // Implicit conversion performs a seq_cst load.
      operator __int_type() const
      { return load(); }

      operator __int_type() const volatile
      { return load(); }

      // Assignment performs a seq_cst store and returns the value
      // assigned (not *this), as the standard requires.
      __int_type
      operator=(__int_type __i)
      {
	store(__i);
	return __i;
      }

      __int_type
      operator=(__int_type __i) volatile
      {
	store(__i);
	return __i;
      }

      // Post-increment/decrement: value before the operation.
      __int_type
      operator++(int)
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile
      { return fetch_add(1); }

      __int_type
      operator--(int)
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile
      { return fetch_sub(1); }

      // Pre-increment/decrement: value after the operation,
      // reconstructed from the fetched old value.
      __int_type
      operator++()
      { return fetch_add(1) + 1; }

      __int_type
      operator++() volatile
      { return fetch_add(1) + 1; }

      __int_type
      operator--()
      { return fetch_sub(1) - 1; }

      __int_type
      operator--() volatile
      { return fetch_sub(1) - 1; }

      // Compound assignments: atomic RMW, returning the new value
      // (old value combined with the operand outside the lock).
      __int_type
      operator+=(__int_type __i)
      { return fetch_add(__i) + __i; }

      __int_type
      operator+=(__int_type __i) volatile
      { return fetch_add(__i) + __i; }

      __int_type
      operator-=(__int_type __i)
      { return fetch_sub(__i) - __i; }

      __int_type
      operator-=(__int_type __i) volatile
      { return fetch_sub(__i) - __i; }

      __int_type
      operator&=(__int_type __i)
      { return fetch_and(__i) & __i; }

      __int_type
      operator&=(__int_type __i) volatile
      { return fetch_and(__i) & __i; }

      __int_type
      operator|=(__int_type __i)
      { return fetch_or(__i) | __i; }

      __int_type
      operator|=(__int_type __i) volatile
      { return fetch_or(__i) | __i; }

      __int_type
      operator^=(__int_type __i)
      { return fetch_xor(__i) ^ __i; }

      __int_type
      operator^=(__int_type __i) volatile
      { return fetch_xor(__i) ^ __i; }

      // This implementation always takes the lock, so it is never
      // lock-free.
      bool
      is_lock_free() const
      { return false; }

      bool
      is_lock_free() const volatile
      { return false; }

      // store: the standard forbids acquire, acq_rel and consume
      // orderings for a pure store.
      void
      store(__int_type __i, memory_order __m = memory_order_seq_cst)
      {
	__glibcxx_assert(__m != memory_order_acquire);
	__glibcxx_assert(__m != memory_order_acq_rel);
	__glibcxx_assert(__m != memory_order_consume);
	_ATOMIC_STORE_(this, __i, __m);
      }

      void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) volatile
      {
	__glibcxx_assert(__m != memory_order_acquire);
	__glibcxx_assert(__m != memory_order_acq_rel);
	__glibcxx_assert(__m != memory_order_consume);
	_ATOMIC_STORE_(this, __i, __m);
      }

      // load: release and acq_rel orderings are invalid for a pure load.
      __int_type
      load(memory_order __m = memory_order_seq_cst) const
      {
	__glibcxx_assert(__m != memory_order_release);
	__glibcxx_assert(__m != memory_order_acq_rel);
	return _ATOMIC_LOAD_(this, __m);
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile
      {
	__glibcxx_assert(__m != memory_order_release);
	__glibcxx_assert(__m != memory_order_acq_rel);
	return _ATOMIC_LOAD_(this, __m);
      }

      // exchange: plain assignment under the lock, returning the old
      // value (the "=" token is spliced into _ATOMIC_MODIFY_).
      __int_type
      exchange(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, =, __i, __m); }

      __int_type
      exchange(__int_type __i, memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, =, __i, __m); }

      // compare_exchange: __m2 (failure order) may not be release or
      // acq_rel and may not be stronger than __m1 (success order).
      // Under the lock weak and strong variants are identical: no
      // spurious failures are possible.
      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
			    memory_order __m1, memory_order __m2)
      {
	__glibcxx_assert(__m2 != memory_order_release);
	__glibcxx_assert(__m2 != memory_order_acq_rel);
	__glibcxx_assert(__m2 <= __m1);
	return _ATOMIC_CMPEXCHNG_(this, &__i1, __i2, __m1);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
			    memory_order __m1, memory_order __m2) volatile
      {
	__glibcxx_assert(__m2 != memory_order_release);
	__glibcxx_assert(__m2 != memory_order_acq_rel);
	__glibcxx_assert(__m2 <= __m1);
	return _ATOMIC_CMPEXCHNG_(this, &__i1, __i2, __m1);
      }

      // Single-order overloads derive the failure order from the
      // success order via __calculate_memory_order (defined elsewhere
      // in <atomic>).
      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
			    memory_order __m = memory_order_seq_cst)
      {
	return compare_exchange_weak(__i1, __i2, __m,
				     __calculate_memory_order(__m));
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
			    memory_order __m = memory_order_seq_cst) volatile
      {
	return compare_exchange_weak(__i1, __i2, __m,
				     __calculate_memory_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
			      memory_order __m1, memory_order __m2)
      {
	__glibcxx_assert(__m2 != memory_order_release);
	__glibcxx_assert(__m2 != memory_order_acq_rel);
	__glibcxx_assert(__m2 <= __m1);
	return _ATOMIC_CMPEXCHNG_(this, &__i1, __i2, __m1);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
			      memory_order __m1, memory_order __m2) volatile
      {
	__glibcxx_assert(__m2 != memory_order_release);
	__glibcxx_assert(__m2 != memory_order_acq_rel);
	__glibcxx_assert(__m2 <= __m1);
	return _ATOMIC_CMPEXCHNG_(this, &__i1, __i2, __m1);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
			      memory_order __m = memory_order_seq_cst)
      {
	return compare_exchange_strong(__i1, __i2, __m,
				       __calculate_memory_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
			      memory_order __m = memory_order_seq_cst) volatile
      {
	return compare_exchange_strong(__i1, __i2, __m,
				       __calculate_memory_order(__m));
      }

      // fetch_* family: atomic RMW returning the PREVIOUS value; the
      // compound-assignment operator token is passed into the macro.
      __int_type
      fetch_add(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, +=, __i, __m); }

      __int_type
      fetch_add(__int_type __i,
		memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, +=, __i, __m); }

      __int_type
      fetch_sub(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, -=, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
		memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, -=, __i, __m); }

      __int_type
      fetch_and(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, &=, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
		memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, &=, __i, __m); }

      __int_type
      fetch_or(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, |=, __i, __m); }

      __int_type
      fetch_or(__int_type __i, memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, |=, __i, __m); }

      __int_type
      fetch_xor(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, ^=, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
		memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, ^=, __i, __m); }
    };
00436
00437
00438
00439 template<typename _PTp>
00440 struct __atomic_base<_PTp*>
00441 {
00442 private:
00443 typedef _PTp* __return_pointer_type;
00444 typedef void* __pointer_type;
00445 __pointer_type _M_i;
00446
00447 public:
00448 __atomic_base() = default;
00449 ~__atomic_base() = default;
00450 __atomic_base(const __atomic_base&) = delete;
00451 __atomic_base& operator=(const __atomic_base&) = delete;
00452 __atomic_base& operator=(const __atomic_base&) volatile = delete;
00453
00454
00455 constexpr __atomic_base(__return_pointer_type __p): _M_i (__p) { }
00456
00457 operator __return_pointer_type() const
00458 { return reinterpret_cast<__return_pointer_type>(load()); }
00459
00460 operator __return_pointer_type() const volatile
00461 { return reinterpret_cast<__return_pointer_type>(load()); }
00462
00463 __return_pointer_type
00464 operator=(__pointer_type __p)
00465 {
00466 store(__p);
00467 return reinterpret_cast<__return_pointer_type>(__p);
00468 }
00469
00470 __return_pointer_type
00471 operator=(__pointer_type __p) volatile
00472 {
00473 store(__p);
00474 return reinterpret_cast<__return_pointer_type>(__p);
00475 }
00476
00477 __return_pointer_type
00478 operator++(int)
00479 { return reinterpret_cast<__return_pointer_type>(fetch_add(1)); }
00480
00481 __return_pointer_type
00482 operator++(int) volatile
00483 { return reinterpret_cast<__return_pointer_type>(fetch_add(1)); }
00484
00485 __return_pointer_type
00486 operator--(int)
00487 { return reinterpret_cast<__return_pointer_type>(fetch_sub(1)); }
00488
00489 __return_pointer_type
00490 operator--(int) volatile
00491 { return reinterpret_cast<__return_pointer_type>(fetch_sub(1)); }
00492
00493 __return_pointer_type
00494 operator++()
00495 { return reinterpret_cast<__return_pointer_type>(fetch_add(1) + 1); }
00496
00497 __return_pointer_type
00498 operator++() volatile
00499 { return reinterpret_cast<__return_pointer_type>(fetch_add(1) + 1); }
00500
00501 __return_pointer_type
00502 operator--()
00503 { return reinterpret_cast<__return_pointer_type>(fetch_sub(1) - 1); }
00504
00505 __return_pointer_type
00506 operator--() volatile
00507 { return reinterpret_cast<__return_pointer_type>(fetch_sub(1) - 1); }
00508
00509 __return_pointer_type
00510 operator+=(ptrdiff_t __d)
00511 { return reinterpret_cast<__return_pointer_type>(fetch_add(__d) + __d); }
00512
00513 __return_pointer_type
00514 operator+=(ptrdiff_t __d) volatile
00515 { return reinterpret_cast<__return_pointer_type>(fetch_add(__d) + __d); }
00516
00517 __return_pointer_type
00518 operator-=(ptrdiff_t __d)
00519 { return reinterpret_cast<__return_pointer_type>(fetch_sub(__d) - __d); }
00520
00521 __return_pointer_type
00522 operator-=(ptrdiff_t __d) volatile
00523 { return reinterpret_cast<__return_pointer_type>(fetch_sub(__d) - __d); }
00524
00525 bool
00526 is_lock_free() const
00527 { return true; }
00528
00529 bool
00530 is_lock_free() const volatile
00531 { return true; }
00532
00533 void
00534 store(__pointer_type __p, memory_order __m = memory_order_seq_cst)
00535 {
00536 __glibcxx_assert(__m != memory_order_acquire);
00537 __glibcxx_assert(__m != memory_order_acq_rel);
00538 __glibcxx_assert(__m != memory_order_consume);
00539 _ATOMIC_STORE_(this, __p, __m);
00540 }
00541
00542 void
00543 store(__pointer_type __p,
00544 memory_order __m = memory_order_seq_cst) volatile
00545 {
00546 __glibcxx_assert(__m != memory_order_acquire);
00547 __glibcxx_assert(__m != memory_order_acq_rel);
00548 __glibcxx_assert(__m != memory_order_consume);
00549 volatile __pointer_type* __p2 = &_M_i;
00550 __typeof__(__p) __w = (__p);
00551 __atomic_flag_base* __g = __atomic_flag_for_address(__p2);
00552 __atomic_flag_wait_explicit(__g, __m);
00553 *__p2 = reinterpret_cast<__pointer_type>(__w);
00554 atomic_flag_clear_explicit(__g, __m);
00555 __w;
00556 }
00557
00558 __return_pointer_type
00559 load(memory_order __m = memory_order_seq_cst) const
00560 {
00561 __glibcxx_assert(__m != memory_order_release);
00562 __glibcxx_assert(__m != memory_order_acq_rel);
00563 void* __v = _ATOMIC_LOAD_(this, __m);
00564 return reinterpret_cast<__return_pointer_type>(__v);
00565 }
00566
00567 __return_pointer_type
00568 load(memory_order __m = memory_order_seq_cst) const volatile
00569 {
00570 __glibcxx_assert(__m != memory_order_release);
00571 __glibcxx_assert(__m != memory_order_acq_rel);
00572 void* __v = _ATOMIC_LOAD_(this, __m);
00573 return reinterpret_cast<__return_pointer_type>(__v);
00574 }
00575
00576 __return_pointer_type
00577 exchange(__pointer_type __p, memory_order __m = memory_order_seq_cst)
00578 {
00579 void* __v = _ATOMIC_MODIFY_(this, =, __p, __m);
00580 return reinterpret_cast<__return_pointer_type>(__v);
00581 }
00582
00583 __return_pointer_type
00584 exchange(__pointer_type __p,
00585 memory_order __m = memory_order_seq_cst) volatile
00586 {
00587 volatile __pointer_type* __p2 = &_M_i;
00588 __typeof__(__p) __w = (__p);
00589 __atomic_flag_base* __g = __atomic_flag_for_address(__p2);
00590 __atomic_flag_wait_explicit(__g, __m);
00591 __pointer_type __r = *__p2;
00592 *__p2 = __w;
00593 atomic_flag_clear_explicit(__g, __m);
00594 __r;
00595 return reinterpret_cast<__return_pointer_type>(_M_i);
00596 }
00597
00598 bool
00599 compare_exchange_strong(__return_pointer_type& __rp1, __pointer_type __p2,
00600 memory_order __m1, memory_order __m2)
00601 {
00602 __glibcxx_assert(__m2 != memory_order_release);
00603 __glibcxx_assert(__m2 != memory_order_acq_rel);
00604 __glibcxx_assert(__m2 <= __m1);
00605 __pointer_type& __p1 = reinterpret_cast<void*&>(__rp1);
00606 return _ATOMIC_CMPEXCHNG_(this, &__p1, __p2, __m1);
00607 }
00608
00609 bool
00610 compare_exchange_strong(__return_pointer_type& __rp1, __pointer_type __p2,
00611 memory_order __m1, memory_order __m2) volatile
00612 {
00613 __glibcxx_assert(__m2 != memory_order_release);
00614 __glibcxx_assert(__m2 != memory_order_acq_rel);
00615 __glibcxx_assert(__m2 <= __m1);
00616 __pointer_type& __p1 = reinterpret_cast<void*&>(__rp1);
00617 return _ATOMIC_CMPEXCHNG_(this, &__p1, __p2, __m1);
00618 }
00619
00620 __return_pointer_type
00621 fetch_add(ptrdiff_t __d, memory_order __m = memory_order_seq_cst)
00622 {
00623 void* __v = _ATOMIC_MODIFY_(this, +=, __d, __m);
00624 return reinterpret_cast<__return_pointer_type>(__v);
00625 }
00626
00627 __return_pointer_type
00628 fetch_add(ptrdiff_t __d,
00629 memory_order __m = memory_order_seq_cst) volatile
00630 {
00631 void* __v = _ATOMIC_MODIFY_(this, +=, __d, __m);
00632 return reinterpret_cast<__return_pointer_type>(__v);
00633 }
00634
00635 __return_pointer_type
00636 fetch_sub(ptrdiff_t __d, memory_order __m = memory_order_seq_cst)
00637 {
00638 void* __v = _ATOMIC_MODIFY_(this, -=, __d, __m);
00639 return reinterpret_cast<__return_pointer_type>(__v);
00640 }
00641
00642 __return_pointer_type
00643 fetch_sub(ptrdiff_t __d,
00644 memory_order __m = memory_order_seq_cst) volatile
00645 {
00646 void* __v = _ATOMIC_MODIFY_(this, -=, __d, __m);
00647 return reinterpret_cast<__return_pointer_type>(__v);
00648 }
00649 };
00650
00651 #undef _ATOMIC_LOAD_
00652 #undef _ATOMIC_STORE_
00653 #undef _ATOMIC_MODIFY_
00654 #undef _ATOMIC_CMPEXCHNG_
00655 }
00656
00657 _GLIBCXX_END_NAMESPACE_VERSION
00658 }
00659
00660 #endif