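// -*- C++ -*- header.

// The original license/copyright block was lost in extraction and is not
// reproduced here. Standard libstdc++ internal-header notice follows.

/** @file bits/atomic_0.h
 *  This is an internal header file, included by other library headers.
 *  Do not attempt to use it directly. @headername{atomic}
 */
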
#ifndef _GLIBCXX_ATOMIC_0_H
#define _GLIBCXX_ATOMIC_0_H 1

#pragma GCC system_header

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
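
  // 0 == __atomic0 == Never lock-free: every operation is emulated by
  // locking a guard flag chosen from the object's address.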
  namespace __atomic0
  {
    _GLIBCXX_BEGIN_EXTERN_C

    void
    atomic_flag_clear_explicit(__atomic_flag_base*, memory_order)
    _GLIBCXX_NOTHROW;

    void
    __atomic_flag_wait_explicit(__atomic_flag_base*, memory_order)
    _GLIBCXX_NOTHROW;

    _GLIBCXX_CONST __atomic_flag_base*
    __atomic_flag_for_address(const volatile void* __z) _GLIBCXX_NOTHROW;

    _GLIBCXX_END_EXTERN_C
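
    // Implementation detail: each atomic type stores its value in a
    // member named _M_i, which the macros below access directly.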
#define _ATOMIC_MEMBER_ _M_i
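
    // Locked load: acquire the guard flag for the object's address,
    // read the value, release the guard, and yield the value read.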
#define _ATOMIC_LOAD_(__a, __x) \
  ({typedef __typeof__(_ATOMIC_MEMBER_) __i_type; \
    __i_type* __p = &_ATOMIC_MEMBER_; \
    __atomic_flag_base* __g = __atomic_flag_for_address(__p); \
    __atomic_flag_wait_explicit(__g, __x); \
    __i_type __r = *__p; \
    atomic_flag_clear_explicit(__g, __x); \
    __r; })
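
    // Locked store: acquire the guard, write the new value, release.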
#define _ATOMIC_STORE_(__a, __m, __x) \
  ({typedef __typeof__(_ATOMIC_MEMBER_) __i_type; \
    __i_type* __p = &_ATOMIC_MEMBER_; \
    __typeof__(__m) __v = (__m); \
    __atomic_flag_base* __g = __atomic_flag_for_address(__p); \
    __atomic_flag_wait_explicit(__g, __x); \
    *__p = __v; \
    atomic_flag_clear_explicit(__g, __x); \
    __v; })
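
    // Locked read-modify-write: apply the compound-assignment operator
    // __o under the guard and yield the previous value.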
#define _ATOMIC_MODIFY_(__a, __o, __m, __x) \
  ({typedef __typeof__(_ATOMIC_MEMBER_) __i_type; \
    __i_type* __p = &_ATOMIC_MEMBER_; \
    __typeof__(__m) __v = (__m); \
    __atomic_flag_base* __g = __atomic_flag_for_address(__p); \
    __atomic_flag_wait_explicit(__g, __x); \
    __i_type __r = *__p; \
    *__p __o __v; \
    atomic_flag_clear_explicit(__g, __x); \
    __r; })
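
    // Locked compare-and-exchange: on mismatch the expected value *__q
    // is updated to the current value, per compare_exchange semantics.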
#define _ATOMIC_CMPEXCHNG_(__a, __e, __m, __x) \
  ({typedef __typeof__(_ATOMIC_MEMBER_) __i_type; \
    __i_type* __p = &_ATOMIC_MEMBER_; \
    __typeof__(__e) __q = (__e); \
    __typeof__(__m) __v = (__m); \
    bool __r; \
    __atomic_flag_base* __g = __atomic_flag_for_address(__p); \
    __atomic_flag_wait_explicit(__g, __x); \
    __i_type __t = *__p; \
    if (*__q == __t) \
      { \
        *__p = (__i_type)__v; \
        __r = true; \
      } \
    else \
      { \
        *__q = __t; \
        __r = false; \
      } \
    atomic_flag_clear_explicit(__g, __x); \
    __r; })
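
    /// atomic_flag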
    struct atomic_flag : public __atomic_flag_base
    {
      atomic_flag() = default;
      ~atomic_flag() = default;
      atomic_flag(const atomic_flag&) = delete;
      atomic_flag& operator=(const atomic_flag&) = delete;
      atomic_flag& operator=(const atomic_flag&) volatile = delete;
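
      // Conversion to ATOMIC_FLAG_INIT.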
      atomic_flag(bool __i): __atomic_flag_base({ __i }) { }

      bool
      test_and_set(memory_order __m = memory_order_seq_cst);

      bool
      test_and_set(memory_order __m = memory_order_seq_cst) volatile;

      void
      clear(memory_order __m = memory_order_seq_cst);

      void
      clear(memory_order __m = memory_order_seq_cst) volatile;
    };
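
    /// atomic_address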
    struct atomic_address
    {
    private:
      void* _M_i;

    public:
      atomic_address() = default;
      ~atomic_address() = default;
      atomic_address(const atomic_address&) = delete;
      atomic_address& operator=(const atomic_address&) = delete;
      atomic_address& operator=(const atomic_address&) volatile = delete;

      constexpr atomic_address(void* __v): _M_i (__v) { }

      bool
      is_lock_free() const { return false; }

      bool
      is_lock_free() const volatile { return false; }

      void
      store(void* __v, memory_order __m = memory_order_seq_cst)
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);
        _ATOMIC_STORE_(this, __v, __m);
      }

      void
      store(void* __v, memory_order __m = memory_order_seq_cst) volatile
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);
        _ATOMIC_STORE_(this, __v, __m);
      }

      void*
      load(memory_order __m = memory_order_seq_cst) const
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);
        return _ATOMIC_LOAD_(this, __m);
      }

      void*
      load(memory_order __m = memory_order_seq_cst) const volatile
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);
        return _ATOMIC_LOAD_(this, __m);
      }

      void*
      exchange(void* __v, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, =, __v, __m); }

      void*
      exchange(void* __v, memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, =, __v, __m); }

      bool
      compare_exchange_weak(void*& __v1, void* __v2, memory_order __m1,
                            memory_order __m2)
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);
        return _ATOMIC_CMPEXCHNG_(this, &__v1, __v2, __m1);
      }

      bool
      compare_exchange_weak(void*& __v1, void* __v2, memory_order __m1,
                            memory_order __m2) volatile
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);
        return _ATOMIC_CMPEXCHNG_(this, &__v1, __v2, __m1);
      }

      bool
      compare_exchange_weak(void*& __v1, void* __v2,
                            memory_order __m = memory_order_seq_cst)
      {
        return compare_exchange_weak(__v1, __v2, __m,
                                     __calculate_memory_order(__m));
      }

      bool
      compare_exchange_weak(void*& __v1, void* __v2,
                            memory_order __m = memory_order_seq_cst) volatile
      {
        return compare_exchange_weak(__v1, __v2, __m,
                                     __calculate_memory_order(__m));
      }

      bool
      compare_exchange_weak(const void*& __v1, const void* __v2,
                            memory_order __m1, memory_order __m2)
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);
        return _ATOMIC_CMPEXCHNG_(this, &__v1, __v2, __m1);
      }

      bool
      compare_exchange_weak(const void*& __v1, const void* __v2,
                            memory_order __m1, memory_order __m2) volatile
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);
        return _ATOMIC_CMPEXCHNG_(this, &__v1, __v2, __m1);
      }

      bool
      compare_exchange_weak(const void*& __v1, const void* __v2,
                            memory_order __m = memory_order_seq_cst)
      {
        return compare_exchange_weak(__v1, __v2, __m,
                                     __calculate_memory_order(__m));
      }

      bool
      compare_exchange_weak(const void*& __v1, const void* __v2,
                            memory_order __m = memory_order_seq_cst) volatile
      {
        return compare_exchange_weak(__v1, __v2, __m,
                                     __calculate_memory_order(__m));
      }

      bool
      compare_exchange_strong(void*& __v1, void* __v2, memory_order __m1,
                              memory_order __m2)
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);
        return _ATOMIC_CMPEXCHNG_(this, &__v1, __v2, __m1);
      }

      bool
      compare_exchange_strong(void*& __v1, void* __v2, memory_order __m1,
                              memory_order __m2) volatile
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);
        return _ATOMIC_CMPEXCHNG_(this, &__v1, __v2, __m1);
      }

      bool
      compare_exchange_strong(void*& __v1, void* __v2,
                              memory_order __m = memory_order_seq_cst)
      {
        return compare_exchange_strong(__v1, __v2, __m,
                                       __calculate_memory_order(__m));
      }

      bool
      compare_exchange_strong(void*& __v1, void* __v2,
                              memory_order __m = memory_order_seq_cst) volatile
      {
        return compare_exchange_strong(__v1, __v2, __m,
                                       __calculate_memory_order(__m));
      }

      bool
      compare_exchange_strong(const void*& __v1, const void* __v2,
                              memory_order __m1, memory_order __m2)
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);
        return _ATOMIC_CMPEXCHNG_(this, &__v1, __v2, __m1);
      }

      bool
      compare_exchange_strong(const void*& __v1, const void* __v2,
                              memory_order __m1, memory_order __m2) volatile
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);
        return _ATOMIC_CMPEXCHNG_(this, &__v1, __v2, __m1);
      }

      bool
      compare_exchange_strong(const void*& __v1, const void* __v2,
                              memory_order __m = memory_order_seq_cst)
      {
        return compare_exchange_strong(__v1, __v2, __m,
                                       __calculate_memory_order(__m));
      }

      bool
      compare_exchange_strong(const void*& __v1, const void* __v2,
                              memory_order __m = memory_order_seq_cst) volatile
      {
        return compare_exchange_strong(__v1, __v2, __m,
                                       __calculate_memory_order(__m));
      }
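
      // Address arithmetic is done in bytes via char*, matching the
      // ptrdiff_t displacement.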
      void*
      fetch_add(ptrdiff_t __d, memory_order __m = memory_order_seq_cst)
      {
        void** __p = &(_M_i);
        __atomic_flag_base* __g = __atomic_flag_for_address(__p);
        __atomic_flag_wait_explicit(__g, __m);
        void* __r = *__p;
        *__p = (void*)((char*)(*__p) + __d);
        atomic_flag_clear_explicit(__g, __m);
        return __r;
      }

      void*
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile
      {
        void* volatile* __p = &(_M_i);
        __atomic_flag_base* __g = __atomic_flag_for_address(__p);
        __atomic_flag_wait_explicit(__g, __m);
        void* __r = *__p;
        *__p = (void*)((char*)(*__p) + __d);
        atomic_flag_clear_explicit(__g, __m);
        return __r;
      }

      void*
      fetch_sub(ptrdiff_t __d, memory_order __m = memory_order_seq_cst)
      {
        void** __p = &(_M_i);
        __atomic_flag_base* __g = __atomic_flag_for_address(__p);
        __atomic_flag_wait_explicit(__g, __m);
        void* __r = *__p;
        *__p = (void*)((char*)(*__p) - __d);
        atomic_flag_clear_explicit(__g, __m);
        return __r;
      }

      void*
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile
      {
        void* volatile* __p = &(_M_i);
        __atomic_flag_base* __g = __atomic_flag_for_address(__p);
        __atomic_flag_wait_explicit(__g, __m);
        void* __r = *__p;
        *__p = (void*)((char*)(*__p) - __d);
        atomic_flag_clear_explicit(__g, __m);
        return __r;
      }

      operator void*() const
      { return load(); }

      operator void*() const volatile
      { return load(); }

      void*
      operator=(void* __v)
      {
        store(__v);
        return __v;
      }

      void*
      operator=(void* __v) volatile
      {
        store(__v);
        return __v;
      }

      void*
      operator+=(ptrdiff_t __d)
      { return fetch_add(__d) + __d; }

      void*
      operator+=(ptrdiff_t __d) volatile
      { return fetch_add(__d) + __d; }

      void*
      operator-=(ptrdiff_t __d)
      { return fetch_sub(__d) - __d; }

      void*
      operator-=(ptrdiff_t __d) volatile
      { return fetch_sub(__d) - __d; }
    };
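
    /// Base class for atomic integrals.
    //
    // Instantiated for each of the standard integral types to provide
    // atomic_bool, atomic_char, atomic_int, atomic_llong, and the rest
    // of the atomic_[integral type] structs.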
    template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp __int_type;

      __int_type _M_i;

    public:
      __atomic_base() = default;
      ~__atomic_base() = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      constexpr __atomic_base(__int_type __i): _M_i (__i) { }

      operator __int_type() const
      { return load(); }

      operator __int_type() const volatile
      { return load(); }

      __int_type
      operator=(__int_type __i)
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile
      {
        store(__i);
        return __i;
      }

      __int_type
      operator++(int)
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile
      { return fetch_add(1); }

      __int_type
      operator--(int)
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile
      { return fetch_sub(1); }

      __int_type
      operator++()
      { return fetch_add(1) + 1; }

      __int_type
      operator++() volatile
      { return fetch_add(1) + 1; }

      __int_type
      operator--()
      { return fetch_sub(1) - 1; }

      __int_type
      operator--() volatile
      { return fetch_sub(1) - 1; }

      __int_type
      operator+=(__int_type __i)
      { return fetch_add(__i) + __i; }

      __int_type
      operator+=(__int_type __i) volatile
      { return fetch_add(__i) + __i; }

      __int_type
      operator-=(__int_type __i)
      { return fetch_sub(__i) - __i; }

      __int_type
      operator-=(__int_type __i) volatile
      { return fetch_sub(__i) - __i; }

      __int_type
      operator&=(__int_type __i)
      { return fetch_and(__i) & __i; }

      __int_type
      operator&=(__int_type __i) volatile
      { return fetch_and(__i) & __i; }

      __int_type
      operator|=(__int_type __i)
      { return fetch_or(__i) | __i; }

      __int_type
      operator|=(__int_type __i) volatile
      { return fetch_or(__i) | __i; }

      __int_type
      operator^=(__int_type __i)
      { return fetch_xor(__i) ^ __i; }

      __int_type
      operator^=(__int_type __i) volatile
      { return fetch_xor(__i) ^ __i; }
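
      // This implementation is never lock-free; all operations go
      // through the per-address guard lock.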
      bool
      is_lock_free() const
      { return false; }

      bool
      is_lock_free() const volatile
      { return false; }

      void
      store(__int_type __i, memory_order __m = memory_order_seq_cst)
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);
        _ATOMIC_STORE_(this, __i, __m);
      }

      void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) volatile
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);
        _ATOMIC_STORE_(this, __i, __m);
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);
        return _ATOMIC_LOAD_(this, __m);
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);
        return _ATOMIC_LOAD_(this, __m);
      }

      __int_type
      exchange(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, =, __i, __m); }

      __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, =, __i, __m); }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2)
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);
        return _ATOMIC_CMPEXCHNG_(this, &__i1, __i2, __m1);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) volatile
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);
        return _ATOMIC_CMPEXCHNG_(this, &__i1, __i2, __m1);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst)
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __calculate_memory_order(__m));
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) volatile
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __calculate_memory_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2)
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);
        return _ATOMIC_CMPEXCHNG_(this, &__i1, __i2, __m1);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) volatile
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);
        return _ATOMIC_CMPEXCHNG_(this, &__i1, __i2, __m1);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst)
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __calculate_memory_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) volatile
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __calculate_memory_order(__m));
      }

      __int_type
      fetch_add(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, +=, __i, __m); }

      __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, +=, __i, __m); }

      __int_type
      fetch_sub(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, -=, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, -=, __i, __m); }

      __int_type
      fetch_and(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, &=, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, &=, __i, __m); }

      __int_type
      fetch_or(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, |=, __i, __m); }

      __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, |=, __i, __m); }

      __int_type
      fetch_xor(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, ^=, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, ^=, __i, __m); }
    };
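
// Remove the implementation-detail macros.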
#undef _ATOMIC_LOAD_
#undef _ATOMIC_STORE_
#undef _ATOMIC_MODIFY_
#undef _ATOMIC_CMPEXCHNG_
  } // namespace __atomic0

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif // _GLIBCXX_ATOMIC_0_H