/*
 * urcu/uatomic/builtins-generic.h
 *
 * Copyright (c) 2023 Olivier Dion <odion@efficios.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#ifndef _URCU_UATOMIC_BUILTINS_GENERIC_H
#define _URCU_UATOMIC_BUILTINS_GENERIC_H

#include <urcu/compiler.h>
#include <urcu/system.h>
27 #define uatomic_store(addr, v, mo) \
29 __atomic_store_n(cmm_cast_volatile(addr), v, \
31 cmm_seq_cst_fence_after_atomic(mo); \
34 #define uatomic_set(addr, v) \
36 uatomic_store(addr, v, CMM_RELAXED); \
39 #define uatomic_load(addr, mo) \
42 __typeof__(*(addr)) _value = \
43 __atomic_load_n(cmm_cast_volatile(addr), \
45 cmm_seq_cst_fence_after_atomic(mo); \
50 #define uatomic_read(addr) \
51 uatomic_load(addr, CMM_RELAXED)
53 #define uatomic_cmpxchg_mo(addr, old, new, mos, mof) \
56 __typeof__(*(addr)) _old = (__typeof__(*(addr)))old; \
58 if (__atomic_compare_exchange_n(cmm_cast_volatile(addr), \
62 cmm_seq_cst_fence_after_atomic(mos); \
64 cmm_seq_cst_fence_after_atomic(mof); \
69 #define uatomic_cmpxchg(addr, old, new) \
70 uatomic_cmpxchg_mo(addr, old, new, CMM_SEQ_CST_FENCE, CMM_RELAXED)
72 #define uatomic_xchg_mo(addr, v, mo) \
75 __typeof__((*addr)) _old = \
76 __atomic_exchange_n(cmm_cast_volatile(addr), v, \
78 cmm_seq_cst_fence_after_atomic(mo); \
82 #define uatomic_xchg(addr, v) \
83 uatomic_xchg_mo(addr, v, CMM_SEQ_CST_FENCE)
85 #define uatomic_add_return_mo(addr, v, mo) \
88 __typeof__(*(addr)) _old = \
89 __atomic_add_fetch(cmm_cast_volatile(addr), v, \
91 cmm_seq_cst_fence_after_atomic(mo); \
95 #define uatomic_add_return(addr, v) \
96 uatomic_add_return_mo(addr, v, CMM_SEQ_CST_FENCE)
98 #define uatomic_sub_return_mo(addr, v, mo) \
101 __typeof__(*(addr)) _old = \
102 __atomic_sub_fetch(cmm_cast_volatile(addr), v, \
104 cmm_seq_cst_fence_after_atomic(mo); \
108 #define uatomic_sub_return(addr, v) \
109 uatomic_sub_return_mo(addr, v, CMM_SEQ_CST_FENCE)
111 #define uatomic_and_mo(addr, mask, mo) \
113 (void) __atomic_and_fetch(cmm_cast_volatile(addr), mask, \
115 cmm_seq_cst_fence_after_atomic(mo); \
118 #define uatomic_and(addr, mask) \
119 uatomic_and_mo(addr, mask, CMM_SEQ_CST)
121 #define uatomic_or_mo(addr, mask, mo) \
123 (void) __atomic_or_fetch(cmm_cast_volatile(addr), mask, \
125 cmm_seq_cst_fence_after_atomic(mo); \
129 #define uatomic_or(addr, mask) \
130 uatomic_or_mo(addr, mask, CMM_RELAXED)
132 #define uatomic_add_mo(addr, v, mo) \
133 (void) uatomic_add_return_mo(addr, v, mo)
135 #define uatomic_add(addr, v) \
136 uatomic_add_mo(addr, v, CMM_RELAXED)
138 #define uatomic_sub_mo(addr, v, mo) \
139 (void) uatomic_sub_return_mo(addr, v, mo)
141 #define uatomic_sub(addr, v) \
142 uatomic_sub_mo(addr, v, CMM_RELAXED)
144 #define uatomic_inc_mo(addr, mo) \
145 uatomic_add_mo(addr, 1, mo)
147 #define uatomic_inc(addr) \
148 uatomic_inc_mo(addr, CMM_RELAXED)
150 #define uatomic_dec_mo(addr, mo) \
151 uatomic_sub_mo(addr, 1, mo)
153 #define uatomic_dec(addr) \
154 uatomic_dec_mo(addr, CMM_RELAXED)
156 #define cmm_smp_mb__before_uatomic_and() cmm_smp_mb()
157 #define cmm_smp_mb__after_uatomic_and() cmm_smp_mb()
159 #define cmm_smp_mb__before_uatomic_or() cmm_smp_mb()
160 #define cmm_smp_mb__after_uatomic_or() cmm_smp_mb()
162 #define cmm_smp_mb__before_uatomic_add() cmm_smp_mb()
163 #define cmm_smp_mb__after_uatomic_add() cmm_smp_mb()
165 #define cmm_smp_mb__before_uatomic_sub() cmm_smp_mb()
166 #define cmm_smp_mb__after_uatomic_sub() cmm_smp_mb()
168 #define cmm_smp_mb__before_uatomic_inc() cmm_smp_mb()
169 #define cmm_smp_mb__after_uatomic_inc() cmm_smp_mb()
171 #define cmm_smp_mb__before_uatomic_dec() cmm_smp_mb()
172 #define cmm_smp_mb__after_uatomic_dec() cmm_smp_mb()
#endif /* _URCU_UATOMIC_BUILTINS_GENERIC_H */