set thread names in server
[public/netxms.git] / include / nxatomic.h
1 /*
2 ** NetXMS - Network Management System
3 ** Copyright (C) 2003-2017 Victor Kirhenshtein
4 **
5 ** This program is free software; you can redistribute it and/or modify
6 ** it under the terms of the GNU Lesser General Public License as published
7 ** by the Free Software Foundation; either version 3 of the License, or
8 ** (at your option) any later version.
9 **
10 ** This program is distributed in the hope that it will be useful,
11 ** but WITHOUT ANY WARRANTY; without even the implied warranty of
12 ** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 ** GNU General Public License for more details.
14 **
15 ** You should have received a copy of the GNU Lesser General Public License
16 ** along with this program; if not, write to the Free Software
17 ** Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
18 **
19 ** File: nxatomic.h
20 **
21 **/
22
23 #ifndef _nxatomic_h_
24 #define _nxatomic_h_
25
26 #include <nms_common.h>
27
28 #ifdef __cplusplus
29
30 #ifdef __sun
31 #include <sys/atomic.h>
32 #endif
33
34 #if defined(__HP_aCC) && HAVE_ATOMIC_H
35 #include <atomic.h>
36 #endif
37
38 #ifdef _WIN32
39
40 typedef volatile LONG VolatileCounter;
41
42 #else
43
44 #if defined(__sun)
45
46 typedef volatile uint32_t VolatileCounter;
47
48 #if !HAVE_ATOMIC_INC_32_NV
49 extern "C" volatile uint32_t solaris9_atomic_inc32(volatile uint32_t *v);
50 #endif
51
52 #if !HAVE_ATOMIC_DEC_32_NV
53 extern "C" volatile uint32_t solaris9_atomic_dec32(volatile uint32_t *v);
54 #endif
55
#if !HAVE_ATOMIC_SWAP_PTR
// Fallback for Solaris 9, which lacks atomic_swap_ptr().
// Fixed declaration: "volatile *void *target" was a syntax error; the parameter
// is a volatile pointer to void pointer, matching InterlockedExchangePointer below.
extern "C" void *solaris9_atomic_swap_ptr(void *volatile *target, void *value);
#endif
59
60 /**
61 * Atomically increment 32-bit value by 1
62 */
63 inline VolatileCounter InterlockedIncrement(VolatileCounter *v)
64 {
65 #if HAVE_ATOMIC_INC_32_NV
66 return atomic_inc_32_nv(v);
67 #else
68 return solaris9_atomic_inc32(v);
69 #endif
70 }
71
72 /**
73 * Atomically decrement 32-bit value by 1
74 */
75 inline VolatileCounter InterlockedDecrement(VolatileCounter *v)
76 {
77 #if HAVE_ATOMIC_DEC_32_NV
78 return atomic_dec_32_nv(v);
79 #else
80 return solaris9_atomic_dec32(v);
81 #endif
82 }
83
84 /**
85 * Atomically set pointer
86 */
87 inline void *InterlockedExchangePointer(void *volatile *target, void *value)
88 {
89 #if HAVE_ATOMIC_SWAP_PTR
90 return atomic_swap_ptr(target, value);
91 #else
92 return solaris9_atomic_swap_ptr(target, value);
93 #endif
94 }
95
96 #elif defined(__HP_aCC)
97
98 typedef volatile uint32_t VolatileCounter;
99
100 #if defined(__hppa) && !HAVE_ATOMIC_H
101 VolatileCounter parisc_atomic_inc(VolatileCounter *v);
102 VolatileCounter parisc_atomic_dec(VolatileCounter *v);
103 #endif
104
105 /**
106 * Atomically increment 32-bit value by 1
107 */
108 inline VolatileCounter InterlockedIncrement(VolatileCounter *v)
109 {
110 #if HAVE_ATOMIC_H
111 return atomic_inc_32(v) + 1;
112 #else
113 #ifdef __hppa
114 return parisc_atomic_inc(v);
115 #else
116 _Asm_mf(_DFLT_FENCE);
117 return (uint32_t)_Asm_fetchadd(_FASZ_W, _SEM_ACQ, (void *)v, +1, _LDHINT_NONE) + 1;
118 #endif
119 #endif
120 }
121
122 /**
123 * Atomically decrement 32-bit value by 1
124 */
125 inline VolatileCounter InterlockedDecrement(VolatileCounter *v)
126 {
127 #if HAVE_ATOMIC_H
128 return atomic_dec_32(v) - 1;
129 #else
130 #ifdef __hppa
131 return parisc_atomic_inc(v);
132 #else
133 _Asm_mf(_DFLT_FENCE);
134 return (uint32_t)_Asm_fetchadd(_FASZ_W, _SEM_ACQ, (void *)v, -1, _LDHINT_NONE) - 1;
135 #endif
136 #endif
137 }
138
139 #elif defined(__IBMC__) || defined(__IBMCPP__)
140
141 typedef volatile INT32 VolatileCounter;
142
143 /**
144 * Atomically increment 32-bit value by 1
145 */
146 inline VolatileCounter InterlockedIncrement(VolatileCounter *v)
147 {
148 #if !HAVE_DECL___SYNC_ADD_AND_FETCH
149 VolatileCounter oldval;
150 do
151 {
152 oldval = __lwarx(v);
153 } while(__stwcx(v, oldval + 1) == 0);
154 return oldval + 1;
155 #else
156 return __sync_add_and_fetch(v, 1);
157 #endif
158 }
159
160 /**
161 * Atomically decrement 32-bit value by 1
162 */
163 inline VolatileCounter InterlockedDecrement(VolatileCounter *v)
164 {
165 #if !HAVE_DECL___SYNC_SUB_AND_FETCH
166 VolatileCounter oldval;
167 do
168 {
169 oldval = __lwarx(v);
170 } while(__stwcx(v, oldval - 1) == 0);
171 return oldval - 1;
172 #else
173 return __sync_sub_and_fetch(v, 1);
174 #endif
175 }
176
177 /**
178 * Atomically set pointer
179 */
180 inline void *InterlockedExchangePointer(void *volatile *target, void *value)
181 {
182 void *oldval;
183 do
184 {
185 #ifdef __64BIT__
186 oldval = (void *)__ldarx((long *)target);
187 #else
188 oldval = (void *)__lwarx((int *)target);
189 #endif
190 #ifdef __64BIT__
191 } while(__stdcx((long *)target, (long)value) == 0);
192 #else
193 } while(__stwcx((int *)target, (int)value) == 0);
194 #endif
195 return oldval;
196 }
197
198 #else /* not Solaris nor HP-UX nor AIX */
199
200 typedef volatile INT32 VolatileCounter;
201
202 /**
203 * Atomically increment 32-bit value by 1
204 */
205 inline VolatileCounter InterlockedIncrement(VolatileCounter *v)
206 {
207 #if defined(__GNUC__) && ((__GNUC__ < 4) || (__GNUC_MINOR__ < 1)) && (defined(__i386__) || defined(__x86_64__))
208 VolatileCounter temp = 1;
209 __asm__ __volatile__("lock; xaddl %0,%1" : "+r" (temp), "+m" (*v) : : "memory");
210 return temp + 1;
211 #else
212 return __sync_add_and_fetch(v, 1);
213 #endif
214 }
215
216 /**
217 * Atomically decrement 32-bit value by 1
218 */
219 inline VolatileCounter InterlockedDecrement(VolatileCounter *v)
220 {
221 #if defined(__GNUC__) && ((__GNUC__ < 4) || (__GNUC_MINOR__ < 1)) && (defined(__i386__) || defined(__x86_64__))
222 VolatileCounter temp = -1;
223 __asm__ __volatile__("lock; xaddl %0,%1" : "+r" (temp), "+m" (*v) : : "memory");
224 return temp - 1;
225 #else
226 return __sync_sub_and_fetch(v, 1);
227 #endif
228 }
229
230 /**
231 * Atomically set pointer
232 */
233 inline void *InterlockedExchangePointer(void* volatile *target, void *value)
234 {
235 #if defined(__GNUC__) && ((__GNUC__ < 4) || (__GNUC_MINOR__ < 1)) && (defined(__i386__) || defined(__x86_64__))
236 void *oldval;
237 #ifdef __64BIT__
238 __asm__ __volatile__("xchgq %q2, %1" : "=a" (oldval), "+m" (*target) : "0" (value));
239 #else
240 __asm__ __volatile__("xchgl %2, %1" : "=a" (oldval), "+m" (*target) : "0" (value));
241 #endif
242 return oldval;
243 #else
244 __sync_synchronize();
245 return __sync_lock_test_and_set(target, value);
246 #endif
247 }
248
249 #endif /* __sun */
250
251 #endif /* _WIN32 */
252
253 /**
254 * Atomically set pointer - helper template
255 */
256 template<typename T> T *InterlockedExchangeObjectPointer(T* volatile *target, T *value)
257 {
258 return static_cast<T*>(InterlockedExchangePointer(reinterpret_cast<void* volatile *>(target), value));
259 }
260
261 #endif /* __cplusplus */
262
263 #endif