Improved debug tag performance
[public/netxms.git] / include / nxatomic.h
/*
** NetXMS - Network Management System
** Copyright (C) 2003-2017 Victor Kirhenshtein
**
** This program is free software; you can redistribute it and/or modify
** it under the terms of the GNU Lesser General Public License as published
** by the Free Software Foundation; either version 3 of the License, or
** (at your option) any later version.
**
** This program is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
** GNU General Public License for more details.
**
** You should have received a copy of the GNU Lesser General Public License
** along with this program; if not, write to the Free Software
** Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
**
** File: nxatomic.h
**
**/

#ifndef _nxatomic_h_
#define _nxatomic_h_

#include <nms_common.h>

#ifdef __sun
#include <sys/atomic.h>
#endif

#if defined(__HP_aCC) && HAVE_ATOMIC_H
#include <atomic.h>
#endif

#ifdef _WIN32

typedef volatile LONG VolatileCounter;

#else

#if defined(__sun)

typedef volatile uint32_t VolatileCounter;

#if !HAVE_ATOMIC_INC_32_NV
extern "C" volatile uint32_t solaris9_atomic_inc32(volatile uint32_t *v);
#endif

#if !HAVE_ATOMIC_DEC_32_NV
extern "C" volatile uint32_t solaris9_atomic_dec32(volatile uint32_t *v);
#endif

#if !HAVE_ATOMIC_SWAP_PTR
extern "C" void *solaris9_atomic_swap_ptr(void *volatile *target, void *value);
#endif

/**
 * Atomically increment 32-bit value by 1
 */
inline VolatileCounter InterlockedIncrement(VolatileCounter *v)
{
#if HAVE_ATOMIC_INC_32_NV
   return atomic_inc_32_nv(v);
#else
   return solaris9_atomic_inc32(v);
#endif
}

/**
 * Atomically decrement 32-bit value by 1
 */
inline VolatileCounter InterlockedDecrement(VolatileCounter *v)
{
#if HAVE_ATOMIC_DEC_32_NV
   return atomic_dec_32_nv(v);
#else
   return solaris9_atomic_dec32(v);
#endif
}

/**
 * Atomically set pointer
 */
inline void *InterlockedExchangePointer(void *volatile *target, void *value)
{
#if HAVE_ATOMIC_SWAP_PTR
   return atomic_swap_ptr(target, value);
#else
   return solaris9_atomic_swap_ptr(target, value);
#endif
}

#elif defined(__HP_aCC)

typedef volatile uint32_t VolatileCounter;

#if defined(__hppa) && !HAVE_ATOMIC_H
VolatileCounter parisc_atomic_inc(VolatileCounter *v);
VolatileCounter parisc_atomic_dec(VolatileCounter *v);
#endif

/**
 * Atomically increment 32-bit value by 1
 */
inline VolatileCounter InterlockedIncrement(VolatileCounter *v)
{
#if HAVE_ATOMIC_H
   return atomic_inc_32(v) + 1;
#else
#ifdef __hppa
   return parisc_atomic_inc(v);
#else
   _Asm_mf(_DFLT_FENCE);
   return (uint32_t)_Asm_fetchadd(_FASZ_W, _SEM_ACQ, (void *)v, +1, _LDHINT_NONE) + 1;
#endif
#endif
}

/**
 * Atomically decrement 32-bit value by 1
 */
inline VolatileCounter InterlockedDecrement(VolatileCounter *v)
{
#if HAVE_ATOMIC_H
   return atomic_dec_32(v) - 1;
#else
#ifdef __hppa
   return parisc_atomic_dec(v);
#else
   _Asm_mf(_DFLT_FENCE);
   return (uint32_t)_Asm_fetchadd(_FASZ_W, _SEM_ACQ, (void *)v, -1, _LDHINT_NONE) - 1;
#endif
#endif
}

#elif defined(__IBMC__) || defined(__IBMCPP__)

typedef volatile INT32 VolatileCounter;

/**
 * Atomically increment 32-bit value by 1
 */
inline VolatileCounter InterlockedIncrement(VolatileCounter *v)
{
#if !HAVE_DECL___SYNC_ADD_AND_FETCH
   VolatileCounter oldval;
   do
   {
      oldval = __lwarx(v);
   } while(__stwcx(v, oldval + 1) == 0);
   return oldval + 1;
#else
   return __sync_add_and_fetch(v, 1);
#endif
}

/**
 * Atomically decrement 32-bit value by 1
 */
inline VolatileCounter InterlockedDecrement(VolatileCounter *v)
{
#if !HAVE_DECL___SYNC_SUB_AND_FETCH
   VolatileCounter oldval;
   do
   {
      oldval = __lwarx(v);
   } while(__stwcx(v, oldval - 1) == 0);
   return oldval - 1;
#else
   return __sync_sub_and_fetch(v, 1);
#endif
}

/**
 * Atomically set pointer
 */
inline void *InterlockedExchangePointer(void *volatile *target, void *value)
{
   void *oldval;
   do
   {
#ifdef __64BIT__
      oldval = (void *)__ldarx((long *)target);
#else
      oldval = (void *)__lwarx((int *)target);
#endif
#ifdef __64BIT__
   } while(__stdcx((long *)target, (long)value) == 0);
#else
   } while(__stwcx((int *)target, (int)value) == 0);
#endif
   return oldval;
}

#else /* not Solaris nor HP-UX nor AIX */

typedef volatile INT32 VolatileCounter;

/**
 * Atomically increment 32-bit value by 1
 */
inline VolatileCounter InterlockedIncrement(VolatileCounter *v)
{
#if defined(__GNUC__) && ((__GNUC__ < 4) || ((__GNUC__ == 4) && (__GNUC_MINOR__ < 1))) && (defined(__i386__) || defined(__x86_64__))
   VolatileCounter temp = 1;
   __asm__ __volatile__("lock; xaddl %0,%1" : "+r" (temp), "+m" (*v) : : "memory");
   return temp + 1;
#else
   return __sync_add_and_fetch(v, 1);
#endif
}

/**
 * Atomically decrement 32-bit value by 1
 */
inline VolatileCounter InterlockedDecrement(VolatileCounter *v)
{
#if defined(__GNUC__) && ((__GNUC__ < 4) || ((__GNUC__ == 4) && (__GNUC_MINOR__ < 1))) && (defined(__i386__) || defined(__x86_64__))
   VolatileCounter temp = -1;
   __asm__ __volatile__("lock; xaddl %0,%1" : "+r" (temp), "+m" (*v) : : "memory");
   return temp - 1;
#else
   return __sync_sub_and_fetch(v, 1);
#endif
}

/**
 * Atomically set pointer
 */
inline void *InterlockedExchangePointer(void* volatile *target, void *value)
{
#if defined(__GNUC__) && ((__GNUC__ < 4) || ((__GNUC__ == 4) && (__GNUC_MINOR__ < 1))) && (defined(__i386__) || defined(__x86_64__))
   void *oldval;
#ifdef __64BIT__
   __asm__ __volatile__("xchgq %q2, %1" : "=a" (oldval), "+m" (*target) : "0" (value));
#else
   __asm__ __volatile__("xchgl %2, %1" : "=a" (oldval), "+m" (*target) : "0" (value));
#endif
   return oldval;
#else
   __sync_synchronize();   // full barrier: __sync_lock_test_and_set alone is only an acquire barrier
   return __sync_lock_test_and_set(target, value);
#endif
}

#endif /* __sun */

#endif /* _WIN32 */
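
/*
 * Usage sketch (editorial illustration, not part of the original header):
 * a minimal reference counter built on VolatileCounter. The names
 * RefCountedObject, AddRef and ReleaseRef are hypothetical; the sketch only
 * demonstrates that InterlockedIncrement/InterlockedDecrement return the
 * updated counter value, so a result of 0 from the decrement identifies the
 * last reference.
 */
#if 0
struct RefCountedObject
{
   VolatileCounter refCount;   // set to 1 by the creating owner
   void *payload;
};

inline void AddRef(RefCountedObject *object)
{
   InterlockedIncrement(&object->refCount);
}

inline void ReleaseRef(RefCountedObject *object)
{
   // InterlockedDecrement returns the new value; 0 means no references remain
   if (InterlockedDecrement(&object->refCount) == 0)
   {
      free(object->payload);
      free(object);
   }
}
#endif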

#ifdef __cplusplus

/**
 * Atomically set pointer
 */
template<typename T> T *InterlockedExchangePointer(T* volatile *target, T *value)
{
   return static_cast<T*>(InterlockedExchangePointer(reinterpret_cast<void* volatile *>(target), value));
}
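
/*
 * Usage sketch (editorial illustration, not part of the original header):
 * atomically publishing a new object through a shared pointer using the
 * typed InterlockedExchangePointer overload. The names Config, s_activeConfig
 * and PublishConfig are hypothetical.
 */
#if 0
struct Config
{
   int pollingInterval;
};

static Config *volatile s_activeConfig = NULL;

inline void PublishConfig(Config *newConfig)
{
   // Swap in the new pointer and receive the previously published one
   Config *oldConfig = InterlockedExchangePointer(&s_activeConfig, newConfig);
   delete oldConfig;   // safe only if no other thread can still be reading it
}
#endif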

#endif /* __cplusplus */

#endif