Add atomic_sub_* functions to libspl.
[zfs.git] / lib / libspl / asm-generic / atomic.c
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License, Version 1.0 only
6  * (the "License").  You may not use this file except in compliance
7  * with the License.
8  *
9  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
10  * or http://www.opensolaris.org/os/licensing.
11  * See the License for the specific language governing permissions
12  * and limitations under the License.
13  *
14  * When distributing Covered Code, include this CDDL HEADER in each
15  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
16  * If applicable, add the following below this CDDL HEADER, with the
17  * fields enclosed by brackets "[]" replaced with your own identifying
18  * information: Portions Copyright [yyyy] [name of copyright owner]
19  *
20  * CDDL HEADER END
21  */
22 /*
23  * Copyright (c) 2009 by Sun Microsystems, Inc.  All rights reserved.
24  * Use is subject to license terms.
25  */
26
27 #include <atomic.h>
28 #include <assert.h>
29 #include <pthread.h>
30
/*
 * All operations are implemented by serializing them through a global
 * pthread mutex.  This provides a correct generic implementation.
 * However, all supported architectures are encouraged to provide a
 * native implementation in assembly for performance reasons.
 */
37 pthread_mutex_t atomic_lock = PTHREAD_MUTEX_INITIALIZER;
38
/*
 * These are the void-returning variants
 */
42
43 #define ATOMIC_INC(name, type) \
44         void atomic_inc_##name(volatile type *target)                   \
45         {                                                               \
46                 VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0);      \
47                 (*target)++;                                            \
48                 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0);    \
49         }
50
51 ATOMIC_INC(long, unsigned long)
52 ATOMIC_INC(8, uint8_t)
53 ATOMIC_INC(uchar, uchar_t)
54 ATOMIC_INC(16, uint16_t)
55 ATOMIC_INC(ushort, ushort_t)
56 ATOMIC_INC(32, uint32_t)
57 ATOMIC_INC(uint, uint_t)
58 ATOMIC_INC(ulong, ulong_t)
59 ATOMIC_INC(64, uint64_t)
60
61
62 #define ATOMIC_DEC(name, type) \
63         void atomic_dec_##name(volatile type *target)                   \
64         {                                                               \
65                 VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0);      \
66                 (*target)--;                                            \
67                 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0);    \
68         }
69
70 ATOMIC_DEC(long, unsigned long)
71 ATOMIC_DEC(8, uint8_t)
72 ATOMIC_DEC(uchar, uchar_t)
73 ATOMIC_DEC(16, uint16_t)
74 ATOMIC_DEC(ushort, ushort_t)
75 ATOMIC_DEC(32, uint32_t)
76 ATOMIC_DEC(uint, uint_t)
77 ATOMIC_DEC(ulong, ulong_t)
78 ATOMIC_DEC(64, uint64_t)
79
80
81 #define ATOMIC_ADD(name, type1, type2) \
82         void atomic_add_##name(volatile type1 *target, type2 bits)      \
83         {                                                               \
84                 VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0);      \
85                 *target += bits;                                        \
86                 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0);    \
87         }
88
89 ATOMIC_ADD(8, uint8_t, int8_t)
90 ATOMIC_ADD(char, uchar_t, signed char)
91 ATOMIC_ADD(16, uint16_t, int16_t)
92 ATOMIC_ADD(short, ushort_t, short)
93 ATOMIC_ADD(32, uint32_t, int32_t)
94 ATOMIC_ADD(int, uint_t, int)
95 ATOMIC_ADD(long, ulong_t, long)
96 ATOMIC_ADD(64, uint64_t, int64_t)
97
98 void atomic_add_ptr(volatile void *target, ssize_t bits)
99 {
100         VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0);
101         *(caddr_t *)target += bits;
102         VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0);
103 }
104
105
106 #define ATOMIC_SUB(name, type1, type2) \
107         void atomic_sub_##name(volatile type1 *target, type2 bits)      \
108         {                                                               \
109                 VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0);      \
110                 *target -= bits;                                        \
111                 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0);    \
112         }
113
114 ATOMIC_SUB(8, uint8_t, int8_t)
115 ATOMIC_SUB(char, uchar_t, signed char)
116 ATOMIC_SUB(16, uint16_t, int16_t)
117 ATOMIC_SUB(short, ushort_t, short)
118 ATOMIC_SUB(32, uint32_t, int32_t)
119 ATOMIC_SUB(int, uint_t, int)
120 ATOMIC_SUB(long, ulong_t, long)
121 ATOMIC_SUB(64, uint64_t, int64_t)
122
123 void atomic_sub_ptr(volatile void *target, ssize_t bits)
124 {
125         VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0);
126         *(caddr_t *)target -= bits;
127         VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0);
128 }
129
130
131 #define ATOMIC_OR(name, type) \
132         void atomic_or_##name(volatile type *target, type bits)         \
133         {                                                               \
134                 VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0);      \
135                 *target |= bits;                                        \
136                 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0);    \
137         }
138
139 ATOMIC_OR(8, uint8_t)
140 ATOMIC_OR(uchar, uchar_t)
141 ATOMIC_OR(16, uint16_t)
142 ATOMIC_OR(ushort, ushort_t)
143 ATOMIC_OR(32, uint32_t)
144 ATOMIC_OR(uint, uint_t)
145 ATOMIC_OR(ulong, ulong_t)
146 ATOMIC_OR(64, uint64_t)
147
148
149 #define ATOMIC_AND(name, type) \
150         void atomic_and_##name(volatile type *target, type bits)        \
151         {                                                               \
152                 VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0);      \
153                 *target &= bits;                                        \
154                 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0);    \
155         }
156
157 ATOMIC_AND(8, uint8_t)
158 ATOMIC_AND(uchar, uchar_t)
159 ATOMIC_AND(16, uint16_t)
160 ATOMIC_AND(ushort, ushort_t)
161 ATOMIC_AND(32, uint32_t)
162 ATOMIC_AND(uint, uint_t)
163 ATOMIC_AND(ulong, ulong_t)
164 ATOMIC_AND(64, uint64_t)
165
166
167 /*
168  * New value returning variants
169  */
170
171 #define ATOMIC_INC_NV(name, type) \
172         type atomic_inc_##name##_nv(volatile type *target)              \
173         {                                                               \
174                 type rc;                                                \
175                 VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0);      \
176                 rc = (++(*target));                                     \
177                 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0);    \
178                 return rc;                                              \
179         }
180
181 ATOMIC_INC_NV(long, unsigned long)
182 ATOMIC_INC_NV(8, uint8_t)
183 ATOMIC_INC_NV(uchar, uchar_t)
184 ATOMIC_INC_NV(16, uint16_t)
185 ATOMIC_INC_NV(ushort, ushort_t)
186 ATOMIC_INC_NV(32, uint32_t)
187 ATOMIC_INC_NV(uint, uint_t)
188 ATOMIC_INC_NV(ulong, ulong_t)
189 ATOMIC_INC_NV(64, uint64_t)
190
191
192 #define ATOMIC_DEC_NV(name, type) \
193         type atomic_dec_##name##_nv(volatile type *target)              \
194         {                                                               \
195                 type rc;                                                \
196                 VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0);      \
197                 rc = (--(*target));                                     \
198                 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0);    \
199                 return rc;                                              \
200         }
201
202 ATOMIC_DEC_NV(long, unsigned long)
203 ATOMIC_DEC_NV(8, uint8_t)
204 ATOMIC_DEC_NV(uchar, uchar_t)
205 ATOMIC_DEC_NV(16, uint16_t)
206 ATOMIC_DEC_NV(ushort, ushort_t)
207 ATOMIC_DEC_NV(32, uint32_t)
208 ATOMIC_DEC_NV(uint, uint_t)
209 ATOMIC_DEC_NV(ulong, ulong_t)
210 ATOMIC_DEC_NV(64, uint64_t)
211
212
213 #define ATOMIC_ADD_NV(name, type1, type2) \
214         type1 atomic_add_##name##_nv(volatile type1 *target, type2 bits)\
215         {                                                               \
216                 type1 rc;                                               \
217                 VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0);      \
218                 rc = (*target += bits);                                 \
219                 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0);    \
220                 return rc;                                              \
221         }
222
223 ATOMIC_ADD_NV(8, uint8_t, int8_t)
224 ATOMIC_ADD_NV(char, uchar_t, signed char)
225 ATOMIC_ADD_NV(16, uint16_t, int16_t)
226 ATOMIC_ADD_NV(short, ushort_t, short)
227 ATOMIC_ADD_NV(32, uint32_t, int32_t)
228 ATOMIC_ADD_NV(int, uint_t, int)
229 ATOMIC_ADD_NV(long, ulong_t, long)
230 ATOMIC_ADD_NV(64, uint64_t, int64_t)
231
232 void *atomic_add_ptr_nv(volatile void *target, ssize_t bits)
233 {
234         void *ptr;
235
236         VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0);
237         ptr = (*(caddr_t *)target += bits);
238         VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0);
239
240         return ptr;
241 }
242
243
244 #define ATOMIC_SUB_NV(name, type1, type2) \
245         type1 atomic_sub_##name##_nv(volatile type1 *target, type2 bits)\
246         {                                                               \
247                 type1 rc;                                               \
248                 VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0);      \
249                 rc = (*target -= bits);                                 \
250                 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0);    \
251                 return rc;                                              \
252         }
253
254 ATOMIC_SUB_NV(8, uint8_t, int8_t)
255 ATOMIC_SUB_NV(char, uchar_t, signed char)
256 ATOMIC_SUB_NV(16, uint16_t, int16_t)
257 ATOMIC_SUB_NV(short, ushort_t, short)
258 ATOMIC_SUB_NV(32, uint32_t, int32_t)
259 ATOMIC_SUB_NV(int, uint_t, int)
260 ATOMIC_SUB_NV(long, ulong_t, long)
261 ATOMIC_SUB_NV(64, uint64_t, int64_t)
262
263 void *atomic_sub_ptr_nv(volatile void *target, ssize_t bits)
264 {
265         void *ptr;
266
267         VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0);
268         ptr = (*(caddr_t *)target -= bits);
269         VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0);
270
271         return ptr;
272 }
273
274
275 #define ATOMIC_OR_NV(name, type) \
276         type atomic_or_##name##_nv(volatile type *target, type bits)    \
277         {                                                               \
278                 type rc;                                                \
279                 VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0);      \
280                 rc = (*target |= bits);                                 \
281                 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0);    \
282                 return rc;                                              \
283         }
284
285 ATOMIC_OR_NV(long, unsigned long)
286 ATOMIC_OR_NV(8, uint8_t)
287 ATOMIC_OR_NV(uchar, uchar_t)
288 ATOMIC_OR_NV(16, uint16_t)
289 ATOMIC_OR_NV(ushort, ushort_t)
290 ATOMIC_OR_NV(32, uint32_t)
291 ATOMIC_OR_NV(uint, uint_t)
292 ATOMIC_OR_NV(ulong, ulong_t)
293 ATOMIC_OR_NV(64, uint64_t)
294
295
296 #define ATOMIC_AND_NV(name, type) \
297         type atomic_and_##name##_nv(volatile type *target, type bits)   \
298         {                                                               \
299                 type rc;                                                \
300                 VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0);      \
301                 rc = (*target &= bits);                                 \
302                 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0);    \
303                 return rc;                                              \
304         }
305
306 ATOMIC_AND_NV(long, unsigned long)
307 ATOMIC_AND_NV(8, uint8_t)
308 ATOMIC_AND_NV(uchar, uchar_t)
309 ATOMIC_AND_NV(16, uint16_t)
310 ATOMIC_AND_NV(ushort, ushort_t)
311 ATOMIC_AND_NV(32, uint32_t)
312 ATOMIC_AND_NV(uint, uint_t)
313 ATOMIC_AND_NV(ulong, ulong_t)
314 ATOMIC_AND_NV(64, uint64_t)
315
316
/*
 * Compare-and-swap: if the current value of *target equals the expected
 * value (second argument), store the new value (third argument).  In all
 * cases the value *target held on entry is returned.
 */
320
321 #define ATOMIC_CAS(name, type) \
322         type atomic_cas_##name(volatile type *target, type arg1, type arg2) \
323         {                                                               \
324                 type old;                                               \
325                 VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0);      \
326                 old = *target;                                          \
327                 if (old == arg1)                                        \
328                         *target = arg2;                                 \
329                 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0);    \
330                 return old;                                             \
331         }
332
333 ATOMIC_CAS(8, uint8_t)
334 ATOMIC_CAS(uchar, uchar_t)
335 ATOMIC_CAS(16, uint16_t)
336 ATOMIC_CAS(ushort, ushort_t)
337 ATOMIC_CAS(32, uint32_t)
338 ATOMIC_CAS(uint, uint_t)
339 ATOMIC_CAS(ulong, ulong_t)
340 ATOMIC_CAS(64, uint64_t)
341
342 void *atomic_cas_ptr(volatile void *target, void *arg1, void *arg2)
343 {
344         void *old;
345
346         VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0);
347         old = *(void **)target;
348         if (old == arg1)
349                 *(void **)target = arg2;
350         VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0);
351
352         return old;
353 }
354
355
356 /*
357  * Swap target and return old value
358  */
359
360 #define ATOMIC_SWAP(name, type) \
361         type atomic_swap_##name(volatile type *target, type bits)       \
362         {                                                               \
363                 type old;                                               \
364                 VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0);      \
365                 old = *target;                                          \
366                 *target = bits;                                         \
367                 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0);    \
368                 return old;                                             \
369         }
370
371 ATOMIC_SWAP(8, uint8_t)
372 ATOMIC_SWAP(uchar, uchar_t)
373 ATOMIC_SWAP(16, uint16_t)
374 ATOMIC_SWAP(ushort, ushort_t)
375 ATOMIC_SWAP(32, uint32_t)
376 ATOMIC_SWAP(uint, uint_t)
377 ATOMIC_SWAP(ulong, ulong_t)
378 ATOMIC_SWAP(64, uint64_t)
379
380 void *atomic_swap_ptr(volatile void *target, void *bits)
381 {
382         void *old;
383
384         VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0);
385         old = *(void **)target;
386         *(void **)target = bits;
387         VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0);
388
389         return old;
390 }
391
392
393 int atomic_set_long_excl(volatile ulong_t *target, uint_t value)
394 {
395         ulong_t bit;
396
397         VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0);
398         bit = (1UL << value);
399         if ((*target & bit) != 0) {
400                 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0);
401                 return -1;
402         }
403         *target |= bit;
404         VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0);
405
406         return 0;
407 }
408
409 int atomic_clear_long_excl(volatile ulong_t *target, uint_t value)
410 {
411         ulong_t bit;
412
413         VERIFY3S(pthread_mutex_lock(&atomic_lock), ==, 0);
414         bit = (1UL << value);
415         if ((*target & bit) != 0) {
416                 VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0);
417                 return -1;
418         }
419         *target &= ~bit;
420         VERIFY3S(pthread_mutex_unlock(&atomic_lock), ==, 0);
421
422         return 0;
423 }
424
425 void membar_enter(void)
426 {
427         /* XXX - Implement me */
428 }
429
430 void membar_exit(void)
431 {
432         /* XXX - Implement me */
433 }
434
435 void membar_producer(void)
436 {
437         /* XXX - Implement me */
438 }
439
440 void membar_consumer(void)
441 {
442         /* XXX - Implement me */
443 }
444
445 /* Legacy kernel interfaces; they will go away (eventually). */
446
447 uint8_t cas8(uint8_t *target, uint8_t arg1, uint8_t arg2)
448 {
449         return atomic_cas_8(target, arg1, arg2);
450 }
451
452 uint32_t cas32(uint32_t *target, uint32_t arg1, uint32_t arg2)
453 {
454         return atomic_cas_32(target, arg1, arg2);
455 }
456
457 uint64_t cas64(uint64_t *target, uint64_t arg1, uint64_t arg2)
458 {
459         return atomic_cas_64(target, arg1, arg2);
460 }
461
462 ulong_t caslong(ulong_t *target, ulong_t arg1, ulong_t arg2)
463 {
464         return atomic_cas_ulong(target, arg1, arg2);
465 }
466
467 void *casptr(void *target, void *arg1, void *arg2)
468 {
469         return atomic_cas_ptr(target, arg1, arg2);
470 }
471
472 void atomic_and_long(ulong_t *target, ulong_t bits)
473 {
474         return atomic_and_ulong(target, bits);
475 }
476
477 void atomic_or_long(ulong_t *target, ulong_t bits)
478 {
479         return atomic_or_ulong(target, bits);
480 }