DPDK 22.11.1
rte_atomic.h
/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation
 */

#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

/**
 * @file
 * Atomic Operations
 *
 * This file defines a generic API for atomic operations.
 */

#include <stdint.h>
#include <rte_compat.h>
#include <rte_common.h>

#ifdef __DOXYGEN__

/* General memory barriers: order both loads and stores (rte_mb),
 * stores only (rte_wmb) or loads only (rte_rmb). */

static inline void rte_mb(void);

static inline void rte_wmb(void);

static inline void rte_rmb(void);

/* SMP memory barriers: ordering of accesses between lcores. */

static inline void rte_smp_mb(void);

static inline void rte_smp_wmb(void);

static inline void rte_smp_rmb(void);

/* I/O memory barriers: ordering of accesses to I/O memory. */

static inline void rte_io_mb(void);

static inline void rte_io_wmb(void);

static inline void rte_io_rmb(void);

#endif /* __DOXYGEN__ */

#define rte_compiler_barrier() do {		\
	asm volatile ("" : : : "memory");	\
} while(0)

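/*
 * Illustrative note (not part of the original header): rte_compiler_barrier
 * only stops the compiler from reordering across it and emits no fence
 * instruction. It is useful where hardware ordering is already sufficient,
 * e.g. on strongly ordered architectures some rte_smp_* barriers reduce to a
 * compiler barrier.
 */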
/* Synchronization fence between threads based on the specified memory order. */
static inline void rte_atomic_thread_fence(int memorder);

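/*
 * Illustrative sketch (not part of the original header): the fence takes a
 * GCC/clang __ATOMIC_* memory-order constant. A release fence before a
 * relaxed store publishes prior writes (data and ready are hypothetical).
 *
 *   data = 42;
 *   rte_atomic_thread_fence(__ATOMIC_RELEASE);
 *   __atomic_store_n(&ready, 1, __ATOMIC_RELAXED);
 */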
/*------------------------- 16 bit atomic operations -------------------------*/

static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
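
/*
 * Illustrative sketch (not part of the original header): cmpset writes src to
 * *dst only if *dst still equals exp, and returns non-zero on success
 * (my_id is a hypothetical identifier).
 *
 *   volatile uint16_t owner = 0;
 *   while (!rte_atomic16_cmpset(&owner, 0, my_id))
 *       ;                                      // spin until we install my_id
 */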

static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_2(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif

typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

#define RTE_ATOMIC16_INIT(val) { (val) }

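/*
 * Illustrative sketch (not part of the original header): static
 * initialization of a counter (nb_drops is a hypothetical name).
 *
 *   static rte_atomic16_t nb_drops = RTE_ATOMIC16_INIT(0);
 */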
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	return v->cnt;
}

static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	v->cnt = new_value;
}

static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
	rte_atomic16_add(v, 1);
}
#endif

static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
	rte_atomic16_sub(v, 1);
}
#endif

static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
	v->cnt = 0;
}

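/*
 * Illustrative sketch (not part of the original header): test_and_set only
 * succeeds for the caller that observes the counter at 0, which makes it
 * usable as a run-once flag (do_one_time_setup is a hypothetical helper).
 *
 *   static rte_atomic16_t once = RTE_ATOMIC16_INIT(0);
 *   if (rte_atomic16_test_and_set(&once))
 *       do_one_time_setup();
 */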
/*------------------------- 32 bit atomic operations -------------------------*/

static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_4(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif

typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

#define RTE_ATOMIC32_INIT(val) { (val) }

static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	return v->cnt;
}

static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	v->cnt = new_value;
}

static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
	rte_atomic32_add(v, 1);
}
#endif

static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
	rte_atomic32_sub(v, 1);
}
#endif

static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
	v->cnt = 0;
}

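/*
 * Illustrative sketch (not part of the original header): a shared reference
 * count, assuming a hypothetical object with an rte_atomic32_t refcnt field
 * and a hypothetical free_object() helper.
 *
 *   rte_atomic32_inc(&obj->refcnt);            // take a reference
 *   ...
 *   if (rte_atomic32_dec_and_test(&obj->refcnt))
 *       free_object(obj);                      // last reference dropped
 */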
/*------------------------- 64 bit atomic operations -------------------------*/

static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_8(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif

typedef struct {
	volatile int64_t cnt; /**< An internal counter value. */
} rte_atomic64_t;

#define RTE_ATOMIC64_INIT(val) { (val) }

static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
	v->cnt = 0;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
					      tmp, 0);
	}
#endif
}
#endif

static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
	return v->cnt;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
					      tmp, tmp);
	}
	return tmp;
#endif
}
#endif

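/*
 * On 32-bit targets a plain 64-bit load or store is not guaranteed to be a
 * single atomic access, so the non-__LP64__ paths of rte_atomic64_init(),
 * rte_atomic64_read() and rte_atomic64_set() emulate it with an
 * rte_atomic64_cmpset() loop that retries until the compare-and-swap succeeds.
 */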
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
	v->cnt = new_value;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
					      tmp, new_value);
	}
#endif
}
#endif

static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}
#endif

static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}
#endif

static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}
#endif

static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}
#endif

static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}
#endif

static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
#endif

static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}
#endif

static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
#endif

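/*
 * Illustrative sketch (not part of the original header): a 64-bit statistics
 * counter shared by several lcores (tx_bytes and pkt_len are hypothetical).
 *
 *   static rte_atomic64_t tx_bytes = RTE_ATOMIC64_INIT(0);
 *
 *   rte_atomic64_add(&tx_bytes, pkt_len);        // on the datapath
 *   int64_t total = rte_atomic64_read(&tx_bytes); // in a stats loop
 */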
/*------------------------ 128 bit atomic operations -------------------------*/

/**
 * 128-bit integer structure.
 */
typedef struct {
	RTE_STD_C11
	union {
		uint64_t val[2];
#ifdef RTE_ARCH_64
		__extension__ __int128 int128;
#endif
	};
} __rte_aligned(16) rte_int128_t;

#ifdef __DOXYGEN__

/*
 * Atomic compare and exchange on a 128-bit value. Returns non-zero on
 * success; on failure *exp is updated with the current value of *dst.
 * The success and failure arguments are __ATOMIC_* memory orders.
 */
__rte_experimental
static inline int
rte_atomic128_cmp_exchange(rte_int128_t *dst,
			   rte_int128_t *exp,
			   const rte_int128_t *src,
			   unsigned int weak,
			   int success,
			   int failure);

#endif /* __DOXYGEN__ */

#endif /* _RTE_ATOMIC_H_ */
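/*
 * Illustrative sketch (not part of the original header): a 128-bit
 * compare-and-swap retry loop, assuming a hypothetical 16-byte slot pointer.
 *
 *   rte_int128_t expected = *slot;             // non-atomic snapshot
 *   rte_int128_t desired;
 *   do {
 *       desired.val[0] = expected.val[0] + 1;  // e.g. bump the counter half
 *       desired.val[1] = expected.val[1];      // keep the tag half
 *   } while (!rte_atomic128_cmp_exchange(slot, &expected, &desired, 0,
 *                                        __ATOMIC_ACQ_REL, __ATOMIC_RELAXED));
 */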