#include <errno.h>			/* EBUSY, for the trylock return values */

#include <rte_branch_prediction.h>	/* likely()/unlikely() */
#include <rte_pause.h>			/* rte_pause() */

#define RTE_RWLOCK_WAIT	 0x1	/* Writer is waiting */
#define RTE_RWLOCK_WRITE 0x2	/* Writer has the lock */
#define RTE_RWLOCK_MASK  (RTE_RWLOCK_WAIT | RTE_RWLOCK_WRITE)
				/* Writer is waiting or has the lock */
#define RTE_RWLOCK_READ	 0x4	/* Reader increment */

/* The whole lock state is one counter: each reader adds RTE_RWLOCK_READ,
 * while the two low bits belong to the writer. */
typedef struct {
	int32_t cnt;
} rte_rwlock_t;

/* A static rwlock initializer: a zero counter means unlocked. */
#define RTE_RWLOCK_INITIALIZER { 0 }

/* Initialize the rwlock to an unlocked state. */
static inline void
rte_rwlock_init(rte_rwlock_t *rwl)
{
	rwl->cnt = 0;
}
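To make the encoding concrete, here is a small illustration (my own, not part of the header): the reader count lives in bits 2 and up, and the two low bits track the writer.

/*
 * Illustration only: sample counter values under this encoding.
 *   cnt == 0x0: unlocked
 *   cnt == 0xc (3 * RTE_RWLOCK_READ): three readers hold the lock
 *   cnt == 0x2 (RTE_RWLOCK_WRITE): a writer holds the lock
 *   cnt == 0x5 (RTE_RWLOCK_READ | RTE_RWLOCK_WAIT): one reader holds the
 *       lock while a writer waits for it
 */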
/* Take a read lock, looping until it is held. */
static inline void
rte_rwlock_read_lock(rte_rwlock_t *rwl)
{
	int32_t x;

	while (1) {
		/* Spin while a writer holds the lock or is waiting for it */
		while (__atomic_load_n(&rwl->cnt, __ATOMIC_RELAXED)
		       & RTE_RWLOCK_MASK)
			rte_pause();

		/* Optimistically register as a reader */
		x = __atomic_add_fetch(&rwl->cnt, RTE_RWLOCK_READ,
				       __ATOMIC_ACQUIRE);

		/* No writer bits set: the read lock is held */
		if (likely(!(x & RTE_RWLOCK_MASK)))
			return;

		/* A writer raced in: back out the increment and retry */
		__atomic_fetch_sub(&rwl->cnt, RTE_RWLOCK_READ,
				   __ATOMIC_RELAXED);
	}
}
/* Try to take a read lock; returns 0 on success, -EBUSY otherwise. */
static inline int
rte_rwlock_read_trylock(rte_rwlock_t *rwl)
{
	int32_t x;

	x = __atomic_load_n(&rwl->cnt, __ATOMIC_RELAXED);

	/* Fail fast if a writer holds the lock or is waiting for it */
	if (x & RTE_RWLOCK_MASK)
		return -EBUSY;

	/* Optimistically register as a reader */
	x = __atomic_add_fetch(&rwl->cnt, RTE_RWLOCK_READ,
			       __ATOMIC_ACQUIRE);

	/* A writer raced in: back out the increment and fail */
	if (unlikely(x & RTE_RWLOCK_MASK)) {
		__atomic_fetch_sub(&rwl->cnt, RTE_RWLOCK_READ,
				   __ATOMIC_RELEASE);
		return -EBUSY;
	}

	return 0;
}

/* Release a read lock. */
static inline void
rte_rwlock_read_unlock(rte_rwlock_t *rwl)
{
	__atomic_fetch_sub(&rwl->cnt, RTE_RWLOCK_READ, __ATOMIC_RELEASE);
}
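A minimal reader-side usage sketch. The lock variable, the table, and its lookup routine (tbl_lock, table_find(), lookup_entry()) are hypothetical names for illustration, not part of the header:

static rte_rwlock_t tbl_lock = RTE_RWLOCK_INITIALIZER;	/* hypothetical */

/* Readers run concurrently with each other; they only exclude writers. */
static int
lookup_entry(int key)
{
	int ret;

	rte_rwlock_read_lock(&tbl_lock);
	ret = table_find(key);		/* hypothetical read-only lookup */
	rte_rwlock_read_unlock(&tbl_lock);

	return ret;
}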
/* Try to take a write lock; returns 0 on success, -EBUSY otherwise. */
static inline int
rte_rwlock_write_trylock(rte_rwlock_t *rwl)
{
	int32_t x;

	x = __atomic_load_n(&rwl->cnt, __ATOMIC_RELAXED);

	/* Succeed only when no reader or writer holds the lock; adding
	 * RTE_RWLOCK_WRITE preserves a possible wait bit */
	if (x < RTE_RWLOCK_WRITE &&
	    __atomic_compare_exchange_n(&rwl->cnt, &x, x + RTE_RWLOCK_WRITE,
					1, __ATOMIC_ACQUIRE, __ATOMIC_RELAXED))
		return 0;

	return -EBUSY;
}
/* Take a write lock, looping until it is held. */
static inline void
rte_rwlock_write_lock(rte_rwlock_t *rwl)
{
	int32_t x;

	while (1) {
		x = __atomic_load_n(&rwl->cnt, __ATOMIC_RELAXED);

		/* No readers and no writer: try to take the lock,
		 * clearing the wait bit in the same step */
		if (likely(x < RTE_RWLOCK_WRITE)) {
			if (__atomic_compare_exchange_n(&rwl->cnt, &x, RTE_RWLOCK_WRITE, 1,
							__ATOMIC_ACQUIRE, __ATOMIC_RELAXED))
				return;
		}

		/* Mark a writer as waiting so new readers back off */
		if (!(x & RTE_RWLOCK_WAIT))
			__atomic_fetch_or(&rwl->cnt, RTE_RWLOCK_WAIT,
					  __ATOMIC_RELAXED);

		/* Wait for the current readers to drain before retrying */
		while (__atomic_load_n(&rwl->cnt, __ATOMIC_RELAXED)
		       > RTE_RWLOCK_WAIT)
			rte_pause();
	}
}

/* Release a write lock. */
static inline void
rte_rwlock_write_unlock(rte_rwlock_t *rwl)
{
	__atomic_fetch_sub(&rwl->cnt, RTE_RWLOCK_WRITE, __ATOMIC_RELEASE);
}
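The matching writer side, reusing the hypothetical tbl_lock and table from the reader sketch; the trylock variant lets a caller back off instead of spinning:

/* A writer excludes both readers and other writers. */
static void
update_entry(int key, int val)
{
	rte_rwlock_write_lock(&tbl_lock);
	table_set(key, val);		/* hypothetical mutating update */
	rte_rwlock_write_unlock(&tbl_lock);
}

/* Non-blocking variant: gives up with -EBUSY instead of waiting. */
static int
try_update_entry(int key, int val)
{
	if (rte_rwlock_write_trylock(&tbl_lock) != 0)
		return -EBUSY;
	table_set(key, val);
	rte_rwlock_write_unlock(&tbl_lock);

	return 0;
}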
static void rte_rwlock_init(rte_rwlock_t *rwl)
static void rte_rwlock_read_lock(rte_rwlock_t *rwl)
static int rte_rwlock_read_trylock(rte_rwlock_t *rwl)
static void rte_rwlock_read_unlock(rte_rwlock_t *rwl)
static void rte_rwlock_write_lock(rte_rwlock_t *rwl)
static int rte_rwlock_write_trylock(rte_rwlock_t *rwl)
static void rte_rwlock_write_unlock(rte_rwlock_t *rwl)
static void rte_rwlock_read_lock_tm(rte_rwlock_t *rwl)
static void rte_rwlock_read_unlock_tm(rte_rwlock_t *rwl)
static void rte_rwlock_write_lock_tm(rte_rwlock_t *rwl)
static void rte_rwlock_write_unlock_tm(rte_rwlock_t *rwl)
static void rte_pause(void)
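The _tm variants (bodies not shown in this extract) try to run the critical section as a hardware memory transaction where the CPU supports one, and fall back to taking the plain lock otherwise; a _tm lock must be released with the matching _tm unlock. A sketch using the hypothetical table from the earlier examples:

static int
lookup_entry_tm(int key)
{
	int ret;

	rte_rwlock_read_lock_tm(&tbl_lock);	/* HTM if available, else lock */
	ret = table_find(key);
	rte_rwlock_read_unlock_tm(&tbl_lock);	/* pair with the _tm lock */

	return ret;
}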