#ifndef included_clib_lock_h
#define included_clib_lock_h

#include <vppinfra/clib.h>
#include <vppinfra/atomics.h>

#if __x86_64__
#define CLIB_PAUSE() __builtin_ia32_pause ()
#elif defined (__aarch64__) || defined (__arm__)
#define CLIB_PAUSE() __asm__ ("yield")
#else
#define CLIB_PAUSE()
#endif

#if CLIB_DEBUG > 0
#define CLIB_LOCK_DBG(_p)                                  \
do {                                                       \
    (*_p)->frame_address = __builtin_frame_address (0);    \
    (*_p)->pid = getpid ();                                \
    (*_p)->thread_index = os_get_thread_index ();          \
} while (0)
#define CLIB_LOCK_DBG_CLEAR(_p)                            \
do {                                                       \
    (*_p)->frame_address = 0;                              \
    (*_p)->pid = 0;                                        \
    (*_p)->thread_index = 0;                               \
} while (0)
#else
#define CLIB_LOCK_DBG(_p)
#define CLIB_LOCK_DBG_CLEAR(_p)
#endif

#define CLIB_SPINLOCK_IS_LOCKED(_p) (*(_p))->lock
#define CLIB_SPINLOCK_ASSERT_LOCKED(_p) ASSERT(CLIB_SPINLOCK_IS_LOCKED((_p)))

/* ... spinlock type and functions, rwlock type and writer-side
 * functions elided in this excerpt ... */

static_always_inline void
clib_rwlock_reader_lock (clib_rwlock_t * p)
{
  i32 cnt;
  do
    {
      /* spin while a writer holds the lock (rw_cnt < 0) */
      while ((cnt = clib_atomic_load_relax_n (&(*p)->rw_cnt)) < 0)
	CLIB_PAUSE ();
    }
  /* take the lock by atomically bumping the reader count */
  while (!clib_atomic_cmp_and_swap_acq_relax_n
	 (&(*p)->rw_cnt, &cnt, cnt + 1, 1));
  CLIB_LOCK_DBG (p);
}

static_always_inline void
clib_rwlock_reader_unlock (clib_rwlock_t * p)
{
  ASSERT ((*p)->rw_cnt > 0);
  CLIB_LOCK_DBG_CLEAR (p);
  clib_atomic_fetch_sub_rel (&(*p)->rw_cnt, 1);
}
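/* A minimal usage sketch, not part of this header: shared_state_t,
 * bump_counter and lookup are illustrative names. A real program must
 * also set up the vppinfra heap (clib_mem_init) before the init calls
 * below can allocate the lock cache lines. */

#include <vppinfra/vec.h>

typedef struct
{
  clib_spinlock_t counter_lock;	/* short, write-heavy critical section */
  u64 counter;
  clib_rwlock_t table_lock;	/* guards read-mostly data */
  u32 *table;			/* vppinfra vector */
} shared_state_t;

static void
shared_state_init (shared_state_t * s)
{
  clib_spinlock_init (&s->counter_lock);
  clib_rwlock_init (&s->table_lock);
}

static void
bump_counter (shared_state_t * s)
{
  clib_spinlock_lock (&s->counter_lock);
  s->counter++;
  clib_spinlock_unlock (&s->counter_lock);
}

static u32
lookup (shared_state_t * s, u32 index)
{
  u32 rv = ~0;
  clib_rwlock_reader_lock (&s->table_lock);
  if (index < vec_len (s->table))
    rv = s->table[index];
  clib_rwlock_reader_unlock (&s->table_lock);
  return rv;
}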
/*
 * Interfaces defined by the remainder of this header (definitions
 * elided in this excerpt):
 */

/* Spinlock: a cache-line aligned u32 lock word */
static void clib_spinlock_init (clib_spinlock_t * p);
static void clib_spinlock_free (clib_spinlock_t * p);
static_always_inline void clib_spinlock_lock (clib_spinlock_t * p);
static_always_inline void clib_spinlock_unlock (clib_spinlock_t * p);
static_always_inline void clib_spinlock_lock_if_init (clib_spinlock_t * p);
static_always_inline void clib_spinlock_unlock_if_init (clib_spinlock_t * p);

/* Reader/writer lock: rw_cnt is -1 while a writer holds the lock and
 * counts the readers otherwise */
typedef struct clib_rw_lock_ *clib_rwlock_t;

static void clib_rwlock_init (clib_rwlock_t * p);
static void clib_rwlock_free (clib_rwlock_t * p);
static void clib_rwlock_reader_lock (clib_rwlock_t * p);
static void clib_rwlock_reader_unlock (clib_rwlock_t * p);
static void clib_rwlock_writer_lock (clib_rwlock_t * p);
static void clib_rwlock_writer_unlock (clib_rwlock_t * p);

/*
 * Supporting vppinfra primitives used by these definitions and declared
 * in other vppinfra headers:
 *
 *   static_always_inline, CLIB_CACHE_LINE_BYTES,
 *   CLIB_CACHE_LINE_ALIGN_MARK (mark)                -- clib.h / cache.h
 *   clib_atomic_release (a), clib_atomic_load_relax_n (a),
 *   clib_atomic_fetch_sub_rel (a, b),
 *   clib_atomic_cmp_and_swap_acq_relax_n (addr, exp, new, weak)
 *                                                    -- atomics.h
 *   clib_mem_alloc_aligned (size, align), clib_mem_free (p),
 *   clib_memset (s, c, n)                            -- mem.h / string.h
 */
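/* To see why the reader path pairs an acquire CAS with a release
 * decrement, here is a self-contained C11 model of the same rw_cnt
 * protocol. It is a sketch, not vppinfra code: toy_rwlock_t and the
 * toy_* functions are hypothetical names, plain C11 atomics stand in
 * for the clib_atomic_* wrappers, and CLIB_PAUSE is dropped. */

#include <assert.h>
#include <stdatomic.h>

typedef struct
{
  /* -1 while a writer holds the lock, otherwise the reader count */
  _Atomic int rw_cnt;
} toy_rwlock_t;

static void
toy_reader_lock (toy_rwlock_t * l)
{
  int cnt;
  do
    {
      /* wait until no writer holds the lock */
      while ((cnt = atomic_load_explicit (&l->rw_cnt,
					  memory_order_relaxed)) < 0)
	;
    }
  /* success = acquire: orders the critical section after the CAS;
   * failure = relaxed: we only retry, so no ordering is needed */
  while (!atomic_compare_exchange_weak_explicit (&l->rw_cnt, &cnt, cnt + 1,
						 memory_order_acquire,
						 memory_order_relaxed));
}

static void
toy_reader_unlock (toy_rwlock_t * l)
{
  assert (atomic_load_explicit (&l->rw_cnt, memory_order_relaxed) > 0);
  /* release pairs with the acquire in the lock paths */
  atomic_fetch_sub_explicit (&l->rw_cnt, 1, memory_order_release);
}

static void
toy_writer_lock (toy_rwlock_t * l)
{
  int expected = 0;
  /* a writer needs the lock completely free: CAS 0 -> -1 */
  while (!atomic_compare_exchange_weak_explicit (&l->rw_cnt, &expected, -1,
						 memory_order_acquire,
						 memory_order_relaxed))
    expected = 0;
}

static void
toy_writer_unlock (toy_rwlock_t * l)
{
  atomic_store_explicit (&l->rw_cnt, 0, memory_order_release);
}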