#ifndef included_clib_smp_h
#define included_clib_smp_h

/* Tail of os_get_cpu_number_inline (declared below): the cpu number is
   derived from the current stack address; anything out of range maps to cpu 0. */
  return n < m->n_cpus ? n : 0;

/* Atomic primitives wrapping the GCC __sync builtins.  Note the argument
   order: clib takes (addr, new, old) while __sync_val_compare_and_swap
   takes (addr, old, new). */
#define clib_smp_compare_and_swap(addr,new,old) __sync_val_compare_and_swap(addr,old,new)
#define clib_smp_swap(addr,new) __sync_lock_test_and_set(addr,new)
#define clib_smp_atomic_add(addr,increment) __sync_fetch_and_add(addr,increment)

#if defined (i386) || defined (__x86_64__)
#define clib_smp_pause() do { asm volatile ("pause"); } while (0)
#endif

#ifndef clib_smp_pause
#define clib_smp_pause() do { } while (0)
#endif

/* Run `body' with the thread-safe global heap selected, then restore the
   caller's per-cpu heap. */
#define clib_exec_on_global_heap(body)                                  \
do {                                                                    \
  void * __clib_exec_on_global_heap_saved_heap;                         \
  __clib_exec_on_global_heap_saved_heap                                 \
    = clib_mem_set_heap (clib_smp_main.global_heap);                    \
  do { body; } while (0);                                               \
  clib_mem_set_heap (__clib_exec_on_global_heap_saved_heap);            \
} while (0)

uword os_smp_bootstrap (uword n_cpus,
                        void * bootstrap_function,
                        uword bootstrap_function_arg);
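/* A minimal usage sketch for the primitives above, not taken from the header.
   The names n_events, busy, take_flag, release_flag and alloc_on_global_heap
   are hypothetical; clib_mem_alloc is the ordinary clib allocator.
   clib_smp_compare_and_swap returns the value previously stored at the
   address (the __sync_val_compare_and_swap convention), so a swap succeeded
   only if the expected old value comes back; clib_smp_atomic_add returns the
   pre-increment value. */

static uword n_events;          /* hypothetical shared counter */
static uword busy;              /* hypothetical flag: 0 = free, 1 = taken */

static void
record_event (void)
{
  /* Returns the value of n_events before the add; ignored here. */
  (void) clib_smp_atomic_add (&n_events, 1);
}

static void
take_flag (void)
{
  /* Spin until we observe 0 and install 1; clib_smp_pause tells the cpu
     we are in a busy-wait loop. */
  while (clib_smp_compare_and_swap (&busy, /* new */ 1, /* old */ 0) != 0)
    clib_smp_pause ();
}

static void
release_flag (void)
{
  /* Atomic store of 0.  clib_smp_swap is __sync_lock_test_and_set, which
     only promises acquire semantics; on x86 the underlying xchg is a full
     barrier, so it also works as a release there. */
  clib_smp_swap (&busy, 0);
}

static void *
alloc_on_global_heap (uword n_bytes)
{
  void * p = 0;
  /* Run the allocation on the heap shared by all cpus, then restore the
     caller's heap. */
  clib_exec_on_global_heap (p = clib_mem_alloc (n_bytes));
  return p;
}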
/* Other symbols declared in this header (index entries, cleaned up): */

#define clib_smp_compare_and_swap(addr, new, old)          /* definition above */
always_inline void * clib_smp_stack_top_for_cpu (clib_smp_main_t * m, uword cpu);
volatile clib_smp_lock_wait_type_t wait_type;              /* struct field: a lock waiter's current wait state */
clib_smp_lock_header_t header;                             /* struct field: lock state header */
always_inline void * clib_smp_vm_base_for_cpu (clib_smp_main_t * m, uword cpu);
typedef u8 clib_smp_quarter_word_t;
void clib_smp_lock_init (clib_smp_lock_t ** l);
always_inline void clib_smp_lock_for_reader (clib_smp_lock_t * l);
always_inline void clib_smp_lock_for_writer (clib_smp_lock_t * l);
clib_smp_per_cpu_main_t * per_cpu_mains;                   /* clib_smp_main_t field */
always_inline void clib_smp_unlock (clib_smp_lock_t * l);
always_inline clib_smp_lock_header_t clib_smp_lock_set_header (clib_smp_lock_t * l, clib_smp_lock_header_t new_hdr, clib_smp_lock_header_t old);
clib_smp_lock_wait_type_t                                  /* enum type: lock wait states */
#define ASSERT_AND_PANIC(truth)
always_inline uword clib_smp_lock_header_is_equal (clib_smp_lock_header_t h0, clib_smp_lock_header_t h1);
always_inline void clib_smp_unlock_for_writer (clib_smp_lock_t * l);
always_inline uword os_get_cpu_number_inline (void);
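/* The per-cpu helpers above all rely on one layout: each cpu owns a
   2^log2_n_per_cpu_vm_bytes slice of one contiguous VM region, with its
   stack at the top of its slice, so the current cpu number can be recovered
   from any stack address (compare the return statement excerpted at the top).
   A sketch of that arithmetic follows; vm_base and n_cpus are assumed
   clib_smp_main_t fields that this index does not list. */

static void *
example_vm_base_for_cpu (clib_smp_main_t * m, uword cpu)
{
  /* cpu 0 starts at vm_base, cpu 1 one slice later, and so on. */
  return (u8 *) m->vm_base + ((uword) cpu << m->log2_n_per_cpu_vm_bytes);
}

static uword
example_cpu_from_stack_address (clib_smp_main_t * m, void * sp)
{
  /* Invert the layout: which slice does this stack address fall in? */
  uword n = ((uword) sp - (uword) m->vm_base) >> m->log2_n_per_cpu_vm_bytes;
  return n < m->n_cpus ? n : 0;   /* out of range maps to cpu 0 */
}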
clib_smp_main_t clib_smp_main;                             /* global SMP state */
always_inline void os_sched_yield (void);
always_inline void clib_smp_unlock_inline (clib_smp_lock_t * l, clib_smp_lock_type_t type);
always_inline void clib_smp_unlock_for_reader (clib_smp_lock_t * l);
always_inline void clib_smp_lock_inline (clib_smp_lock_t * l, clib_smp_lock_type_t type);
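/* The two _inline entries above take an explicit clib_smp_lock_type_t, which
   suggests the named variants in this index (clib_smp_lock,
   clib_smp_lock_for_reader/_for_writer and the matching unlocks) are thin
   wrappers that fix the type.  A sketch of that presumed relationship; the
   CLIB_SMP_LOCK_TYPE_* enumerator names are assumptions, not taken from this
   index. */

always_inline void
example_lock_for_reader (clib_smp_lock_t * l)
{ clib_smp_lock_inline (l, CLIB_SMP_LOCK_TYPE_READER); }

always_inline void
example_unlock_for_writer (clib_smp_lock_t * l)
{ clib_smp_unlock_inline (l, CLIB_SMP_LOCK_TYPE_WRITER); }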
u8 log2_n_per_cpu_vm_bytes;                                /* clib_smp_main_t field */
u8 log2_n_per_cpu_stack_bytes;                             /* clib_smp_main_t field */
void clib_smp_unlock_slow_path (clib_smp_lock_t * l, uword my_cpu, clib_smp_lock_header_t h, clib_smp_lock_type_t type);
void clib_smp_lock_free (clib_smp_lock_t ** l);
uword os_smp_bootstrap (uword n_cpus, void * bootstrap_function, uword bootstrap_function_arg);
void clib_smp_lock_slow_path (clib_smp_lock_t * l, uword my_cpu, clib_smp_lock_header_t h, clib_smp_lock_type_t type);
#define CLIB_CACHE_LINE_BYTES
always_inline void clib_smp_lock (clib_smp_lock_t * l);
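/* Put together, the lock API in this index supports a straightforward
   reader/writer pattern: clib_smp_lock_init allocates and initializes the
   lock (hence the pointer-to-pointer argument), readers take the shared
   side, writers the exclusive side, and clib_smp_lock_free releases it.
   Usage sketch; my_table_t and its fields are hypothetical. */

typedef struct {
  clib_smp_lock_t * lock;
  uword n_entries;
} my_table_t;

static void
my_table_init (my_table_t * t)
{
  clib_smp_lock_init (&t->lock);
  t->n_entries = 0;
}

static uword
my_table_size (my_table_t * t)
{
  uword n;
  clib_smp_lock_for_reader (t->lock);    /* shared: concurrent readers allowed */
  n = t->n_entries;
  clib_smp_unlock_for_reader (t->lock);
  return n;
}

static void
my_table_add (my_table_t * t)
{
  clib_smp_lock_for_writer (t->lock);    /* exclusive: writers serialize */
  t->n_entries += 1;
  clib_smp_unlock_for_writer (t->lock);
}

static void
my_table_free (my_table_t * t)
{
  clib_smp_lock_free (&t->lock);
}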