#ifndef included_vlib_buffer_funcs_h
#define included_vlib_buffer_funcs_h

#undef always_inline            /* dpdk and clib use conflicting always_inline macros. */
#include <rte_config.h>

#if CLIB_DEBUG > 0
#define always_inline static inline
#else
#define always_inline static inline __attribute__ ((__always_inline__))
#endif

/* from vlib_buffer_contents(): */
uword content_len = 0;

/* from vlib_get_buffer(): buffer indices are scaled by the cache-line size */
((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES
#define vlib_prefetch_buffer_with_index(vm,bi,type)     \
  do {                                                  \
    vlib_buffer_t * _b = vlib_get_buffer (vm, bi);      \
    vlib_prefetch_buffer_header (_b, type);             \
  } while (0)

#define vlib_buffer_foreach_allocated(vm,bi,body)               \
do {                                                            \
  vlib_main_t * _vmain = (vm);                                  \
  vlib_buffer_main_t * _bmain = &_vmain->buffer_main;           \
  hash_pair_t * _vbpair;                                        \
  hash_foreach_pair(_vbpair, _bmain->buffer_known_hash, ({      \
    if (VLIB_BUFFER_KNOWN_ALLOCATED == _vbpair->value[0]) {     \
      (bi) = _vbpair->key;                                      \
      body;                                                     \
    }                                                           \
  }));                                                          \
} while (0)

/* parameter tail from the vlib_validate_buffers() declaration: */
    uword next_buffer_stride,
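A minimal sketch of how the two macros above might be used together to walk every buffer the allocator currently knows to be allocated; the function name is hypothetical and the LOAD prefetch type is assumed from clib's prefetch macros.

/* Hypothetical debug walk over all known-allocated buffers (sketch only). */
static u32
count_allocated_bytes (vlib_main_t * vm)
{
  u32 bi, total = 0;
  vlib_buffer_foreach_allocated (vm, bi, ({
    vlib_prefetch_buffer_with_index (vm, bi, LOAD);
    total += vlib_buffer_index_length_in_chain (vm, bi);
  }));
  return total;
}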
/* tail of a free-list related declaration: */
    u32 free_list_index);

/* from vlib_physmem_alloc_aligned(): report an allocation failure */
*error = clib_error_return (0, "failed to allocate %wd bytes of I/O memory",
                            n_bytes);

/* tail of the vlib_buffer_add_data() declaration: */
    void * data, u32 n_data_bytes);
/* from vlib_buffer_chain_init(): reset the underlying DPDK mbuf as well */
struct rte_mbuf * mb = rte_mbuf_from_vlib_buffer (first);
rte_pktmbuf_reset (mb);
/* from vlib_buffer_chain_buffer(): keep the DPDK mbuf chain in sync */
struct rte_mbuf * mb;
mb = rte_mbuf_from_vlib_buffer (first);
/* ... */
mb = rte_mbuf_from_vlib_buffer (last);
mb->next = rte_mbuf_from_vlib_buffer (next_buffer);
mb = rte_mbuf_from_vlib_buffer (next_buffer);
/* ... */
/* from vlib_buffer_chain_increase_length(): adjust the mbuf lengths too */
struct rte_mbuf * mb_first = rte_mbuf_from_vlib_buffer (first);
struct rte_mbuf * mb_last = rte_mbuf_from_vlib_buffer (last);
mb_first->pkt_len += len;
mb_last->data_len += len;
/* parameter tails from vlib_buffer_chain_append_data(),
   vlib_buffer_chain_append_data_with_alloc() and vlib_packet_template_init();
   the full prototypes appear in the index below. */
    void * data, u16 data_len)
    void * data, u16 data_len);
    uword n_packet_data_bytes,
    uword min_n_buffers_each_physmem_alloc,
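As a rough illustration of the chain helpers (vlib_buffer_chain_init and vlib_buffer_chain_append_data_with_alloc), here is a sketch of building a multi-buffer packet; the function name is hypothetical and VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX is an assumed constant, not something defined in this header.

/* Sketch: append payload to a buffer chain, allocating as needed. */
static void
build_chain_example (vlib_main_t * vm, u32 first_bi,
                     void * payload, u16 payload_len)
{
  vlib_buffer_t * first = vlib_get_buffer (vm, first_bi);
  vlib_buffer_t * last = first;

  vlib_buffer_chain_init (first);

  /* VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX is assumed here. */
  vlib_buffer_chain_append_data_with_alloc (vm,
                                            VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX,
                                            first, &last,
                                            payload, payload_len);
}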
/* from the buffer serialize helpers: sum the bytes in each queued buffer chain */
n += vlib_buffer_index_length_in_chain (vm, f[0]);
/* from vlib_buffer_init_for_free_list(): copy the template into the destination */
dst->i[0] = src->i[0];
if (1 * sizeof (dst->i[0]) < 16)
  dst->i[1] = src->i[1];
if (2 * sizeof (dst->i[0]) < 16)
  dst->i[2] = src->i[2];

/* make sure it really worked */
#define _(f) ASSERT (dst->b.f == src->b.f)

/* from vlib_buffer_init_two_for_free_list(): same copy into two destinations */
dst0->i[0] = dst1->i[0] = src->i[0];
if (1 * sizeof (dst0->i[0]) < 16)
  dst0->i[1] = dst1->i[1] = src->i[1];
if (2 * sizeof (dst0->i[0]) < 16)
  dst0->i[2] = dst1->i[2] = src->i[2];

#define _(f) ASSERT (dst0->b.f == src->b.f && dst1->b.f == src->b.f)
/* from vlib_validate_buffer_in_use(): */
while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
  ;

p = hash_get (vlib_buffer_state_validation_hash, b);

/* if we do not yet know about b, record the expected state */
if (! p)
  hash_set (vlib_buffer_state_validation_hash, b, expected);
else if (p[0] != expected)
  clib_warning (/* ... */
                p[0] ? "busy" : "free",
                expected ? "busy" : "free");

*vlib_buffer_state_validation_lock = 0;
/* from vlib_validate_buffer_set_in_use(): */
while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
  ;
hash_set (vlib_buffer_state_validation_hash, b, expected);
*vlib_buffer_state_validation_lock = 0;
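A possible way these debug-only validators might be called around an allocation path; the surrounding logic and the busy==1 / free==0 convention are assumptions for illustration.

/* Sketch: mark a freshly allocated buffer busy, then check it before freeing. */
static void
validation_example (vlib_main_t * vm, u32 bi)
{
  vlib_buffer_t * b = vlib_get_buffer (vm, bi);

  /* Assumed convention: non-zero == busy, zero == free. */
  vlib_validate_buffer_set_in_use (b, 1);

  /* ... use the buffer ... */

  vlib_validate_buffer_in_use (b, 1);   /* must still be busy here */
  vlib_buffer_free_one (vm, bi);
}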
always_inline uword round_pow2(uword x, uword pow2)
#define hash_set(h, key, value)
u32 vlib_buffer_get_or_create_free_list(vlib_main_t *vm, u32 n_data_bytes, char *fmt,...)
uword vlib_buffer_length_in_chain_slow_path(vlib_main_t *vm, vlib_buffer_t *b_first)
format_function_t format_vlib_buffer_contents
u32 free_list_index
Buffer free list that this buffer was allocated from and will be freed to.
vlib_physmem_main_t physmem_main
format_function_t format_vlib_buffer
u32 vlib_buffer_alloc_from_free_list(vlib_main_t *vm, u32 *buffers, u32 n_buffers, u32 free_list_index)
Allocate buffers from a specific free list into the supplied array.
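A minimal sketch of creating a named free list and drawing buffers from it; the element size and list name are arbitrary examples. Because each buffer records its free_list_index, vlib_buffer_free returns them to the list they came from.

/* Sketch: create a free list sized for 2 KB buffers, then allocate from it. */
static void
free_list_example (vlib_main_t * vm)
{
  u32 buffers[32];
  u32 fl_index = vlib_buffer_create_free_list (vm, 2048, "example-2k");
  u32 n = vlib_buffer_alloc_from_free_list (vm, buffers, 32, fl_index);

  /* n may be less than requested if buffer memory is exhausted. */
  vlib_buffer_free (vm, buffers, n);
}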
static void vlib_validate_buffer_in_use(vlib_buffer_t *b, u32 expected)
vlib_buffer_t buffer_init_template
struct vlib_main_t * vlib_main
always_inline void vlib_packet_template_free(vlib_main_t *vm, vlib_packet_template_t *t)
#define CLIB_LOG2_CACHE_LINE_BYTES
always_inline void vlib_physmem_free(vlib_main_t *vm, void *mem)
void *(* os_physmem_alloc_aligned)(vlib_physmem_main_t *pm, uword n_bytes, uword alignment)
vlib_buffer_main_t * buffer_main
always_inline vlib_buffer_t * vlib_buffer_chain_buffer(vlib_main_t *vm, vlib_buffer_t *first, vlib_buffer_t *last, u32 next_bi)
#define STRUCT_OFFSET_OF(t, f)
uword * vlib_buffer_state_validation_hash
always_inline void * vlib_buffer_get_current(vlib_buffer_t *b)
Get pointer to current data to process.
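For example, a node might strip an encapsulation header by adjusting current_data and current_length before looking at the payload. A sketch; the 14-byte header length and function name are assumptions.

/* Sketch: skip a 14-byte header and return a pointer to the inner payload. */
static void *
skip_l2_header (vlib_buffer_t * b)
{
  b->current_data += 14;          /* advance past the assumed header */
  b->current_length -= 14;        /* shrink the remaining data accordingly */
  return vlib_buffer_get_current (b);
}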
always_inline u32 vlib_buffer_free_list_buffer_size(vlib_main_t *vm, u32 free_list_index)
struct vlib_serialize_buffer_main_t::@27::@30 rx
clib_error_t * vlib_buffer_pool_create(vlib_main_t *vm, unsigned num_mbufs, unsigned socket_id)
always_inline void vlib_buffer_init_two_for_free_list(vlib_buffer_t *_dst0, vlib_buffer_t *_dst1, vlib_buffer_free_list_t *fl)
void vlib_packet_template_init(vlib_main_t *vm, vlib_packet_template_t *t, void *packet_data, uword n_packet_data_bytes, uword min_n_buffers_each_physmem_alloc, char *fmt,...)
i16 current_data
Signed offset in data[] / pre_data[] that we are currently processing.
always_inline heap_elt_t * last(heap_header_t *h)
void vlib_aligned_memcpy(void *_dst, void *_src, int n_bytes)
always_inline uword vlib_buffer_length_in_chain(vlib_main_t *vm, vlib_buffer_t *b)
Get length in bytes of the buffer chain.
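When VLIB_BUFFER_TOTAL_LENGTH_VALID is set on the first buffer, the chain length can be read directly from the header fields. A sketch of that fast path, consistent with the fields listed here; the exact predicate in the header may differ.

/* Sketch: fast-path chain length when the cached total is valid. */
static uword
chain_length_fast (vlib_main_t * vm, vlib_buffer_t * b)
{
  if (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID)
    return b->current_length + b->total_length_not_including_first_buffer;
  /* otherwise fall back to walking the chain */
  return vlib_buffer_length_in_chain_slow_path (vm, b);
}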
u32 * vlib_buffer_state_validation_lock
#define clib_warning(format, args...)
always_inline vlib_buffer_free_list_t * vlib_buffer_get_free_list(vlib_main_t *vm, u32 free_list_index)
static uword pointer_to_uword(const void *p)
#define VLIB_BUFFER_NEXT_PRESENT
always_inline heap_elt_t * first(heap_header_t *h)
always_inline vlib_buffer_known_state_t vlib_buffer_is_known(vlib_main_t *vm, u32 buffer_index)
#define VLIB_BUFFER_PRE_DATA_SIZE
always_inline void vlib_buffer_free_one(vlib_main_t *vm, u32 buffer_index)
Free one buffer; shorthand to free a single buffer chain.
#define pool_elt_at_index(p, i)
always_inline void vlib_buffer_chain_increase_length(vlib_buffer_t *first, vlib_buffer_t *last, i32 len)
u16 current_length
Number of bytes between current data and the end of this buffer.
u8 * vlib_validate_buffer(vlib_main_t *vm, u32 buffer_index, uword follow_chain)
always_inline void * vlib_physmem_alloc(vlib_main_t *vm, clib_error_t **error, uword n_bytes)
format_function_t format_vlib_buffer_and_data
always_inline vlib_buffer_t * vlib_get_next_buffer(vlib_main_t *vm, vlib_buffer_t *b)
Get the next buffer in the buffer linked list, or zero at the end of the list.
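A sketch of walking an entire chain with this helper, relying on the VLIB_BUFFER_NEXT_PRESENT flag to detect the end of the list; the counting function itself is hypothetical.

/* Sketch: count the buffers in a chain by following next_buffer links. */
static u32
count_chain_buffers (vlib_main_t * vm, vlib_buffer_t * b)
{
  u32 n = 1;
  while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
    {
      b = vlib_get_next_buffer (vm, b);
      n++;
    }
  return n;
}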
uword os_get_cpu_number(void)
always_inline uword vlib_buffer_contents(vlib_main_t *vm, u32 buffer_index, u8 *contents)
Copy buffer contents to memory.
always_inline void * clib_mem_set_heap(void *heap)
#define clib_fifo_foreach(v, f, body)
#define uword_to_pointer(u, type)
always_inline u64 vlib_physmem_offset_to_physical(vlib_physmem_main_t *pm, uword o)
serialize_stream_t stream
u8 * vlib_validate_buffers(vlib_main_t *vm, u32 *buffers, uword next_buffer_stride, uword n_buffers, vlib_buffer_known_state_t known_state, uword follow_chain)
void vlib_buffer_chain_validate(vlib_main_t *vm, vlib_buffer_t *first)
#define vec_free(V)
Free vector's memory (no header).
#define clib_memcpy(a, b, c)
#define VLIB_BUFFER_TOTAL_LENGTH_VALID
vlib_main_t vlib_global_main
vlib_buffer_known_state_t
uword data_function_opaque
vlib_copy_unit_t i[sizeof(vlib_buffer_t)/sizeof(vlib_copy_unit_t)]
static void vlib_validate_buffer_set_in_use(vlib_buffer_t *b, u32 expected)
always_inline uword vlib_buffer_index_length_in_chain(vlib_main_t *vm, u32 bi)
Get the length in bytes of the buffer chain identified by a buffer index.
void vlib_buffer_free(vlib_main_t *vm, u32 *buffers, u32 n_buffers)
Free buffers; frees the entire buffer chain for each buffer.
vhost_vring_state_t state
u32 next_buffer
Next buffer for this linked-list of buffers.
u16 vlib_buffer_chain_append_data_with_alloc(vlib_main_t *vm, u32 free_list_index, vlib_buffer_t *first, vlib_buffer_t **last, void *data, u16 data_len)
u32 vlib_buffer_add_data(vlib_main_t *vm, u32 free_list_index, u32 buffer_index, void *data, u32 n_data_bytes)
u32 vlib_buffer_alloc(vlib_main_t *vm, u32 *buffers, u32 n_buffers)
Allocate buffers into the supplied array.
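As a usage sketch, a caller typically allocates a batch into a local array, checks how many buffers were actually delivered, and later returns them with vlib_buffer_free; the function name is hypothetical.

/* Sketch: allocate up to 16 default buffers and free whatever we received. */
static void
alloc_free_example (vlib_main_t * vm)
{
  u32 bis[16];
  u32 n_got = vlib_buffer_alloc (vm, bis, 16);

  /* n_got can be smaller than 16 if the allocator is short of buffers. */
  vlib_buffer_free (vm, bis, n_got);
}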
always_inline u64 vlib_physmem_virtual_to_physical(vlib_main_t *vm, void *mem)
always_inline u64 vlib_get_buffer_data_physical_address(vlib_main_t *vm, u32 buffer_index)
void * vlib_buffer_state_heap
always_inline void vlib_buffer_init_for_free_list(vlib_buffer_t *_dst, vlib_buffer_free_list_t *fl)
void vlib_buffer_delete_free_list(vlib_main_t *vm, u32 free_list_index)
u32 total_length_not_including_first_buffer
Only valid for first buffer in chain.
always_inline void vlib_buffer_set_known_state(vlib_main_t *vm, u32 buffer_index, vlib_buffer_known_state_t state)
vlib_buffer_free_list_t * buffer_free_list_pool
always_inline void vlib_copy_buffers(u32 *dst, u32 *src, u32 n)
uword * buffer_known_hash
always_inline void * vlib_physmem_alloc_aligned(vlib_main_t *vm, clib_error_t **error, uword n_bytes, uword alignment)
void * vlib_packet_template_get_packet(vlib_main_t *vm, vlib_packet_template_t *t, u32 *bi_result)
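A rough sketch of the packet-template flow: initialize a template once from a canned header, then stamp out packets from it. The header contents, buffer count, and template name are placeholders.

/* Sketch: build a template from canned header bytes, then clone a packet. */
static void
template_example (vlib_main_t * vm, vlib_packet_template_t * t,
                  u8 * header, u32 header_len)
{
  u32 bi;
  void * p;

  vlib_packet_template_init (vm, t, header, header_len,
                             /* min_n_buffers_each_physmem_alloc */ 16,
                             "example template");

  p = vlib_packet_template_get_packet (vm, t, &bi);
  if (p == 0)
    return;                     /* allocation failed */
  /* bi now names a buffer pre-filled with the template data. */
}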
always_inline uword vlib_physmem_offset_of(vlib_physmem_main_t *pm, void *p)
vlib_physmem_region_t virtual
always_inline u32 unserialize_vlib_buffer_n_bytes(serialize_main_t *m)
always_inline u32 vlib_buffer_round_size(u32 size)
always_inline f64 vlib_time_now(vlib_main_t *vm)
always_inline u16 vlib_buffer_chain_append_data(vlib_main_t *vm, u32 free_list_index, vlib_buffer_t *first, vlib_buffer_t *last, void *data, u16 data_len)
#define CLIB_MEMORY_BARRIER()
always_inline void * vlib_physmem_at_offset(vlib_physmem_main_t *pm, uword offset)
void vlib_packet_template_get_packet_helper(vlib_main_t *vm, vlib_packet_template_t *t)
#define clib_error_return(e, args...)
void vlib_buffer_free_no_next(vlib_main_t *vm, u32 *buffers, u32 n_buffers)
Free buffers; does not free the buffer chain for each buffer.
#define CLIB_CACHE_LINE_BYTES
u32 flags
Buffer flags; VLIB_BUFFER_IS_TRACED: trace this buffer.
void(* os_physmem_free)(void *x)
always_inline void vlib_buffer_chain_init(vlib_buffer_t *first)
u32 min_n_buffers_each_physmem_alloc
always_inline vlib_buffer_t * vlib_get_buffer(vlib_main_t *vm, u32 buffer_index)
Translate buffer index into buffer pointer.
always_inline u32 vlib_get_buffer_index(vlib_main_t *vm, void *p)
Translate buffer pointer into buffer index.
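These two translations are inverses; a short sketch of round-tripping between a buffer index and its pointer (the function name is hypothetical).

/* Sketch: translate an index to a pointer and back; the two must agree. */
static void
index_round_trip (vlib_main_t * vm, u32 bi)
{
  vlib_buffer_t * b = vlib_get_buffer (vm, bi);
  ASSERT (vlib_get_buffer_index (vm, b) == bi);
}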
u32 vlib_buffer_create_free_list(vlib_main_t *vm, u32 n_data_bytes, char *fmt,...)