40 #ifndef included_vlib_buffer_funcs_h 41 #define included_vlib_buffer_funcs_h 79 #ifdef CLIB_HAVE_VEC256 85 u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
86 u64x4 b1 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi + 4));
97 #ifdef CLIB_HAVE_VEC256 98 u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
101 #elif defined (CLIB_HAVE_VEC128) 103 u32x4 bi4 = u32x4_load_unaligned (bi);
105 #if defined (__aarch64__) 176 #ifdef CLIB_HAVE_VEC256 177 u32x8 mask = { 0, 2, 4, 6, 1, 3, 5, 7 };
183 u64x4 v0 = u64x4_load_unaligned (b);
184 u64x4 v1 = u64x4_load_unaligned (b + 4);
199 u32x4_store_unaligned (u32x8_extract_lo (v2), bi);
200 u32x4_store_unaligned (u32x8_extract_lo (v3), bi + 4);
249 return (b->
flags & VLIB_BUFFER_NEXT_PRESENT
299 uword content_len = 0;
309 if (!(b->
flags & VLIB_BUFFER_NEXT_PRESENT))
337 #define vlib_prefetch_buffer_with_index(vm,bi,type) \ 339 vlib_buffer_t * _b = vlib_get_buffer (vm, bi); \ 340 vlib_prefetch_buffer_header (_b, type); \ 351 #define vlib_buffer_foreach_allocated(vm,bi,body) \ 353 vlib_main_t * _vmain = (vm); \ 354 vlib_buffer_main_t * _bmain = &_vmain->buffer_main; \ 355 hash_pair_t * _vbpair; \ 356 hash_foreach_pair(_vbpair, _bmain->buffer_known_hash, ({ \ 357 if (VLIB_BUFFER_KNOWN_ALLOCATED == _vbpair->value[0]) { \ 358 (bi) = _vbpair->key; \ 428 b->
flags |= VLIB_BUFFER_NON_DEFAULT_FREELIST;
432 b->
flags &= ~VLIB_BUFFER_NON_DEFAULT_FREELIST;
468 n_buffers =
clib_min (len, n_buffers);
472 src = fl->
buffers + len - n_buffers;
474 _vec_len (fl->
buffers) -= n_buffers;
483 src = fl->
buffers + len - n_buffers;
485 _vec_len (fl->
buffers) -= n_buffers;
521 u32 ring_size,
u32 n_buffers)
525 ASSERT (n_buffers <= ring_size);
603 u32 ring_size,
u32 n_buffers)
605 ASSERT (n_buffers <= ring_size);
628 u32 ring_size,
u32 n_buffers)
630 ASSERT (n_buffers <= ring_size);
662 uword n_unaligned_buffers);
724 u32 buffer_index,
void *data,
u32 n_data_bytes);
731 uword n_alloc, n_buffers = 1;
732 u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
736 while (s->
flags & VLIB_BUFFER_NEXT_PRESENT)
741 u32 new_buffers[n_buffers];
767 for (i = 1; i < n_buffers; i++)
798 u16 n_buffers,
u16 head_end_offset)
805 ASSERT (n_buffers <= 256);
809 buffers[0] = src_buffer;
810 for (i = 1; i < n_buffers; i++)
824 buffers[0] = src_buffer;
832 for (i = 0; i < n_buffers; i++)
847 d->
flags = s->
flags | VLIB_BUFFER_NEXT_PRESENT;
848 d->
flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
857 while (s->
flags & VLIB_BUFFER_NEXT_PRESENT)
880 u16 n_buffers,
u16 head_end_offset)
885 while (n_buffers > 256)
891 (buffers + n_cloned),
892 256, head_end_offset);
897 n_buffers, head_end_offset);
913 ASSERT ((head->
flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
917 head->
flags |= VLIB_BUFFER_NEXT_PRESENT;
918 head->
flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
919 head->
flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
920 head->
flags |= (tail->
flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
928 if (tail->
flags & VLIB_BUFFER_NEXT_PRESENT)
941 first->
flags &= ~VLIB_BUFFER_NEXT_PRESENT;
942 first->
flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
953 last->
flags |= VLIB_BUFFER_NEXT_PRESENT;
955 next_buffer->
flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1030 uword n_packet_data_bytes,
1031 uword min_n_buffers_each_alloc,
1058 while (b->
flags & VLIB_BUFFER_NEXT_PRESENT)
1067 n += vlib_buffer_index_length_in_chain (vm, f[0]);
1103 #define _(f) ASSERT (dst->f == src->f); 1120 u32 buffer_index,
u8 do_init)
1168 if (p[0] != expected)
1181 p[0] ?
"busy" :
"free", expected ?
"busy" :
"free");
1211 #define VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE (256) 1227 !(first->
flags & VLIB_BUFFER_NEXT_PRESENT))
1250 if (first->
flags & VLIB_BUFFER_TOTAL_LENGTH_VALID)
1257 if (second->
flags & VLIB_BUFFER_NEXT_PRESENT)
1263 first->
flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1265 second->
flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1269 (first->
flags & VLIB_BUFFER_NEXT_PRESENT));
1284 u32 bytes_left = 0, data_size;
1285 u16 src_left, dst_left, n_buffers = 1;
1296 while (b->
flags & VLIB_BUFFER_NEXT_PRESENT)
1307 if (is_cloned && bytes_left >= dst_left)
1310 u32 space_needed = bytes_left - dst_left;
1320 while (len < space_needed)
1328 b->
flags = VLIB_BUFFER_NEXT_PRESENT;
1335 to_free = first->next_buffer;
1336 first->next_buffer = tail;
1341 src_left = sb->current_length;
1356 dst_left = data_size;
1360 while (src_left == 0)
1362 ASSERT (sb->flags & VLIB_BUFFER_NEXT_PRESENT);
1364 src_left = sb->current_length;
1368 bytes_to_copy =
clib_min (dst_left, src_left);
1373 bytes_to_copy =
clib_min (bytes_to_copy, sp - dp);
1378 src_left -= bytes_to_copy;
1379 dst_left -= bytes_to_copy;
1380 dp += bytes_to_copy;
1381 sp += bytes_to_copy;
1382 bytes_left -= bytes_to_copy;
1388 if (is_cloned && to_free)
1392 if (db->
flags & VLIB_BUFFER_NEXT_PRESENT)
1394 db->
flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1397 while (b->
flags & VLIB_BUFFER_NEXT_PRESENT)
1404 first->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
vlib_physmem_region_index_t physmem_region
vlib_main_t vlib_global_main
#define hash_set(h, key, value)
static u8 * vlib_buffer_get_tail(vlib_buffer_t *b)
Get pointer to the end of buffer's data.
static_always_inline void clib_spinlock_unlock(clib_spinlock_t *p)
static_always_inline void clib_spinlock_lock(clib_spinlock_t *p)
uword vlib_buffer_length_in_chain_slow_path(vlib_main_t *vm, vlib_buffer_t *b_first)
static void vlib_buffer_free(vlib_main_t *vm, u32 *buffers, u32 n_buffers)
Free buffers. Frees the entire buffer chain for each buffer.
#define STRUCT_MARK_PTR(v, f)
format_function_t format_vlib_buffer_contents
u32 opaque[10]
Opaque data used by sub-graphs for their own purposes.
static_always_inline u64x2 u32x4_extend_to_u64x2_high(u32x4 v)
format_function_t format_vlib_buffer
static void vlib_buffer_chain_compress(vlib_main_t *vm, vlib_buffer_t *first, u32 **discard_vector)
compress buffer chain in a way where the first buffer is at least VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE long
vlib_buffer_callbacks_t cb
static void vlib_buffer_attach_clone(vlib_main_t *vm, vlib_buffer_t *head, vlib_buffer_t *tail)
Attach cloned tail to the buffer.
static void vlib_validate_buffer_in_use(vlib_buffer_t *b, u32 expected)
static vlib_buffer_t * vlib_buffer_chain_buffer(vlib_main_t *vm, vlib_buffer_t *first, vlib_buffer_t *last, u32 next_bi)
static f64 vlib_time_now(vlib_main_t *vm)
static void vlib_buffer_chain_increase_length(vlib_buffer_t *first, vlib_buffer_t *last, i32 len)
vlib_buffer_t buffer_init_template
struct vlib_main_t * vlib_main
#define CLIB_LOG2_CACHE_LINE_BYTES
#define vec_add1(V, E)
Add 1 element to end of vector (unspecified alignment).
u8 buffer_pool_index
index of buffer pool this buffer belongs.
foreach_avx2_vec256i static foreach_avx2_vec256u u32x8 u32x8_permute(u32x8 v, u32x8 idx)
#define STRUCT_OFFSET_OF(t, f)
static_always_inline void vlib_get_buffers_with_offset(vlib_main_t *vm, u32 *bi, void **b, int count, i32 offset)
Translate array of buffer indices into buffer pointers with offset.
static void vlib_buffer_delete_free_list(vlib_main_t *vm, vlib_buffer_free_list_index_t free_list_index)
u16 vlib_buffer_chain_append_data_with_alloc(vlib_main_t *vm, vlib_buffer_free_list_index_t free_list_index, vlib_buffer_t *first, vlib_buffer_t **last, void *data, u16 data_len)
uword * vlib_buffer_state_validation_hash
static uword vlib_buffer_length_in_chain(vlib_main_t *vm, vlib_buffer_t *b)
Get length in bytes of the buffer chain.
vlib_buffer_free_list_index_t index
static u64 vlib_get_buffer_data_physical_address(vlib_main_t *vm, u32 buffer_index)
static vlib_buffer_t * vlib_buffer_copy(vlib_main_t *vm, vlib_buffer_t *b)
vlib_buffer_free_list_index_t free_list_index
static_always_inline void vlib_get_buffer_indices(vlib_main_t *vm, vlib_buffer_t **b, u32 *bi, uword count)
Translate array of buffer pointers into buffer indices.
#define vec_add1_aligned(V, E, A)
Add 1 element to end of vector (alignment specified).
static u16 vlib_buffer_clone_256(vlib_main_t *vm, u32 src_buffer, u32 *buffers, u16 n_buffers, u16 head_end_offset)
Create a maximum of 256 clones of buffer and store them in the supplied array.
u32 vlib_buffer_add_data(vlib_main_t *vm, vlib_buffer_free_list_index_t free_list_index, u32 buffer_index, void *data, u32 n_data_bytes)
i16 current_data
signed offset in data[], pre_data[] that we are currently processing.
#define static_always_inline
void vlib_aligned_memcpy(void *_dst, void *_src, int n_bytes)
vlib_buffer_free_no_next_cb_t * vlib_buffer_free_no_next_cb
#define vec_elt_at_index(v, i)
Get vector value at index i checking that i is in bounds.
#define VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE
minimum data size of first buffer in a buffer chain
u32 * vlib_buffer_state_validation_lock
vhost_vring_state_t state
static void vlib_copy_buffers(u32 *dst, u32 *src, u32 n)
static u32 vlib_get_buffer_index(vlib_main_t *vm, void *p)
Translate buffer pointer into buffer index.
epu8_epi32 epu16_epi32 u64x2
static void vlib_buffer_free_from_ring(vlib_main_t *vm, u32 *ring, u32 start, u32 ring_size, u32 n_buffers)
Free buffers from ring.
static heap_elt_t * first(heap_header_t *h)
static u32 vlib_buffer_chain_linearize(vlib_main_t *vm, vlib_buffer_t *b)
#define pool_elt_at_index(p, i)
Returns pointer to element at given index.
u32 min_n_buffers_each_alloc
u16 current_length
Number of bytes between current data and the end of this buffer.
u8 * vlib_validate_buffer(vlib_main_t *vm, u32 buffer_index, uword follow_chain)
format_function_t format_vlib_buffer_and_data
void(* vlib_buffer_delete_free_list_cb)(struct vlib_main_t *vm, vlib_buffer_free_list_index_t free_list_index)
static void * vlib_buffer_get_current(vlib_buffer_t *b)
Get pointer to current data to process.
#define VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX
static_always_inline u32x4 u32x4_shuffle(u32x4 v, const int a, const int b, const int c, const int d)
vlib_buffer_pool_t * buffer_pools
vlib_buffer_free_list_t * buffer_free_list_pool
static_always_inline void vlib_get_buffer_indices_with_offset(vlib_main_t *vm, void **b, u32 *bi, uword count, i32 offset)
Translate array of buffer pointers into buffer indices with offset.
struct vlib_serialize_buffer_main_t::@32::@35 rx
#define clib_fifo_foreach(v, f, body)
static void vlib_buffer_add_to_free_list(vlib_main_t *vm, vlib_buffer_free_list_t *f, u32 buffer_index, u8 do_init)
static uword vlib_buffer_contents(vlib_main_t *vm, u32 buffer_index, u8 *contents)
Copy buffer contents to memory.
static void vlib_buffer_set_known_state(u32 buffer_index, vlib_buffer_known_state_t state)
static void vlib_buffer_chain_init(vlib_buffer_t *first)
#define vec_add_aligned(V, E, N, A)
Add N elements to end of vector V (no header, specified alignment)
serialize_stream_t stream
clib_spinlock_t buffer_known_hash_lockp
vlib_buffer_fill_free_list_cb_t * vlib_buffer_fill_free_list_cb
static void vlib_buffer_free_no_next(vlib_main_t *vm, u32 *buffers, u32 n_buffers)
Free buffers, does not free the buffer chain for each buffer.
static u16 vlib_buffer_clone(vlib_main_t *vm, u32 src_buffer, u32 *buffers, u16 n_buffers, u16 head_end_offset)
Create multiple clones of buffer and store them in the supplied array.
void vlib_buffer_chain_validate(vlib_main_t *vm, vlib_buffer_t *first)
#define vec_free(V)
Free vector's memory (no header).
static void * clib_mem_set_heap(void *heap)
#define clib_warning(format, args...)
#define clib_memcpy(a, b, c)
static vlib_buffer_t * vlib_get_next_buffer(vlib_main_t *vm, vlib_buffer_t *b)
Get next buffer in buffer linklist, or zero for end of list.
void vlib_buffer_validate_alloc_free(vlib_main_t *vm, u32 *buffers, uword n_buffers, vlib_buffer_known_state_t expected_state)
static u32 vlib_buffer_space_left_at_end(vlib_main_t *vm, vlib_buffer_t *b)
static uword round_pow2(uword x, uword pow2)
vlib_buffer_known_state_t
uword data_function_opaque
#define VLIB_BUFFER_DATA_SIZE
static void vlib_validate_buffer_set_in_use(vlib_buffer_t *b, u32 expected)
static_always_inline u64x2 u32x4_extend_to_u64x2(u32x4 v)
#define uword_to_pointer(u, type)
#define vec_delete(V, N, M)
Delete N elements starting at element M.
vlib_buffer_free_cb_t * vlib_buffer_free_cb
static vlib_buffer_free_list_index_t vlib_buffer_get_free_list_index(vlib_buffer_t *b)
void vlib_buffer_free_list_fill_unaligned(vlib_main_t *vm, vlib_buffer_free_list_t *free_list, uword n_unaligned_buffers)
u32 next_buffer
Next buffer for this linked-list of buffers.
static vlib_buffer_free_list_t * vlib_buffer_get_buffer_free_list(vlib_main_t *vm, vlib_buffer_t *b, vlib_buffer_free_list_index_t *index)
static void vlib_buffer_advance(vlib_buffer_t *b, word l)
Advance current data pointer by the supplied (signed!) amount.
vlib_buffer_free_list_index_t vlib_buffer_create_free_list(vlib_main_t *vm, u32 n_data_bytes, char *fmt,...)
static uword vlib_buffer_index_length_in_chain(vlib_main_t *vm, u32 bi)
Get length in bytes of the buffer index buffer chain.
static uword pointer_to_uword(const void *p)
u8 n_add_refs
Number of additional references to this buffer.
void * vlib_buffer_state_heap
static u32 vlib_buffer_alloc_from_free_list(vlib_main_t *vm, u32 *buffers, u32 n_buffers, vlib_buffer_free_list_index_t index)
Allocate buffers from specific freelist into supplied array.
u32 total_length_not_including_first_buffer
Only valid for first buffer in chain.
template key/value backing page structure
static u16 vlib_buffer_chain_append_data(vlib_main_t *vm, vlib_buffer_free_list_index_t free_list_index, vlib_buffer_t *first, vlib_buffer_t *last, void *data, u16 data_len)
static u32 vlib_buffer_alloc_to_ring(vlib_main_t *vm, u32 *ring, u32 start, u32 ring_size, u32 n_buffers)
Allocate buffers into ring.
u32 opaque2[12]
More opaque data, see ../vnet/vnet/buffer.h.
#define vec_len(v)
Number of elements in vector (rvalue-only, NULL tolerant)
static u64 vlib_physmem_virtual_to_physical(vlib_main_t *vm, vlib_physmem_region_index_t idx, void *mem)
uword * buffer_known_hash
static u32 vlib_buffer_free_list_buffer_size(vlib_main_t *vm, vlib_buffer_free_list_index_t index)
void * vlib_packet_template_get_packet(vlib_main_t *vm, vlib_packet_template_t *t, u32 *bi_result)
static void vlib_buffer_init_for_free_list(vlib_buffer_t *dst, vlib_buffer_free_list_t *fl)
static vlib_buffer_known_state_t vlib_buffer_is_known(u32 buffer_index)
vlib_buffer_main_t buffer_main
static void vlib_buffer_set_free_list_index(vlib_buffer_t *b, vlib_buffer_free_list_index_t index)
static_always_inline vlib_buffer_pool_t * vlib_buffer_pool_get(u8 buffer_pool_index)
#define CLIB_MEMORY_BARRIER()
u8 vlib_buffer_free_list_index_t
static void vlib_packet_template_free(vlib_main_t *vm, vlib_packet_template_t *t)
static void vlib_buffer_free_one(vlib_main_t *vm, u32 buffer_index)
Free one buffer. Shorthand to free a single buffer chain.
void vlib_packet_template_get_packet_helper(vlib_main_t *vm, vlib_packet_template_t *t)
static vlib_buffer_free_list_t * vlib_buffer_get_free_list(vlib_main_t *vm, vlib_buffer_free_list_index_t free_list_index)
static_always_inline void vlib_get_buffers(vlib_main_t *vm, u32 *bi, vlib_buffer_t **b, int count)
Translate array of buffer indices into buffer pointers.
#define CLIB_CACHE_LINE_BYTES
void vlib_packet_template_init(vlib_main_t *vm, vlib_packet_template_t *t, void *packet_data, uword n_packet_data_bytes, uword min_n_buffers_each_alloc, char *fmt,...)
u32 flags
buffer flags: VLIB_BUFFER_FREE_LIST_INDEX_MASK: bits used to store free list index, VLIB_BUFFER_IS_TRACED: trace this buffer.
static u32 vlib_buffer_alloc(vlib_main_t *vm, u32 *buffers, u32 n_buffers)
Allocate buffers into supplied array.
static void vlib_buffer_free_from_ring_no_next(vlib_main_t *vm, u32 *ring, u32 start, u32 ring_size, u32 n_buffers)
Free buffers from ring without freeing tail buffers.
static vlib_buffer_t * vlib_get_buffer(vlib_main_t *vm, u32 buffer_index)
Translate buffer index into buffer pointer.
static u32 vlib_buffer_round_size(u32 size)
static u32 unserialize_vlib_buffer_n_bytes(serialize_main_t *m)
vlib_buffer_free_list_index_t free_list_index