#ifndef included_vlib_buffer_funcs_h
#define included_vlib_buffer_funcs_h

/* vlib_buffer_copy_indices (): copy an array of buffer indices using the
   widest vector unit available, falling back to narrower units. */
#if defined(CLIB_HAVE_VEC512)
  while (n_indices >= 16)
    {
      u32x16_store_unaligned (u32x16_load_unaligned (src), dst);
      /* ... advance src and dst, decrement n_indices by 16 ... */
    }
#endif
#if defined(CLIB_HAVE_VEC256)
  while (n_indices >= 8)
    {
      u32x8_store_unaligned (u32x8_load_unaligned (src), dst);
      /* ... same, in steps of 8 ... */
    }
#endif
#if defined(CLIB_HAVE_VEC128)
  while (n_indices >= 4)
    {
      u32x4_store_unaligned (u32x4_load_unaligned (src), dst);
      /* ... same, in steps of 4, then a scalar tail loop ... */
    }
#endif
/* vlib_buffer_copy_indices_to_ring (u32 *ring, u32 *src, u32 start,
				     u32 ring_size, u32 n_buffers) */
  ASSERT (n_buffers <= ring_size);
  /* ... when the copy wraps past the end of the ring: */
  u32 n = ring_size - start;
  /* ... copy n indices to ring + start, the remainder to ring[0] ... */
/* vlib_buffer_copy_indices_from_ring (u32 *dst, u32 *ring, u32 start,
				       u32 ring_size, u32 n_buffers) */
  ASSERT (n_buffers <= ring_size);
  /* ... wrapped case: */
  u32 n = ring_size - start;
  /* ... copy n indices from ring + start, the remainder from ring[0] ... */
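The ring helpers split a copy that crosses the end of the ring into two contiguous copies. A minimal scalar sketch of the same arithmetic (names are illustrative, not from the header):

  /* Sketch: copy n_buffers indices into ring[] starting at `start`,
     wrapping at ring_size; equivalent to what the vectorized helper does. */
  static inline void
  copy_to_ring_sketch (u32 *ring, const u32 *src, u32 start, u32 ring_size,
		       u32 n_buffers)
  {
    if (start + n_buffers <= ring_size)
      clib_memcpy_fast (ring + start, src, n_buffers * sizeof (u32));
    else
      {
	u32 n = ring_size - start;	/* entries until the wrap point */
	clib_memcpy_fast (ring + start, src, n * sizeof (u32));
	clib_memcpy_fast (ring, src + n, (n_buffers - n) * sizeof (u32));
      }
  }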
/* vlib_buffer_copy_template (vlib_buffer_t *b, vlib_buffer_t *bt):
   copy the first cache line of the buffer template with the widest
   vector type available. */
#if defined CLIB_HAVE_VEC512
  b->as_u8x64[0] = bt->as_u8x64[0];
#elif defined (CLIB_HAVE_VEC256)
  b->as_u8x32[0] = bt->as_u8x32[0];
  b->as_u8x32[1] = bt->as_u8x32[1];
#elif defined (CLIB_HAVE_VEC128)
  b->as_u8x16[0] = bt->as_u8x16[0];
  b->as_u8x16[1] = bt->as_u8x16[1];
  b->as_u8x16[2] = bt->as_u8x16[2];
  b->as_u8x16[3] = bt->as_u8x16[3];
#endif
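Device input nodes typically stamp freshly allocated buffers with a per-pool template; a hedged sketch of that pattern (pool_index, bufs[] and rx_lengths[] are assumptions, not names from the header):

  /* Sketch: initialize metadata of freshly received buffers from the
     pool's buffer_template. */
  vlib_buffer_pool_t *bp = vlib_get_buffer_pool (vm, pool_index);
  vlib_buffer_t *bt = &bp->buffer_template;
  for (int i = 0; i < n_rx; i++)
    {
      vlib_buffer_copy_template (bufs[i], bt);
      bufs[i]->current_length = rx_lengths[i];	/* assumed per-packet length */
    }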
/* vlib_get_buffers_with_offset (): translate buffer indices into buffer
   pointers, eight (AVX2) or four (128-bit SIMD) at a time. */
#ifdef CLIB_HAVE_VEC256
  u64x4 off = u64x4_splat (buffer_mem_start + offset);
  /* unrolled main loop: */
  u64x4 b0 = u64x4_from_u32x4 (u32x4_load_unaligned (bi));
  u64x4 b1 = u64x4_from_u32x4 (u32x4_load_unaligned (bi + 4));
  /* ... shift each index left by CLIB_LOG2_CACHE_LINE_BYTES, add off,
     store the resulting pointers ... */
#endif
  /* short tail: */
#ifdef CLIB_HAVE_VEC256
  u64x4 b0 = u64x4_from_u32x4 (u32x4_load_unaligned (bi));
#elif defined (CLIB_HAVE_VEC128)
  u64x2 off = u64x2_splat (buffer_mem_start + offset);
  u32x4 bi4 = u32x4_load_unaligned (bi);
#if defined (__aarch64__)
  /* ... u64x2_from_u32x4_high () used for the upper pair of indices ... */
#endif
#endif

/* vlib_get_buffer_indices_with_offset (): the reverse translation, from
   buffer pointers back to indices. */
#ifdef CLIB_HAVE_VEC256
  u32x8 mask = { 0, 2, 4, 6, 1, 3, 5, 7 };
  u64x4 v0 = u64x4_load_unaligned (b);
  u64x4 v1 = u64x4_load_unaligned (b + 4);
  /* ... subtract the base, shift right, permute the low u32 lanes of each
     pointer into the lower half, then store 4 indices at a time: */
  u32x4_store_unaligned (u32x8_extract_lo (v2), bi);
  u32x4_store_unaligned (u32x8_extract_lo (v3), bi + 4);
#endif
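Both directions reduce to pointer arithmetic on the shared buffer memory region. A scalar sketch of what vlib_buffer_ptr_from_index () and vlib_get_buffer_index () compute, under the assumption that buffers are laid out on cache-line boundaries starting at buffer_mem_start (illustrative helper names):

  static inline void *
  ptr_from_index_sketch (uword buffer_mem_start, u32 buffer_index,
			 uword offset)
  {
    offset += ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
    return uword_to_pointer (buffer_mem_start + offset, void *);
  }

  static inline u32
  index_from_ptr_sketch (uword buffer_mem_start, void *p)
  {
    uword offset = pointer_to_uword (p) - buffer_mem_start;
    return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
  }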
/* vlib_get_next_buffer (): follow the chain, or return 0 at its end. */
  return (b->flags & VLIB_BUFFER_NEXT_PRESENT
	  ? vlib_get_buffer (vm, b->next_buffer) : 0);

/* vlib_buffer_contents (): walk the chain and copy the payload out. */
  uword content_len = 0;
  /* ... copy b->current_length bytes per segment, then: */
  if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
    break;

/* Prefetch buffer metadata by buffer index. */
#define vlib_prefetch_buffer_with_index(vm,bi,type)	\
  do {							\
    vlib_buffer_t * _b = vlib_get_buffer (vm, bi);	\
    vlib_prefetch_buffer_header (_b, type);		\
  } while (0)

/* vlib_validate_buffers (vlib_main_t *vm, u32 *buffers,
			   uword next_buffer_stride, uword n_buffers,
			   vlib_buffer_known_state_t known_state,
			   uword follow_buffer_next); */
/* vlib_buffer_alloc_from_pool (vlib_main_t *vm, u32 *buffers,
				u32 n_buffers, u8 buffer_pool_index) */
  if (VLIB_BUFFER_ALLOC_FAULT_INJECTOR > 0)
    n_buffers = vlib_buffer_alloc_may_fail (vm, n_buffers);
  /* ... first try the per-thread cache: */
  if (len >= n_buffers)
    /* ... take all indices from the cache ... */
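Because allocation can return fewer buffers than requested (pool exhaustion, or the optional fault injector above), callers are expected to check the return value. A minimal sketch using the public vlib_buffer_alloc () wrapper:

  /* Sketch: allocate up to 32 buffers; give back a partial batch. */
  u32 bis[32];
  u32 n = vlib_buffer_alloc (vm, bis, 32);
  if (n < 32)
    {
      vlib_buffer_free (vm, bis, n);	/* return the partial batch */
      return 0;				/* illustrative error handling */
    }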
/* vlib_buffer_alloc_to_ring (vlib_main_t *vm, u32 *ring, u32 start,
			       u32 ring_size, u32 n_buffers) */
  ASSERT (n_buffers <= ring_size);
  /* ... allocate into ring + start, wrapping to ring[0] if needed ... */

/* vlib_buffer_alloc_to_ring_from_pool (vlib_main_t *vm, u32 *ring, u32 start,
					u32 ring_size, u32 n_buffers,
					u8 buffer_pool_index) */
  ASSERT (n_buffers <= ring_size);
/* vlib_buffer_pool_put (vlib_main_t *vm, u8 buffer_pool_index,
			  u32 *buffers, u32 n_buffers) */
  u32 n_cached, n_empty;
  /* ... n_empty = free slots left in this thread's cache ... */
  if (n_buffers <= n_empty)
    {
      /* everything fits into the per-thread cache */
      bpt->n_cached = n_cached + n_buffers;
      return;
    }
  /* fill the cache to capacity with the tail of the batch: */
  vlib_buffer_copy_indices (bpt->cached_buffers + n_cached,
			    buffers + n_buffers - n_empty, n_empty);
  /* ... and return the rest to the shared pool under its spinlock: */
  vlib_buffer_copy_indices (bp->buffers + bp->n_avail, buffers,
			    n_buffers - n_empty);
  bp->n_avail += n_buffers - n_empty;
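The split between cache and pool is plain index arithmetic; a worked sketch with illustrative numbers (not taken from the header):

  /* Sketch: returning 50 buffers when the per-thread cache has 20 free
     slots.  The last 20 indices (buffers[30..49]) refill the cache; the
     first 30 (buffers[0..29]) are appended to the shared pool. */
  u32 n_buffers = 50, n_empty = 20;
  u32 n_to_cache = n_empty;		/* 20 */
  u32 n_to_pool  = n_buffers - n_empty;	/* 30, added to bp->n_avail */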
/* vlib_buffer_free_inline (vlib_main_t *vm, u32 *buffers, u32 n_buffers,
			     int maybe_next) */
  const int queue_size = 128;
  u8 buffer_pool_index = ~0;
  u32 n_queue = 0, queue[queue_size + 4];
#if defined(CLIB_HAVE_VEC128)
  /* header templates used to test pool index, flags and ref_count with a
     single 16-byte compare: */
  vlib_buffer_t flags_refs_mask = {
    .flags = VLIB_BUFFER_NEXT_PRESENT,
    /* ... ref_count bits are also part of the mask ... */
  };
#endif

  /* fast path: inspect four buffer headers at a time */
#if defined(CLIB_HAVE_VEC128)
  u8x16 p0, p1, p2, p3, r;
  p0 = u8x16_load_unaligned (b[0]);
  p1 = u8x16_load_unaligned (b[1]);
  p2 = u8x16_load_unaligned (b[2]);
  p3 = u8x16_load_unaligned (b[3]);

  r = p0 ^ bpi_vec.as_u8x16[0];
  r |= p1 ^ bpi_vec.as_u8x16[0];
  r |= p2 ^ bpi_vec.as_u8x16[0];
  r |= p3 ^ bpi_vec.as_u8x16[0];
  r &= bpi_mask.as_u8x16[0];
  r |= (p0 | p1 | p2 | p3) & flags_refs_mask.as_u8x16[0];

  sum = !u8x16_is_all_zero (r);
#else
  /* scalar equivalent of the header test: */
  sum &= VLIB_BUFFER_NEXT_PRESENT;
  /* ... plus ref_count and buffer_pool_index checks ... */
#endif

  /* buffers that pass the test are queued and flushed to the pool in
     batches: */
  if (n_queue >= queue_size)
    { /* ... vlib_buffer_pool_put (vm, buffer_pool_index, queue, n_queue); */ }

  /* slow path, one buffer at a time: */
  if (PREDICT_FALSE (buffer_pool_index != b[0]->buffer_pool_index))
    { /* ... flush the queue and switch to the new pool ... */ }
#if defined(CLIB_HAVE_VEC128)
  /* ... refresh bpi_vec with the new buffer_pool_index ... */
#endif
  queue[n_queue++] = bi;
  if (n_queue == queue_size)
    { /* ... flush ... */ }
  if (maybe_next && (flags & VLIB_BUFFER_NEXT_PRESENT))
    { /* ... continue with the chained buffer ... */ }
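From a node's point of view all of this sits behind vlib_buffer_free () and vlib_buffer_free_one (); a minimal sketch of the common pattern (frame is assumed to be the node's incoming frame):

  /* Sketch: drop every buffer in the incoming frame. */
  u32 *from = vlib_frame_vector_args (frame);
  u32 n_left = frame->n_vectors;
  vlib_buffer_free (vm, from, n_left);	/* frees each chain in the frame */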
/* vlib_buffer_free_from_ring (vlib_main_t *vm, u32 *ring, u32 start,
				u32 ring_size, u32 n_buffers) */
  ASSERT (n_buffers <= ring_size);
  /* ... free ring + start, wrapping to ring[0] if needed ... */

/* vlib_buffer_free_from_ring_no_next (): same, but tail buffers of each
   chain are not freed. */
  ASSERT (n_buffers <= ring_size);
#define VLIB_BUFFER_COPY_CLONE_FLAGS_MASK			\
  (VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID |	\
   VLIB_BUFFER_IS_TRACED | ~VLIB_BUFFER_FLAGS_ALL)

/* vlib_buffer_copy (): deep-copy a whole chain. */
  uword n_alloc, n_buffers = 1;
  /* count the segments in the source chain: */
  while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
    /* ... n_buffers++, follow s->next_buffer ... */
  u32 new_buffers[n_buffers];
  /* ... allocate, then copy metadata and payload segment by segment: */
  for (i = 1; i < n_buffers; i++)
    /* ... */

/* vlib_buffer_copy_no_chain (): single-segment copy only. */
  ASSERT ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
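A hedged usage sketch: duplicating a packet before handing the original off, with error handling abbreviated (bi is an assumed buffer index from the caller):

  /* Sketch: make an independent copy of buffer bi and get its index. */
  vlib_buffer_t *b = vlib_get_buffer (vm, bi);
  vlib_buffer_t *c = vlib_buffer_copy (vm, b);
  if (c)
    {
      u32 ci = vlib_get_buffer_index (vm, c);
      /* ... enqueue ci elsewhere; the original bi is untouched ... */
    }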
/* vlib_buffer_move (): shift the current data to a new offset inside the
   buffer, using a plain copy when the ranges cannot overlap. */
  if (source + length <= destination)	/* no overlap */
    clib_memcpy_fast (destination, source, length);
  else
    memmove (destination, source, length);
/* vlib_buffer_clone_256 (): create up to 256 clones that share the source
   buffer's payload beyond head_end_offset. */
  ASSERT (n_buffers <= 256);
  ASSERT ((offset + head_end_offset) <
	  /* ... default data size ... */);
  /* source too short to be worth sharing: fall back to full copies */
  buffers[0] = src_buffer;
  for (i = 1; i < n_buffers; i++)
    /* ... buffers[i] = index of a vlib_buffer_copy () of the source ... */

  /* single clone, no offset: hand back the source itself */
  buffers[0] = src_buffer;

  /* general case: allocate head buffers, copy the first head_end_offset
     bytes into each, chain them to the shared source: */
  for (i = 0; i < n_buffers; i++)
    /* ... d->flags gets VLIB_BUFFER_NEXT_PRESENT;
	   d->next_buffer = src_buffer ... */
  /* bump the reference count on every segment of the shared source: */
  while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
    /* ... s->ref_count = n_buffers ... */

/* vlib_buffer_clone_at_offset (): handle requests larger than 256 by
   copying the source and cloning in batches: */
  while (n_buffers > 256)
    /* ... n_cloned += vlib_buffer_clone_256 (vm, copy_index,
					       (buffers + n_cloned),
					       256, head_end_offset, offset); */
  /* final batch of up to 256:
     ... vlib_buffer_clone_256 (vm, src_buffer, buffers + n_cloned,
				n_buffers, head_end_offset, offset); */
/* vlib_buffer_clone (): convenience wrapper around
   vlib_buffer_clone_at_offset () with offset 0. */
  return vlib_buffer_clone_at_offset (vm, src_buffer, buffers, n_buffers,
				      head_end_offset, 0);
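A hedged sketch of the usual replication pattern (e.g. multicast fan-out), where each clone gets a private, rewritable head of head_end_offset bytes; n_dst is an assumption and is taken to be at most 16 here:

  /* Sketch: fan packet bi out to n_dst destinations. */
  u32 clones[16];
  u16 n = vlib_buffer_clone (vm, bi, clones, n_dst,
			     64 /* private head bytes per clone */);
  for (u16 i = 0; i < n; i++)
    {
      vlib_buffer_t *c = vlib_get_buffer (vm, clones[i]);
      /* ... rewrite the first 64 bytes of c independently;
	     the rest of the payload is shared with bi ... */
    }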
/* vlib_buffer_attach_clone (): attach a cloned tail chain to head. */
  ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);

  head->flags |= VLIB_BUFFER_NEXT_PRESENT;
  head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
  head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
  head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
  /* ... point head->next_buffer at tail, adjust total length, then bump
     the ref_count of every tail segment: */
  if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
    /* ... follow tail->next_buffer and repeat ... */

/* vlib_buffer_chain_init (): reset a buffer as the head of a new chain. */
  first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
  first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;

/* vlib_buffer_chain_buffer (): append buffer next_bi after last and return
   a pointer to it. */
  last->flags |= VLIB_BUFFER_NEXT_PRESENT;
  next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
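A hedged sketch of building a two-segment chain with these helpers; bi0/bi1 are assumed to be freshly allocated buffer indices and data/n0/n1 an assumed payload:

  /* Sketch: chain two buffers and account for the payload length. */
  vlib_buffer_t *first = vlib_get_buffer (vm, bi0);
  vlib_buffer_chain_init (first);		/* length 0, no next */

  /* copy n0 bytes of payload into the head segment */
  clib_memcpy_fast (vlib_buffer_get_current (first), data, n0);
  vlib_buffer_chain_increase_length (first, first, n0);

  /* append a second segment and copy n1 more bytes into it */
  vlib_buffer_t *last = vlib_buffer_chain_buffer (vm, first, bi1);
  clib_memcpy_fast (vlib_buffer_get_current (last), data + n0, n1);
  vlib_buffer_chain_increase_length (first, last, n1);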
/* vlib_packet_template_init (vlib_main_t *vm, vlib_packet_template_t *t,
			       void *packet_data, uword n_packet_data_bytes,
			       uword min_n_buffers_each_alloc,
			       char *fmt, ...); */
/* vlib_buffer_chain_linearize (): compact a (possibly cloned) chain so the
   payload occupies as few segments as possible. */
  u32 bytes_left = 0, data_size;
  u16 src_left, dst_left, n_buffers = 1;
  /* count segments and total bytes in the chain: */
  while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
    /* ... n_buffers++, bytes_left += current_length ... */

  /* cloned chains cannot be rewritten in place; allocate a fresh tail large
     enough for the data that will not fit in the first buffer: */
  if (is_cloned && bytes_left >= dst_left)
    {
      u32 space_needed = bytes_left - dst_left;
      while (len < space_needed)
	/* ... allocate more buffers and link them, each marked: */
	b->flags = VLIB_BUFFER_NEXT_PRESENT;
      to_free = first->next_buffer;
      first->next_buffer = tail;
    }

  /* copy loop: move bytes from source segment sb to destination segment db */
  src_left = sb->current_length;
  dst_left = data_size;
  while (src_left == 0)
    {
      ASSERT (sb->flags & VLIB_BUFFER_NEXT_PRESENT);
      /* ... advance to the next source segment: */
      src_left = sb->current_length;
    }
  bytes_to_copy = clib_min (dst_left, src_left);
  /* when copying within the same buffer, do not run past the read cursor: */
  bytes_to_copy = clib_min (bytes_to_copy, sp - dp);
  /* ... copy, then advance both cursors: */
  src_left -= bytes_to_copy;
  dst_left -= bytes_to_copy;
  dp += bytes_to_copy;
  sp += bytes_to_copy;
  bytes_left -= bytes_to_copy;

  /* release whatever is no longer referenced: */
  if (is_cloned && to_free)
    /* ... vlib_buffer_free_one (vm, to_free) ... */
  if (db->flags & VLIB_BUFFER_NEXT_PRESENT)
    db->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
  /* free the remaining, now-empty tail segments: */
  while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
    /* ... */
  first->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
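A hedged usage sketch: features that need contiguous headers can linearize on the slow path only when a chain is actually present (bi is an assumed buffer index):

  /* Sketch: pay the linearization cost only for multi-segment packets. */
  vlib_buffer_t *b = vlib_get_buffer (vm, bi);
  if (PREDICT_FALSE (b->flags & VLIB_BUFFER_NEXT_PRESENT))
    vlib_buffer_chain_linearize (vm, b);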
Symbol reference: signatures and brief descriptions for the functions, macros and fields used above.
#define VLIB_BUFFER_COPY_CLONE_FLAGS_MASK
u32 flags
buffer flags: VLIB_BUFFER_FREE_LIST_INDEX_MASK: bits used to store free list index, VLIB_BUFFER_IS_TRACED: trace this buffer.
static uword vlib_buffer_get_current_pa(vlib_main_t *vm, vlib_buffer_t *b)
static __clib_warn_unused_result u32 vlib_buffer_alloc_to_ring_from_pool(vlib_main_t *vm, u32 *ring, u32 start, u32 ring_size, u32 n_buffers, u8 buffer_pool_index)
Allocate buffers into ring from specific buffer pool.
static u8 * vlib_buffer_get_tail(vlib_buffer_t *b)
Get pointer to the end of buffer's data.
static_always_inline void clib_spinlock_unlock(clib_spinlock_t *p)
static_always_inline void clib_spinlock_lock(clib_spinlock_t *p)
uword vlib_buffer_length_in_chain_slow_path(vlib_main_t *vm, vlib_buffer_t *b_first)
static void vlib_buffer_free(vlib_main_t *vm, u32 *buffers, u32 n_buffers)
Free buffers. Frees the entire buffer chain for each buffer.
#define clib_atomic_add_fetch(a, b)
format_function_t format_vlib_buffer_contents
static uword vlib_buffer_get_pa(vlib_main_t *vm, vlib_buffer_t *b)
format_function_t format_vlib_buffer
static_always_inline u64x2 u64x2_from_u32x4_high(u32x4 v)
i16 current_data
signed offset in data[], pre_data[] that we are currently processing.
static_always_inline vlib_buffer_pool_t * vlib_get_buffer_pool(vlib_main_t *vm, u8 buffer_pool_index)
#define clib_memcpy_fast(a, b, c)
u16 vlib_buffer_chain_append_data_with_alloc(vlib_main_t *vm, vlib_buffer_t *first, vlib_buffer_t **last, void *data, u16 data_len)
static void vlib_buffer_attach_clone(vlib_main_t *vm, vlib_buffer_t *head, vlib_buffer_t *tail)
Attach cloned tail to the buffer.
static void vlib_buffer_chain_increase_length(vlib_buffer_t *first, vlib_buffer_t *last, i32 len)
static void vlib_buffer_move(vlib_main_t *vm, vlib_buffer_t *b, i16 offset)
#define VLIB_BUFFER_PRE_DATA_SIZE
int vlib_buffer_add_data(vlib_main_t *vm, u32 *buffer_index, void *data, u32 n_data_bytes)
u16 current_length
Number of bytes between current data and the end of this buffer.
#define CLIB_LOG2_CACHE_LINE_BYTES
static __clib_warn_unused_result u32 vlib_buffer_alloc_to_ring(vlib_main_t *vm, u32 *ring, u32 start, u32 ring_size, u32 n_buffers)
Allocate buffers into ring.
vlib_buffer_main_t * buffer_main
static u32x8 u32x8_permute(u32x8 v, u32x8 idx)
static_always_inline void vlib_get_buffers_with_offset(vlib_main_t *vm, u32 *bi, void **b, int count, i32 offset)
Translate array of buffer indices into buffer pointers with offset.
static vlib_buffer_known_state_t vlib_buffer_is_known(vlib_main_t *vm, u32 buffer_index)
static uword vlib_buffer_length_in_chain(vlib_main_t *vm, vlib_buffer_t *b)
Get length in bytes of the buffer chain.
static void vlib_buffer_copy_indices_to_ring(u32 *ring, u32 *src, u32 start, u32 ring_size, u32 n_buffers)
static vlib_buffer_t * vlib_buffer_copy(vlib_main_t *vm, vlib_buffer_t *b)
u8 default_buffer_pool_index_for_numa[VLIB_BUFFER_MAX_NUMA_NODES]
STATIC_ASSERT_OFFSET_OF(vlib_buffer_t, template_end, 64)
u8 buffer_pool_index
Index of the buffer pool this buffer belongs to.
static_always_inline void vlib_get_buffer_indices(vlib_main_t *vm, vlib_buffer_t **b, u32 *bi, uword count)
Translate array of buffer pointers into buffer indices.
format_function_t format_vlib_buffer_no_chain
vlib_buffer_pool_thread_t * threads
static_always_inline __clib_warn_unused_result uword vlib_buffer_pool_get(vlib_main_t *vm, u8 buffer_pool_index, u32 *buffers, u32 n_buffers)
static __clib_warn_unused_result u32 vlib_buffer_alloc_on_numa(vlib_main_t *vm, u32 *buffers, u32 n_buffers, u32 numa_node)
Allocate buffers from specific numa node into supplied array.
#define static_always_inline
#define vlib_prefetch_buffer_header(b, type)
Prefetch buffer metadata.
#define vec_elt_at_index(v, i)
Get vector value at index i checking that i is in bounds.
static u32 vlib_get_buffer_index(vlib_main_t *vm, void *p)
Translate buffer pointer into buffer index.
static u16 vlib_buffer_chain_append_data(vlib_main_t *vm, vlib_buffer_t *first, vlib_buffer_t *last, void *data, u16 data_len)
static void vlib_buffer_free_from_ring(vlib_main_t *vm, u32 *ring, u32 start, u32 ring_size, u32 n_buffers)
Free buffers from ring.
static u32 vlib_buffer_chain_linearize(vlib_main_t *vm, vlib_buffer_t *b)
static vlib_buffer_t * vlib_buffer_copy_no_chain(vlib_main_t *vm, vlib_buffer_t *b, u32 *di)
u32 min_n_buffers_each_alloc
u8 * vlib_validate_buffer(vlib_main_t *vm, u32 buffer_index, uword follow_chain)
static_always_inline void vlib_buffer_copy_template(vlib_buffer_t *b, vlib_buffer_t *bt)
format_function_t format_vlib_buffer_and_data
static __clib_warn_unused_result u32 vlib_buffer_alloc(vlib_main_t *vm, u32 *buffers, u32 n_buffers)
Allocate buffers into supplied array.
static void vlib_buffer_validate(vlib_main_t *vm, vlib_buffer_t *b)
vlib_buffer_t buffer_template
u8 * vlib_validate_buffers(vlib_main_t *vm, u32 *buffers, uword next_buffer_stride, uword n_buffers, vlib_buffer_known_state_t known_state, uword follow_buffer_next)
static void * vlib_buffer_get_current(vlib_buffer_t *b)
Get pointer to current data to process.
static_always_inline u32x4 u32x4_shuffle(u32x4 v, const int a, const int b, const int c, const int d)
vlib_buffer_pool_t * buffer_pools
u32 trace_handle
Specifies trace buffer handle if VLIB_PACKET_IS_TRACED flag is set.
static_always_inline void vlib_get_buffer_indices_with_offset(vlib_main_t *vm, void **b, u32 *bi, uword count, i32 offset)
Translate array of buffer pointers into buffer indices with offset.
static uword vlib_buffer_contents(vlib_main_t *vm, u32 buffer_index, u8 *contents)
Copy buffer contents to memory.
static void vlib_buffer_chain_init(vlib_buffer_t *first)
#define clib_atomic_sub_fetch(a, b)
static_always_inline u32 vlib_buffer_get_default_data_size(vlib_main_t *vm)
clib_spinlock_t buffer_known_hash_lockp
static void vlib_buffer_free_no_next(vlib_main_t *vm, u32 *buffers, u32 n_buffers)
Free buffers, does not free the buffer chain for each buffer.
static u16 vlib_buffer_clone(vlib_main_t *vm, u32 src_buffer, u32 *buffers, u16 n_buffers, u16 head_end_offset)
Create multiple clones of buffer and store them in the supplied array.
static_always_inline void vlib_buffer_pool_put(vlib_main_t *vm, u8 buffer_pool_index, u32 *buffers, u32 n_buffers)
void vlib_buffer_chain_validate(vlib_main_t *vm, vlib_buffer_t *first)
#define vec_free(V)
Free vector's memory (no header).
#define VLIB_BUFFER_MAX_NUMA_NODES
static vlib_buffer_t * vlib_get_next_buffer(vlib_main_t *vm, vlib_buffer_t *b)
Get next buffer in the buffer linked list, or zero for end of list.
void vlib_buffer_validate_alloc_free(vlib_main_t *vm, u32 *buffers, uword n_buffers, vlib_buffer_known_state_t expected_state)
static u32 vlib_buffer_space_left_at_end(vlib_main_t *vm, vlib_buffer_t *b)
static uword round_pow2(uword x, uword pow2)
vlib_buffer_known_state_t
#define uword_to_pointer(u, type)
vlib buffer structure definition and a few select access methods.
static u16 vlib_buffer_clone_at_offset(vlib_main_t *vm, u32 src_buffer, u32 *buffers, u16 n_buffers, u16 head_end_offset, i16 offset)
Create multiple clones of buffer and store them in the supplied array.
static void vlib_buffer_advance(vlib_buffer_t *b, word l)
Advance current data pointer by the supplied (signed!) amount.
static uword vlib_buffer_index_length_in_chain(vlib_main_t *vm, u32 bi)
Get length in bytes of the buffer index buffer chain.
static void * vlib_buffer_ptr_from_index(uword buffer_mem_start, u32 buffer_index, uword offset)
static_always_inline void vlib_buffer_free_inline(vlib_main_t *vm, u32 *buffers, u32 n_buffers, int maybe_next)
static_always_inline u64x2 u64x2_from_u32x4(u32x4 v)
static uword pointer_to_uword(const void *p)
static u64 vlib_physmem_get_pa(vlib_main_t *vm, void *mem)
static_always_inline void vlib_buffer_copy_indices(u32 *dst, u32 *src, u32 n_indices)
static vlib_buffer_t * vlib_buffer_chain_buffer(vlib_main_t *vm, vlib_buffer_t *last, u32 next_bi)
#define VLIB_BUFFER_POOL_PER_THREAD_CACHE_SZ
u32 next_buffer
Next buffer for this linked-list of buffers.
u32 cached_buffers[VLIB_BUFFER_POOL_PER_THREAD_CACHE_SZ]
#define VLIB_BUFFER_TRACE_TRAJECTORY_INIT(b)
VLIB buffer representation.
static void vlib_buffer_copy_indices_from_ring(u32 *dst, u32 *ring, u32 start, u32 ring_size, u32 n_buffers)
uword * buffer_known_hash
void * vlib_packet_template_get_packet(vlib_main_t *vm, vlib_packet_template_t *t, u32 *bi_result)
static u16 vlib_buffer_clone_256(vlib_main_t *vm, u32 src_buffer, u32 *buffers, u16 n_buffers, u16 head_end_offset, i16 offset)
Create a maximum of 256 clones of buffer and store them in the supplied array.
static void vlib_packet_template_free(vlib_main_t *vm, vlib_packet_template_t *t)
static void vlib_buffer_free_one(vlib_main_t *vm, u32 buffer_index)
Free one buffer. Shorthand to free a single buffer chain.
static_always_inline void vlib_get_buffers(vlib_main_t *vm, u32 *bi, vlib_buffer_t **b, int count)
Translate array of buffer indices into buffer pointers.
#define CLIB_CACHE_LINE_BYTES
u32 total_length_not_including_first_buffer
Only valid for first buffer in chain.
void vlib_packet_template_init(vlib_main_t *vm, vlib_packet_template_t *t, void *packet_data, uword n_packet_data_bytes, uword min_n_buffers_each_alloc, char *fmt,...)
static u8 vlib_buffer_pool_get_default_for_numa(vlib_main_t *vm, u32 numa_node)
static void vlib_buffer_free_from_ring_no_next(vlib_main_t *vm, u32 *ring, u32 start, u32 ring_size, u32 n_buffers)
Free buffers from ring without freeing tail buffers.
volatile u8 ref_count
Reference count for this buffer.
static vlib_buffer_t * vlib_get_buffer(vlib_main_t *vm, u32 buffer_index)
Translate buffer index into buffer pointer.
u32 opaque[10]
Opaque data used by sub-graphs for their own purposes.
static __clib_warn_unused_result u32 vlib_buffer_alloc_from_pool(vlib_main_t *vm, u32 *buffers, u32 n_buffers, u8 buffer_pool_index)
Allocate buffers from specific pool into supplied array.
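Taken together, a typical node loop touches only a handful of these entry points; a hedged end-to-end sketch (node boilerplate and error paths omitted, and the 4-byte tag being stripped is an assumption):

  /* Sketch: strip an assumed 4-byte tag from each packet, then drop it. */
  u32 *from = vlib_frame_vector_args (frame);
  u32 n_left = frame->n_vectors;
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;

  vlib_get_buffers (vm, from, bufs, n_left);	/* indices -> pointers */
  while (n_left > 0)
    {
      u8 *p = vlib_buffer_get_current (b[0]);
      /* ... inspect p ... */
      vlib_buffer_advance (b[0], 4);		/* consume 4 bytes */
      b += 1;
      n_left -= 1;
    }
  vlib_buffer_free (vm, from, frame->n_vectors); /* illustrative: drop all */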