#define foreach_avf_input_error \
  _(BUFFER_ALLOC, "buffer alloc error")

#define _(f,s) AVF_INPUT_ERROR_##f,

#define AVF_INPUT_REFILL_TRESHOLD 32

#ifdef CLIB_HAVE_VEC256
  u64x4_store_unaligned (v, (void *) d);
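For context, foreach_avf_input_error follows the standard VPP error-table idiom: the _() macro is redefined to expand the list into an enum and into a string table. A minimal sketch of that expansion (the enum tail and string-table name are the conventional ones, inferred rather than copied from this file):

/* Sketch: conventional expansion of the error list into an enum and a
   string table; surrounding code is inferred, not copied from this file. */
typedef enum
{
#define _(f,s) AVF_INPUT_ERROR_##f,
  foreach_avf_input_error
#undef _
  AVF_INPUT_N_ERROR,
} avf_input_error_t;

static char *avf_input_error_strings[] = {
#define _(n,s) s,
  foreach_avf_input_error
#undef _
};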
  slot = (rxq->next - n_refill - 1) & mask;

  vlib_error_count (vm, node->node_index, AVF_INPUT_ERROR_BUFFER_ALLOC, 1);

  slot = (slot + 8) & mask;
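The slot arithmetic relies on the ring size being a power of two, so masking with size - 1 implements modulo wrap-around even when the subtraction underflows. A standalone sketch (names and values illustrative):

/* Sketch: wrap-around ring arithmetic, assuming a power-of-two ring size.
   'ring_size', 'slot' and 'n_refill' are illustrative names. */
#include <stdint.h>
#include <assert.h>

int
main (void)
{
  uint16_t ring_size = 1024;	/* must be a power of two */
  uint16_t mask = ring_size - 1;
  uint16_t next = 3, n_refill = 10;

  /* last slot written during refill, modulo ring size */
  uint16_t slot = (next - n_refill - 1) & mask;
  assert (slot == (uint16_t) (1024 + 3 - 10 - 1));

  /* advancing by 8 wraps the same way */
  slot = (slot + 8) & mask;
  return 0;
}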
  u32 tlnifb = 0, i = 0;

  while ((qw1 & AVF_RXD_STATUS_EOP) == 0)

  b->flags |= VLIB_BUFFER_NEXT_PRESENT;

  hb->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
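These fragments implement multi-descriptor packets: the loop walks descriptors until the EOP (end of packet) status bit appears, linking each tail buffer to the previous one and summing tail lengths into total_length_not_including_first_buffer (the "tlnifb" accumulator). A simplified sketch of that chaining, assuming tail buffer indices and lengths have already been collected (helper name and parameters are illustrative):

/* Sketch: chain tail buffers onto head buffer 'hb' and record the total
   tail length; a simplified version of the logic in the fragments above. */
static_always_inline uword
attach_tails_sketch (vlib_main_t *vm, vlib_buffer_t *hb,
		     u32 *tail_bufs, u16 *tail_lens, u16 n_tails)
{
  u32 tlnifb = 0, i = 0;
  vlib_buffer_t *b = hb;

  while (i < n_tails)
    {
      b->flags |= VLIB_BUFFER_NEXT_PRESENT;	/* link to the next buffer */
      b->next_buffer = tail_bufs[i];
      b = vlib_get_buffer (vm, b->next_buffer);
      b->current_length = tail_lens[i];
      tlnifb += tail_lens[i];
      i++;
    }

  hb->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
  hb->total_length_not_including_first_buffer = tlnifb;
  return tlnifb;
}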
  uword n_rx_bytes = 0;
  u32 n_trace, n_rx_packets = 0, n_rx_bytes = 0;
  u32 *bi, *to_next, n_left_to_next;

#ifdef CLIB_HAVE_VEC256
  u64x4 q1x4, or_q1x4 = { 0 };

  if (next + 11 < size)
#ifdef CLIB_HAVE_VEC256
      q1x4 = u64x4_gather ((void *) &d[0].qword[1], (void *) &d[1].qword[1],
			   (void *) &d[2].qword[1], (void *) &d[3].qword[1]);

      if (!u64x4_is_equal (q1x4 & dd_eop_mask4, dd_eop_mask4))

      u64x4_store_unaligned (q1x4, ptd->qw1s + n_rx_packets);

      next = (next + 4) & mask;
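The gather pulls qword[1] of four consecutive descriptors into one u64x4 so the DD (descriptor done) and EOP bits of all four can be tested in a single compare. A scalar equivalent of that test, using the status macros listed below:

/* Sketch: scalar equivalent of the 4-wide DD/EOP test. Returns 1 only if
   all four descriptors are completed, single-buffer packets. */
static_always_inline int
four_descs_done_sketch (avf_rx_desc_t *d)
{
  u64 m = AVF_RXD_STATUS_DD | AVF_RXD_STATUS_EOP;
  for (int i = 0; i < 4; i++)
    if ((d[i].qword[1] & m) != m)
      return 0;
  return 1;
}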
      bi[0] = rxq->bufs[next];

      u16 tail_next = next;

      tail_next = (tail_next + 1) & mask;

      or_qw1 |= tail->qw1s[tail_desc] = td[0].qword[1];

      n_tail_desc += tail_desc;

      or_qw1 |= ptd->qw1s[n_rx_packets] = d[0].qword[1];

      next = (next + 1) & mask;
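The chained assignment or_qw1 |= ... = ... stores each descriptor's qword[1] and folds it into a running OR at the same time, so error bits can be checked once per burst rather than per packet. A minimal sketch of the idea (names illustrative):

/* Sketch: per-burst error check via an OR-accumulator of status words.
   If no descriptor set AVF_RXD_ERROR_IPE, the whole frame can be marked
   ETH_INPUT_FRAME_F_IP4_CKSUM_OK in one step. */
static_always_inline u64
accumulate_qw1_sketch (u64 *qw1s, u32 n_rx_packets)
{
  u64 or_qw1 = 0;
  for (u32 i = 0; i < n_rx_packets; i++)
    or_qw1 |= qw1s[i];
  return or_qw1;
}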
  if (n_rx_packets == 0)

  rxq->n_enqueued -= n_rx_packets + n_tail_desc;

#ifdef CLIB_HAVE_VEC256
  or_qw1 |= or_q1x4[0] | or_q1x4[1] | or_q1x4[2] | or_q1x4[3];
      u32 n_left = n_rx_packets, i = 0, j;

      while (n_trace && n_left)

  n_left_to_next -= n_rx_packets;
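The trace loop follows the usual VPP pattern: read the remaining trace budget, trace up to that many of the received buffers, then write the unused budget back. A sketch under that pattern (the trace record fields are illustrative, not this driver's actual layout):

/* Sketch: standard VPP packet-trace loop; 'tr' fields are illustrative. */
u32 n_trace = vlib_get_trace_count (vm, node);
u32 n_left = n_rx_packets, i = 0;

while (n_trace && n_left)
  {
    vlib_buffer_t *b = ptd->bufs[i];
    avf_input_trace_t *tr;
    vlib_trace_buffer (vm, node, next_index, b, /* follow_chain */ 0);
    tr = vlib_add_trace (vm, node, b, sizeof (*tr));
    tr->next_index = next_index;	/* illustrative field */
    tr->qw1 = ptd->qw1s[i];		/* illustrative field */
    n_trace--;
    n_left--;
    i++;
  }
vlib_set_trace_count (vm, node, n_trace);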
  if (ad->flags & AVF_DEVICE_F_VA_DMA)

  if ((ad->flags & AVF_DEVICE_F_ADMIN_UP) == 0)
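AVF_DEVICE_F_VA_DMA distinguishes devices that can DMA using buffer virtual addresses (IOMMU-backed) from those that need physical addresses. A sketch of the address selection this flag implies (helper name and parameters illustrative):

/* Sketch: pick the DMA address for a descriptor depending on VA-DMA
   capability; helper name and parameters are illustrative. */
static_always_inline u64
dma_addr_sketch (vlib_main_t *vm, u32 dev_flags, vlib_buffer_t *b)
{
  if (dev_flags & AVF_DEVICE_F_VA_DMA)
    return pointer_to_uword (b->data);	/* virtual address is DMA-able */
  return vlib_buffer_get_pa (vm, b);	/* translate to physical address */
}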
  .sibling_of = "device-input",
  .state = VLIB_NODE_STATE_DISABLED,
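For context, these two fields normally sit inside a VLIB_REGISTER_NODE block; the surrounding fields below are the conventional ones for a device input node and are inferred, not copied from this file:

/* Sketch: typical registration for an input node; fields other than
   .sibling_of and .state are the conventional ones, inferred. */
VLIB_REGISTER_NODE (avf_input_node) = {
  .name = "avf-input",
  .sibling_of = "device-input",
  .format_trace = format_avf_input_trace,
  .type = VLIB_NODE_TYPE_INPUT,
  .state = VLIB_NODE_STATE_DISABLED,
  .flags = VLIB_NODE_FLAG_TRACE_SUPPORTED,
};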
u32 flags
Buffer flags: VLIB_BUFFER_FREE_LIST_INDEX_MASK marks the bits used to store the free-list index; VLIB_BUFFER_IS_TRACED marks this buffer for tracing.
static u32 vlib_get_trace_count(vlib_main_t *vm, vlib_node_runtime_t *rt)
static void vlib_increment_combined_counter(vlib_combined_counter_main_t *cm, u32 thread_index, u32 index, u64 n_packets, u64 n_bytes)
Increment a combined counter.
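A typical use in a device input node is to credit received packets and bytes to the interface's combined RX counter once per burst (variable names assumed):

/* Sketch: per-burst RX accounting; sw_if_index, n_rx_packets and
   n_rx_bytes are assumed to be in scope. */
vlib_increment_combined_counter
  (vnet_get_main ()->interface_main.combined_sw_if_counters
     + VNET_INTERFACE_COUNTER_RX,
   vlib_get_thread_index (), sw_if_index, n_rx_packets, n_rx_bytes);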
vnet_main_t * vnet_get_main(void)
vnet_interface_main_t interface_main
static uword vlib_buffer_get_pa(vlib_main_t *vm, vlib_buffer_t *b)
#define CLIB_MEMORY_STORE_BARRIER()
static void vlib_error_count(vlib_main_t *vm, uword node_index, uword counter, uword increment)
#define VLIB_NODE_FLAG_TRACE_SUPPORTED
u16 current_length
Number of bytes between the current data and the end of this buffer.
static vlib_frame_t * vlib_get_frame(vlib_main_t *vm, vlib_frame_t *f)
u32 buffers[AVF_RX_MAX_DESC_IN_CHAIN - 1]
static_always_inline void vlib_get_buffers_with_offset(vlib_main_t *vm, u32 *bi, void **b, int count, i32 offset)
Translate array of buffer indices into buffer pointers with offset.
#define VLIB_NODE_FN(node)
#define AVF_RXD_STATUS_DD
static u32 vlib_buffer_alloc_to_ring_from_pool(vlib_main_t *vm, u32 *ring, u32 start, u32 ring_size, u32 n_buffers, u8 buffer_pool_index)
Allocate buffers into ring from specific buffer pool.
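A refill path typically allocates straight into the ring and backs out on partial allocation; a sketch using the ring seen in the fragments above (rxq->size and rxq->buffer_pool_index are assumed field names):

/* Sketch: refill 'n_refill' slots starting at 'slot'; on partial
   allocation, count the error and free what was allocated. */
u32 n_alloc = vlib_buffer_alloc_to_ring_from_pool
  (vm, rxq->bufs, slot, rxq->size, n_refill, rxq->buffer_pool_index);

if (PREDICT_FALSE (n_alloc != n_refill))
  {
    vlib_error_count (vm, node->node_index,
		      AVF_INPUT_ERROR_BUFFER_ALLOC, 1);
    if (n_alloc)
      vlib_buffer_free_from_ring (vm, rxq->bufs, slot, rxq->size, n_alloc);
  }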
format_function_t format_avf_input_trace
u8 buffer_pool_index
Index of the buffer pool this buffer belongs to.
static void vlib_trace_buffer(vlib_main_t *vm, vlib_node_runtime_t *r, u32 next_index, vlib_buffer_t *b, int follow_chain)
#define static_always_inline
#define ETH_INPUT_FRAME_F_SINGLE_SW_IF_IDX
vlib_combined_counter_main_t * combined_sw_if_counters
static_always_inline int vnet_device_input_have_features(u32 sw_if_index)
#define vlib_prefetch_buffer_header(b, type)
Prefetch buffer metadata.
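The usual pattern prefetches buffer metadata a few elements ahead of the ones being processed (loop shape illustrative):

/* Sketch: prefetch headers of b[4..7] while processing b[0..3]. */
while (n_left >= 8)
  {
    vlib_prefetch_buffer_header (b[4], LOAD);
    vlib_prefetch_buffer_header (b[5], LOAD);
    vlib_prefetch_buffer_header (b[6], LOAD);
    vlib_prefetch_buffer_header (b[7], LOAD);
    /* ... process b[0..3] ... */
    b += 4;
    n_left -= 4;
  }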
#define vec_elt_at_index(v, i)
Get vector value at index i checking that i is in bounds.
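Typical use in this driver's input function is resolving per-device state from the device/queue cursor (am and dq are assumed names, as is the devices vector):

/* Sketch: bounds-checked lookup of device state; 'am' (avf_main_t *) and
   'dq' (device-and-queue cursor) are assumed names. */
avf_device_t *ad = vec_elt_at_index (am->devices, dq->dev_instance);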
#define vlib_get_new_next_frame(vm, node, next_index, vectors, n_vectors_left)
avf_rx_tail_t tails[AVF_RX_VECTOR_SZ]
static vlib_next_frame_t * vlib_node_runtime_get_next_frame(vlib_main_t *vm, vlib_node_runtime_t *n, u32 next_index)
static void vlib_buffer_free_from_ring(vlib_main_t *vm, u32 *ring, u32 start, u32 ring_size, u32 n_buffers)
Free buffers from ring.
u64 qw1s[AVF_RX_VECTOR_SZ]
static_always_inline void vlib_buffer_copy_template(vlib_buffer_t *b, vlib_buffer_t *bt)
static_always_inline u64x4 u64x4_gather(void *p0, void *p1, void *p2, void *p3)
#define AVF_RXD_LEN_SHIFT
#define ETH_INPUT_FRAME_F_IP4_CKSUM_OK
u32 node_index
Node index.
#define AVF_RXD_STATUS_EOP
#define VLIB_REGISTER_NODE(x,...)
static_always_inline uword vlib_get_thread_index(void)
#define CLIB_PREFETCH(addr, size, type)
static_always_inline int avf_rxd_is_not_dd(avf_rx_desc_t *d)
static void * vlib_frame_scalar_args(vlib_frame_t *f)
Get pointer to frame scalar data.
void vlib_put_next_frame(vlib_main_t *vm, vlib_node_runtime_t *r, u32 next_index, u32 n_vectors_left)
Release pointer to next frame vector data.
vlib_buffer_t * bufs[AVF_RX_VECTOR_SZ]
vlib_main_t *vm, vlib_node_runtime_t *node
u32 per_interface_next_index
vlib_buffer_t buffer_template
#define AVF_RXD_ERROR_IPE
static uword pointer_to_uword(const void *p)
static void * vlib_add_trace(vlib_main_t *vm, vlib_node_runtime_t *r, vlib_buffer_t *b, u32 n_data_bytes)
#define foreach_device_and_queue(var, vec)
static_always_inline void vlib_buffer_copy_indices(u32 *dst, u32 *src, u32 n_indices)
u32 next_buffer
Next buffer for this linked-list of buffers.
#define VLIB_BUFFER_TRACE_TRAJECTORY_INIT(b)
VLIB buffer representation.
avf_per_thread_data_t * per_thread_data
static_always_inline void vnet_feature_start_device_input_x1(u32 sw_if_index, u32 *next0, vlib_buffer_t *b0)
vlib_main_t *vm, vlib_node_runtime_t *node, vlib_frame_t *frame
u64 qw1s[AVF_RX_MAX_DESC_IN_CHAIN - 1]
static void vlib_frame_no_append(vlib_frame_t *f)
static_always_inline int avf_rxd_is_not_eop(avf_rx_desc_t *d)
static void vlib_set_trace_count(vlib_main_t *vm, vlib_node_runtime_t *rt, u32 count)
static_always_inline void vlib_get_buffers(vlib_main_t *vm, u32 *bi, vlib_buffer_t **b, int count)
Translate array of buffer indices into buffer pointers.
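A burst of buffer indices is typically resolved to pointers in one call before the per-packet loop (array names assumed):

/* Sketch: translate the enqueued indices into buffer pointers for this
   burst; 'to_next' and 'ptd->bufs' are assumed names. */
vlib_get_buffers (vm, to_next, ptd->bufs, n_rx_packets);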
#define CLIB_CACHE_LINE_BYTES
u32 total_length_not_including_first_buffer
Only valid for first buffer in chain.
#define AVF_RX_MAX_DESC_IN_CHAIN
volatile u8 ref_count
Reference count for this buffer.
static vlib_buffer_t * vlib_get_buffer(vlib_main_t *vm, u32 buffer_index)
Translate buffer index into buffer pointer.
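A single-index lookup is the building block for walking a buffer chain, e.g.:

/* Sketch: follow a buffer chain via next_buffer until the last buffer. */
vlib_buffer_t *b = vlib_get_buffer (vm, bi);
while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
  b = vlib_get_buffer (vm, b->next_buffer);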