#define foreach_avf_input_error \
  _(BUFFER_ALLOC, "buffer alloc error") \
  _(RX_PACKET_ERROR, "Rx packet errors")

/* expands each entry of the list above into an AVF_INPUT_ERROR_* enum member */
#define _(f,s) AVF_INPUT_ERROR_##f,

/* Rx descriptor status bits written back by the device */
#define AVF_RX_DESC_STATUS(x) (1 << (x))
#define AVF_RX_DESC_STATUS_DD AVF_RX_DESC_STATUS (0)	/* descriptor done */
#define AVF_RX_DESC_STATUS_EOP AVF_RX_DESC_STATUS (1)	/* end of packet */

/* refill the Rx ring once at least this many slots are free */
#define AVF_INPUT_REFILL_TRESHOLD 32

/* avf_rxq_refill: ring bookkeeping locals */
u16 n_refill, mask, n_alloc, slot;
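The error list above is the usual VPP X-macro pattern: `foreach_avf_input_error` is expanded once with the `_()` definition shown to produce enum members, and (elsewhere in the file) once more to produce the matching counter strings. A minimal, self-contained sketch of the same expansion, with demo names standing in for the driver's:

#include <stdio.h>

#define foreach_demo_error \
  _(BUFFER_ALLOC, "buffer alloc error") \
  _(RX_PACKET_ERROR, "Rx packet errors")

typedef enum
{
#define _(f,s) DEMO_ERROR_##f,
  foreach_demo_error
#undef _
  DEMO_N_ERROR,
} demo_error_t;

static char *demo_error_strings[] = {
#define _(f,s) s,
  foreach_demo_error
#undef _
};

int
main (void)
{
  for (int i = 0; i < DEMO_N_ERROR; i++)
    printf ("%d: %s\n", i, demo_error_strings[i]);
  return 0;
}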
/* first slot to refill, counting back from the next descriptor to be
   processed; the ring size is a power of two, so indices wrap with a mask */
slot = (rxq->next - n_refill - 1) & mask;
/* buffer allocation fell short: bump this node's error counter */
vlib_error_count (vm, node->node_index,
		  AVF_INPUT_ERROR_BUFFER_ALLOC, 1);
s1 = (slot + 1) & mask;
s2 = (slot + 2) & mask;
s3 = (slot + 3) & mask;
/* 4-wide refill loop advances four slots per iteration ... */
slot = (slot + 4) & mask;

/* ... and a scalar tail loop handles the remainder one slot at a time */
slot = (slot + 1) & mask;
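All of the index arithmetic above relies on the ring size being a power of two, so `& mask` replaces a modulo and unsigned wrap-around works in both directions. A runnable sketch of the trick (ring size 16 stands in for `rxq->size`):

#include <assert.h>

int
main (void)
{
  unsigned ring_size = 16;	 /* must be a power of two */
  unsigned mask = ring_size - 1; /* 0x0f */
  unsigned slot = 14;

  slot = (slot + 4) & mask;	 /* 18 & 0x0f == 2: wrapped past the end */
  assert (slot == 2);

  /* unsigned subtraction wraps too, so counting backwards also works,
     as in slot = (rxq->next - n_refill - 1) & mask above */
  unsigned prev = (slot - 5) & mask;
  assert (prev == 13);
  return 0;
}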
/* record the per-node error on the buffer so it is counted and
   enqueued to the error handler */
b->error = node->errors[AVF_INPUT_ERROR_RX_PACKET_ERROR];
/* ptype table lookup: OR in the buffer flags and pick the next graph node */
b->flags |= ptype->flags;
return ptype->next_node;
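The `ptype` lookup is a 256-entry table indexed by the 8-bit packet type the NIC reports: each entry carries the buffer flags to set and the graph node to enqueue to, so classification is a single load. A sketch of the idea with illustrative names (the real `avf_ptype_t` also carries a buffer-advance offset applied via vlib_buffer_advance):

#include <stdint.h>
#include <stdio.h>

typedef struct
{
  uint32_t flags;	/* buffer flags to OR in, e.g. "l3 checksum ok" */
  uint16_t next_node;	/* graph node index to enqueue the packet to */
} demo_ptype_t;

static demo_ptype_t ptypes[256]; /* indexed by the NIC's 8-bit ptype */

static uint16_t
demo_find_next (uint8_t hw_ptype, uint32_t * buf_flags)
{
  demo_ptype_t *p = ptypes + hw_ptype;
  *buf_flags |= p->flags;
  return p->next_node;
}

int
main (void)
{
  ptypes[24] = (demo_ptype_t) {.flags = 1 << 4,.next_node = 2 };
  uint32_t flags = 0;
  uint16_t next = demo_find_next (24, &flags);
  printf ("next %u flags 0x%x\n", next, flags);
  return 0;
}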
static_always_inline uword
avf_process_rx_burst (vlib_main_t * vm, vlib_node_runtime_t * node, ...,
		      u8 maybe_error, int known_next)
{
  uword n_rx_bytes = 0;
u32 n_rx_packets = 0, n_rx_bytes = 0;
#ifdef CLIB_HAVE_VEC256
  u64x4 status_dd_eop_mask = u64x4_splat (0x3);

  /* all four descriptors must have both DD and EOP set, i.e. be
     complete, single-buffer packets */
  if (!u64x4_is_equal (q1x4 & status_dd_eop_mask, status_dd_eop_mask))

  /* shift and mask status, length, ptype and err fields of four
     descriptor quadwords at once */
  v = q1x4 & u64x4_splat ((u64) 0x3FFFFULL);
  v |= (q1x4 >> 6) & u64x4_splat ((u64) 0xFFFF << 32);
  v |= (q1x4 << 18) & u64x4_splat ((u64) 0xFF << 48);
  v |= err4 = (q1x4 << 37) & u64x4_splat ((u64) 0xFF << 56);

  u64x4_store_unaligned (v, ptd->rx_vector + n_rxv);
  maybe_error |= !u64x4_is_all_zero (err4);
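The four shift-and-mask lines repack four descriptor quadwords in a single u64x4 register: reading the constants off, status lands in bits 0-17 of each result, length in 32-47, ptype in 48-55 and error in 56-63, which is the in-memory layout of one `avf_rx_vector_entry_t`. A scalar, runnable equivalent for a single descriptor (field positions inferred from the masks above; little-endian assumed, as on the x86 targets where this AVX2 path runs):

#include <stdint.h>
#include <string.h>
#include <stdio.h>

typedef struct
{
  uint32_t status;	/* from qword1 bits 0..17 */
  uint16_t length;	/* from qword1 bits 38..53 */
  uint8_t ptype;	/* from qword1 bits 30..37 */
  uint8_t error;	/* from qword1 bits 19..26 */
} demo_rx_entry_t;

static uint64_t
repack (uint64_t q1)
{
  uint64_t v = q1 & 0x3FFFFULL;			/* status -> bits 0..17  */
  v |= (q1 >> 6) & ((uint64_t) 0xFFFF << 32);	/* length -> bits 32..47 */
  v |= (q1 << 18) & ((uint64_t) 0xFF << 48);	/* ptype  -> bits 48..55 */
  v |= (q1 << 37) & ((uint64_t) 0xFF << 56);	/* error  -> bits 56..63 */
  return v;
}

int
main (void)
{
  /* DD and EOP set, length 64, ptype 1, no error */
  uint64_t q1 = 0x3ULL | (64ULL << 38) | (1ULL << 30);
  uint64_t v = repack (q1);
  demo_rx_entry_t e;
  memcpy (&e, &v, sizeof (e));	/* little-endian overlay */
  printf ("status 0x%x len %u ptype %u err %u\n",
	  e.status, e.length, e.ptype, e.error);
  return 0;
}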
/* scalar path: accumulate per-descriptor error flags */
maybe_error |= rxve->error;
/* device opened with IOVA (virtual) rather than physical DMA addresses */
if (ad->flags & AVF_DEVICE_F_IOVA)
n_rx_packets = n_rxv;
/* next node known up front (no feature arcs on the interface) */
n_rx_bytes = avf_process_rx_burst (vm, node, ...,
				   nexts, n_rxv, maybe_error, 1);

/* feature arcs enabled: next index must be resolved per packet */
n_rx_bytes = avf_process_rx_burst (vm, node, ...,
				   nexts, n_rxv, maybe_error, 0);
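Passing `known_next` as a compile-time constant into a `static_always_inline` function is a specialization trick: the compiler emits a copy of `avf_process_rx_burst` per call site, each with the untaken branch removed. A toy illustration of the same idea (names are illustrative):

#include <stdio.h>

static inline __attribute__ ((always_inline)) unsigned
process (unsigned *len, unsigned n, int known_next)
{
  unsigned bytes = 0;
  for (unsigned i = 0; i < n; i++)
    {
      if (known_next)
	bytes += len[i];	/* fast path: fixed next node for all packets */
      else
	bytes += len[i] + 1;	/* stand-in for the per-packet decision */
    }
  return bytes;
}

int
main (void)
{
  unsigned len[4] = { 64, 64, 1500, 9000 };
  /* constant flags: each call site gets a branch-free specialization */
  printf ("%u %u\n", process (len, 4, 1), process (len, 4, 0));
  return 0;
}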
/* packet trace loop: consume trace slots while traced packets remain */
u32 n_left = n_rx_packets;

while (n_trace && n_left)
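The loop header above is the standard vlib packet-trace pattern: fetch the remaining trace budget once, trace packets while both budget and packets remain, then write the leftover budget back. A sketch of the shape using the vlib calls this file relies on (compiles in-tree, not standalone; `bi`/`next` are the surrounding buffer-index and next-index cursors, and the trace-record fill is elided):

u32 n_trace, n_left = n_rx_packets;
if (PREDICT_FALSE ((n_trace = vlib_get_trace_count (vm, node))))
  {
    while (n_trace && n_left)
      {
	vlib_buffer_t *b = vlib_get_buffer (vm, bi[0]);
	vlib_trace_buffer (vm, node, next[0], b, /* follow_chain */ 0);
	/* vlib_add_trace (...) and fill in this node's trace record here */
	n_trace--;
	n_left--;
	bi++;
	next++;
      }
    vlib_set_trace_count (vm, node, n_trace);
  }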
/* poll only devices that are admin-up */
if ((ad->flags & AVF_DEVICE_F_ADMIN_UP) == 0)
VLIB_REGISTER_NODE (avf_input_node) = {
  ...
  .sibling_of = "device-input",
  ...
  .state = VLIB_NODE_STATE_DISABLED,
  ...
};
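`.sibling_of = "device-input"` makes the node share `device-input`'s next-node arcs, and input nodes register as `VLIB_NODE_STATE_DISABLED` so they only start polling once a device assigns them an Rx queue. A hedged sketch of a registration following the same pattern (node name and function are illustrative):

static uword
demo_input_fn (vlib_main_t * vm, vlib_node_runtime_t * node,
	       vlib_frame_t * frame)
{
  /* poll the device, enqueue received packets, return their count */
  return 0;
}

VLIB_REGISTER_NODE (demo_input_node) = {
  .function = demo_input_fn,
  .name = "demo-input",
  .sibling_of = "device-input",	     /* share device-input's next nodes */
  .type = VLIB_NODE_TYPE_INPUT,
  .state = VLIB_NODE_STATE_DISABLED, /* enabled per device/queue at runtime */
};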