#define foreach_af_xdp_input_error \
  _(POLL_REQUIRED, "poll required") \
  _(POLL_FAILURES, "poll failures")

#define _(f,s) AF_XDP_INPUT_ERROR_##f,

/* af_xdp_device_input_trace(): per-packet tracing helper (fragments) */
  ... u32 n_left, const u32 * bi, u32 next_index, ...

  while (n_trace && n_left)
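The _() helper above is expanded more than once in the usual VPP error-counter pattern: once to build an enum of error codes and once to build the matching string table, so the two can never get out of sync. A minimal, self-contained sketch of that expansion; the af_xdp_input_error_t and af_xdp_input_error_strings names follow the common convention and are assumptions here, not taken from this file:

#include <stdio.h>

#define foreach_af_xdp_input_error \
  _(POLL_REQUIRED, "poll required") \
  _(POLL_FAILURES, "poll failures")

/* first expansion: one enum member per error */
typedef enum
{
#define _(f, s) AF_XDP_INPUT_ERROR_##f,
  foreach_af_xdp_input_error
#undef _
  AF_XDP_INPUT_N_ERROR, /* number of error counters */
} af_xdp_input_error_t;

/* second expansion: human-readable strings, in the same order */
static char *af_xdp_input_error_strings[] = {
#define _(f, s) s,
  foreach_af_xdp_input_error
#undef _
};

int
main (void)
{
  /* the enum value indexes directly into the string table */
  printf ("%s\n", af_xdp_input_error_strings[AF_XDP_INPUT_ERROR_POLL_FAILURES]);
  return 0;
}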
/* af_xdp_device_input_refill_db(): publish new fill-ring entries, then kick the kernel if it asked for a wakeup */
  xsk_ring_prod__submit (&rxq->fq, n_alloc);

  if (!xsk_ring_prod__needs_wakeup (&rxq->fq))
    return;
  ...
  struct pollfd fd = { .fd = rxq->xsk_fd, .events = POLLIN };
  ret = poll (&fd, 1, 0);
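The fragments above implement the AF_XDP need-wakeup doorbell: after descriptors are published to the fill ring, the kernel is woken with a zero-timeout poll() on the socket fd, but only when it explicitly requested a wakeup. A standalone sketch of the same pattern against the libxdp/libbpf xsk helper API; the header name depends on the library version (<xdp/xsk.h> for libxdp, <bpf/xsk.h> for older libbpf) and fill_ring_doorbell, fq and xsk_fd are illustrative names:

#include <poll.h>
#include <xdp/xsk.h> /* or <bpf/xsk.h> with older libbpf */

/* Publish n newly filled descriptors and kick the kernel only if needed.
 * fq and xsk_fd are assumed to come from the caller's xsk_socket__create()
 * setup; returns the poll() result, 0 when no wakeup was required. */
static int
fill_ring_doorbell (struct xsk_ring_prod *fq, int xsk_fd, __u32 n)
{
  xsk_ring_prod__submit (fq, n);

  if (!xsk_ring_prod__needs_wakeup (fq))
    return 0; /* kernel side is already running, no syscall needed */

  struct pollfd pfd = { .fd = xsk_fd, .events = POLLIN };
  return poll (&pfd, 1, 0); /* zero timeout: just a wakeup, not a wait */
}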
/* af_xdp_device_input_refill(): replenish the fill ring with freshly allocated vlib buffers */
  u32 n_alloc, n, n_wrap;
  ...
  /* never ask for more slots than the fill ring has free */
  n_alloc = xsk_prod_nb_free (&rxq->fq, 16);
  ...
  n = xsk_ring_prod__reserve (&rxq->fq, n_alloc, &idx);
  ...
  fill = xsk_ring_prod__fill_addr (&rxq->fq, idx);
  /* write n entries up to the end of the ring, then n_wrap entries from slot 0 */
  n = clib_min (n_alloc, size - (idx & mask));
  n_wrap = n_alloc - n;
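The refill path reserves a run of fill-ring slots, then writes UMEM addresses in bulk through the raw pointer returned by xsk_ring_prod__fill_addr(), which is why it must split the copy at the end of the ring (the n / n_wrap computation above). A simpler, non-vectorized sketch can instead let xsk_ring_prod__fill_addr() mask the index on every slot, trading one call per entry for not having to handle the wrap explicitly; fill_ring_refill, addrs and count are illustrative names:

#include <xdp/xsk.h> /* or <bpf/xsk.h> with older libbpf */

/* Enqueue up to 'count' UMEM addresses on the fill ring.
 * xsk_ring_prod__reserve() is all-or-nothing, so this returns either
 * 'count' or 0 (ring too full). */
static __u32
fill_ring_refill (struct xsk_ring_prod *fq, const __u64 *addrs, __u32 count)
{
  __u32 idx = 0;
  __u32 n = xsk_ring_prod__reserve (fq, count, &idx);

  /* fill_addr() masks the index internally, so wrap-around is handled
   * per slot here instead of splitting the copy as the driver does */
  for (__u32 i = 0; i < n; i++)
    *xsk_ring_prod__fill_addr (fq, idx + i) = addrs[i];

  xsk_ring_prod__submit (fq, n);
  return n;
}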
/* buffer index -> UMEM address; copy mode keeps headroom for the buffer metadata */
#define bi2addr(bi) \
  (((bi) << CLIB_LOG2_CACHE_LINE_BYTES) + (copy ? sizeof(vlib_buffer_t) : 0))

#ifdef CLIB_HAVE_VEC256
  /* convert 8 buffer indices per iteration using 256-bit vectors */
  u64x4 b0 = u64x4_from_u32x4 (*(u32x4u *) (bi + 0));
  u64x4 b1 = u64x4_from_u32x4 (*(u32x4u *) (bi + 4));
  *(u64x4u *) (fill + 0) = bi2addr (b0);
  *(u64x4u *) (fill + 4) = bi2addr (b1);
  ...
#endif
  ...
  /* the ring wrapped: continue filling from slot 0 */
  fill = xsk_ring_prod__fill_addr (&rxq->fq, 0);
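With 256-bit vectors available, the driver converts buffer indices to UMEM addresses eight at a time: each u32 index is zero-extended to u64, shifted left by the log2 cache-line size, and offset by the copy-mode headroom. A sketch of the same transform in raw AVX2 intrinsics; the 64-byte cache line (shift of 6) and the 128-byte headroom are assumed values for illustration:

#include <immintrin.h>
#include <stdint.h>
#include <stdio.h>

#define LOG2_CACHE_LINE_BYTES 6 /* assume 64-byte cache lines */

/* widen 4 u32 buffer indices to 4 u64 UMEM addresses: (bi << 6) + headroom */
static void
indices_to_addrs_x4 (const uint32_t *bi, uint64_t *fill, uint64_t headroom)
{
  __m128i idx4 = _mm_loadu_si128 ((const __m128i *) bi);
  __m256i addr4 = _mm256_cvtepu32_epi64 (idx4);
  addr4 = _mm256_slli_epi64 (addr4, LOG2_CACHE_LINE_BYTES);
  addr4 = _mm256_add_epi64 (addr4, _mm256_set1_epi64x ((long long) headroom));
  _mm256_storeu_si256 ((__m256i *) fill, addr4);
}

int
main (void) /* build with -mavx2 */
{
  uint32_t bi[4] = { 1, 2, 3, 4 };
  uint64_t fill[4];
  indices_to_addrs_x4 (bi, fill, 128); /* 128: assumed copy-mode headroom */
  for (int i = 0; i < 4; i++)
    printf ("addr[%d] = %llu\n", i, (unsigned long long) fill[i]);
  return 0;
}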
/* af_xdp_device_input_ethernet(): tags the ethernet-input frame (fragment) */
  ... const u32 hw_if_index)

/* af_xdp_device_input_bufs(): turn received rx descriptors back into buffer indices */
  u32 n = n_rx, *bi = bis, bytes = 0;

/* UMEM address -> buffer index; exact inverse of bi2addr() above */
#define addr2bi(addr) \
  (((addr) - (copy ? sizeof(vlib_buffer_t) : 0)) >> CLIB_LOG2_CACHE_LINE_BYTES)
  ...
      const struct xdp_desc *desc = xsk_ring_cons__rx_desc (&rxq->rx, idx);
      bi[0] = addr2bi (xsk_umem__extract_addr (desc->addr));
      ...
      idx = (idx + 1) & mask;
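bi2addr() and addr2bi() have to be exact inverses so that a buffer handed to the kernel through the fill ring comes back from the rx ring as the same vlib buffer index, in both copy and zero-copy mode. A small self-contained check of that round trip, with CLIB_LOG2_CACHE_LINE_BYTES and sizeof(vlib_buffer_t) replaced by assumed constants:

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define LOG2_CACHE_LINE_BYTES 6   /* assumed */
#define BUFFER_HDR_SIZE       128 /* stand-in for sizeof(vlib_buffer_t) */

static uint64_t
bi2addr (uint32_t bi, int copy)
{
  return ((uint64_t) bi << LOG2_CACHE_LINE_BYTES) + (copy ? BUFFER_HDR_SIZE : 0);
}

static uint32_t
addr2bi (uint64_t addr, int copy)
{
  return (uint32_t) ((addr - (copy ? BUFFER_HDR_SIZE : 0)) >> LOG2_CACHE_LINE_BYTES);
}

int
main (void)
{
  for (int copy = 0; copy <= 1; copy++)
    for (uint32_t bi = 0; bi < 1000000; bi += 7)
      assert (addr2bi (bi2addr (bi, copy), copy) == bi);
  printf ("round trip ok for both copy and zero-copy modes\n");
  return 0;
}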
  xsk_ring_cons__release (&rxq->rx, n_rx);
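On the consumer side the usual xsk sequence is peek, read the descriptors, then release the same number of entries back to the kernel, which is what the release call above completes. A compact sketch of that sequence; rx_ring_drain, umem_base and handle_packet are illustrative names, and aligned-chunk mode is assumed:

#include <xdp/xsk.h> /* or <bpf/xsk.h> with older libbpf */

/* Drain up to 'budget' packets from the rx ring; handle_packet() is a
 * caller-provided hook and umem_base is the mmapped UMEM area. */
static __u32
rx_ring_drain (struct xsk_ring_cons *rx, void *umem_base, __u32 budget,
               void (*handle_packet) (void *data, __u32 len))
{
  __u32 idx = 0;
  __u32 n = xsk_ring_cons__peek (rx, budget, &idx);

  for (__u32 i = 0; i < n; i++)
    {
      const struct xdp_desc *desc = xsk_ring_cons__rx_desc (rx, idx + i);
      /* aligned-chunk mode: desc->addr is the data offset into the UMEM;
       * in unaligned mode, apply xsk_umem__add_offset_to_addr() first */
      handle_packet (xsk_umem__get_data (umem_base, desc->addr), desc->len);
    }

  xsk_ring_cons__release (rx, n);
  return n;
}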
/* af_xdp_device_input_inline(): per-queue receive path (fragments) */
  ... u16 qid, const int copy)

  u32 next_index, *to_next, n_left_to_next;
  u32 n_rx_packets, n_rx_bytes;
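The locals above (next_index, to_next, n_left_to_next and the packet/byte totals) drive the standard input-node hand-off: received buffer indices are copied into a frame for the next node and the per-interface rx counter is bumped. A shape sketch using the frame and counter helpers listed further below; it assumes a VPP plugin build, that n_rx fits in one frame, and the enqueue_and_count name is illustrative:

#include <vlib/vlib.h>
#include <vnet/vnet.h>

/* Sketch: enqueue 'n_rx' already-filled buffer indices to 'next_index' and
 * account them on sw_if_index; assumes n_rx <= VLIB_FRAME_SIZE. */
static void
enqueue_and_count (vlib_main_t *vm, vlib_node_runtime_t *node, u32 next_index,
                   const u32 *bi, u32 n_rx, u32 n_rx_bytes, u32 sw_if_index)
{
  u32 *to_next, n_left_to_next;

  vlib_get_new_next_frame (vm, node, next_index, to_next, n_left_to_next);
  clib_memcpy_fast (to_next, bi, n_rx * sizeof (u32));
  vlib_put_next_frame (vm, node, next_index, n_left_to_next - n_rx);

  vlib_increment_combined_counter (
    vnet_get_main ()->interface_main.combined_sw_if_counters +
      VNET_INTERFACE_COUNTER_RX,
    vm->thread_index, sw_if_index, n_rx, n_rx_bytes);
}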
  /* skip interfaces that are not admin-up */
  if ((ad->flags & AF_XDP_DEVICE_F_ADMIN_UP) == 0)
/* node registration (fragment) */
  .name = "af_xdp-input",
  .sibling_of = "device-input",
  ...
  .state = VLIB_NODE_STATE_DISABLED,
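A complete registration for this kind of input node typically also names the trace formatter and marks the node as trace-capable. A hedged sketch that would build inside the plugin; only .name, .sibling_of and .state are taken from the fragment above, the remaining fields are the conventional choices for a VPP input node and are assumptions here:

#include <vlib/vlib.h>

format_function_t format_af_xdp_input_trace; /* provided elsewhere in the plugin */

VLIB_REGISTER_NODE (af_xdp_input_node) = {
  .name = "af_xdp-input",
  .sibling_of = "device-input",
  .format_trace = format_af_xdp_input_trace,
  .type = VLIB_NODE_TYPE_INPUT,
  /* created disabled; enabled per device queue when the interface comes up */
  .state = VLIB_NODE_STATE_DISABLED,
  .flags = VLIB_NODE_FLAG_TRACE_SUPPORTED,
};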
Referenced symbols and data fields:

static u32 vlib_get_trace_count(vlib_main_t *vm, vlib_node_runtime_t *rt)
static void vlib_increment_combined_counter(vlib_combined_counter_main_t *cm, u32 thread_index, u32 index, u64 n_packets, u64 n_bytes)
Increment a combined counter.
vnet_main_t * vnet_get_main(void)
vnet_interface_main_t interface_main
u32 per_interface_next_index
static void vlib_error_count(vlib_main_t *vm, uword node_index, uword counter, uword increment)
#define VLIB_NODE_FLAG_TRACE_SUPPORTED
u16 current_length
Nbytes between current data and the end of this buffer.
static vlib_frame_t * vlib_get_frame(vlib_main_t *vm, vlib_frame_t *f)
#define af_xdp_device_error(dev, fmt,...)
#define VLIB_NODE_FN(node)
static vlib_buffer_known_state_t vlib_buffer_is_known(vlib_main_t *vm, u32 buffer_index)
af_xdp_device_t * devices
vlib_buffer_t * buffer_template
#define static_always_inline
#define ETH_INPUT_FRAME_F_SINGLE_SW_IF_IDX
af_xdp_main_t af_xdp_main
vlib_combined_counter_main_t * combined_sw_if_counters
static_always_inline int vnet_device_input_have_features(u32 sw_if_index)
#define vlib_prefetch_buffer_header(b, type)
Prefetch buffer metadata.
#define vec_elt_at_index(v, i)
Get vector value at index i checking that i is in bounds.
#define vlib_get_new_next_frame(vm, node, next_index, vectors, n_vectors_left)
static vlib_next_frame_t * vlib_node_runtime_get_next_frame(vlib_main_t *vm, vlib_node_runtime_t *n, u32 next_index)
format_function_t format_af_xdp_input_trace
static_always_inline void vlib_buffer_copy_template(vlib_buffer_t *b, vlib_buffer_t *bt)
static __clib_warn_unused_result int vlib_trace_buffer(vlib_main_t *vm, vlib_node_runtime_t *r, u32 next_index, vlib_buffer_t *b, int follow_chain)
u32 node_index
Node index.
#define VLIB_REGISTER_NODE(x,...)
static void * vlib_frame_scalar_args(vlib_frame_t *f)
Get pointer to frame scalar data.
void vlib_put_next_frame(vlib_main_t *vm, vlib_node_runtime_t *r, u32 next_index, u32 n_vectors_left)
Release pointer to next frame vector data.
vlib_node_runtime_t * node
#define foreach_device_and_queue(var, vec)
#define STATIC_ASSERT(truth,...)
vlib_frame_t * frame
vlib_buffer_t
VLIB buffer representation.
static_always_inline void vnet_feature_start_device_input_x1(u32 sw_if_index, u32 *next0, vlib_buffer_t *b0)
void * vlib_add_trace(vlib_main_t *vm, vlib_node_runtime_t *r, vlib_buffer_t *b, u32 n_data_bytes)
static void vlib_set_trace_count(vlib_main_t *vm, vlib_node_runtime_t *rt, u32 count)
static_always_inline void vlib_get_buffers(vlib_main_t *vm, u32 *bi, vlib_buffer_t **b, int count)
Translate array of buffer indices into buffer pointers.
static vlib_buffer_t * vlib_get_buffer(vlib_main_t *vm, u32 buffer_index)
Translate buffer index into buffer pointer.
vl_api_interface_index_t sw_if_index
static __clib_warn_unused_result u32 vlib_buffer_alloc_from_pool(vlib_main_t *vm, u32 *buffers, u32 n_buffers, u8 buffer_pool_index)
Allocate buffers from specific pool into supplied array.
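Several of the buffer helpers listed above are normally used together on the refill path: allocate a batch of buffer indices from the device's pool, then translate them into pointers for initialization. A hedged sketch that builds only inside a VPP plugin; grab_buffers and the pool parameter are illustrative:

#include <vlib/vlib.h>

/* Allocate up to 'want' buffers from buffer pool 'pool' and translate the
 * returned indices into vlib_buffer_t pointers. */
static u32
grab_buffers (vlib_main_t *vm, u8 pool, u32 *bis, vlib_buffer_t **bufs, u32 want)
{
  u32 got = vlib_buffer_alloc_from_pool (vm, bis, want, pool);
  vlib_get_buffers (vm, bis, bufs, got);
  return got; /* may be fewer than 'want' if the pool is running low */
}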