26 #define foreach_rdma_input_error \ 27 _(BUFFER_ALLOC, "buffer alloc error") 31 #define _(f,s) RDMA_INPUT_ERROR_##f, 49 struct ibv_recv_wr wr[VLIB_FRAME_SIZE], *w = wr;
50 struct ibv_sge sge[VLIB_FRAME_SIZE], *s = sge;
97 w[0].next = &w[0] + 1;
101 w[1].next = &w[1] + 1;
102 w[1].sg_list = &s[1];
105 w[2].next = &w[2] + 1;
106 w[2].sg_list = &s[2];
109 w[3].next = &w[3] + 1;
110 w[3].sg_list = &s[3];
123 s[0].lkey = rd->
lkey;
125 w[0].next = &w[0] + 1;
126 w[0].sg_list = &s[0];
138 if (ibv_post_wq_recv (rxq->
wq, wr, &w) != 0)
159 while (n_trace && n_left)
204 u32 * next,
u32 * bi,
struct ibv_wc * wc,
208 u32 n_rx_bytes[4] = { 0 };
213 while (n_left_from >= 4)
239 n_rx_bytes[0] += wc[0].byte_len;
240 n_rx_bytes[1] += wc[1].byte_len;
241 n_rx_bytes[2] += wc[2].byte_len;
242 n_rx_bytes[3] += wc[3].byte_len;
251 while (n_left_from >= 1)
256 n_rx_bytes[0] += wc[0].byte_len;
265 return n_rx_bytes[0] + n_rx_bytes[1] + n_rx_bytes[2] + n_rx_bytes[3];
274 struct ibv_wc wc[VLIB_FRAME_SIZE];
276 u32 next_index, *to_next, n_left_to_next;
277 u32 n_rx_packets, n_rx_bytes;
283 n_rx_packets = ibv_poll_cq (rxq->
cq, VLIB_FRAME_SIZE, wc);
284 ASSERT (n_rx_packets <= rxq->tail - rxq->
head);
295 template_end) /
sizeof (
u64));
298 bt.buffer_pool_index = rd->
pool;
307 ASSERT (n_rx_packets <= n_left_to_next);
319 if (n_tail < n_rx_packets)
322 &wc[n_tail], n_rx_packets - n_tail, &bt);
327 rxq->
head += n_rx_packets;
362 .name =
"rdma-input",
364 .sibling_of =
"device-input",
367 .state = VLIB_NODE_STATE_DISABLED,
static u32 vlib_get_trace_count(vlib_main_t *vm, vlib_node_runtime_t *rt)
static void vlib_increment_combined_counter(vlib_combined_counter_main_t *cm, u32 thread_index, u32 index, u64 n_packets, u64 n_bytes)
Increment a combined counter.
vnet_main_t * vnet_get_main(void)
vnet_interface_main_t interface_main
#define VLIB_NODE_FLAG_TRACE_SUPPORTED
u16 current_length
Number of bytes between the current data pointer and the end of this buffer.
static vlib_frame_t * vlib_get_frame(vlib_main_t *vm, vlib_frame_t *f)
u32 per_interface_next_index
format_function_t format_rdma_input_trace
#define STRUCT_OFFSET_OF(t, f)
#define VLIB_NODE_FN(node)
static u32 vlib_buffer_alloc_to_ring_from_pool(vlib_main_t *vm, u32 *ring, u32 start, u32 ring_size, u32 n_buffers, u8 buffer_pool_index)
Allocate buffers into ring from specific buffer pool.
u8 buffer_pool_index
Index of the buffer pool this buffer belongs to.
static void vlib_trace_buffer(vlib_main_t *vm, vlib_node_runtime_t *r, u32 next_index, vlib_buffer_t *b, int follow_chain)
#define static_always_inline
#define ETH_INPUT_FRAME_F_SINGLE_SW_IF_IDX
vlib_combined_counter_main_t * combined_sw_if_counters
static_always_inline int vnet_device_input_have_features(u32 sw_if_index)
#define vlib_prefetch_buffer_header(b, type)
Prefetch buffer metadata.
#define vec_elt_at_index(v, i)
Get vector value at index i checking that i is in bounds.
#define vlib_get_new_next_frame(vm, node, next_index, vectors, n_vectors_left)
static vlib_next_frame_t * vlib_node_runtime_get_next_frame(vlib_main_t *vm, vlib_node_runtime_t *n, u32 next_index)
static void vlib_buffer_free_from_ring(vlib_main_t *vm, u32 *ring, u32 start, u32 ring_size, u32 n_buffers)
Free buffers from ring.
static_always_inline void vlib_buffer_copy_template(vlib_buffer_t *b, vlib_buffer_t *bt)
static_always_inline u32 vlib_buffer_get_default_data_size(vlib_main_t *vm)
#define VLIB_REGISTER_NODE(x,...)
#define CLIB_PREFETCH(addr, size, type)
static void * vlib_frame_scalar_args(vlib_frame_t *f)
Get pointer to frame scalar data.
void vlib_put_next_frame(vlib_main_t *vm, vlib_node_runtime_t *r, u32 next_index, u32 n_vectors_left)
Release pointer to next frame vector data.
vlib_main_t vlib_node_runtime_t * node
static uword is_pow2(uword x)
static void * vlib_add_trace(vlib_main_t *vm, vlib_node_runtime_t *r, vlib_buffer_t *b, u32 n_data_bytes)
#define foreach_device_and_queue(var, vec)
static_always_inline void vlib_buffer_copy_indices(u32 *dst, u32 *src, u32 n_indices)
VLIB buffer representation.
static_always_inline void clib_memset_u64(void *p, u64 val, uword count)
static_always_inline void vnet_feature_start_device_input_x1(u32 sw_if_index, u32 *next0, vlib_buffer_t *b0)
static uword vlib_buffer_get_va(vlib_buffer_t *b)
vlib_main_t vlib_node_runtime_t vlib_frame_t * frame
static void vlib_set_trace_count(vlib_main_t *vm, vlib_node_runtime_t *rt, u32 count)
static_always_inline void vlib_get_buffers(vlib_main_t *vm, u32 *bi, vlib_buffer_t **b, int count)
Translate array of buffer indices into buffer pointers.
#define CLIB_CACHE_LINE_BYTES
static vlib_buffer_t * vlib_get_buffer(vlib_main_t *vm, u32 buffer_index)
Translate buffer index into buffer pointer.