30 struct ibv_wc wc[VLIB_FRAME_SIZE];
35 n_free = ibv_poll_cq (txq->
cq, VLIB_FRAME_SIZE, wc);
39 for (i = 0; i < n_free; i++)
40 to_free[i] = wc[i].wr_id;
55 u32 *from, *f, n_left_from;
56 u32 n_tx_packets, n_tx_failed;
58 struct ibv_send_wr wr[VLIB_FRAME_SIZE], *w = wr;
59 struct ibv_sge sge[VLIB_FRAME_SIZE], *s = sge;
63 n_left_from = frame->n_vectors;
66 memset (w, 0, n_left_from *
sizeof (w[0]));
68 while (n_left_from >= 4)
87 s[0].lkey = rd->mr->lkey;
91 s[1].lkey = rd->mr->lkey;
95 s[2].lkey = rd->mr->lkey;
99 s[3].lkey = rd->mr->lkey;
102 w[0].next = &w[0] + 1;
103 w[0].sg_list = &s[0];
105 w[0].opcode = IBV_WR_SEND;
108 w[1].next = &w[1] + 1;
109 w[1].sg_list = &s[1];
111 w[1].opcode = IBV_WR_SEND;
114 w[2].next = &w[2] + 1;
115 w[2].sg_list = &s[2];
117 w[2].opcode = IBV_WR_SEND;
120 w[3].next = &w[3] + 1;
121 w[3].sg_list = &s[3];
123 w[3].opcode = IBV_WR_SEND;
132 while (n_left_from >= 1)
136 s[0].lkey = rd->mr->lkey;
139 w[0].next = &w[0] + 1;
140 w[0].sg_list = &s[0];
142 w[0].opcode = IBV_WR_SEND;
155 for (i = 0; i < 5; i++)
158 if (0 == ibv_post_send (txq->
qp, w, &w))
163 n_tx_packets = w == wr ? frame->n_vectors : w - wr;
164 n_tx_failed = frame->n_vectors - n_tx_packets;
170 RDMA_TX_ERROR_NO_FREE_SLOTS, n_tx_failed);
static uword vlib_buffer_get_current_va(vlib_buffer_t *b)
static void vlib_buffer_free(vlib_main_t *vm, u32 *buffers, u32 n_buffers)
Free buffers. Frees the entire buffer chain for each buffer.
static void vlib_error_count(vlib_main_t *vm, uword node_index, uword counter, uword increment)
static_always_inline void rdma_device_output_free(vlib_main_t *vm, rdma_txq_t *txq)
static_always_inline void clib_spinlock_unlock_if_init(clib_spinlock_t *p)
u16 current_length
Number of bytes between current data and the end of this buffer.
#define static_always_inline
#define vlib_prefetch_buffer_header(b, type)
Prefetch buffer metadata.
#define vec_elt_at_index(v, i)
Get vector value at index i checking that i is in bounds.
#define VNET_DEVICE_CLASS_TX_FN(devclass)
#define pool_elt_at_index(p, i)
Returns pointer to element at given index.
#define CLIB_PREFETCH(addr, size, type)
VNET_DEVICE_CLASS_TX_FN() rdma_device_class(vlib_main_t *vm, vlib_node_runtime_t *node, vlib_frame_t *frame)
#define vec_len(v)
Number of elements in vector (rvalue-only, NULL tolerant)
VLIB buffer representation.
static void * vlib_frame_vector_args(vlib_frame_t *f)
Get pointer to frame vector data.
static_always_inline void vlib_get_buffers(vlib_main_t *vm, u32 *bi, vlib_buffer_t **b, int count)
Translate array of buffer indices into buffer pointers.
#define CLIB_CACHE_LINE_BYTES
static_always_inline void clib_spinlock_lock_if_init(clib_spinlock_t *p)