30 return d->
qword[1] & 0x0f;
35 u32 * buffers,
u32 n_packets,
int use_va_dma)
40 u16 *slot, n_desc_left, n_packets_left = n_packets;
54 while (n_packets_left && n_desc_left)
57 if (n_packets_left < 8 || n_desc_left < 4)
72 if (or_flags & VLIB_BUFFER_NEXT_PRESENT)
92 d[0].
qword[1] = ((
u64) b[0]->current_length) << 34 | bits;
93 d[1].
qword[1] = ((
u64) b[1]->current_length) << 34 | bits;
94 d[2].
qword[1] = ((
u64) b[2]->current_length) << 34 | bits;
95 d[3].
qword[1] = ((
u64) b[3]->current_length) << 34 | bits;
106 txq->
bufs[next] = buffers[0];
110 if (b[0]->
flags & VLIB_BUFFER_NEXT_PRESENT)
116 while (b0->
flags & VLIB_BUFFER_NEXT_PRESENT)
127 AVF_TX_ERROR_SEGMENT_SIZE_EXCEEDED, 1);
140 while (b[0]->
flags & VLIB_BUFFER_NEXT_PRESENT)
147 d[0].
qword[1] = (((
u64) b[0]->current_length) << 34) |
165 d[0].
qword[1] = (((
u64) b[0]->current_length) << 34) | bits;
181 d = txq->
descs + (next & mask);
186 while (n_packets_left && n_desc_left)
188 txq->
bufs[next & mask] = buffers[0];
192 if (b[0]->
flags & VLIB_BUFFER_NEXT_PRESENT)
197 while (b0->
flags & VLIB_BUFFER_NEXT_PRESENT)
208 AVF_TX_ERROR_SEGMENT_SIZE_EXCEEDED, 1);
217 while (b[0]->
flags & VLIB_BUFFER_NEXT_PRESENT)
224 d[0].
qword[1] = (((
u64) b[0]->current_length) << 34) |
230 d = txq->
descs + (next & mask);
242 d[0].
qword[1] = (((
u64) b[0]->current_length) << 34) | bits;
249 d = txq->
descs + (next & mask);
255 u16 rs_slot = slot[0] = (next - 1) & mask;
256 d = txq->
descs + rs_slot;
263 return n_packets - n_packets_left;
274 u8 qid = thread_index;
282 n_left = frame->n_vectors;
288 i32 complete_slot = -1;
299 complete_slot = slot[0];
304 if (complete_slot >= 0)
307 mask = txq->
size - 1;
309 n_free = (complete_slot + 1 -
first) & mask;
317 if (ad->flags & AVF_DEVICE_F_VA_DMA)
333 AVF_TX_ERROR_NO_FREE_SLOTS, n_left);
338 return frame->n_vectors - n_left;
u32 flags
buffer flags: VLIB_BUFFER_FREE_LIST_INDEX_MASK: bits used to store free list index, VLIB_BUFFER_IS_TRACED: trace this buffer.
static uword vlib_buffer_get_current_pa(vlib_main_t *vm, vlib_buffer_t *b)
static uword vlib_buffer_get_current_va(vlib_buffer_t *b)
static void vlib_buffer_free(vlib_main_t *vm, u32 *buffers, u32 n_buffers)
Free buffers. Frees the entire buffer chain for each buffer.
static void vlib_error_count(vlib_main_t *vm, uword node_index, uword counter, uword increment)
static_always_inline void clib_spinlock_unlock_if_init(clib_spinlock_t *p)
#define static_always_inline
#define vlib_prefetch_buffer_with_index(vm, bi, type)
Prefetch buffer metadata by buffer index. The first 64 bytes of the buffer contain most of the header information...
#define vec_elt_at_index(v, i)
Get vector value at index i checking that i is in bounds.
VNET_DEVICE_CLASS_TX_FN() avf_device_class(vlib_main_t *vm, vlib_node_runtime_t *node, vlib_frame_t *frame)
#define VNET_DEVICE_CLASS_TX_FN(devclass)
static heap_elt_t * first(heap_header_t *h)
#define pool_elt_at_index(p, i)
Returns pointer to element at given index.
#define clib_ring_deq(ring)
#define clib_ring_get_first(ring)
u32 node_index
Node index.
static_always_inline u16 avf_tx_enqueue(vlib_main_t *vm, vlib_node_runtime_t *node, avf_txq_t *txq, u32 *buffers, u32 n_packets, int use_va_dma)
#define clib_ring_enq(ring)
static_always_inline u8 avf_tx_desc_get_dtyp(avf_tx_desc_t *d)
static_always_inline void vlib_buffer_copy_indices(u32 *dst, u32 *src, u32 n_indices)
u32 next_buffer
Next buffer for this linked-list of buffers.
VLIB buffer representation.
static void * vlib_frame_vector_args(vlib_frame_t *f)
Get pointer to frame vector data.
#define CLIB_MEMORY_BARRIER()
static void vlib_buffer_free_one(vlib_main_t *vm, u32 buffer_index)
Free one buffer. Shorthand to free a single buffer chain.
static void vlib_buffer_free_from_ring_no_next(vlib_main_t *vm, u32 *ring, u32 start, u32 ring_size, u32 n_buffers)
Free buffers from ring without freeing tail buffers.
static_always_inline void clib_spinlock_lock_if_init(clib_spinlock_t *p)
static vlib_buffer_t * vlib_get_buffer(vlib_main_t *vm, u32 buffer_index)
Translate buffer index into buffer pointer.