u32 data_offset, u32 n_bytes)
for (i = 0; i < n_bytes; i++)
  if ((bd[i] & pm[i]) != pd[i])
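The loop above checks buffer bytes against the stream's fixed packet data: each byte is AND'ed with the fixed-data mask (pm) and compared to the expected fixed bytes (pd), so only the bits the stream declares fixed are validated. A minimal standalone sketch of that masked comparison in plain C (the wrapper name and driver are made up, not the pg API):

#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

/* Hypothetical standalone helper: returns 0 as soon as a masked byte of
   the buffer data (bd) differs from the expected fixed data (pd). */
static int
validate_masked_bytes (const uint8_t *bd, const uint8_t *pm,
                       const uint8_t *pd, size_t n_bytes)
{
  for (size_t i = 0; i < n_bytes; i++)
    if ((bd[i] & pm[i]) != pd[i])
      return 0;                 /* a fixed bit changed: validation fails */
  return 1;
}

int
main (void)
{
  uint8_t bd[2] = { 0x45, 0xab };   /* bytes as found in the buffer            */
  uint8_t pm[2] = { 0xf0, 0x00 };   /* only the high nibble of byte 0 is fixed */
  uint8_t pd[2] = { 0x40, 0x00 };   /* expected value of the fixed bits        */
  printf ("%s\n", validate_masked_bytes (bd, pm, pd, 2) ? "ok" : "mismatch");
  return 0;
}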
ASSERT (v0 >= v_min && v0 <= v_max);
if (is_net_byte_order)
  v0 = clib_host_to_net_u16 (v0);
if (is_net_byte_order)
  v0 = clib_host_to_net_u32 (v0);
if (is_net_byte_order)
  v0 = clib_host_to_net_u64 (v0);
u32 n_bits, u32 is_net_byte_order, u32 is_increment)
ASSERT (v0 >= v_min && v0 <= v_max);
ASSERT (v1 >= v_min && v1 <= (v_max + is_increment));
if (is_net_byte_order)
  {
    v0 = clib_host_to_net_u16 (v0);
    v1 = clib_host_to_net_u16 (v1);
  }
if (is_net_byte_order)
  {
    v0 = clib_host_to_net_u32 (v0);
    v1 = clib_host_to_net_u32 (v1);
  }
if (is_net_byte_order)
  {
    v0 = clib_host_to_net_u64 (v0);
    v1 = clib_host_to_net_u64 (v1);
  }
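set_1 and set_2 store a 16-, 32-, or 64-bit value at the edit's byte offset, converting to network byte order first when the field is big-endian on the wire. A minimal standalone sketch of the same idea in plain C, using POSIX htons/htonl and memcpy for the unaligned store (write_field is a made-up name, not the pg API):

#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <arpa/inet.h>   /* htons / htonl */

/* Hypothetical helper mirroring the set_1 idea: write an n_bits-wide value
   at an arbitrary byte offset, optionally converting to network byte order
   first.  memcpy keeps the unaligned store portable. */
static void
write_field (void *a0, uint64_t v0, unsigned n_bits, int is_net_byte_order)
{
  if (n_bits == 16)
    {
      uint16_t v = (uint16_t) v0;
      if (is_net_byte_order)
        v = htons (v);
      memcpy (a0, &v, sizeof (v));
    }
  else if (n_bits == 32)
    {
      uint32_t v = (uint32_t) v0;
      if (is_net_byte_order)
        v = htonl (v);
      memcpy (a0, &v, sizeof (v));
    }
}

int
main (void)
{
  uint8_t pkt[64] = { 0 };
  write_field (pkt + 2, 0x0800, 16, 1 /* network byte order */);
  printf ("%02x %02x\n", pkt[2], pkt[3]);   /* prints "08 00" on any host */
  return 0;
}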
u32 byte_offset, u32 is_net_byte_order, u64 v_min, u64 v_max)
while (n_buffers >= 4)
a0 = (void *) b0 + byte_offset;
a1 = (void *) b1 + byte_offset;
CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
set_2 (a0, a1, v_min, v_min, v_min, v_max, n_bits, is_net_byte_order,
while (n_buffers > 0)
a0 = (void *) b0 + byte_offset;
set_1 (a0, v_min, v_min, v_max, n_bits, is_net_byte_order);
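do_set_fixed processes buffers four at a time: it edits two, prefetches the write targets of the next two with CLIB_PREFETCH, and lets a scalar tail loop finish the remainder. A standalone sketch of that pattern, substituting GCC/Clang's __builtin_prefetch for CLIB_PREFETCH and an invented process_one() edit:

#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

/* Illustrative per-buffer edit: just writes one byte at the target. */
static void
process_one (uint8_t *a)
{
  a[0] = 0xab;
}

/* Sketch of the 4-at-a-time pattern above (plain C, not the pg code):
   edit buffers 0 and 1, prefetch the write targets of buffers 2 and 3,
   then drain whatever is left one buffer at a time. */
static void
process_all (uint8_t **bufs, size_t n_buffers, size_t byte_offset)
{
  while (n_buffers >= 4)
    {
      uint8_t *a0 = bufs[0] + byte_offset;
      uint8_t *a1 = bufs[1] + byte_offset;
      __builtin_prefetch (bufs[2] + byte_offset, 1 /* write */);
      __builtin_prefetch (bufs[3] + byte_offset, 1 /* write */);
      process_one (a0);
      process_one (a1);
      bufs += 2;
      n_buffers -= 2;
    }
  while (n_buffers > 0)
    {
      process_one (bufs[0] + byte_offset);
      bufs += 1;
      n_buffers -= 1;
    }
}

int
main (void)
{
  uint8_t storage[5][8] = { { 0 } };
  uint8_t *bufs[5] = { storage[0], storage[1], storage[2],
                       storage[3], storage[4] };
  process_all (bufs, 5, 3);
  printf ("%02x %02x\n", storage[0][3], storage[4][3]);   /* ab ab */
  return 0;
}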
u32 is_net_byte_order,
ASSERT (v >= v_min && v <= v_max);
while (n_buffers >= 4)
a0 = (void *) b0 + byte_offset;
a1 = (void *) b1 + byte_offset;
CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
v = v > v_max ? v_min : v;
v_old + 0, v_old + 1, v_min, v_max, n_bits, is_net_byte_order,
sum += 2 * v_old + 1;
sum -= 2 * v_old + 1;
set_1 (a0, v + 0, v_min, v_max, n_bits, is_net_byte_order);
v = v > v_max ? v_min : v;
set_1 (a1, v + 0, v_min, v_max, n_bits, is_net_byte_order);
while (n_buffers > 0)
a0 = (void *) b0 + byte_offset;
v = v > v_max ? v_min : v;
ASSERT (v_old >= v_min && v_old <= v_max);
set_1 (a0, v_old, v_min, v_max, n_bits, is_net_byte_order);
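The increment edits walk a value from v_min up to v_max and then wrap back to v_min, which is what the repeated "v = v > v_max ? v_min : v;" checks do (the sum += / sum -= lines keep a running total that is used when the edited field feeds a length sum). A tiny standalone illustration of the wrap behaviour:

#include <stdint.h>
#include <stdio.h>

/* Standalone sketch of the increment edit: values walk from v_min to
   v_max and wrap back to v_min, exactly like the
   "v = v > v_max ? v_min : v;" checks above. */
int
main (void)
{
  uint64_t v_min = 10, v_max = 13, v = v_min;
  for (int i = 0; i < 10; i++)
    {
      printf ("%llu ", (unsigned long long) v);
      v += 1;
      v = v > v_max ? v_min : v;   /* wrap after v_max */
    }
  printf ("\n");   /* prints: 10 11 12 13 10 11 12 13 10 11 */
  return 0;
}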
u32 is_net_byte_order,
u64 v_diff = v_max - v_min + 1;
while (n_buffers >= 4)
a0 = (void *) b0 + byte_offset;
a1 = (void *) b1 + byte_offset;
CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
u##n * r = random_data; \
random_data = r + 2; \
v0 = v0 > v_max ? v0 - v_diff : v0;
v1 = v1 > v_max ? v1 - v_diff : v1;
v0 = v0 > v_max ? v0 - v_diff : v0;
v1 = v1 > v_max ? v1 - v_diff : v1;
set_2 (a0, a1, v0, v1, v_min, v_max, n_bits, is_net_byte_order,
while (n_buffers > 0)
a0 = (void *) b0 + byte_offset;
u##n * r = random_data; \
random_data = r + 1; \
v0 = v0 > v_max ? v0 - v_diff : v0;
v0 = v0 > v_max ? v0 - v_diff : v0;
set_1 (a0, v0, v_min, v_max, n_bits, is_net_byte_order);
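The random edits pull raw words from a pre-filled random buffer (clib_random_buffer_get_data elsewhere in the file) and fold them into [v_min, v_max] with conditional subtractions of v_diff instead of a modulo. A standalone sketch of that folding, using rand() purely as a stand-in and assuming the raw value has already been masked to the next power of two covering the range; like the original approach, this stays in range but is not perfectly uniform unless v_diff is a power of two:

#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>

int
main (void)
{
  uint64_t v_min = 100, v_max = 109;
  uint64_t v_diff = v_max - v_min + 1;          /* 10 possible values      */
  uint64_t pow2_mask = 15;                      /* next power of two - 1   */

  for (int i = 0; i < 8; i++)
    {
      uint64_t v0 = ((uint64_t) rand ()) & pow2_mask;  /* 0..15           */
      v0 += v_min;                                     /* 100..115        */
      v0 = v0 > v_max ? v0 - v_diff : v0;              /* fold into range */
      printf ("%llu\n", (unsigned long long) v0);
    }
  return 0;
}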
clib_mem_unaligned (a##i, t) = \
  clib_host_to_net_##t ((clib_net_to_host_mem_##t (a##i) &~ mask) \
ASSERT (v0 >= v_min && v0 <= v_max);
if (max_bits == BITS (u8))
  ((u8 *) a0)[0] = (((u8 *) a0)[0] & ~mask) | (v0 << shift);
else if (max_bits == BITS (u16))
else if (max_bits == BITS (u32))
else if (max_bits == BITS (u64))
ASSERT (v0 >= v_min && v0 <= v_max);
ASSERT (v1 >= v_min && v1 <= v_max + is_increment);
if (max_bits == BITS (u8))
  {
    ((u8 *) a0)[0] = (((u8 *) a0)[0] & ~mask) | (v0 << shift);
    ((u8 *) a1)[0] = (((u8 *) a1)[0] & ~mask) | (v1 << shift);
  }
else if (max_bits == BITS (u16))
else if (max_bits == BITS (u32))
else if (max_bits == BITS (u64))
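setbits_1 and setbits_2 perform a read-modify-write: the field's bits are cleared with ~mask and the new value is OR'd in at the right shift, leaving neighbouring bits untouched. A minimal standalone sketch of the single-byte (BITS (u8)) branch:

#include <stdint.h>
#include <stdio.h>

/* Standalone sketch of the setbits_1 read-modify-write: clear the field
   bits with ~mask, then OR in the new value shifted into position.
   Here the field lives in a plain byte, like the BITS (u8) branch. */
static void
set_bit_field_u8 (uint8_t *a0, uint8_t v0, uint8_t mask, unsigned shift)
{
  a0[0] = (a0[0] & ~mask) | (uint8_t) (v0 << shift);
}

int
main (void)
{
  uint8_t byte = 0xff;
  /* Write the value 0x2 into a 3-bit field at bit offset 4 (mask 0x70). */
  set_bit_field_u8 (&byte, 0x2, 0x70, 4);
  printf ("0x%02x\n", byte);   /* 0xaf: bits 6..4 replaced by 010 */
  return 0;
}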
while (n_buffers >= 4)
a0 = (void *) b0 + byte_offset;
a1 = (void *) b1 + byte_offset;
CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
v_min, v_min, v_min, v_max, max_bits, n_bits, mask, shift,
while (n_buffers > 0)
a0 = (void *) b0 + byte_offset;
setbits_1 (a0, v_min, v_min, v_max, max_bits, n_bits, mask, shift);
ASSERT (v >= v_min && v <= v_max);
while (n_buffers >= 4)
a0 = (void *) b0 + byte_offset;
a1 = (void *) b1 + byte_offset;
CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
v = v > v_max ? v_min : v;
v_old + 0, v_old + 1,
v_min, v_max, max_bits, n_bits, mask, shift,
setbits_1 (a0, v + 0, v_min, v_max, max_bits, n_bits, mask, shift);
v = v > v_max ? v_min : v;
setbits_1 (a1, v + 0, v_min, v_max, max_bits, n_bits, mask, shift);
while (n_buffers > 0)
a0 = (void *) b0 + byte_offset;
v = v > v_max ? v_min : v;
ASSERT (v_old >= v_min && v_old <= v_max);
setbits_1 (a0, v_old, v_min, v_max, max_bits, n_bits, mask, shift);
u64 v_diff = v_max - v_min + 1;
while (n_buffers >= 4)
a0 = (void *) b0 + byte_offset;
a1 = (void *) b1 + byte_offset;
CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
u##n * r = random_data; \
random_data = r + 2; \
v0 = v0 > v_max ? v0 - v_diff : v0;
v1 = v1 > v_max ? v1 - v_diff : v1;
v0 = v0 > v_max ? v0 - v_diff : v0;
v1 = v1 > v_max ? v1 - v_diff : v1;
setbits_2 (a0, a1, v0, v1, v_min, v_max, max_bits, n_bits, mask, shift,
while (n_buffers > 0)
a0 = (void *) b0 + byte_offset;
u##n * r = random_data; \
random_data = r + 1; \
v0 = v0 > v_max ? v0 - v_diff : v0;
v0 = v0 > v_max ? v0 - v_diff : v0;
setbits_1 (a0, v0, v_min, v_max, max_bits, n_bits, mask, shift);
u32 max_bits, l0, l1, h1, start_bit;
start_bit = l0 * BITS (u8);
max_bits = hi_bit - start_bit;
if (edit_type == PG_EDIT_INCREMENT) \
  v = do_set_increment (pg, s, buffers, n_buffers, \
else if (edit_type == PG_EDIT_RANDOM) \
  do_set_random (pg, s, buffers, n_buffers, \
else \
  do_set_fixed (pg, s, buffers, n_buffers, \
if (l1 == 0 && h1 == 0)
u32 n_bits = max_bits;
mask = ((u64) 1 << (u64) n_bits) - 1;
mask &= ~(((u64) 1 << (u64) shift) - 1);
mask <<= max_bits - n_bits;
shift += max_bits - n_bits;
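For fields that are not byte-aligned, the code builds a mask of n_bits ones, clears the low shift bits that belong to the neighbouring field, and then slides both mask and shift up by max_bits - n_bits so the span sits inside the word that will actually be loaded and stored. A rough standalone rerun of that arithmetic with made-up numbers; this is my reading of the fragment, so treat it as illustrative only:

#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  unsigned max_bits = 16, n_bits = 13, shift = 3;

  uint64_t mask = ((uint64_t) 1 << n_bits) - 1;       /* 0x1fff */
  mask &= ~(((uint64_t) 1 << shift) - 1);             /* 0x1ff8 */
  mask <<= max_bits - n_bits;                         /* 0xffc0 */
  shift += max_bits - n_bits;                         /* 6      */

  printf ("mask = 0x%llx, shift = %u\n",
          (unsigned long long) mask, shift);
  return 0;
}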
if (edit_type == PG_EDIT_INCREMENT) \
  v = do_setbits_increment (pg, s, buffers, n_buffers, \
                            BITS (u##n), n_bits, \
                            l0, v_min, v_max, v, \
else if (edit_type == PG_EDIT_RANDOM) \
  do_setbits_random (pg, s, buffers, n_buffers, \
                     BITS (u##n), n_bits, \
else \
  do_setbits_fixed (pg, s, buffers, n_buffers, \
                    BITS (u##n), n_bits, \
u64 v_min, v_max, length_sum;
length_sum = v_min * n_buffers;
u32 * buffers, u32 n_buffers)
static u32 *unused_buffers = 0;
while (n_buffers > 0)
if (n_bytes_left > 0)
  b->flags |= VLIB_BUFFER_NEXT_PRESENT;
else
  b->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
ASSERT (n_bytes_left == 0);
if (vec_len (unused_buffers) > 0)
_vec_len (unused_buffers) = 0;
lo_bit = hi_bit - e->n_bits;
= do_it (pg, s, buffers, n_buffers, lo_bit, hi_bit, v_min, v_max,
u32 * buffers, u32 * next_buffers, u32 n_buffers)
while (n_buffers >= 4)
ni0 = next_buffers[0];
ni1 = next_buffers[1];
b0->flags |= VLIB_BUFFER_NEXT_PRESENT;
b1->flags |= VLIB_BUFFER_NEXT_PRESENT;
while (n_buffers > 0)
ni0 = next_buffers[0];
b0->flags |= VLIB_BUFFER_NEXT_PRESENT;
u32 n_buffers, u32 data_offset, u32 n_data, u32 set_data)
u32 * buffers, u32 * next_buffers, u32 n_alloc)
if (n_allocated == 0)
n_alloc = n_allocated;
if (is_start_of_packet)
u32 buffer_alloc_request = 0;
u32 buffer_alloc_result;
u32 current_buffer_index;
buffer_alloc_request += (vec_len (d0) + (buf_sz - 1)) / buf_sz;
i = ((i + 1) == l) ? 0 : i + 1;
ASSERT (buffer_alloc_request > 0);
if (buffer_alloc_result < buffer_alloc_request)
clib_warning ("alloc failure, got %d not %d", buffer_alloc_result,
              buffer_alloc_request);
current_buffer_index = 0;
u32 bytes_to_copy, bytes_this_chunk;
while (bytes_to_copy)
bytes_this_chunk = clib_min (bytes_to_copy, buf_sz);
not_last = bytes_this_chunk < bytes_to_copy;
b->flags |= VLIB_BUFFER_NEXT_PRESENT;
b->next_buffer = buffers[current_buffer_index + 1];
bytes_to_copy -= bytes_this_chunk;
data_offset += bytes_this_chunk;
current_buffer_index++;
for (i = 0; i < n_alloc; i++)
word i, n_in_fifo, n_alloc, n_free, n_added;
u32 *tail, *start, *end, *last_tail, *last_start;
if (n_in_fifo >= n_buffers)
n_alloc = n_buffers - n_in_fifo;
last_tail = last_start = 0;
if (n_free < n_alloc)
if (tail + n_alloc <= end)
if (n_added == n && n_alloc > n_added)
(pg, s, bi, start, last_start, n_alloc - n_added);
return n_in_fifo + n_added;
u32 * buffers, u32 n_buffers)
sizeof (b0[0]) - sizeof (b0->pre_data));
sizeof (b1[0]) - sizeof (b1->pre_data));
sizeof (b0[0]) - sizeof (b0->pre_data));
u32 packet_data_size)
for (int i = 0; i < n_buffers; i++)
u16 ethertype = clib_net_to_host_u16 (eh->type);
ethertype = clib_net_to_host_u16 (vlan->type);
l2hdr_sz += sizeof (*vlan);
if (ethertype == ETHERNET_TYPE_VLAN)
  {
    ethertype = clib_net_to_host_u16 (vlan->type);
    l2hdr_sz += sizeof (*vlan);
  }
(VNET_BUFFER_F_IS_IP4 | VNET_BUFFER_F_OFFLOAD_IP_CKSUM);
b0->flags |= (VNET_BUFFER_F_L2_HDR_OFFSET_VALID
              | VNET_BUFFER_F_L3_HDR_OFFSET_VALID
              | VNET_BUFFER_F_L4_HDR_OFFSET_VALID);
else if (PREDICT_TRUE (ethertype == ETHERNET_TYPE_IP6))
(VNET_BUFFER_F_IS_IP6 | VNET_BUFFER_F_L2_HDR_OFFSET_VALID |
 VNET_BUFFER_F_L3_HDR_OFFSET_VALID | VNET_BUFFER_F_L4_HDR_OFFSET_VALID);
if (l4_proto == IP_PROTOCOL_TCP)
  b0->flags |= (VNET_BUFFER_F_OFFLOAD_TCP_CKSUM | VNET_BUFFER_F_GSO);
(b0)->l4_hdr_offset);
else if (l4_proto == IP_PROTOCOL_UDP)
  b0->flags |= VNET_BUFFER_F_OFFLOAD_UDP_CKSUM;
(b0)->l4_hdr_offset);
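The two branches above flag IPv4/IPv6 packets for checksum (and TCP GSO) offload after recording the L2/L3/L4 header offsets. The VLAN walk that computes the L2 header size can be shown standalone; the sketch below uses plain C and POSIX ntohs rather than the vnet structures, and the packet bytes are made up for illustration:

#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <arpa/inet.h>

#define ETHERTYPE_VLAN 0x8100

/* Standalone sketch of the VLAN walk: start after the 14-byte Ethernet
   header, and for each 802.1Q tag add 4 bytes and re-read the inner
   ethertype, handling up to two tags. */
int
main (void)
{
  /* dst mac, src mac, 0x8100 tag (vlan 5), inner type 0x0800 (IPv4) */
  uint8_t pkt[] = {
    0, 1, 2, 3, 4, 5,  6, 7, 8, 9, 10, 11,
    0x81, 0x00, 0x00, 0x05, 0x08, 0x00
  };
  unsigned l2hdr_sz = 14;
  uint16_t ethertype;

  memcpy (&ethertype, pkt + 12, 2);
  ethertype = ntohs (ethertype);
  for (int tags = 0; tags < 2 && ethertype == ETHERTYPE_VLAN; tags++)
    {
      memcpy (&ethertype, pkt + l2hdr_sz + 2, 2);  /* inner ethertype */
      ethertype = ntohs (ethertype);
      l2hdr_sz += 4;                               /* 802.1Q tag size */
    }
  printf ("ethertype 0x%04x, l2 header %u bytes\n", ethertype, l2hdr_sz);
  return 0;
}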
u32 *to_next, n_this_frame, n_left, n_trace, n_packets_in_fifo;
uword n_packets_generated;
u32 current_config_index = ~(u32) 0;
n_packets_in_fifo = pg_stream_fill (pg, s, n_packets_to_generate);
n_packets_to_generate = clib_min (n_packets_in_fifo, n_packets_to_generate);
n_packets_generated = 0;
current_config_index =
while (n_packets_to_generate > 0)
n_this_frame = n_packets_to_generate;
if (n_this_frame > n_left)
  n_this_frame = n_left;
if (head + n_this_frame <= end)
if (current_config_index != ~(u32) 0)
  for (i = 0; i < n_this_frame; i++)
vnet_buffer (b)->feature_arc_index = feature_arc_index;
n_packets_to_generate -= n_this_frame;
n_packets_generated += n_this_frame;
n_left -= n_this_frame;
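n_this_frame is clamped to both the packets left to generate and the space left in the output frame, and the copy out of the per-stream buffer FIFO distinguishes the contiguous case (head + n_this_frame <= end) from the wrap-around case. A small standalone sketch of that two-segment ring copy, with hypothetical names:

#include <stdint.h>
#include <string.h>
#include <stdio.h>

/* Standalone sketch of the FIFO copy above: take n items starting at
   'head' in a circular array; if they fit before the end copy once,
   otherwise copy the tail piece and wrap to the start of the ring. */
static void
fifo_copy (const uint32_t *ring, unsigned ring_len, unsigned head,
           unsigned n, uint32_t *out)
{
  if (head + n <= ring_len)
    memcpy (out, ring + head, n * sizeof (out[0]));
  else
    {
      unsigned n0 = ring_len - head;                         /* up to end */
      memcpy (out, ring + head, n0 * sizeof (out[0]));
      memcpy (out + n0, ring, (n - n0) * sizeof (out[0]));   /* wrapped   */
    }
}

int
main (void)
{
  uint32_t ring[8] = { 0, 1, 2, 3, 4, 5, 6, 7 };
  uint32_t out[5];
  fifo_copy (ring, 8, 6, 5, out);        /* wraps: 6 7 0 1 2 */
  for (int i = 0; i < 5; i++)
    printf ("%u ", out[i]);
  printf ("\n");
  return 0;
}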
for (i = 0; i < n_this_frame; i++)
ASSERT ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0 ||
return n_packets_generated;
uword n_packets = 0;
u32 worker_index = 0;
pg_stream_t *s = vec_elt_at_index (pg->streams, i);
n_packets += pg_input_stream (node, pg, s);
.sibling_of = "device-input",
.state = VLIB_NODE_STATE_DISABLED,