/* DPDK: after resizing vlib buffers, mirror the new sizes into the
   corresponding rte_mbuf metadata. */
for (i = 0; i < n_alloc; i++)
  {
    b = vlib_get_buffer (vm, buffers[i]);
    mb = rte_mbuf_from_vlib_buffer (b);

    new_data_len = (u16) ((i16) mb->data_len + delta);
    new_pkt_len = (u16) ((i16) mb->pkt_len + delta);

    mb->data_len = new_data_len;
    mb->pkt_len = new_pkt_len;
  }
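/* Aside (not part of the original file): the (u16) ((i16) x + delta) casts
   above apply a signed byte-count delta to DPDK's unsigned 16-bit length
   fields.  The same idiom as a hypothetical standalone helper, using the
   listing's own integer types: */

static inline u16
apply_len_delta (u16 len, i16 delta)
{
  /* Cast through i16 so a negative delta subtracts, then truncate back. */
  return (u16) ((i16) len + delta);
}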
static int
validate_buffer_data2 (vlib_buffer_t * b, pg_stream_t * s,
                       u32 data_offset, u32 n_bytes)
{
  u8 * bd, * pd, * pm;
  u32 i;

  bd = b->data;
  pd = s->fixed_packet_data + data_offset;
  pm = s->fixed_packet_data_mask + data_offset;
  /* ... */

  /* Mask bytes of zero are wildcards (per-packet edits); only the
     fixed bits must match the template. */
  for (i = 0; i < n_bytes; i++)
    if ((bd[i] & pm[i]) != pd[i])
      break;

  if (i >= n_bytes)
    return 1;
  /* ... else warn about the differing byte and return 0 ... */
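/* Aside (not part of the original file): the loop above is a masked
   compare.  A hypothetical standalone version of the same check: */

static inline int
masked_match (const u8 * data, const u8 * expect, const u8 * mask, u32 n)
{
  u32 i;
  for (i = 0; i < n; i++)
    if ((data[i] & mask[i]) != expect[i])
      return 0;    /* difference in a fixed (non-wildcard) bit */
  return 1;
}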
always_inline void
set_1 (void * a0, u64 v0, u64 v_min, u64 v_max,
       u32 n_bits, u32 is_net_byte_order)
{
  ASSERT (v0 >= v_min && v0 <= v_max);
  /* ... dispatch on n_bits; the u8 case stores directly, wider fields
     are byte swapped first when stored in network byte order: */
  if (is_net_byte_order)      /* n_bits == BITS (u16) */
    v0 = clib_host_to_net_u16 (v0);
  /* ... */
  if (is_net_byte_order)      /* n_bits == BITS (u32) */
    v0 = clib_host_to_net_u32 (v0);
  /* ... */
  if (is_net_byte_order)      /* n_bits == BITS (u64) */
    v0 = clib_host_to_net_u64 (v0);
  /* ... */
}
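/* Aside (not part of the original file): the u16 arm of set_1 boils down
   to an unaligned big-endian store.  Spelled out with byte writes in a
   hypothetical helper: */

static inline void
store_be16 (void * p, u16 v)
{
  u8 * a = p;
  a[0] = (u8) (v >> 8);    /* most significant byte first ... */
  a[1] = (u8) (v & 0xff);  /* ... which is network byte order */
}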
always_inline void
set_2 (void * a0, void * a1, u64 v0, u64 v1,
       u64 v_min, u64 v_max, u32 n_bits,
       u32 is_net_byte_order, u32 is_increment)
{
  ASSERT (v0 >= v_min && v0 <= v_max);
  ASSERT (v1 >= v_min && v1 <= (v_max + is_increment));
  /* ... same dispatch as set_1, two values at a time: */
  if (is_net_byte_order)      /* n_bits == BITS (u16) */
    {
      v0 = clib_host_to_net_u16 (v0);
      v1 = clib_host_to_net_u16 (v1);
    }
  /* ... */
  if (is_net_byte_order)      /* n_bits == BITS (u32) */
    {
      v0 = clib_host_to_net_u32 (v0);
      v1 = clib_host_to_net_u32 (v1);
    }
  /* ... */
  if (is_net_byte_order)      /* n_bits == BITS (u64) */
    {
      v0 = clib_host_to_net_u64 (v0);
      v1 = clib_host_to_net_u64 (v1);
    }
  /* ... */
}
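/* Aside (not part of the original file): why the v1 ASSERT above allows
   is_increment of slack.  With v_min = 0 and v_max = 255, the increment
   path may momentarily issue the pair (v0, v1) = (255, 256) before the
   caller rewrites it one value at a time with 256 wrapped back to 0;
   without the slack that intermediate state would assert. */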
static_always_inline void
do_set_fixed (pg_main_t * pg, pg_stream_t * s,
              u32 * buffers, u32 n_buffers,
              u32 n_bits, u32 byte_offset,
              u32 is_net_byte_order,
              u64 v_min, u64 v_max)
{
  /* ... */
  while (n_buffers >= 4)
    {
      /* ... load b0-b3, advance buffers and n_buffers by 2 ... */
      a0 = (void *) b0 + byte_offset;
      a1 = (void *) b1 + byte_offset;
      CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
      CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);

      set_2 (a0, a1, v_min, v_min,
             v_min, v_max,
             n_bits, is_net_byte_order,
             /* is_increment */ 0);
      /* ... */
    }

  while (n_buffers > 0)
    {
      /* ... load b0, advance by 1 ... */
      a0 = (void *) b0 + byte_offset;

      set_1 (a0, v_min, v_min, v_max,
             n_bits, is_net_byte_order);
      /* ... */
    }
}
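/* Aside (not part of the original file): the loop above is the usual vlib
   software-pipelining shape -- look at four buffers, prefetch the write
   targets in the third and fourth, write the first and second, advance by
   two.  The skeleton on its own, as a hypothetical helper: */

static inline void
quad_loop_skeleton (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
{
  while (n_buffers >= 4)
    {
      vlib_buffer_t * b0 = vlib_get_buffer (vm, buffers[0]);
      vlib_buffer_t * b1 = vlib_get_buffer (vm, buffers[1]);
      vlib_buffer_t * b2 = vlib_get_buffer (vm, buffers[2]);
      vlib_buffer_t * b3 = vlib_get_buffer (vm, buffers[3]);

      CLIB_PREFETCH (b2, CLIB_CACHE_LINE_BYTES, WRITE);  /* next pair */
      CLIB_PREFETCH (b3, CLIB_CACHE_LINE_BYTES, WRITE);

      /* ... write into b0 and b1 here ... */
      (void) b0; (void) b1;

      buffers += 2;
      n_buffers -= 2;
    }

  while (n_buffers > 0)    /* leftover singles */
    {
      vlib_buffer_t * b0 = vlib_get_buffer (vm, buffers[0]);
      (void) b0;
      buffers += 1;
      n_buffers -= 1;
    }
}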
static_always_inline u64
do_set_increment (pg_main_t * pg, pg_stream_t * s,
                  u32 * buffers, u32 n_buffers,
                  u32 n_bits, u32 byte_offset,
                  u32 is_net_byte_order,
                  /* ... */
                  u64 v_min, u64 v_max, u64 v)
{
  /* ... */
  ASSERT (v >= v_min && v <= v_max);

  while (n_buffers >= 4)
    {
      /* ... load b0-b3, advance by 2 ... */
      a0 = (void *) b0 + byte_offset;
      a1 = (void *) b1 + byte_offset;
      CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
      CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);

      /* ... v_old = v; v += 2; ... */
      v = v > v_max ? v_min : v;
      set_2 (a0, a1,
             v_old + 0, v_old + 1,
             v_min, v_max,
             n_bits, is_net_byte_order,
             /* is_increment */ 1);

      /* ... if v_old + 1 overshot v_max, rewrite the pair one value
         at a time with proper wrapping: ... */
      set_1 (a0, v + 0, v_min, v_max, n_bits, is_net_byte_order);
      /* ... */
      v = v > v_max ? v_min : v;
      set_1 (a1, v + 0, v_min, v_max, n_bits, is_net_byte_order);
      /* ... */
    }

  while (n_buffers > 0)
    {
      /* ... load b0, advance by 1 ... */
      a0 = (void *) b0 + byte_offset;

      /* ... v_old = v; v += 1; ... */
      v = v > v_max ? v_min : v;

      ASSERT (v_old >= v_min && v_old <= v_max);
      set_1 (a0, v_old, v_min, v_max, n_bits, is_net_byte_order);
      /* ... */
    }
  /* ... */
  return v;
}
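/* Aside (not part of the original file): an increment edit is just a
   counter wrapping from v_max back to v_min; as above, the next value is
   a compare-and-select rather than a modulo.  A hypothetical helper: */

static inline u64
increment_next (u64 v, u64 v_min, u64 v_max)
{
  v += 1;
  return v > v_max ? v_min : v;  /* no division needed */
}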
static_always_inline void
do_set_random (pg_main_t * pg, pg_stream_t * s,
               u32 * buffers, u32 n_buffers,
               u32 n_bits, u32 byte_offset,
               u32 is_net_byte_order,
               /* ... */
               u64 v_min, u64 v_max)
{
  /* ... */
  u64 v_diff = v_max - v_min + 1;
  /* ... r_mask, v0, v1; random_data comes from vm->random_buffer ... */

  while (n_buffers >= 4)
    {
      /* ... load b0-b3, advance by 2 ... */
      a0 = (void *) b0 + byte_offset;
      a1 = (void *) b1 + byte_offset;
      CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
      CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);

      /* Consume two width-n words of pre-generated random data: */
#define _(n)                            \
      case BITS (u##n):                 \
        {                               \
          u##n * r = random_data;       \
          v0 = r[0];                    \
          v1 = r[1];                    \
          random_data = r + 2;          \
        }                               \
        break;

      /* ... switch (n_bits) { _ (8); _ (16); _ (32); _ (64); } ... */
#undef _

      /* ... mask into range and add v_min ... */

      /* Fold values that overshot v_max back into range; twice covers
         the power-of-two overshoot: */
      v0 = v0 > v_max ? v0 - v_diff : v0;
      v1 = v1 > v_max ? v1 - v_diff : v1;
      v0 = v0 > v_max ? v0 - v_diff : v0;
      v1 = v1 > v_max ? v1 - v_diff : v1;

      set_2 (a0, a1, v0, v1,
             v_min, v_max,
             n_bits, is_net_byte_order,
             /* is_increment */ 0);
      /* ... */
    }

  while (n_buffers > 0)
    {
      /* ... load b0, advance by 1; one random word this time
         (u##n * r = random_data; ... random_data = r + 1;) ... */
      a0 = (void *) b0 + byte_offset;

      v0 = v0 > v_max ? v0 - v_diff : v0;
      v0 = v0 > v_max ? v0 - v_diff : v0;

      set_1 (a0, v0, v_min, v_max, n_bits, is_net_byte_order);
      /* ... */
    }
}
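/* Aside (not part of the original file): a sketch of the range reduction
   above, assuming a caller-supplied raw random word.  max_pow2 is the
   vppinfra helper assumed here to round up to the next power of two: */

static inline u64
random_in_range (u64 raw, u64 v_min, u64 v_max)
{
  u64 v_diff = v_max - v_min + 1;
  u64 r_mask = max_pow2 (v_diff) - 1;
  u64 v = v_min + (raw & r_mask);

  /* raw & r_mask < 2 * v_diff, so folding twice is always enough to land
     in [v_min, v_max] (not perfectly uniform unless v_diff is a power of
     two, mirroring the fold above). */
  v = v > v_max ? v - v_diff : v;
  v = v > v_max ? v - v_diff : v;
  return v;
}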
/* Shared field-insertion kernel: replace the masked bits of a network
   byte order word with v##i shifted into place. */
#define _(i,t)                                                      \
  clib_mem_unaligned (a##i, t) =                                    \
    clib_host_to_net_##t ((clib_net_to_host_mem_##t (a##i) &~ mask) \
                          | (v##i << shift))

always_inline void
setbits_1 (void * a0, u64 v0, u64 v_min, u64 v_max,
           u32 max_bits, u32 n_bits, u64 mask, u32 shift)
{
  ASSERT (v0 >= v_min && v0 <= v_max);
  if (max_bits == BITS (u8))
    ((u8 *) a0)[0] = (((u8 *) a0)[0] &~ mask) | (v0 << shift);

  else if (max_bits == BITS (u16))
    {
      _ (0, u16);
    }
  else if (max_bits == BITS (u32))
    {
      _ (0, u32);
    }
  else if (max_bits == BITS (u64))
    {
      _ (0, u64);
    }
}

always_inline void
setbits_2 (void * a0, void * a1, u64 v0, u64 v1,
           u64 v_min, u64 v_max, u32 max_bits, u32 n_bits,
           u64 mask, u32 shift, u32 is_increment)
{
  ASSERT (v0 >= v_min && v0 <= v_max);
  ASSERT (v1 >= v_min && v1 <= v_max + is_increment);
  if (max_bits == BITS (u8))
    {
      ((u8 *) a0)[0] = (((u8 *) a0)[0] &~ mask) | (v0 << shift);
      ((u8 *) a1)[0] = (((u8 *) a1)[0] &~ mask) | (v1 << shift);
    }

  else if (max_bits == BITS (u16))
    {
      _ (0, u16);
      _ (1, u16);
    }
  else if (max_bits == BITS (u32))
    {
      _ (0, u32);
      _ (1, u32);
    }
  else if (max_bits == BITS (u64))
    {
      _ (0, u64);
      _ (1, u64);
    }
}

#undef _
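/* Aside (not part of the original file): the _ (i, t) macro above is a
   read-modify-write on a network byte order word.  Its u16 instance,
   unrolled by hand as a hypothetical helper: */

static inline void
set_field_be16 (void * p, u64 mask, u32 shift, u64 v)
{
  u16 x = clib_net_to_host_u16 (clib_mem_unaligned (p, u16));
  x = (x & ~mask) | (v << shift);  /* replace only the field's bits */
  clib_mem_unaligned (p, u16) = clib_host_to_net_u16 (x);
}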
static_always_inline void
do_setbits_fixed (pg_main_t * pg, pg_stream_t * s,
                  u32 * buffers, u32 n_buffers,
                  u32 max_bits, u32 n_bits, u32 byte_offset,
                  u64 v_min, u64 v_max,
                  u64 mask, u32 shift)
{
  /* ... */
  while (n_buffers >= 4)
    {
      /* ... load b0-b3, advance by 2 ... */
      a0 = (void *) b0 + byte_offset;
      a1 = (void *) b1 + byte_offset;
      CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
      CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);

      setbits_2 (a0, a1, v_min, v_min, v_min, v_max,
                 max_bits, n_bits, mask, shift,
                 /* is_increment */ 0);
      /* ... */
    }

  while (n_buffers > 0)
    {
      /* ... load b0, advance by 1 ... */
      a0 = (void *) b0 + byte_offset;
      setbits_1 (a0, v_min, v_min, v_max, max_bits, n_bits, mask, shift);
      /* ... */
    }
}
static_always_inline u64
do_setbits_increment (pg_main_t * pg, pg_stream_t * s,
                      u32 * buffers, u32 n_buffers,
                      u32 max_bits, u32 n_bits, u32 byte_offset,
                      u64 v_min, u64 v_max, u64 v,
                      u64 mask, u32 shift)
{
  /* ... */
  ASSERT (v >= v_min && v <= v_max);

  while (n_buffers >= 4)
    {
      /* ... load b0-b3, advance by 2 ... */
      a0 = (void *) b0 + byte_offset;
      a1 = (void *) b1 + byte_offset;
      CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
      CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);

      /* ... v_old = v; v += 2; ... */
      v = v > v_max ? v_min : v;
      setbits_2 (a0, a1,
                 v_old + 0, v_old + 1,
                 v_min, v_max,
                 max_bits, n_bits, mask, shift,
                 /* is_increment */ 1);

      /* ... if v_old + 1 overshot v_max, rewrite one value at a time: ... */
      setbits_1 (a0, v + 0, v_min, v_max, max_bits, n_bits, mask, shift);
      /* ... */
      v = v > v_max ? v_min : v;
      setbits_1 (a1, v + 0, v_min, v_max, max_bits, n_bits, mask, shift);
      /* ... */
    }

  while (n_buffers > 0)
    {
      /* ... load b0, advance by 1 ... */
      a0 = (void *) b0 + byte_offset;

      /* ... v_old = v; v += 1; ... */
      v = v > v_max ? v_min : v;

      ASSERT (v_old >= v_min && v_old <= v_max);
      setbits_1 (a0, v_old, v_min, v_max, max_bits, n_bits, mask, shift);
      /* ... */
    }
  /* ... */
  return v;
}
static_always_inline void
do_setbits_random (pg_main_t * pg, pg_stream_t * s,
                   u32 * buffers, u32 n_buffers,
                   u32 max_bits, u32 n_bits, u32 byte_offset,
                   u64 v_min, u64 v_max,
                   u64 mask, u32 shift)
{
  /* ... */
  u64 v_diff = v_max - v_min + 1;
  /* ... */

  while (n_buffers >= 4)
    {
      /* ... load b0-b3, advance by 2 ... */
      a0 = (void *) b0 + byte_offset;
      a1 = (void *) b1 + byte_offset;
      CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
      CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);

      /* Same random-word consumption as do_set_random: */
#define _(n)                            \
      case BITS (u##n):                 \
        {                               \
          u##n * r = random_data;       \
          v0 = r[0];                    \
          v1 = r[1];                    \
          random_data = r + 2;          \
        }                               \
        break;

      /* ... switch over the word size: _ (8); _ (16); _ (32); _ (64); ... */
#undef _

      /* ... mask into range, add v_min, then fold back: ... */
      v0 = v0 > v_max ? v0 - v_diff : v0;
      v1 = v1 > v_max ? v1 - v_diff : v1;
      v0 = v0 > v_max ? v0 - v_diff : v0;
      v1 = v1 > v_max ? v1 - v_diff : v1;

      setbits_2 (a0, a1, v0, v1, v_min, v_max,
                 max_bits, n_bits, mask, shift,
                 /* is_increment */ 0);
      /* ... */
    }

  while (n_buffers > 0)
    {
      /* ... load b0, advance by 1; one random word this time
         (random_data = r + 1) ... */
      a0 = (void *) b0 + byte_offset;

      v0 = v0 > v_max ? v0 - v_diff : v0;
      v0 = v0 > v_max ? v0 - v_diff : v0;

      setbits_1 (a0, v0, v_min, v_max, max_bits, n_bits, mask, shift);
      /* ... */
    }
}
static_always_inline u64
do_it (pg_main_t * pg, pg_stream_t * s,
       u32 * buffers, u32 n_buffers,
       u32 lo_bit, u32 hi_bit,
       u64 v_min, u64 v_max, u64 v,
       pg_edit_type_t edit_type)
{
  u32 max_bits, l0, l1, h1, start_bit;
  /* ... */
  l0 = lo_bit / BITS (u8);
  l1 = lo_bit % BITS (u8);
  h1 = hi_bit % BITS (u8);
  start_bit = l0 * BITS (u8);

  max_bits = hi_bit - start_bit;
  /* ... */

#define _(n)                                                \
  /* ... */                                                 \
  if (edit_type == PG_EDIT_INCREMENT)                       \
    v = do_set_increment (pg, s, buffers, n_buffers,        \
                          BITS (u##n), l0,                  \
                          /* is_net_byte_order */ 1,        \
                          /* ... */                         \
                          v_min, v_max, v);                 \
  else if (edit_type == PG_EDIT_RANDOM)                     \
    do_set_random (pg, s, buffers, n_buffers,               \
                   BITS (u##n), l0,                         \
                   /* is_net_byte_order */ 1,               \
                   /* ... */                                \
                   v_min, v_max);                           \
  else /* PG_EDIT_FIXED */                                  \
    do_set_fixed (pg, s, buffers, n_buffers,                \
                  BITS (u##n), l0,                          \
                  /* is_net_byte_order */ 1,                \
                  v_min, v_max);                            \
  goto done;

  /* Byte-aligned, byte-sized edits take the fast path: */
  if (l1 == 0 && h1 == 0)
    {
      /* ... switch (max_bits) { _ (8); _ (16); _ (32); _ (64); } ... */
    }
#undef _

  /* Otherwise compute mask and shift for the masked setbits path: */
  {
    u64 mask;
    u32 shift = l1;
    u32 n_bits = max_bits;
    /* ... max_bits rounds up to the power-of-two container actually
       loaded ... */

    mask = ((u64) 1 << (u64) n_bits) - 1;
    mask &= ~(((u64) 1 << (u64) shift) - 1);

    mask <<= max_bits - n_bits;
    shift += max_bits - n_bits;
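    /* Aside (not part of the original file): a worked example of the mask
       arithmetic above.  Take n_bits = 12, shift = 4, and a 16-bit
       container (max_bits = 16):

         mask  = ((u64) 1 << 12) - 1      = 0x0fff
         mask &= ~(((u64) 1 << 4) - 1)    -> 0x0ff0
         mask <<= 16 - 12                 -> 0xff00
         shift += 16 - 12                 -> 8

       so the setbits helpers touch only bits 8..15 of the loaded word and
       leave the rest of the container intact. */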
#define _(n)                                                  \
    /* ... */                                                 \
    if (edit_type == PG_EDIT_INCREMENT)                       \
      v = do_setbits_increment (pg, s, buffers, n_buffers,    \
                                BITS (u##n), n_bits,          \
                                l0, v_min, v_max, v,          \
                                mask, shift);                 \
    else if (edit_type == PG_EDIT_RANDOM)                     \
      do_setbits_random (pg, s, buffers, n_buffers,           \
                         BITS (u##n), n_bits,                 \
                         l0, v_min, v_max,                    \
                         mask, shift);                        \
    else /* PG_EDIT_FIXED */                                  \
      do_setbits_fixed (pg, s, buffers, n_buffers,            \
                        BITS (u##n), n_bits,                  \
                        l0, v_min, v_max,                     \
                        mask, shift);                         \
    goto done;

    /* ... switch (max_bits) { _ (8); _ (16); _ (32); _ (64); } ... */
#undef _
  }

 done:
  return v;
}

static void
pg_generate_set_lengths (pg_main_t * pg,
                         pg_stream_t * s, u32 * buffers, u32 n_buffers)
{
  u64 v_min, v_max, length_sum;
  /* ... choose lengths via the stream's packet_size_edit_type ... */

  /* Fixed-length case: every packet is v_min bytes. */
  length_sum = v_min * n_buffers;
  /* ... */
}
  /* Buffers left over after sizing packet chains are collected and then
     freed in bulk, without following next pointers: */
  static u32 * unused_buffers = 0;

  while (n_buffers > 0)
    {
      /* ... distribute each packet's length over its buffer chain ... */
      if (n_bytes_left > 0)
        {
          /* ... keep the buffer, consume its bytes ... */
        }
      /* ... else collect the buffer in unused_buffers ... */
    }

  ASSERT (n_bytes_left == 0);
  /* ... */

  if (vec_len (unused_buffers) > 0)
    {
      vlib_buffer_free_no_next (vm, unused_buffers, vec_len (unused_buffers));
      _vec_len (unused_buffers) = 0;
    }
      lo_bit = hi_bit - e->n_bits;

      /* ... */
        = do_it (pg, s, buffers, n_buffers, lo_bit, hi_bit, v_min, v_max,
                 /* ... */);
  /* Chain buffers: give each buffer its successor's index and mark it
     with VLIB_BUFFER_NEXT_PRESENT. */
  while (n_buffers >= 4)
    {
      /* ... load b0, b1; prefetch buffers[2] and buffers[3] ... */
      ni0 = next_buffers[0];
      ni1 = next_buffers[1];
      /* ... b0->next_buffer = ni0; b1->next_buffer = ni1;
         advance by 2 ... */
    }

  while (n_buffers > 0)
    {
      /* ... */
      ni0 = next_buffers[0];
      /* ... b0->next_buffer = ni0; advance by 1 ... */
    }
  u32 n_left, * b, i, l;
  /* ... i indexes s->replay_packet_templates; l is its vec_len ... */

      /* Clip the copy to what template d0 has left past data_offset: */
      if (data_offset + n_data >= vec_len (d0))
        n0 = vec_len (d0) > data_offset ? vec_len (d0) - data_offset : 0;
      /* ... copy n0 bytes of d0 into the buffer ... */

      /* Advance to the next replay template, wrapping at the end: */
      i = i + 1 == l ? 0 : i + 1;
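/* Aside (not part of the original file): the index advance above is a
   division-free round robin over the template vector, as a hypothetical
   helper: */

static inline u32
round_robin_next (u32 i, u32 l)
{
  return i + 1 == l ? 0 : i + 1;  /* equivalent to (i + 1) % l for i < l */
}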
  if (is_start_of_packet)
    {
      /* ... */
    }

  /* ... allocate n_alloc buffers from the stream's free list ... */
  if (n_allocated == 0)
    return 0;

  /* The allocator may return fewer buffers than requested; continue
     with what we actually got. */
  n_alloc = n_allocated;

  /* Buffer data must be (re)initialized unless DPDK buffer recycling
     is in effect: */
  if (DPDK == 0 || CLIB_DEBUG > 0
      || (s->flags & PG_STREAM_FLAGS_DISABLE_BUFFER_RECYCLE))
    {
      /* ... */
    }
  /* ... */
  ASSERT (next_buffers == 0);
  /* ... */

  if (is_start_of_packet)
    {
      /* ... set packet lengths, then apply the stream's edits ... */
    }

  for (i = 0; i < n_alloc; i++)
    {
      /* ... */
    }
static u32
pg_stream_fill (pg_main_t * pg, pg_stream_t * s, u32 n_buffers)
{
  pg_buffer_index_t * bi;
  word i, n_in_fifo, n_alloc, n_free, n_added;
  u32 * tail, * start, * end, * last_tail, * last_start;
  /* ... */

  n_in_fifo = clib_fifo_elts (bi->buffer_fifo);
  if (n_in_fifo >= n_buffers)
    return n_in_fifo;

  n_alloc = n_buffers - n_in_fifo;
  /* ... round the allocation up; clamp to the stream's packet limit ... */

  last_tail = last_start = 0;
  /* ... fill each buffer level, last to first, so packets can chain to
     the level filled just before: ... */

      n_free = clib_fifo_free_elts (bi->buffer_fifo);
      if (n_free < n_alloc)
        clib_fifo_resize (bi->buffer_fifo, n_alloc - n_free);
      /* ... tail/start/end point into the fifo ... */

      if (tail + n_alloc <= end)
        {
          /* ... fill one contiguous stretch at tail ... */
        }
      else
        {
          /* Fifo wraps: fill up to end, then continue from start. */
          /* ... n = end - tail; fill n buffers at tail ... */
          if (n_added == n && n_alloc > n_added)
            n_added += pg_stream_fill_helper
              (pg, s, bi, start, last_start, n_alloc - n_added);
        }
      /* ... */

#if DPDK == 1
  /* Debug: generated buffers must hold the only reference to their mbufs. */
      struct rte_mbuf * mb;

      b = vlib_get_buffer (vm, bi0[0]);
      mb = rte_mbuf_from_vlib_buffer (b);
      ASSERT (rte_mbuf_refcnt_read (mb) == 1);
#endif

  return n_in_fifo + n_added;
}
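/* Aside (not part of the original file): the wrap handling above in
   isolation -- when the fifo's linear space before end is too small, fill
   up to end first, then continue from start, bailing out if the first
   chunk came up short.  A sketch with a hypothetical fill callback: */

static inline u32
fill_wrapping (u32 * tail, u32 * start, u32 * end, u32 n_alloc,
               u32 (* fill) (u32 * where, u32 n))
{
  u32 n, n_added;

  if (tail + n_alloc <= end)
    return fill (tail, n_alloc);    /* contiguous case */

  n = end - tail;
  n_added = fill (tail, n);         /* first chunk, up to end */
  if (n_added == n && n_alloc > n_added)
    n_added += fill (start, n_alloc - n_added);
  return n_added;
}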
  u32 * b, n_left, stream_index, next_index;
  /* ... */

  stream_index = s - pg->streams;
static uword
pg_generate_packets (vlib_node_runtime_t * node,
                     pg_main_t * pg, pg_stream_t * s,
                     uword n_packets_to_generate)
{
  /* ... */
  u32 * to_next, n_this_frame, n_left, n_trace, n_packets_in_fifo;
  uword n_packets_generated;
  /* ... */

  n_packets_in_fifo = pg_stream_fill (pg, s, n_packets_to_generate);
  n_packets_to_generate = clib_min (n_packets_in_fifo, n_packets_to_generate);
  n_packets_generated = 0;

  while (n_packets_to_generate > 0)
    {
      u32 * head, * start, * end;

      /* ... claim a frame on the stream's next node ... */
      n_this_frame = n_packets_to_generate;
      if (n_this_frame > n_left)
        n_this_frame = n_left;

      /* ... */
      if (head + n_this_frame <= end)
        {
          /* ... copy n_this_frame buffer indices from head ... */
        }
      else
        {
          /* ... fifo wraps: copy the tail piece, then from start ... */
        }
      /* ... advance all buffer fifos; add traces if n_trace > 0 ... */

      n_packets_to_generate -= n_this_frame;
      n_packets_generated += n_this_frame;
      n_left -= n_this_frame;
      /* ... hand the unused frame space back ... */
    }

  return n_packets_generated;
}
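/* Aside (not part of the original file): the enqueue loop above follows
   the standard vlib input-node recipe -- claim frame space on the next
   node, clamp the burst to it, and return what is left.  A stripped-down
   sketch with a hypothetical flat src array instead of the buffer fifo: */

static inline uword
enqueue_burst (vlib_main_t * vm, vlib_node_runtime_t * node,
               u32 next_index, u32 * src, uword n_wanted)
{
  uword n_done = 0;

  while (n_wanted > 0)
    {
      u32 * to_next, n_left, n_this;

      vlib_get_next_frame (vm, node, next_index, to_next, n_left);
      n_this = clib_min (n_wanted, n_left);
      vlib_copy_buffers (to_next, src + n_done, n_this);
      n_left -= n_this;
      vlib_put_next_frame (vm, node, next_index, n_left);

      n_done += n_this;
      n_wanted -= n_this;
    }
  return n_done;
}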
static uword
pg_input (vlib_main_t * vm, vlib_node_runtime_t * node, vlib_frame_t * frame)
{
  uword i;
  pg_main_t * pg = &pg_main;
  uword n_packets = 0;

  clib_bitmap_foreach (i, pg->enabled_streams, ({
    n_packets += pg_input_stream (node, pg, vec_elt_at_index (pg->streams, i));
  }));

  return n_packets;
}

VLIB_REGISTER_NODE (pg_input_node) = {
  .function = pg_input,
  .name = "pg-input",
  .type = VLIB_NODE_TYPE_INPUT,

  /* The node stays disabled until a stream is enabled. */
  .state = VLIB_NODE_STATE_DISABLED,
};