FD.io VPP  v19.04.4-rc0-5-ge88582fac
Vector Packet Processing
input.c
1 /*
2  * Copyright (c) 2015 Cisco and/or its affiliates.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at:
6  *
7  * http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 /*
16  * pg_input.c: buffer generator input
17  *
18  * Copyright (c) 2008 Eliot Dresselhaus
19  *
20  * Permission is hereby granted, free of charge, to any person obtaining
21  * a copy of this software and associated documentation files (the
22  * "Software"), to deal in the Software without restriction, including
23  * without limitation the rights to use, copy, modify, merge, publish,
24  * distribute, sublicense, and/or sell copies of the Software, and to
25  * permit persons to whom the Software is furnished to do so, subject to
26  * the following conditions:
27  *
28  * The above copyright notice and this permission notice shall be
29  * included in all copies or substantial portions of the Software.
30  *
31  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32  * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34  * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35  * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36  * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37  * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
38  */
39 
40  /*
41  * To be honest, the packet generator needs an extreme
42  * makeover. Two key assumptions which drove the current implementation
43  * are no longer true. First, that buffer managers implement a
44  * post-TX recycle list. Second, that packet generator performance
45  * is first-order important.
46  */
47 
48 #include <vlib/vlib.h>
49 #include <vnet/pg/pg.h>
50 #include <vnet/vnet.h>
51 #include <vnet/ethernet/ethernet.h>
52 #include <vnet/feature/feature.h>
53 #include <vnet/devices/devices.h>
54 
55 static int
56 validate_buffer_data2 (vlib_buffer_t * b, pg_stream_t * s,
57  u32 data_offset, u32 n_bytes)
58 {
59  u8 *bd, *pd, *pm;
60  u32 i;
61 
62  bd = b->data;
63  pd = s->fixed_packet_data + data_offset;
64  pm = s->fixed_packet_data_mask + data_offset;
65 
66  if (pd + n_bytes >= vec_end (s->fixed_packet_data))
67  n_bytes = (pd < vec_end (s->fixed_packet_data)
68  ? vec_end (s->fixed_packet_data) - pd : 0);
69 
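 /* A buffer matches when every byte, under the stream's fixed-data mask,
    equals the corresponding fixed packet data byte; the first differing
    index is reported by the warnings below. */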
70  for (i = 0; i < n_bytes; i++)
71  if ((bd[i] & pm[i]) != pd[i])
72  break;
73 
74  if (i >= n_bytes)
75  return 1;
76 
77  clib_warning ("buffer %U", format_vnet_buffer, b);
78  clib_warning ("differ at index %d", i);
79  clib_warning ("is %U", format_hex_bytes, bd, n_bytes);
80  clib_warning ("mask %U", format_hex_bytes, pm, n_bytes);
81  clib_warning ("expect %U", format_hex_bytes, pd, n_bytes);
82  return 0;
83 }
84 
85 static int
86 validate_buffer_data (vlib_buffer_t * b, pg_stream_t * s)
87 {
88  return validate_buffer_data2 (b, s, 0, s->buffer_bytes);
89 }
90 
91 always_inline void
92 set_1 (void *a0,
93  u64 v0, u64 v_min, u64 v_max, u32 n_bits, u32 is_net_byte_order)
94 {
95  ASSERT (v0 >= v_min && v0 <= v_max);
96  if (n_bits == BITS (u8))
97  {
98  ((u8 *) a0)[0] = v0;
99  }
100  else if (n_bits == BITS (u16))
101  {
102  if (is_net_byte_order)
103  v0 = clib_host_to_net_u16 (v0);
104  clib_mem_unaligned (a0, u16) = v0;
105  }
106  else if (n_bits == BITS (u32))
107  {
108  if (is_net_byte_order)
109  v0 = clib_host_to_net_u32 (v0);
110  clib_mem_unaligned (a0, u32) = v0;
111  }
112  else if (n_bits == BITS (u64))
113  {
114  if (is_net_byte_order)
115  v0 = clib_host_to_net_u64 (v0);
116  clib_mem_unaligned (a0, u64) = v0;
117  }
118 }
119 
120 always_inline void
121 set_2 (void *a0, void *a1,
122  u64 v0, u64 v1,
123  u64 v_min, u64 v_max,
124  u32 n_bits, u32 is_net_byte_order, u32 is_increment)
125 {
126  ASSERT (v0 >= v_min && v0 <= v_max);
127  ASSERT (v1 >= v_min && v1 <= (v_max + is_increment));
128  if (n_bits == BITS (u8))
129  {
130  ((u8 *) a0)[0] = v0;
131  ((u8 *) a1)[0] = v1;
132  }
133  else if (n_bits == BITS (u16))
134  {
135  if (is_net_byte_order)
136  {
137  v0 = clib_host_to_net_u16 (v0);
138  v1 = clib_host_to_net_u16 (v1);
139  }
140  clib_mem_unaligned (a0, u16) = v0;
141  clib_mem_unaligned (a1, u16) = v1;
142  }
143  else if (n_bits == BITS (u32))
144  {
145  if (is_net_byte_order)
146  {
147  v0 = clib_host_to_net_u32 (v0);
148  v1 = clib_host_to_net_u32 (v1);
149  }
150  clib_mem_unaligned (a0, u32) = v0;
151  clib_mem_unaligned (a1, u32) = v1;
152  }
153  else if (n_bits == BITS (u64))
154  {
155  if (is_net_byte_order)
156  {
157  v0 = clib_host_to_net_u64 (v0);
158  v1 = clib_host_to_net_u64 (v1);
159  }
160  clib_mem_unaligned (a0, u64) = v0;
161  clib_mem_unaligned (a1, u64) = v1;
162  }
163 }
164 
165 static_always_inline void
166 do_set_fixed (pg_main_t * pg,
167  pg_stream_t * s,
168  u32 * buffers,
169  u32 n_buffers,
170  u32 n_bits,
171  u32 byte_offset, u32 is_net_byte_order, u64 v_min, u64 v_max)
172 {
173  vlib_main_t *vm = vlib_get_main ();
174 
175  while (n_buffers >= 4)
176  {
177  vlib_buffer_t *b0, *b1, *b2, *b3;
178  void *a0, *a1;
179 
180  b0 = vlib_get_buffer (vm, buffers[0]);
181  b1 = vlib_get_buffer (vm, buffers[1]);
182  b2 = vlib_get_buffer (vm, buffers[2]);
183  b3 = vlib_get_buffer (vm, buffers[3]);
184  buffers += 2;
185  n_buffers -= 2;
186 
187  a0 = (void *) b0 + byte_offset;
188  a1 = (void *) b1 + byte_offset;
189  CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
190  CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
191 
192  set_2 (a0, a1, v_min, v_min, v_min, v_max, n_bits, is_net_byte_order,
193  /* is_increment */ 0);
194 
195  ASSERT (validate_buffer_data (b0, s));
196  ASSERT (validate_buffer_data (b1, s));
197  }
198 
199  while (n_buffers > 0)
200  {
201  vlib_buffer_t *b0;
202  void *a0;
203 
204  b0 = vlib_get_buffer (vm, buffers[0]);
205  buffers += 1;
206  n_buffers -= 1;
207 
208  a0 = (void *) b0 + byte_offset;
209 
210  set_1 (a0, v_min, v_min, v_max, n_bits, is_net_byte_order);
211 
212  ASSERT (validate_buffer_data (b0, s));
213  }
214 }
215 
216 static_always_inline u64
217 do_set_increment (pg_main_t * pg,
218  pg_stream_t * s,
219  u32 * buffers,
220  u32 n_buffers,
221  u32 n_bits,
222  u32 byte_offset,
223  u32 is_net_byte_order,
224  u32 want_sum, u64 * sum_result, u64 v_min, u64 v_max, u64 v)
225 {
226  vlib_main_t *vm = vlib_get_main ();
227  u64 sum = 0;
228 
229  ASSERT (v >= v_min && v <= v_max);
230 
231  while (n_buffers >= 4)
232  {
233  vlib_buffer_t *b0, *b1, *b2, *b3;
234  void *a0, *a1;
235  u64 v_old;
236 
237  b0 = vlib_get_buffer (vm, buffers[0]);
238  b1 = vlib_get_buffer (vm, buffers[1]);
239  b2 = vlib_get_buffer (vm, buffers[2]);
240  b3 = vlib_get_buffer (vm, buffers[3]);
241  buffers += 2;
242  n_buffers -= 2;
243 
244  a0 = (void *) b0 + byte_offset;
245  a1 = (void *) b1 + byte_offset;
246  CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
247  CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
248 
249  v_old = v;
250  v = v_old + 2;
251  v = v > v_max ? v_min : v;
252  set_2 (a0, a1,
253  v_old + 0, v_old + 1, v_min, v_max, n_bits, is_net_byte_order,
254  /* is_increment */ 1);
255 
256  if (want_sum)
257  sum += 2 * v_old + 1;
258 
259  if (PREDICT_FALSE (v_old + 1 > v_max))
260  {
261  if (want_sum)
262  sum -= 2 * v_old + 1;
263 
264  v = v_old;
265  set_1 (a0, v + 0, v_min, v_max, n_bits, is_net_byte_order);
266  if (want_sum)
267  sum += v;
268  v += 1;
269 
270  v = v > v_max ? v_min : v;
271  set_1 (a1, v + 0, v_min, v_max, n_bits, is_net_byte_order);
272  if (want_sum)
273  sum += v;
274  v += 1;
275  }
276 
277  ASSERT (validate_buffer_data (b0, s));
278  ASSERT (validate_buffer_data (b1, s));
279  }
280 
281  while (n_buffers > 0)
282  {
283  vlib_buffer_t *b0;
284  void *a0;
285  u64 v_old;
286 
287  b0 = vlib_get_buffer (vm, buffers[0]);
288  buffers += 1;
289  n_buffers -= 1;
290 
291  a0 = (void *) b0 + byte_offset;
292 
293  v_old = v;
294  if (want_sum)
295  sum += v_old;
296  v += 1;
297  v = v > v_max ? v_min : v;
298 
299  ASSERT (v_old >= v_min && v_old <= v_max);
300  set_1 (a0, v_old, v_min, v_max, n_bits, is_net_byte_order);
301 
302  ASSERT (validate_buffer_data (b0, s));
303  }
304 
305  if (want_sum)
306  *sum_result = sum;
307 
308  return v;
309 }
310 
311 static_always_inline void
312 do_set_random (pg_main_t * pg,
313  pg_stream_t * s,
314  u32 * buffers,
315  u32 n_buffers,
316  u32 n_bits,
317  u32 byte_offset,
318  u32 is_net_byte_order,
319  u32 want_sum, u64 * sum_result, u64 v_min, u64 v_max)
320 {
321  vlib_main_t *vm = vlib_get_main ();
322  u64 v_diff = v_max - v_min + 1;
323  u64 r_mask = max_pow2 (v_diff) - 1;
324  u64 v0, v1;
325  u64 sum = 0;
326  void *random_data;
327 
328  random_data = clib_random_buffer_get_data
329  (&vm->random_buffer, n_buffers * n_bits / BITS (u8));
330 
331  v0 = v1 = v_min;
332 
333  while (n_buffers >= 4)
334  {
335  vlib_buffer_t *b0, *b1, *b2, *b3;
336  void *a0, *a1;
337  u64 r0 = 0, r1 = 0; /* warnings be gone */
338 
339  b0 = vlib_get_buffer (vm, buffers[0]);
340  b1 = vlib_get_buffer (vm, buffers[1]);
341  b2 = vlib_get_buffer (vm, buffers[2]);
342  b3 = vlib_get_buffer (vm, buffers[3]);
343  buffers += 2;
344  n_buffers -= 2;
345 
346  a0 = (void *) b0 + byte_offset;
347  a1 = (void *) b1 + byte_offset;
348  CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
349  CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
350 
351  switch (n_bits)
352  {
353 #define _(n) \
354  case BITS (u##n): \
355  { \
356  u##n * r = random_data; \
357  r0 = r[0]; \
358  r1 = r[1]; \
359  random_data = r + 2; \
360  } \
361  break;
362 
363  _(8);
364  _(16);
365  _(32);
366  _(64);
367 
368 #undef _
369  }
370 
371  /* Add power of 2 sized random number which may be out of range. */
372  v0 += r0 & r_mask;
373  v1 += r1 & r_mask;
374 
375  /* Twice should be enough to reduce to v_min .. v_max range. */
376  v0 = v0 > v_max ? v0 - v_diff : v0;
377  v1 = v1 > v_max ? v1 - v_diff : v1;
378  v0 = v0 > v_max ? v0 - v_diff : v0;
379  v1 = v1 > v_max ? v1 - v_diff : v1;
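 /* Why twice suffices: r_mask = max_pow2 (v_diff) - 1 < 2 * v_diff, and
    v0/v1 enter each iteration in [v_min, v_max], so after adding the masked
    random value they exceed v_max by less than 2 * v_diff; at most two
    conditional subtractions of v_diff bring them back into range. */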
380 
381  if (want_sum)
382  sum += v0 + v1;
383 
384  set_2 (a0, a1, v0, v1, v_min, v_max, n_bits, is_net_byte_order,
385  /* is_increment */ 0);
386 
387  ASSERT (validate_buffer_data (b0, s));
388  ASSERT (validate_buffer_data (b1, s));
389  }
390 
391  while (n_buffers > 0)
392  {
393  vlib_buffer_t *b0;
394  void *a0;
395  u64 r0 = 0; /* warnings be gone */
396 
397  b0 = vlib_get_buffer (vm, buffers[0]);
398  buffers += 1;
399  n_buffers -= 1;
400 
401  a0 = (void *) b0 + byte_offset;
402 
403  switch (n_bits)
404  {
405 #define _(n) \
406  case BITS (u##n): \
407  { \
408  u##n * r = random_data; \
409  r0 = r[0]; \
410  random_data = r + 1; \
411  } \
412  break;
413 
414  _(8);
415  _(16);
416  _(32);
417  _(64);
418 
419 #undef _
420  }
421 
422  /* Add power of 2 sized random number which may be out of range. */
423  v0 += r0 & r_mask;
424 
425  /* Twice should be enough to reduce to v_min .. v_max range. */
426  v0 = v0 > v_max ? v0 - v_diff : v0;
427  v0 = v0 > v_max ? v0 - v_diff : v0;
428 
429  if (want_sum)
430  sum += v0;
431 
432  set_1 (a0, v0, v_min, v_max, n_bits, is_net_byte_order);
433 
434  ASSERT (validate_buffer_data (b0, s));
435  }
436 
437  if (want_sum)
438  *sum_result = sum;
439 }
440 
441 #define _(i,t) \
442  clib_mem_unaligned (a##i, t) = \
443  clib_host_to_net_##t ((clib_net_to_host_mem_##t (a##i) &~ mask) \
444  | (v##i << shift))
445 
446 always_inline void
447 setbits_1 (void *a0,
448  u64 v0,
449  u64 v_min, u64 v_max,
450  u32 max_bits, u32 n_bits, u64 mask, u32 shift)
451 {
452  ASSERT (v0 >= v_min && v0 <= v_max);
453  if (max_bits == BITS (u8))
454  ((u8 *) a0)[0] = (((u8 *) a0)[0] & ~mask) | (v0 << shift);
455 
456  else if (max_bits == BITS (u16))
457  {
458  _(0, u16);
459  }
460  else if (max_bits == BITS (u32))
461  {
462  _(0, u32);
463  }
464  else if (max_bits == BITS (u64))
465  {
466  _(0, u64);
467  }
468 }
469 
470 always_inline void
471 setbits_2 (void *a0, void *a1,
472  u64 v0, u64 v1,
473  u64 v_min, u64 v_max,
474  u32 max_bits, u32 n_bits, u64 mask, u32 shift, u32 is_increment)
475 {
476  ASSERT (v0 >= v_min && v0 <= v_max);
477  ASSERT (v1 >= v_min && v1 <= v_max + is_increment);
478  if (max_bits == BITS (u8))
479  {
480  ((u8 *) a0)[0] = (((u8 *) a0)[0] & ~mask) | (v0 << shift);
481  ((u8 *) a1)[0] = (((u8 *) a1)[0] & ~mask) | (v1 << shift);
482  }
483 
484  else if (max_bits == BITS (u16))
485  {
486  _(0, u16);
487  _(1, u16);
488  }
489  else if (max_bits == BITS (u32))
490  {
491  _(0, u32);
492  _(1, u32);
493  }
494  else if (max_bits == BITS (u64))
495  {
496  _(0, u64);
497  _(1, u64);
498  }
499 }
500 
501 #undef _
502 
503 static_always_inline void
504 do_setbits_fixed (pg_main_t * pg,
505  pg_stream_t * s,
506  u32 * buffers,
507  u32 n_buffers,
508  u32 max_bits,
509  u32 n_bits,
510  u32 byte_offset, u64 v_min, u64 v_max, u64 mask, u32 shift)
511 {
512  vlib_main_t *vm = vlib_get_main ();
513 
514  while (n_buffers >= 4)
515  {
516  vlib_buffer_t *b0, *b1, *b2, *b3;
517  void *a0, *a1;
518 
519  b0 = vlib_get_buffer (vm, buffers[0]);
520  b1 = vlib_get_buffer (vm, buffers[1]);
521  b2 = vlib_get_buffer (vm, buffers[2]);
522  b3 = vlib_get_buffer (vm, buffers[3]);
523  buffers += 2;
524  n_buffers -= 2;
525 
526  a0 = (void *) b0 + byte_offset;
527  a1 = (void *) b1 + byte_offset;
528  CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
529  CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
530 
531  setbits_2 (a0, a1,
532  v_min, v_min, v_min, v_max, max_bits, n_bits, mask, shift,
533  /* is_increment */ 0);
534 
535  ASSERT (validate_buffer_data (b0, s));
536  ASSERT (validate_buffer_data (b1, s));
537  }
538 
539  while (n_buffers > 0)
540  {
541  vlib_buffer_t *b0;
542  void *a0;
543 
544  b0 = vlib_get_buffer (vm, buffers[0]);
545  buffers += 1;
546  n_buffers -= 1;
547 
548  a0 = (void *) b0 + byte_offset;
549 
550  setbits_1 (a0, v_min, v_min, v_max, max_bits, n_bits, mask, shift);
551  ASSERT (validate_buffer_data (b0, s));
552  }
553 }
554 
555 static_always_inline u64
556 do_setbits_increment (pg_main_t * pg,
557  pg_stream_t * s,
558  u32 * buffers,
559  u32 n_buffers,
560  u32 max_bits,
561  u32 n_bits,
562  u32 byte_offset,
563  u64 v_min, u64 v_max, u64 v, u64 mask, u32 shift)
564 {
565  vlib_main_t *vm = vlib_get_main ();
566 
567  ASSERT (v >= v_min && v <= v_max);
568 
569  while (n_buffers >= 4)
570  {
571  vlib_buffer_t *b0, *b1, *b2, *b3;
572  void *a0, *a1;
573  u64 v_old;
574 
575  b0 = vlib_get_buffer (vm, buffers[0]);
576  b1 = vlib_get_buffer (vm, buffers[1]);
577  b2 = vlib_get_buffer (vm, buffers[2]);
578  b3 = vlib_get_buffer (vm, buffers[3]);
579  buffers += 2;
580  n_buffers -= 2;
581 
582  a0 = (void *) b0 + byte_offset;
583  a1 = (void *) b1 + byte_offset;
584  CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
585  CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
586 
587  v_old = v;
588  v = v_old + 2;
589  v = v > v_max ? v_min : v;
590  setbits_2 (a0, a1,
591  v_old + 0, v_old + 1,
592  v_min, v_max, max_bits, n_bits, mask, shift,
593  /* is_increment */ 1);
594 
595  if (PREDICT_FALSE (v_old + 1 > v_max))
596  {
597  v = v_old;
598  setbits_1 (a0, v + 0, v_min, v_max, max_bits, n_bits, mask, shift);
599  v += 1;
600 
601  v = v > v_max ? v_min : v;
602  setbits_1 (a1, v + 0, v_min, v_max, max_bits, n_bits, mask, shift);
603  v += 1;
604  }
605  ASSERT (validate_buffer_data (b0, s));
606  ASSERT (validate_buffer_data (b1, s));
607  }
608 
609  while (n_buffers > 0)
610  {
611  vlib_buffer_t *b0;
612  void *a0;
613  u64 v_old;
614 
615  b0 = vlib_get_buffer (vm, buffers[0]);
616  buffers += 1;
617  n_buffers -= 1;
618 
619  a0 = (void *) b0 + byte_offset;
620 
621  v_old = v;
622  v = v_old + 1;
623  v = v > v_max ? v_min : v;
624 
625  ASSERT (v_old >= v_min && v_old <= v_max);
626  setbits_1 (a0, v_old, v_min, v_max, max_bits, n_bits, mask, shift);
627 
628  ASSERT (validate_buffer_data (b0, s));
629  }
630 
631  return v;
632 }
633 
634 static_always_inline void
635 do_setbits_random (pg_main_t * pg,
636  pg_stream_t * s,
637  u32 * buffers,
638  u32 n_buffers,
639  u32 max_bits,
640  u32 n_bits,
641  u32 byte_offset, u64 v_min, u64 v_max, u64 mask, u32 shift)
642 {
643  vlib_main_t *vm = vlib_get_main ();
644  u64 v_diff = v_max - v_min + 1;
645  u64 r_mask = max_pow2 (v_diff) - 1;
646  u64 v0, v1;
647  void *random_data;
648 
649  random_data = clib_random_buffer_get_data
650  (&vm->random_buffer, n_buffers * max_bits / BITS (u8));
651  v0 = v1 = v_min;
652 
653  while (n_buffers >= 4)
654  {
655  vlib_buffer_t *b0, *b1, *b2, *b3;
656  void *a0, *a1;
657  u64 r0 = 0, r1 = 0; /* warnings be gone */
658 
659  b0 = vlib_get_buffer (vm, buffers[0]);
660  b1 = vlib_get_buffer (vm, buffers[1]);
661  b2 = vlib_get_buffer (vm, buffers[2]);
662  b3 = vlib_get_buffer (vm, buffers[3]);
663  buffers += 2;
664  n_buffers -= 2;
665 
666  a0 = (void *) b0 + byte_offset;
667  a1 = (void *) b1 + byte_offset;
668  CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
669  CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
670 
671  switch (max_bits)
672  {
673 #define _(n) \
674  case BITS (u##n): \
675  { \
676  u##n * r = random_data; \
677  r0 = r[0]; \
678  r1 = r[1]; \
679  random_data = r + 2; \
680  } \
681  break;
682 
683  _(8);
684  _(16);
685  _(32);
686  _(64);
687 
688 #undef _
689  }
690 
691  /* Add power of 2 sized random number which may be out of range. */
692  v0 += r0 & r_mask;
693  v1 += r1 & r_mask;
694 
695  /* Twice should be enough to reduce to v_min .. v_max range. */
696  v0 = v0 > v_max ? v0 - v_diff : v0;
697  v1 = v1 > v_max ? v1 - v_diff : v1;
698  v0 = v0 > v_max ? v0 - v_diff : v0;
699  v1 = v1 > v_max ? v1 - v_diff : v1;
700 
701  setbits_2 (a0, a1, v0, v1, v_min, v_max, max_bits, n_bits, mask, shift,
702  /* is_increment */ 0);
703 
704  ASSERT (validate_buffer_data (b0, s));
705  ASSERT (validate_buffer_data (b1, s));
706  }
707 
708  while (n_buffers > 0)
709  {
710  vlib_buffer_t *b0;
711  void *a0;
712  u64 r0 = 0; /* warnings be gone */
713 
714  b0 = vlib_get_buffer (vm, buffers[0]);
715  buffers += 1;
716  n_buffers -= 1;
717 
718  a0 = (void *) b0 + byte_offset;
719 
720  switch (max_bits)
721  {
722 #define _(n) \
723  case BITS (u##n): \
724  { \
725  u##n * r = random_data; \
726  r0 = r[0]; \
727  random_data = r + 1; \
728  } \
729  break;
730 
731  _(8);
732  _(16);
733  _(32);
734  _(64);
735 
736 #undef _
737  }
738 
739  /* Add power of 2 sized random number which may be out of range. */
740  v0 += r0 & r_mask;
741 
742  /* Twice should be enough to reduce to v_min .. v_max range. */
743  v0 = v0 > v_max ? v0 - v_diff : v0;
744  v0 = v0 > v_max ? v0 - v_diff : v0;
745 
746  setbits_1 (a0, v0, v_min, v_max, max_bits, n_bits, mask, shift);
747 
748  ASSERT (validate_buffer_data (b0, s));
749  }
750 }
751 
752 static u64
753 do_it (pg_main_t * pg,
754  pg_stream_t * s,
755  u32 * buffers,
756  u32 n_buffers,
757  u32 lo_bit, u32 hi_bit,
758  u64 v_min, u64 v_max, u64 v, pg_edit_type_t edit_type)
759 {
760  u32 max_bits, l0, l1, h1, start_bit;
761 
762  if (v_min == v_max)
763  edit_type = PG_EDIT_FIXED;
764 
765  l0 = lo_bit / BITS (u8);
766  l1 = lo_bit % BITS (u8);
767  h1 = hi_bit % BITS (u8);
768 
769  start_bit = l0 * BITS (u8);
770 
771  max_bits = hi_bit - start_bit;
772  ASSERT (max_bits <= 64);
773 
774 #define _(n) \
775  case (n): \
776  if (edit_type == PG_EDIT_INCREMENT) \
777  v = do_set_increment (pg, s, buffers, n_buffers, \
778  BITS (u##n), \
779  l0, \
780  /* is_net_byte_order */ 1, \
781  /* want sum */ 0, 0, \
782  v_min, v_max, \
783  v); \
784  else if (edit_type == PG_EDIT_RANDOM) \
785  do_set_random (pg, s, buffers, n_buffers, \
786  BITS (u##n), \
787  l0, \
788  /* is_net_byte_order */ 1, \
789  /* want sum */ 0, 0, \
790  v_min, v_max); \
791  else /* edit_type == PG_EDIT_FIXED */ \
792  do_set_fixed (pg, s, buffers, n_buffers, \
793  BITS (u##n), \
794  l0, \
795  /* is_net_byte_order */ 1, \
796  v_min, v_max); \
797  goto done;
798 
799  if (l1 == 0 && h1 == 0)
800  {
801  switch (max_bits)
802  {
803  _(8);
804  _(16);
805  _(32);
806  _(64);
807  }
808  }
809 
810 #undef _
811 
812  {
813  u64 mask;
814  u32 shift = l1;
815  u32 n_bits = max_bits;
816 
817  max_bits = clib_max (max_pow2 (n_bits), 8);
818 
819  mask = ((u64) 1 << (u64) n_bits) - 1;
820  mask &= ~(((u64) 1 << (u64) shift) - 1);
821 
822  mask <<= max_bits - n_bits;
823  shift += max_bits - n_bits;
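 /* Non byte-aligned case: widen to the smallest power-of-two container of
    at least one byte, build a mask covering only the edited bits, and let
    setbits_* read-modify-write the container in network byte order so the
    neighbouring bits are preserved. */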
824 
825  switch (max_bits)
826  {
827 #define _(n) \
828  case (n): \
829  if (edit_type == PG_EDIT_INCREMENT) \
830  v = do_setbits_increment (pg, s, buffers, n_buffers, \
831  BITS (u##n), n_bits, \
832  l0, v_min, v_max, v, \
833  mask, shift); \
834  else if (edit_type == PG_EDIT_RANDOM) \
835  do_setbits_random (pg, s, buffers, n_buffers, \
836  BITS (u##n), n_bits, \
837  l0, v_min, v_max, \
838  mask, shift); \
839  else /* edit_type == PG_EDIT_FIXED */ \
840  do_setbits_fixed (pg, s, buffers, n_buffers, \
841  BITS (u##n), n_bits, \
842  l0, v_min, v_max, \
843  mask, shift); \
844  goto done;
845 
846  _(8);
847  _(16);
848  _(32);
849  _(64);
850 
851 #undef _
852  }
853  }
854 
855 done:
856  return v;
857 }
858 
859 static void
860 pg_generate_set_lengths (pg_main_t * pg,
861  pg_stream_t * s, u32 * buffers, u32 n_buffers)
862 {
863  u64 v_min, v_max, length_sum;
864  pg_edit_type_t edit_type;
865 
866  v_min = s->min_packet_bytes;
867  v_max = s->max_packet_bytes;
868  edit_type = s->packet_size_edit_type;
869 
870  if (edit_type == PG_EDIT_INCREMENT)
871  s->last_increment_packet_size
872  = do_set_increment (pg, s, buffers, n_buffers,
873  8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
874  STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
875  /* is_net_byte_order */ 0,
876  /* want sum */ 1, &length_sum,
877  v_min, v_max, s->last_increment_packet_size);
878 
879  else if (edit_type == PG_EDIT_RANDOM)
880  do_set_random (pg, s, buffers, n_buffers,
881  8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
882  STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
883  /* is_net_byte_order */ 0,
884  /* want sum */ 1, &length_sum,
885  v_min, v_max);
886 
887  else /* edit_type == PG_EDIT_FIXED */
888  {
889  do_set_fixed (pg, s, buffers, n_buffers,
890  8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
891  STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
892  /* is_net_byte_order */ 0,
893  v_min, v_max);
894  length_sum = v_min * n_buffers;
895  }
896 
897  {
898  vnet_main_t *vnm = vnet_get_main ();
899  vnet_interface_main_t *im = &vnm->interface_main;
900  vnet_sw_interface_t *si =
901  vnet_get_sw_interface (vnm, s->sw_if_index[VLIB_RX]);
902 
903  vlib_increment_combined_counter (im->combined_sw_if_counters
904  + VNET_INTERFACE_COUNTER_RX,
905  vlib_get_thread_index (),
906  si->sw_if_index, n_buffers, length_sum);
907  }
908 
909 }
910 
911 static void
912 pg_generate_fix_multi_buffer_lengths (pg_main_t * pg,
913  pg_stream_t * s,
914  u32 * buffers, u32 n_buffers)
915 {
916  vlib_main_t *vm = vlib_get_main ();
917  pg_buffer_index_t *pbi;
918  uword n_bytes_left;
919  static u32 *unused_buffers = 0;
920 
921  while (n_buffers > 0)
922  {
923  vlib_buffer_t *b;
924  u32 bi;
925 
926  bi = buffers[0];
927  b = vlib_get_buffer (vm, bi);
928 
929  /* Current length here is length of whole packet. */
930  n_bytes_left = b->current_length;
931 
932  pbi = s->buffer_indices;
933  while (1)
934  {
935  uword n = clib_min (n_bytes_left, s->buffer_bytes);
936 
937  b->current_length = n;
938  n_bytes_left -= n;
939  if (n_bytes_left > 0)
940  b->flags |= VLIB_BUFFER_NEXT_PRESENT;
941  else
942  b->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
943 
944  /* Return unused buffers to fifos. */
945  if (n == 0)
946  vec_add1 (unused_buffers, bi);
947 
948  pbi++;
949  if (pbi >= vec_end (s->buffer_indices))
950  break;
951 
952  bi = b->next_buffer;
953  b = vlib_get_buffer (vm, bi);
954  }
955  ASSERT (n_bytes_left == 0);
956 
957  buffers += 1;
958  n_buffers -= 1;
959  }
960 
961  if (vec_len (unused_buffers) > 0)
962  {
963  vlib_buffer_free_no_next (vm, unused_buffers, vec_len (unused_buffers));
964  _vec_len (unused_buffers) = 0;
965  }
966 }
967 
968 static void
969 pg_generate_edit (pg_main_t * pg,
970  pg_stream_t * s, u32 * buffers, u32 n_buffers)
971 {
972  pg_edit_t *e;
973 
974  vec_foreach (e, s->non_fixed_edits)
975  {
976  switch (e->type)
977  {
978  case PG_EDIT_RANDOM:
979  case PG_EDIT_INCREMENT:
980  {
981  u32 lo_bit, hi_bit;
982  u64 v_min, v_max;
983 
984  v_min = pg_edit_get_value (e, PG_EDIT_LO);
985  v_max = pg_edit_get_value (e, PG_EDIT_HI);
986 
987  hi_bit = (BITS (u8) * STRUCT_OFFSET_OF (vlib_buffer_t, data)
988  + BITS (u8) + e->lsb_bit_offset);
989  lo_bit = hi_bit - e->n_bits;
990 
991  e->last_increment_value
992  = do_it (pg, s, buffers, n_buffers, lo_bit, hi_bit, v_min, v_max,
993  e->last_increment_value, e->type);
994  }
995  break;
996 
997  case PG_EDIT_UNSPECIFIED:
998  break;
999 
1000  default:
1001  /* Should not be any fixed edits left. */
1002  ASSERT (0);
1003  break;
1004  }
1005  }
1006 
1007  /* Call any edit functions to e.g. complete IP lengths, checksums, ... */
1008  {
1009  int i;
1010  for (i = vec_len (s->edit_groups) - 1; i >= 0; i--)
1011  {
1012  pg_edit_group_t *g = s->edit_groups + i;
1013  if (g->edit_function)
1014  g->edit_function (pg, s, g, buffers, n_buffers);
1015  }
1016  }
1017 }
1018 
1019 static void
1020 pg_set_next_buffer_pointers (pg_main_t * pg,
1021  pg_stream_t * s,
1022  u32 * buffers, u32 * next_buffers, u32 n_buffers)
1023 {
1024  vlib_main_t *vm = vlib_get_main ();
1025 
1026  while (n_buffers >= 4)
1027  {
1028  u32 ni0, ni1;
1029  vlib_buffer_t *b0, *b1;
1030 
1031  b0 = vlib_get_buffer (vm, buffers[0]);
1032  b1 = vlib_get_buffer (vm, buffers[1]);
1033  ni0 = next_buffers[0];
1034  ni1 = next_buffers[1];
1035 
1036  vlib_prefetch_buffer_with_index (vm, buffers[2], WRITE);
1037  vlib_prefetch_buffer_with_index (vm, buffers[3], WRITE);
1038 
1039  b0->flags |= VLIB_BUFFER_NEXT_PRESENT;
1040  b1->flags |= VLIB_BUFFER_NEXT_PRESENT;
1041  b0->next_buffer = ni0;
1042  b1->next_buffer = ni1;
1043 
1044  buffers += 2;
1045  next_buffers += 2;
1046  n_buffers -= 2;
1047  }
1048 
1049  while (n_buffers > 0)
1050  {
1051  u32 ni0;
1052  vlib_buffer_t *b0;
1053 
1054  b0 = vlib_get_buffer (vm, buffers[0]);
1055  ni0 = next_buffers[0];
1056  buffers += 1;
1057  next_buffers += 1;
1058  n_buffers -= 1;
1059 
1060  b0->flags |= VLIB_BUFFER_NEXT_PRESENT;
1061  b0->next_buffer = ni0;
1062  }
1063 }
1064 
1065 static_always_inline void
1066 init_buffers_inline (vlib_main_t * vm,
1067  pg_stream_t * s,
1068  u32 * buffers,
1069  u32 n_buffers, u32 data_offset, u32 n_data, u32 set_data)
1070 {
1071  u32 n_left, *b;
1072  u8 *data, *mask;
1073 
1074  ASSERT (s->replay_packet_templates == 0);
1075 
1076  data = s->fixed_packet_data + data_offset;
1077  mask = s->fixed_packet_data_mask + data_offset;
1078  if (data + n_data >= vec_end (s->fixed_packet_data))
1079  n_data = (data < vec_end (s->fixed_packet_data)
1080  ? vec_end (s->fixed_packet_data) - data : 0);
1081  if (n_data > 0)
1082  {
1083  ASSERT (data + n_data <= vec_end (s->fixed_packet_data));
1084  ASSERT (mask + n_data <= vec_end (s->fixed_packet_data_mask));
1085  }
1086 
1087  n_left = n_buffers;
1088  b = buffers;
1089 
1090  while (n_left >= 4)
1091  {
1092  u32 bi0, bi1;
1093  vlib_buffer_t *b0, *b1;
1094 
1095  /* Prefetch next iteration. */
1096  vlib_prefetch_buffer_with_index (vm, b[2], STORE);
1097  vlib_prefetch_buffer_with_index (vm, b[3], STORE);
1098 
1099  bi0 = b[0];
1100  bi1 = b[1];
1101  b += 2;
1102  n_left -= 2;
1103 
1104  b0 = vlib_get_buffer (vm, bi0);
1105  b1 = vlib_get_buffer (vm, bi1);
1106 
1107  vnet_buffer (b0)->sw_if_index[VLIB_RX] =
1108  vnet_buffer (b1)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
1109 
1110  vnet_buffer (b0)->sw_if_index[VLIB_TX] =
1111  vnet_buffer (b1)->sw_if_index[VLIB_TX] = (u32) ~ 0;
1112 
1113  if (set_data)
1114  {
1115  clib_memcpy_fast (b0->data, data, n_data);
1116  clib_memcpy_fast (b1->data, data, n_data);
1117  }
1118  else
1119  {
1120  ASSERT (validate_buffer_data2 (b0, s, data_offset, n_data));
1121  ASSERT (validate_buffer_data2 (b1, s, data_offset, n_data));
1122  }
1123  }
1124 
1125  while (n_left >= 1)
1126  {
1127  u32 bi0;
1128  vlib_buffer_t *b0;
1129 
1130  bi0 = b[0];
1131  b += 1;
1132  n_left -= 1;
1133 
1134  b0 = vlib_get_buffer (vm, bi0);
1135  vnet_buffer (b0)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
1136  /* s->sw_if_index[VLIB_TX]; */
1137  vnet_buffer (b0)->sw_if_index[VLIB_TX] = (u32) ~ 0;
1138 
1139  if (set_data)
1140  clib_memcpy_fast (b0->data, data, n_data);
1141  else
1142  ASSERT (validate_buffer_data2 (b0, s, data_offset, n_data));
1143  }
1144 }
1145 
1146 static u32
1147 pg_stream_fill_helper (pg_main_t * pg,
1148  pg_stream_t * s,
1149  pg_buffer_index_t * bi,
1150  u32 * buffers, u32 * next_buffers, u32 n_alloc)
1151 {
1152  vlib_main_t *vm = vlib_get_main ();
1153  uword is_start_of_packet = bi == s->buffer_indices;
1154  u32 n_allocated;
1155 
1157 
1158  n_allocated = vlib_buffer_alloc (vm, buffers, n_alloc);
1159  if (n_allocated == 0)
1160  return 0;
1161 
1162  /*
1163  * We can't assume we got all the buffers we asked for...
1164  * This never worked until recently.
1165  */
1166  n_alloc = n_allocated;
1167 
1168  /* Reinitialize buffers */
1169  init_buffers_inline
1170  (vm, s,
1171  buffers,
1172  n_alloc, (bi - s->buffer_indices) * s->buffer_bytes /* data offset */ ,
1173  s->buffer_bytes,
1174  /* set_data */ 1);
1175 
1176  if (next_buffers)
1177  pg_set_next_buffer_pointers (pg, s, buffers, next_buffers, n_alloc);
1178 
1179  if (is_start_of_packet)
1180  {
1181  pg_generate_set_lengths (pg, s, buffers, n_alloc);
1182  if (vec_len (s->buffer_indices) > 1)
1183  pg_generate_fix_multi_buffer_lengths (pg, s, buffers, n_alloc);
1184 
1185  pg_generate_edit (pg, s, buffers, n_alloc);
1186  }
1187 
1188  return n_alloc;
1189 }
1190 
1191 static u32
1192 pg_stream_fill_replay (pg_main_t * pg, pg_stream_t * s, u32 n_alloc)
1193 {
1194  pg_buffer_index_t *bi;
1195  u32 n_left, i, l;
1196  u32 buffer_alloc_request = 0;
1197  u32 buffer_alloc_result;
1198  u32 current_buffer_index;
1199  u32 *buffers;
1200  vlib_main_t *vm = vlib_get_main ();
1201  vnet_main_t *vnm = vnet_get_main ();
1202  u32 buf_sz = vlib_buffer_get_default_data_size (vm);
1203  vnet_interface_main_t *im = &vnm->interface_main;
1204  vnet_sw_interface_t *si;
1205 
1206  buffers = pg->replay_buffers_by_thread[vm->thread_index];
1207  vec_reset_length (buffers);
1208  bi = s->buffer_indices;
1209 
1210  n_left = n_alloc;
1211  i = s->current_replay_packet_index;
1212  l = vec_len (s->replay_packet_templates);
1213 
1214  /* Figure out how many buffers we need */
1215  while (n_left > 0)
1216  {
1217  u8 *d0;
1218 
1219  d0 = vec_elt (s->replay_packet_templates, i);
1220  buffer_alloc_request += (vec_len (d0) + (buf_sz - 1)) / buf_sz;
1221 
1222  i = ((i + 1) == l) ? 0 : i + 1;
1223  n_left--;
1224  }
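 /* Each replay template needs one buffer per buf_sz chunk of its length
    (a rounding-up divide), so the request above counts every chain segment
    before a single bulk allocation. */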
1225 
1226  ASSERT (buffer_alloc_request > 0);
1227  vec_validate (buffers, buffer_alloc_request - 1);
1228 
1229  /* Allocate that many buffers */
1230  buffer_alloc_result = vlib_buffer_alloc (vm, buffers, buffer_alloc_request);
1231  if (buffer_alloc_result < buffer_alloc_request)
1232  {
1233  clib_warning ("alloc failure, got %d not %d", buffer_alloc_result,
1234  buffer_alloc_request);
1235  vlib_buffer_free_no_next (vm, buffers, buffer_alloc_result);
1236  pg->replay_buffers_by_thread[vm->thread_index] = buffers;
1237  return 0;
1238  }
1239 
1240  /* Now go generate the buffers, and add them to the FIFO */
1241  n_left = n_alloc;
1242 
1243  current_buffer_index = 0;
1244  i = s->current_replay_packet_index;
1245  l = vec_len (s->replay_packet_templates);
1246  while (n_left > 0)
1247  {
1248  u8 *d0;
1249  int not_last;
1250  u32 data_offset;
1251  u32 bytes_to_copy, bytes_this_chunk;
1252  vlib_buffer_t *b;
1253 
1254  d0 = vec_elt (s->replay_packet_templates, i);
1255  data_offset = 0;
1256  bytes_to_copy = vec_len (d0);
1257 
1258  /* Add head chunk to pg fifo */
1259  clib_fifo_add1 (bi->buffer_fifo, buffers[current_buffer_index]);
1260 
1261  /* Copy the data */
1262  while (bytes_to_copy)
1263  {
1264  bytes_this_chunk = clib_min (bytes_to_copy, buf_sz);
1265  ASSERT (current_buffer_index < vec_len (buffers));
1266  b = vlib_get_buffer (vm, buffers[current_buffer_index]);
1267  clib_memcpy_fast (b->data, d0 + data_offset, bytes_this_chunk);
1268  vnet_buffer (b)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
1269  vnet_buffer (b)->sw_if_index[VLIB_TX] = (u32) ~ 0;
1270  b->flags = 0;
1271  b->next_buffer = 0;
1272  b->current_data = 0;
1273  b->current_length = bytes_this_chunk;
1274 
1275  not_last = bytes_this_chunk < bytes_to_copy;
1276  if (not_last)
1277  {
1278  ASSERT (current_buffer_index < (vec_len (buffers) - 1));
1279  b->flags |= VLIB_BUFFER_NEXT_PRESENT;
1280  b->next_buffer = buffers[current_buffer_index + 1];
1281  }
1282  bytes_to_copy -= bytes_this_chunk;
1283  data_offset += bytes_this_chunk;
1284  current_buffer_index++;
1285  }
1286 
1287  i = ((i + 1) == l) ? 0 : i + 1;
1288  n_left--;
1289  }
1290 
1291  /* Update the interface counters */
1292  si = vnet_get_sw_interface (vnm, s->sw_if_index[VLIB_RX]);
1293  l = 0;
1294  for (i = 0; i < n_alloc; i++)
1295  l += vlib_buffer_index_length_in_chain (vm, buffers[i]);
1296  vlib_increment_combined_counter (im->combined_sw_if_counters
1297  + VNET_INTERFACE_COUNTER_RX,
1298  vlib_get_thread_index (),
1299  si->sw_if_index, n_alloc, l);
1300 
1301  s->current_replay_packet_index += n_alloc;
1302  s->current_replay_packet_index %= vec_len (s->replay_packet_templates);
1303 
1304  pg->replay_buffers_by_thread[vm->thread_index] = buffers;
1305  return n_alloc;
1306 }
1307 
1308 
1309 static u32
1310 pg_stream_fill (pg_main_t * pg, pg_stream_t * s, u32 n_buffers)
1311 {
1312  pg_buffer_index_t *bi;
1313  word i, n_in_fifo, n_alloc, n_free, n_added;
1314  u32 *tail, *start, *end, *last_tail, *last_start;
1315 
1316  bi = s->buffer_indices;
1317 
1318  n_in_fifo = clib_fifo_elts (bi->buffer_fifo);
1319  if (n_in_fifo >= n_buffers)
1320  return n_in_fifo;
1321 
1322  n_alloc = n_buffers - n_in_fifo;
1323 
1324  /* Round up, but never generate more than limit. */
1325  n_alloc = clib_max (VLIB_FRAME_SIZE, n_alloc);
1326 
1327  if (s->n_packets_limit > 0
1328  && s->n_packets_generated + n_in_fifo + n_alloc >= s->n_packets_limit)
1329  {
1330  n_alloc = s->n_packets_limit - s->n_packets_generated - n_in_fifo;
1331  if (n_alloc < 0)
1332  n_alloc = 0;
1333  }
1334 
1335  /*
1336  * Handle pcap replay directly
1337  */
1338  if (s->replay_packet_templates)
1339  return pg_stream_fill_replay (pg, s, n_alloc);
1340 
1341  /* All buffer fifos should have the same size. */
1342  if (CLIB_DEBUG > 0)
1343  {
1344  uword l = ~0, e;
1345  vec_foreach (bi, s->buffer_indices)
1346  {
1347  e = clib_fifo_elts (bi->buffer_fifo);
1348  if (bi == s->buffer_indices)
1349  l = e;
1350  ASSERT (l == e);
1351  }
1352  }
1353 
1354  last_tail = last_start = 0;
1355  n_added = n_alloc;
1356 
1357  for (i = vec_len (s->buffer_indices) - 1; i >= 0; i--)
1358  {
1359  bi = vec_elt_at_index (s->buffer_indices, i);
1360 
1361  n_free = clib_fifo_free_elts (bi->buffer_fifo);
1362  if (n_free < n_alloc)
1363  clib_fifo_resize (bi->buffer_fifo, n_alloc - n_free);
1364 
1365  tail = clib_fifo_advance_tail (bi->buffer_fifo, n_alloc);
1366  start = bi->buffer_fifo;
1367  end = clib_fifo_end (bi->buffer_fifo);
1368 
1369  if (tail + n_alloc <= end)
1370  {
1371  n_added =
1372  pg_stream_fill_helper (pg, s, bi, tail, last_tail, n_alloc);
1373  }
1374  else
1375  {
1376  u32 n = clib_min (end - tail, n_alloc);
1377  n_added = pg_stream_fill_helper (pg, s, bi, tail, last_tail, n);
1378 
1379  if (n_added == n && n_alloc > n_added)
1380  {
1381  n_added += pg_stream_fill_helper
1382  (pg, s, bi, start, last_start, n_alloc - n_added);
1383  }
1384  }
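 /* The buffer fifo is a circular vector, so advancing the tail may wrap past
    its end; in that case the new slots are filled in two contiguous chunks,
    first tail..end and then from the start of the fifo. */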
1385 
1386  if (PREDICT_FALSE (n_added < n_alloc))
1387  tail = clib_fifo_advance_tail (bi->buffer_fifo, n_added - n_alloc);
1388 
1389  last_tail = tail;
1390  last_start = start;
1391 
1392  /* Verify that pkts in the fifo are properly allocated */
1393  }
1394 
1395  return n_in_fifo + n_added;
1396 }
1397 
1398 typedef struct
1399 {
1400  u32 stream_index;
1401 
1402  u32 packet_length;
1403  u32 sw_if_index;
1404 
1405  /* Use pre data for packet data. */
1406  vlib_buffer_t buffer;
1407 } pg_input_trace_t;
1408 
1409 static u8 *
1410 format_pg_input_trace (u8 * s, va_list * va)
1411 {
1412  vlib_main_t *vm = va_arg (*va, vlib_main_t *);
1413  CLIB_UNUSED (vlib_node_t * node) = va_arg (*va, vlib_node_t *);
1414  pg_input_trace_t *t = va_arg (*va, pg_input_trace_t *);
1415  pg_main_t *pg = &pg_main;
1416  pg_stream_t *stream;
1417  vlib_node_t *n;
1418  u32 indent = format_get_indent (s);
1419 
1420  stream = 0;
1421  if (!pool_is_free_index (pg->streams, t->stream_index))
1422  stream = pool_elt_at_index (pg->streams, t->stream_index);
1423 
1424  if (stream)
1425  s = format (s, "stream %v", pg->streams[t->stream_index].name);
1426  else
1427  s = format (s, "stream %d", t->stream_index);
1428 
1429  s = format (s, ", %d bytes", t->packet_length);
1430  s = format (s, ", %d sw_if_index", t->sw_if_index);
1431 
1432  s = format (s, "\n%U%U",
1433  format_white_space, indent, format_vnet_buffer, &t->buffer);
1434 
1435  s = format (s, "\n%U", format_white_space, indent);
1436 
1437  n = 0;
1438  if (stream)
1439  n = vlib_get_node (vm, stream->node_index);
1440 
1441  if (n && n->format_buffer)
1442  s = format (s, "%U", n->format_buffer,
1443  t->buffer.pre_data, sizeof (t->buffer.pre_data));
1444  else
1445  s = format (s, "%U",
1446  format_hex_bytes, t->buffer.pre_data,
1447  ARRAY_LEN (t->buffer.pre_data));
1448  return s;
1449 }
1450 
1451 static void
1452 pg_input_trace (pg_main_t * pg,
1453  vlib_node_runtime_t * node, u32 stream_index, u32 next_index,
1454  u32 * buffers, u32 n_buffers)
1455 {
1456  vlib_main_t *vm = vlib_get_main ();
1457  u32 *b, n_left;
1458 
1459  n_left = n_buffers;
1460  b = buffers;
1461 
1462  while (n_left >= 2)
1463  {
1464  u32 bi0, bi1;
1465  vlib_buffer_t *b0, *b1;
1466  pg_input_trace_t *t0, *t1;
1467 
1468  bi0 = b[0];
1469  bi1 = b[1];
1470  b += 2;
1471  n_left -= 2;
1472 
1473  b0 = vlib_get_buffer (vm, bi0);
1474  b1 = vlib_get_buffer (vm, bi1);
1475 
1476  vlib_trace_buffer (vm, node, next_index, b0, /* follow_chain */ 1);
1477  vlib_trace_buffer (vm, node, next_index, b1, /* follow_chain */ 1);
1478 
1479  t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
1480  t1 = vlib_add_trace (vm, node, b1, sizeof (t1[0]));
1481 
1482  t0->stream_index = stream_index;
1483  t1->stream_index = stream_index;
1484 
1485  t0->packet_length = vlib_buffer_length_in_chain (vm, b0);
1486  t1->packet_length = vlib_buffer_length_in_chain (vm, b1);
1487 
1488  t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
1489  t1->sw_if_index = vnet_buffer (b1)->sw_if_index[VLIB_RX];
1490 
1491  clib_memcpy_fast (&t0->buffer, b0,
1492  sizeof (b0[0]) - sizeof (b0->pre_data));
1493  clib_memcpy_fast (&t1->buffer, b1,
1494  sizeof (b1[0]) - sizeof (b1->pre_data));
1495 
1496  clib_memcpy_fast (t0->buffer.pre_data, b0->data,
1497  sizeof (t0->buffer.pre_data));
1498  clib_memcpy_fast (t1->buffer.pre_data, b1->data,
1499  sizeof (t1->buffer.pre_data));
1500  }
1501 
1502  while (n_left >= 1)
1503  {
1504  u32 bi0;
1505  vlib_buffer_t *b0;
1506  pg_input_trace_t *t0;
1507 
1508  bi0 = b[0];
1509  b += 1;
1510  n_left -= 1;
1511 
1512  b0 = vlib_get_buffer (vm, bi0);
1513 
1514  vlib_trace_buffer (vm, node, next_index, b0, /* follow_chain */ 1);
1515  t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
1516 
1517  t0->stream_index = stream_index;
1518  t0->packet_length = vlib_buffer_length_in_chain (vm, b0);
1519  t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
1520  clib_memcpy_fast (&t0->buffer, b0,
1521  sizeof (b0[0]) - sizeof (b0->pre_data));
1522  clib_memcpy_fast (t0->buffer.pre_data, b0->data,
1523  sizeof (t0->buffer.pre_data));
1524  }
1525 }
1526 
1527 static uword
1528 pg_generate_packets (vlib_node_runtime_t * node,
1529  pg_main_t * pg,
1530  pg_stream_t * s, uword n_packets_to_generate)
1531 {
1532  vlib_main_t *vm = vlib_get_main ();
1533  u32 *to_next, n_this_frame, n_left, n_trace, n_packets_in_fifo;
1534  uword n_packets_generated;
1535  pg_buffer_index_t *bi, *bi0;
1536  u32 next_index = s->next_index;
1537  vnet_feature_main_t *fm = &feature_main;
1538  vnet_feature_config_main_t *cm;
1539  u8 feature_arc_index = fm->device_input_feature_arc_index;
1540  cm = &fm->feature_config_mains[feature_arc_index];
1541  u32 current_config_index = ~(u32) 0;
1542  int i;
1543 
1544  bi0 = s->buffer_indices;
1545 
1546  n_packets_in_fifo = pg_stream_fill (pg, s, n_packets_to_generate);
1547  n_packets_to_generate = clib_min (n_packets_in_fifo, n_packets_to_generate);
1548  n_packets_generated = 0;
1549 
1550  if (PREDICT_FALSE
1551  (vnet_have_features (feature_arc_index, s->sw_if_index[VLIB_RX])))
1552  {
1553  current_config_index =
1554  vec_elt (cm->config_index_by_sw_if_index, s->sw_if_index[VLIB_RX]);
1555  vnet_get_config_data (&cm->config_main, &current_config_index,
1556  &next_index, 0);
1557  }
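 /* If device-input features are enabled on the stream's RX interface, the
    feature arc overrides next_index here, and current_config_index is
    stamped on each generated buffer further down so the feature chain is
    walked for the generated packets. */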
1558 
1559  while (n_packets_to_generate > 0)
1560  {
1561  u32 *head, *start, *end;
1562 
1563  if (PREDICT_TRUE (next_index == VNET_DEVICE_INPUT_NEXT_ETHERNET_INPUT))
1564  {
1565  vlib_next_frame_t *nf;
1566  vlib_frame_t *f;
1567  ethernet_input_frame_t *ef;
1568  pg_interface_t *pi;
1569  vlib_get_new_next_frame (vm, node, next_index, to_next, n_left);
1570  nf = vlib_node_runtime_get_next_frame (vm, node, next_index);
1571  f = vlib_get_frame (vm, nf->frame);
1572  f->flags = ETH_INPUT_FRAME_F_SINGLE_SW_IF_IDX;
1573 
1574  ef = vlib_frame_scalar_args (f);
1575  pi = pool_elt_at_index (pg->interfaces, s->pg_if_index);
1576  ef->sw_if_index = pi->sw_if_index;
1577  ef->hw_if_index = pi->hw_if_index;
1578  vlib_frame_no_append (f);
1579  }
1580  else
1581  vlib_get_next_frame (vm, node, next_index, to_next, n_left);
1582 
1583  n_this_frame = n_packets_to_generate;
1584  if (n_this_frame > n_left)
1585  n_this_frame = n_left;
1586 
1587  start = bi0->buffer_fifo;
1588  end = clib_fifo_end (bi0->buffer_fifo);
1589  head = clib_fifo_head (bi0->buffer_fifo);
1590 
1591  if (head + n_this_frame <= end)
1592  vlib_buffer_copy_indices (to_next, head, n_this_frame);
1593  else
1594  {
1595  u32 n = end - head;
1596  vlib_buffer_copy_indices (to_next + 0, head, n);
1597  vlib_buffer_copy_indices (to_next + n, start, n_this_frame - n);
1598  }
1599 
1600  if (s->replay_packet_templates == 0)
1601  {
1602  vec_foreach (bi, s->buffer_indices)
1603  clib_fifo_advance_head (bi->buffer_fifo, n_this_frame);
1604  }
1605  else
1606  {
1607  clib_fifo_advance_head (bi0->buffer_fifo, n_this_frame);
1608  }
1609 
1610  if (current_config_index != ~(u32) 0)
1611  for (i = 0; i < n_this_frame; i++)
1612  {
1613  vlib_buffer_t *b;
1614  b = vlib_get_buffer (vm, to_next[i]);
1615  b->current_config_index = current_config_index;
1616  vnet_buffer (b)->feature_arc_index = feature_arc_index;
1617  }
1618 
1619  n_trace = vlib_get_trace_count (vm, node);
1620  if (n_trace > 0)
1621  {
1622  u32 n = clib_min (n_trace, n_this_frame);
1623  pg_input_trace (pg, node, s - pg->streams, next_index, to_next, n);
1624  vlib_set_trace_count (vm, node, n_trace - n);
1625  }
1626  n_packets_to_generate -= n_this_frame;
1627  n_packets_generated += n_this_frame;
1628  n_left -= n_this_frame;
1629  if (CLIB_DEBUG > 0)
1630  {
1631  int i;
1632  vlib_buffer_t *b;
1633 
1634  for (i = 0; i < n_this_frame; i++)
1635  {
1636  b = vlib_get_buffer (vm, to_next[i]);
1637  ASSERT ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0 ||
1638  b->current_length >= VLIB_BUFFER_MIN_CHAIN_SEG_SIZE);
1639  }
1640  }
1641  vlib_put_next_frame (vm, node, next_index, n_left);
1642  }
1643 
1644  return n_packets_generated;
1645 }
1646 
1647 static uword
1648 pg_input_stream (vlib_node_runtime_t * node, pg_main_t * pg, pg_stream_t * s)
1649 {
1650  vlib_main_t *vm = vlib_get_main ();
1651  uword n_packets;
1652  f64 time_now, dt;
1653 
1654  if (s->n_packets_limit > 0 && s->n_packets_generated >= s->n_packets_limit)
1655  {
1656  pg_stream_enable_disable (pg, s, /* want_enabled */ 0);
1657  return 0;
1658  }
1659 
1660  /* Apply rate limit. */
1661  time_now = vlib_time_now (vm);
1662  if (s->time_last_generate == 0)
1663  s->time_last_generate = time_now;
1664 
1665  dt = time_now - s->time_last_generate;
1666  s->time_last_generate = time_now;
1667 
1668  n_packets = VLIB_FRAME_SIZE;
1669  if (s->rate_packets_per_second > 0)
1670  {
1671  s->packet_accumulator += dt * s->rate_packets_per_second;
1672  n_packets = s->packet_accumulator;
1673 
1674  /* Never allow accumulator to grow if we get behind. */
1675  s->packet_accumulator -= n_packets;
1676  }
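 /* Credit-based limiting: dt * rate credits are added per call, the
    whole-packet part is consumed immediately, and only the fractional
    remainder is carried over, so a stalled node does not bank a burst of
    credits for later frames. */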
1677 
1678  /* Apply fixed limit. */
1679  if (s->n_packets_limit > 0
1680  && s->n_packets_generated + n_packets > s->n_packets_limit)
1681  n_packets = s->n_packets_limit - s->n_packets_generated;
1682 
1683  /* Generate up to one frame's worth of packets. */
1684  if (n_packets > VLIB_FRAME_SIZE)
1685  n_packets = VLIB_FRAME_SIZE;
1686 
1687  if (n_packets > 0)
1688  n_packets = pg_generate_packets (node, pg, s, n_packets);
1689 
1690  s->n_packets_generated += n_packets;
1691 
1692  return n_packets;
1693 }
1694 
1695 uword
1696 pg_input (vlib_main_t * vm, vlib_node_runtime_t * node, vlib_frame_t * frame)
1697 {
1698  uword i;
1699  pg_main_t *pg = &pg_main;
1700  uword n_packets = 0;
1701  u32 worker_index = 0;
1702 
1703  if (vlib_num_workers ())
1704  worker_index = vlib_get_current_worker_index ();
1705 
1706  /* *INDENT-OFF* */
1707  clib_bitmap_foreach (i, pg->enabled_streams[worker_index], ({
1708  pg_stream_t *s = vec_elt_at_index (pg->streams, i);
1709  n_packets += pg_input_stream (node, pg, s);
1710  }));
1711  /* *INDENT-ON* */
1712 
1713  return n_packets;
1714 }
1715 
1716 /* *INDENT-OFF* */
1717 VLIB_REGISTER_NODE (pg_input_node) = {
1718  .function = pg_input,
1719  .name = "pg-input",
1720  .sibling_of = "device-input",
1721  .type = VLIB_NODE_TYPE_INPUT,
1722 
1723  .format_trace = format_pg_input_trace,
1724 
1725  /* Input node will be left disabled until a stream is active. */
1726  .state = VLIB_NODE_STATE_DISABLED,
1727 };
1728 /* *INDENT-ON* */
1729 
1730 /*
1731  * fd.io coding-style-patch-verification: ON
1732  *
1733  * Local Variables:
1734  * eval: (c-set-style "gnu")
1735  * End:
1736  */