/* FD.io VPP v21.10.1-2-g0a485f517 — Vector Packet Processing
   buffer_funcs.h (Doxygen-generated source listing; line numbers embedded,
   some original lines were lost during extraction) */
1 /*
2  * Copyright (c) 2015 Cisco and/or its affiliates.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at:
6  *
7  * http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 /*
16  * buffer_funcs.h: VLIB buffer related functions/inlines
17  *
18  * Copyright (c) 2008 Eliot Dresselhaus
19  *
20  * Permission is hereby granted, free of charge, to any person obtaining
21  * a copy of this software and associated documentation files (the
22  * "Software"), to deal in the Software without restriction, including
23  * without limitation the rights to use, copy, modify, merge, publish,
24  * distribute, sublicense, and/or sell copies of the Software, and to
25  * permit persons to whom the Software is furnished to do so, subject to
26  * the following conditions:
27  *
28  * The above copyright notice and this permission notice shall be
29  * included in all copies or substantial portions of the Software.
30  *
31  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32  * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34  * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35  * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36  * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37  * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
38  */
39 
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
42 
43 #include <vppinfra/hash.h>
44 #include <vppinfra/fifo.h>
45 #include <vlib/buffer.h>
46 #include <vlib/physmem_funcs.h>
47 #include <vlib/main.h>
48 #include <vlib/node.h>
49 
50 /** \file
51  vlib buffer access methods.
52 */
53 
56  u32 *buffers, u16 *nexts,
57  uword count);
60  u32 count);
61 
63  vlib_main_t *vm, vlib_node_runtime_t *node, u32 frame_queue_index,
64  u32 *buffer_indices, u16 *thread_indices, u32 n_packets,
65  int drop_on_congestion);
66 
69 
70 typedef struct
71 {
77 
79 
80 always_inline void
82 {
85 
86  /* reference count in allocated buffer always must be 1 or higher */
87  ASSERT (b->ref_count > 0);
88 
89  /* verify that buffer pool index is valid */
91  ASSERT (pointer_to_uword (b) >= bp->start);
92  ASSERT (pointer_to_uword (b) < bp->start + bp->size -
93  (bp->data_size + sizeof (vlib_buffer_t)));
94 }
95 
96 always_inline void *
97 vlib_buffer_ptr_from_index (uword buffer_mem_start, u32 buffer_index,
98  uword offset)
99 {
100  offset += ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
101  return uword_to_pointer (buffer_mem_start + offset, vlib_buffer_t *);
102 }
103 
104 /** \brief Translate buffer index into buffer pointer
105 
106  @param vm - (vlib_main_t *) vlib main data structure pointer
107  @param buffer_index - (u32) buffer index
108  @return - (vlib_buffer_t *) buffer pointer
109 */
/* Translate a buffer index into a vlib_buffer_t pointer (offset 0).
   NOTE(review): extraction dropped the declarator (orig. lines 110-111) --
   presumably "always_inline vlib_buffer_t * vlib_get_buffer (vlib_main_t
   *vm, u32 buffer_index)" -- plus the 'bm' declaration (orig. 113) and
   one line between the lookup and the return (orig. 117, likely a debug
   validation call).  Confirm against upstream vlib/buffer_funcs.h. */
 112 {
 114  vlib_buffer_t *b;
 115 
 116  b = vlib_buffer_ptr_from_index (bm->buffer_mem_start, buffer_index, 0);
 118  return b;
 119 }
120 
123 {
125 }
126 
129 {
130  clib_memcpy_u32 (dst, src, n_indices);
131 }
132 
 133 always_inline void
/* Copy n_buffers buffer indices out of a circular ring (starting at slot
   'start') into a flat destination array, splitting the copy in two when
   the span wraps past ring_size.
   NOTE(review): the declarator line naming this function (orig. 134) was
   lost in extraction -- presumably
   "vlib_buffer_copy_indices_from_ring (u32 *dst, u32 *ring, u32 start,".
   Confirm against upstream vlib/buffer_funcs.h. */
 135  u32 ring_size, u32 n_buffers)
 136 {
 137  ASSERT (n_buffers <= ring_size);
 138 
 139  if (PREDICT_TRUE (start + n_buffers <= ring_size))
 140  {
/* contiguous span: single copy */
 141  vlib_buffer_copy_indices (dst, ring + start, n_buffers);
 142  }
 143  else
 144  {
/* wrapped span: tail of the ring first, then the head */
 145  u32 n = ring_size - start;
 146  vlib_buffer_copy_indices (dst, ring + start, n);
 147  vlib_buffer_copy_indices (dst + n, ring, n_buffers - n);
 148  }
 149 }
150 
 151 always_inline void
/* Copy n_buffers buffer indices from a flat source array into a circular
   ring starting at slot 'start', splitting the copy when it wraps past
   ring_size.  Mirror image of the from-ring variant above.
   NOTE(review): the declarator line (orig. 152) was lost in extraction --
   presumably "vlib_buffer_copy_indices_to_ring (u32 *ring, u32 *src,
   u32 start,".  Confirm against upstream vlib/buffer_funcs.h. */
 153  u32 ring_size, u32 n_buffers)
 154 {
 155  ASSERT (n_buffers <= ring_size);
 156 
 157  if (PREDICT_TRUE (start + n_buffers <= ring_size))
 158  {
/* contiguous span: single copy */
 159  vlib_buffer_copy_indices (ring + start, src, n_buffers);
 160  }
 161  else
 162  {
/* wrapped span: fill the ring tail, then the head */
 163  u32 n = ring_size - start;
 164  vlib_buffer_copy_indices (ring + start, src, n);
 165  vlib_buffer_copy_indices (ring, src + n, n_buffers - n);
 166  }
 167 }
168 
169 STATIC_ASSERT_OFFSET_OF (vlib_buffer_t, template_end, 64);
172 {
173 #if defined CLIB_HAVE_VEC512
174  b->as_u8x64[0] = bt->as_u8x64[0];
175 #elif defined (CLIB_HAVE_VEC256)
176  b->as_u8x32[0] = bt->as_u8x32[0];
177  b->as_u8x32[1] = bt->as_u8x32[1];
178 #elif defined (CLIB_HAVE_VEC128)
179  b->as_u8x16[0] = bt->as_u8x16[0];
180  b->as_u8x16[1] = bt->as_u8x16[1];
181  b->as_u8x16[2] = bt->as_u8x16[2];
182  b->as_u8x16[3] = bt->as_u8x16[3];
183 #else
184  clib_memcpy_fast (b, bt, 64);
185 #endif
186 }
187 
190 {
191  ASSERT (numa_node < VLIB_BUFFER_MAX_NUMA_NODES);
193 }
194 
195 /** \brief Translate array of buffer indices into buffer pointers with offset
196 
197  @param vm - (vlib_main_t *) vlib main data structure pointer
198  @param bi - (u32 *) array of buffer indices
199  @param b - (void **) array to store buffer pointers
200  @param count - (uword) number of elements
201  @param offset - (i32) offset applied to each pointer
202 */
205  i32 offset)
206 {
207  uword buffer_mem_start = vm->buffer_main->buffer_mem_start;
208 #ifdef CLIB_HAVE_VEC512
209  u64x8 of8 = u64x8_splat (buffer_mem_start + offset);
210  u64x4 off = u64x8_extract_lo (of8);
211  /* if count is not const, compiler will not unroll while loop
212  se we maintain two-in-parallel variant */
213  while (count >= 32)
214  {
215  u64x8 b0 = u64x8_from_u32x8 (u32x8_load_unaligned (bi));
216  u64x8 b1 = u64x8_from_u32x8 (u32x8_load_unaligned (bi + 8));
217  u64x8 b2 = u64x8_from_u32x8 (u32x8_load_unaligned (bi + 16));
218  u64x8 b3 = u64x8_from_u32x8 (u32x8_load_unaligned (bi + 24));
219  /* shift and add to get vlib_buffer_t pointer */
220  u64x8_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + of8, b);
221  u64x8_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + of8, b + 8);
222  u64x8_store_unaligned ((b2 << CLIB_LOG2_CACHE_LINE_BYTES) + of8, b + 16);
223  u64x8_store_unaligned ((b3 << CLIB_LOG2_CACHE_LINE_BYTES) + of8, b + 24);
224  b += 32;
225  bi += 32;
226  count -= 32;
227  }
228  while (count >= 8)
229  {
230  u64x8 b0 = u64x8_from_u32x8 (u32x8_load_unaligned (bi));
231  /* shift and add to get vlib_buffer_t pointer */
232  u64x8_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + of8, b);
233  b += 8;
234  bi += 8;
235  count -= 8;
236  }
237 #elif defined CLIB_HAVE_VEC256
238  u64x4 off = u64x4_splat (buffer_mem_start + offset);
239  /* if count is not const, compiler will not unroll while loop
240  se we maintain two-in-parallel variant */
241  while (count >= 32)
242  {
243  u64x4 b0 = u64x4_from_u32x4 (u32x4_load_unaligned (bi));
244  u64x4 b1 = u64x4_from_u32x4 (u32x4_load_unaligned (bi + 4));
245  u64x4 b2 = u64x4_from_u32x4 (u32x4_load_unaligned (bi + 8));
246  u64x4 b3 = u64x4_from_u32x4 (u32x4_load_unaligned (bi + 12));
247  u64x4 b4 = u64x4_from_u32x4 (u32x4_load_unaligned (bi + 16));
248  u64x4 b5 = u64x4_from_u32x4 (u32x4_load_unaligned (bi + 20));
249  u64x4 b6 = u64x4_from_u32x4 (u32x4_load_unaligned (bi + 24));
250  u64x4 b7 = u64x4_from_u32x4 (u32x4_load_unaligned (bi + 28));
251  /* shift and add to get vlib_buffer_t pointer */
252  u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
253  u64x4_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 4);
254  u64x4_store_unaligned ((b2 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 8);
255  u64x4_store_unaligned ((b3 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 12);
256  u64x4_store_unaligned ((b4 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 16);
257  u64x4_store_unaligned ((b5 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 20);
258  u64x4_store_unaligned ((b6 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 24);
259  u64x4_store_unaligned ((b7 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 28);
260  b += 32;
261  bi += 32;
262  count -= 32;
263  }
264 #endif
265  while (count >= 4)
266  {
267 #ifdef CLIB_HAVE_VEC256
268  u64x4 b0 = u64x4_from_u32x4 (u32x4_load_unaligned (bi));
269  /* shift and add to get vlib_buffer_t pointer */
270  u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
271 #elif defined (CLIB_HAVE_VEC128)
272  u64x2 off = u64x2_splat (buffer_mem_start + offset);
273  u32x4 bi4 = u32x4_load_unaligned (bi);
274  u64x2 b0 = u64x2_from_u32x4 ((u32x4) bi4);
275 #if defined (__aarch64__)
276  u64x2 b1 = u64x2_from_u32x4_high ((u32x4) bi4);
277 #else
278  bi4 = u32x4_shuffle (bi4, 2, 3, 0, 1);
279  u64x2 b1 = u64x2_from_u32x4 ((u32x4) bi4);
280 #endif
281  u64x2_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
282  u64x2_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 2);
283 #else
284  b[0] = vlib_buffer_ptr_from_index (buffer_mem_start, bi[0], offset);
285  b[1] = vlib_buffer_ptr_from_index (buffer_mem_start, bi[1], offset);
286  b[2] = vlib_buffer_ptr_from_index (buffer_mem_start, bi[2], offset);
287  b[3] = vlib_buffer_ptr_from_index (buffer_mem_start, bi[3], offset);
288 #endif
289  b += 4;
290  bi += 4;
291  count -= 4;
292  }
293  while (count)
294  {
295  b[0] = vlib_buffer_ptr_from_index (buffer_mem_start, bi[0], offset);
296  b += 1;
297  bi += 1;
298  count -= 1;
299  }
300 }
301 
302 /** \brief Translate array of buffer indices into buffer pointers
303 
304  @param vm - (vlib_main_t *) vlib main data structure pointer
305  @param bi - (u32 *) array of buffer indices
306  @param b - (vlib_buffer_t **) array to store buffer pointers
307  @param count - (uword) number of elements
308 */
 309 
/* Thin wrapper: translate an array of buffer indices into vlib_buffer_t
   pointers with zero offset.
   NOTE(review): the declarator (orig. 310-311) was lost in extraction --
   presumably "static_always_inline void vlib_get_buffers (vlib_main_t *vm,
   u32 *bi, vlib_buffer_t **b, u32 count)".  Confirm upstream. */
 312 {
 313  vlib_get_buffers_with_offset (vm, bi, (void **) b, count, 0);
 314 }
315 
316 /** \brief Translate buffer pointer into buffer index
317 
318  @param vm - (vlib_main_t *) vlib main data structure pointer
319  @param p - (void *) buffer pointer
320  @return - (u32) buffer index
321 */
322 
325 {
329  ASSERT (offset < bm->buffer_mem_size);
330  ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
332 }
333 
334 /** \brief Translate array of buffer pointers into buffer indices with offset
335 
336  @param vm - (vlib_main_t *) vlib main data structure pointer
337  @param b - (void **) array of buffer pointers
338  @param bi - (u32 *) array to store buffer indices
339  @param count - (uword) number of elements
340  @param offset - (i32) offset applied to each pointer
341 */
345 {
346 #ifdef CLIB_HAVE_VEC256
347  u32x8 mask = { 0, 2, 4, 6, 1, 3, 5, 7 };
348  u64x4 off4 = u64x4_splat (vm->buffer_main->buffer_mem_start - offset);
349 
350  while (count >= 8)
351  {
352  /* load 4 pointers into 256-bit register */
353  u64x4 v0 = u64x4_load_unaligned (b);
354  u64x4 v1 = u64x4_load_unaligned (b + 4);
355  u32x8 v2, v3;
356 
357  v0 -= off4;
358  v1 -= off4;
359 
362 
363  /* permute 256-bit register so lower u32s of each buffer index are
364  * placed into lower 128-bits */
365  v2 = u32x8_permute ((u32x8) v0, mask);
366  v3 = u32x8_permute ((u32x8) v1, mask);
367 
368  /* extract lower 128-bits and save them to the array of buffer indices */
369  u32x4_store_unaligned (u32x8_extract_lo (v2), bi);
370  u32x4_store_unaligned (u32x8_extract_lo (v3), bi + 4);
371  bi += 8;
372  b += 8;
373  count -= 8;
374  }
375 #endif
376  while (count >= 4)
377  {
378  /* equivalent non-nector implementation */
379  bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
380  bi[1] = vlib_get_buffer_index (vm, ((u8 *) b[1]) + offset);
381  bi[2] = vlib_get_buffer_index (vm, ((u8 *) b[2]) + offset);
382  bi[3] = vlib_get_buffer_index (vm, ((u8 *) b[3]) + offset);
383  bi += 4;
384  b += 4;
385  count -= 4;
386  }
387  while (count)
388  {
389  bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
390  bi += 1;
391  b += 1;
392  count -= 1;
393  }
394 }
395 
396 /** \brief Translate array of buffer pointers into buffer indices
397 
398  @param vm - (vlib_main_t *) vlib main data structure pointer
399  @param b - (vlib_buffer_t **) array of buffer pointers
400  @param bi - (u32 *) array to store buffer indices
401  @param count - (uword) number of elements
402 */
/* Thin wrapper: translate an array of buffer pointers back into buffer
   indices with zero offset.
   NOTE(review): the leading declarator lines (orig. 403-404) were lost in
   extraction -- presumably "static_always_inline void
   vlib_get_buffer_indices (vlib_main_t *vm, vlib_buffer_t **b, u32 *bi,".
   Confirm upstream. */
 405  uword count)
 406 {
 407  vlib_get_buffer_indices_with_offset (vm, (void **) b, bi, count, 0);
 408 }
409 
410 /** \brief Get next buffer in buffer linklist, or zero for end of list.
411 
412  @param vm - (vlib_main_t *) vlib main data structure pointer
413  @param b - (void *) buffer pointer
414  @return - (vlib_buffer_t *) next buffer, or NULL
415 */
418 {
419  return (b->flags & VLIB_BUFFER_NEXT_PRESENT
420  ? vlib_get_buffer (vm, b->next_buffer) : 0);
421 }
422 
424  vlib_buffer_t * b_first);
425 
426 /** \brief Get length in bytes of the buffer chain
427 
428  @param vm - (vlib_main_t *) vlib main data structure pointer
429  @param b - (void *) buffer pointer
430  @return - (uword) length of buffer chain
431 */
/* Total byte length of a buffer chain: fast path for single-segment
   buffers, cached total when VLIB_BUFFER_TOTAL_LENGTH_VALID is set,
   otherwise (presumably) a slow-path walk.
   NOTE(review): heavily damaged by extraction -- the declarator (orig.
   432-433), the 'len' initializer (orig. 435) and both tail return
   statements (orig. 441, 443) are missing; only the control-flow skeleton
   remains.  Recover from upstream vlib/buffer_funcs.h before relying on
   this text. */
 434 {
 436 
 437  if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
/* single segment: current_length alone is the total */
 438  return len;
 439 
 440  if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
 442 
 444 }
445 
446 /** \brief Get length in bytes of the buffer index buffer chain
447 
448  @param vm - (vlib_main_t *) vlib main data structure pointer
449  @param bi - (u32) buffer index
450  @return - (uword) length of buffer chain
451 */
454 {
456  return vlib_buffer_length_in_chain (vm, b);
457 }
458 
459 /** \brief Copy buffer contents to memory
460 
461  @param vm - (vlib_main_t *) vlib main data structure pointer
462  @param buffer_index - (u32) buffer index
463  @param contents - (u8 *) memory, <strong>must be large enough</strong>
464  @return - (uword) length of buffer chain
465 */
/* Linearize an entire buffer chain into caller-supplied memory.
   Walks next_buffer links, appending each segment's current_length bytes.
   Caller must guarantee 'contents' is large enough for the whole chain
   (see the doc comment above).
   NOTE(review): the return-type line (orig. 466, presumably
   "always_inline uword") was lost in extraction; confirm upstream. */
 467 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
 468 {
 469  uword content_len = 0;
 470  uword l;
 471  vlib_buffer_t *b;
 472 
 473  while (1)
 474  {
 475  b = vlib_get_buffer (vm, buffer_index);
 476  l = b->current_length;
/* copy this segment's payload after what has been copied so far */
 477  clib_memcpy_fast (contents + content_len, b->data + b->current_data, l);
 478  content_len += l;
 479  if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
 480  break;
 481  buffer_index = b->next_buffer;
 482  }
 483 
 484  return content_len;
 485 }
486 
489 {
490  return vlib_physmem_get_pa (vm, b->data);
491 }
492 
495 {
496  return vlib_buffer_get_pa (vm, b) + b->current_data;
497 }
498 
499 /** \brief Prefetch buffer metadata by buffer index
500  The first 64 bytes of buffer contains most header information
501 
502  @param vm - (vlib_main_t *) vlib main data structure pointer
503  @param bi - (u32) buffer index
504  @param type - LOAD, STORE. In most cases, STORE is the right answer
505 */
 506 /* Prefetch buffer header given index. */
/* Resolves 'bi' to a vlib_buffer_t * and prefetches its header cache
   line; 'type' is LOAD or STORE, passed through to
   vlib_prefetch_buffer_header. */
 507 #define vlib_prefetch_buffer_with_index(vm,bi,type) \
 508  do { \
 509  vlib_buffer_t * _b = vlib_get_buffer (vm, bi); \
 510  vlib_prefetch_buffer_header (_b, type); \
 511  } while (0)
512 
513 typedef enum
514 {
515  /* Index is unknown. */
517 
518  /* Index is known and free/allocated. */
522 
526  expected_state);
527 
530 {
532 
534  uword *p = hash_get (bm->buffer_known_hash, buffer_index);
536  return p ? p[0] : VLIB_BUFFER_UNKNOWN;
537 }
538 
539 /* Validates sanity of a single buffer.
540  Returns format'ed vector with error message if any. */
541 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
542  uword follow_chain);
543 
545  u32 * buffers,
546  uword next_buffer_stride,
548  vlib_buffer_known_state_t known_state,
549  uword follow_buffer_next);
550 
552 vlib_get_buffer_pool (vlib_main_t * vm, u8 buffer_pool_index)
553 {
555  return vec_elt_at_index (bm->buffer_pools, buffer_pool_index);
556 }
557 
558 static_always_inline __clib_warn_unused_result uword
559 vlib_buffer_pool_get (vlib_main_t * vm, u8 buffer_pool_index, u32 * buffers,
560  u32 n_buffers)
561 {
562  vlib_buffer_pool_t *bp = vlib_get_buffer_pool (vm, buffer_pool_index);
563  u32 len;
564 
565  ASSERT (bp->buffers);
566 
567  clib_spinlock_lock (&bp->lock);
568  len = bp->n_avail;
569  if (PREDICT_TRUE (n_buffers < len))
570  {
571  len -= n_buffers;
572  vlib_buffer_copy_indices (buffers, bp->buffers + len, n_buffers);
573  bp->n_avail = len;
574  clib_spinlock_unlock (&bp->lock);
575  return n_buffers;
576  }
577  else
578  {
579  vlib_buffer_copy_indices (buffers, bp->buffers, len);
580  bp->n_avail = 0;
581  clib_spinlock_unlock (&bp->lock);
582  return len;
583  }
584 }
585 
586 
587 /** \brief Allocate buffers from specific pool into supplied array
588 
589  @param vm - (vlib_main_t *) vlib main data structure pointer
590  @param buffers - (u32 * ) buffer index array
591  @param n_buffers - (u32) number of buffers requested
592  @return - (u32) number of buffers actually allocated, may be
593  less than the number requested or zero
594 */
595 
596 always_inline __clib_warn_unused_result u32
598  u8 buffer_pool_index)
599 {
601  vlib_buffer_pool_t *bp;
603  u32 *src, *dst, len, n_left;
604 
605  /* If buffer allocation fault injection is configured */
606  if (VLIB_BUFFER_ALLOC_FAULT_INJECTOR > 0)
607  {
608  u32 vlib_buffer_alloc_may_fail (vlib_main_t *, u32);
609 
610  /* See how many buffers we're willing to allocate */
611  n_buffers = vlib_buffer_alloc_may_fail (vm, n_buffers);
612  if (n_buffers == 0)
613  return (n_buffers);
614  }
615 
616  bp = vec_elt_at_index (bm->buffer_pools, buffer_pool_index);
617  bpt = vec_elt_at_index (bp->threads, vm->thread_index);
618 
619  dst = buffers;
620  n_left = n_buffers;
621  len = bpt->n_cached;
622 
623  /* per-thread cache contains enough buffers */
624  if (len >= n_buffers)
625  {
626  src = bpt->cached_buffers + len - n_buffers;
628  bpt->n_cached -= n_buffers;
629  goto done;
630  }
631 
632  /* alloc bigger than cache - take buffers directly from main pool */
634  {
635  n_buffers = vlib_buffer_pool_get (vm, buffer_pool_index, buffers,
636  n_buffers);
637  goto done;
638  }
639 
640  /* take everything available in the cache */
641  if (len)
642  {
644  bpt->n_cached = 0;
645  dst += len;
646  n_left -= len;
647  }
648 
649  len = round_pow2 (n_left, 32);
650  len = vlib_buffer_pool_get (vm, buffer_pool_index, bpt->cached_buffers,
651  len);
652  bpt->n_cached = len;
653 
654  if (len)
655  {
656  u32 n_copy = clib_min (len, n_left);
657  src = bpt->cached_buffers + len - n_copy;
658  vlib_buffer_copy_indices (dst, src, n_copy);
659  bpt->n_cached -= n_copy;
660  n_left -= n_copy;
661  }
662 
663  n_buffers -= n_left;
664 
665 done:
666  /* Verify that buffers are known free. */
667  if (CLIB_DEBUG > 0)
670  if (PREDICT_FALSE (bm->alloc_callback_fn != 0))
671  bm->alloc_callback_fn (vm, buffer_pool_index, buffers, n_buffers);
672  return n_buffers;
673 }
674 
675 /** \brief Allocate buffers from specific numa node into supplied array
676 
677  @param vm - (vlib_main_t *) vlib main data structure pointer
678  @param buffers - (u32 * ) buffer index array
679  @param n_buffers - (u32) number of buffers requested
680  @param numa_node - (u32) numa node
681  @return - (u32) number of buffers actually allocated, may be
682  less than the number requested or zero
683 */
684 always_inline __clib_warn_unused_result u32
686  u32 numa_node)
687 {
689  return vlib_buffer_alloc_from_pool (vm, buffers, n_buffers, index);
690 }
691 
692 /** \brief Allocate buffers into supplied array
693 
694  @param vm - (vlib_main_t *) vlib main data structure pointer
695  @param buffers - (u32 * ) buffer index array
696  @param n_buffers - (u32) number of buffers requested
697  @return - (u32) number of buffers actually allocated, may be
698  less than the number requested or zero
699 */
700 
701 always_inline __clib_warn_unused_result u32
703 {
704  return vlib_buffer_alloc_on_numa (vm, buffers, n_buffers, vm->numa_node);
705 }
706 
707 /** \brief Allocate buffers into ring
708 
709  @param vm - (vlib_main_t *) vlib main data structure pointer
710  @param buffers - (u32 * ) buffer index ring
711  @param start - (u32) first slot in the ring
712  @param ring_size - (u32) ring size
713  @param n_buffers - (u32) number of buffers requested
714  @return - (u32) number of buffers actually allocated, may be
715  less than the number requested or zero
716 */
 717 always_inline __clib_warn_unused_result u32
/* Allocate n_buffers into a circular ring starting at slot 'start',
   splitting into two allocations when the request wraps past ring_size.
   Returns the number actually allocated.
   NOTE(review): the declarator line (orig. 718) was lost in extraction --
   presumably "vlib_buffer_alloc_to_ring (vlib_main_t *vm, u32 *ring,
   u32 start,".  Confirm upstream. */
 719  u32 ring_size, u32 n_buffers)
 720 {
 721  u32 n_alloc;
 722 
 723  ASSERT (n_buffers <= ring_size);
 724 
 725  if (PREDICT_TRUE (start + n_buffers <= ring_size))
 726  return vlib_buffer_alloc (vm, ring + start, n_buffers);
 727 
 728  n_alloc = vlib_buffer_alloc (vm, ring + start, ring_size - start);
 729 
 730  if (PREDICT_TRUE (n_alloc == ring_size - start))
/* continue at the ring head only if the tail allocation fully succeeded */
 731  n_alloc += vlib_buffer_alloc (vm, ring, n_buffers - n_alloc);
 732 
 733  return n_alloc;
 734 }
735 
736 /** \brief Allocate buffers into ring from specific buffer pool
737 
738  @param vm - (vlib_main_t *) vlib main data structure pointer
739  @param buffers - (u32 * ) buffer index ring
740  @param start - (u32) first slot in the ring
741  @param ring_size - (u32) ring size
742  @param n_buffers - (u32) number of buffers requested
743  @return - (u32) number of buffers actually allocated, may be
744  less than the number requested or zero
745 */
 746 always_inline __clib_warn_unused_result u32
/* Same as vlib_buffer_alloc_to_ring, but draws from a specific buffer
   pool.  Returns the number actually allocated.
   NOTE(review): the first declarator line (orig. 747) was lost in
   extraction -- presumably "vlib_buffer_alloc_to_ring_from_pool
   (vlib_main_t *vm, u32 *ring, u32 start,".  Confirm upstream. */
 748  u32 ring_size, u32 n_buffers,
 749  u8 buffer_pool_index)
 750 {
 751  u32 n_alloc;
 752 
 753  ASSERT (n_buffers <= ring_size);
 754 
 755  if (PREDICT_TRUE (start + n_buffers <= ring_size))
 756  return vlib_buffer_alloc_from_pool (vm, ring + start, n_buffers,
 757  buffer_pool_index);
 758 
 759  n_alloc = vlib_buffer_alloc_from_pool (vm, ring + start, ring_size - start,
 760  buffer_pool_index);
 761 
 762  if (PREDICT_TRUE (n_alloc == ring_size - start))
/* continue at the ring head only if the tail allocation fully succeeded */
 763  n_alloc += vlib_buffer_alloc_from_pool (vm, ring, n_buffers - n_alloc,
 764  buffer_pool_index);
 765 
 766  return n_alloc;
 767 }
768 
770 vlib_buffer_pool_put (vlib_main_t * vm, u8 buffer_pool_index,
771  u32 * buffers, u32 n_buffers)
772 {
774  vlib_buffer_pool_t *bp = vlib_get_buffer_pool (vm, buffer_pool_index);
776  vm->thread_index);
777  u32 n_cached, n_empty;
778 
779  if (CLIB_DEBUG > 0)
782  if (PREDICT_FALSE (bm->free_callback_fn != 0))
783  bm->free_callback_fn (vm, buffer_pool_index, buffers, n_buffers);
784 
785  n_cached = bpt->n_cached;
786  n_empty = VLIB_BUFFER_POOL_PER_THREAD_CACHE_SZ - n_cached;
787  if (n_buffers <= n_empty)
788  {
789  vlib_buffer_copy_indices (bpt->cached_buffers + n_cached,
790  buffers, n_buffers);
791  bpt->n_cached = n_cached + n_buffers;
792  return;
793  }
794 
795  vlib_buffer_copy_indices (bpt->cached_buffers + n_cached,
796  buffers + n_buffers - n_empty, n_empty);
798 
799  clib_spinlock_lock (&bp->lock);
800  vlib_buffer_copy_indices (bp->buffers + bp->n_avail, buffers,
801  n_buffers - n_empty);
802  bp->n_avail += n_buffers - n_empty;
803  clib_spinlock_unlock (&bp->lock);
804 }
805 
808  int maybe_next)
809 {
810  const int queue_size = 128;
811  vlib_buffer_pool_t *bp = 0;
812  u8 buffer_pool_index = ~0;
813  u32 n_queue = 0, queue[queue_size + 4];
814  vlib_buffer_t bt = { };
815 #if defined(CLIB_HAVE_VEC128)
816  vlib_buffer_t bpi_mask = {.buffer_pool_index = ~0 };
817  vlib_buffer_t bpi_vec = {};
818  vlib_buffer_t flags_refs_mask = {
819  .flags = VLIB_BUFFER_NEXT_PRESENT,
820  .ref_count = ~1
821  };
822 #endif
823 
824  if (PREDICT_FALSE (n_buffers == 0))
825  return;
826 
827  vlib_buffer_t *b = vlib_get_buffer (vm, buffers[0]);
828  buffer_pool_index = b->buffer_pool_index;
829  bp = vlib_get_buffer_pool (vm, buffer_pool_index);
831 #if defined(CLIB_HAVE_VEC128)
832  bpi_vec.buffer_pool_index = buffer_pool_index;
833 #endif
834 
835  while (n_buffers)
836  {
837  vlib_buffer_t *b[8];
838  u32 bi, sum = 0, flags, next;
839 
840  if (n_buffers < 4)
841  goto one_by_one;
842 
843  vlib_get_buffers (vm, buffers, b, 4);
844 
845  if (n_buffers >= 12)
846  {
847  vlib_get_buffers (vm, buffers + 8, b + 4, 4);
848  vlib_prefetch_buffer_header (b[4], LOAD);
849  vlib_prefetch_buffer_header (b[5], LOAD);
850  vlib_prefetch_buffer_header (b[6], LOAD);
851  vlib_prefetch_buffer_header (b[7], LOAD);
852  }
853 
854 #if defined(CLIB_HAVE_VEC128)
855  u8x16 p0, p1, p2, p3, r;
856  p0 = u8x16_load_unaligned (b[0]);
857  p1 = u8x16_load_unaligned (b[1]);
858  p2 = u8x16_load_unaligned (b[2]);
859  p3 = u8x16_load_unaligned (b[3]);
860 
861  r = p0 ^ bpi_vec.as_u8x16[0];
862  r |= p1 ^ bpi_vec.as_u8x16[0];
863  r |= p2 ^ bpi_vec.as_u8x16[0];
864  r |= p3 ^ bpi_vec.as_u8x16[0];
865  r &= bpi_mask.as_u8x16[0];
866  r |= (p0 | p1 | p2 | p3) & flags_refs_mask.as_u8x16[0];
867 
868  sum = !u8x16_is_all_zero (r);
869 #else
870  sum |= b[0]->flags;
871  sum |= b[1]->flags;
872  sum |= b[2]->flags;
873  sum |= b[3]->flags;
874  sum &= VLIB_BUFFER_NEXT_PRESENT;
875  sum += b[0]->ref_count - 1;
876  sum += b[1]->ref_count - 1;
877  sum += b[2]->ref_count - 1;
878  sum += b[3]->ref_count - 1;
879  sum |= b[0]->buffer_pool_index ^ buffer_pool_index;
880  sum |= b[1]->buffer_pool_index ^ buffer_pool_index;
881  sum |= b[2]->buffer_pool_index ^ buffer_pool_index;
882  sum |= b[3]->buffer_pool_index ^ buffer_pool_index;
883 #endif
884 
885  if (sum)
886  goto one_by_one;
887 
888  vlib_buffer_copy_indices (queue + n_queue, buffers, 4);
889  vlib_buffer_copy_template (b[0], &bt);
890  vlib_buffer_copy_template (b[1], &bt);
891  vlib_buffer_copy_template (b[2], &bt);
892  vlib_buffer_copy_template (b[3], &bt);
893  n_queue += 4;
894 
895  vlib_buffer_validate (vm, b[0]);
896  vlib_buffer_validate (vm, b[1]);
897  vlib_buffer_validate (vm, b[2]);
898  vlib_buffer_validate (vm, b[3]);
899 
904 
905  if (n_queue >= queue_size)
906  {
907  vlib_buffer_pool_put (vm, buffer_pool_index, queue, n_queue);
908  n_queue = 0;
909  }
910  buffers += 4;
911  n_buffers -= 4;
912  continue;
913 
914  one_by_one:
915  bi = buffers[0];
916 
917  next_in_chain:
918  b[0] = vlib_get_buffer (vm, bi);
919  flags = b[0]->flags;
920  next = b[0]->next_buffer;
921 
922  if (PREDICT_FALSE (buffer_pool_index != b[0]->buffer_pool_index))
923  {
924 
925  if (n_queue)
926  {
927  vlib_buffer_pool_put (vm, buffer_pool_index, queue, n_queue);
928  n_queue = 0;
929  }
930 
931  buffer_pool_index = b[0]->buffer_pool_index;
932 #if defined(CLIB_HAVE_VEC128)
933  bpi_vec.buffer_pool_index = buffer_pool_index;
934 #endif
935  bp = vlib_get_buffer_pool (vm, buffer_pool_index);
937  }
938 
939  vlib_buffer_validate (vm, b[0]);
940 
942 
943  if (clib_atomic_sub_fetch (&b[0]->ref_count, 1) == 0)
944  {
945  vlib_buffer_copy_template (b[0], &bt);
946  queue[n_queue++] = bi;
947  }
948 
949  if (n_queue == queue_size)
950  {
951  vlib_buffer_pool_put (vm, buffer_pool_index, queue, queue_size);
952  n_queue = 0;
953  }
954 
955  if (maybe_next && (flags & VLIB_BUFFER_NEXT_PRESENT))
956  {
957  bi = next;
958  goto next_in_chain;
959  }
960 
961  buffers++;
962  n_buffers--;
963  }
964 
965  if (n_queue)
966  vlib_buffer_pool_put (vm, buffer_pool_index, queue, n_queue);
967 }
968 
969 
970 /** \brief Free buffers
971  Frees the entire buffer chain for each buffer
972 
973  @param vm - (vlib_main_t *) vlib main data structure pointer
974  @param buffers - (u32 * ) buffer index array
975  @param n_buffers - (u32) number of buffers to free
976 
977 */
 978 always_inline void
/* Free an array of buffer indices, following and freeing each buffer's
   whole chain (maybe_next = 1).
   NOTE(review): the declarator line (orig. 979) was lost in extraction --
   presumably "vlib_buffer_free (vlib_main_t * vm,".  Confirm upstream. */
 980  /* pointer to first buffer */
 981  u32 * buffers,
 982  /* number of buffers to free */
 983  u32 n_buffers)
 984 {
 985  vlib_buffer_free_inline (vm, buffers, n_buffers, /* maybe next */ 1);
 986 }
987 
988 /** \brief Free buffers, does not free the buffer chain for each buffer
989 
990  @param vm - (vlib_main_t *) vlib main data structure pointer
991  @param buffers - (u32 * ) buffer index array
992  @param n_buffers - (u32) number of buffers to free
993 
994 */
 995 always_inline void
/* Free an array of buffer indices WITHOUT following next_buffer chains
   (maybe_next = 0); chained tail buffers are left allocated.
   NOTE(review): the declarator line (orig. 996) was lost in extraction --
   presumably "vlib_buffer_free_no_next (vlib_main_t * vm,".  Confirm
   upstream. */
 997  /* pointer to first buffer */
 998  u32 * buffers,
 999  /* number of buffers to free */
 1000  u32 n_buffers)
 1001 {
 1002  vlib_buffer_free_inline (vm, buffers, n_buffers, /* maybe next */ 0);
 1003 }
1004 
1005 /** \brief Free one buffer
1006  Shorthand to free a single buffer chain.
1007 
1008  @param vm - (vlib_main_t *) vlib main data structure pointer
1009  @param buffer_index - (u32) buffer index to free
1010 */
 1011 always_inline void
/* Shorthand: free a single buffer index, following its chain.
   NOTE(review): the declarator line (orig. 1012) was lost in extraction --
   presumably "vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)".
   Confirm upstream. */
 1013 {
 1014  vlib_buffer_free_inline (vm, &buffer_index, 1, /* maybe next */ 1);
 1015 }
1016 
1017 /** \brief Free buffers from ring
1018 
1019  @param vm - (vlib_main_t *) vlib main data structure pointer
1020  @param buffers - (u32 * ) buffer index ring
1021  @param start - (u32) first slot in the ring
1022  @param ring_size - (u32) ring size
1023  @param n_buffers - (u32) number of buffers
1024 */
 1025 always_inline void
/* Free n_buffers indices held in a circular ring starting at 'start',
   splitting into two frees when the span wraps past ring_size.  Each
   buffer's chain is freed (delegates to vlib_buffer_free).
   NOTE(review): the declarator line (orig. 1026) was lost in extraction --
   presumably "vlib_buffer_free_from_ring (vlib_main_t *vm, u32 *ring,
   u32 start,".  Confirm upstream. */
 1027  u32 ring_size, u32 n_buffers)
 1028 {
 1029  ASSERT (n_buffers <= ring_size);
 1030 
 1031  if (PREDICT_TRUE (start + n_buffers <= ring_size))
 1032  {
 1033  vlib_buffer_free (vm, ring + start, n_buffers);
 1034  }
 1035  else
 1036  {
/* wrapped span: free the ring tail, then the head */
 1037  vlib_buffer_free (vm, ring + start, ring_size - start);
 1038  vlib_buffer_free (vm, ring, n_buffers - (ring_size - start));
 1039  }
 1040 }
1041 
1042 /** \brief Free buffers from ring without freeing tail buffers
1043 
1044  @param vm - (vlib_main_t *) vlib main data structure pointer
1045  @param buffers - (u32 * ) buffer index ring
1046  @param start - (u32) first slot in the ring
1047  @param ring_size - (u32) ring size
1048  @param n_buffers - (u32) number of buffers
1049 */
 1050 always_inline void
/* Free n_buffers indices held in a circular ring starting at 'start',
   splitting when the span wraps past ring_size; does NOT follow or free
   next_buffer chains (delegates to vlib_buffer_free_no_next).
   NOTE(review): the declarator line (orig. 1051) was lost in extraction --
   presumably "vlib_buffer_free_from_ring_no_next (vlib_main_t *vm,
   u32 *ring, u32 start,".  Confirm upstream. */
 1052  u32 ring_size, u32 n_buffers)
 1053 {
 1054  ASSERT (n_buffers <= ring_size);
 1055 
 1056  if (PREDICT_TRUE (start + n_buffers <= ring_size))
 1057  {
 1058  vlib_buffer_free_no_next (vm, ring + start, n_buffers);
 1059  }
 1060  else
 1061  {
/* wrapped span: free the ring tail, then the head */
 1062  vlib_buffer_free_no_next (vm, ring + start, ring_size - start);
 1063  vlib_buffer_free_no_next (vm, ring, n_buffers - (ring_size - start));
 1064  }
 1065 }
1066 
1067 /* Append given data to end of buffer, possibly allocating new buffers. */
1068 int vlib_buffer_add_data (vlib_main_t * vm, u32 * buffer_index, void *data,
1069  u32 n_data_bytes);
1070 
1071 /* Define vlib_buffer and vnet_buffer flags bits preserved for copy/clone */
1072 #define VLIB_BUFFER_COPY_CLONE_FLAGS_MASK \
1073  (VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID | \
1074  VLIB_BUFFER_IS_TRACED | ~VLIB_BUFFER_FLAGS_ALL)
1075 
1076 /* duplicate all buffers in chain */
1079 {
1080  vlib_buffer_t *s, *d, *fd;
1081  uword n_alloc, n_buffers = 1;
1083  int i;
1084 
1085  s = b;
1086  while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
1087  {
1088  n_buffers++;
1089  s = vlib_get_buffer (vm, s->next_buffer);
1090  }
1091  u32 new_buffers[n_buffers];
1092 
1093  n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
1094 
1095  /* No guarantee that we'll get all the buffers we asked for */
1096  if (PREDICT_FALSE (n_alloc < n_buffers))
1097  {
1098  if (n_alloc > 0)
1099  vlib_buffer_free (vm, new_buffers, n_alloc);
1100  return 0;
1101  }
1102 
1103  /* 1st segment */
1104  s = b;
1105  fd = d = vlib_get_buffer (vm, new_buffers[0]);
1106  d->current_data = s->current_data;
1108  d->flags = s->flags & flag_mask;
1109  d->trace_handle = s->trace_handle;
1112  clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
1113  clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
1116 
1117  /* next segments */
1118  for (i = 1; i < n_buffers; i++)
1119  {
1120  /* previous */
1121  d->next_buffer = new_buffers[i];
1122  /* current */
1123  s = vlib_get_buffer (vm, s->next_buffer);
1124  d = vlib_get_buffer (vm, new_buffers[i]);
1125  d->current_data = s->current_data;
1129  d->flags = s->flags & flag_mask;
1130  }
1131 
1132  return fd;
1133 }
1134 
1135 /* duplicate first buffer in chain */
/* NOTE(review): this is vlib_buffer_copy_no_chain(); its signature and a
 * few body lines (current_length copy, payload memcpy) were elided by the
 * extractor -- the embedded line numbers jump. */
1138 {
1139  vlib_buffer_t *d;
1140 
 /* Need exactly one fresh buffer; returns NULL on allocation failure. */
1141  if ((vlib_buffer_alloc (vm, di, 1)) != 1)
1142  return 0;
1143 
1144  d = vlib_get_buffer (vm, *di);
1145  /* 1st segment */
1146  d->current_data = b->current_data;
 /* Copy per-buffer opaque metadata verbatim. */
1148  clib_memcpy_fast (d->opaque, b->opaque, sizeof (b->opaque));
1149  clib_memcpy_fast (d->opaque2, b->opaque2, sizeof (b->opaque2));
1152 
 /* New buffer index is returned via *di; pointer via return value. */
1153  return d;
1154 }
1155 
1156 /* \brief Move packet from current position to offset position in buffer.
1157  Only works for a small packet using one buffer, with room to fit the move
1158  @param vm - (vlib_main_t *) vlib main data structure pointer
1159  @param b - (vlib_buffer_t *) pointer to buffer
1160  @param offset - (i16) position to move the packet in buffer
1161  */
1162 always_inline void
/* NOTE(review): the signature line (vlib_buffer_move (vlib_main_t *vm,
 * vlib_buffer_t *b, i16 offset)), additional asserts, and the 'length'
 * initialization were elided by the extractor -- line numbers jump. */
1164 {
 /* Single-buffer packets only: the move cannot cross a chain boundary. */
1165  ASSERT ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
1169 
1170  u8 *source = vlib_buffer_get_current (b);
 /* Rebase current_data to the requested offset, then copy the payload
  * from the old position to the new one. */
1171  b->current_data = offset;
1172  u8 *destination = vlib_buffer_get_current (b);
1174 
1175  if (source + length <= destination) /* no overlap */
1176  clib_memcpy_fast (destination, source, length);
1177  else
 /* Regions may overlap: memmove semantics required. */
1178  memmove (destination, source, length);
1179 }
1180 
1181 /** \brief Create a maximum of 256 clones of buffer and store them
1182  in the supplied array
1183 
1184  @param vm - (vlib_main_t *) vlib main data structure pointer
1185  @param src_buffer - (u32) source buffer index
1186  @param buffers - (u32 * ) buffer index array
1187  @param n_buffers - (u16) number of buffer clones requested (<=256)
1188  @param head_end_offset - (u16) offset relative to current position
1189  where packet head ends
1190  @param offset - (i16) copy packet head at current position if 0,
1191  else at offset position to change headroom space as specified
1192  @return - (u16) number of buffers actually cloned, may be
1193  less than the number requested or zero
1194 */
1196 vlib_buffer_clone_256 (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
1197  u16 n_buffers, u16 head_end_offset, i16 offset)
1198 {
1199  u16 i;
1200  vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
1201 
 /* Cloning requires sole ownership of the source buffer. */
1202  ASSERT (s->ref_count == 1);
1203  ASSERT (n_buffers);
1204  ASSERT (n_buffers <= 256);
 /* NOTE(review): the right-hand side of this assert was elided by the
  * extractor -- line numbers jump. */
1206  ASSERT ((offset + head_end_offset) <
1208 
 /* Head too small to be worth sharing: fall back to full copies. */
1209  if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
1210  {
1211  buffers[0] = src_buffer;
1212  if (offset)
1213  vlib_buffer_move (vm, s, offset);
1214 
1215  for (i = 1; i < n_buffers; i++)
1216  {
1217  vlib_buffer_t *d;
1218  d = vlib_buffer_copy (vm, s);
 /* Copy may fail part way through; report how many we produced. */
1219  if (d == 0)
1220  return i;
1221  buffers[i] = vlib_get_buffer_index (vm, d);
1222 
1223  }
1224  return n_buffers;
1225  }
1226 
 /* Single clone with unchanged headroom: hand back the source as-is. */
1227  if (PREDICT_FALSE ((n_buffers == 1) && (offset == 0)))
1228  {
1229  buffers[0] = src_buffer;
1230  return 1;
1231  }
1232 
 /* NOTE(review): the head-buffer allocation call was elided here by the
  * extractor; only its trailing pool-index argument survived. */
1234  s->buffer_pool_index);
1235 
 /* Initialize each clone's private head and chain it to the shared tail. */
1236  for (i = 0; i < n_buffers; i++)
1237  {
1238  vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
1239  if (offset)
1240  d->current_data = offset;
1241  else
1242  d->current_data = s->current_data;
1243 
1244  d->current_length = head_end_offset;
 /* NOTE(review): several lines elided below (total-length bookkeeping,
  * flag setup) -- confirm against the real header. */
1246 
1248  head_end_offset;
1249  if (PREDICT_FALSE (s->flags & VLIB_BUFFER_NEXT_PRESENT))
1250  {
1253  }
1255  VLIB_BUFFER_NEXT_PRESENT;
1256  d->trace_handle = s->trace_handle;
1257  clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
1258  clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
1260  vlib_buffer_get_current (s), head_end_offset);
 /* Every clone's head points at the (now shared) source buffer. */
1261  d->next_buffer = src_buffer;
1262  }
 /* Head bytes are now owned by the clones; advance the source past them
  * and set the ref count on every segment of the shared tail. */
1263  vlib_buffer_advance (s, head_end_offset);
1264  s->ref_count = n_buffers ? n_buffers : s->ref_count;
1265  while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
1266  {
1267  s = vlib_get_buffer (vm, s->next_buffer);
1268  s->ref_count = n_buffers ? n_buffers : s->ref_count;
1269  }
1270 
1271  return n_buffers;
1272 }
1273 
1274 /** \brief Create multiple clones of buffer and store them
1275  in the supplied array
1276 
1277  @param vm - (vlib_main_t *) vlib main data structure pointer
1278  @param src_buffer - (u32) source buffer index
1279  @param buffers - (u32 * ) buffer index array
1280  @param n_buffers - (u16) number of buffer clones requested (<=256)
1281  @param head_end_offset - (u16) offset relative to current position
1282  where packet head ends
1283  @param offset - (i16) copy packet head at current position if 0,
1284  else at offset position to change headroom space as specified
1285  @return - (u16) number of buffers actually cloned, may be
1286  less than the number requested or zero
1287 */
/* NOTE(review): the first signature line of vlib_buffer_clone_at_offset
 * (static u16 return type, vm/src_buffer/buffers parameters) was elided
 * by the extractor; only the second line survives. */
1290  u16 n_buffers, u16 head_end_offset, i16 offset)
1291 {
1292  vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
1293  u16 n_cloned = 0;
1294 
 /* vlib_buffer_clone_256() handles at most 256 clones per call; for
  * larger requests, clone batches of 256 from fresh copies of the
  * source so each batch has its own ref_count == 1 head. */
1295  while (n_buffers > 256)
1296  {
1297  vlib_buffer_t *copy;
 /* NOTE(review): copy is not checked for NULL here; presumably an
  * allocation failure would misbehave downstream -- confirm against
  * the real source. */
1298  copy = vlib_buffer_copy (vm, s);
1299  n_cloned += vlib_buffer_clone_256 (vm,
1300  vlib_get_buffer_index (vm, copy),
1301  (buffers + n_cloned),
1302  256, head_end_offset, offset);
1303  n_buffers -= 256;
1304  }
 /* Remaining (<= 256) clones come from the original source buffer. */
1305  n_cloned += vlib_buffer_clone_256 (vm, src_buffer,
1306  buffers + n_cloned,
1307  n_buffers, head_end_offset, offset);
1308 
 /* Total actually cloned; may be less than requested on failure. */
1309  return n_cloned;
1310 }
1311 
1312 /** \brief Create multiple clones of buffer and store them
1313  in the supplied array
1314 
1315  @param vm - (vlib_main_t *) vlib main data structure pointer
1316  @param src_buffer - (u32) source buffer index
1317  @param buffers - (u32 * ) buffer index array
1318  @param n_buffers - (u16) number of buffer clones requested (<=256)
1319  @param head_end_offset - (u16) offset relative to current position
1320  where packet head ends
1321  @return - (u16) number of buffers actually cloned, may be
1322  less than the number requested or zero
1323 */
/* NOTE(review): the return-type line (static u16 per the Doxygen index)
 * was elided by the extractor. */
1325 vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
1326  u16 n_buffers, u16 head_end_offset)
1327 {
 /* Thin wrapper: clone without changing headroom (offset == 0). */
1328  return vlib_buffer_clone_at_offset (vm, src_buffer, buffers, n_buffers,
1329  head_end_offset, 0);
1330 }
1331 
1332 /** \brief Attach cloned tail to the buffer
1333 
1334  @param vm - (vlib_main_t *) vlib main data structure pointer
1335  @param head - (vlib_buffer_t *) head buffer
1336  @param tail - (Vlib buffer_t *) tail buffer to clone and attach to head
1337 */
1338 
1339 always_inline void
/* NOTE(review): the first signature line (vlib_buffer_attach_clone
 * (vlib_main_t *vm, vlib_buffer_t *head, ...) was elided by the
 * extractor; only the second parameter line survives. */
1341  vlib_buffer_t * tail)
1342 {
 /* Head must be a single (unchained) buffer from the same pool as tail. */
1343  ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
1344  ASSERT (head->buffer_pool_index == tail->buffer_pool_index);
1345 
1346  head->flags |= VLIB_BUFFER_NEXT_PRESENT;
1347  head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
1348  head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
 /* Total-length validity is inherited from the tail. */
1349  head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
1350  head->next_buffer = vlib_get_buffer_index (vm, tail);
 /* NOTE(review): the total_length_not_including_first_buffer update was
  * elided here by the extractor -- line numbers jump. */
1353 
 /* Atomically bump the ref count on every segment of the shared tail;
  * atomic because the tail may be referenced from other threads. */
1354 next_segment:
1355  clib_atomic_add_fetch (&tail->ref_count, 1);
1356 
1357  if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
1358  {
1359  tail = vlib_get_buffer (vm, tail->next_buffer);
1360  goto next_segment;
1361  }
1362 }
1363 
1364 /* Initializes the buffer as an empty packet with no chained buffers. */
1365 always_inline void
/* NOTE(review): the signature line (vlib_buffer_chain_init
 * (vlib_buffer_t *first)) was elided by the extractor. */
1367 {
1368  first->total_length_not_including_first_buffer = 0;
1369  first->current_length = 0;
 /* No chain and zero length, so total length is trivially valid. */
1370  first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1371  first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
1372 }
1373 
1374 /* The provided next_bi buffer index is appended to the end of the packet. */
/* NOTE(review): the signature lines (static vlib_buffer_t *
 * vlib_buffer_chain_buffer (vlib_main_t *vm, vlib_buffer_t *last,
 * u32 next_bi) per the Doxygen index) were elided by the extractor. */
1377 {
1378  vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
1379  last->next_buffer = next_bi;
1380  last->flags |= VLIB_BUFFER_NEXT_PRESENT;
 /* The appended buffer starts empty and becomes the chain terminator. */
1381  next_buffer->current_length = 0;
1382  next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
 /* Return the new tail so the caller can keep appending. */
1383  return next_buffer;
1384 }
1385 
1386 /* Increases or decreases the packet length.
1387  * It does not allocate or deallocate new buffers.
1388  * Therefore, the added length must be compatible
1389  * with the last buffer. */
1390 always_inline void
/* NOTE(review): the signature line (vlib_buffer_chain_increase_length
 * (vlib_buffer_t *first, vlib_buffer_t *last, i32 len) per the Doxygen
 * index) was elided by the extractor. len may be negative (i32). */
1393 {
1394  last->current_length += len;
 /* total_length_not_including_first_buffer excludes the head buffer, so
  * only adjust it when the length change lands in a later segment. */
1395  if (first != last)
1396  first->total_length_not_including_first_buffer += len;
1397 }
1398 
1399 /* Copy data to the end of the packet and increases its length.
1400  * It does not allocate new buffers.
1401  * Returns the number of copied bytes. */
/* NOTE(review): the first signature line of vlib_buffer_chain_append_data
 * (static u16 return type, vm parameter) and the chain-length bookkeeping
 * call near the end were elided by the extractor -- line numbers jump. */
1404  vlib_buffer_t * first,
1405  vlib_buffer_t * last, void *data, u16 data_len)
1406 {
1407  u32 n_buffer_bytes = vlib_buffer_get_default_data_size (vm);
 /* Tail buffer must not already extend past the buffer's data area. */
1408  ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
 /* Copy at most what fits in the tail buffer's remaining space. */
1409  u16 len = clib_min (data_len,
1410  n_buffer_bytes - last->current_length -
1411  last->current_data);
1412  clib_memcpy_fast (vlib_buffer_get_current (last) + last->current_length,
1413  data, len);
 /* Bytes actually copied; may be < data_len when the tail is full. */
1415  return len;
1416 }
1417 
1418 /* Copy data to the end of the packet and increases its length.
1419  * Allocates additional buffers from the free list if necessary.
1420  * Returns the number of copied bytes.
1421  * 'last' value is modified whenever new buffers are allocated and
1422  * chained and points to the last buffer in the chain. */
1423 u16
1425  vlib_buffer_t * first,
1426  vlib_buffer_t ** last, void *data,
1427  u16 data_len);
1429 
1432 
1433 typedef struct
1434 {
1435  /* Vector of packet data. */
1437 
1438  /* Number of buffers to allocate in each call to allocator. */
1440 
1443 
1446  void *packet_data,
1447  uword n_packet_data_bytes,
1448  uword min_n_buffers_each_alloc,
1449  char *fmt, ...);
1450 
1453  u32 * bi_result);
1454 
1455 always_inline void
/* NOTE(review): the signature line (vlib_packet_template_free
 * (vlib_main_t *vm, vlib_packet_template_t *t) per the Doxygen index)
 * was elided by the extractor. */
1457 {
 /* Release the template's packet-data vector; the struct itself and any
  * other members are not freed here. */
1458  vec_free (t->packet_data);
1459 }
1460 
1463 {
1466 }
1467 
1468 #define VLIB_BUFFER_LINEARIZE_MAX 64
1469 
/* NOTE(review): this is vlib_buffer_chain_linearize() (static u32,
 * vm/b parameters per the Doxygen index). The signature and several body
 * lines were elided by the extractor -- embedded line numbers jump;
 * confirm every gap against the real buffer_funcs.h. Returns the number
 * of buffers in the linearized chain, or 0 on failure. */
1472 {
1473  vlib_buffer_t *dst_b;
1474  u32 n_buffers = 1, to_free = 0;
1475  u16 rem_len, dst_len, data_size, src_len = 0;
1476  u8 *dst, *src = 0;
1477 
 /* Fast path: a single-buffer packet is already linear. */
1478  if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
1479  return 1;
1480 
 /* Cannot linearize a shared (cloned) chain in place. */
1481  ASSERT (1 == b->ref_count);
1482  if (PREDICT_FALSE (1 != b->ref_count))
1483  return 0;
1484 
1485  data_size = vlib_buffer_get_default_data_size (vm);
 /* NOTE(review): one line elided here by the extractor. */
1487 
1488  dst_b = b;
1489  dst = vlib_buffer_get_tail (dst_b);
1490  dst_len = vlib_buffer_space_left_at_end (vm, dst_b);
1491 
 /* NOTE(review): rem_len initialization (presumably the chain's total
  * remaining length) was elided by the extractor. */
1493 
 /* Compact source segments into destination segments until all payload
  * has been moved. */
1494  while (rem_len > 0)
1495  {
1496  u16 copy_len;
1497 
 /* Advance to the next non-empty source segment. */
1498  while (0 == src_len)
1499  {
1500  ASSERT (b->flags & VLIB_BUFFER_NEXT_PRESENT);
1501  if (PREDICT_FALSE (!(b->flags & VLIB_BUFFER_NEXT_PRESENT)))
1502  break; /* malformed chained buffer */
1503 
 /* NOTE(review): the advance to the next source buffer (b = ...,
  * src = ...) was elided here by the extractor. */
1506  src_len = b->current_length;
1507  }
1508 
 /* Destination segment full: move to (or build) the next one. */
1509  if (0 == dst_len)
1510  {
1511  ASSERT (dst_b->flags & VLIB_BUFFER_NEXT_PRESENT);
1512  if (PREDICT_FALSE (!(dst_b->flags & VLIB_BUFFER_NEXT_PRESENT)))
1513  break; /* malformed chained buffer */
1514 
1515  vlib_buffer_t *next_dst_b = vlib_get_buffer (vm, dst_b->next_buffer);
1516 
1517  if (PREDICT_TRUE (1 == next_dst_b->ref_count))
1518  {
1519  /* normal case: buffer is not cloned, just use it */
1520  dst_b = next_dst_b;
1521  }
1522  else
1523  {
1524  /* cloned buffer, build a new dest chain from there */
 /* NOTE(review): a declaration line (presumably the bufs array) was
  * elided here by the extractor. */
1526  u32 bis[VLIB_BUFFER_LINEARIZE_MAX + 1];
1527  const int n = (rem_len + data_size - 1) / data_size;
1528  int n_alloc;
1529  int i;
1530 
 /* NOTE(review): the bound check guarding this early return
  * (presumably n > VLIB_BUFFER_LINEARIZE_MAX) was elided. */
1533  return 0;
1534 
1535  n_alloc = vlib_buffer_alloc (vm, bis, n);
1536  if (PREDICT_FALSE (n_alloc != n))
1537  {
 /* Partial allocation: free what we got and fail. */
1538  vlib_buffer_free (vm, bis, n_alloc);
1539  return 0;
1540  }
1541 
1542  vlib_get_buffers (vm, bis, bufs, n);
1543 
 /* Chain the fresh buffers together. */
1544  for (i = 0; i < n - 1; i++)
1545  {
1546  bufs[i]->flags |= VLIB_BUFFER_NEXT_PRESENT;
1547  bufs[i]->next_buffer = bis[i + 1];
1548  }
1549 
 /* Old (cloned) remainder of the dest chain gets freed at the end. */
1550  to_free = dst_b->next_buffer;
1551  dst_b->next_buffer = bis[0];
1552  dst_b = bufs[0];
1553  }
1554 
1555  n_buffers++;
1556 
 /* Start writing at offset 0 (or earlier if current_data is negative). */
1557  dst_b->current_data = clib_min (0, dst_b->current_data);
1558  dst_b->current_length = 0;
1559 
1560  dst = dst_b->data + dst_b->current_data;
1561  dst_len = data_size - dst_b->current_data;
1562  }
1563 
1564  copy_len = clib_min (src_len, dst_len);
1565 
1566  if (PREDICT_TRUE (src == dst))
1567  {
1568  /* nothing to do */
1569  }
1570  else if (src + copy_len > dst && dst + copy_len > src)
1571  {
1572  /* src and dst overlap */
 /* Overlap can only happen while compacting within the head buffer. */
1573  ASSERT (b == dst_b);
1574  memmove (dst, src, copy_len);
1575  }
1576  else
1577  {
1578  clib_memcpy_fast (dst, src, copy_len);
1579  }
1580 
1581  dst_b->current_length += copy_len;
1582 
1583  dst += copy_len;
1584  src += copy_len;
1585  dst_len -= copy_len;
1586  src_len -= copy_len;
1587  rem_len -= copy_len;
1588  }
1589 
1590  /* in case of a malformed chain buffer, we'll exit early from the loop. */
1591  ASSERT (0 == rem_len);
 /* NOTE(review): a line elided here by the extractor. */
1593 
 /* Free the detached remainder of a replaced (cloned) dest chain. */
1594  if (to_free)
1595  vlib_buffer_free_one (vm, to_free);
1596 
1597  if (dst_b->flags & VLIB_BUFFER_NEXT_PRESENT)
1598  {
1599  /* the resulting chain is smaller than the original, cut it there */
1600  dst_b->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
 /* NOTE(review): the free of the trailing buffers was elided here. */
1602  if (1 == n_buffers)
1603  {
1604  /* no longer a chained buffer */
1605  dst_b->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
 /* NOTE(review): one more line elided here by the extractor. */
1607  }
1608  }
1609 
1610  return n_buffers;
1611 }
1612 
1613 #endif /* included_vlib_buffer_funcs_h */
1614 
1615 /*
1616  * fd.io coding-style-patch-verification: ON
1617  *
1618  * Local Variables:
1619  * eval: (c-set-style "gnu")
1620  * End:
1621  */
vlib_buffer_main_t
Definition: buffer.h:479
vlib_buffer_pool_t::start
uword start
Definition: buffer.h:453
vlib_buffer_enqueue_to_single_next_fn_t
void() vlib_buffer_enqueue_to_single_next_fn_t(vlib_main_t *vm, vlib_node_runtime_t *node, u32 *ers, u16 next_index, u32 count)
Definition: buffer_funcs.h:58
vlib_buffer_t::next_buffer
u32 next_buffer
Next buffer for this linked-list of buffers.
Definition: buffer.h:149
vlib_buffer_free
static void vlib_buffer_free(vlib_main_t *vm, u32 *buffers, u32 n_buffers)
Free buffers Frees the entire buffer chain for each buffer.
Definition: buffer_funcs.h:979
vlib_buffer_t::buffer_pool_index
u8 buffer_pool_index
index of buffer pool this buffer belongs.
Definition: buffer.h:142
vlib_packet_template_t::packet_data
u8 * packet_data
Definition: buffer_funcs.h:1436
vlib_buffer_clone_256
static u16 vlib_buffer_clone_256(vlib_main_t *vm, u32 src_buffer, u32 *buffers, u16 n_buffers, u16 head_end_offset, i16 offset)
Create a maximum of 256 clones of buffer and store them in the supplied array.
Definition: buffer_funcs.h:1196
bufs
vlib_buffer_t * bufs[VLIB_FRAME_SIZE]
Definition: nat44_ei_out2in.c:717
u32x8_permute
foreach_avx2_vec256i static foreach_avx2_vec256u u32x8 u32x8_permute(u32x8 v, u32x8 idx)
Definition: vector_avx2.h:73
vlib_packet_template_t
Definition: buffer_funcs.h:1433
vlib_buffer_func_main_t::buffer_enqueue_to_thread_fn
vlib_buffer_enqueue_to_thread_fn_t * buffer_enqueue_to_thread_fn
Definition: buffer_funcs.h:74
format_vlib_buffer
format_function_t format_vlib_buffer
Definition: buffer_funcs.h:1430
vlib_prefetch_buffer_header
#define vlib_prefetch_buffer_header(b, type)
Prefetch buffer metadata.
Definition: buffer.h:231
vlib_buffer_get_current_pa
static uword vlib_buffer_get_current_pa(vlib_main_t *vm, vlib_buffer_t *b)
Definition: buffer_funcs.h:494
vlib_buffer_chain_append_data_with_alloc
u16 vlib_buffer_chain_append_data_with_alloc(vlib_main_t *vm, vlib_buffer_t *first, vlib_buffer_t **last, void *data, u16 data_len)
Definition: buffer.c:453
n_buffers
u32 n_buffers
Definition: interface_output.c:421
vlib_buffer_chain_increase_length
static void vlib_buffer_chain_increase_length(vlib_buffer_t *first, vlib_buffer_t *last, i32 len)
Definition: buffer_funcs.h:1391
VLIB_BUFFER_TRACE_TRAJECTORY_INIT
#define VLIB_BUFFER_TRACE_TRAJECTORY_INIT(b)
Definition: buffer.h:192
vlib_buffer_alloc_to_ring_from_pool
static __clib_warn_unused_result u32 vlib_buffer_alloc_to_ring_from_pool(vlib_main_t *vm, u32 *ring, u32 start, u32 ring_size, u32 n_buffers, u8 buffer_pool_index)
Allocate buffers into ring from specific buffer pool.
Definition: buffer_funcs.h:747
STATIC_ASSERT_OFFSET_OF
STATIC_ASSERT_OFFSET_OF(vlib_buffer_t, template_end, 64)
vlib_buffer_copy
static vlib_buffer_t * vlib_buffer_copy(vlib_main_t *vm, vlib_buffer_t *b)
Definition: buffer_funcs.h:1078
vlib_buffer_move
static void vlib_buffer_move(vlib_main_t *vm, vlib_buffer_t *b, i16 offset)
Definition: buffer_funcs.h:1163
next_index
nat44_ei_hairpin_src_next_t next_index
Definition: nat44_ei_hairpinning.c:412
buffer.h
vlib_get_buffer
static vlib_buffer_t * vlib_get_buffer(vlib_main_t *vm, u32 buffer_index)
Translate buffer index into buffer pointer.
Definition: buffer_funcs.h:111
pointer_to_uword
static uword pointer_to_uword(const void *p)
Definition: types.h:131
vlib_buffer_length_in_chain_slow_path
uword vlib_buffer_length_in_chain_slow_path(vlib_main_t *vm, vlib_buffer_t *b_first)
Definition: buffer.c:78
vlib_buffer_alloc_to_ring
static __clib_warn_unused_result u32 vlib_buffer_alloc_to_ring(vlib_main_t *vm, u32 *ring, u32 start, u32 ring_size, u32 n_buffers)
Allocate buffers into ring.
Definition: buffer_funcs.h:718
vlib_buffer_pool_t::data_size
u32 data_size
Definition: buffer.h:459
clib_atomic_add_fetch
#define clib_atomic_add_fetch(a, b)
Definition: atomics.h:30
vlib_get_buffers_with_offset
static_always_inline void vlib_get_buffers_with_offset(vlib_main_t *vm, u32 *bi, void **b, int count, i32 offset)
Translate array of buffer indices into buffer pointers with offset.
Definition: buffer_funcs.h:204
next
u16 * next
Definition: nat44_ei_out2in.c:718
vlib_buffer_pool_t
Definition: buffer.h:450
node
vlib_main_t vlib_node_runtime_t * node
Definition: nat44_ei.c:3047
vlib_buffer_copy_indices_to_ring
static void vlib_buffer_copy_indices_to_ring(u32 *ring, u32 *src, u32 start, u32 ring_size, u32 n_buffers)
Definition: buffer_funcs.h:152
vlib_buffer_attach_clone
static void vlib_buffer_attach_clone(vlib_main_t *vm, vlib_buffer_t *head, vlib_buffer_t *tail)
Attach cloned tail to the buffer.
Definition: buffer_funcs.h:1340
u16
unsigned short u16
Definition: types.h:57
first
static heap_elt_t * first(heap_header_t *h)
Definition: heap.c:59
u64x2
epu8_epi32 epu16_epi32 u64x2
Definition: vector_sse42.h:444
vlib_main_t::buffer_main
vlib_buffer_main_t * buffer_main
Definition: main.h:167
vlib_buffer_pool_get
static_always_inline __clib_warn_unused_result uword vlib_buffer_pool_get(vlib_main_t *vm, u8 buffer_pool_index, u32 *buffers, u32 n_buffers)
Definition: buffer_funcs.h:559
vm
vlib_main_t * vm
X-connect all packets from the HOST to the PHY.
Definition: nat44_ei.c:3047
vlib_buffer_alloc_on_numa
static __clib_warn_unused_result u32 vlib_buffer_alloc_on_numa(vlib_main_t *vm, u32 *buffers, u32 n_buffers, u32 numa_node)
Allocate buffers from specific numa node into supplied array.
Definition: buffer_funcs.h:685
fifo.h
vlib_buffer_t::trace_handle
u32 trace_handle
Specifies trace buffer handle if VLIB_PACKET_IS_TRACED flag is set.
Definition: buffer.h:172
format_vlib_buffer_contents
format_function_t format_vlib_buffer_contents
Definition: buffer_funcs.h:1431
vlib_buffer_get_pa
static uword vlib_buffer_get_pa(vlib_main_t *vm, vlib_buffer_t *b)
Definition: buffer_funcs.h:488
clib_memcpy_u32
static_always_inline void clib_memcpy_u32(u32 *dst, u32 *src, u32 n_left)
Definition: memcpy.h:43
r
vnet_hw_if_output_node_runtime_t * r
Definition: interface_output.c:1089
vlib_buffer_is_known
static vlib_buffer_known_state_t vlib_buffer_is_known(vlib_main_t *vm, u32 buffer_index)
Definition: buffer_funcs.h:529
vlib_get_buffer_pool
static_always_inline vlib_buffer_pool_t * vlib_get_buffer_pool(vlib_main_t *vm, u8 buffer_pool_index)
Definition: buffer_funcs.h:552
vlib_buffer_length_in_chain
static uword vlib_buffer_length_in_chain(vlib_main_t *vm, vlib_buffer_t *b)
Get length in bytes of the buffer chain.
Definition: buffer_funcs.h:433
clib_memcpy_fast
static_always_inline void * clib_memcpy_fast(void *restrict dst, const void *restrict src, size_t n)
Definition: string.h:92
u32x4_shuffle
static_always_inline u32x4 u32x4_shuffle(u32x4 v, const int a, const int b, const int c, const int d)
Definition: vector_sse42.h:421
vlib_buffer_add_data
int vlib_buffer_add_data(vlib_main_t *vm, u32 *buffer_index, void *data, u32 n_data_bytes)
Definition: buffer.c:398
vlib_buffer_func_main_t::buffer_enqueue_to_next_fn
vlib_buffer_enqueue_to_next_fn_t * buffer_enqueue_to_next_fn
Definition: buffer_funcs.h:72
i32
signed int i32
Definition: types.h:77
vlib_get_buffer_indices
static_always_inline void vlib_get_buffer_indices(vlib_main_t *vm, vlib_buffer_t **b, u32 *bi, uword count)
Translate array of buffer pointers into buffer indices.
Definition: buffer_funcs.h:404
format_vlib_buffer_no_chain
format_function_t format_vlib_buffer_no_chain
Definition: buffer_funcs.h:1431
vlib_buffer_func_main_t::buffer_enqueue_to_single_next_fn
vlib_buffer_enqueue_to_single_next_fn_t * buffer_enqueue_to_single_next_fn
Definition: buffer_funcs.h:73
u8x16
u8x16
Definition: vector_sse42.h:157
round_pow2
static uword round_pow2(uword x, uword pow2)
Definition: clib.h:279
i16
signed short i16
Definition: types.h:46
vlib_buffer_t::current_data
i16 current_data
signed offset in data[], pre_data[] that we are currently processing.
Definition: buffer.h:119
vlib_buffer_pool_t::threads
vlib_buffer_pool_thread_t * threads
Definition: buffer.h:467
vlib_buffer_t::opaque2
u32 opaque2[14]
Definition: buffer.h:179
vlib_buffer_free_from_ring
static void vlib_buffer_free_from_ring(vlib_main_t *vm, u32 *ring, u32 start, u32 ring_size, u32 n_buffers)
Free buffers from ring.
Definition: buffer_funcs.h:1026
hash.h
vlib_buffer_pool_t::size
uword size
Definition: buffer.h:454
vlib_buffer_advance
static void vlib_buffer_advance(vlib_buffer_t *b, word l)
Advance current data pointer by the supplied (signed!) amount.
Definition: buffer.h:276
count
u8 count
Definition: dhcp.api:208
vlib_physmem_get_pa
static u64 vlib_physmem_get_pa(vlib_main_t *vm, void *mem)
Definition: physmem_funcs.h:103
len
u8 len
Definition: ip_types.api:103
vlib_buffer_contents
static uword vlib_buffer_contents(vlib_main_t *vm, u32 buffer_index, u8 *contents)
Copy buffer contents to memory.
Definition: buffer_funcs.h:467
vlib_validate_buffer
u8 * vlib_validate_buffer(vlib_main_t *vm, u32 buffer_index, uword follow_chain)
Definition: buffer.c:254
u64x2_from_u32x4
static_always_inline u64x2 u64x2_from_u32x4(u32x4 v)
Definition: vector_neon.h:145
vlib_packet_template_t::min_n_buffers_each_alloc
u32 min_n_buffers_each_alloc
Definition: buffer_funcs.h:1439
vlib_buffer_pool_t::buffers
u32 * buffers
Definition: buffer.h:462
physmem_funcs.h
vlib_buffer_alloc
static __clib_warn_unused_result u32 vlib_buffer_alloc(vlib_main_t *vm, u32 *buffers, u32 n_buffers)
Allocate buffers into supplied array.
Definition: buffer_funcs.h:702
vlib_buffer_validate
static void vlib_buffer_validate(vlib_main_t *vm, vlib_buffer_t *b)
Definition: buffer_funcs.h:81
vec_elt_at_index
#define vec_elt_at_index(v, i)
Get vector value at index i checking that i is in bounds.
Definition: vec_bootstrap.h:203
offset
struct clib_bihash_value offset
template key/value backing page structure
vlib_validate_buffers
u8 * vlib_validate_buffers(vlib_main_t *vm, u32 *buffers, uword next_buffer_stride, uword n_buffers, vlib_buffer_known_state_t known_state, uword follow_buffer_next)
Definition: buffer.c:261
vlib_buffer_pool_t::buffer_template
vlib_buffer_t buffer_template
Definition: buffer.h:470
PREDICT_FALSE
#define PREDICT_FALSE(x)
Definition: clib.h:124
hash_get
#define hash_get(h, key)
Definition: hash.h:249
vlib_get_buffer_index
static u32 vlib_get_buffer_index(vlib_main_t *vm, void *p)
Translate buffer pointer into buffer index.
Definition: buffer_funcs.h:324
VLIB_BUFFER_MAX_NUMA_NODES
#define VLIB_BUFFER_MAX_NUMA_NODES
Definition: buffer.h:473
clib_spinlock_lock
static_always_inline void clib_spinlock_lock(clib_spinlock_t *p)
Definition: lock.h:82
static_always_inline
#define static_always_inline
Definition: clib.h:112
uword
u64 uword
Definition: types.h:112
last
static heap_elt_t * last(heap_header_t *h)
Definition: heap.c:53
VLIB_BUFFER_KNOWN_FREE
@ VLIB_BUFFER_KNOWN_FREE
Definition: buffer_funcs.h:519
vlib_buffer_main_t::default_buffer_pool_index_for_numa
u8 default_buffer_pool_index_for_numa[VLIB_BUFFER_MAX_NUMA_NODES]
Definition: buffer.h:491
vlib_main_t::thread_index
u32 thread_index
Definition: main.h:215
format_vlib_buffer_and_data
format_function_t format_vlib_buffer_and_data
Definition: buffer_funcs.h:1430
vlib_buffer_copy_template
static_always_inline void vlib_buffer_copy_template(vlib_buffer_t *b, vlib_buffer_t *bt)
Definition: buffer_funcs.h:171
vlib_buffer_clone
static u16 vlib_buffer_clone(vlib_main_t *vm, u32 src_buffer, u32 *buffers, u16 n_buffers, u16 head_end_offset)
Create multiple clones of buffer and store them in the supplied array.
Definition: buffer_funcs.h:1325
mask
vl_api_pnat_mask_t mask
Definition: pnat.api:45
vlib_buffer_chain_validate
void vlib_buffer_chain_validate(vlib_main_t *vm, vlib_buffer_t *first)
vlib_buffer_t::ref_count
volatile u8 ref_count
Reference count for this buffer.
Definition: buffer.h:139
src
vl_api_address_t src
Definition: gre.api:54
vlib_buffer_main_t::buffer_known_hash_lockp
clib_spinlock_t buffer_known_hash_lockp
Definition: buffer.h:504
vlib_buffer_clone_at_offset
static u16 vlib_buffer_clone_at_offset(vlib_main_t *vm, u32 src_buffer, u32 *buffers, u16 n_buffers, u16 head_end_offset, i16 offset)
Create multiple clones of buffer and store them in the supplied array.
Definition: buffer_funcs.h:1289
vlib_buffer_func_main_t
Definition: buffer_funcs.h:70
vlib_buffer_chain_append_data
static u16 vlib_buffer_chain_append_data(vlib_main_t *vm, vlib_buffer_t *first, vlib_buffer_t *last, void *data, u16 data_len)
Definition: buffer_funcs.h:1403
clib_min
#define clib_min(x, y)
Definition: clib.h:342
vlib_buffer_chain_linearize
static u32 vlib_buffer_chain_linearize(vlib_main_t *vm, vlib_buffer_t *b)
Definition: buffer_funcs.h:1471
vlib_get_buffer_indices_with_offset
static_always_inline void vlib_get_buffer_indices_with_offset(vlib_main_t *vm, void **b, u32 *bi, uword count, i32 offset)
Translate array of buffer pointers into buffer indices with offset.
Definition: buffer_funcs.h:343
CLIB_CACHE_LINE_BYTES
#define CLIB_CACHE_LINE_BYTES
Definition: cache.h:58
vlib_buffer_copy_no_chain
static vlib_buffer_t * vlib_buffer_copy_no_chain(vlib_main_t *vm, vlib_buffer_t *b, u32 *di)
Definition: buffer_funcs.h:1137
vlib_get_next_buffer
static vlib_buffer_t * vlib_get_next_buffer(vlib_main_t *vm, vlib_buffer_t *b)
Get next buffer in buffer linklist, or zero for end of list.
Definition: buffer_funcs.h:417
fmt
int cJSON_bool fmt
Definition: cJSON.h:160
vlib_buffer_main_t::buffer_mem_start
uword buffer_mem_start
Definition: buffer.h:484
vlib_buffer_t::current_length
u16 current_length
Nbytes between current data and the end of this buffer.
Definition: buffer.h:122
vlib_buffer_space_left_at_end
static u32 vlib_buffer_space_left_at_end(vlib_main_t *vm, vlib_buffer_t *b)
Definition: buffer_funcs.h:1462
clib_atomic_sub_fetch
#define clib_atomic_sub_fetch(a, b)
Definition: atomics.h:31
vlib_buffer_chain_init
static void vlib_buffer_chain_init(vlib_buffer_t *first)
Definition: buffer_funcs.h:1366
vlib_buffer_free_inline
static_always_inline void vlib_buffer_free_inline(vlib_main_t *vm, u32 *buffers, u32 n_buffers, int maybe_next)
Definition: buffer_funcs.h:807
vlib_buffer_pool_thread_t::cached_buffers
u32 cached_buffers[VLIB_BUFFER_POOL_PER_THREAD_CACHE_SZ]
Definition: buffer.h:446
data
u8 data[128]
Definition: ipsec_types.api:95
vlib_main_t::numa_node
u32 numa_node
Definition: main.h:217
format_function_t
u8 *() format_function_t(u8 *s, va_list *args)
Definition: format.h:48
vlib_buffer_pool_put
static_always_inline void vlib_buffer_pool_put(vlib_main_t *vm, u8 buffer_pool_index, u32 *buffers, u32 n_buffers)
Definition: buffer_funcs.h:770
vec_free
#define vec_free(V)
Free vector's memory (no header).
Definition: vec.h:395
index
u32 index
Definition: flow_types.api:221
always_inline
#define always_inline
Definition: rdma_mlx5dv.h:23
clib_bihash_value
template key/value backing page structure
Definition: bihash_doc.h:44
vlib_buffer_main_t::buffer_pools
vlib_buffer_pool_t * buffer_pools
Definition: buffer.h:486
vlib_buffer_enqueue_to_thread_fn_t
u32() vlib_buffer_enqueue_to_thread_fn_t(vlib_main_t *vm, vlib_node_runtime_t *node, u32 frame_queue_index, u32 *buffer_indices, u16 *thread_indices, u32 n_packets, int drop_on_congestion)
Definition: buffer_funcs.h:62
ASSERT
#define ASSERT(truth)
Definition: error_bootstrap.h:69
vlib_buffer_index_length_in_chain
static uword vlib_buffer_index_length_in_chain(vlib_main_t *vm, u32 bi)
Get length in bytes of the buffer index buffer chain.
Definition: buffer_funcs.h:453
data_len
u8 data_len
Definition: ikev2_types.api:24
off
u32 off
Definition: interface_output.c:1096
vlib_buffer_get_tail
static u8 * vlib_buffer_get_tail(vlib_buffer_t *b)
Get pointer to the end of buffer's data.
Definition: buffer.h:338
VLIB_BUFFER_UNKNOWN
@ VLIB_BUFFER_UNKNOWN
Definition: buffer_funcs.h:516
VLIB_BUFFER_PRE_DATA_SIZE
#define VLIB_BUFFER_PRE_DATA_SIZE
Definition: buffer.h:51
vlib_buffer_get_default_data_size
static_always_inline u32 vlib_buffer_get_default_data_size(vlib_main_t *vm)
Definition: buffer_funcs.h:122
u32
unsigned int u32
Definition: types.h:88
vlib_buffer_pool_thread_t::n_cached
u32 n_cached
Definition: buffer.h:447
vlib_buffer_main_t::alloc_callback_fn
vlib_buffer_alloc_free_callback_t * alloc_callback_fn
Definition: buffer.h:488
vlib_buffer_enqueue_to_next_fn_t
void() vlib_buffer_enqueue_to_next_fn_t(vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u16 *nexts, uword count)
Definition: buffer_funcs.h:54
dst
vl_api_ip4_address_t dst
Definition: pnat.api:41
vlib_buffer_main_t::buffer_known_hash
uword * buffer_known_hash
Definition: buffer.h:503
vlib_buffer_free_no_next
static void vlib_buffer_free_no_next(vlib_main_t *vm, u32 *buffers, u32 n_buffers)
Free buffers, does not free the buffer chain for each buffer.
Definition: buffer_funcs.h:996
CLIB_LOG2_CACHE_LINE_BYTES
#define CLIB_LOG2_CACHE_LINE_BYTES
Definition: cache.h:45
main.h
vlib_packet_template_t::name
u8 * name
Definition: buffer_funcs.h:1441
vlib_buffer_copy_indices
static_always_inline void vlib_buffer_copy_indices(u32 *dst, u32 *src, u32 n_indices)
Definition: buffer_funcs.h:128
n_left
u32 n_left
Definition: interface_output.c:1096
u32x4
unsigned long long u32x4
Definition: ixge.c:28
vlib_buffer_chain_buffer
static vlib_buffer_t * vlib_buffer_chain_buffer(vlib_main_t *vm, vlib_buffer_t *last, u32 next_bi)
Definition: buffer_funcs.h:1376
clib_spinlock_unlock
static_always_inline void clib_spinlock_unlock(clib_spinlock_t *p)
Definition: lock.h:121
vlib_buffer_pool_t::n_avail
u32 n_avail
Definition: buffer.h:461
vlib_buffer_validate_alloc_free
void vlib_buffer_validate_alloc_free(vlib_main_t *vm, u32 *buffers, uword n_buffers, vlib_buffer_known_state_t expected_state)
Definition: buffer.c:315
vlib_buffer_copy_indices_from_ring
static void vlib_buffer_copy_indices_from_ring(u32 *dst, u32 *ring, u32 start, u32 ring_size, u32 n_buffers)
Definition: buffer_funcs.h:134
length
char const int length
Definition: cJSON.h:163
vlib_buffer_pool_get_default_for_numa
static u8 vlib_buffer_pool_get_default_for_numa(vlib_main_t *vm, u32 numa_node)
Definition: buffer_funcs.h:189
vlib_buffer_func_main
vlib_buffer_func_main_t vlib_buffer_func_main
Definition: buffer_funcs.c:419
vlib_main_t
Definition: main.h:102
vlib_buffer_known_state_t
vlib_buffer_known_state_t
Definition: buffer_funcs.h:513
VLIB_BUFFER_LINEARIZE_MAX
#define VLIB_BUFFER_LINEARIZE_MAX
Definition: buffer_funcs.h:1468
vlib_buffer_ptr_from_index
static void * vlib_buffer_ptr_from_index(uword buffer_mem_start, u32 buffer_index, uword offset)
Definition: buffer_funcs.h:97
b
vlib_buffer_t ** b
Definition: nat44_ei_out2in.c:717
u8
unsigned char u8
Definition: types.h:56
vlib_buffer_get_current
static void * vlib_buffer_get_current(vlib_buffer_t *b)
Get pointer to current data to process.
Definition: buffer.h:257
vlib_buffer_t::data
u8 data[]
Packet data.
Definition: buffer.h:204
uword_to_pointer
#define uword_to_pointer(u, type)
Definition: types.h:136
i
int i
Definition: flowhash_template.h:376
vlib_packet_template_free
static void vlib_packet_template_free(vlib_main_t *vm, vlib_packet_template_t *t)
Definition: buffer_funcs.h:1456
u64x8
__m512i u64x8
Definition: vector_avx512.h:306
VLIB_BUFFER_COPY_CLONE_FLAGS_MASK
#define VLIB_BUFFER_COPY_CLONE_FLAGS_MASK
Definition: buffer_funcs.h:1072
u64x2_from_u32x4_high
static_always_inline u64x2 u64x2_from_u32x4_high(u32x4 v)
Definition: vector_neon.h:151
nexts
u16 nexts[VLIB_FRAME_SIZE]
Definition: nat44_ei_out2in.c:718
vlib_buffer_pool_t::lock
clib_spinlock_t lock
Definition: buffer.h:464
vlib_buffer_pool_thread_t
Definition: buffer.h:443
VLIB_BUFFER_KNOWN_ALLOCATED
@ VLIB_BUFFER_KNOWN_ALLOCATED
Definition: buffer_funcs.h:520
vlib_buffer_free_one
static void vlib_buffer_free_one(vlib_main_t *vm, u32 buffer_index)
Free one buffer Shorthand to free a single buffer chain.
Definition: buffer_funcs.h:1012
vlib_frame_queue_dequeue_fn_t
u32() vlib_frame_queue_dequeue_fn_t(vlib_main_t *vm, vlib_frame_queue_main_t *fqm)
Definition: buffer_funcs.h:67
vlib_get_buffers
static_always_inline void vlib_get_buffers(vlib_main_t *vm, u32 *bi, vlib_buffer_t **b, int count)
Translate array of buffer indices into buffer pointers.
Definition: buffer_funcs.h:311
vlib_buffer_func_main_t::frame_queue_dequeue_fn
vlib_frame_queue_dequeue_fn_t * frame_queue_dequeue_fn
Definition: buffer_funcs.h:75
vlib_packet_template_init
void vlib_packet_template_init(vlib_main_t *vm, vlib_packet_template_t *t, void *packet_data, uword n_packet_data_bytes, uword min_n_buffers_each_alloc, char *fmt,...)
Definition: buffer.c:355
vlib_node_runtime_t
Definition: node.h:454
PREDICT_TRUE
#define PREDICT_TRUE(x)
Definition: clib.h:125
vlib_buffer_t::total_length_not_including_first_buffer
u32 total_length_not_including_first_buffer
Only valid for first buffer in chain.
Definition: buffer.h:176
vlib_packet_template_get_packet
void * vlib_packet_template_get_packet(vlib_main_t *vm, vlib_packet_template_t *t, u32 *bi_result)
Definition: buffer.c:377
di
void di(unformat_input_t *i)
Definition: unformat.c:163
vlib_buffer_alloc_from_pool
static __clib_warn_unused_result u32 vlib_buffer_alloc_from_pool(vlib_main_t *vm, u32 *buffers, u32 n_buffers, u8 buffer_pool_index)
Allocate buffers from specific pool into supplied array.
Definition: buffer_funcs.h:597
vlib_frame_queue_main_t
Definition: threads.h:133
node.h
vlib_buffer_main_t::free_callback_fn
vlib_buffer_alloc_free_callback_t * free_callback_fn
Definition: buffer.h:489
vlib_buffer_t::opaque
u32 opaque[10]
Opaque data used by sub-graphs for their own purposes.
Definition: buffer.h:162
VLIB_BUFFER_POOL_PER_THREAD_CACHE_SZ
#define VLIB_BUFFER_POOL_PER_THREAD_CACHE_SZ
Definition: buffer.h:441
u64x4
u64x4
Definition: vector_avx2.h:142
vlib_buffer_t::flags
u32 flags
buffer flags: VLIB_BUFFER_FREE_LIST_INDEX_MASK: bits used to store free list index,...
Definition: buffer.h:133
vlib_buffer_t
VLIB buffer representation.
Definition: buffer.h:111
vlib_buffer_main_t::default_data_size
u32 default_data_size
Definition: buffer.h:496
vlib_buffer_free_from_ring_no_next
static void vlib_buffer_free_from_ring_no_next(vlib_main_t *vm, u32 *ring, u32 start, u32 ring_size, u32 n_buffers)
Free buffers from ring without freeing tail buffers.
Definition: buffer_funcs.h:1051
flags
vl_api_wireguard_peer_flags_t flags
Definition: wireguard.api:105