FD.io VPP  v20.01-48-g3e0dafb74
Vector Packet Processing
buffer_funcs.h
Go to the documentation of this file.
1 /*
2  * Copyright (c) 2015 Cisco and/or its affiliates.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at:
6  *
7  * http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 /*
16  * buffer_funcs.h: VLIB buffer related functions/inlines
17  *
18  * Copyright (c) 2008 Eliot Dresselhaus
19  *
20  * Permission is hereby granted, free of charge, to any person obtaining
21  * a copy of this software and associated documentation files (the
22  * "Software"), to deal in the Software without restriction, including
23  * without limitation the rights to use, copy, modify, merge, publish,
24  * distribute, sublicense, and/or sell copies of the Software, and to
25  * permit persons to whom the Software is furnished to do so, subject to
26  * the following conditions:
27  *
28  * The above copyright notice and this permission notice shall be
29  * included in all copies or substantial portions of the Software.
30  *
31  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32  * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34  * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35  * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36  * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37  * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
38  */
39 
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
42 
43 #include <vppinfra/hash.h>
44 #include <vppinfra/fifo.h>
45 #include <vlib/buffer.h>
46 #include <vlib/physmem_funcs.h>
47 #include <vlib/main.h>
48 #include <vlib/node.h>
49 
50 /** \file
51  vlib buffer access methods.
52 */
53 
54 always_inline void
56 {
59 
60  /* reference count in allocated buffer always must be 1 or higher */
61  ASSERT (b->ref_count > 0);
62 
63  /* verify that buffer pool index is valid */
65  ASSERT (pointer_to_uword (b) >= bp->start);
66  ASSERT (pointer_to_uword (b) < bp->start + bp->size -
67  (bp->data_size + sizeof (vlib_buffer_t)));
68 }
69 
70 always_inline void *
71 vlib_buffer_ptr_from_index (uword buffer_mem_start, u32 buffer_index,
72  uword offset)
73 {
74  offset += ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
75  return uword_to_pointer (buffer_mem_start + offset, vlib_buffer_t *);
76 }
77 
78 /** \brief Translate buffer index into buffer pointer
79 
80  @param vm - (vlib_main_t *) vlib main data structure pointer
81  @param buffer_index - (u32) buffer index
82  @return - (vlib_buffer_t *) buffer pointer
83 */
85 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
86 {
88  vlib_buffer_t *b;
89 
90  b = vlib_buffer_ptr_from_index (bm->buffer_mem_start, buffer_index, 0);
91  vlib_buffer_validate (vm, b);
92  return b;
93 }
94 
97 {
98  return vm->buffer_main->default_data_size;
99 }
100 
103 {
104 #if defined(CLIB_HAVE_VEC512)
105  while (n_indices >= 16)
106  {
107  u32x16_store_unaligned (u32x16_load_unaligned (src), dst);
108  dst += 16;
109  src += 16;
110  n_indices -= 16;
111  }
112 #endif
113 
114 #if defined(CLIB_HAVE_VEC256)
115  while (n_indices >= 8)
116  {
117  u32x8_store_unaligned (u32x8_load_unaligned (src), dst);
118  dst += 8;
119  src += 8;
120  n_indices -= 8;
121  }
122 #endif
123 
124 #if defined(CLIB_HAVE_VEC128)
125  while (n_indices >= 4)
126  {
127  u32x4_store_unaligned (u32x4_load_unaligned (src), dst);
128  dst += 4;
129  src += 4;
130  n_indices -= 4;
131  }
132 #endif
133 
134  while (n_indices)
135  {
136  dst[0] = src[0];
137  dst += 1;
138  src += 1;
139  n_indices -= 1;
140  }
141 }
142 
143 STATIC_ASSERT_OFFSET_OF (vlib_buffer_t, template_end, 64);
146 {
147 #if defined CLIB_HAVE_VEC512
148  b->as_u8x64[0] = bt->as_u8x64[0];
149 #elif defined (CLIB_HAVE_VEC256)
150  b->as_u8x32[0] = bt->as_u8x32[0];
151  b->as_u8x32[1] = bt->as_u8x32[1];
152 #elif defined (CLIB_HAVE_VEC128)
153  b->as_u8x16[0] = bt->as_u8x16[0];
154  b->as_u8x16[1] = bt->as_u8x16[1];
155  b->as_u8x16[2] = bt->as_u8x16[2];
156  b->as_u8x16[3] = bt->as_u8x16[3];
157 #else
158  clib_memcpy_fast (b, bt, 64);
159 #endif
160 }
161 
164 {
165  ASSERT (numa_node < VLIB_BUFFER_MAX_NUMA_NODES);
166  return vm->buffer_main->default_buffer_pool_index_for_numa[numa_node];
167 }
168 
169 /** \brief Translate array of buffer indices into buffer pointers with offset
170 
171  @param vm - (vlib_main_t *) vlib main data structure pointer
172  @param bi - (u32 *) array of buffer indices
173  @param b - (void **) array to store buffer pointers
174  @param count - (uword) number of elements
175  @param offset - (i32) offset applied to each pointer
176 */
179  i32 offset)
180 {
181  uword buffer_mem_start = vm->buffer_main->buffer_mem_start;
182 #ifdef CLIB_HAVE_VEC256
183  u64x4 off = u64x4_splat (buffer_mem_start + offset);
184  /* if count is not const, compiler will not unroll while loop
185  se we maintain two-in-parallel variant */
186  while (count >= 8)
187  {
188  u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
189  u64x4 b1 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi + 4));
190  /* shift and add to get vlib_buffer_t pointer */
191  u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
192  u64x4_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 4);
193  b += 8;
194  bi += 8;
195  count -= 8;
196  }
197 #endif
198  while (count >= 4)
199  {
200 #ifdef CLIB_HAVE_VEC256
201  u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
202  /* shift and add to get vlib_buffer_t pointer */
203  u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
204 #elif defined (CLIB_HAVE_VEC128)
205  u64x2 off = u64x2_splat (buffer_mem_start + offset);
206  u32x4 bi4 = u32x4_load_unaligned (bi);
207  u64x2 b0 = u32x4_extend_to_u64x2 ((u32x4) bi4);
208 #if defined (__aarch64__)
210 #else
211  bi4 = u32x4_shuffle (bi4, 2, 3, 0, 1);
212  u64x2 b1 = u32x4_extend_to_u64x2 ((u32x4) bi4);
213 #endif
214  u64x2_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
215  u64x2_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 2);
216 #else
217  b[0] = vlib_buffer_ptr_from_index (buffer_mem_start, bi[0], offset);
218  b[1] = vlib_buffer_ptr_from_index (buffer_mem_start, bi[1], offset);
219  b[2] = vlib_buffer_ptr_from_index (buffer_mem_start, bi[2], offset);
220  b[3] = vlib_buffer_ptr_from_index (buffer_mem_start, bi[3], offset);
221 #endif
222  b += 4;
223  bi += 4;
224  count -= 4;
225  }
226  while (count)
227  {
228  b[0] = vlib_buffer_ptr_from_index (buffer_mem_start, bi[0], offset);
229  b += 1;
230  bi += 1;
231  count -= 1;
232  }
233 }
234 
235 /** \brief Translate array of buffer indices into buffer pointers
236 
237  @param vm - (vlib_main_t *) vlib main data structure pointer
238  @param bi - (u32 *) array of buffer indices
239  @param b - (vlib_buffer_t **) array to store buffer pointers
240  @param count - (uword) number of elements
241 */
242 
245 {
246  vlib_get_buffers_with_offset (vm, bi, (void **) b, count, 0);
247 }
248 
249 /** \brief Translate buffer pointer into buffer index
250 
251  @param vm - (vlib_main_t *) vlib main data structure pointer
252  @param p - (void *) buffer pointer
253  @return - (u32) buffer index
254 */
255 
258 {
262  ASSERT (offset < bm->buffer_mem_size);
263  ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
264  return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
265 }
266 
267 /** \brief Translate array of buffer pointers into buffer indices with offset
268 
269  @param vm - (vlib_main_t *) vlib main data structure pointer
270  @param b - (void **) array of buffer pointers
271  @param bi - (u32 *) array to store buffer indices
272  @param count - (uword) number of elements
273  @param offset - (i32) offset applied to each pointer
274 */
278 {
279 #ifdef CLIB_HAVE_VEC256
280  u32x8 mask = { 0, 2, 4, 6, 1, 3, 5, 7 };
281  u64x4 off4 = u64x4_splat (vm->buffer_main->buffer_mem_start - offset);
282 
283  while (count >= 8)
284  {
285  /* load 4 pointers into 256-bit register */
286  u64x4 v0 = u64x4_load_unaligned (b);
287  u64x4 v1 = u64x4_load_unaligned (b + 4);
288  u32x8 v2, v3;
289 
290  v0 -= off4;
291  v1 -= off4;
292 
295 
296  /* permute 256-bit register so lower u32s of each buffer index are
297  * placed into lower 128-bits */
298  v2 = u32x8_permute ((u32x8) v0, mask);
299  v3 = u32x8_permute ((u32x8) v1, mask);
300 
301  /* extract lower 128-bits and save them to the array of buffer indices */
302  u32x4_store_unaligned (u32x8_extract_lo (v2), bi);
303  u32x4_store_unaligned (u32x8_extract_lo (v3), bi + 4);
304  bi += 8;
305  b += 8;
306  count -= 8;
307  }
308 #endif
309  while (count >= 4)
310  {
311  /* equivalent non-nector implementation */
312  bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
313  bi[1] = vlib_get_buffer_index (vm, ((u8 *) b[1]) + offset);
314  bi[2] = vlib_get_buffer_index (vm, ((u8 *) b[2]) + offset);
315  bi[3] = vlib_get_buffer_index (vm, ((u8 *) b[3]) + offset);
316  bi += 4;
317  b += 4;
318  count -= 4;
319  }
320  while (count)
321  {
322  bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
323  bi += 1;
324  b += 1;
325  count -= 1;
326  }
327 }
328 
329 /** \brief Translate array of buffer pointers into buffer indices
330 
331  @param vm - (vlib_main_t *) vlib main data structure pointer
332  @param b - (vlib_buffer_t **) array of buffer pointers
333  @param bi - (u32 *) array to store buffer indices
334  @param count - (uword) number of elements
335 */
338  uword count)
339 {
340  vlib_get_buffer_indices_with_offset (vm, (void **) b, bi, count, 0);
341 }
342 
343 /** \brief Get next buffer in buffer linklist, or zero for end of list.
344 
345  @param vm - (vlib_main_t *) vlib main data structure pointer
346  @param b - (void *) buffer pointer
347  @return - (vlib_buffer_t *) next buffer, or NULL
348 */
351 {
352  return (b->flags & VLIB_BUFFER_NEXT_PRESENT
353  ? vlib_get_buffer (vm, b->next_buffer) : 0);
354 }
355 
357  vlib_buffer_t * b_first);
358 
359 /** \brief Get length in bytes of the buffer chain
360 
361  @param vm - (vlib_main_t *) vlib main data structure pointer
362  @param b - (void *) buffer pointer
363  @return - (uword) length of buffer chain
364 */
367 {
368  uword len = b->current_length;
369 
370  if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
371  return len;
372 
373  if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
375 
377 }
378 
379 /** \brief Get length in bytes of the buffer index buffer chain
380 
381  @param vm - (vlib_main_t *) vlib main data structure pointer
382  @param bi - (u32) buffer index
383  @return - (uword) length of buffer chain
384 */
387 {
388  vlib_buffer_t *b = vlib_get_buffer (vm, bi);
389  return vlib_buffer_length_in_chain (vm, b);
390 }
391 
392 /** \brief Copy buffer contents to memory
393 
394  @param vm - (vlib_main_t *) vlib main data structure pointer
395  @param buffer_index - (u32) buffer index
396  @param contents - (u8 *) memory, <strong>must be large enough</strong>
397  @return - (uword) length of buffer chain
398 */
400 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
401 {
402  uword content_len = 0;
403  uword l;
404  vlib_buffer_t *b;
405 
406  while (1)
407  {
408  b = vlib_get_buffer (vm, buffer_index);
409  l = b->current_length;
410  clib_memcpy_fast (contents + content_len, b->data + b->current_data, l);
411  content_len += l;
412  if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
413  break;
414  buffer_index = b->next_buffer;
415  }
416 
417  return content_len;
418 }
419 
422 {
423  return vlib_physmem_get_pa (vm, b->data);
424 }
425 
428 {
429  return vlib_buffer_get_pa (vm, b) + b->current_data;
430 }
431 
432 /** \brief Prefetch buffer metadata by buffer index
433  The first 64 bytes of buffer contains most header information
434 
435  @param vm - (vlib_main_t *) vlib main data structure pointer
436  @param bi - (u32) buffer index
437  @param type - LOAD, STORE. In most cases, STORE is the right answer
438 */
/* Prefetch buffer header given index.  Translates the index to a pointer
   and prefetches the metadata cache line so a later dereference does not
   stall; `type` is LOAD or STORE as for vlib_prefetch_buffer_header. */
#define vlib_prefetch_buffer_with_index(vm,bi,type)	\
  do {							\
    vlib_buffer_t * _b = vlib_get_buffer (vm, bi);	\
    vlib_prefetch_buffer_header (_b, type);		\
  } while (0)
445 
/* Tracked allocation state of a buffer index (debug bookkeeping). */
typedef enum
{
  /* Index is unknown. */
  VLIB_BUFFER_UNKNOWN,

  /* Index is known and free/allocated. */
  VLIB_BUFFER_KNOWN_FREE,
  VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;
456 void vlib_buffer_validate_alloc_free (vlib_main_t * vm, u32 * buffers,
457  uword n_buffers,
459  expected_state);
460 
463 {
465 
467  uword *p = hash_get (bm->buffer_known_hash, buffer_index);
469  return p ? p[0] : VLIB_BUFFER_UNKNOWN;
470 }
471 
472 /* Validates sanity of a single buffer.
473  Returns format'ed vector with error message if any. */
474 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
475  uword follow_chain);
476 
478  u32 * buffers,
479  uword next_buffer_stride,
480  uword n_buffers,
481  vlib_buffer_known_state_t known_state,
482  uword follow_buffer_next);
483 
485 vlib_get_buffer_pool (vlib_main_t * vm, u8 buffer_pool_index)
486 {
488  return vec_elt_at_index (bm->buffer_pools, buffer_pool_index);
489 }
490 
492 vlib_buffer_pool_get (vlib_main_t * vm, u8 buffer_pool_index, u32 * buffers,
493  u32 n_buffers)
494 {
495  vlib_buffer_pool_t *bp = vlib_get_buffer_pool (vm, buffer_pool_index);
496  u32 len;
497 
498  ASSERT (bp->buffers);
499 
500  clib_spinlock_lock (&bp->lock);
501  len = bp->n_avail;
502  if (PREDICT_TRUE (n_buffers < len))
503  {
504  len -= n_buffers;
505  vlib_buffer_copy_indices (buffers, bp->buffers + len, n_buffers);
506  bp->n_avail = len;
507  clib_spinlock_unlock (&bp->lock);
508  return n_buffers;
509  }
510  else
511  {
512  vlib_buffer_copy_indices (buffers, bp->buffers, len);
513  bp->n_avail = 0;
514  clib_spinlock_unlock (&bp->lock);
515  return len;
516  }
517 }
518 
519 
520 /** \brief Allocate buffers from specific pool into supplied array
521 
522  @param vm - (vlib_main_t *) vlib main data structure pointer
523  @param buffers - (u32 * ) buffer index array
524  @param n_buffers - (u32) number of buffers requested
525  @return - (u32) number of buffers actually allocated, may be
526  less than the number requested or zero
527 */
528 
530 vlib_buffer_alloc_from_pool (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
531  u8 buffer_pool_index)
532 {
534  vlib_buffer_pool_t *bp;
536  u32 *src, *dst, len, n_left;
537 
538  bp = vec_elt_at_index (bm->buffer_pools, buffer_pool_index);
539  bpt = vec_elt_at_index (bp->threads, vm->thread_index);
540 
541  dst = buffers;
542  n_left = n_buffers;
543  len = bpt->n_cached;
544 
545  /* per-thread cache contains enough buffers */
546  if (len >= n_buffers)
547  {
548  src = bpt->cached_buffers + len - n_buffers;
549  vlib_buffer_copy_indices (dst, src, n_buffers);
550  bpt->n_cached -= n_buffers;
551 
552  if (CLIB_DEBUG > 0)
553  vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
555  return n_buffers;
556  }
557 
558  /* alloc bigger than cache - take buffers directly from main pool */
559  if (n_buffers >= VLIB_BUFFER_POOL_PER_THREAD_CACHE_SZ)
560  {
561  n_buffers = vlib_buffer_pool_get (vm, buffer_pool_index, buffers,
562  n_buffers);
563 
564  if (CLIB_DEBUG > 0)
565  vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
567  return n_buffers;
568  }
569 
570  /* take everything available in the cache */
571  if (len)
572  {
573  vlib_buffer_copy_indices (dst, bpt->cached_buffers, len);
574  bpt->n_cached = 0;
575  dst += len;
576  n_left -= len;
577  }
578 
579  len = round_pow2 (n_left, 32);
580  len = vlib_buffer_pool_get (vm, buffer_pool_index, bpt->cached_buffers,
581  len);
582  bpt->n_cached = len;
583 
584  if (len)
585  {
586  u32 n_copy = clib_min (len, n_left);
587  src = bpt->cached_buffers + len - n_copy;
588  vlib_buffer_copy_indices (dst, src, n_copy);
589  bpt->n_cached -= n_copy;
590  n_left -= n_copy;
591  }
592 
593  n_buffers -= n_left;
594 
595  /* Verify that buffers are known free. */
596  if (CLIB_DEBUG > 0)
597  vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
599 
600  return n_buffers;
601 }
602 
603 /** \brief Allocate buffers from specific numa node into supplied array
604 
605  @param vm - (vlib_main_t *) vlib main data structure pointer
606  @param buffers - (u32 * ) buffer index array
607  @param n_buffers - (u32) number of buffers requested
608  @param numa_node - (u32) numa node
609  @return - (u32) number of buffers actually allocated, may be
610  less than the number requested or zero
611 */
613 vlib_buffer_alloc_on_numa (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
614  u32 numa_node)
615 {
616  u8 index = vlib_buffer_pool_get_default_for_numa (vm, numa_node);
617  return vlib_buffer_alloc_from_pool (vm, buffers, n_buffers, index);
618 }
619 
620 /** \brief Allocate buffers into supplied array
621 
622  @param vm - (vlib_main_t *) vlib main data structure pointer
623  @param buffers - (u32 * ) buffer index array
624  @param n_buffers - (u32) number of buffers requested
625  @return - (u32) number of buffers actually allocated, may be
626  less than the number requested or zero
627 */
628 
630 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
631 {
632  return vlib_buffer_alloc_on_numa (vm, buffers, n_buffers, vm->numa_node);
633 }
634 
635 /** \brief Allocate buffers into ring
636 
637  @param vm - (vlib_main_t *) vlib main data structure pointer
638  @param buffers - (u32 * ) buffer index ring
639  @param start - (u32) first slot in the ring
640  @param ring_size - (u32) ring size
641  @param n_buffers - (u32) number of buffers requested
642  @return - (u32) number of buffers actually allocated, may be
643  less than the number requested or zero
644 */
647  u32 ring_size, u32 n_buffers)
648 {
649  u32 n_alloc;
650 
651  ASSERT (n_buffers <= ring_size);
652 
653  if (PREDICT_TRUE (start + n_buffers <= ring_size))
654  return vlib_buffer_alloc (vm, ring + start, n_buffers);
655 
656  n_alloc = vlib_buffer_alloc (vm, ring + start, ring_size - start);
657 
658  if (PREDICT_TRUE (n_alloc == ring_size - start))
659  n_alloc += vlib_buffer_alloc (vm, ring, n_buffers - n_alloc);
660 
661  return n_alloc;
662 }
663 
664 /** \brief Allocate buffers into ring from specific buffer pool
665 
666  @param vm - (vlib_main_t *) vlib main data structure pointer
667  @param buffers - (u32 * ) buffer index ring
668  @param start - (u32) first slot in the ring
669  @param ring_size - (u32) ring size
670  @param n_buffers - (u32) number of buffers requested
671  @return - (u32) number of buffers actually allocated, may be
672  less than the number requested or zero
673 */
676  u32 ring_size, u32 n_buffers,
677  u8 buffer_pool_index)
678 {
679  u32 n_alloc;
680 
681  ASSERT (n_buffers <= ring_size);
682 
683  if (PREDICT_TRUE (start + n_buffers <= ring_size))
684  return vlib_buffer_alloc_from_pool (vm, ring + start, n_buffers,
685  buffer_pool_index);
686 
687  n_alloc = vlib_buffer_alloc_from_pool (vm, ring + start, ring_size - start,
688  buffer_pool_index);
689 
690  if (PREDICT_TRUE (n_alloc == ring_size - start))
691  n_alloc += vlib_buffer_alloc_from_pool (vm, ring, n_buffers - n_alloc,
692  buffer_pool_index);
693 
694  return n_alloc;
695 }
696 
698 vlib_buffer_pool_put (vlib_main_t * vm, u8 buffer_pool_index,
699  u32 * buffers, u32 n_buffers)
700 {
701  vlib_buffer_pool_t *bp = vlib_get_buffer_pool (vm, buffer_pool_index);
703  vm->thread_index);
704  u32 n_cached, n_empty;
705 
706  if (CLIB_DEBUG > 0)
707  vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
709 
710  n_cached = bpt->n_cached;
711  n_empty = VLIB_BUFFER_POOL_PER_THREAD_CACHE_SZ - n_cached;
712  if (n_buffers <= n_empty)
713  {
714  vlib_buffer_copy_indices (bpt->cached_buffers + n_cached,
715  buffers, n_buffers);
716  bpt->n_cached = n_cached + n_buffers;
717  return;
718  }
719 
720  vlib_buffer_copy_indices (bpt->cached_buffers + n_cached,
721  buffers + n_buffers - n_empty, n_empty);
723 
724  clib_spinlock_lock (&bp->lock);
725  vlib_buffer_copy_indices (bp->buffers + bp->n_avail, buffers,
726  n_buffers - n_empty);
727  bp->n_avail += n_buffers - n_empty;
728  clib_spinlock_unlock (&bp->lock);
729 }
730 
732 vlib_buffer_free_inline (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
733  int maybe_next)
734 {
735  const int queue_size = 128;
736  vlib_buffer_pool_t *bp = 0;
737  u8 buffer_pool_index = ~0;
738  u32 n_queue = 0, queue[queue_size + 4];
739  vlib_buffer_t bt = { };
740 #if defined(CLIB_HAVE_VEC128)
741  vlib_buffer_t bpi_mask = {.buffer_pool_index = ~0 };
742  vlib_buffer_t bpi_vec = {.buffer_pool_index = ~0 };
743  vlib_buffer_t flags_refs_mask = {
744  .flags = VLIB_BUFFER_NEXT_PRESENT,
745  .ref_count = ~1
746  };
747 #endif
748 
749  while (n_buffers)
750  {
751  vlib_buffer_t *b[8];
752  u32 bi, sum = 0, flags, next;
753 
754  if (n_buffers < 12)
755  goto one_by_one;
756 
757  vlib_get_buffers (vm, buffers, b, 4);
758  vlib_get_buffers (vm, buffers + 8, b + 4, 4);
759 
760  vlib_prefetch_buffer_header (b[4], LOAD);
761  vlib_prefetch_buffer_header (b[5], LOAD);
762  vlib_prefetch_buffer_header (b[6], LOAD);
763  vlib_prefetch_buffer_header (b[7], LOAD);
764 
765 #if defined(CLIB_HAVE_VEC128)
766  u8x16 p0, p1, p2, p3, r;
767  p0 = u8x16_load_unaligned (b[0]);
768  p1 = u8x16_load_unaligned (b[1]);
769  p2 = u8x16_load_unaligned (b[2]);
770  p3 = u8x16_load_unaligned (b[3]);
771 
772  r = p0 ^ bpi_vec.as_u8x16[0];
773  r |= p1 ^ bpi_vec.as_u8x16[0];
774  r |= p2 ^ bpi_vec.as_u8x16[0];
775  r |= p3 ^ bpi_vec.as_u8x16[0];
776  r &= bpi_mask.as_u8x16[0];
777  r |= (p0 | p1 | p2 | p3) & flags_refs_mask.as_u8x16[0];
778 
779  sum = !u8x16_is_all_zero (r);
780 #else
781  sum |= b[0]->flags;
782  sum |= b[1]->flags;
783  sum |= b[2]->flags;
784  sum |= b[3]->flags;
785  sum &= VLIB_BUFFER_NEXT_PRESENT;
786  sum += b[0]->ref_count - 1;
787  sum += b[1]->ref_count - 1;
788  sum += b[2]->ref_count - 1;
789  sum += b[3]->ref_count - 1;
790  sum |= b[0]->buffer_pool_index ^ buffer_pool_index;
791  sum |= b[1]->buffer_pool_index ^ buffer_pool_index;
792  sum |= b[2]->buffer_pool_index ^ buffer_pool_index;
793  sum |= b[3]->buffer_pool_index ^ buffer_pool_index;
794 #endif
795 
796  if (sum)
797  goto one_by_one;
798 
799  vlib_buffer_copy_indices (queue + n_queue, buffers, 4);
800  vlib_buffer_copy_template (b[0], &bt);
801  vlib_buffer_copy_template (b[1], &bt);
802  vlib_buffer_copy_template (b[2], &bt);
803  vlib_buffer_copy_template (b[3], &bt);
804  n_queue += 4;
805 
806  vlib_buffer_validate (vm, b[0]);
807  vlib_buffer_validate (vm, b[1]);
808  vlib_buffer_validate (vm, b[2]);
809  vlib_buffer_validate (vm, b[3]);
810 
815 
816  if (n_queue >= queue_size)
817  {
818  vlib_buffer_pool_put (vm, buffer_pool_index, queue, n_queue);
819  n_queue = 0;
820  }
821  buffers += 4;
822  n_buffers -= 4;
823  continue;
824 
825  one_by_one:
826  bi = buffers[0];
827 
828  next_in_chain:
829  b[0] = vlib_get_buffer (vm, bi);
830  flags = b[0]->flags;
831  next = b[0]->next_buffer;
832 
833  if (PREDICT_FALSE (buffer_pool_index != b[0]->buffer_pool_index))
834  {
835 
836  if (n_queue)
837  {
838  vlib_buffer_pool_put (vm, buffer_pool_index, queue, n_queue);
839  n_queue = 0;
840  }
841 
842  buffer_pool_index = b[0]->buffer_pool_index;
843 #if defined(CLIB_HAVE_VEC128)
844  bpi_vec.buffer_pool_index = buffer_pool_index;
845 #endif
846  bp = vlib_get_buffer_pool (vm, buffer_pool_index);
848  }
849 
850  vlib_buffer_validate (vm, b[0]);
851 
853 
854  if (clib_atomic_sub_fetch (&b[0]->ref_count, 1) == 0)
855  {
856  vlib_buffer_copy_template (b[0], &bt);
857  queue[n_queue++] = bi;
858  }
859 
860  if (n_queue == queue_size)
861  {
862  vlib_buffer_pool_put (vm, buffer_pool_index, queue, queue_size);
863  n_queue = 0;
864  }
865 
866  if (maybe_next && (flags & VLIB_BUFFER_NEXT_PRESENT))
867  {
868  bi = next;
869  goto next_in_chain;
870  }
871 
872  buffers++;
873  n_buffers--;
874  }
875 
876  if (n_queue)
877  vlib_buffer_pool_put (vm, buffer_pool_index, queue, n_queue);
878 }
879 
880 
881 /** \brief Free buffers
882  Frees the entire buffer chain for each buffer
883 
884  @param vm - (vlib_main_t *) vlib main data structure pointer
885  @param buffers - (u32 * ) buffer index array
886  @param n_buffers - (u32) number of buffers to free
887 
888 */
889 always_inline void
891  /* pointer to first buffer */
892  u32 * buffers,
893  /* number of buffers to free */
894  u32 n_buffers)
895 {
896  vlib_buffer_free_inline (vm, buffers, n_buffers, /* maybe next */ 1);
897 }
898 
899 /** \brief Free buffers, does not free the buffer chain for each buffer
900 
901  @param vm - (vlib_main_t *) vlib main data structure pointer
902  @param buffers - (u32 * ) buffer index array
903  @param n_buffers - (u32) number of buffers to free
904 
905 */
906 always_inline void
908  /* pointer to first buffer */
909  u32 * buffers,
910  /* number of buffers to free */
911  u32 n_buffers)
912 {
913  vlib_buffer_free_inline (vm, buffers, n_buffers, /* maybe next */ 0);
914 }
915 
916 /** \brief Free one buffer
917  Shorthand to free a single buffer chain.
918 
919  @param vm - (vlib_main_t *) vlib main data structure pointer
920  @param buffer_index - (u32) buffer index to free
921 */
922 always_inline void
924 {
925  vlib_buffer_free_inline (vm, &buffer_index, 1, /* maybe next */ 1);
926 }
927 
928 /** \brief Free buffers from ring
929 
930  @param vm - (vlib_main_t *) vlib main data structure pointer
931  @param buffers - (u32 * ) buffer index ring
932  @param start - (u32) first slot in the ring
933  @param ring_size - (u32) ring size
934  @param n_buffers - (u32) number of buffers
935 */
936 always_inline void
938  u32 ring_size, u32 n_buffers)
939 {
940  ASSERT (n_buffers <= ring_size);
941 
942  if (PREDICT_TRUE (start + n_buffers <= ring_size))
943  {
944  vlib_buffer_free (vm, ring + start, n_buffers);
945  }
946  else
947  {
948  vlib_buffer_free (vm, ring + start, ring_size - start);
949  vlib_buffer_free (vm, ring, n_buffers - (ring_size - start));
950  }
951 }
952 
953 /** \brief Free buffers from ring without freeing tail buffers
954 
955  @param vm - (vlib_main_t *) vlib main data structure pointer
956  @param buffers - (u32 * ) buffer index ring
957  @param start - (u32) first slot in the ring
958  @param ring_size - (u32) ring size
959  @param n_buffers - (u32) number of buffers
960 */
961 always_inline void
963  u32 ring_size, u32 n_buffers)
964 {
965  ASSERT (n_buffers <= ring_size);
966 
967  if (PREDICT_TRUE (start + n_buffers <= ring_size))
968  {
969  vlib_buffer_free_no_next (vm, ring + start, n_buffers);
970  }
971  else
972  {
973  vlib_buffer_free_no_next (vm, ring + start, ring_size - start);
974  vlib_buffer_free_no_next (vm, ring, n_buffers - (ring_size - start));
975  }
976 }
977 
978 /* Append given data to end of buffer, possibly allocating new buffers. */
979 int vlib_buffer_add_data (vlib_main_t * vm, u32 * buffer_index, void *data,
980  u32 n_data_bytes);
981 
/* Define vlib_buffer and vnet_buffer flags bits preserved for copy/clone.
   Chain/trace-related vlib flags plus every non-vlib (user) flag bit are
   kept; all other vlib flag bits are cleared on the copy. */
#define VLIB_BUFFER_COPY_CLONE_FLAGS_MASK 			\
  (VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID |	\
   VLIB_BUFFER_IS_TRACED | ~VLIB_BUFFER_FLAGS_ALL)
986 
987 /* duplicate all buffers in chain */
990 {
991  vlib_buffer_t *s, *d, *fd;
992  uword n_alloc, n_buffers = 1;
994  int i;
995 
996  s = b;
997  while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
998  {
999  n_buffers++;
1000  s = vlib_get_buffer (vm, s->next_buffer);
1001  }
1002  u32 new_buffers[n_buffers];
1003 
1004  n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
1005 
1006  /* No guarantee that we'll get all the buffers we asked for */
1007  if (PREDICT_FALSE (n_alloc < n_buffers))
1008  {
1009  if (n_alloc > 0)
1010  vlib_buffer_free (vm, new_buffers, n_alloc);
1011  return 0;
1012  }
1013 
1014  /* 1st segment */
1015  s = b;
1016  fd = d = vlib_get_buffer (vm, new_buffers[0]);
1017  d->current_data = s->current_data;
1019  d->flags = s->flags & flag_mask;
1020  d->trace_handle = s->trace_handle;
1023  clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
1024  clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
1027 
1028  /* next segments */
1029  for (i = 1; i < n_buffers; i++)
1030  {
1031  /* previous */
1032  d->next_buffer = new_buffers[i];
1033  /* current */
1034  s = vlib_get_buffer (vm, s->next_buffer);
1035  d = vlib_get_buffer (vm, new_buffers[i]);
1036  d->current_data = s->current_data;
1040  d->flags = s->flags & flag_mask;
1041  }
1042 
1043  return fd;
1044 }
1045 
1046 /* duplicate first buffer in chain */
1049 {
1050  vlib_buffer_t *d;
1051 
1052  if ((vlib_buffer_alloc (vm, di, 1)) != 1)
1053  return 0;
1054 
1055  d = vlib_get_buffer (vm, *di);
1056  /* 1st segment */
1057  d->current_data = b->current_data;
1059  clib_memcpy_fast (d->opaque, b->opaque, sizeof (b->opaque));
1060  clib_memcpy_fast (d->opaque2, b->opaque2, sizeof (b->opaque2));
1063 
1064  return d;
1065 }
1066 
1067 /* \brief Move packet from current position to offset position in buffer.
1068  Only work for small packet using one buffer with room to fit the move
1069  @param vm - (vlib_main_t *) vlib main data structure pointer
1070  @param b - (vlib_buffer_t *) pointer to buffer
1071  @param offset - (i16) position to move the packet in buffer
1072  */
1073 always_inline void
1075 {
1076  ASSERT ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
1077  ASSERT (offset + VLIB_BUFFER_PRE_DATA_SIZE >= 0);
1078  ASSERT (offset + b->current_length <
1080 
1081  u8 *source = vlib_buffer_get_current (b);
1082  b->current_data = offset;
1083  u8 *destination = vlib_buffer_get_current (b);
1084  u16 length = b->current_length;
1085 
1086  if (source + length <= destination) /* no overlap */
1087  clib_memcpy_fast (destination, source, length);
1088  else
1089  memmove (destination, source, length);
1090 }
1091 
1092 /** \brief Create a maximum of 256 clones of buffer and store them
1093  in the supplied array
1094 
1095  @param vm - (vlib_main_t *) vlib main data structure pointer
1096  @param src_buffer - (u32) source buffer index
1097  @param buffers - (u32 * ) buffer index array
1098  @param n_buffers - (u16) number of buffer clones requested (<=256)
1099  @param head_end_offset - (u16) offset relative to current position
1100  where packet head ends
1101  @param offset - (i16) copy packet head at current position if 0,
1102  else at offset position to change headroom space as specified
1103  @return - (u16) number of buffers actually cloned, may be
1104  less than the number requested or zero
1105 */
1107 vlib_buffer_clone_256 (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
1108  u16 n_buffers, u16 head_end_offset, i16 offset)
1109 {
1110  u16 i;
1111  vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
1112 
1113  ASSERT (s->ref_count == 1);
1114  ASSERT (n_buffers);
1115  ASSERT (n_buffers <= 256);
1116  ASSERT (offset + VLIB_BUFFER_PRE_DATA_SIZE >= 0);
1117  ASSERT ((offset + head_end_offset) <
1119 
1120  if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
1121  {
1122  buffers[0] = src_buffer;
1123  if (offset)
1124  vlib_buffer_move (vm, s, offset);
1125 
1126  for (i = 1; i < n_buffers; i++)
1127  {
1128  vlib_buffer_t *d;
1129  d = vlib_buffer_copy (vm, s);
1130  if (d == 0)
1131  return i;
1132  buffers[i] = vlib_get_buffer_index (vm, d);
1133 
1134  }
1135  return n_buffers;
1136  }
1137 
1138  if (PREDICT_FALSE ((n_buffers == 1) && (offset == 0)))
1139  {
1140  buffers[0] = src_buffer;
1141  return 1;
1142  }
1143 
1144  n_buffers = vlib_buffer_alloc_from_pool (vm, buffers, n_buffers,
1145  s->buffer_pool_index);
1146 
1147  for (i = 0; i < n_buffers; i++)
1148  {
1149  vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
1150  if (offset)
1151  d->current_data = offset;
1152  else
1153  d->current_data = s->current_data;
1154 
1155  d->current_length = head_end_offset;
1157 
1159  head_end_offset;
1160  if (PREDICT_FALSE (s->flags & VLIB_BUFFER_NEXT_PRESENT))
1161  {
1164  }
1166  VLIB_BUFFER_NEXT_PRESENT;
1167  d->trace_handle = s->trace_handle;
1168  clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
1169  clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
1171  vlib_buffer_get_current (s), head_end_offset);
1172  d->next_buffer = src_buffer;
1173  }
1174  vlib_buffer_advance (s, head_end_offset);
1175  s->ref_count = n_buffers ? n_buffers : s->ref_count;
1176  while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
1177  {
1178  s = vlib_get_buffer (vm, s->next_buffer);
1179  s->ref_count = n_buffers ? n_buffers : s->ref_count;
1180  }
1181 
1182  return n_buffers;
1183 }
1184 
1185 /** \brief Create multiple clones of buffer and store them
1186  in the supplied array
1187 
1188  @param vm - (vlib_main_t *) vlib main data structure pointer
1189  @param src_buffer - (u32) source buffer index
1190  @param buffers - (u32 * ) buffer index array
1191  @param n_buffers - (u16) number of buffer clones requested (<=256)
1192  @param head_end_offset - (u16) offset relative to current position
1193  where packet head ends
1194  @param offset - (i16) copy packet head at current position if 0,
1195  else at offset position to change headroom space as specified
1196  @return - (u16) number of buffers actually cloned, may be
1197  less than the number requested or zero
1198 */
1200 vlib_buffer_clone_at_offset (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
1201  u16 n_buffers, u16 head_end_offset, i16 offset)
1202 {
1203  vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
1204  u16 n_cloned = 0;
1205 
1206  while (n_buffers > 256)
1207  {
1208  vlib_buffer_t *copy;
1209  copy = vlib_buffer_copy (vm, s);
1210  n_cloned += vlib_buffer_clone_256 (vm,
1211  vlib_get_buffer_index (vm, copy),
1212  (buffers + n_cloned),
1213  256, head_end_offset, offset);
1214  n_buffers -= 256;
1215  }
1216  n_cloned += vlib_buffer_clone_256 (vm, src_buffer,
1217  buffers + n_cloned,
1218  n_buffers, head_end_offset, offset);
1219 
1220  return n_cloned;
1221 }
1222 
1223 /** \brief Create multiple clones of buffer and store them
1224  in the supplied array
1225 
1226  @param vm - (vlib_main_t *) vlib main data structure pointer
1227  @param src_buffer - (u32) source buffer index
1228  @param buffers - (u32 * ) buffer index array
1229  @param n_buffers - (u16) number of buffer clones requested (<=256)
1230  @param head_end_offset - (u16) offset relative to current position
1231  where packet head ends
1232  @return - (u16) number of buffers actually cloned, may be
1233  less than the number requested or zero
1234 */
1236 vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
1237  u16 n_buffers, u16 head_end_offset)
1238 {
1239  return vlib_buffer_clone_at_offset (vm, src_buffer, buffers, n_buffers,
1240  head_end_offset, 0);
1241 }
1242 
1243 /** \brief Attach cloned tail to the buffer
1244 
1245  @param vm - (vlib_main_t *) vlib main data structure pointer
1246  @param head - (vlib_buffer_t *) head buffer
1247  @param tail - (Vlib buffer_t *) tail buffer to clone and attach to head
1248 */
1249 
1250 always_inline void
1252  vlib_buffer_t * tail)
1253 {
1254  ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
1255  ASSERT (head->buffer_pool_index == tail->buffer_pool_index);
1256 
1257  head->flags |= VLIB_BUFFER_NEXT_PRESENT;
1258  head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
1259  head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
1260  head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
1261  head->next_buffer = vlib_get_buffer_index (vm, tail);
1264 
1265 next_segment:
1266  clib_atomic_add_fetch (&tail->ref_count, 1);
1267 
1268  if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
1269  {
1270  tail = vlib_get_buffer (vm, tail->next_buffer);
1271  goto next_segment;
1272  }
1273 }
1274 
1275 /* Initializes the buffer as an empty packet with no chained buffers. */
1276 always_inline void
1278 {
1280  first->current_length = 0;
1281  first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1282  first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
1283 }
1284 
1285 /* The provided next_bi buffer index is appended to the end of the packet. */
1288 {
1289  vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
1290  last->next_buffer = next_bi;
1291  last->flags |= VLIB_BUFFER_NEXT_PRESENT;
1292  next_buffer->current_length = 0;
1293  next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1294  return next_buffer;
1295 }
1296 
1297 /* Increases or decreases the packet length.
1298  * It does not allocate or deallocate new buffers.
1299  * Therefore, the added length must be compatible
1300  * with the last buffer. */
1301 always_inline void
1303  vlib_buffer_t * last, i32 len)
1304 {
1305  last->current_length += len;
1306  if (first != last)
1308 }
1309 
1310 /* Copy data to the end of the packet and increases its length.
1311  * It does not allocate new buffers.
1312  * Returns the number of copied bytes. */
1315  vlib_buffer_t * first,
1316  vlib_buffer_t * last, void *data, u16 data_len)
1317 {
1318  u32 n_buffer_bytes = vlib_buffer_get_default_data_size (vm);
1319  ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
1320  u16 len = clib_min (data_len,
1321  n_buffer_bytes - last->current_length -
1322  last->current_data);
1324  data, len);
1325  vlib_buffer_chain_increase_length (first, last, len);
1326  return len;
1327 }
1328 
1329 /* Copy data to the end of the packet and increases its length.
1330  * Allocates additional buffers from the free list if necessary.
1331  * Returns the number of copied bytes.
1332  * 'last' value is modified whenever new buffers are allocated and
1333  * chained and points to the last buffer in the chain. */
1334 u16
1336  vlib_buffer_t * first,
1337  vlib_buffer_t ** last, void *data,
1338  u16 data_len);
1340 
1343 
1344 typedef struct
1345 {
1346  /* Vector of packet data. */
1348 
1349  /* Number of buffers to allocate in each call to allocator. */
1351 
1354 
1357  void *packet_data,
1358  uword n_packet_data_bytes,
1359  uword min_n_buffers_each_alloc,
1360  char *fmt, ...);
1361 
1364  u32 * bi_result);
1365 
1366 always_inline void
1368 {
1369  vec_free (t->packet_data);
1370 }
1371 
1374 {
1375  return b->data + vlib_buffer_get_default_data_size (vm) -
1376  ((u8 *) vlib_buffer_get_current (b) + b->current_length);
1377 }
1378 
1381 {
1382  vlib_buffer_t *db = b, *sb, *first = b;
1383  int is_cloned = 0;
1384  u32 bytes_left = 0, data_size;
1385  u16 src_left, dst_left, n_buffers = 1;
1386  u8 *dp, *sp;
1387  u32 to_free = 0;
1388 
1389  if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
1390  return 1;
1391 
1392  data_size = vlib_buffer_get_default_data_size (vm);
1393 
1394  dst_left = vlib_buffer_space_left_at_end (vm, b);
1395 
1396  while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
1397  {
1398  b = vlib_get_buffer (vm, b->next_buffer);
1399  if (b->ref_count > 1)
1400  is_cloned = 1;
1401  bytes_left += b->current_length;
1402  n_buffers++;
1403  }
1404 
1405  /* if buffer is cloned, create completely new chain - unless everything fits
1406  * into one buffer */
1407  if (is_cloned && bytes_left >= dst_left)
1408  {
1409  u32 len = 0;
1410  u32 space_needed = bytes_left - dst_left;
1411  u32 tail;
1412 
1413  if (vlib_buffer_alloc (vm, &tail, 1) == 0)
1414  return 0;
1415 
1416  ++n_buffers;
1417  len += data_size;
1418  b = vlib_get_buffer (vm, tail);
1419 
1420  while (len < space_needed)
1421  {
1422  u32 bi;
1423  if (vlib_buffer_alloc (vm, &bi, 1) == 0)
1424  {
1425  vlib_buffer_free_one (vm, tail);
1426  return 0;
1427  }
1428  b->flags = VLIB_BUFFER_NEXT_PRESENT;
1429  b->next_buffer = bi;
1430  b = vlib_get_buffer (vm, bi);
1431  len += data_size;
1432  n_buffers++;
1433  }
1434  sb = vlib_get_buffer (vm, first->next_buffer);
1435  to_free = first->next_buffer;
1436  first->next_buffer = tail;
1437  }
1438  else
1439  sb = vlib_get_buffer (vm, first->next_buffer);
1440 
1441  src_left = sb->current_length;
1442  sp = vlib_buffer_get_current (sb);
1443  dp = vlib_buffer_get_tail (db);
1444 
1445  while (bytes_left)
1446  {
1447  u16 bytes_to_copy;
1448 
1449  if (dst_left == 0)
1450  {
1451  db->current_length = dp - (u8 *) vlib_buffer_get_current (db);
1452  ASSERT (db->flags & VLIB_BUFFER_NEXT_PRESENT);
1453  db = vlib_get_buffer (vm, db->next_buffer);
1454  dst_left = data_size;
1455  if (db->current_data > 0)
1456  {
1457  db->current_data = 0;
1458  }
1459  else
1460  {
1461  dst_left += -db->current_data;
1462  }
1463  dp = vlib_buffer_get_current (db);
1464  }
1465 
1466  while (src_left == 0)
1467  {
1468  ASSERT (sb->flags & VLIB_BUFFER_NEXT_PRESENT);
1469  sb = vlib_get_buffer (vm, sb->next_buffer);
1470  src_left = sb->current_length;
1471  sp = vlib_buffer_get_current (sb);
1472  }
1473 
1474  bytes_to_copy = clib_min (dst_left, src_left);
1475 
1476  if (dp != sp)
1477  {
1478  if (sb == db)
1479  bytes_to_copy = clib_min (bytes_to_copy, sp - dp);
1480 
1481  clib_memcpy_fast (dp, sp, bytes_to_copy);
1482  }
1483 
1484  src_left -= bytes_to_copy;
1485  dst_left -= bytes_to_copy;
1486  dp += bytes_to_copy;
1487  sp += bytes_to_copy;
1488  bytes_left -= bytes_to_copy;
1489  }
1490  if (db != first)
1491  db->current_data = 0;
1492  db->current_length = dp - (u8 *) vlib_buffer_get_current (db);
1493 
1494  if (is_cloned && to_free)
1495  vlib_buffer_free_one (vm, to_free);
1496  else
1497  {
1498  if (db->flags & VLIB_BUFFER_NEXT_PRESENT)
1500  db->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1501  b = first;
1502  n_buffers = 1;
1503  while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
1504  {
1505  b = vlib_get_buffer (vm, b->next_buffer);
1506  ++n_buffers;
1507  }
1508  }
1509 
1510  first->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
1511 
1512  return n_buffers;
1513 }
1514 
1515 #endif /* included_vlib_buffer_funcs_h */
1516 
1517 /*
1518  * fd.io coding-style-patch-verification: ON
1519  *
1520  * Local Variables:
1521  * eval: (c-set-style "gnu")
1522  * End:
1523  */
u32 opaque2[14]
Definition: buffer.h:170
#define VLIB_BUFFER_COPY_CLONE_FLAGS_MASK
Definition: buffer_funcs.h:983
u32 flags
buffer flags: VLIB_BUFFER_FREE_LIST_INDEX_MASK: bits used to store free list index, VLIB_BUFFER_IS_TRACED: trace this buffer.
Definition: buffer.h:124
u8 count
Definition: dhcp.api:208
static uword vlib_buffer_get_current_pa(vlib_main_t *vm, vlib_buffer_t *b)
Definition: buffer_funcs.h:427
static u8 * vlib_buffer_get_tail(vlib_buffer_t *b)
Get pointer to the end of buffer's data.
Definition: buffer.h:310
#define clib_min(x, y)
Definition: clib.h:295
static_always_inline void clib_spinlock_unlock(clib_spinlock_t *p)
Definition: lock.h:102
static_always_inline void clib_spinlock_lock(clib_spinlock_t *p)
Definition: lock.h:80
uword vlib_buffer_length_in_chain_slow_path(vlib_main_t *vm, vlib_buffer_t *b_first)
Definition: buffer.c:78
static void vlib_buffer_free(vlib_main_t *vm, u32 *buffers, u32 n_buffers)
Free buffers Frees the entire buffer chain for each buffer.
Definition: buffer_funcs.h:890
#define clib_atomic_add_fetch(a, b)
Definition: atomics.h:30
format_function_t format_vlib_buffer_contents
static uword vlib_buffer_get_pa(vlib_main_t *vm, vlib_buffer_t *b)
Definition: buffer_funcs.h:421
#define PREDICT_TRUE(x)
Definition: clib.h:112
static_always_inline u64x2 u32x4_extend_to_u64x2_high(u32x4 v)
Definition: vector_neon.h:125
format_function_t format_vlib_buffer
i16 current_data
signed offset in data[], pre_data[] that we are currently processing.
Definition: buffer.h:110
static_always_inline vlib_buffer_pool_t * vlib_get_buffer_pool(vlib_main_t *vm, u8 buffer_pool_index)
Definition: buffer_funcs.h:485
#define clib_memcpy_fast(a, b, c)
Definition: string.h:81
u16 vlib_buffer_chain_append_data_with_alloc(vlib_main_t *vm, vlib_buffer_t *first, vlib_buffer_t **last, void *data, u16 data_len)
Definition: buffer.c:476
static void vlib_buffer_attach_clone(vlib_main_t *vm, vlib_buffer_t *head, vlib_buffer_t *tail)
Attach cloned tail to the buffer.
static void vlib_buffer_chain_increase_length(vlib_buffer_t *first, vlib_buffer_t *last, i32 len)
static void vlib_buffer_move(vlib_main_t *vm, vlib_buffer_t *b, i16 offset)
#define VLIB_BUFFER_PRE_DATA_SIZE
Definition: buffer.h:51
int vlib_buffer_add_data(vlib_main_t *vm, u32 *buffer_index, void *data, u32 n_data_bytes)
Definition: buffer.c:421
u32 thread_index
Definition: main.h:218
u16 current_length
Nbytes between current data and the end of this buffer.
Definition: buffer.h:113
#define CLIB_LOG2_CACHE_LINE_BYTES
Definition: cache.h:50
u8 data[0]
Packet data.
Definition: buffer.h:181
vl_api_address_t src
Definition: gre.api:60
int i
vlib_buffer_main_t * buffer_main
Definition: main.h:152
foreach_avx2_vec256i static foreach_avx2_vec256u u32x8 u32x8_permute(u32x8 v, u32x8 idx)
Definition: vector_avx2.h:73
static_always_inline void vlib_get_buffers_with_offset(vlib_main_t *vm, u32 *bi, void **b, int count, i32 offset)
Translate array of buffer indices into buffer pointers with offset.
Definition: buffer_funcs.h:178
u32 numa_node
Definition: main.h:220
static vlib_buffer_known_state_t vlib_buffer_is_known(vlib_main_t *vm, u32 buffer_index)
Definition: buffer_funcs.h:462
static u32 vlib_buffer_alloc_to_ring_from_pool(vlib_main_t *vm, u32 *ring, u32 start, u32 ring_size, u32 n_buffers, u8 buffer_pool_index)
Allocate buffers into ring from specific buffer pool.
Definition: buffer_funcs.h:675
static uword vlib_buffer_length_in_chain(vlib_main_t *vm, vlib_buffer_t *b)
Get length in bytes of the buffer chain.
Definition: buffer_funcs.h:366
static u32 vlib_buffer_alloc_on_numa(vlib_main_t *vm, u32 *buffers, u32 n_buffers, u32 numa_node)
Allocate buffers from specific numa node into supplied array.
Definition: buffer_funcs.h:613
unsigned char u8
Definition: types.h:56
static vlib_buffer_t * vlib_buffer_copy(vlib_main_t *vm, vlib_buffer_t *b)
Definition: buffer_funcs.h:989
u8 default_buffer_pool_index_for_numa[VLIB_BUFFER_MAX_NUMA_NODES]
Definition: buffer.h:462
STATIC_ASSERT_OFFSET_OF(vlib_buffer_t, template_end, 64)
u8 buffer_pool_index
index of buffer pool this buffer belongs.
Definition: buffer.h:133
static_always_inline void vlib_get_buffer_indices(vlib_main_t *vm, vlib_buffer_t **b, u32 *bi, uword count)
Translate array of buffer pointers into buffer indices.
Definition: buffer_funcs.h:337
format_function_t format_vlib_buffer_no_chain
vlib_buffer_pool_thread_t * threads
Definition: buffer.h:439
u8 *() format_function_t(u8 *s, va_list *args)
Definition: format.h:48
#define static_always_inline
Definition: clib.h:99
#define vlib_prefetch_buffer_header(b, type)
Prefetch buffer metadata.
Definition: buffer.h:203
#define vec_elt_at_index(v, i)
Get vector value at index i checking that i is in bounds.
unsigned int u32
Definition: types.h:88
static u32 vlib_get_buffer_index(vlib_main_t *vm, void *p)
Translate buffer pointer into buffer index.
Definition: buffer_funcs.h:257
static u16 vlib_buffer_chain_append_data(vlib_main_t *vm, vlib_buffer_t *first, vlib_buffer_t *last, void *data, u16 data_len)
epu8_epi32 epu16_epi32 u64x2
Definition: vector_sse42.h:674
static void vlib_buffer_free_from_ring(vlib_main_t *vm, u32 *ring, u32 start, u32 ring_size, u32 n_buffers)
Free buffers from ring.
Definition: buffer_funcs.h:937
static heap_elt_t * first(heap_header_t *h)
Definition: heap.c:59
static u32 vlib_buffer_chain_linearize(vlib_main_t *vm, vlib_buffer_t *b)
#define hash_get(h, key)
Definition: hash.h:249
static vlib_buffer_t * vlib_buffer_copy_no_chain(vlib_main_t *vm, vlib_buffer_t *b, u32 *di)
u8 * vlib_validate_buffer(vlib_main_t *vm, u32 buffer_index, uword follow_chain)
Definition: buffer.c:254
static_always_inline void vlib_buffer_copy_template(vlib_buffer_t *b, vlib_buffer_t *bt)
Definition: buffer_funcs.h:145
format_function_t format_vlib_buffer_and_data
unsigned short u16
Definition: types.h:57
static void vlib_buffer_validate(vlib_main_t *vm, vlib_buffer_t *b)
Definition: buffer_funcs.h:55
vlib_buffer_t buffer_template
Definition: buffer.h:442
u8 * vlib_validate_buffers(vlib_main_t *vm, u32 *buffers, uword next_buffer_stride, uword n_buffers, vlib_buffer_known_state_t known_state, uword follow_buffer_next)
Definition: buffer.c:261
static void * vlib_buffer_get_current(vlib_buffer_t *b)
Get pointer to current data to process.
Definition: buffer.h:229
static_always_inline uword vlib_buffer_pool_get(vlib_main_t *vm, u8 buffer_pool_index, u32 *buffers, u32 n_buffers)
Definition: buffer_funcs.h:492
void di(unformat_input_t *i)
Definition: unformat.c:163
static_always_inline u32x4 u32x4_shuffle(u32x4 v, const int a, const int b, const int c, const int d)
Definition: vector_sse42.h:651
vlib_buffer_pool_t * buffer_pools
Definition: buffer.h:454
u32 trace_handle
Specifies trace buffer handle if VLIB_PACKET_IS_TRACED flag is set.
Definition: buffer.h:163
#define PREDICT_FALSE(x)
Definition: clib.h:111
#define always_inline
Definition: ipsec.h:28
static_always_inline void vlib_get_buffer_indices_with_offset(vlib_main_t *vm, void **b, u32 *bi, uword count, i32 offset)
Translate array of buffer pointers into buffer indices with offset.
Definition: buffer_funcs.h:276
vl_api_address_t dst
Definition: gre.api:61
static uword vlib_buffer_contents(vlib_main_t *vm, u32 buffer_index, u8 *contents)
Copy buffer contents to memory.
Definition: buffer_funcs.h:400
u8 len
Definition: ip_types.api:91
static void vlib_buffer_chain_init(vlib_buffer_t *first)
#define clib_atomic_sub_fetch(a, b)
Definition: atomics.h:31
static_always_inline u32 vlib_buffer_get_default_data_size(vlib_main_t *vm)
Definition: buffer_funcs.h:96
clib_spinlock_t buffer_known_hash_lockp
Definition: buffer.h:461
static void vlib_buffer_free_no_next(vlib_main_t *vm, u32 *buffers, u32 n_buffers)
Free buffers, does not free the buffer chain for each buffer.
Definition: buffer_funcs.h:907
static u16 vlib_buffer_clone(vlib_main_t *vm, u32 src_buffer, u32 *buffers, u16 n_buffers, u16 head_end_offset)
Create multiple clones of buffer and store them in the supplied array.
u32 flags
Definition: vhost_user.h:141
static_always_inline void vlib_buffer_pool_put(vlib_main_t *vm, u8 buffer_pool_index, u32 *buffers, u32 n_buffers)
Definition: buffer_funcs.h:698
void vlib_buffer_chain_validate(vlib_main_t *vm, vlib_buffer_t *first)
#define vec_free(V)
Free vector's memory (no header).
Definition: vec.h:342
#define VLIB_BUFFER_MAX_NUMA_NODES
Definition: buffer.h:445
clib_spinlock_t lock
Definition: buffer.h:436
static vlib_buffer_t * vlib_get_next_buffer(vlib_main_t *vm, vlib_buffer_t *b)
Get next buffer in buffer linklist, or zero for end of list.
Definition: buffer_funcs.h:350
void vlib_buffer_validate_alloc_free(vlib_main_t *vm, u32 *buffers, uword n_buffers, vlib_buffer_known_state_t expected_state)
Definition: buffer.c:338
static u32 vlib_buffer_space_left_at_end(vlib_main_t *vm, vlib_buffer_t *b)
static uword round_pow2(uword x, uword pow2)
Definition: clib.h:241
vlib_buffer_known_state_t
Definition: buffer_funcs.h:446
static_always_inline u64x2 u32x4_extend_to_u64x2(u32x4 v)
Definition: vector_neon.h:119
signed int i32
Definition: types.h:77
static u32 vlib_buffer_alloc_from_pool(vlib_main_t *vm, u32 *buffers, u32 n_buffers, u8 buffer_pool_index)
Allocate buffers from specific pool into supplied array.
Definition: buffer_funcs.h:530
#define uword_to_pointer(u, type)
Definition: types.h:136
vlib buffer structure definition and a few select access methods.
#define ASSERT(truth)
u8 data[128]
Definition: ipsec_types.api:87
static u16 vlib_buffer_clone_at_offset(vlib_main_t *vm, u32 src_buffer, u32 *buffers, u16 n_buffers, u16 head_end_offset, i16 offset)
Create multiple clones of buffer and store them in the supplied array.
static void vlib_buffer_advance(vlib_buffer_t *b, word l)
Advance current data pointer by the supplied (signed!) amount.
Definition: buffer.h:248
static uword vlib_buffer_index_length_in_chain(vlib_main_t *vm, u32 bi)
Get length in bytes of the buffer index buffer chain.
Definition: buffer_funcs.h:386
static void * vlib_buffer_ptr_from_index(uword buffer_mem_start, u32 buffer_index, uword offset)
Definition: buffer_funcs.h:71
static_always_inline void vlib_buffer_free_inline(vlib_main_t *vm, u32 *buffers, u32 n_buffers, int maybe_next)
Definition: buffer_funcs.h:732
static uword pointer_to_uword(const void *p)
Definition: types.h:131
template key/value backing page structure
Definition: bihash_doc.h:44
static u64 vlib_physmem_get_pa(vlib_main_t *vm, void *mem)
static u32 vlib_buffer_alloc_to_ring(vlib_main_t *vm, u32 *ring, u32 start, u32 ring_size, u32 n_buffers)
Allocate buffers into ring.
Definition: buffer_funcs.h:646
static_always_inline void vlib_buffer_copy_indices(u32 *dst, u32 *src, u32 n_indices)
Definition: buffer_funcs.h:102
static vlib_buffer_t * vlib_buffer_chain_buffer(vlib_main_t *vm, vlib_buffer_t *last, u32 next_bi)
#define VLIB_BUFFER_POOL_PER_THREAD_CACHE_SZ
Definition: buffer.h:413
u32 next_buffer
Next buffer for this linked-list of buffers.
Definition: buffer.h:140
u32 cached_buffers[VLIB_BUFFER_POOL_PER_THREAD_CACHE_SZ]
Definition: buffer.h:418
#define VLIB_BUFFER_TRACE_TRAJECTORY_INIT(b)
Definition: buffer.h:492
VLIB buffer representation.
Definition: buffer.h:102
u64 uword
Definition: types.h:112
uword buffer_mem_start
Definition: buffer.h:452
uword * buffer_known_hash
Definition: buffer.h:460
u32 default_data_size
Definition: buffer.h:467
struct clib_bihash_value offset
template key/value backing page structure
u64x4
Definition: vector_avx2.h:121
void * vlib_packet_template_get_packet(vlib_main_t *vm, vlib_packet_template_t *t, u32 *bi_result)
Definition: buffer.c:400
static u16 vlib_buffer_clone_256(vlib_main_t *vm, u32 src_buffer, u32 *buffers, u16 n_buffers, u16 head_end_offset, i16 offset)
Create a maximum of 256 clones of buffer and store them in the supplied array.
unsigned long long u32x4
Definition: ixge.c:28
static void vlib_packet_template_free(vlib_main_t *vm, vlib_packet_template_t *t)
static void vlib_buffer_free_one(vlib_main_t *vm, u32 buffer_index)
Free one buffer Shorthand to free a single buffer chain.
Definition: buffer_funcs.h:923
static_always_inline void vlib_get_buffers(vlib_main_t *vm, u32 *bi, vlib_buffer_t **b, int count)
Translate array of buffer indices into buffer pointers.
Definition: buffer_funcs.h:244
#define CLIB_CACHE_LINE_BYTES
Definition: cache.h:59
u32 total_length_not_including_first_buffer
Only valid for first buffer in chain.
Definition: buffer.h:167
void vlib_packet_template_init(vlib_main_t *vm, vlib_packet_template_t *t, void *packet_data, uword n_packet_data_bytes, uword min_n_buffers_each_alloc, char *fmt,...)
Definition: buffer.c:378
static u32 vlib_buffer_alloc(vlib_main_t *vm, u32 *buffers, u32 n_buffers)
Allocate buffers into supplied array.
Definition: buffer_funcs.h:630
static u8 vlib_buffer_pool_get_default_for_numa(vlib_main_t *vm, u32 numa_node)
Definition: buffer_funcs.h:163
static void vlib_buffer_free_from_ring_no_next(vlib_main_t *vm, u32 *ring, u32 start, u32 ring_size, u32 n_buffers)
Free buffers from ring without freeing tail buffers.
Definition: buffer_funcs.h:962
volatile u8 ref_count
Reference count for this buffer.
Definition: buffer.h:130
static vlib_buffer_t * vlib_get_buffer(vlib_main_t *vm, u32 buffer_index)
Translate buffer index into buffer pointer.
Definition: buffer_funcs.h:85
u32 opaque[10]
Opaque data used by sub-graphs for their own purposes.
Definition: buffer.h:153
signed short i16
Definition: types.h:46