#ifndef CLIB_MARCH_VARIANT

/* format_vnet_interface_output_trace (): */
  interface_output_trace_t *t = va_arg (*va, interface_output_trace_t *);
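For context, every vlib trace formatter follows this va_arg pattern: vm, node, then the node-specific trace record. A minimal sketch, assuming the sw_if_index and data[] fields listed in the symbol index below; the function name is hypothetical and the body is illustrative, not the upstream implementation:

static u8 *
example_format_output_trace (u8 *s, va_list *va)   /* hypothetical name */
{
  CLIB_UNUSED (vlib_main_t *vm) = va_arg (*va, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t *node) = va_arg (*va, vlib_node_t *);
  interface_output_trace_t *t = va_arg (*va, interface_output_trace_t *);
  vnet_main_t *vnm = vnet_get_main ();

  /* Interfaces can be deleted between capture and display, so check first. */
  if (t->sw_if_index != ~0
      && !pool_is_free_index (vnm->interface_main.sw_interfaces,
			      t->sw_if_index))
    s = format (s, "%U ", format_vnet_sw_if_index_name, vnm, t->sw_if_index);
  return s;
}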
/* vnet_interface_output_trace (): record trace data for buffers that carry
   VLIB_BUFFER_IS_TRACED, two at a time, then one at a time. */
  interface_output_trace_t *t0, *t1;
  /* ... */
  if (b0->flags & VLIB_BUFFER_IS_TRACED)
  /* ... */
  if (b1->flags & VLIB_BUFFER_IS_TRACED)
  /* ... */
  interface_output_trace_t *t0;
  /* ... */
  if (b0->flags & VLIB_BUFFER_IS_TRACED)
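Each traced-buffer branch above copies a snapshot of the packet into a freshly allocated trace record. A hedged sketch of the single-buffer case; the upstream code may clamp the copy to the buffer's current_length:

  if (b0->flags & VLIB_BUFFER_IS_TRACED)
    {
      interface_output_trace_t *t0 =
	vlib_add_trace (vm, node, b0, sizeof (t0[0]));
      t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_TX];
      t0->flags = b0->flags;
      /* Snapshot the start of the packet into the trace record. */
      clib_memcpy_fast (t0->data, vlib_buffer_get_current (b0),
			sizeof (t0->data));
    }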
/* vnet_interface_output_node_inline (): locals, the outer frame walk and the
   4-wide TX copy loop. */
  u32 n_left_to_tx, *from, *from_end, *to_tx;
  u32 n_bytes, n_buffers, n_packets;
  u32 n_bytes_b0, n_bytes_b1, n_bytes_b2, n_bytes_b3;
  /* ... */
  u32 current_config_index = ~0;
  u8 arc = im->output_feature_arc_index;
  /* ... */
  from_end = from + n_buffers;
  /* ... */
  while (from < from_end)
  /* ... */
  while (from + 8 <= from_end && n_left_to_tx >= 4)
  /* ... */
  u32 bi0, bi1, bi2, bi3;
  u32 tx_swif0, tx_swif1, tx_swif2, tx_swif3;
  /* ... */
  or_flags = b[0]->flags | b[1]->flags | b[2]->flags | b[3]->flags;
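OR-ing the four flag words lets the quad loop decide with a single branch whether any buffer in the group needs special handling (the checksum-offload block further down); only then are the buffers tested individually. A minimal sketch of the pattern, with a hypothetical helper name:

/* Hypothetical helper illustrating the or_flags pattern: the common case
   (no flag set in any of the four buffers) costs one test and one branch. */
static_always_inline int
any_flag_set_x4 (vlib_buffer_t **b, u32 mask)
{
  u32 or_flags = b[0]->flags | b[1]->flags | b[2]->flags | b[3]->flags;
  return (or_flags & mask) != 0;
}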
  ASSERT (b[0]->current_length > 0);
  ASSERT (b[1]->current_length > 0);
  ASSERT (b[2]->current_length > 0);
  ASSERT (b[3]->current_length > 0);
  /* ... */
  n_bytes += n_bytes_b0 + n_bytes_b1;
  n_bytes += n_bytes_b2 + n_bytes_b3;
  b[0]->current_config_index = current_config_index;
  b[1]->current_config_index = current_config_index;
  b[2]->current_config_index = current_config_index;
  b[3]->current_config_index = current_config_index;
  /* Argument fragments of four vlib_increment_combined_counter () calls,
     one per buffer, crediting the buffer's TX interface: */
  /* ... */ thread_index, tx_swif0, 1, /* ... */
  /* ... */ thread_index, tx_swif1, 1, /* ... */
  /* ... */ thread_index, tx_swif2, 1, /* ... */
  /* ... */ thread_index, tx_swif3, 1, /* ... */
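For context, a hedged sketch of one such counter update; the combined_sw_if_counters / VNET_INTERFACE_COUNTER_TX names and the rt->sw_if_index guard are standard vnet interface-counter usage but are assumptions here, not quoted from this file:

  /* Credit 1 packet and n_bytes_b0 bytes to interface tx_swif0 on this
     thread, but only when the buffer's TX interface differs from the
     interface this output node runs for. */
  if (PREDICT_FALSE (tx_swif0 != rt->sw_if_index))
    vlib_increment_combined_counter (im->combined_sw_if_counters +
				     VNET_INTERFACE_COUNTER_TX,
				     thread_index, tx_swif0,
				     1 /* packets */, n_bytes_b0 /* bytes */);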
  u32 vnet_buffer_offload_flags =
    (VNET_BUFFER_F_OFFLOAD_TCP_CKSUM |
     VNET_BUFFER_F_OFFLOAD_UDP_CKSUM |
     VNET_BUFFER_F_OFFLOAD_IP_CKSUM);
  if (or_flags & vnet_buffer_offload_flags)
    {
      /* Fix up checksums for each buffer that asked for offload; the callee
	 is vnet_calc_checksums_inline (), whose prototype appears below. */
      if (b[0]->flags & vnet_buffer_offload_flags)
	vnet_calc_checksums_inline (vm, b[0],
				    b[0]->flags & VNET_BUFFER_F_IS_IP4,
				    b[0]->flags & VNET_BUFFER_F_IS_IP6);
      if (b[1]->flags & vnet_buffer_offload_flags)
	vnet_calc_checksums_inline (vm, b[1],
				    b[1]->flags & VNET_BUFFER_F_IS_IP4,
				    b[1]->flags & VNET_BUFFER_F_IS_IP6);
      if (b[2]->flags & vnet_buffer_offload_flags)
	vnet_calc_checksums_inline (vm, b[2],
				    b[2]->flags & VNET_BUFFER_F_IS_IP4,
				    b[2]->flags & VNET_BUFFER_F_IS_IP6);
      if (b[3]->flags & vnet_buffer_offload_flags)
	vnet_calc_checksums_inline (vm, b[3],
				    b[3]->flags & VNET_BUFFER_F_IS_IP4,
				    b[3]->flags & VNET_BUFFER_F_IS_IP6);
    }
  /* Single-buffer tail loop: same steps as the quad loop, one packet at a
     time. */
  while (from + 1 <= from_end && n_left_to_tx >= 1)
  /* ... */
  ASSERT (b[0]->current_length > 0);
  /* ... */
  n_bytes += n_bytes_b0;
  /* ... */
  b[0]->current_config_index = current_config_index;
  /* ... */
  /* ... */ thread_index, tx_swif0, 1, /* ... */
  /* ... */
  if (b[0]->flags & (VNET_BUFFER_F_OFFLOAD_TCP_CKSUM |
		     VNET_BUFFER_F_OFFLOAD_UDP_CKSUM |
		     VNET_BUFFER_F_OFFLOAD_IP_CKSUM))
    vnet_calc_checksums_inline (vm, b[0],
				b[0]->flags & VNET_BUFFER_F_IS_IP4,
				b[0]->flags & VNET_BUFFER_F_IS_IP6);
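Putting those fragments together, the tail loop can be read as the following condensed sketch; it is illustrative and omits details of the upstream body (feature-arc bookkeeping, prefetching):

  while (from + 1 <= from_end && n_left_to_tx >= 1)
    {
      u32 bi0 = from[0];
      to_tx[0] = bi0;		/* copy the buffer index into the TX frame */
      from += 1;
      to_tx += 1;
      n_left_to_tx -= 1;

      b[0] = vlib_get_buffer (vm, bi0);
      ASSERT (b[0]->current_length > 0);

      n_bytes_b0 = vlib_buffer_length_in_chain (vm, b[0]);
      n_bytes += n_bytes_b0;
      n_packets += 1;

      /* If output features are enabled on this interface, point the buffer
	 at the arc's first configuration entry. */
      if (current_config_index != ~0)
	b[0]->current_config_index = current_config_index;

      /* Per-interface TX counter update and checksum fixup as shown above. */
      b += 1;
    }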
/* vnet_interface_pcap_tx_trace (): optionally capture TX packets to pcap,
   filtered by a classifier table when one is configured. */
				int sw_if_index_from_buffer)
  u32 n_left_from, *from;
  /* ... */
  if (sw_if_index_from_buffer == 0)
  /* ... */
  while (n_left_from > 0)
  /* ... */
  int classify_filter_result;
  /* ... */
  classify_filter_result = /* ... */;
  if (classify_filter_result)
  /* ... */
  if (sw_if_index_from_buffer)
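A hedged sketch of the per-packet body, filling in the elided classifier lookup; the pcap state pointer pp and its pcap_main / max_bytes_per_pkt fields are assumptions, the third argument to vnet_is_packet_traced_inline () is assumed to be 0, and the additional per-interface filtering in the upstream code is elided:

  u32 bi0 = from[0];
  vlib_buffer_t *b0 = vlib_get_buffer (vm, bi0);
  from++;
  n_left_from--;

  if (pp->filter_classify_table_index != ~0)
    {
      /* Capture only packets accepted by the classifier filter. */
      int classify_filter_result =
	vnet_is_packet_traced_inline (b0, pp->filter_classify_table_index,
				      0 /* assumed */);
      if (classify_filter_result)
	pcap_add_buffer (&pp->pcap_main, vm, bi0, pp->max_bytes_per_pkt);
    }
  else
    pcap_add_buffer (&pp->pcap_main, vm, bi0, pp->max_bytes_per_pkt);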
#ifndef CLIB_MARCH_VARIANT

/* vnet_interface_output_node_get (): walk the node's multiarch function
   registrations. */
  if (fnr->function == node->function)
  /* ... */
  fnr = fnr->next_registration;
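The registration walk looks like the following sketch; the helper name is hypothetical and the return value is illustrative:

/* Scan a node's multiarch registrations for the entry whose function
   pointer is currently installed on the node. */
static vlib_node_fn_registration_t *
find_active_fn_registration (vlib_node_t *node)
{
  vlib_node_fn_registration_t *fnr = node->node_fn_registrations;
  while (fnr)
    {
      if (fnr->function == node->function)
	return fnr;
      fnr = fnr->next_registration;
    }
  return 0;
}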
/* VLIB_NODE_FN (vnet_per_buffer_interface_output_node): dispatch each buffer
   to the output node of its TX interface, two at a time, then one at a
   time. */
  u32 n_left_to_next, *from, *to_next;
  u32 n_left_from, next_index;
  /* ... */
  next_index = node->cached_next_index;
  /* ... */
  while (n_left_from > 0)
  /* ... */
  while (n_left_from >= 4 && n_left_to_next >= 2)
  /* ... */
  u32 bi0, bi1, next0, next1;
  /* ... trailing arguments of vlib_validate_buffer_enqueue_x2 (): */
  /* ... */ n_left_to_next, bi0, bi1, next0, next1);
  /* ... */
  while (n_left_from > 0 && n_left_to_next > 0)
  /* ... trailing arguments of vlib_validate_buffer_enqueue_x1 (): */
  /* ... */ n_left_to_next, bi0, next0);
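The enqueue fragments are the tail arguments of the vlib_validate_buffer_enqueue_x2/x1 macros listed below; the surrounding structure is the standard vlib dispatch loop. A self-contained sketch of the single-buffer variant, with a hypothetical node function name and next-node selection simplified to a fixed value (the upstream node derives next0 from the buffer's TX interface):

static uword
example_dispatch_node_fn (vlib_main_t *vm, vlib_node_runtime_t *node,
			  vlib_frame_t *frame)
{
  u32 n_left_from, *from, *to_next, n_left_to_next, next_index;

  from = vlib_frame_vector_args (frame);
  n_left_from = frame->n_vectors;
  next_index = node->cached_next_index;

  while (n_left_from > 0)
    {
      /* Claim space in the frame going to next_index. */
      vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);

      while (n_left_from > 0 && n_left_to_next > 0)
	{
	  u32 bi0 = from[0];
	  u32 next0 = next_index;	/* upstream: chosen per buffer */

	  to_next[0] = bi0;
	  from += 1;
	  to_next += 1;
	  n_left_from -= 1;
	  n_left_to_next -= 1;

	  /* Moves the buffer to the right frame if next0 != next_index. */
	  vlib_validate_buffer_enqueue_x1 (vm, node, next_index, to_next,
					   n_left_to_next, bi0, next0);
	}

      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
    }
  return frame->n_vectors;
}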
/* format_vnet_error_trace (): */
  s = format (s, "IP4: %U -> %U", /* ... */
  /* ... */
  s = format (s, "IP6: %U -> %U", /* ... */
/* interface_trace_buffers (): add trace records for already-flagged
   buffers. */
  u32 n_left, *buffers;
  /* ... */
  if (b0->flags & VLIB_BUFFER_IS_TRACED)
  /* ... */
  if (b1->flags & VLIB_BUFFER_IS_TRACED)
  /* ... */
  if (b0->flags & VLIB_BUFFER_IS_TRACED)
/* drop_catchup_trace (): peek at the L2/L3 headers to record addresses in
   the error trace. */
  if (b->flags & VNET_BUFFER_F_L2_HDR_OFFSET_VALID)
  /* ... */
  case ETHERNET_TYPE_IP4:
    ip4 = (void *) (eh + 1);
    /* ... */
  case ETHERNET_TYPE_IP6:
    ip6 = (void *) (eh + 1);
    /* ... copy source and destination addresses: */
    /* ... */ sizeof (ip6_address_t));
    /* ... */ sizeof (ip6_address_t));
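A hedged sketch of that ethertype switch; the trace field names (t->details.*) are hypothetical, while the header types and copy helpers are the standard vnet ones:

  ethernet_header_t *eh = vlib_buffer_get_current (b);
  ip4_header_t *ip4;
  ip6_header_t *ip6;

  switch (clib_net_to_host_u16 (eh->type))
    {
    case ETHERNET_TYPE_IP4:
      ip4 = (void *) (eh + 1);
      t->details.ip4.src = ip4->src_address;	/* hypothetical trace fields */
      t->details.ip4.dst = ip4->dst_address;
      break;
    case ETHERNET_TYPE_IP6:
      ip6 = (void *) (eh + 1);
      clib_memcpy_fast (&t->details.ip6.src, &ip6->src_address,
			sizeof (ip6_address_t));
      clib_memcpy_fast (&t->details.ip6.dst, &ip6->dst_address,
			sizeof (ip6_address_t));
      break;
    }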
/* interface_drop_punt (): count and trace dropped/punted buffers. */
				 vnet_error_disposition_t disposition)
  /* ... */
  sw_if_index = sw_if_indices;
  /* ... */
  while (n_trace && n_left)
  /* ... */
  sw_if_index = sw_if_indices + off;
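The per-interface counter updates here are batched over runs of equal sw_if_index values. A hedged sketch of the idea using the helpers listed below; the simple-counter pointer cm and thread_index are assumptions, and the upstream code additionally folds in the per-buffer error counters:

  /* Credit whole runs of identical sw_if_index values with one counter
     update instead of one update per packet. */
  u32 n_left = frame->n_vectors;
  u32 *sw_if_index = sw_if_indices;

  while (n_left > 0)
    {
      uword run = clib_count_equal_u32 (sw_if_index, n_left);
      vlib_increment_simple_counter (cm, thread_index, sw_if_index[0],
				     run /* increment */);
      sw_if_index += run;
      n_left -= run;
    }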
/* pcap_drop_trace (): capture dropped packets, rewound to their L2 header,
   with the drop reason appended as text. */
  i16 save_current_data;
  u16 save_current_length;
  /* ... */
  if (b0->flags & VNET_BUFFER_F_L2_HDR_OFFSET_VALID)
  /* ... */
  u32 error_node_index;
  int drop_string_len;
  /* ... */
  int error_string_len = /* ... */;
  /* ... */
  drop_string_len = error_string_len + vec_len (n->name) + 2;
  /* ... */
  while (last->flags & VLIB_BUFFER_NEXT_PRESENT)
  /* ... */
  /* ... */ - drop_string_len)))
  /* ... */
  b0->flags &= ~(VLIB_BUFFER_TOTAL_LENGTH_VALID);
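A hedged sketch of the save / rewind / capture / restore framing around each packet; the string-append step is elided and the pp->pcap_main / max_bytes_per_pkt fields are assumptions:

  i16 save_current_data = b0->current_data;
  u16 save_current_length = b0->current_length;

  /* Rewind to the layer-2 header so the capture contains the full frame. */
  if (b0->flags & VNET_BUFFER_F_L2_HDR_OFFSET_VALID)
    vlib_buffer_advance (b0,
			 vnet_buffer (b0)->l2_hdr_offset - b0->current_data);

  /* ... append "<error-node>: <error-string>" to the last buffer here ... */

  pcap_add_buffer (&pp->pcap_main, vm, bi0, pp->max_bytes_per_pkt);

  /* Undo the rewind so the buffer continues through the graph unchanged. */
  b0->current_data = save_current_data;
  b0->current_length = save_current_length;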
#ifndef CLIB_MARCH_VARIANT

/* Node registrations: error-drop, error-punt and interface-output all carry
   u32 buffer indices. */
  .name = "error-drop",
  .vector_size = sizeof (u32),
  /* ... */
  .name = "error-punt",
  .vector_size = sizeof (u32),
  /* ... */
  .name = "interface-output",
  .vector_size = sizeof (u32),
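The .name / .vector_size initializers above come from VLIB_REGISTER_NODE blocks. An illustrative registration following the same pattern; the node name "example-drop" is hypothetical and other fields of the upstream registrations (node function, error strings, next nodes) are omitted:

/* Hypothetical node registration using the fields visible in this file. */
VLIB_REGISTER_NODE (example_drop_node) = {
  .name = "example-drop",
  .vector_size = sizeof (u32),
  .format_trace = format_vnet_error_trace,
  .flags = VLIB_NODE_FLAG_TRACE_SUPPORTED,
};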
/* interface_tx_node_fn (): batch buffers per TX interface, switching output
   frames only when the sw_if_index changes. */
  u32 last_sw_if_index = ~0;
  /* ... */
  u32 *from, *to_next = 0;
  /* ... */
  while (n_left_from > 0)
  /* ... */
  if (PREDICT_FALSE ((last_sw_if_index != sw_if_index0) || to_frame == 0))
  /* ... */
  last_sw_if_index = sw_if_index0;
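When the interface changes, the node flushes the current frame and opens a new one aimed at that interface's TX node; a hedged sketch using the frame helpers listed below (the hi->tx_node_index field name is an assumption here):

  if (PREDICT_FALSE ((last_sw_if_index != sw_if_index0) || to_frame == 0))
    {
      /* Flush whatever was accumulated for the previous interface. */
      if (to_frame)
	{
	  hi = vnet_get_sup_hw_interface (vnm, last_sw_if_index);
	  vlib_put_frame_to_node (vm, hi->tx_node_index, to_frame);
	}
      last_sw_if_index = sw_if_index0;
      hi = vnet_get_sup_hw_interface (vnm, sw_if_index0);
      to_frame = vlib_get_frame_to_node (vm, hi->tx_node_index);
      to_next = vlib_frame_vector_args (to_frame);
    }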
  .name = "interface-tx",
  .vector_size = sizeof (u32),
  /* ... */

/* Feature arc and feature registrations for the "interface-output" arc. */
  .arc_name = "interface-output",
  .last_in_arc = "interface-tx",
  /* ... */
  .arc_name = "interface-output",
  .node_name = "span-output",
  /* ... */
  .arc_name = "interface-output",
  .node_name = "ipsec-if-output",
  /* ... */
  .arc_name = "interface-output",
  .node_name = "interface-tx",
#ifndef CLIB_MARCH_VARIANT

/* vnet_per_buffer_interface_output_hw_interface_add_del (): */
  /* ... */ (vnm->vlib_main, vnet_per_buffer_interface_output_node.index, /* ... */

/* vnet_set_interface_output_node (): */
				u32 hw_if_index, u32 node_index)
  /* ... */ (vnm->vlib_main, vnet_per_buffer_interface_output_node.index,
	     node_index);
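The final fragments wire a device output node into vnet_per_buffer_interface_output_node. A hedged reconstruction of vnet_set_interface_output_node () based on the fragments and the helpers listed below; the hw->output_node_index field is an assumption inferred from the output_node_next_index field in the symbol index:

/* Sketch: record the per-interface output node and its slot in the
   per-buffer interface output node's next-node table. */
void
vnet_set_interface_output_node (vnet_main_t *vnm, u32 hw_if_index,
				u32 node_index)
{
  ASSERT (node_index);
  vnet_hw_interface_t *hw = vnet_get_hw_interface (vnm, hw_if_index);
  u32 next_index = vlib_node_add_next
    (vnm->vlib_main, vnet_per_buffer_interface_output_node.index, node_index);
  hw->output_node_next_index = next_index;
  hw->output_node_index = node_index;
}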
u8 * format_vnet_interface_output_trace(u8 *s, va_list *va)
vnet_config_main_t config_main
u32 flags
buffer flags: VLIB_BUFFER_FREE_LIST_INDEX_MASK: bits used to store free list index, VLIB_BUFFER_IS_TRACED: trace this buffer.
vlib_main_t vlib_global_main
VNET_FEATURE_ARC_INIT(interface_output, static)
#define hash_set(h, key, value)
#define CLIB_MARCH_FN_POINTER(fn)
void vnet_set_interface_output_node(vnet_main_t *vnm, u32 hw_if_index, u32 node_index)
Set interface output node - for interface registered without its output/tx nodes created because its ...
static_always_inline uword vnet_interface_output_node_inline(vlib_main_t *vm, vlib_node_runtime_t *node, vlib_frame_t *frame, vnet_main_t *vnm, vnet_hw_interface_t *hi, int do_tx_offloads)
static uword CLIB_MULTIARCH_FN() vnet_interface_output_node(vlib_main_t *vm, vlib_node_runtime_t *node, vlib_frame_t *frame)
static u32 vlib_get_trace_count(vlib_main_t *vm, vlib_node_runtime_t *rt)
#define hash_unset(h, key)
vl_api_wireguard_peer_flags_t flags
static void vlib_increment_combined_counter(vlib_combined_counter_main_t *cm, u32 thread_index, u32 index, u64 n_packets, u64 n_bytes)
Increment a combined counter.
u8 runtime_data[0]
Function dependent node-runtime data.
vnet_main_t * vnet_get_main(void)
static vnet_hw_interface_t * vnet_get_sup_hw_interface(vnet_main_t *vnm, u32 sw_if_index)
vnet_interface_main_t interface_main
i16 current_data
signed offset in data[], pre_data[] that we are currently processing.
#define clib_memcpy_fast(a, b, c)
clib_memset(h->entries, 0, sizeof(h->entries[0]) *entries)
#define VLIB_NODE_FLAG_TRACE_SUPPORTED
static vnet_hw_interface_t * vnet_get_hw_interface(vnet_main_t *vnm, u32 hw_if_index)
u16 current_length
Nbytes between current data and the end of this buffer.
static heap_elt_t * last(heap_header_t *h)
clib_error_t * vnet_per_buffer_interface_output_hw_interface_add_del(vnet_main_t *vnm, u32 hw_if_index, u32 is_create)
static void vnet_interface_output_trace(vlib_main_t *vm, vlib_node_runtime_t *node, vlib_frame_t *frame, uword n_buffers)
#define CLIB_MARCH_FN_POINTER_BY_NAME(fn, name)
#define STRUCT_OFFSET_OF(t, f)
static vnet_sw_interface_t * vnet_get_sw_interface(vnet_main_t *vnm, u32 sw_if_index)
static void vlib_increment_simple_counter(vlib_simple_counter_main_t *cm, u32 thread_index, u32 index, u64 increment)
Increment a simple counter.
static_always_inline int vnet_have_features(u8 arc, u32 sw_if_index)
void vnet_pcap_drop_trace_filter_add_del(u32 error_index, int is_add)
#define VLIB_NODE_FN(node)
static uword vlib_buffer_length_in_chain(vlib_main_t *vm, vlib_buffer_t *b)
Get length in bytes of the buffer chain.
format_function_t format_vnet_sw_if_index_name
static uword vlib_node_add_next(vlib_main_t *vm, uword node, uword next_node)
vlib_node_function_t * function
#define static_always_inline
uword * pcap_drop_filter_hash
#define vlib_prefetch_buffer_with_index(vm, bi, type)
Prefetch buffer metadata by buffer index The first 64 bytes of buffer contains most header informatio...
vlib_node_function_t * vnet_interface_output_node_get(vlib_main_t *vm)
struct vnet_error_trace_t_ vnet_error_trace_t
vnet_hw_interface_flags_t flags
#define vlib_prefetch_buffer_header(b, type)
Prefetch buffer metadata.
#define vec_elt_at_index(v, i)
Get vector value at index i checking that i is in bounds.
u32 filter_classify_table_index
vl_counter_t * counters_heap
vlib_frame_t * vlib_get_frame_to_node(vlib_main_t *vm, u32 to_node_index)
#define vlib_get_new_next_frame(vm, node, next_index, vectors, n_vectors_left)
struct _vlib_node_fn_registration vlib_node_fn_registration_t
A collection of simple counters.
Use the vpp classifier to decide whether to trace packets.
static uword interface_tx_node_fn(vlib_main_t *vm, vlib_node_runtime_t *node, vlib_frame_t *from_frame)
vlib_error_t error
Error code for buffers to be enqueued to error handler.
format_function_t format_vnet_sw_interface_name
vlib_node_t * vlib_get_node_by_name(vlib_main_t *vm, u8 *name)
static u8 * format_vnet_error_trace(u8 *s, va_list *va)
vlib_error_main_t error_main
static void drop_catchup_trace(vlib_main_t *vm, vlib_node_runtime_t *node, vlib_buffer_t *b)
VNET_FEATURE_INIT(span_tx, static)
static __clib_warn_unused_result int vlib_trace_buffer(vlib_main_t *vm, vlib_node_runtime_t *r, u32 next_index, vlib_buffer_t *b, int follow_chain)
uword vlib_error_drop_buffers(vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u32 next_buffer_stride, u32 n_buffers, u32 next_index, u32 drop_error_node, u32 drop_error_code)
void vlib_put_frame_to_node(vlib_main_t *vm, u32 to_node_index, vlib_frame_t *f)
static void * vlib_buffer_get_current(vlib_buffer_t *b)
Get pointer to current data to process.
static void * vnet_get_config_data(vnet_config_main_t *cm, u32 *config_index, u32 *next_index, u32 n_data_bytes)
u8 data[128 - 2 *sizeof(u32)]
vlib_simple_counter_main_t * sw_if_counters
format_function_t * format_buffer
u32 node_index
Node index.
#define vlib_validate_buffer_enqueue_x2(vm, node, next_index, to_next, n_left_to_next, bi0, bi1, next0, next1)
Finish enqueueing two buffers forward in the graph.
#define vlib_validate_buffer_enqueue_x1(vm, node, next_index, to_next, n_left_to_next, bi0, next0)
Finish enqueueing one buffer forward in the graph.
#define vlib_get_next_frame(vm, node, next_index, vectors, n_vectors_left)
Get pointer to next frame vector data by (vlib_node_runtime_t, next_index).
static void interface_trace_buffers(vlib_main_t *vm, vlib_node_runtime_t *node, vlib_frame_t *frame)
CLIB_MARCH_FN_REGISTRATION(vnet_interface_output_node)
#define VLIB_REGISTER_NODE(x,...)
static_always_inline void vlib_buffer_enqueue_to_next(vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u16 *nexts, uword count)
static void pcap_drop_trace(vlib_main_t *vm, vnet_interface_main_t *im, vnet_pcap_t *pp, vlib_frame_t *f)
#define pool_is_free_index(P, I)
Use free bitmap to query whether given index is free.
void vlib_put_next_frame(vlib_main_t *vm, vlib_node_runtime_t *r, u32 next_index, u32 n_vectors_left)
Release pointer to next frame vector data.
static int vnet_is_packet_traced_inline(vlib_buffer_t *b, u32 classify_table_index, int func)
vnet_is_packet_traced
vlib_node_fn_registration_t * node_fn_registrations
u32 trace_classify_table_index
vlib_main_t vlib_node_runtime_t * node
#define hash_create(elts, value_bytes)
u32 output_node_next_index
u8 output_feature_arc_index
uword() vlib_node_function_t(struct vlib_main_t *vm, struct vlib_node_runtime_t *node, struct vlib_frame_t *frame)
static void vlib_buffer_advance(vlib_buffer_t *b, word l)
Advance current data pointer by the supplied (signed!) amount.
static vlib_node_function_t CLIB_MULTIARCH_FN(vnet_interface_output_node)
static_always_inline void vnet_interface_pcap_tx_trace(vlib_main_t *vm, vlib_node_runtime_t *node, vlib_frame_t *frame, int sw_if_index_from_buffer)
#define VNET_FEATURES(...)
VNET_HW_INTERFACE_ADD_DEL_FUNCTION(vnet_per_buffer_interface_output_hw_interface_add_del)
#define VLIB_BUFFER_DEFAULT_DATA_SIZE
#define vec_len(v)
Number of elements in vector (rvalue-only, NULL tolerant)
static_always_inline u32 vnet_get_feature_config_index(u8 arc, u32 sw_if_index)
u32 next_buffer
Next buffer for this linked-list of buffers.
vlib_node_main_t node_main
vlib_main_t vlib_node_runtime_t vlib_frame_t * frame
VLIB buffer representation.
vnet_sw_interface_t * sw_interfaces
static void * vlib_frame_vector_args(vlib_frame_t *f)
Get pointer to frame vector data.
static_always_inline void vnet_calc_checksums_inline(vlib_main_t *vm, vlib_buffer_t *b, int is_ip4, int is_ip6)
#define clib_strnlen(s, m)
static_always_inline uword clib_count_equal_u32(u32 *data, uword max_count)
static vlib_node_t * vlib_get_node(vlib_main_t *vm, u32 i)
Get vlib node by index.
u16 flags
Copy of main node flags.
void * vlib_add_trace(vlib_main_t *vm, vlib_node_runtime_t *r, vlib_buffer_t *b, u32 n_data_bytes)
static void vlib_set_trace_count(vlib_main_t *vm, vlib_node_runtime_t *rt, u32 count)
static_always_inline uword interface_drop_punt(vlib_main_t *vm, vlib_node_runtime_t *node, vlib_frame_t *frame, vnet_error_disposition_t disposition)
static_always_inline void vlib_get_buffers(vlib_main_t *vm, u32 *bi, vlib_buffer_t **b, int count)
Translate array of buffer indices into buffer pointers.
#define VLIB_NODE_FLAG_TRACE
static_always_inline vnet_feature_config_main_t * vnet_feature_get_config_main(u16 arc)
static void pcap_add_buffer(pcap_main_t *pm, struct vlib_main_t *vm, u32 buffer_index, u32 n_bytes_in_trace)
Add buffer (vlib_buffer_t) to the trace.
static vlib_buffer_t * vlib_get_buffer(vlib_main_t *vm, u32 buffer_index)
Translate buffer index into buffer pointer.
vl_api_interface_index_t sw_if_index