s = format (s, "IP4_HOP_BY_HOP: next index %d", /* ... */);

#define foreach_ip4_hop_by_hop_error \
_(PROCESSED, "Pkts with ip4 hop-by-hop options")

#define _(sym,str) IP4_HOP_BY_HOP_ERROR_##sym,
#define _(sym,string) string,

u32 n_left_from, * from, * to_next;
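The two _(sym,...) definitions are the two expansions of the same X-macro list: foreach_ip4_hop_by_hop_error is walked once to build the error enum and once to build the matching counter strings in ip4_hop_by_hop_error_strings[]. A minimal sketch of how that expansion is conventionally written (the IP4_HOP_BY_HOP_N_ERROR terminator is an assumed name, not taken from this listing):

typedef enum
{
#define _(sym,str) IP4_HOP_BY_HOP_ERROR_##sym,
  foreach_ip4_hop_by_hop_error
#undef _
  IP4_HOP_BY_HOP_N_ERROR,   /* assumed terminator, counts the error strings */
} ip4_hop_by_hop_error_t;

static char *ip4_hop_by_hop_error_strings[] =
{
#define _(sym,string) string,
  foreach_ip4_hop_by_hop_error
#undef _
};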
while (n_left_from > 0)
  {
    u32 n_left_to_next;
    vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);
    while (n_left_from >= 4 && n_left_to_next >= 2)   /* dual-buffer loop */
      {
        u32 next0 = IP4_HOP_BY_HOP_NEXT_INTERFACE_OUTPUT;
        u32 next1 = IP4_HOP_BY_HOP_NEXT_INTERFACE_OUTPUT;
        u32 sw_if_index0, sw_if_index1;
        /* ... */
        to_next[0] = bi0 = from[0];
        to_next[1] = bi1 = from[1];
        /* ... */
        t->sw_if_index = sw_if_index0;
        /* ... */
        t->sw_if_index = sw_if_index1;
        /* ... */
        vlib_validate_buffer_enqueue_x2 (vm, node, next_index,
                                         to_next, n_left_to_next,
                                         bi0, bi1, next0, next1);
      }
    while (n_left_from > 0 && n_left_to_next > 0)     /* single-buffer loop */
      {
        /* ... */
        vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
                                         to_next, n_left_to_next, bi0, next0);
      }
    vlib_put_next_frame (vm, node, next_index, n_left_to_next);
  }
vlib_node_increment_counter (vm, ip4_hop_by_hop_node.index,
                             IP4_HOP_BY_HOP_ERROR_PROCESSED, processed);
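The listing elides the per-packet work inside the single-buffer loop. A minimal sketch of what that body usually looks like in a node of this shape, built from the buffer and trace helpers in the reference list below; the trace struct name ip4_hop_by_hop_trace_t, the vnet_buffer() RX-interface lookup, and the ip4_header_t cast are assumptions, not taken from this listing:

    u32 bi0 = from[0];
    vlib_buffer_t *b0 = vlib_get_buffer (vm, bi0);      /* buffer index -> pointer */
    ip4_header_t *ip0 = vlib_buffer_get_current (b0);   /* assumed: current data is the IPv4 header */
    u32 next0 = IP4_HOP_BY_HOP_NEXT_INTERFACE_OUTPUT;
    u32 sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_RX];  /* assumed RX lookup */

    /* ... hop-by-hop option processing on ip0 ... */

    if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)
                       && (b0->flags & VLIB_BUFFER_IS_TRACED)))
      {
        ip4_hop_by_hop_trace_t *t =                      /* trace struct name assumed */
          vlib_add_trace (vm, node, b0, sizeof (*t));
        t->sw_if_index = sw_if_index0;
        t->next_index = next0;
      }
    processed++;
    /* from/to_next/n_left bookkeeping omitted; vlib_validate_buffer_enqueue_x1
       above patches the frame if next0 differs from next_index */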
VLIB_REGISTER_NODE (ip4_hop_by_hop_node) = {
  .name = "ip4-hop-by-hop",
  .vector_size = sizeof (u32),
  /* ... */
};

VLIB_REGISTER_NODE (ip4_add_hop_by_hop_node) = {
  .name = "ip4-add-hop-by-hop",
  .vector_size = sizeof (u32),
  /* ... */
};

VLIB_REGISTER_NODE (ip4_pop_hop_by_hop_node) = {
  .name = "ip4-pop-hop-by-hop",
  .vector_size = sizeof (u32),
  /* ... */
};
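Only the .name and .vector_size initializers survive in the fragment above. A fuller registration for the first node, sketched from the other symbols on this page (ip4_hop_by_hop_node_fn, format_ip4_hop_by_hop_trace, ip4_hop_by_hop_error_strings); the next-node wiring and the IP4_HOP_BY_HOP_N_NEXT count are assumed examples, not taken from this listing:

VLIB_REGISTER_NODE (ip4_hop_by_hop_node) = {
  .function = ip4_hop_by_hop_node_fn,
  .name = "ip4-hop-by-hop",
  .vector_size = sizeof (u32),
  .format_trace = format_ip4_hop_by_hop_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .n_errors = ARRAY_LEN (ip4_hop_by_hop_error_strings),
  .error_strings = ip4_hop_by_hop_error_strings,
  .n_next_nodes = IP4_HOP_BY_HOP_N_NEXT,                          /* assumed */
  .next_nodes = {
    [IP4_HOP_BY_HOP_NEXT_INTERFACE_OUTPUT] = "interface-output",  /* assumed */
  },
};

As the "(constructor)" entries below indicate, these registrations are emitted as constructors, so the nodes are registered automatically at load time rather than from ip4_hop_by_hop_init().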
void vlib_put_next_frame(vlib_main_t *vm, vlib_node_runtime_t *r, u32 next_index, u32 n_vectors_left)
ip_lookup_next_t lookup_next_index
static uword ip4_hop_by_hop_node_fn(vlib_main_t *vm, vlib_node_runtime_t *node, vlib_frame_t *frame)
vlib_node_registration_t ip4_add_hop_by_hop_node
(constructor) VLIB_REGISTER_NODE (ip4_add_hop_by_hop_node)
ip4_hop_by_hop_main_t ip4_hop_by_hop_main
typedef struct _vlib_node_registration vlib_node_registration_t
ip_lookup_main_t lookup_main
vlib_node_registration_t ip4_pop_hop_by_hop_node
(constructor) VLIB_REGISTER_NODE (ip4_pop_hop_by_hop_node)
always_inline void * vlib_buffer_get_current(vlib_buffer_t *b)
Get pointer to current data to process.
vnet_main_t * vnet_get_main(void)
i16 current_data
signed offset in data[], pre_data[] that we are currently processing.
#define VLIB_INIT_FUNCTION(x)
ip6_hop_by_hop_main_t * hm
#define IP4_LOOKUP_NEXT_NODES
static u8 * format_ip4_hop_by_hop_trace(u8 *s, va_list *args)
always_inline void * vlib_frame_vector_args(vlib_frame_t *f)
static char * ip4_hop_by_hop_error_strings[]
always_inline void vlib_node_increment_counter(vlib_main_t *vm, u32 node_index, u32 counter_index, u64 increment)
#define vlib_validate_buffer_enqueue_x2(vm, node, next_index, to_next, n_left_to_next, bi0, bi1, next0, next1)
#define vlib_validate_buffer_enqueue_x1(vm, node, next_index, to_next, n_left_to_next, bi0, next0)
#define vlib_get_next_frame(vm, node, next_index, vectors, n_vectors_left)
static clib_error_t * ip4_hop_by_hop_init(vlib_main_t *vm)
#define CLIB_PREFETCH(addr, size, type)
#define foreach_ip4_hop_by_hop_error
#define VLIB_NODE_FLAG_TRACE
vlib_node_registration_t ip4_hop_by_hop_node
(constructor) VLIB_REGISTER_NODE (ip4_hop_by_hop_node)
#define VLIB_BUFFER_IS_TRACED
always_inline ip_adjacency_t * ip_get_adjacency(ip_lookup_main_t *lm, u32 adj_index)
always_inline void * vlib_add_trace(vlib_main_t *vm, vlib_node_runtime_t *r, vlib_buffer_t *b, u32 n_data_bytes)
#define vlib_prefetch_buffer_header(b, type)
Prefetch buffer metadata.
#define VLIB_REGISTER_NODE(x,...)
#define CLIB_CACHE_LINE_BYTES
u32 flags
buffer flags: VLIB_BUFFER_IS_TRACED: trace this buffer.
always_inline vlib_buffer_t * vlib_get_buffer(vlib_main_t *vm, u32 buffer_index)
Translate buffer index into buffer pointer.
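Several of the entries above (vlib_prefetch_buffer_header, CLIB_PREFETCH, CLIB_CACHE_LINE_BYTES) exist to hide memory latency in the dual-buffer loop: while buffers 0 and 1 are being processed, the headers and data of buffers 2 and 3 are pulled into cache. A minimal sketch of that conventional prefetch block, assuming the usual p2/p3 naming (not taken from this listing):

    {
      vlib_buffer_t *p2, *p3;

      p2 = vlib_get_buffer (vm, from[2]);
      p3 = vlib_get_buffer (vm, from[3]);

      /* prefetch the buffer metadata of the next pair */
      vlib_prefetch_buffer_header (p2, LOAD);
      vlib_prefetch_buffer_header (p3, LOAD);

      /* prefetch the packet data the node will touch */
      CLIB_PREFETCH (p2->data, CLIB_CACHE_LINE_BYTES, STORE);
      CLIB_PREFETCH (p3->data, CLIB_CACHE_LINE_BYTES, STORE);
    }

This block sits at the top of the while (n_left_from >= 4 && n_left_to_next >= 2) loop, which is why that loop requires at least four buffers even though it only enqueues two per iteration.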