#define foreach_l2_emulation \
  _(IP4, "Extract IPv4")     \
  _(IP6, "Extract IPv6")

#define _(sym,str) L2_EMULATION_ERROR_##sym,
#define _(sym,string) string,
#define _(sym,str) L2_EMULATION_NEXT_##sym,
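/* Illustrative sketch, not part of the original source: applying each _()
 * definition above to foreach_l2_emulation expands, roughly, to the error
 * enum, the error-string table and the next-node enum (the surrounding
 * declarations shown here are assumed for illustration):
 *
 *   enum { L2_EMULATION_ERROR_IP4, L2_EMULATION_ERROR_IP6 };
 *   static char *l2_emulation_error_strings[] = { "Extract IPv4", "Extract IPv6" };
 *   enum { L2_EMULATION_NEXT_IP4, L2_EMULATION_NEXT_IP6 };
 */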
u32 n_left_from, *from, *to_next;

n_left_from = frame->n_vectors;
from = vlib_frame_vector_args (frame);
next_index = node->cached_next_index;
while (n_left_from > 0)
  {
    u32 n_left_to_next;

    vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);

    /* dual loop: handle two buffers per iteration */
    while (n_left_from >= 4 && n_left_to_next >= 2)
      {
        u32 sw_if_index0, sw_if_index1;
        u16 ether_type0, ether_type1;
        u32 next0 = ~0, next1 = ~0;

        bi0 = to_next[0] = from[0];
        bi1 = to_next[1] = from[1];
        /* the ethertype is carried in the last two bytes of the L2 header */
        ether_type0 = clib_net_to_host_u16 (*(u16 *) (h0 + l2_len0 - 2));
        ether_type1 = clib_net_to_host_u16 (*(u16 *) (h1 + l2_len1 - 2));
        switch (ether_type0)
          {
          case ETHERNET_TYPE_IP4:
            next0 = L2_EMULATION_NEXT_IP4;
            break;
          case ETHERNET_TYPE_IP6:
            next0 = L2_EMULATION_NEXT_IP6;
            break;
          }
        switch (ether_type1)
          {
          case ETHERNET_TYPE_IP4:
            next1 = L2_EMULATION_NEXT_IP4;
            break;
          case ETHERNET_TYPE_IP6:
            next1 = L2_EMULATION_NEXT_IP6;
            break;
          }
        if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)
                           && (b0->flags & VLIB_BUFFER_IS_TRACED)))
          /* ... add a trace record for b0 ... */;
        if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)
                           && (b1->flags & VLIB_BUFFER_IS_TRACED)))
          /* ... add a trace record for b1 ... */;

        /* packets not redirected above continue on the L2 input feature arc */
        if (~0 == next0)
          next0 = vnet_l2_feature_next (b0, l2_emulation_main.l2_input_feat_next,
                                        L2INPUT_FEAT_L2_EMULATION);
        if (~0 == next1)
          next1 = vnet_l2_feature_next (b1, l2_emulation_main.l2_input_feat_next,
                                        L2INPUT_FEAT_L2_EMULATION);
        vlib_validate_buffer_enqueue_x2 (vm, node, next_index,
                                         to_next, n_left_to_next,
                                         bi0, bi1, next0, next1);
      }
    /* single loop: handle one buffer per iteration */
    while (n_left_from > 0 && n_left_to_next > 0)
      {
        ether_type0 = clib_net_to_host_u16 (*(u16 *) (h0 + l2_len0 - 2));
        switch (ether_type0)
          {
          case ETHERNET_TYPE_IP4:
            next0 = L2_EMULATION_NEXT_IP4;
            break;
          case ETHERNET_TYPE_IP6:
            next0 = L2_EMULATION_NEXT_IP6;
            break;
          }
        if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)
                           && (b0->flags & VLIB_BUFFER_IS_TRACED)))
          /* ... add a trace record for b0 ... */;

        if (~0 == next0)
          next0 = vnet_l2_feature_next (b0, l2_emulation_main.l2_input_feat_next,
                                        L2INPUT_FEAT_L2_EMULATION);
        vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
                                         to_next, n_left_to_next,
                                         bi0, next0);
      }

    vlib_put_next_frame (vm, node, next_index, n_left_to_next);
  }

vlib_node_increment_counter (vm, node->node_index,
                             L2_EMULATION_ERROR_IP4, ip4_hits);
vlib_node_increment_counter (vm, node->node_index,
                             L2_EMULATION_ERROR_IP6, ip6_hits);

return frame->n_vectors;
VLIB_REGISTER_NODE (l2_emulation_node) = {
  .name = "l2-emulation",
  .vector_size = sizeof (u32),
  /* ... */
  .next_nodes = {
    [L2_EMULATION_NEXT_IP4] = "ip4-input",
    [L2_EMULATION_NEXT_IP6] = "ip6-input",
  },
};
u32 flags
Buffer flags: VLIB_BUFFER_FREE_LIST_INDEX_MASK: bits used to store the free-list index; VLIB_BUFFER_IS_TRACED: trace this buffer.
static u8 * format_l2_emulation_trace(u8 *s, va_list *args)
Grouping of global data for the L2 emulation feature.
u32 l2_input_feat_next[32]
Next nodes for L2 output features.
#define VLIB_NODE_FN(node)
static u32 vnet_l2_feature_next(vlib_buffer_t *b, u32 *next_nodes, u32 feat_bit)
Return the graph node index for the feature corresponding to the next set bit after clearing the curr...
static char * l2_emulation_error_strings[]
l2_emulation_main_t l2_emulation_main
static void * vlib_buffer_get_current(vlib_buffer_t *b)
Get pointer to current data to process.
#define vlib_validate_buffer_enqueue_x2(vm, node, next_index, to_next, n_left_to_next, bi0, bi1, next0, next1)
Finish enqueueing two buffers forward in the graph.
#define vlib_validate_buffer_enqueue_x1(vm, node, next_index, to_next, n_left_to_next, bi0, next0)
Finish enqueueing one buffer forward in the graph.
#define vlib_get_next_frame(vm, node, next_index, vectors, n_vectors_left)
Get pointer to next frame vector data by (vlib_node_runtime_t, next_index).
static void vlib_node_increment_counter(vlib_main_t *vm, u32 node_index, u32 counter_index, u64 increment)
#define VLIB_REGISTER_NODE(x,...)
void vlib_put_next_frame(vlib_main_t *vm, vlib_node_runtime_t *r, u32 next_index, u32 n_vectors_left)
Release pointer to next frame vector data.
vlib_node_runtime_t * node
u8 enabled
Enabled or Disabled.
static void vlib_buffer_advance(vlib_buffer_t *b, word l)
Advance current data pointer by the supplied (signed!) amount.
static void * vlib_add_trace(vlib_main_t *vm, vlib_node_runtime_t *r, vlib_buffer_t *b, u32 n_data_bytes)
vlib_node_registration_t l2_emulation_node
(constructor) VLIB_REGISTER_NODE (l2_emulation_node)
VLIB buffer representation.
static void * vlib_frame_vector_args(vlib_frame_t *f)
Get pointer to frame vector data.
#define foreach_l2_emulation
l2_emulation_t * l2_emulations
Per-interface vector of emulation configs.
vlib_frame_t * frame
#define VLIB_NODE_FLAG_TRACE
static vlib_buffer_t * vlib_get_buffer(vlib_main_t *vm, u32 buffer_index)
Translate buffer index into buffer pointer.
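The buffer and frame helpers listed above combine into the standard VPP dispatch loop. The following is a minimal, generic sketch assembled from the signatures shown here, assuming the usual VLIB_NODE_FN parameters (vm, node, frame) and <vlib/vlib.h>; it is not the l2-emulation node itself, and the choice of next0 is left as a placeholder.

u32 *from = vlib_frame_vector_args (frame);
u32 n_left_from = frame->n_vectors;
u32 next_index = node->cached_next_index;

while (n_left_from > 0)
  {
    u32 *to_next, n_left_to_next;

    vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);

    while (n_left_from > 0 && n_left_to_next > 0)
      {
        u32 bi0 = from[0];
        vlib_buffer_t *b0 = vlib_get_buffer (vm, bi0);  /* inspect b0 to pick next0 */
        u32 next0 = 0;                                  /* placeholder classification */

        /* speculatively enqueue bi0 to the current next frame */
        to_next[0] = bi0;
        from += 1;
        to_next += 1;
        n_left_from -= 1;
        n_left_to_next -= 1;

        /* fix up the enqueue if next0 differs from the frame's next index */
        vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
                                         to_next, n_left_to_next,
                                         bi0, next0);
      }

    vlib_put_next_frame (vm, node, next_index, n_left_to_next);
  }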