41 u32 n_left_from, next_index, *from, *to_next;
49 while (n_left_from > 0)
55 while (n_left_from >= 4 && n_left_to_next >= 2)
59 u32 next0 = NSH_NODE_NEXT_DROP, next1 = NSH_NODE_NEXT_DROP;
60 uword * entry0, *entry1;
61 nsh_base_header_t * hdr0 = 0, *hdr1 = 0;
62 u32 header_len0 = 0, header_len1 = 0;
63 u32 nsp_nsi0, nsp_nsi1;
96 nsp_nsi0 = hdr0->nsp_nsi;
97 header_len0 = hdr0->length * 4;
100 nsp_nsi1 = hdr1->nsp_nsi;
101 header_len1 = hdr1->length * 4;
107 error0 = NSH_NODE_ERROR_NO_MAPPING;
115 error0 = NSH_NODE_ERROR_NO_MAPPING;
130 error0 = NSH_NODE_ERROR_INVALID_OPTIONS;
144 error0 = NSH_NODE_ERROR_NO_ENTRY;
148 trace0: b0->error = error0 ? node->errors[error0] : 0;
160 error1 = NSH_NODE_ERROR_NO_MAPPING;
168 error1 = NSH_NODE_ERROR_NO_MAPPING;
173 next1 = map1->next_node;
183 error1 = NSH_NODE_ERROR_INVALID_OPTIONS;
197 error1 = NSH_NODE_ERROR_NO_ENTRY;
202 trace1: b1->error = error1 ? node->errors[error1] : 0;
211 n_left_to_next, bi0, bi1, next0, next1);
215 while (n_left_from > 0 && n_left_to_next > 0)
219 u32 next0 = NSH_NODE_NEXT_DROP;
221 nsh_base_header_t * hdr0 = 0;
238 nsp_nsi0 = hdr0->nsp_nsi;
239 header_len0 = hdr0->length * 4;
245 error0 = NSH_NODE_ERROR_NO_MAPPING;
254 error0 = NSH_NODE_ERROR_NO_MAPPING;
269 error0 = NSH_NODE_ERROR_INVALID_OPTIONS;
283 error0 = NSH_NODE_ERROR_NO_ENTRY;
287 trace00: b0->error = error0 ? node->errors[error0] : 0;
296 n_left_to_next, bi0, next0);
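The fragments above are excerpts from nsh_pop_inline; the single-buffer half (source lines 215-296) follows the standard vlib dispatch pattern. The sketch below condenses that pattern into one place, with the NSP/NSI lookups elided and with the assumption that nsh_base_header_t and NSH_NODE_NEXT_DROP come from the plugin headers; it is an illustration, not the function as it appears in the source.

#include <vlib/vlib.h>
/* nsh_base_header_t and NSH_NODE_NEXT_DROP are assumed to come from the
 * NSH plugin headers; error handling is simplified to a comment. */

static uword
nsh_pop_loop_sketch (vlib_main_t * vm, vlib_node_runtime_t * node,
                     vlib_frame_t * from_frame)
{
  u32 n_left_from, next_index, *from, *to_next;

  from = vlib_frame_vector_args (from_frame);   /* incoming buffer indices */
  n_left_from = from_frame->n_vectors;
  next_index = node->cached_next_index;

  while (n_left_from > 0)
    {
      u32 n_left_to_next;

      vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);

      while (n_left_from > 0 && n_left_to_next > 0)
        {
          u32 bi0 = from[0];
          u32 next0 = NSH_NODE_NEXT_DROP;
          vlib_buffer_t *b0;
          nsh_base_header_t *hdr0;
          u32 header_len0;

          /* speculatively enqueue the buffer to the current next frame */
          to_next[0] = bi0;
          from += 1;
          to_next += 1;
          n_left_from -= 1;
          n_left_to_next -= 1;

          b0 = vlib_get_buffer (vm, bi0);
          hdr0 = vlib_buffer_get_current (b0);
          header_len0 = hdr0->length * 4;   /* NSH length is in 4-byte words */

          /* ... mapping/entry lookups set next0 (and b0->error on failure);
             on success the NSH header is popped: */
          if (next0 != NSH_NODE_NEXT_DROP)
            vlib_buffer_advance (b0, header_len0);

          /* fix up the speculation if next0 differs from next_index */
          vlib_validate_buffer_enqueue_x1 (vm, node, next_index, to_next,
                                           n_left_to_next, bi0, next0);
        }

      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
    }
  return from_frame->n_vectors;
}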
324 #define _(sym,string) string,
332 .vector_size = sizeof (u32),
343 #define _(s,n) [NSH_NODE_NEXT_##s] = n,
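Lines 324, 332 and 343 above are the usual preprocessor idiom for expanding foreach_nsh_node_error and foreach_nsh_node_next into the node's error strings and next-node table. A sketch of how they sit inside VLIB_REGISTER_NODE follows; the node name and every field not visible in the fragments are assumptions, not copied from the source.

/* Sketch only: field values not shown in the fragments above are guesses. */
static char *nsh_pop_node_error_strings[] = {
#define _(sym,string) string,
  foreach_nsh_node_error
#undef _
};

VLIB_REGISTER_NODE (nsh_pop_node) = {
  .name = "nsh-pop",                        /* assumed node name */
  .vector_size = sizeof (u32),              /* one buffer index per vector slot */
  .format_trace = format_nsh_pop_node_map_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .n_errors = ARRAY_LEN (nsh_pop_node_error_strings),
  .error_strings = nsh_pop_node_error_strings,
  .n_next_nodes = NSH_NODE_N_NEXT,          /* assumed count for foreach_nsh_node_next */
  .next_nodes = {
#define _(s,n) [NSH_NODE_NEXT_##s] = n,
    foreach_nsh_node_next
#undef _
  },
};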
u32 flags
buffer flags: VLIB_BUFFER_FREE_LIST_INDEX_MASK: bits used to store free list index, VLIB_BUFFER_IS_TRACED: trace this buffer.
#define clib_memcpy_fast(a, b, c)
#define foreach_nsh_node_next
static char * nsh_pop_node_error_strings[]
#define VLIB_NODE_FN(node)
u8 * format_nsh_pop_node_map_trace(u8 *s, va_list *args)
vlib_error_t * errors
Vector of errors for this node.
#define vlib_prefetch_buffer_header(b, type)
Prefetch buffer metadata.
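In the dual-buffer loop (the while (n_left_from >= 4 && n_left_to_next >= 2) branch above), these prefetch helpers are typically used to warm the buffers of the next iteration while the current pair is processed. An illustrative preamble; the two-cache-line prefetch depth is an assumption:

/* Inside the dual loop: prefetch the next pair of buffers (p2, p3)
 * before touching the current pair (b0, b1). */
{
  vlib_buffer_t *p2, *p3;

  p2 = vlib_get_buffer (vm, from[2]);
  p3 = vlib_get_buffer (vm, from[3]);

  vlib_prefetch_buffer_header (p2, LOAD);                        /* metadata */
  vlib_prefetch_buffer_header (p3, LOAD);

  CLIB_PREFETCH (p2->data, 2 * CLIB_CACHE_LINE_BYTES, LOAD);     /* packet data */
  CLIB_PREFETCH (p3->data, 2 * CLIB_CACHE_LINE_BYTES, LOAD);
}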
vlib_node_registration_t nsh_pop_node
(constructor) VLIB_REGISTER_NODE (nsh_pop_node)
vl_api_fib_path_type_t type
vlib_error_t error
Error code for buffers to be enqueued to error handler.
#define pool_elt_at_index(p, i)
Returns pointer to element at given index.
static void * vlib_buffer_get_current(vlib_buffer_t *b)
Get pointer to current data to process.
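In this node the current data pointer is the NSH base header, so the fragments above read the lookup key and the header size directly from it. A per-buffer fragment, assuming b0 and the field names shown in the listing:

nsh_base_header_t *hdr0 = vlib_buffer_get_current (b0);  /* header at current_data */
u32 nsp_nsi0 = hdr0->nsp_nsi;        /* service path id + service index, lookup key */
u32 header_len0 = hdr0->length * 4;  /* NSH length field counts 4-byte words */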
#define vlib_validate_buffer_enqueue_x2(vm, node, next_index, to_next, n_left_to_next, bi0, bi1, next0, next1)
Finish enqueueing two buffers forward in the graph.
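The dual loop enqueues speculatively: both buffer indices go straight into the frame for the cached next_index, and this macro repairs the enqueue afterwards if next0 or next1 turn out to differ. A fragment of that idiom:

/* speculative double enqueue; bi0/bi1 already read from the input frame */
to_next[0] = bi0;
to_next[1] = bi1;
to_next += 2;
n_left_to_next -= 2;

/* ... per-buffer processing picks next0 / next1 ... */

vlib_validate_buffer_enqueue_x2 (vm, node, next_index, to_next,
                                 n_left_to_next, bi0, bi1, next0, next1);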
#define vlib_validate_buffer_enqueue_x1(vm, node, next_index, to_next, n_left_to_next, bi0, next0)
Finish enqueueing one buffer forward in the graph.
#define vlib_get_next_frame(vm, node, next_index, vectors, n_vectors_left)
Get pointer to next frame vector data by (vlib_node_runtime_t, next_index).
#define VLIB_REGISTER_NODE(x,...)
#define CLIB_PREFETCH(addr, size, type)
u8 * format_nsh_header(u8 *s, va_list *args)
u8 * format_nsh_pop_header(u8 *s, va_list *args)
void vlib_put_next_frame(vlib_main_t *vm, vlib_node_runtime_t *r, u32 next_index, u32 n_vectors_left)
Release pointer to next frame vector data.
#define foreach_nsh_node_error
u32 mapped_nsp_nsi
Key for nsh_header_t entry to map to.
u16 cached_next_index
Next frame index that vector arguments were last enqueued to last time this node ran.
static void vlib_buffer_advance(vlib_buffer_t *b, word l)
Advance current data pointer by the supplied (signed!) amount.
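This is the call that actually pops the header in this node: advancing by header_len0 moves current_data past the NSH base header and its metadata (and shrinks current_length to match); since the amount is signed, a negative value would expose headers again instead.

/* pop the NSH header (header_len0 bytes) so the inner packet starts at
 * the new current data pointer; the advance amount is signed */
vlib_buffer_advance (b0, (word) header_len0);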
static void * vlib_add_trace(vlib_main_t *vm, vlib_node_runtime_t *r, vlib_buffer_t *b, u32 n_data_bytes)
u8 * format_nsh_node_map_trace(u8 *s, va_list *args)
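vlib_add_trace and the trace formatters above implement the usual per-packet trace pattern: record a small struct when VLIB_BUFFER_IS_TRACED is set on the buffer, and format it when the trace is displayed. A sketch with a hypothetical trace struct; the real layout lives in the plugin sources:

typedef struct
{
  u32 nsp_nsi;                          /* hypothetical field captured for the trace */
} nsh_pop_trace_sketch_t;

static_always_inline void
nsh_pop_trace_sketch (vlib_main_t * vm, vlib_node_runtime_t * node,
                      vlib_buffer_t * b0, u32 nsp_nsi0)
{
  if (PREDICT_FALSE (b0->flags & VLIB_BUFFER_IS_TRACED))
    {
      nsh_pop_trace_sketch_t *tr = vlib_add_trace (vm, node, b0, sizeof (*tr));
      tr->nsp_nsi = nsp_nsi0;
    }
}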
vlib_buffer_t
VLIB buffer representation.
static uword nsh_pop_inline(vlib_main_t *vm, vlib_node_runtime_t *node, vlib_frame_t *from_frame)
static void * vlib_frame_vector_args(vlib_frame_t *f)
Get pointer to frame vector data.
nsh_option_map_t * nsh_md2_lookup_option(u16 class, u8 type)
#define hash_get_mem(h, key)
#define CLIB_CACHE_LINE_BYTES
uword * nsh_mapping_by_key
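nsh_mapping_by_key is the hash consulted in the NO_MAPPING / NO_ENTRY checks above: the NSP/NSI value keys into the hash, and the stored index is then resolved through the mapping pool. A sketch of that lookup, where nm stands for an assumed pointer to the plugin's main struct and the pool member name is likewise assumed:

uword *entry0 = hash_get_mem (nm->nsh_mapping_by_key, &nsp_nsi0);
if (PREDICT_FALSE (entry0 == 0))
  {
    error0 = NSH_NODE_ERROR_NO_MAPPING;   /* later turned into b0->error */
    goto trace0;
  }

nsh_map_t *map0 = pool_elt_at_index (nm->nsh_mappings, entry0[0]);
next0 = map0->next_node;                  /* next graph node for this service path */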
static vlib_buffer_t * vlib_get_buffer(vlib_main_t *vm, u32 buffer_index)
Translate buffer index into buffer pointer.