u32 feat_next_node_index[32];
s = format (s,
            "INACL: sw_if_index %d, next_index %d, table %d, offset %d",
            t->sw_if_index, t->next_index, t->table_index, t->offset);
#define foreach_l2_inacl_error                  \
_(NONE, "valid input ACL packets")              \
_(MISS, "input ACL misses")                     \
_(HIT, "input ACL hits")                        \
_(CHAIN_HIT, "input ACL hits after chain walk") \
_(TABLE_MISS, "input ACL table-miss drops")     \
_(SESSION_DENY, "input ACL session deny drops")

#define _(sym,str) L2_INACL_ERROR_##sym,

#define _(sym,string) string,

u32 n_left_from, * from, * to_next;
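The foreach_l2_inacl_error list above is expanded twice through the two `_()` definitions shown: once to generate the node's error enum and once to generate the counter-name strings. A minimal expansion sketch (the L2_INACL_N_ERROR sentinel is assumed, following the usual VPP convention):

typedef enum
{
#define _(sym,str) L2_INACL_ERROR_##sym,
  foreach_l2_inacl_error
#undef _
  L2_INACL_N_ERROR,               /* assumed sentinel: number of error codes */
} l2_inacl_error_t;

static char *l2_inacl_error_strings[] = {
#define _(sym,string) string,
  foreach_l2_inacl_error
#undef _
};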
while (n_left_from > 2)
  {
    u32 sw_if_index0, sw_if_index1;
    u32 table_index0, table_index1;
    /* ... */
    vnet_buffer (b0)->l2_classify.table_index = table_index0;
    /* ... */
    vnet_buffer (b1)->l2_classify.table_index = table_index1;
while (n_left_from > 0)
  {
    /* ... */
    vnet_buffer (b0)->l2_classify.table_index = table_index0;
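The two loops above form the first pass of the node function: for each buffer, the classify table configured on the receive interface and the packet hash are stashed in the buffer metadata so the second pass can do the actual lookups. A hedged single-buffer sketch of that pass, assuming the input-ACL main pointer `am`, the classify main `vcm`, and the L2 table id `tid` used by this node:

while (n_left_from > 0)
  {
    vlib_buffer_t *b0 = vlib_get_buffer (vm, from[0]);
    u8 *h0 = b0->data;
    u32 sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_RX];

    /* Table configured for this interface; ~0 means no ACL table. */
    u32 table_index0 =
      am->classify_table_index_by_sw_if_index[tid][sw_if_index0];

    vnet_buffer (b0)->l2_classify.table_index = table_index0;

    if (table_index0 != ~0)
      {
        vnet_classify_table_t *t0 =
          pool_elt_at_index (vcm->tables, table_index0);

        /* Pre-compute the hash and start the bucket prefetch early. */
        vnet_buffer (b0)->l2_classify.hash =
          vnet_classify_hash_packet (t0, (u8 *) h0);
        vnet_classify_prefetch_bucket (t0,
                                       vnet_buffer (b0)->l2_classify.hash);
      }

    from++;
    n_left_from--;
  }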
while (n_left_from > 0)
  {
    vlib_get_next_frame (vm, node, next_index,
                         to_next, n_left_to_next);

    while (n_left_from > 0 && n_left_to_next > 0)
      {
        vnet_classify_entry_t * e0;
table_index1 = vnet_buffer (p1)->l2_classify.table_index;
table_index0 = vnet_buffer (b0)->l2_classify.table_index;
/* ... */
vnet_buffer (b0)->l2.feature_bitmap &= ~L2INPUT_FEAT_ACL;
next0 = (e0->next_index < ACL_NEXT_INDEX_N_NEXT) ?
  e0->next_index : next0;
error0 = (next0 == ACL_NEXT_INDEX_DENY) ?
  L2_INACL_ERROR_SESSION_DENY : L2_INACL_ERROR_NONE;
/* ... */
error0 = (next0 == ACL_NEXT_INDEX_DENY) ?
  L2_INACL_ERROR_TABLE_MISS : L2_INACL_ERROR_NONE;
e0 = vnet_classify_find_entry (t0, (u8 *) h0, hash0, now);
/* ... */
next0 = (e0->next_index < ACL_NEXT_INDEX_N_NEXT) ?
  e0->next_index : next0;
error0 = (next0 == ACL_NEXT_INDEX_DENY) ?
  L2_INACL_ERROR_SESSION_DENY : L2_INACL_ERROR_NONE;
vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
                                 to_next, n_left_to_next,
                                 bi0, next0);
/* ... */
vlib_node_increment_counter (vm, node->node_index,
                             L2_INACL_ERROR_CHAIN_HIT,
                             chain_hits);
.name = "l2-input-acl",
.vector_size = sizeof (u32),
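Only the .name and .vector_size initializers survive in the fragment above. A hedged sketch of how the rest of the VLIB_REGISTER_NODE block typically looks for this node, wiring in the error strings generated earlier; the function and trace-formatter symbol names, the internal node type, and the "error-drop" deny disposition are assumptions:

VLIB_REGISTER_NODE (l2_inacl_node, static) = {
  .function = l2_inacl_node_fn,            /* assumed node function name */
  .name = "l2-input-acl",
  .vector_size = sizeof (u32),
  .format_trace = format_l2_inacl_trace,   /* trace formatter shown above */
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN (l2_inacl_error_strings),
  .error_strings = l2_inacl_error_strings,

  .n_next_nodes = ACL_NEXT_INDEX_N_NEXT,
  .next_nodes = {
    [ACL_NEXT_INDEX_DENY] = "error-drop",  /* assumed drop disposition */
  },
};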
u64 vnet_classify_hash_packet(vnet_classify_table_t *t, u8 *h)
void vlib_put_next_frame(vlib_main_t *vm, vlib_node_runtime_t *r, u32 next_index, u32 n_vectors_left)
u32 * classify_table_index_by_sw_if_index[INPUT_ACL_N_TABLES]
struct _vlib_node_registration vlib_node_registration_t
vnet_classify_main_t * vnet_classify_main
vnet_main_t * vnet_get_main(void)
#define VLIB_INIT_FUNCTION(x)
static void vnet_classify_prefetch_bucket(vnet_classify_table_t *t, u64 hash)
always_inline void * vlib_frame_vector_args(vlib_frame_t *f)
static void vnet_classify_prefetch_entry(vnet_classify_table_t *t, u64 hash)
#define pool_elt_at_index(p, i)
always_inline void vlib_node_increment_counter(vlib_main_t *vm, u32 node_index, u32 counter_index, u64 increment)
#define vlib_validate_buffer_enqueue_x1(vm, node, next_index, to_next, n_left_to_next, bi0, next0)
#define vlib_get_next_frame(vm, node, next_index, vectors, n_vectors_left)
vlib_error_t error
Error code for buffers to be enqueued to error handler.
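In this node the buffer error is chosen from the node-local error enum and mapped through the runtime's error table; a minimal sketch of the pattern behind the SESSION_DENY/TABLE_MISS selections in the fragments above (next0, b0, and node are taken from the surrounding loop):

/* A deny disposition means the packet will be counted and dropped. */
u8 error0 = (next0 == ACL_NEXT_INDEX_DENY) ?
  L2_INACL_ERROR_SESSION_DENY : L2_INACL_ERROR_NONE;

/* node->errors[] maps the node-local code to a global vlib_error_t. */
b0->error = node->errors[error0];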
static uword vnet_classify_get_offset(vnet_classify_table_t *t, vnet_classify_entry_t *v)
u32 feat_next_node_index[32]
#define CLIB_PREFETCH(addr, size, type)
struct _vnet_classify_main vnet_classify_main_t
#define VLIB_NODE_FLAG_TRACE
#define VLIB_BUFFER_IS_TRACED
always_inline void vlib_buffer_advance(vlib_buffer_t *b, word l)
Advance current data pointer by the supplied (signed!) amount.
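When a classify entry matches, the node applies the entry's advance to the buffer before dispatching it; a small sketch assuming a matched entry e0 with the usual advance field:

if (e0)
  {
    /* e0->advance is signed: positive values strip bytes from the front,
       negative values move current_data back toward the packet start. */
    vlib_buffer_advance (b0, e0->advance);
  }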
always_inline u32 feat_bitmap_get_next_node_index(u32 *next_nodes, u32 bitmap)
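The node clears its own bit in the L2 input feature bitmap and then asks this helper for the next enabled feature node, which is the pattern behind the feature_bitmap line in the fragments above; a minimal sketch, with the module main pointer `msm` holding feat_next_node_index[] as an assumption:

/* This feature is done with the packet: clear our bit ... */
vnet_buffer (b0)->l2.feature_bitmap &= ~L2INPUT_FEAT_ACL;

/* ... then dispatch to the next enabled L2 input feature. */
next0 = feat_bitmap_get_next_node_index (msm->feat_next_node_index,
                                         vnet_buffer (b0)->l2.feature_bitmap);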
always_inline void * vlib_add_trace(vlib_main_t *vm, vlib_node_runtime_t *r, vlib_buffer_t *b, u32 n_data_bytes)
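The trace formatter at the top of the file consumes records added with this helper; a hedged sketch of the usual add-trace guard and fill-in for this node, where the trace struct name and field sources are assumptions based on the fields printed by the formatter:

if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)
                   && (b0->flags & VLIB_BUFFER_IS_TRACED)))
  {
    l2_inacl_trace_t *t = vlib_add_trace (vm, node, b0, sizeof (*t));
    t->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
    t->next_index = next0;
    t->table_index = t0 ? t0 - vcm->tables : ~0;
    t->offset = (t0 && e0) ? vnet_classify_get_offset (t0, e0) : ~0;
  }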
#define vlib_prefetch_buffer_header(b, type)
Prefetch buffer metadata.
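In the dual-buffer first pass, buffer headers and packet data a couple of packets ahead are prefetched so they are warm when their turn comes; a sketch of the customary prefetch preamble, with the exact stride (from[2]/from[3]) being an assumption:

/* Prefetch ahead of the pair being processed now. */
{
  vlib_buffer_t *p2 = vlib_get_buffer (vm, from[2]);
  vlib_buffer_t *p3 = vlib_get_buffer (vm, from[3]);

  vlib_prefetch_buffer_header (p2, STORE);
  vlib_prefetch_buffer_header (p3, STORE);

  CLIB_PREFETCH (p2->data, CLIB_CACHE_LINE_BYTES, STORE);
  CLIB_PREFETCH (p3->data, CLIB_CACHE_LINE_BYTES, STORE);
}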
#define VLIB_REGISTER_NODE(x,...)
always_inline f64 vlib_time_now(vlib_main_t *vm)
#define CLIB_CACHE_LINE_BYTES
u32 flags
buffer flags: VLIB_BUFFER_IS_TRACED: trace this buffer.
always_inline vlib_buffer_t * vlib_get_buffer(vlib_main_t *vm, u32 buffer_index)
Translate buffer index into buffer pointer.
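Together with the enqueue macros in this list, vlib_get_buffer is used in the standard speculative-enqueue pattern: the buffer index is copied into the current next frame first, then fixed up if the computed next index turns out to differ; a minimal sketch:

/* Speculatively enqueue b0 to the current next frame. */
bi0 = from[0];
to_next[0] = bi0;
from += 1;
to_next += 1;
n_left_from -= 1;
n_left_to_next -= 1;

b0 = vlib_get_buffer (vm, bi0);   /* index -> buffer pointer */

/* ... classification decides next0 ... */

/* Verify the speculation; repairs the enqueue if next0 != next_index. */
vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
                                 to_next, n_left_to_next,
                                 bi0, next0);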
vnet_classify_entry_t * vnet_classify_find_entry(vnet_classify_table_t *t, u8 *h, u64 hash, f64 now)
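The second-pass lookup ties several of these helpers together: fetch the table, probe it with the hash precomputed in the first pass, and on a miss walk the chained tables, rehashing per table; a hedged sketch assuming the next_table_index field of vnet_classify_table_t:

t0 = pool_elt_at_index (vcm->tables, table_index0);
hash0 = vnet_buffer (b0)->l2_classify.hash;

e0 = vnet_classify_find_entry (t0, (u8 *) h0, hash0, now);
if (e0 == 0)
  {
    /* Miss: walk the table chain until a hit or the last table. */
    while (t0->next_table_index != ~0)
      {
        t0 = pool_elt_at_index (vcm->tables, t0->next_table_index);
        hash0 = vnet_classify_hash_packet (t0, (u8 *) h0);
        e0 = vnet_classify_find_entry (t0, (u8 *) h0, hash0, now);
        if (e0)
          break;                 /* chain hit */
      }
  }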
always_inline void feat_bitmap_init_next_nodes(vlib_main_t *vm, u32 node_index, u32 num_features, char **feat_names, u32 *next_nodes)
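The feat_next_node_index[32] table declared at the top of the file is typically filled in by an init function using this helper and registered with VLIB_INIT_FUNCTION; a sketch under the assumption of the usual l2_inacl_main struct, l2_inacl_node registration, and the l2input_get_feat_names() accessor for the L2 input feature names:

static clib_error_t *
l2_inacl_init (vlib_main_t * vm)
{
  l2_inacl_main_t *mp = &l2_inacl_main;

  mp->vlib_main = vm;
  mp->vnet_main = vnet_get_main ();

  /* Initialize the feature next-node indexes for this graph node. */
  feat_bitmap_init_next_nodes (vm,
                               l2_inacl_node.index,
                               L2INPUT_N_FEAT,
                               l2input_get_feat_names (),
                               mp->feat_next_node_index);
  return 0;
}

VLIB_INIT_FUNCTION (l2_inacl_init);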