FD.io VPP v16.06
Vector Packet Processing
l2_input_acl.c
/*
 * l2_input_acl.c : layer 2 input acl processing
 *
 * Copyright (c) 2013 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <vlib/vlib.h>
#include <vnet/vnet.h>
#include <vnet/pg/pg.h>
#include <vnet/ethernet/ethernet.h>
#include <vnet/ethernet/packet.h>
#include <vnet/ip/ip_packet.h>
#include <vnet/ip/ip4_packet.h>
#include <vnet/ip/ip6_packet.h>
#include <vlib/cli.h>
#include <vnet/l2/l2_input.h>
#include <vnet/l2/feat_bitmap.h>

#include <vppinfra/error.h>
#include <vppinfra/hash.h>
#include <vppinfra/cache.h>

#include <vnet/classify/vnet_classify.h>
#include <vnet/classify/input_acl.h>

typedef struct {

  // Next nodes for each feature
  u32 feat_next_node_index[32];

  /* convenience variables */
  vlib_main_t * vlib_main;
  vnet_main_t * vnet_main;
} l2_inacl_main_t;

typedef struct {
  u32 sw_if_index;
  u32 next_index;
  u32 table_index;
  u32 offset;
} l2_inacl_trace_t;

/* packet trace format function */
static u8 * format_l2_inacl_trace (u8 * s, va_list * args)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
  l2_inacl_trace_t * t = va_arg (*args, l2_inacl_trace_t *);

  s = format (s, "INACL: sw_if_index %d, next_index %d, table %d, offset %d",
              t->sw_if_index, t->next_index, t->table_index, t->offset);
  return s;
}

l2_inacl_main_t l2_inacl_main;

static vlib_node_registration_t l2_inacl_node;

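/*
 * feat_next_node_index is populated by l2_inacl_init() below via
 * feat_bitmap_init_next_nodes(), so the node can map the remaining L2
 * input feature bitmap of a packet to the next graph node to visit.
 */
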
#define foreach_l2_inacl_error                  \
_(NONE, "valid input ACL packets")              \
_(MISS, "input ACL misses")                     \
_(HIT, "input ACL hits")                        \
_(CHAIN_HIT, "input ACL hits after chain walk") \
_(TABLE_MISS, "input ACL table-miss drops")     \
_(SESSION_DENY, "input ACL session deny drops")

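/*
 * The error list above is an X-macro: it is expanded once to build the
 * l2_inacl_error_t enum and once more to build the matching counter-name
 * strings reported for this node.
 */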

typedef enum {
#define _(sym,str) L2_INACL_ERROR_##sym,
  foreach_l2_inacl_error
#undef _
  L2_INACL_N_ERROR,
} l2_inacl_error_t;

static char * l2_inacl_error_strings[] = {
#define _(sym,string) string,
  foreach_l2_inacl_error
#undef _
};

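/*
 * Node dispatch function.  The frame is walked twice: a first pass
 * computes the classifier hash for every packet and prefetches the
 * matching hash bucket; a second pass performs the session lookup
 * (walking chained tables on a miss) and enqueues each packet to its
 * next feature node or to the deny disposition.
 */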
static uword
l2_inacl_node_fn (vlib_main_t * vm,
                  vlib_node_runtime_t * node,
                  vlib_frame_t * frame)
{
  u32 n_left_from, * from, * to_next;
  acl_next_index_t next_index;
  l2_inacl_main_t * msm = &l2_inacl_main;
  input_acl_main_t * am = &input_acl_main;
  vnet_classify_main_t * vcm = am->vnet_classify_main;
  input_acl_table_id_t tid = INPUT_ACL_TABLE_L2;
  f64 now = vlib_time_now (vm);
  u32 hits = 0;
  u32 misses = 0;
  u32 chain_hits = 0;

  from = vlib_frame_vector_args (frame);
  n_left_from = frame->n_vectors; /* number of packets to process */
  next_index = node->cached_next_index;

  /* First pass: compute hashes */
  while (n_left_from > 2)
    {
      vlib_buffer_t * b0, * b1;
      u32 bi0, bi1;
      u8 * h0, * h1;
      u32 sw_if_index0, sw_if_index1;
      u32 table_index0, table_index1;
      vnet_classify_table_t * t0, * t1;

      /* prefetch next iteration */
      {
        vlib_buffer_t * p1, * p2;

        p1 = vlib_get_buffer (vm, from[1]);
        p2 = vlib_get_buffer (vm, from[2]);

        vlib_prefetch_buffer_header (p1, STORE);
        CLIB_PREFETCH (p1->data, CLIB_CACHE_LINE_BYTES, STORE);
        vlib_prefetch_buffer_header (p2, STORE);
        CLIB_PREFETCH (p2->data, CLIB_CACHE_LINE_BYTES, STORE);
      }

      bi0 = from[0];
      b0 = vlib_get_buffer (vm, bi0);
      h0 = b0->data;

      bi1 = from[1];
      b1 = vlib_get_buffer (vm, bi1);
      h1 = b1->data;

      sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_RX];
      table_index0 = am->classify_table_index_by_sw_if_index[tid][sw_if_index0];

      sw_if_index1 = vnet_buffer (b1)->sw_if_index[VLIB_RX];
      table_index1 = am->classify_table_index_by_sw_if_index[tid][sw_if_index1];

      t0 = pool_elt_at_index (vcm->tables, table_index0);

      t1 = pool_elt_at_index (vcm->tables, table_index1);

      vnet_buffer(b0)->l2_classify.hash =
        vnet_classify_hash_packet (t0, (u8 *) h0);

      vnet_classify_prefetch_bucket (t0, vnet_buffer(b0)->l2_classify.hash);

      vnet_buffer(b1)->l2_classify.hash =
        vnet_classify_hash_packet (t1, (u8 *) h1);

      vnet_classify_prefetch_bucket (t1, vnet_buffer(b1)->l2_classify.hash);

      vnet_buffer(b0)->l2_classify.table_index = table_index0;

      vnet_buffer(b1)->l2_classify.table_index = table_index1;

      from += 2;
      n_left_from -= 2;
    }

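  /* Finish the remaining packets (at most two) one at a time: same hash
   * computation and bucket prefetch, without the pairwise prefetching. */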
  while (n_left_from > 0)
    {
      vlib_buffer_t * b0;
      u32 bi0;
      u8 * h0;
      u32 sw_if_index0;
      u32 table_index0;
      vnet_classify_table_t * t0;

      bi0 = from[0];
      b0 = vlib_get_buffer (vm, bi0);
      h0 = b0->data;

      sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_RX];
      table_index0 = am->classify_table_index_by_sw_if_index[tid][sw_if_index0];

      t0 = pool_elt_at_index (vcm->tables, table_index0);
      vnet_buffer(b0)->l2_classify.hash =
        vnet_classify_hash_packet (t0, (u8 *) h0);

      vnet_buffer(b0)->l2_classify.table_index = table_index0;
      vnet_classify_prefetch_bucket (t0, vnet_buffer(b0)->l2_classify.hash);

      from++;
      n_left_from--;
    }

  next_index = node->cached_next_index;
  from = vlib_frame_vector_args (frame);
  n_left_from = frame->n_vectors;

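  /* Second pass: per-packet classifier lookup and enqueue to the next node */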
  while (n_left_from > 0)
    {
      u32 n_left_to_next;

      vlib_get_next_frame (vm, node, next_index,
                           to_next, n_left_to_next);

      /* Not enough load/store slots to dual loop... */
      while (n_left_from > 0 && n_left_to_next > 0)
        {
          u32 bi0;
          vlib_buffer_t * b0;
          u32 next0 = ACL_NEXT_INDEX_DENY;
          u32 table_index0;
          vnet_classify_table_t * t0;
          vnet_classify_entry_t * e0;
          u64 hash0;
          u8 * h0;
          u8 error0;

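          /*
           * Prefetch the classifier entry for the packet three slots ahead
           * so its cache line is resident by the time it is processed.
           */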
          /* Stride 3 seems to work best */
          if (PREDICT_TRUE (n_left_from > 3))
            {
              vlib_buffer_t * p1 = vlib_get_buffer(vm, from[3]);
              vnet_classify_table_t * tp1;
              u32 table_index1;
              u64 phash1;

              table_index1 = vnet_buffer(p1)->l2_classify.table_index;

              if (PREDICT_TRUE (table_index1 != ~0))
                {
                  tp1 = pool_elt_at_index (vcm->tables, table_index1);
                  phash1 = vnet_buffer(p1)->l2_classify.hash;
                  vnet_classify_prefetch_entry (tp1, phash1);
                }
            }

          /* speculatively enqueue b0 to the current next frame */
          bi0 = from[0];
          to_next[0] = bi0;
          from += 1;
          to_next += 1;
          n_left_from -= 1;
          n_left_to_next -= 1;

          b0 = vlib_get_buffer (vm, bi0);
          h0 = b0->data;
          table_index0 = vnet_buffer(b0)->l2_classify.table_index;
          e0 = 0;
          t0 = 0;

          /* Feature bitmap update */
          vnet_buffer(b0)->l2.feature_bitmap &= ~L2INPUT_FEAT_ACL;

          vnet_buffer(b0)->l2_classify.opaque_index = ~0;
          /* Determine the next node */
          next0 = feat_bitmap_get_next_node_index(msm->feat_next_node_index,
                                                  vnet_buffer(b0)->l2.feature_bitmap);

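          /*
           * If a classifier table is attached to this interface, look the
           * packet up.  On a miss, walk the chain of linked tables until a
           * session matches or the last table's miss disposition applies.
           */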
          if (PREDICT_TRUE(table_index0 != ~0))
            {
              hash0 = vnet_buffer(b0)->l2_classify.hash;
              t0 = pool_elt_at_index (vcm->tables, table_index0);

              e0 = vnet_classify_find_entry (t0, (u8 *) h0, hash0,
                                             now);
              if (e0)
                {
                  vnet_buffer(b0)->l2_classify.opaque_index
                    = e0->opaque_index;
                  vlib_buffer_advance (b0, e0->advance);

                  next0 = (e0->next_index < ACL_NEXT_INDEX_N_NEXT)?
                           e0->next_index:next0;

                  hits++;

                  error0 = (next0 == ACL_NEXT_INDEX_DENY)?
                            L2_INACL_ERROR_SESSION_DENY:L2_INACL_ERROR_NONE;
                  b0->error = node->errors[error0];
                }
              else
                {
                  while (1)
                    {
                      if (PREDICT_TRUE(t0->next_table_index != ~0))
                        t0 = pool_elt_at_index (vcm->tables,
                                                t0->next_table_index);
                      else
                        {
                          next0 = (t0->miss_next_index < ACL_NEXT_INDEX_N_NEXT)?
                                   t0->miss_next_index:next0;

                          misses++;

                          error0 = (next0 == ACL_NEXT_INDEX_DENY)?
                                    L2_INACL_ERROR_TABLE_MISS:L2_INACL_ERROR_NONE;
                          b0->error = node->errors[error0];
                          break;
                        }

                      hash0 = vnet_classify_hash_packet (t0, (u8 *) h0);
                      e0 = vnet_classify_find_entry
                        (t0, (u8 *) h0, hash0, now);
                      if (e0)
                        {
                          vlib_buffer_advance (b0, e0->advance);
                          next0 = (e0->next_index < ACL_NEXT_INDEX_N_NEXT)?
                                   e0->next_index:next0;
                          hits++;
                          chain_hits++;

                          error0 = (next0 == ACL_NEXT_INDEX_DENY)?
                                    L2_INACL_ERROR_SESSION_DENY:L2_INACL_ERROR_NONE;
                          b0->error = node->errors[error0];
                          break;
                        }
                    }
                }
            }

          if (PREDICT_FALSE((node->flags & VLIB_NODE_FLAG_TRACE)
                            && (b0->flags & VLIB_BUFFER_IS_TRACED)))
            {
              l2_inacl_trace_t *t =
                vlib_add_trace (vm, node, b0, sizeof (*t));
              t->sw_if_index = vnet_buffer(b0)->sw_if_index[VLIB_RX];
              t->next_index = next0;
              t->table_index = t0 ? t0 - vcm->tables : ~0;
              t->offset = e0 ? vnet_classify_get_offset (t0, e0): ~0;
            }

          /* verify speculative enqueue, maybe switch current next frame */
          vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
                                           to_next, n_left_to_next,
                                           bi0, next0);
        }

      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
    }

  vlib_node_increment_counter (vm, node->node_index,
                               L2_INACL_ERROR_MISS,
                               misses);
  vlib_node_increment_counter (vm, node->node_index,
                               L2_INACL_ERROR_HIT,
                               hits);
  vlib_node_increment_counter (vm, node->node_index,
                               L2_INACL_ERROR_CHAIN_HIT,
                               chain_hits);
  return frame->n_vectors;
}

VLIB_REGISTER_NODE (l2_inacl_node,static) = {
  .function = l2_inacl_node_fn,
  .name = "l2-input-acl",
  .vector_size = sizeof (u32),
  .format_trace = format_l2_inacl_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(l2_inacl_error_strings),
  .error_strings = l2_inacl_error_strings,

  .n_next_nodes = ACL_NEXT_INDEX_N_NEXT,

  /* edit / add dispositions here */
  .next_nodes = {
       [ACL_NEXT_INDEX_DENY] = "error-drop",
  },
};

clib_error_t *l2_inacl_init (vlib_main_t *vm)
{
  l2_inacl_main_t * mp = &l2_inacl_main;

  mp->vlib_main = vm;
  mp->vnet_main = vnet_get_main();

  // Initialize the feature next-node indexes
  feat_bitmap_init_next_nodes(vm,
                              l2_inacl_node.index,
                              L2INPUT_N_FEAT,
                              l2input_get_feat_names(),
                              mp->feat_next_node_index);

  return 0;
}

VLIB_INIT_FUNCTION (l2_inacl_init);
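
The next-node dispatch above relies on feat_bitmap_get_next_node_index() from <vnet/l2/feat_bitmap.h>: each bit in the L2 input feature bitmap selects a slot in feat_next_node_index. As a rough, standalone illustration of that idea (not the VPP implementation), the sketch below dispatches on the most-significant feature bit still set; the helper name example_feat_next_node, the toy node numbers, and the use of plain C builtins are assumptions made for this example only.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for feat_bitmap_get_next_node_index(): return the
 * next-node slot corresponding to the most-significant feature bit still
 * set in the bitmap; slot 0 is used when no features remain. */
static inline uint32_t
example_feat_next_node (const uint32_t *next_nodes, uint32_t bitmap)
{
  if (bitmap == 0)
    return next_nodes[0];
  return next_nodes[31 - __builtin_clz (bitmap)];
}

int
main (void)
{
  /* Toy next-node table: slot i holds the node index for feature bit i. */
  uint32_t next_nodes[32] = { 0 };
  next_nodes[0] = 100;
  next_nodes[3] = 103;
  next_nodes[5] = 105;

  uint32_t bitmap = (1u << 3) | (1u << 5);  /* two features still pending */
  printf ("next node %u\n", example_feat_next_node (next_nodes, bitmap));  /* 105 */

  bitmap &= ~(1u << 5);                     /* current feature handled */
  printf ("next node %u\n", example_feat_next_node (next_nodes, bitmap));  /* 103 */
  return 0;
}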