FD.io VPP v16.06
Vector Packet Processing
l2_rw.c
/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <vlib/vlib.h>
#include <vnet/l2/feat_bitmap.h>
#include <vnet/l2/l2_rw.h>

l2_rw_main_t l2_rw_main;

vlib_node_registration_t l2_rw_node;

typedef struct {
  u32 sw_if_index;
  u32 classify_table_index;
  u32 rewrite_entry_index;
} l2_rw_trace_t;

static u8 *format_l2_rw_entry (u8 * s, va_list *args)
{
  l2_rw_entry_t *e = va_arg (*args, l2_rw_entry_t *);
  l2_rw_main_t *rw = &l2_rw_main;
  s = format (s, "%d - mask:%U value:%U\n",
              e - rw->entries,
              format_hex_bytes, e->mask, e->rewrite_n_vectors * sizeof(u32x4),
              format_hex_bytes, e->value, e->rewrite_n_vectors * sizeof(u32x4));
  s = format (s, " hits:%d skip_bytes:%d",
              e->hit_count, e->skip_n_vectors * sizeof(u32x4));
  return s;
}

static u8 *format_l2_rw_config (u8 * s, va_list *args)
{
  l2_rw_config_t *c = va_arg (*args, l2_rw_config_t *);
  return format(s, "table-index:%d miss-index:%d",
                c->table_index,
                c->miss_index);
}

/* packet trace format function */
static u8 *format_l2_rw_trace (u8 * s, va_list * args)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
  l2_rw_trace_t * t = va_arg (*args, l2_rw_trace_t *);
  return format (s, "l2-rw: sw_if_index %d, table %d, entry %d",
                 t->sw_if_index, t->classify_table_index,
                 t->rewrite_entry_index);
}

always_inline l2_rw_config_t *l2_rw_get_config(u32 sw_if_index)
{
  l2_rw_main_t *rw = &l2_rw_main;
  if (PREDICT_FALSE(!clib_bitmap_get(rw->configs_bitmap, sw_if_index))) {
    vec_validate(rw->configs, sw_if_index);
    rw->configs[sw_if_index].table_index = ~0;
    rw->configs[sw_if_index].miss_index = ~0;
    rw->configs_bitmap = clib_bitmap_set(rw->configs_bitmap, sw_if_index, 1);
  }
  return &rw->configs[sw_if_index];
}

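/*
 * l2_rw_rewrite: apply one rewrite entry to a packet in place. Each selected
 * 16-byte block (after skipping skip_n_vectors blocks) is updated as
 *   d = (d & ~mask) | value
 * Since l2_rw_mod_entry() pre-masks value with mask, only the bits set in
 * mask are rewritten; all other packet bits are preserved. The unaligned
 * path does the same operation with pairs of 64-bit words.
 */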
static_always_inline void l2_rw_rewrite(l2_rw_entry_t *rwe, u8 *h)
{
  if (U32X4_ALIGNED(h)) {
    u32x4 *d = ((u32x4 *) h) + rwe->skip_n_vectors;
    switch(rwe->rewrite_n_vectors) {
      case 5:
        d[4] = (d[4] & ~rwe->mask[4]) | rwe->value[4];
        /* FALLTHROUGH */
      case 4:
        d[3] = (d[3] & ~rwe->mask[3]) | rwe->value[3];
        /* FALLTHROUGH */
      case 3:
        d[2] = (d[2] & ~rwe->mask[2]) | rwe->value[2];
        /* FALLTHROUGH */
      case 2:
        d[1] = (d[1] & ~rwe->mask[1]) | rwe->value[1];
        /* FALLTHROUGH */
      case 1:
        d[0] = (d[0] & ~rwe->mask[0]) | rwe->value[0];
        break;
      default:
        abort();
    }
  } else {
    u64 *d = ((u64 *) h) + rwe->skip_n_vectors * 2;
    switch(rwe->rewrite_n_vectors) {
      case 5:
        d[8] = (d[8] & ~(((u64*)rwe->mask)[8])) | (((u64*)rwe->value)[8]);
        d[9] = (d[9] & ~(((u64*)rwe->mask)[9])) | (((u64*)rwe->value)[9]);
        /* FALLTHROUGH */
      case 4:
        d[6] = (d[6] & ~(((u64*)rwe->mask)[6])) | (((u64*)rwe->value)[6]);
        d[7] = (d[7] & ~(((u64*)rwe->mask)[7])) | (((u64*)rwe->value)[7]);
        /* FALLTHROUGH */
      case 3:
        d[4] = (d[4] & ~(((u64*)rwe->mask)[4])) | (((u64*)rwe->value)[4]);
        d[5] = (d[5] & ~(((u64*)rwe->mask)[5])) | (((u64*)rwe->value)[5]);
        /* FALLTHROUGH */
      case 2:
        d[2] = (d[2] & ~(((u64*)rwe->mask)[2])) | (((u64*)rwe->value)[2]);
        d[3] = (d[3] & ~(((u64*)rwe->mask)[3])) | (((u64*)rwe->value)[3]);
        /* FALLTHROUGH */
      case 1:
        d[0] = (d[0] & ~(((u64*)rwe->mask)[0])) | (((u64*)rwe->value)[0]);
        d[1] = (d[1] & ~(((u64*)rwe->mask)[1])) | (((u64*)rwe->value)[1]);
        break;
      default:
        abort();
    }
  }
}

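/*
 * l2-rw node: for each packet, look up the classifier table configured on
 * the RX interface (following next_table_index on a miss), select the
 * rewrite entry from the matched session's opaque_index (or the interface's
 * miss_index), apply it in place, then dispatch to the next L2 input
 * feature node.
 */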
static uword
l2_rw_node_fn (vlib_main_t * vm,
               vlib_node_runtime_t * node,
               vlib_frame_t * frame)
{
  l2_rw_main_t *rw = &l2_rw_main;
  u32 n_left_from, * from, * to_next, next_index;
  //vlib_node_t *n = vlib_get_node(vm, node->node_index);
  f64 now = vlib_time_now(vlib_get_main());
  vnet_classify_main_t *vcm = &vnet_classify_main;
  u32 prefetch_size = 0;

  from = vlib_frame_vector_args (frame);
  n_left_from = frame->n_vectors; /* number of packets to process */
  next_index = node->cached_next_index;

  while (n_left_from > 0)
    {
      u32 n_left_to_next;

      /* get space to enqueue frame to graph node "next_index" */
      vlib_get_next_frame (vm, node, next_index,
                           to_next, n_left_to_next);

      while (n_left_from >= 4 && n_left_to_next >= 2)
        {
          u32 bi0, next0, sw_if_index0, feature_bitmap0, rwe_index0;
          u32 bi1, next1, sw_if_index1, feature_bitmap1, rwe_index1;
          vlib_buffer_t *b0, *b1;
          ethernet_header_t *h0, *h1;
          l2_rw_config_t *config0, *config1;
          u64 hash0, hash1;
          vnet_classify_table_t *t0, *t1;
          vnet_classify_entry_t *e0, *e1;
          l2_rw_entry_t *rwe0, *rwe1;

          {
            vlib_buffer_t * p2, * p3;
            p2 = vlib_get_buffer (vm, from[2]);
            p3 = vlib_get_buffer (vm, from[3]);

            vlib_prefetch_buffer_header (p2, LOAD);
            vlib_prefetch_buffer_header (p3, LOAD);
            CLIB_PREFETCH (p2->data, prefetch_size, LOAD);
            CLIB_PREFETCH (p3->data, prefetch_size, LOAD);
          }

          bi0 = from[0];
          bi1 = from[1];
          to_next[0] = bi0;
          to_next[1] = bi1;
          from += 2;
          to_next += 2;
          n_left_from -= 2;
          n_left_to_next -= 2;

          b0 = vlib_get_buffer(vm, bi0);
          b1 = vlib_get_buffer(vm, bi1);
          h0 = vlib_buffer_get_current(b0);
          h1 = vlib_buffer_get_current(b1);

          sw_if_index0 = vnet_buffer(b0)->sw_if_index[VLIB_RX];
          sw_if_index1 = vnet_buffer(b1)->sw_if_index[VLIB_RX];
          config0 = l2_rw_get_config(sw_if_index0); //TODO: check sw_if_index0 value
          config1 = l2_rw_get_config(sw_if_index1); //TODO: check sw_if_index1 value
          t0 = pool_elt_at_index(vcm->tables, config0->table_index);
          t1 = pool_elt_at_index(vcm->tables, config1->table_index);
          prefetch_size = (t1->skip_n_vectors + t1->match_n_vectors)*sizeof(u32x4);

          hash0 = vnet_classify_hash_packet(t0, (u8 *)h0);
          hash1 = vnet_classify_hash_packet(t1, (u8 *)h1);
          e0 = vnet_classify_find_entry(t0, (u8 *) h0, hash0, now);
          e1 = vnet_classify_find_entry(t1, (u8 *) h1, hash1, now);

          while (!e0 && (t0->next_table_index != ~0)) {
            t0 = pool_elt_at_index(vcm->tables, t0->next_table_index);
            hash0 = vnet_classify_hash_packet(t0, (u8 *)h0);
            e0 = vnet_classify_find_entry(t0, (u8 *) h0, hash0, now);
          }

          while (!e1 && (t1->next_table_index != ~0)) {
            t1 = pool_elt_at_index(vcm->tables, t1->next_table_index);
            hash1 = vnet_classify_hash_packet(t1, (u8 *)h1);
            e1 = vnet_classify_find_entry(t1, (u8 *) h1, hash1, now);
          }

          rwe_index0 = e0?e0->opaque_index:config0->miss_index;
          rwe_index1 = e1?e1->opaque_index:config1->miss_index;

          if (rwe_index0 != ~0) {
            rwe0 = pool_elt_at_index(rw->entries, rwe_index0);
            l2_rw_rewrite(rwe0, (u8 *)h0);
          }
          if (rwe_index1 != ~0) {
            rwe1 = pool_elt_at_index(rw->entries, rwe_index1);
            l2_rw_rewrite(rwe1, (u8 *)h1);
          }

          if (PREDICT_FALSE((b0->flags & VLIB_BUFFER_IS_TRACED))) {
            l2_rw_trace_t *t =
                vlib_add_trace (vm, node, b0, sizeof (*t));
            t->sw_if_index = sw_if_index0;
            t->classify_table_index = config0->table_index;
            t->rewrite_entry_index = rwe_index0;
          }

          if (PREDICT_FALSE((b1->flags & VLIB_BUFFER_IS_TRACED))) {
            l2_rw_trace_t *t =
                vlib_add_trace (vm, node, b1, sizeof (*t));
            t->sw_if_index = sw_if_index1;
            t->classify_table_index = config1->table_index;
            t->rewrite_entry_index = rwe_index1;
          }

          // Update feature bitmap and get next feature index
          feature_bitmap0 = vnet_buffer(b0)->l2.feature_bitmap & ~L2INPUT_FEAT_RW;
          feature_bitmap1 = vnet_buffer(b1)->l2.feature_bitmap & ~L2INPUT_FEAT_RW;
          vnet_buffer(b0)->l2.feature_bitmap = feature_bitmap0;
          vnet_buffer(b1)->l2.feature_bitmap = feature_bitmap1;
          next0 = feat_bitmap_get_next_node_index(rw->feat_next_node_index,
                                                  feature_bitmap0);
          next1 = feat_bitmap_get_next_node_index(rw->feat_next_node_index,
                                                  feature_bitmap1);

          vlib_validate_buffer_enqueue_x2 (vm, node, next_index,
                                           to_next, n_left_to_next,
                                           bi0, bi1, next0, next1);
        }

      while (n_left_from > 0 && n_left_to_next > 0)
        {
          u32 bi0, next0, sw_if_index0, feature_bitmap0, rwe_index0;
          vlib_buffer_t *b0;
          ethernet_header_t *h0;
          l2_rw_config_t *config0;
          u64 hash0;
          vnet_classify_table_t *t0;
          vnet_classify_entry_t *e0;
          l2_rw_entry_t *rwe0;

          bi0 = from[0];
          to_next[0] = bi0;
          from += 1;
          to_next += 1;
          n_left_from -= 1;
          n_left_to_next -= 1;

          b0 = vlib_get_buffer(vm, bi0);
          h0 = vlib_buffer_get_current(b0);

          sw_if_index0 = vnet_buffer(b0)->sw_if_index[VLIB_RX];
          config0 = l2_rw_get_config(sw_if_index0); //TODO: check sw_if_index0 value
          t0 = pool_elt_at_index(vcm->tables, config0->table_index);

          hash0 = vnet_classify_hash_packet(t0, (u8 *)h0);
          e0 = vnet_classify_find_entry(t0, (u8 *) h0, hash0, now);

          while (!e0 && (t0->next_table_index != ~0)) {
            t0 = pool_elt_at_index(vcm->tables, t0->next_table_index);
            hash0 = vnet_classify_hash_packet(t0, (u8 *)h0);
            e0 = vnet_classify_find_entry(t0, (u8 *) h0, hash0, now);
          }

          rwe_index0 = e0?e0->opaque_index:config0->miss_index;

          if (rwe_index0 != ~0) {
            rwe0 = pool_elt_at_index(rw->entries, rwe_index0);
            l2_rw_rewrite(rwe0, (u8 *)h0);
          }

          if (PREDICT_FALSE((b0->flags & VLIB_BUFFER_IS_TRACED))) {
            l2_rw_trace_t *t =
                vlib_add_trace (vm, node, b0, sizeof (*t));
            t->sw_if_index = sw_if_index0;
            t->classify_table_index = config0->table_index;
            t->rewrite_entry_index = rwe_index0;
          }

          // Update feature bitmap and get next feature index
          feature_bitmap0 = vnet_buffer(b0)->l2.feature_bitmap & ~L2INPUT_FEAT_RW;
          vnet_buffer(b0)->l2.feature_bitmap = feature_bitmap0;
          next0 = feat_bitmap_get_next_node_index(rw->feat_next_node_index,
                                                  feature_bitmap0);

          vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
                                           to_next, n_left_to_next,
                                           bi0, next0);
        }
      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
    }

  return frame->n_vectors;
}

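/*
 * l2_rw_mod_entry: add, update or delete a rewrite entry. skip and len are
 * in bytes and are converted to 16-byte vectors. For example, skip = 20 and
 * len = 6 gives skip_n_vectors = 1 (one whole 16-byte vector skipped), a
 * residual in-vector offset of 4 bytes, and
 * rewrite_n_vectors = (4 + 6 - 1) / 16 + 1 = 1.
 */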
int l2_rw_mod_entry(u32 *index,
                    u8 *mask, u8 *value, u32 len,
                    u32 skip, u8 is_del)
{
  l2_rw_main_t *rw = &l2_rw_main;
  l2_rw_entry_t *e = 0;
  if (*index != ~0) {
    if (pool_is_free_index(rw->entries, *index)) {
      return -1;
    }
    e = pool_elt_at_index(rw->entries, *index);
  } else {
    pool_get(rw->entries, e);
    *index = e - rw->entries;
  }

  if (!e)
    return -1;

  if (is_del) {
    pool_put(rw->entries, e);
    return 0;
  }

  e->skip_n_vectors = skip / sizeof(u32x4);
  skip -= e->skip_n_vectors * sizeof(u32x4);
  e->rewrite_n_vectors = (skip + len - 1) / sizeof(u32x4) + 1;
  vec_alloc_aligned(e->mask, e->rewrite_n_vectors, sizeof(u32x4));
  memset(e->mask, 0, e->rewrite_n_vectors * sizeof(u32x4));
  vec_alloc_aligned(e->value, e->rewrite_n_vectors, sizeof(u32x4));
  memset(e->value, 0, e->rewrite_n_vectors * sizeof(u32x4));

  clib_memcpy(((u8 *)e->value) + skip, value, len);
  clib_memcpy(((u8 *)e->mask) + skip, mask, len);

  int i;
  for (i = 0; i < e->rewrite_n_vectors; i++) {
    e->value[i] &= e->mask[i];
  }

  return 0;
}

static clib_error_t *
l2_rw_entry_cli_fn (vlib_main_t * vm,
                    unformat_input_t * input,
                    vlib_cli_command_t * cmd)
{
  u32 index = ~0;
  u8 *mask = 0;
  u8 *value = 0;
  u32 skip = 0;
  u8 del = 0;

  while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
  {
    if (unformat (input, "index %d", &index))
      ;
    else if (unformat (input, "mask %U", unformat_hex_string, &mask))
      ;
    else if (unformat (input, "value %U", unformat_hex_string, &value))
      ;
    else if (unformat (input, "skip %d", &skip))
      ;
    else if (unformat (input, "del"))
      del = 1;
    else
      break;
  }

  if (!mask || !value)
    return clib_error_return(0, "Unspecified mask or value");

  if (vec_len(mask) != vec_len(value))
    return clib_error_return(0, "Mask and value lengths must be identical");

  int ret;
  if ((ret = l2_rw_mod_entry(&index, mask, value, vec_len(mask), skip, del)))
    return clib_error_return(0, "Could not add entry");

  return 0;
}

VLIB_CLI_COMMAND (l2_rw_entry_cli, static) = {
  .path = "l2 rewrite entry",
  .short_help =
  "l2 rewrite entry [index <index>] [mask <hex-mask>] [value <hex-value>] [skip <n_bytes>] [del]",
  .function = l2_rw_entry_cli_fn,
};
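
/*
 * Illustrative CLI usage (mask/value are example values, not from the
 * original source); the mask covers the first 6 bytes, i.e. the Ethernet
 * destination MAC, which matching packets will have overwritten:
 *   l2 rewrite entry mask ffffffffffff000000000000 value 010203040506000000000000
 */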

int l2_rw_interface_set_table(u32 sw_if_index,
                              u32 table_index,
                              u32 miss_index)
{
  l2_rw_config_t *c = l2_rw_get_config(sw_if_index);
  l2_rw_main_t *rw = &l2_rw_main;

  c->table_index = table_index;
  c->miss_index = miss_index;
  u32 feature_bitmap = (table_index == ~0)?0:L2INPUT_FEAT_RW;

  l2input_intf_bitmap_enable(sw_if_index, L2INPUT_FEAT_RW, feature_bitmap);

  if (c->table_index == ~0)
    clib_bitmap_set(rw->configs_bitmap, sw_if_index, 0);

  return 0;
}

static clib_error_t *
l2_rw_interface_cli_fn (vlib_main_t * vm,
                        unformat_input_t * input,
                        vlib_cli_command_t * cmd)
{
  vnet_main_t * vnm = vnet_get_main();
  u32 table_index = ~0;
  u32 sw_if_index = ~0;
  u32 miss_index = ~0;

  if (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
    unformat (input, "%U", unformat_vnet_sw_interface,
              vnm, &sw_if_index);
  }

  while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
  {
    if (unformat (input, "table %d", &table_index))
      ;
    else if (unformat (input, "miss-index %d", &miss_index))
      ;
    else
      break;
  }

  if (sw_if_index == ~0)
    return clib_error_return(0, "You must specify an interface 'iface <interface>'",
                             format_unformat_error, input);
  int ret;
  if((ret = l2_rw_interface_set_table(sw_if_index, table_index, miss_index)))
    return clib_error_return(0, "l2_rw_interface_set_table returned %d", ret);

  return 0;
}

VLIB_CLI_COMMAND (l2_rw_interface_cli, static) = {
  .path = "set interface l2 rewrite",
  .short_help =
  "set interface l2 rewrite <interface> [table <table index>] [miss-index <entry-index>]",
  .function = l2_rw_interface_cli_fn,
};
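
/*
 * Illustrative CLI usage (interface name and indices are example values):
 *   set interface l2 rewrite GigabitEthernet0/8/0 table 0 miss-index 0
 */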

static clib_error_t *
l2_rw_show_interfaces_cli_fn (vlib_main_t * vm,
                              unformat_input_t * input,
                              vlib_cli_command_t * cmd)
{
  l2_rw_main_t *rw = &l2_rw_main;
  if (clib_bitmap_count_set_bits(rw->configs_bitmap) == 0)
    vlib_cli_output (vm, "No interface is currently using l2 rewrite\n");

  uword i;
  clib_bitmap_foreach(i, rw->configs_bitmap, {
      vlib_cli_output (vm, "sw_if_index:%d %U\n", i, format_l2_rw_config, &rw->configs[i]);
  });
  return 0;
}

VLIB_CLI_COMMAND (l2_rw_show_interfaces_cli, static) = {
  .path = "show l2 rewrite interfaces",
  .short_help =
  "show l2 rewrite interfaces",
  .function = l2_rw_show_interfaces_cli_fn,
};

static clib_error_t *
l2_rw_show_entries_cli_fn (vlib_main_t * vm,
                           unformat_input_t * input,
                           vlib_cli_command_t * cmd)
{
  l2_rw_main_t *rw = &l2_rw_main;
  l2_rw_entry_t *e;
  if (pool_elts(rw->entries) == 0)
    vlib_cli_output (vm, "No entries\n");

  pool_foreach(e, rw->entries, {
    vlib_cli_output (vm, "%U\n", format_l2_rw_entry, e);
  });
  return 0;
}

VLIB_CLI_COMMAND (l2_rw_show_entries_cli, static) = {
  .path = "show l2 rewrite entries",
  .short_help =
  "show l2 rewrite entries",
  .function = l2_rw_show_entries_cli_fn,
};

int
l2_rw_enable_disable(u32 bridge_domain, u8 disable)
{
  u32 mask = L2INPUT_FEAT_RW;
  l2input_set_bridge_features(bridge_domain, mask,
                              disable ? 0 : mask);
  return 0;
}

static clib_error_t *
l2_rw_set_cli_fn (vlib_main_t * vm,
                  unformat_input_t * input,
                  vlib_cli_command_t * cmd)
{
  u32 bridge_domain;
  u8 disable = 0;

  if (unformat_check_input (input) == UNFORMAT_END_OF_INPUT ||
      !unformat (input, "%d", &bridge_domain)) {
    return clib_error_return(0, "You must specify a bridge domain");
  }

  if (unformat_check_input (input) != UNFORMAT_END_OF_INPUT &&
      unformat (input, "disable")) {
    disable = 1;
  }

  if (l2_rw_enable_disable(bridge_domain, disable))
    return clib_error_return(0, "Could not enable or disable rewrite");

  return 0;
}

VLIB_CLI_COMMAND (l2_rw_set_cli, static) = {
  .path = "set bridge-domain rewrite",
  .short_help =
  "set bridge-domain rewrite <bridge-domain> [disable]",
  .function = l2_rw_set_cli_fn,
};
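
/*
 * Illustrative CLI usage (bridge-domain id is an example value):
 *   set bridge-domain rewrite 1
 *   set bridge-domain rewrite 1 disable
 */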

static
clib_error_t *l2_rw_init (vlib_main_t *vm)
{
  l2_rw_main_t *rw = &l2_rw_main;
  rw->configs = 0;
  rw->entries = 0;
  clib_bitmap_alloc(rw->configs_bitmap, 1);
  feat_bitmap_init_next_nodes(vm,
                              l2_rw_node.index,
                              L2INPUT_N_FEAT,
                              l2input_get_feat_names(),
                              rw->feat_next_node_index);
  return 0;
}
VLIB_INIT_FUNCTION (l2_rw_init);

enum {
  L2_RW_NEXT_DROP,
  L2_RW_N_NEXT,
};

#define foreach_l2_rw_error \
_(UNKNOWN, "Unknown error")

typedef enum {
#define _(sym,str) L2_RW_ERROR_##sym,
  foreach_l2_rw_error
#undef _
  L2_RW_N_ERROR,
} l2_rw_error_t;

static char * l2_rw_error_strings[] = {
#define _(sym,string) string,
  foreach_l2_rw_error
#undef _
};

VLIB_REGISTER_NODE (l2_rw_node) = {
  .function = l2_rw_node_fn,
  .name = "l2-rw",
  .vector_size = sizeof (u32),
  .format_trace = format_l2_rw_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .n_errors = ARRAY_LEN(l2_rw_error_strings),
  .error_strings = l2_rw_error_strings,
  .runtime_data_bytes = 0,
  .n_next_nodes = L2_RW_N_NEXT,
  .next_nodes = { [L2_RW_NEXT_DROP] = "error-drop"},
};