FD.io VPP v18.04-17-g3a0d853 (Vector Packet Processing)
node.c: SPAN (port mirroring) graph node implementation
/*
 * Copyright (c) 2016 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <vlib/vlib.h>
#include <vnet/vnet.h>
#include <vppinfra/error.h>

#include <vnet/span/span.h>
#include <vnet/l2/l2_input.h>
#include <vnet/l2/l2_output.h>
#include <vnet/l2/feat_bitmap.h>

#include <vppinfra/error.h>
#include <vppinfra/elog.h>

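/*
 * SPAN (Switched Port Analyzer) nodes: mirror packets seen on a monitored
 * interface to one or more destination interfaces.  Mirroring runs either
 * at the device level (span-input / span-output) or as an L2 feature
 * (span-l2-input / span-l2-output); the four nodes registered below cover
 * each combination of direction and feature type.
 */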
vlib_node_registration_t span_node;

/* packet trace format function */
u8 *
format_span_trace (u8 * s, va_list * args)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
  span_trace_t *t = va_arg (*args, span_trace_t *);

  vnet_main_t *vnm = &vnet_main;
  s = format (s, "SPAN: mirrored %U -> %U",
              format_vnet_sw_if_index_name, vnm, t->src_sw_if_index,
              format_vnet_sw_if_index_name, vnm, t->mirror_sw_if_index);

  return s;
}

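/*
 * Example of the trace line produced by format_span_trace (interface names
 * are illustrative):
 *   SPAN: mirrored GigabitEthernet0/8/0 -> GigabitEthernet0/9/0
 */
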
#define foreach_span_error                        \
_(HITS, "SPAN incoming packets processed")

typedef enum
{
#define _(sym,str) SPAN_ERROR_##sym,
  foreach_span_error
#undef _
    SPAN_N_ERROR,
} span_error_t;

static char *span_error_strings[] = {
#define _(sym,string) string,
  foreach_span_error
#undef _
};

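/*
 * span_mirror:
 * For every mirror port configured on sw_if_index0 (for this feature type
 * and direction), copy b0 and enqueue the copy either to l2-output
 * (SPAN_FEAT_L2) or straight to the destination interface's TX frame
 * (SPAN_FEAT_DEVICE).  Copies are marked VNET_BUFFER_F_SPAN_CLONE so a
 * clone is never mirrored again.
 */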
static_always_inline void
span_mirror (vlib_main_t * vm, vlib_node_runtime_t * node, u32 sw_if_index0,
             vlib_buffer_t * b0, vlib_frame_t ** mirror_frames,
             vlib_rx_or_tx_t rxtx, span_feat_t sf)
{
  vlib_buffer_t *c0;
  span_main_t *sm = &span_main;
  vnet_main_t *vnm = &vnet_main;
  u32 *to_mirror_next = 0;
  u32 i;
  span_interface_t *si0;
  span_mirror_t *sm0;

  if (sw_if_index0 >= vec_len (sm->interfaces))
    return;

  si0 = vec_elt_at_index (sm->interfaces, sw_if_index0);
  sm0 = &si0->mirror_rxtx[sf][rxtx];

  if (sm0->num_mirror_ports == 0)
    return;

  /* Don't do it again */
  if (PREDICT_FALSE (b0->flags & VNET_BUFFER_F_SPAN_CLONE))
    return;

  /* *INDENT-OFF* */
  clib_bitmap_foreach (i, sm0->mirror_ports, (
    {
      if (mirror_frames[i] == 0)
        {
          if (sf == SPAN_FEAT_L2)
            mirror_frames[i] = vlib_get_frame_to_node (vnm->vlib_main,
                                                       l2output_node.index);
          else
            mirror_frames[i] = vnet_get_frame_to_sw_interface (vnm, i);
        }
      to_mirror_next = vlib_frame_vector_args (mirror_frames[i]);
      to_mirror_next += mirror_frames[i]->n_vectors;
      /* This can fail */
      c0 = vlib_buffer_copy (vm, b0);
      if (PREDICT_TRUE (c0 != 0))
        {
          vnet_buffer (c0)->sw_if_index[VLIB_TX] = i;
          c0->flags |= VNET_BUFFER_F_SPAN_CLONE;
          if (sf == SPAN_FEAT_L2)
            vnet_buffer (c0)->l2.feature_bitmap = L2OUTPUT_FEAT_OUTPUT;
          to_mirror_next[0] = vlib_get_buffer_index (vm, c0);
          mirror_frames[i]->n_vectors++;
          if (PREDICT_FALSE (b0->flags & VLIB_BUFFER_IS_TRACED))
            {
              span_trace_t *t = vlib_add_trace (vm, node, b0, sizeof (*t));
              t->src_sw_if_index = sw_if_index0;
              t->mirror_sw_if_index = i;
#if 0
              /* Enable this path to allow packet trace of SPAN packets.
                 Note that all SPAN packets will show up on the trace output
                 with the first SPAN packet (since they are in the same frame)
                 thus making trace output of the original packet confusing */
              mirror_frames[i]->flags |= VLIB_FRAME_TRACE;
              c0->flags |= VLIB_BUFFER_IS_TRACED;
#endif
            }
        }
    }));
  /* *INDENT-ON* */
}

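/*
 * span_node_inline_fn:
 * Shared worker for the four SPAN nodes.  It walks the incoming frame
 * (dual-loop, then single-loop), mirrors each buffer with span_mirror (),
 * picks the next node for the original packet from the appropriate feature
 * arc, and finally hands the accumulated per-interface mirror frames to
 * l2-output or to the destination interfaces.
 */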
static_always_inline uword
span_node_inline_fn (vlib_main_t * vm, vlib_node_runtime_t * node,
                     vlib_frame_t * frame, vlib_rx_or_tx_t rxtx,
                     span_feat_t sf)
{
  span_main_t *sm = &span_main;
  vnet_main_t *vnm = &vnet_main;
  u32 n_left_from, *from, *to_next;
  u32 n_span_packets = 0;
  u32 next_index;
  u32 sw_if_index;
  static __thread vlib_frame_t **mirror_frames = 0;

  from = vlib_frame_vector_args (frame);
  n_left_from = frame->n_vectors;
  next_index = node->cached_next_index;

  vec_validate_aligned (mirror_frames, sm->max_sw_if_index,
                        CLIB_CACHE_LINE_BYTES);

  while (n_left_from > 0)
    {
      u32 n_left_to_next;

      vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);

      while (n_left_from >= 4 && n_left_to_next >= 2)
        {
          u32 bi0;
          u32 bi1;
          vlib_buffer_t *b0;
          vlib_buffer_t *b1;
          u32 sw_if_index0;
          u32 next0 = 0;
          u32 sw_if_index1;
          u32 next1 = 0;

          /* speculatively enqueue b0, b1 to the current next frame */
          to_next[0] = bi0 = from[0];
          to_next[1] = bi1 = from[1];
          to_next += 2;
          n_left_to_next -= 2;
          from += 2;
          n_left_from -= 2;

          b0 = vlib_get_buffer (vm, bi0);
          b1 = vlib_get_buffer (vm, bi1);
          sw_if_index0 = vnet_buffer (b0)->sw_if_index[rxtx];
          sw_if_index1 = vnet_buffer (b1)->sw_if_index[rxtx];

          span_mirror (vm, node, sw_if_index0, b0, mirror_frames, rxtx, sf);
          span_mirror (vm, node, sw_if_index1, b1, mirror_frames, rxtx, sf);

          switch (sf)
            {
            case SPAN_FEAT_L2:
              if (rxtx == VLIB_RX)
                {
                  next0 = vnet_l2_feature_next (b0, sm->l2_input_next,
                                                L2INPUT_FEAT_SPAN);
                  next1 = vnet_l2_feature_next (b1, sm->l2_input_next,
                                                L2INPUT_FEAT_SPAN);
                }
              else
                {
                  next0 = vnet_l2_feature_next (b0, sm->l2_output_next,
                                                L2OUTPUT_FEAT_SPAN);
                  next1 = vnet_l2_feature_next (b1, sm->l2_output_next,
                                                L2OUTPUT_FEAT_SPAN);
                }
              break;
            case SPAN_FEAT_DEVICE:
            default:
              vnet_feature_next (sw_if_index0, &next0, b0);
              vnet_feature_next (sw_if_index1, &next1, b1);
              break;
            }

          /* verify speculative enqueue, maybe switch current next frame */
          vlib_validate_buffer_enqueue_x2 (vm, node, next_index,
                                           to_next, n_left_to_next,
                                           bi0, bi1, next0, next1);
        }
      while (n_left_from > 0 && n_left_to_next > 0)
        {
          u32 bi0;
          vlib_buffer_t *b0;
          u32 sw_if_index0;
          u32 next0 = 0;

          /* speculatively enqueue b0 to the current next frame */
          to_next[0] = bi0 = from[0];
          to_next += 1;
          n_left_to_next -= 1;
          from += 1;
          n_left_from -= 1;

          b0 = vlib_get_buffer (vm, bi0);
          sw_if_index0 = vnet_buffer (b0)->sw_if_index[rxtx];

          span_mirror (vm, node, sw_if_index0, b0, mirror_frames, rxtx, sf);

          switch (sf)
            {
            case SPAN_FEAT_L2:
              if (rxtx == VLIB_RX)
                next0 = vnet_l2_feature_next (b0, sm->l2_input_next,
                                              L2INPUT_FEAT_SPAN);
              else
                next0 = vnet_l2_feature_next (b0, sm->l2_output_next,
                                              L2OUTPUT_FEAT_SPAN);
              break;
            case SPAN_FEAT_DEVICE:
            default:
              vnet_feature_next (sw_if_index0, &next0, b0);
              break;
            }

          /* verify speculative enqueue, maybe switch current next frame */
          vlib_validate_buffer_enqueue_x1 (vm, node, next_index, to_next,
                                           n_left_to_next, bi0, next0);
        }

      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
    }

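  /* Flush the mirror frames filled by span_mirror (): L2 SPAN clones go to
     l2-output, device SPAN clones go directly to the destination
     interface. */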
  for (sw_if_index = 0; sw_if_index < vec_len (mirror_frames); sw_if_index++)
    {
      vlib_frame_t *f = mirror_frames[sw_if_index];
      if (f == 0)
        continue;

      if (sf == SPAN_FEAT_L2)
        vlib_put_frame_to_node (vnm->vlib_main, l2output_node.index, f);
      else
        vnet_put_frame_to_sw_interface (vnm, sw_if_index, f);
      mirror_frames[sw_if_index] = 0;
    }
  vlib_node_increment_counter (vm, span_node.index, SPAN_ERROR_HITS,
                               n_span_packets);

  return frame->n_vectors;
}

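/*
 * Thin wrappers instantiating the inline worker for each combination of
 * direction (RX/TX) and feature type (device-level vs. L2).
 */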
static uword
span_device_input_node_fn (vlib_main_t * vm, vlib_node_runtime_t * node,
                           vlib_frame_t * frame)
{
  return span_node_inline_fn (vm, node, frame, VLIB_RX, SPAN_FEAT_DEVICE);
}

static uword
span_device_output_node_fn (vlib_main_t * vm, vlib_node_runtime_t * node,
                            vlib_frame_t * frame)
{
  return span_node_inline_fn (vm, node, frame, VLIB_TX, SPAN_FEAT_DEVICE);
}

static uword
span_l2_input_node_fn (vlib_main_t * vm, vlib_node_runtime_t * node,
                       vlib_frame_t * frame)
{
  return span_node_inline_fn (vm, node, frame, VLIB_RX, SPAN_FEAT_L2);
}

static uword
span_l2_output_node_fn (vlib_main_t * vm, vlib_node_runtime_t * node,
                        vlib_frame_t * frame)
{
  return span_node_inline_fn (vm, node, frame, VLIB_TX, SPAN_FEAT_L2);
}

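/* Registration fields shared by the four SPAN node registrations below. */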
#define span_node_defs                            \
  .vector_size = sizeof (u32),                    \
  .format_trace = format_span_trace,              \
  .type = VLIB_NODE_TYPE_INTERNAL,                \
  .n_errors = ARRAY_LEN(span_error_strings),      \
  .error_strings = span_error_strings,            \
  .n_next_nodes = 0,                              \
  .next_nodes = {                                 \
    [0] = "error-drop"                            \
  }

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (span_input_node) = {
  span_node_defs,
  .function = span_device_input_node_fn,
  .name = "span-input",
};

VLIB_NODE_FUNCTION_MULTIARCH (span_input_node, span_device_input_node_fn)

VLIB_REGISTER_NODE (span_output_node) = {
  span_node_defs,
  .function = span_device_output_node_fn,
  .name = "span-output",
};

VLIB_NODE_FUNCTION_MULTIARCH (span_output_node, span_device_output_node_fn)

VLIB_REGISTER_NODE (span_l2_input_node) = {
  span_node_defs,
  .function = span_l2_input_node_fn,
  .name = "span-l2-input",
};

VLIB_NODE_FUNCTION_MULTIARCH (span_l2_input_node, span_l2_input_node_fn)

VLIB_REGISTER_NODE (span_l2_output_node) = {
  span_node_defs,
  .function = span_l2_output_node_fn,
  .name = "span-l2-output",
};

VLIB_NODE_FUNCTION_MULTIARCH (span_l2_output_node, span_l2_output_node_fn)

clib_error_t *
span_init (vlib_main_t * vm)
{
  span_main_t *sm = &span_main;

  sm->vlib_main = vm;
  sm->vnet_main = vnet_get_main ();

  /* Initialize the feature next-node indexes */
  feat_bitmap_init_next_nodes (vm,
                               span_l2_input_node.index,
                               L2INPUT_N_FEAT,
                               l2input_get_feat_names (),
                               sm->l2_input_next);

  feat_bitmap_init_next_nodes (vm,
                               span_l2_output_node.index,
                               L2OUTPUT_N_FEAT,
                               l2output_get_feat_names (),
                               sm->l2_output_next);
  return 0;
}

VLIB_INIT_FUNCTION (span_init);
/* *INDENT-ON* */
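
/*
 * For reference, mirroring is typically configured through the SPAN CLI /
 * API implemented in span.c.  An illustrative CLI sequence (not verified
 * against this exact build) that exercises these nodes:
 *
 *   set interface span GigabitEthernet0/8/0 destination GigabitEthernet0/9/0
 *   set interface span GigabitEthernet0/8/0 l2 destination GigabitEthernet0/9/0
 */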

#undef span_node_defs
/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */