#define foreach_vxlan_gpe_encap_error \
_(ENCAPSULATED, "good packets encapsulated")

static char * vxlan_gpe_encap_error_strings[] = {
#define _(sym,string) string,
  foreach_vxlan_gpe_encap_error
#undef _
};

typedef enum {
#define _(sym,str) VXLAN_GPE_ENCAP_ERROR_##sym,
  foreach_vxlan_gpe_encap_error
#undef _
  /* ... */
} vxlan_gpe_encap_error_t;

#define foreach_fixed_header_offset \
_(0) _(1) _(2) _(3) _(4) _(5) _(6)
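Expanded by the preprocessor, the `_()` list yields one counter string and one matching enum tag per error. A sketch of the result (the `VXLAN_GPE_ENCAP_N_ERROR` terminator is assumed from the usual VPP error-enum convention; it is not visible in this listing):

    /* after preprocessing (sketch) */
    static char * vxlan_gpe_encap_error_strings[] = {
      "good packets encapsulated",
    };

    typedef enum {
      VXLAN_GPE_ENCAP_ERROR_ENCAPSULATED,   /* indexes the string above */
      VXLAN_GPE_ENCAP_N_ERROR,              /* assumed count/terminator */
    } vxlan_gpe_encap_error_t;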
static uword
vxlan_gpe_encap (vlib_main_t * vm,
                 vlib_node_runtime_t * node,
                 vlib_frame_t * from_frame)
{
  u32 n_left_from, next_index, * from, * to_next;
  vxlan_gpe_main_t * ngm = &vxlan_gpe_main;
  vnet_interface_main_t * im = &ngm->vnet_main->interface_main;
  u32 pkts_encapsulated = 0;
  u16 old_l0 = 0, old_l1 = 0;
  u32 cpu_index = os_get_cpu_number ();
  u32 stats_sw_if_index, stats_n_packets, stats_n_bytes;

  from = vlib_frame_vector_args (from_frame);
  n_left_from = from_frame->n_vectors;

  next_index = node->cached_next_index;
  stats_sw_if_index = node->runtime_data[0];
  stats_n_packets = stats_n_bytes = 0;
  while (n_left_from > 0)
    {
      u32 n_left_to_next;

      vlib_get_next_frame (vm, node, next_index,
                           to_next, n_left_to_next);

      /* Two-packet dual loop */
      while (n_left_from >= 4 && n_left_to_next >= 2)
        {
          /* ... */
          u32 sw_if_index0, sw_if_index1, len0, len1;
          /* ... */
          u64 * copy_src0, * copy_dst0;
          u64 * copy_src1, * copy_dst1;
          u32 * copy_src_last0, * copy_dst_last0;
          u32 * copy_src_last1, * copy_dst_last1;
          /* ... apply the rewrite, then copy the fixed header */
          copy_dst0 = (u64 *) ip0;
          copy_src0 = (u64 *) t0->rewrite;
          copy_dst1 = (u64 *) ip1;
          copy_src1 = (u64 *) t1->rewrite;

          ASSERT (sizeof (ip4_vxlan_gpe_header_t) == 36);

          /* Copy the first 56 octets eight bytes at a time */
#define _(offs) copy_dst0[offs] = copy_src0[offs];
          foreach_fixed_header_offset;
#undef _
#define _(offs) copy_dst1[offs] = copy_src1[offs];
          foreach_fixed_header_offset;
#undef _
          /* ... then the last 4 octets as a single u32 */
          copy_dst_last0 = (u32 *)(&copy_dst0[7]);
          copy_src_last0 = (u32 *)(&copy_src0[7]);
          copy_dst_last1 = (u32 *)(&copy_dst1[7]);
          copy_src_last1 = (u32 *)(&copy_src1[7]);

          copy_dst_last0[0] = copy_src_last0[0];
          copy_dst_last1[0] = copy_src_last1[0];
          pkts_encapsulated += 2;
          len0 = vlib_buffer_length_in_chain (vm, b0);
          len1 = vlib_buffer_length_in_chain (vm, b1);
          stats_n_packets += 2;
          stats_n_bytes += len0 + len1;

          /* Batch TX counter updates; flush only when the interface changes */
          if (PREDICT_FALSE ((sw_if_index0 != stats_sw_if_index)
                             || (sw_if_index1 != stats_sw_if_index))) {
              stats_n_packets -= 2;
              stats_n_bytes -= len0 + len1;
              if (sw_if_index0 == sw_if_index1) {
                  if (stats_n_packets)
                    vlib_increment_combined_counter
                      (im->combined_sw_if_counters + VNET_INTERFACE_COUNTER_TX,
                       cpu_index, stats_sw_if_index, stats_n_packets, stats_n_bytes);
                  stats_sw_if_index = sw_if_index0;
                  stats_n_packets = 2;
                  stats_n_bytes = len0 + len1;
              } else {
                  vlib_increment_combined_counter
                    (im->combined_sw_if_counters + VNET_INTERFACE_COUNTER_TX,
                     cpu_index, sw_if_index0, 1, len0);
                  vlib_increment_combined_counter
                    (im->combined_sw_if_counters + VNET_INTERFACE_COUNTER_TX,
                     cpu_index, sw_if_index1, 1, len1);
              }
          }

          /* ... */
          vlib_validate_buffer_enqueue_x2 (vm, node, next_index,
                                           to_next, n_left_to_next,
                                           bi0, bi1, next0, next1);
        }
      /* Single-packet loop */
      while (n_left_from > 0 && n_left_to_next > 0)
        {
          /* ... */
          u32 sw_if_index0, len0;
          /* ... */
          u64 * copy_src0, * copy_dst0;
          u32 * copy_src_last0, * copy_dst_last0;

          /* ... apply the rewrite, then copy the fixed header */
          copy_dst0 = (u64 *) ip0;
          copy_src0 = (u64 *) t0->rewrite;

          ASSERT (sizeof (ip4_vxlan_gpe_header_t) == 36);

#define _(offs) copy_dst0[offs] = copy_src0[offs];
          foreach_fixed_header_offset;
#undef _
          /* last 4 octets */
          copy_dst_last0 = (u32 *)(&copy_dst0[7]);
          copy_src_last0 = (u32 *)(&copy_src0[7]);
          copy_dst_last0[0] = copy_src_last0[0];
          pkts_encapsulated++;
          len0 = vlib_buffer_length_in_chain (vm, b0);
          stats_n_packets += 1;
          stats_n_bytes += len0;

          /* Flush the batched stats when the interface changes */
          if (PREDICT_FALSE (sw_if_index0 != stats_sw_if_index)) {
              stats_n_packets -= 1;
              stats_n_bytes -= len0;
              if (stats_n_packets)
                vlib_increment_combined_counter
                  (im->combined_sw_if_counters + VNET_INTERFACE_COUNTER_TX,
                   cpu_index, stats_sw_if_index, stats_n_packets, stats_n_bytes);
              stats_n_packets = 1;
              stats_n_bytes = len0;
              stats_sw_if_index = sw_if_index0;
          }

          /* ... */
          vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
                                           to_next, n_left_to_next,
                                           bi0, next0);
        }

      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
    }

  vlib_node_increment_counter (vm, node->node_index,
                               VXLAN_GPE_ENCAP_ERROR_ENCAPSULATED,
                               pkts_encapsulated);

  /* Increment any remaining batched stats */
  if (stats_n_packets) {
      vlib_increment_combined_counter
        (im->combined_sw_if_counters + VNET_INTERFACE_COUNTER_TX,
         cpu_index, stats_sw_if_index, stats_n_packets, stats_n_bytes);
      /* ... */
  }

  return from_frame->n_vectors;
}
VLIB_REGISTER_NODE (vxlan_gpe_encap_node) = {
  .function = vxlan_gpe_encap,
  .name = "vxlan-gpe-encap",
  .vector_size = sizeof (u32),
  .format_trace = format_vxlan_gpe_encap_trace,
  /* ... */
};
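The fixed-header copy in the listing moves a 60-octet encap template with seven u64 stores (offsets 0 through 6, 56 octets) plus a single u32 store for the trailing 4 octets, instead of a generic memcpy. A self-contained sketch of the same trick; the helper name is hypothetical, and both pointers are assumed 8-byte aligned, as the rewrite and buffer data are in practice:

    /* Sketch: mirror of the foreach_fixed_header_offset copy above. */
    static inline void
    copy_fixed_header (void * dst_hdr, void * src_rewrite)
    {
      u64 * copy_dst = (u64 *) dst_hdr;
      u64 * copy_src = (u64 *) src_rewrite;
      int offs;

      for (offs = 0; offs < 7; offs++)  /* offsets 0..6: 56 octets */
        copy_dst[offs] = copy_src[offs];

      /* trailing 4 octets */
      ((u32 *) &copy_dst[7])[0] = ((u32 *) &copy_src[7])[0];
    }

The macro version simply unrolls this loop at compile time, which is why the offsets are enumerated rather than iterated.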
References:

void vlib_put_next_frame(vlib_main_t *vm, vlib_node_runtime_t *r, u32 next_index, u32 n_vectors_left)
#define foreach_vxlan_gpe_encap_error
vnet_interface_main_t interface_main
always_inline void * vlib_buffer_get_current(vlib_buffer_t *b)
Get pointer to current data to process.
static uword vxlan_gpe_encap(vlib_main_t *vm, vlib_node_runtime_t *node, vlib_frame_t *from_frame)
vlib_combined_counter_main_t * combined_sw_if_counters
always_inline void vlib_increment_combined_counter(vlib_combined_counter_main_t *cm, u32 cpu_index, u32 index, u32 packet_increment, u32 byte_increment)
u8 * format_vxlan_gpe_encap_trace(u8 *s, va_list *args)
always_inline uword vlib_buffer_length_in_chain(vlib_main_t *vm, vlib_buffer_t *b)
Get length in bytes of the buffer chain.
always_inline void * vlib_frame_vector_args(vlib_frame_t *f)
always_inline u16 ip_csum_fold(ip_csum_t c)
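old_l0 and old_l1 in the listing feed this fold: after the rewrite is prepended, the outer IPv4 length must be patched, and the checksum is adjusted incrementally rather than recomputed. A sketch of the usual fixup, assuming the rewrite template was built with a zero length field (hence old_l0 == 0):

    ip_csum_t sum0 = ip0->checksum;
    u16 new_l0;

    new_l0 = clib_host_to_net_u16 (vlib_buffer_length_in_chain (vm, b0));
    sum0 = ip_csum_update (sum0, old_l0, new_l0,
                           ip4_header_t, length /* changed member */);
    ip0->checksum = ip_csum_fold (sum0);
    ip0->length = new_l0;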
#define pool_elt_at_index(p, i)
uword os_get_cpu_number(void)
vxlan_gpe_main_t vxlan_gpe_main
always_inline void vlib_node_increment_counter(vlib_main_t *vm, u32 node_index, u32 counter_index, u64 increment)
#define vlib_validate_buffer_enqueue_x2(vm, node, next_index, to_next, n_left_to_next, bi0, bi1, next0, next1)
#define vlib_validate_buffer_enqueue_x1(vm, node, next_index, to_next, n_left_to_next, bi0, next0)
#define vlib_get_next_frame(vm, node, next_index, vectors, n_vectors_left)
#define CLIB_PREFETCH(addr, size, type)
#define clib_memcpy(a, b, c)
always_inline vnet_hw_interface_t * vnet_get_sup_hw_interface(vnet_main_t *vnm, u32 sw_if_index)
#define VLIB_BUFFER_IS_TRACED
vxlan_gpe_tunnel_t * tunnels
#define vec_len(v)
Number of elements in vector (rvalue-only, NULL tolerant)
always_inline void vlib_buffer_advance(vlib_buffer_t *b, word l)
Advance current data pointer by the supplied (signed!) amount.
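In the encap path this is called with a negative displacement: backing the current-data pointer up by the rewrite length exposes room for the precomputed header. A sketch (t0 is the tunnel whose rewrite vector holds the encap template):

    /* expose vec_len(t0->rewrite) octets in front of the payload ... */
    vlib_buffer_advance (b0, -(word) vec_len (t0->rewrite));

    /* ... then the template is copied to the new current-data pointer */
    ip0 = vlib_buffer_get_current (b0);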
#define foreach_fixed_header_offset
#define ip_csum_update(sum, old, new, type, field)
static char * vxlan_gpe_encap_error_strings[]
always_inline void * vlib_add_trace(vlib_main_t *vm, vlib_node_runtime_t *r, vlib_buffer_t *b, u32 n_data_bytes)
#define vlib_prefetch_buffer_header(b, type)
Prefetch buffer metadata.
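The dual loop hides memory latency by prefetching the next pair of buffers while the current pair is rewritten. A sketch of the standard warm-up (p2/p3 are the look-ahead buffers; two cache lines of packet data are assumed to cover the bytes the rewrite touches):

    {
      vlib_buffer_t * p2, * p3;

      p2 = vlib_get_buffer (vm, from[2]);
      p3 = vlib_get_buffer (vm, from[3]);

      vlib_prefetch_buffer_header (p2, LOAD);
      vlib_prefetch_buffer_header (p3, LOAD);

      CLIB_PREFETCH (p2->data, 2*CLIB_CACHE_LINE_BYTES, LOAD);
      CLIB_PREFETCH (p3->data, 2*CLIB_CACHE_LINE_BYTES, LOAD);
    }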
#define VLIB_REGISTER_NODE(x,...)
vlib_node_registration_t vxlan_gpe_encap_node
(constructor) VLIB_REGISTER_NODE (vxlan_gpe_encap_node)
#define CLIB_CACHE_LINE_BYTES
u32 flags
buffer flags: VLIB_BUFFER_IS_TRACED: trace this buffer.
uword runtime_data[(128 - 1*sizeof(vlib_node_function_t *) - 1*sizeof(vlib_error_t *) - 11*sizeof(u32) - 5*sizeof(u16)) / sizeof(uword)]
always_inline vlib_buffer_t * vlib_get_buffer(vlib_main_t *vm, u32 buffer_index)
Translate buffer index into buffer pointer.
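Putting the pieces together, the single-packet loop's index handling follows the standard VPP shape; a reduced sketch with the encap work elided:

    u32 bi0 = from[0];
    vlib_buffer_t * b0;

    /* move the buffer index from the input frame to the output frame */
    to_next[0] = bi0;
    from += 1;
    to_next += 1;
    n_left_from -= 1;
    n_left_to_next -= 1;

    b0 = vlib_get_buffer (vm, bi0);   /* index -> buffer pointer */
    /* ... prepend the rewrite, fix lengths and checksums ... */

    /* enqueue; re-splits the frame if next0 disagrees with next_index */
    vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
                                     to_next, n_left_to_next,
                                     bi0, next0);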