#define foreach_gtpu_encap_error \
_(ENCAPSULATED, "good packets encapsulated")

#define _(sym,string) string,

#define _(sym,str) GTPU_ENCAP_ERROR_##sym,

#define foreach_gtpu_encap_next \
_(DROP, "error-drop") \
_(IP4_LOOKUP, "ip4-lookup") \
_(IP6_LOOKUP, "ip6-lookup")

#define foreach_fixed_header4_offset \
    _(0) _(1) _(2) _(3)

#define foreach_fixed_header6_offset \
    _(0) _(1) _(2) _(3) _(4) _(5) _(6)
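/* These offset lists drive the u64-at-a-time copy of the precomputed
 * rewrite: the ip4 rewrite (outer IPv4 + UDP + GTP-U, 36 octets) takes
 * four u64 stores plus one trailing u32, while the ip6 rewrite
 * (56 octets) takes seven u64 stores. */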
u32 pkts_encapsulated = 0;
u16 old_l0 = 0, old_l1 = 0, old_l2 = 0, old_l3 = 0;
u32 stats_sw_if_index, stats_n_packets, stats_n_bytes;
u32 sw_if_index0 = 0, sw_if_index1 = 0, sw_if_index2 = 0, sw_if_index3 = 0;
u32 next0 = 0, next1 = 0, next2 = 0, next3 = 0;
gtpu_tunnel_t * t0 = NULL, * t1 = NULL, * t2 = NULL, * t3 = NULL;

stats_sw_if_index = node->runtime_data[0];
stats_n_packets = stats_n_bytes = 0;
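/* stats_sw_if_index for the TX counter batch is carried across dispatches
 * in node->runtime_data[0]; it is written back at the end of the function. */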
vlib_get_next_frame (vm, node, next_index,
                     to_next, n_left_to_next);
u32 bi0, bi1, bi2, bi3;
u32 flow_hash0, flow_hash1, flow_hash2, flow_hash3;
u32 len0, len1, len2, len3;
u64 * copy_src0, * copy_dst0;
u64 * copy_src1, * copy_dst1;
u64 * copy_src2, * copy_dst2;
u64 * copy_src3, * copy_dst3;
u32 * copy_src_last0, * copy_dst_last0;
u32 * copy_src_last1, * copy_dst_last1;
u32 * copy_src_last2, * copy_dst_last2;
u32 * copy_src_last3, * copy_dst_last3;
u16 new_l0, new_l1, new_l2, new_l3;
next0 = t0->next_dpo.dpoi_next_node;
next1 = t1->next_dpo.dpoi_next_node;
next2 = t2->next_dpo.dpoi_next_node;
next3 = t3->next_dpo.dpoi_next_node;
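/* The next node for each packet comes from the tunnel's stacked DPO
 * (e.g. ip4-lookup, ip6-lookup, or error-drop), so next0..next3 may
 * differ within a single batch of four. */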
copy_dst0 = (u64 *) ip4_0;
copy_src0 = (u64 *) t0->rewrite;
copy_dst1 = (u64 *) ip4_1;
copy_src1 = (u64 *) t1->rewrite;
copy_dst2 = (u64 *) ip4_2;
copy_src2 = (u64 *) t2->rewrite;
copy_dst3 = (u64 *) ip4_3;
copy_src3 = (u64 *) t3->rewrite;
#define _(offs) copy_dst0[offs] = copy_src0[offs];
#define _(offs) copy_dst1[offs] = copy_src1[offs];
#define _(offs) copy_dst2[offs] = copy_src2[offs];
#define _(offs) copy_dst3[offs] = copy_src3[offs];
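/* Each _(offs) definition above is expanded through
 * foreach_fixed_header4_offset, copying the first 32 octets of the
 * rewrite eight bytes at a time; the remaining 4 octets of the 36-octet
 * ip4 rewrite are copied as a single u32 below. */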
copy_dst_last0 = (u32 *)(&copy_dst0[4]);
copy_src_last0 = (u32 *)(&copy_src0[4]);
copy_dst_last0[0] = copy_src_last0[0];
copy_dst_last1 = (u32 *)(&copy_dst1[4]);
copy_src_last1 = (u32 *)(&copy_src1[4]);
copy_dst_last1[0] = copy_src_last1[0];
copy_dst_last2 = (u32 *)(&copy_dst2[4]);
copy_src_last2 = (u32 *)(&copy_src2[4]);
copy_dst_last2[0] = copy_src_last2[0];
copy_dst_last3 = (u32 *)(&copy_dst3[4]);
copy_src_last3 = (u32 *)(&copy_src3[4]);
copy_dst_last3[0] = copy_src_last3[0];
- sizeof (*ip4_0) - sizeof(*udp0)
- sizeof (*ip4_1) - sizeof(*udp1)
- sizeof (*ip4_2) - sizeof(*udp2)
- sizeof (*ip4_3) - sizeof(*udp3)
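/* Subtracting the outer IPv4 and UDP header sizes from the buffer chain
 * length is part of recomputing the GTP-U length field, which covers only
 * the payload that follows the fixed GTP-U header. */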
copy_dst0 = (u64 *) ip6_0;
copy_src0 = (u64 *) t0->rewrite;
copy_dst1 = (u64 *) ip6_1;
copy_src1 = (u64 *) t1->rewrite;
copy_dst2 = (u64 *) ip6_2;
copy_src2 = (u64 *) t2->rewrite;
copy_dst3 = (u64 *) ip6_3;
copy_src3 = (u64 *) t3->rewrite;
#define _(offs) copy_dst0[offs] = copy_src0[offs];
#define _(offs) copy_dst1[offs] = copy_src1[offs];
#define _(offs) copy_dst2[offs] = copy_src2[offs];
#define _(offs) copy_dst3[offs] = copy_src3[offs];
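/* The 56-octet ip6 rewrite is a whole number of u64s, so
 * foreach_fixed_header6_offset covers it entirely and no trailing u32
 * copy is needed. */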
- sizeof (*ip6_0) - sizeof(*udp0)
- sizeof (*ip6_1) - sizeof(*udp1)
- sizeof (*ip6_2) - sizeof(*udp2)
- sizeof (*ip6_3) - sizeof(*udp3)
pkts_encapsulated += 4;
stats_n_packets += 4;
stats_n_bytes += len0 + len1 + len2 + len3;
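/* TX counters are batched per sw_if_index: the counts are added
 * optimistically above and rolled back and flushed below whenever one of
 * the four packets left on a different interface than the current batch. */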
(sw_if_index1 != stats_sw_if_index) ||
(sw_if_index2 != stats_sw_if_index) ||
(sw_if_index3 != stats_sw_if_index)))

stats_n_packets -= 4;
stats_n_bytes -= len0 + len1 + len2 + len3;
if ((sw_if_index0 == sw_if_index1) &&
    (sw_if_index1 == sw_if_index2) &&
    (sw_if_index2 == sw_if_index3))

stats_n_packets, stats_n_bytes);
stats_sw_if_index = sw_if_index0;
stats_n_packets = 4;
stats_n_bytes = len0 + len1 + len2 + len3;
tr->tteid = t1->tteid;

tr->tteid = t2->tteid;

tr->tteid = t3->tteid;

vlib_validate_buffer_enqueue_x4 (vm, node, next_index,
                                 to_next, n_left_to_next,
                                 bi0, bi1, bi2, bi3,
                                 next0, next1, next2, next3);
u64 * copy_src0, * copy_dst0;
u32 * copy_src_last0, * copy_dst_last0;
copy_dst0 = (u64 *) ip4_0;
copy_src0 = (u64 *) t0->rewrite;

#define _(offs) copy_dst0[offs] = copy_src0[offs];

copy_dst_last0 = (u32 *)(&copy_dst0[4]);
copy_src_last0 = (u32 *)(&copy_src0[4]);
copy_dst_last0[0] = copy_src_last0[0];
- sizeof (*ip4_0) - sizeof(*udp0)
copy_dst0 = (u64 *) ip6_0;
copy_src0 = (u64 *) t0->rewrite;

#define _(offs) copy_dst0[offs] = copy_src0[offs];
- sizeof (*ip6_0) - sizeof(*udp0)
pkts_encapsulated++;
stats_n_packets += 1;
stats_n_bytes += len0;

stats_n_packets -= 1;
stats_n_bytes -= len0;

stats_n_packets, stats_n_bytes);
stats_n_packets = 1;
stats_n_bytes = len0;
stats_sw_if_index = sw_if_index0;

vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
                                 to_next, n_left_to_next,
                                 bi0, next0);

vlib_node_increment_counter (vm, node->node_index,
                             GTPU_ENCAP_ERROR_ENCAPSULATED,
                             pkts_encapsulated);

thread_index, stats_sw_if_index, stats_n_packets, stats_n_bytes);
node->runtime_data[0] = stats_sw_if_index;
.name = "gtpu4-encap",
.vector_size = sizeof (u32),

#define _(s,n) [GTPU_ENCAP_NEXT_##s] = n,

.name = "gtpu6-encap",
.vector_size = sizeof (u32),

#define _(s,n) [GTPU_ENCAP_NEXT_##s] = n,
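/* For reference, the _(s,n) definition above, applied to
 * foreach_gtpu_encap_next, fills the node's .next_nodes table roughly as:
 *
 *   .next_nodes = {
 *     [GTPU_ENCAP_NEXT_DROP] = "error-drop",
 *     [GTPU_ENCAP_NEXT_IP4_LOOKUP] = "ip4-lookup",
 *     [GTPU_ENCAP_NEXT_IP6_LOOKUP] = "ip6-lookup",
 *   },
 */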