#define foreach_esp_encrypt_next                \
_(DROP4, "ip4-drop")                            \
_(DROP6, "ip6-drop")                            \
_(PENDING, "pending")                           \
_(HANDOFF4, "handoff4")                         \
_(HANDOFF6, "handoff6")                         \
_(INTERFACE_OUTPUT, "interface-output")

#define _(v, s) ESP_ENCRYPT_NEXT_##v,
typedef enum
{
  foreach_esp_encrypt_next
#undef _
    ESP_ENCRYPT_N_NEXT,
} esp_encrypt_next_t;

#define foreach_esp_encrypt_error                               \
 _(RX_PKTS, "ESP pkts received")                                \
 _(POST_RX_PKTS, "ESP-post pkts received")                      \
 _(SEQ_CYCLED, "sequence number cycled (packet dropped)")       \
 _(CRYPTO_ENGINE_ERROR, "crypto engine error (packet dropped)") \
 _(CRYPTO_QUEUE_FULL, "crypto queue full (packet dropped)")     \
 _(NO_BUFFERS, "no buffers (packet dropped)")

typedef enum
{
#define _(sym,str) ESP_ENCRYPT_ERROR_##sym,
  foreach_esp_encrypt_error
#undef _
    ESP_ENCRYPT_N_ERROR,
} esp_encrypt_error_t;

static char *esp_encrypt_error_strings[] = {
#define _(sym,string) string,
  foreach_esp_encrypt_error
#undef _
};

/* packet trace format function */
static u8 *
format_esp_encrypt_trace (u8 * s, va_list * args)
{
  /* ... */
  s = format (s, "esp: sa-index %d spi %u (0x%08x) seq %u sa-seq-hi %u"
              " crypto %U integrity %U%s", /* ... */
              t->udp_encap ? " udp-encap-enabled" : "");
  return s;
}
static_always_inline u8 *
esp_add_footer_and_icv (vlib_main_t * vm, vlib_buffer_t ** last,
                        u8 block_size, u8 icv_sz,
                        u16 * next, vlib_node_runtime_t * node,
                        u16 buffer_data_size, uword total_len)
{
  static const u8 pad_data[ESP_MAX_BLOCK_SIZE] = {
    0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
    0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x00,
  };

  u16 min_length = total_len + sizeof (esp_footer_t);
  u16 new_length = round_pow2 (min_length, block_size);
  u8 pad_bytes = new_length - min_length;
  u16 tail_sz = sizeof (esp_footer_t) + pad_bytes + icv_sz;
  /* ... */

  if (last[0]->current_length + tail_sz > buffer_data_size)
    {
      /* no trailer space left in the last buffer: allocate and chain one */
      u32 tmp_bi = 0;
      if (vlib_buffer_alloc (vm, &tmp_bi, 1) != 1)
        return 0;

      vlib_buffer_t *tmp = vlib_get_buffer (vm, tmp_bi);
      last[0]->next_buffer = tmp_bi;
      last[0]->flags |= VLIB_BUFFER_NEXT_PRESENT;
      /* ... */
    }
  /* ... write pad_data and the footer, return &f->next_header ... */
}
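/*
 * Illustrative sketch (not part of esp_encrypt.c): the trailer arithmetic
 * used by esp_add_footer_and_icv().  The plaintext plus the 2-byte ESP
 * footer (pad length + next header) is rounded up to the cipher block
 * size; round_pow2() is reproduced here assuming a power-of-2 block size.
 * Standalone, standard C.
 */
#include <stdint.h>
#include <stdio.h>

static uintptr_t
round_pow2 (uintptr_t x, uintptr_t pow2)
{
  return (x + pow2 - 1) & ~(pow2 - 1);
}

int
main (void)
{
  uint16_t total_len = 21;      /* plaintext bytes to encrypt */
  uint8_t block_size = 16;      /* e.g. AES-CBC */
  uint8_t icv_sz = 16;          /* e.g. 128-bit ICV */

  uint16_t min_length = total_len + 2;  /* + sizeof (esp_footer_t) */
  uint16_t new_length = round_pow2 (min_length, block_size);
  uint8_t pad_bytes = new_length - min_length;
  uint16_t tail_sz = 2 + pad_bytes + icv_sz;

  /* 21 + 2 = 23 -> rounded to 32 -> 9 pad bytes, 27-byte tail */
  printf ("pad=%u tail=%u\n", (unsigned) pad_bytes, (unsigned) tail_sz);
  return 0;
}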
static_always_inline void
esp_update_ip4_hdr (ip4_header_t * ip4, u16 len, int is_transport, int is_udp)
{
  ip_csum_t sum;
  u16 old_len;

  len = clib_net_to_host_u16 (len);
  old_len = ip4->length;

  if (is_transport)
    {
      u8 prot = is_udp ? IP_PROTOCOL_UDP : IP_PROTOCOL_IPSEC_ESP;

      sum = ip_csum_update (ip4->checksum, ip4->protocol,
                            prot, ip4_header_t, protocol);
      ip4->protocol = prot;

      sum = ip_csum_update (sum, old_len, len, ip4_header_t, length);
    }
  else
    sum = ip_csum_update (ip4->checksum, old_len, len, ip4_header_t, length);

  ip4->length = len;
  ip4->checksum = ip_csum_fold (sum);
}

static_always_inline void
esp_fill_udp_hdr (ipsec_sa_t * sa, udp_header_t * udp, u16 len)
{
  clib_memcpy_fast (udp, &sa->udp_hdr, sizeof (udp_header_t));
  udp->length = clib_net_to_host_u16 (len);
}
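/*
 * Illustrative sketch (not part of esp_encrypt.c): the incremental checksum
 * update that ip_csum_update()/ip_csum_fold() perform when
 * esp_update_ip4_hdr() rewrites the protocol and length fields.  Per
 * RFC 1624: HC' = ~(~HC + ~m + m') in one's-complement arithmetic, which
 * avoids re-summing the whole header.  Standalone, standard C.
 */
#include <stdint.h>
#include <stdio.h>

static uint16_t
csum_update16 (uint16_t old_csum, uint16_t old_field, uint16_t new_field)
{
  uint32_t sum = (uint16_t) ~old_csum;
  sum += (uint16_t) ~old_field;
  sum += new_field;
  /* fold carries back into 16 bits */
  while (sum >> 16)
    sum = (sum & 0xffff) + (sum >> 16);
  return (uint16_t) ~sum;
}

int
main (void)
{
  /* rewrite total-length 0x0054 -> 0x0078 without re-summing the header */
  uint16_t csum = 0xb1e6;
  csum = csum_update16 (csum, 0x0054, 0x0078);
  printf ("new checksum: 0x%04x\n", csum);
  return 0;
}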
static_always_inline u8
ext_hdr_is_pre_esp (u8 nexthdr)
{
#ifdef CLIB_HAVE_VEC128
  static const u8x16 ext_hdr_types = {
    IP_PROTOCOL_IP6_HOP_BY_HOP_OPTIONS,
    IP_PROTOCOL_IPV6_ROUTE,
    IP_PROTOCOL_IPV6_FRAGMENTATION,
  };

  return !u8x16_is_all_zero (ext_hdr_types == u8x16_splat (nexthdr));
#else
  /* true iff nexthdr is one of the extension headers that precede ESP */
  return (nexthdr == IP_PROTOCOL_IP6_HOP_BY_HOP_OPTIONS ||
          nexthdr == IP_PROTOCOL_IPV6_ROUTE ||
          nexthdr == IP_PROTOCOL_IPV6_FRAGMENTATION);
#endif
}
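/*
 * Illustrative sketch (not part of esp_encrypt.c): both branches above are
 * a 3-way set-membership test; the CLIB_HAVE_VEC128 variant compares a
 * splatted byte against a constant vector and checks whether any lane
 * matched.  A portable loop equivalent, using the IANA protocol numbers
 * (0, 43, 44) those macros stand for.  Standalone, standard C.
 */
#include <stdint.h>
#include <assert.h>

static uint8_t
is_pre_esp_ext_hdr (uint8_t nexthdr)
{
  static const uint8_t pre_esp[] = { 0 /* hop-by-hop */, 43 /* routing */,
                                     44 /* fragment */ };
  for (unsigned i = 0; i < sizeof (pre_esp); i++)
    if (nexthdr == pre_esp[i])
      return 1;
  return 0;
}

int
main (void)
{
  assert (is_pre_esp_ext_hdr (0) && is_pre_esp_ext_hdr (43));
  assert (!is_pre_esp_ext_hdr (6 /* TCP */));
  return 0;
}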
static_always_inline u8
esp_get_ip6_hdr_len (ip6_header_t * ip6, ip6_ext_header_t ** ext_hdr)
{
  u8 len = sizeof (ip6[0]);
  ip6_ext_header_t *p;

  /* if the next header is not an extension header, ESP follows the
     fixed header directly */
  if (ext_hdr_is_pre_esp (ip6->protocol) == 0)
    {
      *ext_hdr = NULL;
      return len;
    }

  p = (void *) (ip6 + 1);
  len += ip6_ext_header_len (p);

  /* ... keep walking with ip6_ext_next_header() while the next header
     still precedes ESP, accumulating each header's length; the last one
     visited is returned in *ext_hdr ... */

  return len;
}
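/*
 * Illustrative sketch (not part of esp_encrypt.c): walking an IPv6
 * extension-header chain the way esp_get_ip6_hdr_len() does.  Each
 * extension header starts with (next_hdr, hdr_ext_len), where the byte
 * length is (hdr_ext_len + 1) * 8.  Standalone C with a toy header type.
 */
#include <stdint.h>
#include <stdio.h>

typedef struct
{
  uint8_t next_hdr;
  uint8_t hdr_ext_len;          /* 8-octet units, not counting the first 8 */
  uint8_t data[6];
} ext_hdr_t;

int
main (void)
{
  /* hop-by-hop (8 bytes) -> routing (16 bytes) -> ESP */
  uint8_t pkt[24] = { 0 };
  ext_hdr_t *hbh = (ext_hdr_t *) pkt;
  hbh->next_hdr = 43;           /* routing header follows */
  hbh->hdr_ext_len = 0;         /* (0 + 1) * 8 = 8 bytes */
  ext_hdr_t *rt = (ext_hdr_t *) (pkt + 8);
  rt->next_hdr = 50;            /* ESP follows */
  rt->hdr_ext_len = 1;          /* (1 + 1) * 8 = 16 bytes */

  unsigned len = 40;            /* fixed IPv6 header */
  ext_hdr_t *p = hbh;
  for (;;)
    {
      len += (p->hdr_ext_len + 1) * 8;
      if (p->next_hdr == 50)    /* ESP follows this header */
        break;
      p = (ext_hdr_t *) ((uint8_t *) p + (p->hdr_ext_len + 1) * 8);
    }
  printf ("ESP starts at byte %u\n", len);      /* 40 + 8 + 16 = 64 */
  return 0;
}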
static_always_inline void
esp_process_ops (vlib_main_t * vm, vlib_node_runtime_t * node,
                 vnet_crypto_op_t * ops, vlib_buffer_t * b[], u16 * nexts,
                 u16 drop_next)
{
  u32 n_fail, n_ops = vec_len (ops);
  vnet_crypto_op_t *op = ops;

  if (n_ops == 0)
    return;

  n_fail = n_ops - vnet_crypto_process_ops (vm, op, n_ops);

  while (n_fail)
    {
      ASSERT (op - ops < n_ops);

      if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
        {
          u32 bi = op->user_data;
          b[bi]->error = node->errors[ESP_ENCRYPT_ERROR_CRYPTO_ENGINE_ERROR];
          nexts[bi] = drop_next;
          n_fail--;
        }
      op++;
    }
}

/* identical to esp_process_ops(), except the chunk vector is handed to
   vnet_crypto_process_chained_ops() so engines can gather/scatter across
   chained buffers */
static_always_inline void
esp_process_chained_ops (vlib_main_t * vm, vlib_node_runtime_t * node,
                         vnet_crypto_op_t * ops, vlib_buffer_t * b[],
                         u16 * nexts, vnet_crypto_op_chunk_t * chunks,
                         u16 drop_next)
{
  u32 n_fail, n_ops = vec_len (ops);
  vnet_crypto_op_t *op = ops;

  if (n_ops == 0)
    return;

  n_fail = n_ops - vnet_crypto_process_chained_ops (vm, op, chunks, n_ops);

  while (n_fail)
    {
      ASSERT (op - ops < n_ops);

      if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
        {
          u32 bi = op->user_data;
          b[bi]->error = node->errors[ESP_ENCRYPT_ERROR_CRYPTO_ENGINE_ERROR];
          nexts[bi] = drop_next;
          n_fail--;
        }
      op++;
    }
}
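/*
 * Illustrative sketch (not part of esp_encrypt.c): how op->user_data maps a
 * failed crypto op back to its buffer slot.  When an op is queued,
 * user_data is set to the buffer's index within the frame (b - bufs), so
 * the drop loop above reroutes exactly the right packet.  Standalone C
 * with toy types.
 */
#include <stdint.h>
#include <stdio.h>

#define N_OPS 4

typedef struct
{
  uint32_t user_data;           /* frame-relative buffer index */
  int status;                   /* 0 = completed */
} toy_op_t;

int
main (void)
{
  uint16_t nexts[N_OPS] = { 1, 1, 1, 1 };       /* all forwarded by default */
  toy_op_t ops[N_OPS] = { { 0, 0 }, { 1, -1 }, { 2, 0 }, { 3, 0 } };
  uint16_t drop_next = 0;

  for (unsigned i = 0; i < N_OPS; i++)
    if (ops[i].status != 0)
      nexts[ops[i].user_data] = drop_next;      /* reroute that buffer only */

  printf ("next[1]=%u (dropped)\n", nexts[1]);
  return 0;
}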
typedef struct
{
  u32 salt;
  u64 iv;
} __clib_packed esp_gcm_nonce_t;

STATIC_ASSERT_SIZEOF (esp_gcm_nonce_t, 12);
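/*
 * Illustrative sketch (not part of esp_encrypt.c): the 12-byte AES-GCM
 * nonce for ESP (RFC 4106) is the 4-byte salt from key material followed
 * by the 8-byte per-packet IV, which is why esp_gcm_nonce_t is packed and
 * STATIC_ASSERT_SIZEOF'd to 12.  Standalone, standard C.
 */
#include <stdint.h>
#include <stdio.h>

#pragma pack(push, 1)
typedef struct
{
  uint32_t salt;
  uint64_t iv;
} gcm_nonce_t;
#pragma pack(pop)

int
main (void)
{
  gcm_nonce_t nonce;
  uint64_t counter = 7;         /* per-SA monotonically increasing IV */

  nonce.salt = 0xdeadbeef;      /* fixed for the SA's lifetime */
  nonce.iv = counter;           /* VPP stores it big-endian on the wire */

  printf ("nonce is %zu bytes, salt 0x%08x\n",
          sizeof (nonce), nonce.salt);  /* 12 */
  return 0;
}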
static_always_inline u32
esp_encrypt_chain_crypto (vlib_main_t * vm, ipsec_per_thread_data_t * ptd,
                          ipsec_sa_t * sa0, vlib_buffer_t * b,
                          vlib_buffer_t * lb, u8 icv_sz, u8 * start,
                          u32 start_len, u16 * n_ch)
{
  vnet_crypto_op_chunk_t *ch;
  vlib_buffer_t *cb = b;
  u32 n_chunks = 1;
  u32 total_len;

  vec_add2 (ptd->chunks, ch, 1);
  total_len = ch->len = start_len;
  ch->src = ch->dst = start;
  cb = vlib_get_buffer (vm, cb->next_buffer);

  while (1)
    {
      vec_add2 (ptd->chunks, ch, 1);
      n_chunks += 1;
      ch->src = ch->dst = vlib_buffer_get_current (cb);
      if (lb == cb)
        ch->len = cb->current_length - icv_sz;
      else
        ch->len = cb->current_length;
      total_len += ch->len;

      if (!(cb->flags & VLIB_BUFFER_NEXT_PRESENT))
        break;

      cb = vlib_get_buffer (vm, cb->next_buffer);
    }

  if (n_ch)
    *n_ch = n_chunks;

  return total_len;
}
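/*
 * Illustrative sketch (not part of esp_encrypt.c): flattening a chained
 * buffer into the (src, len) chunk list that esp_encrypt_chain_crypto()
 * builds for vnet_crypto_process_chained_ops().  The last chunk stops
 * short of the ICV so the tag slot is written, not ciphered.  Standalone C
 * with a toy buffer type standing in for vlib_buffer_t.
 */
#include <stdint.h>
#include <stdio.h>

typedef struct toy_buf
{
  uint8_t *data;
  uint16_t len;
  struct toy_buf *next;         /* stands in for next_buffer + flags */
} toy_buf_t;

typedef struct
{
  uint8_t *src;
  uint32_t len;
} toy_chunk_t;

static uint32_t
build_chunks (toy_buf_t * b, toy_buf_t * last, uint8_t icv_sz,
              toy_chunk_t * ch, unsigned *n_ch)
{
  uint32_t total = 0;
  unsigned n = 0;

  for (; b; b = b->next, n++)
    {
      ch[n].src = b->data;
      ch[n].len = (b == last) ? b->len - icv_sz : b->len;
      total += ch[n].len;
    }
  *n_ch = n;
  return total;
}

int
main (void)
{
  uint8_t d0[128], d1[64];
  toy_buf_t b1 = { d1, 64, 0 }, b0 = { d0, 128, &b1 };
  toy_chunk_t ch[4];
  unsigned n;

  uint32_t total = build_chunks (&b0, &b1, 16, ch, &n);
  printf ("%u chunks, %u bytes to cipher\n", n, total); /* 2, 176 */
  return 0;
}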
static_always_inline u32
esp_encrypt_chain_integ (vlib_main_t * vm, ipsec_per_thread_data_t * ptd,
                         ipsec_sa_t * sa0, vlib_buffer_t * b,
                         vlib_buffer_t * lb, u8 icv_sz, u8 * start,
                         u32 start_len, u8 * digest, u16 * n_ch)
{
  vnet_crypto_op_chunk_t *ch;
  vlib_buffer_t *cb = b;
  u32 n_chunks = 1;
  u32 total_len;

  vec_add2 (ptd->chunks, ch, 1);
  total_len = ch->len = start_len;
  ch->src = start;
  cb = vlib_get_buffer (vm, cb->next_buffer);

  while (1)
    {
      vec_add2 (ptd->chunks, ch, 1);
      n_chunks += 1;
      ch->src = vlib_buffer_get_current (cb);
      if (lb == cb)
        {
          total_len += ch->len = cb->current_length - icv_sz;
          if (ipsec_sa_is_set_USE_ESN (sa0))
            {
              u32 seq_hi = clib_net_to_host_u32 (sa0->seq_hi);
              clib_memcpy_fast (digest, &seq_hi, sizeof (seq_hi));
              ch->len += sizeof (seq_hi);
              total_len += sizeof (seq_hi);
            }
        }
      else
        total_len += ch->len = cb->current_length;

      if (!(cb->flags & VLIB_BUFFER_NEXT_PRESENT))
        break;

      cb = vlib_get_buffer (vm, cb->next_buffer);
    }

  if (n_ch)
    *n_ch = n_chunks;

  return total_len;
}
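/*
 * Illustrative sketch (not part of esp_encrypt.c): with extended sequence
 * numbers (RFC 4303, appendix A), only the low 32 bits of the sequence
 * number travel in the ESP header, but the ICV is computed as if the high
 * 32 bits (seq_hi, in network order) were appended after the payload,
 * which is what the USE_ESN branch above arranges.  Standalone C (uses the
 * GCC/Clang __builtin_bswap32 VPP also assumes).
 */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

int
main (void)
{
  uint64_t esn = 0x00000002fffffffeULL;         /* 64-bit ESN */
  uint32_t seq_lo = (uint32_t) esn;             /* sent in the ESP header */
  uint32_t seq_hi = (uint32_t) (esn >> 32);     /* authenticated only */

  uint8_t mac_input[60] = { 0 };        /* ESP hdr + IV + ciphertext ... */
  size_t n = 56;                        /* ... already laid out */

  /* seq_hi is appended for the MAC only; it is never transmitted */
  uint32_t seq_hi_net = __builtin_bswap32 (seq_hi);
  memcpy (mac_input + n, &seq_hi_net, sizeof (seq_hi_net));
  n += sizeof (seq_hi_net);

  printf ("header seq=0x%08x, MAC input %zu bytes incl. seq_hi=0x%08x\n",
          seq_lo, n, seq_hi);
  return 0;
}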
static_always_inline void
esp_prepare_sync_op (vlib_main_t * vm, ipsec_per_thread_data_t * ptd,
                     vnet_crypto_op_t ** crypto_ops,
                     vnet_crypto_op_t ** integ_ops, ipsec_sa_t * sa0,
                     u8 * payload, u16 payload_len, u8 iv_sz, u8 icv_sz,
                     vlib_buffer_t ** bufs, vlib_buffer_t ** b,
                     vlib_buffer_t * lb, u32 hdr_len, esp_header_t * esp,
                     esp_gcm_nonce_t * nonce)
{
  if (sa0->crypto_enc_op_id)
    {
      vnet_crypto_op_t *op;
      vec_add2_aligned (crypto_ops[0], op, 1, CLIB_CACHE_LINE_BYTES);
      vnet_crypto_op_init (op, sa0->crypto_enc_op_id);

      op->src = op->dst = payload;
      op->key_index = sa0->crypto_key_index;
      op->len = payload_len - icv_sz;
      op->user_data = b - bufs;

      if (ipsec_sa_is_set_IS_AEAD (sa0))
        {
          /* constructed aad is in a scratch space in front of the
             ip header */
          op->aad = payload - hdr_len - sizeof (esp_aead_t);
          op->aad_len = esp_aad_fill (op->aad, esp, sa0);

          op->tag = payload + op->len;
          op->tag_len = 16;

          u64 *iv = (u64 *) (payload - iv_sz);
          nonce->salt = sa0->salt;
          nonce->iv = *iv = clib_host_to_net_u64 (sa0->gcm_iv_counter++);
          op->iv = (u8 *) nonce;
        }
      else
        {
          op->iv = payload - iv_sz;
          op->flags = VNET_CRYPTO_OP_FLAG_INIT_IV;
        }

      if (lb != b[0])
        {
          /* buffer is chained */
          op->flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
          op->chunk_index = vec_len (ptd->chunks);
          op->tag = vlib_buffer_get_tail (lb) - icv_sz;
          esp_encrypt_chain_crypto (vm, ptd, sa0, b[0], lb, icv_sz, payload,
                                    payload_len, &op->n_chunks);
        }
    }

  if (sa0->integ_op_id)
    {
      vnet_crypto_op_t *op;
      vec_add2_aligned (integ_ops[0], op, 1, CLIB_CACHE_LINE_BYTES);
      vnet_crypto_op_init (op, sa0->integ_op_id);
      op->src = payload - iv_sz - sizeof (esp_header_t);
      op->digest = payload + payload_len - icv_sz;
      op->key_index = sa0->integ_key_index;
      op->digest_len = icv_sz;
      op->len = payload_len - icv_sz + iv_sz + sizeof (esp_header_t);
      op->user_data = b - bufs;

      if (lb != b[0])
        {
          /* buffer is chained */
          op->flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
          op->chunk_index = vec_len (ptd->chunks);
          op->digest = vlib_buffer_get_tail (lb) - icv_sz;

          esp_encrypt_chain_integ (vm, ptd, sa0, b[0], lb, icv_sz,
                                   payload - iv_sz - sizeof (esp_header_t),
                                   payload_len + iv_sz +
                                   sizeof (esp_header_t), op->digest,
                                   &op->n_chunks);
        }
      else if (ipsec_sa_is_set_USE_ESN (sa0))
        {
          u32 seq_hi = clib_net_to_host_u32 (sa0->seq_hi);
          clib_memcpy_fast (op->digest, &seq_hi, sizeof (seq_hi));
          op->len += sizeof (seq_hi);
        }
    }
}
static_always_inline int
esp_prepare_async_frame (vlib_main_t * vm, ipsec_per_thread_data_t * ptd,
                         vnet_crypto_async_frame_t ** async_frame,
                         ipsec_sa_t * sa, vlib_buffer_t * b,
                         esp_header_t * esp, u8 * payload, u32 payload_len,
                         u8 iv_sz, u8 icv_sz, u32 bi, u16 * next,
                         u32 hdr_len, u16 async_next, vlib_buffer_t * lb)
{
  u8 *tag, *iv, *aad = 0;
  u8 flag = 0;
  u32 key_index;
  i16 crypto_start_offset, integ_start_offset = 0;
  u16 crypto_total_len, integ_total_len;

  /* park the packet on "pending" until the async engine completes it */
  next[0] = ESP_ENCRYPT_NEXT_PENDING;

  /* crypto */
  crypto_start_offset = payload - b->data;
  crypto_total_len = integ_total_len = payload_len - icv_sz;
  tag = payload + crypto_total_len;
  key_index = sa->linked_key_index;

  if (ipsec_sa_is_set_IS_AEAD (sa))
    {
      /* aead */
      esp_gcm_nonce_t *nonce;
      u64 *pkt_iv = (u64 *) (payload - iv_sz);

      aad = payload - hdr_len - sizeof (esp_aead_t);
      esp_aad_fill (aad, esp, sa);
      nonce = (esp_gcm_nonce_t *) (aad - sizeof (*nonce));
      nonce->salt = sa->salt;
      nonce->iv = *pkt_iv = clib_host_to_net_u64 (sa->gcm_iv_counter++);
      iv = (u8 *) nonce;
      key_index = sa->crypto_key_index;
      /* ... chained-buffer variant elided ... */
    }
  else
    {
      /* cipher then hash */
      iv = payload - iv_sz;
      integ_start_offset = crypto_start_offset - iv_sz -
        sizeof (esp_header_t);
      integ_total_len += iv_sz + sizeof (esp_header_t);
      flag |= VNET_CRYPTO_OP_FLAG_INIT_IV;

      if (b != lb)
        {
          flag |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
          crypto_total_len = esp_encrypt_chain_crypto (vm, ptd, sa, b, lb,
                                                       icv_sz, payload,
                                                       payload_len, 0);
          tag = vlib_buffer_get_tail (lb) - icv_sz;
          integ_total_len = esp_encrypt_chain_integ (vm, ptd, sa, b, lb,
                                                     icv_sz,
                                                     payload - iv_sz -
                                                     sizeof (esp_header_t),
                                                     payload_len + iv_sz +
                                                     sizeof (esp_header_t),
                                                     tag, 0);
        }
      else if (ipsec_sa_is_set_USE_ESN (sa) && !ipsec_sa_is_set_IS_AEAD (sa))
        {
          u32 seq_hi = clib_net_to_host_u32 (sa->seq_hi);
          clib_memcpy_fast (tag, &seq_hi, sizeof (seq_hi));
          integ_total_len += sizeof (seq_hi);
        }
    }

  return vnet_crypto_async_add_to_frame (vm, async_frame, key_index,
                                         crypto_total_len,
                                         integ_total_len - crypto_total_len,
                                         crypto_start_offset,
                                         integ_start_offset, bi, async_next,
                                         iv, tag, aad, flag);
}

static_always_inline void
esp_async_recycle_failed_submit (vnet_crypto_async_frame_t * f,
                                 vlib_buffer_t ** b, u16 * next,
                                 u16 drop_next)
{
  u32 n_drop = f->n_elts;

  while (n_drop--)
    {
      (b - n_drop)[0]->error = ESP_ENCRYPT_ERROR_CRYPTO_ENGINE_ERROR;
      (next - n_drop)[0] = drop_next;
    }

  vnet_crypto_async_reset_frame (f);
}
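/*
 * Illustrative sketch (not part of esp_encrypt.c): the offset bookkeeping
 * that esp_prepare_async_frame() hands to vnet_crypto_async_add_to_frame().
 * For cipher-then-MAC the integrity region starts earlier than the crypto
 * region (it also covers the ESP header and IV), so the frame carries the
 * crypto length plus a signed adjustment rather than two lengths.  The
 * sample numbers are arbitrary.  Standalone, standard C.
 */
#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  uint32_t payload_len = 120;   /* padded payload + footer + ICV */
  uint8_t iv_sz = 16, icv_sz = 12, esp_hdr_sz = 8;
  int32_t crypto_start = 214;   /* payload offset within the buffer */

  uint32_t crypto_len = payload_len - icv_sz;
  uint32_t integ_len = crypto_len + iv_sz + esp_hdr_sz;
  int32_t integ_start = crypto_start - iv_sz - esp_hdr_sz;
  int32_t integ_len_adj = (int32_t) integ_len - (int32_t) crypto_len;

  printf ("crypto: off=%d len=%u; integ: off=%d adj=%+d\n",
          crypto_start, crypto_len, integ_start, integ_len_adj);
  return 0;
}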
static uword
esp_encrypt_inline (vlib_main_t * vm, vlib_node_runtime_t * node,
                    vlib_frame_t * frame, int is_ip6, int is_tun,
                    u16 async_next)
{
  /* ... */
  u32 current_sa_index = ~0, current_sa_packets = 0;
  u32 current_sa_bytes = 0, spi = 0;
  u8 block_sz = 0, iv_sz = 0, icv_sz = 0;
  /* ... */
  u16 drop_next = (is_ip6 ? ESP_ENCRYPT_NEXT_DROP6 : ESP_ENCRYPT_NEXT_DROP4);
  /* ... */

  while (n_left > 0)
    {
      /* ... */
      u8 *payload, *next_hdr_ptr;
      u16 payload_len, payload_len_total, n_bufs;
      if (sa_index0 != current_sa_index)
        {
          if (current_sa_packets)
            vlib_increment_combined_counter (&ipsec_sa_counters,
                                             thread_index, current_sa_index,
                                             current_sa_packets,
                                             current_sa_bytes);
          current_sa_packets = current_sa_bytes = 0;

          sa0 = pool_elt_at_index (im->sad, sa_index0);
          current_sa_index = sa_index0;
          spi = clib_net_to_host_u32 (sa0->spi);
          /* ... cache block_sz, iv_sz, icv_sz for this SA ... */

          /* submit a partially-filled async frame before switching ops */
          if (async_frame && async_frame->n_elts)
            {
              /* ... */
            }
        }

      if (PREDICT_FALSE (thread_index != sa0->encrypt_thread_index))
        {
          /* this packet's SA is bound to another thread: hand it off */
          next[0] = (is_ip6 ?
                     ESP_ENCRYPT_NEXT_HANDOFF6 : ESP_ENCRYPT_NEXT_HANDOFF4);
          goto trace;
        }
      lb = b[0];
      n_bufs = vlib_buffer_chain_linearize (vm, b[0]);
      if (n_bufs == 0)
        {
          b[0]->error = node->errors[ESP_ENCRYPT_ERROR_NO_BUFFERS];
          next[0] = drop_next;
          goto trace;
        }

      if (n_bufs > 1)
        {
          /* find last buffer in the chain */
          while (lb->flags & VLIB_BUFFER_NEXT_PRESENT)
            lb = vlib_get_buffer (vm, lb->next_buffer);
        }

      if (PREDICT_FALSE (esp_seq_advance (sa0)))
        {
          b[0]->error = node->errors[ESP_ENCRYPT_ERROR_SEQ_CYCLED];
          next[0] = drop_next;
          goto trace;
        }
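/*
 * Illustrative sketch (not part of esp_encrypt.c): what a SEQ_CYCLED drop
 * means.  An ESN-enabled SA keeps a 64-bit sequence number split into
 * seq/seq_hi; the 32-bit half carried in the ESP header may wrap as long
 * as seq_hi can still be bumped.  Without ESN, a 32-bit wrap exhausts the
 * SA.  A sketch of that advance logic under these assumptions, with toy
 * types (the real esp_seq_advance() operates on ipsec_sa_t):
 */
#include <stdint.h>
#include <stdio.h>

typedef struct
{
  uint32_t seq, seq_hi;
  int use_esn;
} toy_sa_t;

/* returns 1 when the sequence space is exhausted (packet must drop) */
static int
toy_seq_advance (toy_sa_t * sa)
{
  if (sa->use_esn)
    {
      if (++sa->seq == 0)       /* low half wrapped */
        {
          if (++sa->seq_hi == 0)
            return 1;           /* 64-bit space exhausted */
        }
    }
  else if (++sa->seq == 0)
    return 1;                   /* 32-bit space exhausted */
  return 0;
}

int
main (void)
{
  toy_sa_t sa = { .seq = 0xffffffff, .seq_hi = 0, .use_esn = 1 };
  printf ("cycled=%d seq_hi=%u\n", toy_seq_advance (&sa), sa.seq_hi);
  return 0;
}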
      /* space for IV */
      hdr_len = iv_sz;

      if (ipsec_sa_is_set_IS_TUNNEL (sa0))
        {
          payload = vlib_buffer_get_current (b[0]);
          next_hdr_ptr = esp_add_footer_and_icv (vm, &lb, block_sz, icv_sz,
                                                 next, node,
                                                 buffer_data_size,
                                                 vlib_buffer_length_in_chain
                                                 (vm, b[0]));
          if (!next_hdr_ptr)
            {
              b[0]->error = node->errors[ESP_ENCRYPT_ERROR_NO_BUFFERS];
              next[0] = drop_next;
              goto trace;
            }
          b[0]->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
          payload_len = b[0]->current_length;
          payload_len_total = vlib_buffer_length_in_chain (vm, b[0]);

          /* ESP header */
          hdr_len += sizeof (*esp);
          esp = (esp_header_t *) (payload - hdr_len);

          /* optional UDP header */
          if (ipsec_sa_is_set_UDP_ENCAP (sa0))
            {
              hdr_len += sizeof (udp_header_t);
              esp_fill_udp_hdr (sa0, (udp_header_t *) (payload - hdr_len),
                                payload_len_total + hdr_len);
            }

          /* IP header */
          if (ipsec_sa_is_set_IS_TUNNEL_V6 (sa0))
            {
              ip6_header_t *ip6;
              u16 len = sizeof (ip6_header_t);
              hdr_len += len;
              ip6 = (ip6_header_t *) (payload - hdr_len);
              clib_memcpy_fast (ip6, &sa0->ip6_hdr, sizeof (ip6_header_t));
              *next_hdr_ptr = (is_ip6 ?
                               IP_PROTOCOL_IPV6 : IP_PROTOCOL_IP_IN_IP);
              len = payload_len_total + hdr_len - len;
              ip6->payload_length = clib_host_to_net_u16 (len);
            }
          else
            {
              ip4_header_t *ip4;
              u16 len = sizeof (ip4_header_t);
              hdr_len += len;
              ip4 = (ip4_header_t *) (payload - hdr_len);
              clib_memcpy_fast (ip4, &sa0->ip4_hdr, sizeof (ip4_header_t));
              *next_hdr_ptr = (is_ip6 ?
                               IP_PROTOCOL_IPV6 : IP_PROTOCOL_IP_IN_IP);
              len = payload_len_total + hdr_len;
              esp_update_ip4_hdr (ip4, len, /* is_transport */ 0, 0);
            }

          dpo = &sa0->dpo;
          if (!is_tun)
            {
              next[0] = dpo->dpoi_next_node;
              vnet_buffer (b[0])->ip.adj_index[VLIB_TX] = dpo->dpoi_index;
            }
          else
            next[0] = ESP_ENCRYPT_NEXT_INTERFACE_OUTPUT;
        }
      else                      /* transport mode */
        {
          u8 *l2_hdr, l2_len, *ip_hdr, ip_len;
          ip6_ext_header_t *ext_hdr;
          udp_header_t *udp = 0;
          u16 udp_len = 0;
          u8 *old_ip_hdr = vlib_buffer_get_current (b[0]);

          /* ... measure the IP header, advance past it, add footer
             and ICV ... */

          b[0]->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
          payload_len = b[0]->current_length;
          payload_len_total = vlib_buffer_length_in_chain (vm, b[0]);

          /* ESP header */
          hdr_len += sizeof (*esp);
          esp = (esp_header_t *) (payload - hdr_len);

          /* optional UDP header */
          if (ipsec_sa_is_set_UDP_ENCAP (sa0))
            {
              hdr_len += sizeof (udp_header_t);
              udp = (udp_header_t *) (payload - hdr_len);
            }

          /* IP header */
          hdr_len += ip_len;
          ip_hdr = payload - hdr_len;

          /* L2 header */
          if (!is_tun)
            {
              l2_len = vnet_buffer (b[0])->ip.save_rewrite_length;
              hdr_len += l2_len;
              l2_hdr = payload - hdr_len;

              /* copy l2 and ip header */
              clib_memcpy_le32 (l2_hdr, old_ip_hdr - l2_len, l2_len);
            }
          else
            l2_len = 0;

          if (is_ip6)
            {
              ip6_header_t *ip6 = (ip6_header_t *) old_ip_hdr;
              if (PREDICT_TRUE (NULL == ext_hdr))
                {
                  *next_hdr_ptr = ip6->protocol;
                  ip6->protocol = IP_PROTOCOL_IPSEC_ESP;
                }
              else
                {
                  *next_hdr_ptr = ext_hdr->next_hdr;
                  ext_hdr->next_hdr = IP_PROTOCOL_IPSEC_ESP;
                }
              ip6->payload_length =
                clib_host_to_net_u16 (payload_len_total + hdr_len - l2_len -
                                      sizeof (ip6_header_t));
            }
          else
            {
              u16 len;
              ip4_header_t *ip4 = (ip4_header_t *) old_ip_hdr;
              *next_hdr_ptr = ip4->protocol;
              len = payload_len_total + hdr_len - l2_len;
              if (udp)
                {
                  esp_update_ip4_hdr (ip4, len, /* is_transport */ 1, 1);
                  udp_len = len - ip_len;
                }
              else
                esp_update_ip4_hdr (ip4, len, /* is_transport */ 1, 0);
            }

          clib_memcpy_le64 (ip_hdr, old_ip_hdr, ip_len);

          if (udp)
            esp_fill_udp_hdr (sa0, udp, udp_len);

          next[0] = ESP_ENCRYPT_NEXT_INTERFACE_OUTPUT;
        }
      esp->spi = spi;
      esp->seq = clib_net_to_host_u32 (sa0->seq);

      if (is_async)
        {
          /* ... */
          if (esp_prepare_async_frame (vm, ptd, &async_frame, sa0, b[0], esp,
                                       payload, payload_len, iv_sz,
                                       icv_sz, from[b - bufs], next, hdr_len,
                                       async_next, lb))
            {
              esp_async_recycle_failed_submit (async_frame, b, next,
                                               drop_next);
              goto trace;
            }
        }
      else
        esp_prepare_sync_op (vm, ptd, crypto_ops, integ_ops, sa0, payload,
                             payload_len, iv_sz, icv_sz, bufs, b, lb,
                             hdr_len, esp, nonce++);

      vlib_buffer_advance (b[0], 0LL - hdr_len);

      current_sa_packets += 1;
      current_sa_bytes += payload_len_total;

    trace:
      if (PREDICT_FALSE (b[0]->flags & VLIB_BUFFER_IS_TRACED))
        {
          esp_encrypt_trace_t *tr = vlib_add_trace (vm, node, b[0],
                                                    sizeof (*tr));
          /* ... */
          tr->udp_encap = ipsec_sa_is_set_UDP_ENCAP (sa0);
          /* ... */
        }
      /* next */
      n_left -= 1;
      next += 1;
      b += 1;
    }

  vlib_increment_combined_counter (&ipsec_sa_counters, thread_index,
                                   current_sa_index, current_sa_packets,
                                   current_sa_bytes);

  if (!is_async)
    {
      /* ... run the queued sync crypto/integ ops, plain and chained ... */
    }
  else if (async_frame && async_frame->n_elts)
    {
      if (vnet_crypto_async_submit_open_frame (vm, async_frame) < 0)
        esp_async_recycle_failed_submit (async_frame, b, nexts, drop_next);
    }

  vlib_node_increment_counter (vm, node->node_index,
                               ESP_ENCRYPT_ERROR_RX_PKTS, frame->n_vectors);

  vlib_buffer_enqueue_to_next (vm, node, from, nexts, frame->n_vectors);
  return frame->n_vectors;
}
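/*
 * Illustrative sketch (not part of esp_encrypt.c): the per-SA counter
 * batching used above.  Instead of bumping the combined packet/byte
 * counter once per packet, the loop accumulates while consecutive packets
 * share an SA and flushes on SA change and again after the loop.
 * Standalone, standard C.
 */
#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  uint32_t sa_of_pkt[] = { 3, 3, 3, 7, 7 };
  uint32_t bytes_of_pkt[] = { 100, 100, 200, 50, 50 };
  uint32_t cur_sa = ~0u, pkts = 0;
  uint64_t bytes = 0;

  for (unsigned i = 0; i < 5; i++)
    {
      if (sa_of_pkt[i] != cur_sa)
        {
          if (pkts)             /* flush the batch for the previous SA */
            printf ("sa %u: +%u pkts +%llu bytes\n", cur_sa, pkts,
                    (unsigned long long) bytes);
          cur_sa = sa_of_pkt[i];
          pkts = 0;
          bytes = 0;
        }
      pkts += 1;
      bytes += bytes_of_pkt[i];
    }
  if (pkts)                     /* final flush, as after the main loop */
    printf ("sa %u: +%u pkts +%llu bytes\n", cur_sa, pkts,
            (unsigned long long) bytes);
  return 0;
}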
static uword
esp_encrypt_post_inline (vlib_main_t * vm, vlib_node_runtime_t * node,
                         vlib_frame_t * frame)
{
  /* ... per-packet loop; with tracing enabled, record the post-crypto
     next index for up to four buffers at a time ... */
      if (PREDICT_FALSE (node->flags & VLIB_NODE_FLAG_TRACE))
        {
          if (b[0]->flags & VLIB_BUFFER_IS_TRACED)
            {
              /* ... vlib_add_trace for b[0] ... */
            }
          if (b[1]->flags & VLIB_BUFFER_IS_TRACED)
            {
              /* ... */
            }
          if (b[2]->flags & VLIB_BUFFER_IS_TRACED)
            {
              /* ... */
            }
          if (b[3]->flags & VLIB_BUFFER_IS_TRACED)
            {
              /* ... */
            }
        }
  /* ... */

  vlib_node_increment_counter (vm, node->node_index,
                               ESP_ENCRYPT_ERROR_POST_RX_PKTS,
                               frame->n_vectors);
  /* ... */
}
VLIB_REGISTER_NODE (esp4_encrypt_node) = {
  .name = "esp4-encrypt",
  .vector_size = sizeof (u32),
  /* ... */
  .next_nodes = {
    [ESP_ENCRYPT_NEXT_DROP4] = "ip4-drop",
    [ESP_ENCRYPT_NEXT_DROP6] = "ip6-drop",
    [ESP_ENCRYPT_NEXT_HANDOFF4] = "esp4-encrypt-handoff",
    [ESP_ENCRYPT_NEXT_HANDOFF6] = "esp6-encrypt-handoff",
    [ESP_ENCRYPT_NEXT_INTERFACE_OUTPUT] = "interface-output",
    [ESP_ENCRYPT_NEXT_PENDING] = "esp-encrypt-pending",
  },
};
VLIB_REGISTER_NODE (esp4_encrypt_post_node) = {
  .name = "esp4-encrypt-post",
  .vector_size = sizeof (u32),
  /* ... */
  .sibling_of = "esp4-encrypt",
};

VLIB_REGISTER_NODE (esp6_encrypt_node) = {
  .name = "esp6-encrypt",
  .vector_size = sizeof (u32),
  /* ... */
  .sibling_of = "esp4-encrypt",
};

VLIB_REGISTER_NODE (esp6_encrypt_post_node) = {
  .name = "esp6-encrypt-post",
  .vector_size = sizeof (u32),
  /* ... */
  .sibling_of = "esp4-encrypt",
};
VLIB_REGISTER_NODE (esp4_encrypt_tun_node) = {
  .name = "esp4-encrypt-tun",
  .vector_size = sizeof (u32),
  /* ... */
  .next_nodes = {
    [ESP_ENCRYPT_NEXT_DROP4] = "ip4-drop",
    [ESP_ENCRYPT_NEXT_DROP6] = "ip6-drop",
    [ESP_ENCRYPT_NEXT_HANDOFF4] = "esp4-encrypt-tun-handoff",
    [ESP_ENCRYPT_NEXT_HANDOFF6] = "error-drop",
    [ESP_ENCRYPT_NEXT_INTERFACE_OUTPUT] = "adj-midchain-tx",
    [ESP_ENCRYPT_NEXT_PENDING] = "esp-encrypt-pending",
  },
};
VLIB_REGISTER_NODE (esp4_encrypt_tun_post_node) = {
  .name = "esp4-encrypt-tun-post",
  .vector_size = sizeof (u32),
  /* ... */
  .sibling_of = "esp4-encrypt-tun",
};

VLIB_REGISTER_NODE (esp6_encrypt_tun_node) = {
  .name = "esp6-encrypt-tun",
  .vector_size = sizeof (u32),
  /* ... */
  .next_nodes = {
    [ESP_ENCRYPT_NEXT_DROP4] = "ip4-drop",
    [ESP_ENCRYPT_NEXT_DROP6] = "ip6-drop",
    [ESP_ENCRYPT_NEXT_HANDOFF4] = "error-drop",
    [ESP_ENCRYPT_NEXT_HANDOFF6] = "esp6-encrypt-tun-handoff",
    [ESP_ENCRYPT_NEXT_PENDING] = "esp-encrypt-pending",
    [ESP_ENCRYPT_NEXT_INTERFACE_OUTPUT] = "adj-midchain-tx",
  },
};

VLIB_REGISTER_NODE (esp6_encrypt_tun_post_node) = {
  .name = "esp6-encrypt-tun-post",
  .vector_size = sizeof (u32),
  /* ... */
  .sibling_of = "esp6-encrypt-tun",
};
1266 "Outbound ESP packets received",
VLIB_REGISTER_NODE (esp4_no_crypto_tun_node) = {
  .name = "esp4-no-crypto",
  .vector_size = sizeof (u32),
  /* ... */
};

VLIB_REGISTER_NODE (esp6_no_crypto_tun_node) = {
  .name = "esp6-no-crypto",
  .vector_size = sizeof (u32),
  /* ... */
};
VLIB_NODE_FN (esp_encrypt_pending_node) (vlib_main_t * vm,
                                         vlib_node_runtime_t * node,
                                         vlib_frame_t * from_frame)
{
  return from_frame->n_vectors;
}

VLIB_REGISTER_NODE (esp_encrypt_pending_node) = {
  .name = "esp-encrypt-pending",
  .vector_size = sizeof (u32),
  /* ... */
};