30 #define foreach_esp_decrypt_next \ 31 _(DROP, "error-drop") \ 32 _(IP4_INPUT, "ip4-input-no-checksum") \ 33 _(IP6_INPUT, "ip6-input") \ 34 _(L2_INPUT, "l2-input") \ 37 #define _(v, s) ESP_DECRYPT_NEXT_##v, 45 #define foreach_esp_decrypt_post_next \ 46 _(DROP, "error-drop") \ 47 _(IP4_INPUT, "ip4-input-no-checksum") \ 48 _(IP6_INPUT, "ip6-input") \ 49 _(L2_INPUT, "l2-input") 51 #define _(v, s) ESP_DECRYPT_POST_NEXT_##v, 59 #define foreach_esp_decrypt_error \ 60 _(RX_PKTS, "ESP pkts received") \ 61 _(RX_POST_PKTS, "ESP-POST pkts received") \ 62 _(DECRYPTION_FAILED, "ESP decryption failed") \ 63 _(INTEG_ERROR, "Integrity check failed") \ 64 _(CRYPTO_ENGINE_ERROR, "crypto engine error (packet dropped)") \ 65 _(REPLAY, "SA replayed packet") \ 66 _(RUNT, "undersized packet") \ 67 _(NO_BUFFERS, "no buffers (packet dropped)") \ 68 _(OVERSIZED_HEADER, "buffer with oversized header (dropped)") \ 69 _(NO_TAIL_SPACE, "no enough buffer tail space (dropped)") \ 70 _(TUN_NO_PROTO, "no tunnel protocol") \ 71 _(UNSUP_PAYLOAD, "unsupported payload") \ 76 #define _(sym,str) ESP_DECRYPT_ERROR_##sym, 83 #define _(sym,string) string, 93 ipsec_crypto_alg_t crypto_alg;
107 "esp: crypto %U integrity %U pkt-seq %d sa-seq %u sa-seq-hi %u",
113 #define ESP_ENCRYPT_PD_F_FD_TRANSPORT (1 << 2) 130 ASSERT (op - ops < n_ops);
131 if (op->
status != VNET_CRYPTO_OP_STATUS_COMPLETED)
134 if (op->
status == VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC)
137 err = ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR;
139 nexts[bi] = ESP_DECRYPT_NEXT_DROP;
162 ASSERT (op - ops < n_ops);
163 if (op->
status != VNET_CRYPTO_OP_STATUS_COMPLETED)
166 if (op->
status == VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC)
169 err = ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR;
171 nexts[bi] = ESP_DECRYPT_NEXT_DROP;
191 while (b->
flags & VLIB_BUFFER_NEXT_PRESENT)
198 before_last->
flags &= ~VLIB_BUFFER_NEXT_PRESENT;
209 u16 first_sz = icv_sz - last_sz;
211 bp = before_last =
first;
212 while (bp->
flags & VLIB_BUFFER_NEXT_PRESENT)
219 memmove (lb_curr + first_sz, lb_curr, last_sz);
226 pd2->
lb = before_last;
229 before_last->
flags &= ~VLIB_BUFFER_NEXT_PRESENT;
238 if (!ipsec_sa_is_set_USE_ESN (sa))
242 u32 seq_hi = clib_host_to_net_u32 (sa->
seq_hi);
248 if (space_left >= sz)
279 if (ipsec_sa_is_set_USE_ESN (sa))
282 u32 seq_hi = clib_host_to_net_u32 (sa->
seq_hi);
285 if (space_left >= sz)
307 u8 * start_src,
u32 start_len,
308 u8 ** digest,
u16 * n_ch,
u32 * integ_total_len)
315 total_len = ch->
len = start_len;
329 if (ipsec_sa_is_set_USE_ESN (sa0))
331 u32 seq_hi = clib_host_to_net_u32 (sa0->
seq_hi);
343 esn = tmp_b->
data - sz;
367 (pd2->
lb), &seq_hi, sz);
379 total_len += ch->
len;
385 if (!(cb->
flags & VLIB_BUFFER_NEXT_PRESENT))
394 *integ_total_len = total_len;
403 u8 * start,
u32 start_len,
u8 ** tag,
u16 * n_ch)
410 total_len = ch->
len = start_len;
411 ch->
src = ch->
dst = start;
422 if (ipsec_sa_is_set_IS_AEAD (sa0))
459 if (!(cb->
flags & VLIB_BUFFER_NEXT_PRESENT))
510 &extra_esn, &op->
len);
540 b->
error = node->
errors[ESP_DECRYPT_ERROR_NO_BUFFERS];
541 next[0] = ESP_DECRYPT_NEXT_DROP;
561 if (ipsec_sa_is_set_IS_AEAD (sa0))
571 scratch = payload - esp_sz;
574 scratch -= (
sizeof (*aad) + pd->
hdr_sz);
584 op->
iv -=
sizeof (sa0->
salt);
590 op->
src = op->
dst = payload += iv_sz;
591 op->
len = len - iv_sz;
624 u8 *tag = payload +
len, *
iv = payload + esp_sz, *aad = 0;
626 u32 crypto_len, integ_len = 0;
627 i16 crypto_start_offset, integ_start_offset = 0;
630 if (!ipsec_sa_is_set_IS_AEAD (sa0))
634 integ_start_offset = payload - b->
data;
650 &extra_esn, &integ_len);
678 b->
error = node->
errors[ESP_DECRYPT_ERROR_NO_BUFFERS];
679 next[0] = ESP_DECRYPT_NEXT_DROP;
684 esp_insert_esn (vm, sa0, pd2, &integ_len, &tag, &len, b, payload);
695 if (ipsec_sa_is_set_IS_AEAD (sa0))
704 scratch = payload - esp_sz;
717 iv -=
sizeof (sa0->
salt);
723 crypto_start_offset = (payload += iv_sz) - b->
data;
724 crypto_len = len - iv_sz;
744 integ_len - crypto_len,
747 bi, async_next, iv, tag, aad, flags);
754 u16 * next,
int is_ip6,
int is_tun,
int is_async)
760 const u8 tun_flags = IPSEC_SA_FLAG_IS_TUNNEL | IPSEC_SA_FLAG_IS_TUNNEL_V6;
761 u8 pad_length = 0, next_header = 0;
789 next[0] = ESP_DECRYPT_NEXT_DROP;
810 while (bp->flags & VLIB_BUFFER_NEXT_PRESENT)
825 pad_length = (bt - 1)[0];
852 b->
flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
854 if ((pd->
flags & tun_flags) == 0 && !is_tun)
856 u8 udp_sz = (is_ip6 == 0 && pd->
flags & IPSEC_SA_FLAG_UDP_ENCAP) ?
860 u8 *
ip = old_ip + adv + udp_sz;
862 if (is_ip6 && ip_hdr_sz > 64)
863 memmove (ip, old_ip, ip_hdr_sz);
875 len -= adv + tail_orig;
878 next[0] = ESP_DECRYPT_NEXT_IP6_INPUT;
885 len = clib_host_to_net_u16 (len - adv - tail_orig - udp_sz);
892 next[0] = ESP_DECRYPT_NEXT_IP4_INPUT;
899 next[0] = ESP_DECRYPT_NEXT_IP4_INPUT;
904 else if (next_header == IP_PROTOCOL_IPV6)
906 next[0] = ESP_DECRYPT_NEXT_IP6_INPUT;
913 if (is_tun && next_header == IP_PROTOCOL_GRE)
924 switch (clib_net_to_host_u16 (gre->
protocol))
926 case GRE_PROTOCOL_teb:
928 next[0] = ESP_DECRYPT_NEXT_L2_INPUT;
930 case GRE_PROTOCOL_ip4:
931 next[0] = ESP_DECRYPT_NEXT_IP4_INPUT;
933 case GRE_PROTOCOL_ip6:
934 next[0] = ESP_DECRYPT_NEXT_IP6_INPUT;
937 b->
error = node->
errors[ESP_DECRYPT_ERROR_UNSUP_PAYLOAD];
938 next[0] = ESP_DECRYPT_NEXT_DROP;
944 next[0] = ESP_DECRYPT_NEXT_DROP;
945 b->
error = node->
errors[ESP_DECRYPT_ERROR_UNSUP_PAYLOAD];
951 if (ipsec_sa_is_set_IS_PROTECT (sa0))
976 ipsec.protect_index);
989 next[0] = ESP_DECRYPT_NEXT_DROP;
990 b->
error = node->
errors[ESP_DECRYPT_ERROR_TUN_NO_PROTO];
993 else if (next_header == IP_PROTOCOL_IPV6)
1004 next[0] = ESP_DECRYPT_NEXT_DROP;
1005 b->
error = node->
errors[ESP_DECRYPT_ERROR_TUN_NO_PROTO];
1029 u32 current_sa_index = ~0, current_sa_bytes = 0, current_sa_pkts = 0;
1038 u16 n_async_drop = 0;
1068 b[0]->
error = node->
errors[ESP_DECRYPT_ERROR_NO_BUFFERS];
1070 &n_async_drop, ESP_DECRYPT_NEXT_DROP, next);
1071 next[0] = ESP_DECRYPT_NEXT_DROP;
1075 if (
vnet_buffer (b[0])->ipsec.sad_index != current_sa_index)
1077 if (current_sa_pkts)
1082 current_sa_bytes = current_sa_pkts = 0;
1084 current_sa_index =
vnet_buffer (b[0])->ipsec.sad_index;
1097 if (async_frame && async_frame->
n_elts)
1101 nexts, &n_async_drop,
1102 ESP_DECRYPT_NEXT_DROP,
1103 ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR);
1123 &n_async_drop, ESP_DECRYPT_NEXT_HANDOFF, next);
1124 next[0] = ESP_DECRYPT_NEXT_HANDOFF;
1143 while (pd2->
lb->
flags & VLIB_BUFFER_NEXT_PRESENT)
1155 b[0]->
error = node->
errors[ESP_DECRYPT_ERROR_REPLAY];
1157 &n_async_drop, ESP_DECRYPT_NEXT_DROP, next);
1163 b[0]->
error = node->
errors[ESP_DECRYPT_ERROR_RUNT];
1165 &n_async_drop, ESP_DECRYPT_NEXT_DROP, next);
1170 current_sa_pkts += 1;
1182 b[0], next, async_next);
1185 b[0]->
error = ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR;
1187 &n_async_drop, ESP_DECRYPT_NEXT_DROP, next);
1192 if (next[0] != ESP_DECRYPT_NEXT_DROP && async_frame->
n_elts)
1194 nexts, &n_async_drop,
1195 ESP_DECRYPT_NEXT_DROP,
1196 ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR);
1202 op, sa0, payload, len, cpd.
icv_sz,
1203 cpd.
iv_sz, pd, pd2, b[0], next,
1216 current_sa_index, current_sa_pkts,
1221 if (async_frame && async_frame->
n_elts)
1226 ESP_DECRYPT_NEXT_DROP,
1227 ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR);
1232 ESP_DECRYPT_ERROR_RX_PKTS, n_left);
1241 ESP_DECRYPT_ERROR_INTEG_ERROR);
1243 ptd->
chunks, ESP_DECRYPT_ERROR_INTEG_ERROR);
1246 ESP_DECRYPT_ERROR_DECRYPTION_FAILED);
1249 ESP_DECRYPT_ERROR_DECRYPTION_FAILED);
1271 CLIB_PREFETCH (data + pd[1].current_length - pd[1].icv_sz - 2,
1281 current_sa_index =
vnet_buffer (b[0])->ipsec.sad_index;
1310 ESP_DECRYPT_ERROR_RX_PKTS, n_left);
1373 ESP_DECRYPT_ERROR_RX_POST_PKTS, n_left);
1442 .name =
"esp4-decrypt",
1443 .vector_size =
sizeof (
u32),
1452 [ESP_DECRYPT_NEXT_DROP] =
"ip4-drop",
1453 [ESP_DECRYPT_NEXT_IP4_INPUT] =
"ip4-input-no-checksum",
1454 [ESP_DECRYPT_NEXT_IP6_INPUT] =
"ip6-input",
1455 [ESP_DECRYPT_NEXT_L2_INPUT] =
"l2-input",
1456 [ESP_DECRYPT_NEXT_HANDOFF] =
"esp4-decrypt-handoff",
1461 .name =
"esp4-decrypt-post",
1462 .vector_size =
sizeof (
u32),
1469 .sibling_of =
"esp4-decrypt",
1473 .name =
"esp6-decrypt",
1474 .vector_size =
sizeof (
u32),
1483 [ESP_DECRYPT_NEXT_DROP] =
"ip6-drop",
1484 [ESP_DECRYPT_NEXT_IP4_INPUT] =
"ip4-input-no-checksum",
1485 [ESP_DECRYPT_NEXT_IP6_INPUT] =
"ip6-input",
1486 [ESP_DECRYPT_NEXT_L2_INPUT] =
"l2-input",
1487 [ESP_DECRYPT_NEXT_HANDOFF]=
"esp6-decrypt-handoff",
1492 .name =
"esp6-decrypt-post",
1493 .vector_size =
sizeof (
u32),
1500 .sibling_of =
"esp6-decrypt",
1504 .name =
"esp4-decrypt-tun",
1505 .vector_size =
sizeof (
u32),
1512 [ESP_DECRYPT_NEXT_DROP] =
"ip4-drop",
1513 [ESP_DECRYPT_NEXT_IP4_INPUT] =
"ip4-input-no-checksum",
1514 [ESP_DECRYPT_NEXT_IP6_INPUT] =
"ip6-input",
1515 [ESP_DECRYPT_NEXT_L2_INPUT] =
"l2-input",
1516 [ESP_DECRYPT_NEXT_HANDOFF] =
"esp4-decrypt-tun-handoff",
1521 .name =
"esp4-decrypt-tun-post",
1522 .vector_size =
sizeof (
u32),
1529 .sibling_of =
"esp4-decrypt-tun",
1533 .name =
"esp6-decrypt-tun",
1534 .vector_size =
sizeof (
u32),
1541 [ESP_DECRYPT_NEXT_DROP] =
"ip6-drop",
1542 [ESP_DECRYPT_NEXT_IP4_INPUT] =
"ip4-input-no-checksum",
1543 [ESP_DECRYPT_NEXT_IP6_INPUT] =
"ip6-input",
1544 [ESP_DECRYPT_NEXT_L2_INPUT] =
"l2-input",
1545 [ESP_DECRYPT_NEXT_HANDOFF]=
"esp6-decrypt-tun-handoff",
1550 .name =
"esp6-decrypt-tun-post",
1551 .vector_size =
sizeof (
u32),
1558 .sibling_of =
"esp6-decrypt-tun",
u32 vnet_crypto_process_ops(vlib_main_t *vm, vnet_crypto_op_t ops[], u32 n_ops)
u32 flags
buffer flags: VLIB_BUFFER_FREE_LIST_INDEX_MASK: bits used to store free list index, VLIB_BUFFER_IS_TRACED: trace this buffer.
static_always_inline void esp_process_ops(vlib_main_t *vm, vlib_node_runtime_t *node, vnet_crypto_op_t *ops, vlib_buffer_t *b[], u16 *nexts, int e)
static_always_inline int vnet_crypto_async_add_to_frame(vlib_main_t *vm, vnet_crypto_async_frame_t **frame, u32 key_index, u32 crypto_len, i16 integ_len_adj, i16 crypto_start_offset, u16 integ_start_offset, u32 buffer_index, u16 next_node, u8 *iv, u8 *tag, u8 *aad, u8 flags)
static u8 * vlib_buffer_get_tail(vlib_buffer_t *b)
Get pointer to the end of buffer's data.
static u8 * format_esp_decrypt_trace(u8 *s, va_list *args)
The post data structure for esp_encrypt/decrypt_inline to write to the vlib_buffer_t opaque unused field...
ipsec_per_thread_data_t * ptd
vnet_crypto_op_t * integ_ops
vl_api_wireguard_peer_flags_t flags
static char * esp_decrypt_error_strings[]
static void vlib_increment_combined_counter(vlib_combined_counter_main_t *cm, u32 thread_index, u32 index, u64 n_packets, u64 n_bytes)
Increment a combined counter.
static u16 esp_aad_fill(u8 *data, const esp_header_t *esp, const ipsec_sa_t *sa)
i16 current_data
signed offset in data[], pre_data[] that we are currently processing.
vl_api_ip_proto_t protocol
#define clib_memcpy_fast(a, b, c)
clib_memset(h->entries, 0, sizeof(h->entries[0]) *entries)
#define VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS
static u32 ipsec_sa_assign_thread(u32 thread_id)
static_always_inline u8 * esp_move_icv(vlib_main_t *vm, vlib_buffer_t *first, esp_decrypt_packet_data2_t *pd2, u16 icv_sz, u16 *dif)
ipsec_integ_alg_t integ_alg
static_always_inline int esp_decrypt_prepare_async_frame(vlib_main_t *vm, vlib_node_runtime_t *node, ipsec_per_thread_data_t *ptd, vnet_crypto_async_frame_t **f, ipsec_sa_t *sa0, u8 *payload, u16 len, u8 icv_sz, u8 iv_sz, esp_decrypt_packet_data_t *pd, esp_decrypt_packet_data2_t *pd2, u32 bi, vlib_buffer_t *b, u16 *next, u16 async_next)
u16 current_length
Number of bytes between current data and the end of this buffer.
struct esp_aead_t_ esp_aead_t
AES GCM Additional Authentication data.
vnet_crypto_op_t * crypto_ops
ipsec_crypto_alg_t crypto_alg
static heap_elt_t * last(heap_header_t *h)
AES GCM Additional Authentication data.
#define vec_add2(V, P, N)
Add N elements to end of vector V, return pointer to new elements in P.
#define VLIB_NODE_FN(node)
vnet_crypto_op_chunk_t * chunks
vlib_error_t * errors
Vector of errors for this node.
vnet_crypto_op_id_t integ_op_id
static uword vlib_buffer_length_in_chain(vlib_main_t *vm, vlib_buffer_t *b)
Get length in bytes of the buffer chain.
static_always_inline int ip46_address_is_equal_v6(const ip46_address_t *ip46, const ip6_address_t *ip6)
#define vec_reset_length(v)
Reset vector length to zero NULL-pointer tolerant.
static uword esp_decrypt_inline(vlib_main_t *vm, vlib_node_runtime_t *node, vlib_frame_t *from_frame, int is_ip6, int is_tun, u16 async_next)
vnet_crypto_key_index_t linked_key_index
vnet_crypto_key_index_t crypto_key_index
static_always_inline u32 esp_decrypt_chain_crypto(vlib_main_t *vm, ipsec_per_thread_data_t *ptd, esp_decrypt_packet_data2_t *pd2, ipsec_sa_t *sa0, vlib_buffer_t *b, u8 icv_sz, u8 *start, u32 start_len, u8 **tag, u16 *n_ch)
vlib_node_registration_t esp6_decrypt_tun_node
(constructor) VLIB_REGISTER_NODE (esp6_decrypt_tun_node)
#define static_always_inline
description fragment has unexpected format
#define vlib_prefetch_buffer_header(b, type)
Prefetch buffer metadata.
vlib_node_registration_t esp4_decrypt_tun_post_node
(constructor) VLIB_REGISTER_NODE (esp4_decrypt_tun_post_node)
static_always_inline void vnet_crypto_op_init(vnet_crypto_op_t *op, vnet_crypto_op_id_t type)
#define vec_elt_at_index(v, i)
Get vector value at index i checking that i is in bounds.
vlib_node_registration_t esp4_decrypt_tun_node
(constructor) VLIB_REGISTER_NODE (esp4_decrypt_tun_node)
#define foreach_esp_decrypt_error
static heap_elt_t * first(heap_header_t *h)
vl_api_fib_path_type_t type
vlib_error_t error
Error code for buffers to be enqueued to error handler.
static void esp_async_recycle_failed_submit(vnet_crypto_async_frame_t *f, vlib_buffer_t **b, u32 *from, u16 *nexts, u16 *n_dropped, u16 drop_next_index, vlib_error_t err)
static u32 vlib_buffer_chain_linearize(vlib_main_t *vm, vlib_buffer_t *b)
#define pool_elt_at_index(p, i)
Returns pointer to element at given index.
static_always_inline u8 * esp_move_icv_esn(vlib_main_t *vm, vlib_buffer_t *first, esp_decrypt_packet_data2_t *pd2, u16 icv_sz, ipsec_sa_t *sa, u8 *extra_esn, u32 *len)
esp_async_post_next_t esp_decrypt_async_next
static_always_inline i16 esp_insert_esn(vlib_main_t *vm, ipsec_sa_t *sa, esp_decrypt_packet_data2_t *pd2, u32 *data_len, u8 **digest, u16 *len, vlib_buffer_t *b, u8 *payload)
static __clib_warn_unused_result u32 vlib_buffer_alloc(vlib_main_t *vm, u32 *buffers, u32 n_buffers)
Allocate buffers into supplied array.
static void ipsec_sa_anti_replay_advance(ipsec_sa_t *sa, u32 seq)
static uword esp_decrypt_post_inline(vlib_main_t *vm, vlib_node_runtime_t *node, vlib_frame_t *from_frame, int is_ip6, int is_tun)
static void * vlib_buffer_get_current(vlib_buffer_t *b)
Get pointer to current data to process.
vnet_crypto_async_op_id_t crypto_async_dec_op_id
static void esp_remove_tail(vlib_main_t *vm, vlib_buffer_t *b, vlib_buffer_t *last, u16 tail)
u32 node_index
Node index.
#define VNET_CRYPTO_OP_FLAG_HMAC_CHECK
vlib_node_registration_t esp6_decrypt_post_node
(constructor) VLIB_REGISTER_NODE (esp6_decrypt_post_node)
static_always_inline vnet_crypto_async_frame_t * vnet_crypto_async_get_frame(vlib_main_t *vm, vnet_crypto_async_op_id_t opt)
async crypto inline functions
static void vlib_node_increment_counter(vlib_main_t *vm, u32 node_index, u32 counter_index, u64 increment)
vnet_crypto_op_t * chained_crypto_ops
#define vec_add_aligned(V, E, N, A)
Add N elements to end of vector V (no header, specified alignment)
#define VLIB_REGISTER_NODE(x,...)
#define foreach_esp_decrypt_post_next
#define CLIB_PREFETCH(addr, size, type)
static_always_inline void vlib_buffer_enqueue_to_next(vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u16 *nexts, uword count)
static u32 vlib_buffer_space_left_at_end(vlib_main_t *vm, vlib_buffer_t *b)
vlib_node_registration_t esp6_decrypt_tun_post_node
(constructor) VLIB_REGISTER_NODE (esp6_decrypt_tun_post_node)
vlib_main_t vlib_node_runtime_t * node
#define clib_atomic_cmp_and_swap(addr, old, new)
vlib_combined_counter_main_t ipsec_sa_counters
SA packet & bytes counters.
u32 vnet_crypto_process_chained_ops(vlib_main_t *vm, vnet_crypto_op_t ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops)
static ipsec_tun_protect_t * ipsec_tun_protect_get(u32 index)
static int ipsec_sa_anti_replay_check(ipsec_sa_t *sa, u32 seq)
static void vlib_buffer_advance(vlib_buffer_t *b, word l)
Advance current data pointer by the supplied (signed!) amount.
vnet_crypto_async_op_id_t
vnet_crypto_key_index_t integ_key_index
static_always_inline void clib_memset_u16(void *p, u16 val, uword count)
static_always_inline int vnet_crypto_async_submit_open_frame(vlib_main_t *vm, vnet_crypto_async_frame_t *frame)
static_always_inline void clib_memcpy_le64(u8 *dst, u8 *src, u8 len)
vlib_node_registration_t esp4_decrypt_post_node
(constructor) VLIB_REGISTER_NODE (esp4_decrypt_post_node)
#define vec_len(v)
Number of elements in vector (rvalue-only, NULL tolerant)
u32 next_buffer
Next buffer for this linked-list of buffers.
VLIB buffer representation.
#define esp_post_data2(b)
static void * vlib_frame_vector_args(vlib_frame_t *f)
Get pointer to frame vector data.
#define ip_csum_update(sum, old, new, type, field)
static_always_inline int ip46_address_is_equal_v4(const ip46_address_t *ip46, const ip4_address_t *ip4)
vnet_crypto_op_t * chained_integ_ops
vnet_crypto_op_status_t status
#define foreach_esp_decrypt_next
ipsec_crypto_alg_t crypto_alg
static_always_inline void esp_process_chained_ops(vlib_main_t *vm, vlib_node_runtime_t *node, vnet_crypto_op_t *ops, vlib_buffer_t *b[], u16 *nexts, vnet_crypto_op_chunk_t *chunks, int e)
static_always_inline int esp_decrypt_chain_integ(vlib_main_t *vm, ipsec_per_thread_data_t *ptd, esp_decrypt_packet_data2_t *pd2, ipsec_sa_t *sa0, vlib_buffer_t *b, u8 icv_sz, u8 *start_src, u32 start_len, u8 **digest, u16 *n_ch, u32 *integ_total_len)
void * vlib_add_trace(vlib_main_t *vm, vlib_node_runtime_t *r, vlib_buffer_t *b, u32 n_data_bytes)
static void vlib_buffer_free_one(vlib_main_t *vm, u32 buffer_index)
Free one buffer. Shorthand to free a single buffer chain.
static_always_inline void vlib_get_buffers(vlib_main_t *vm, u32 *bi, vlib_buffer_t **b, int count)
Translate array of buffer indices into buffer pointers.
#define CLIB_CACHE_LINE_BYTES
vl_api_ikev2_sa_transform_t esn
static_always_inline void esp_decrypt_post_crypto(vlib_main_t *vm, vlib_node_runtime_t *node, esp_decrypt_packet_data_t *pd, esp_decrypt_packet_data2_t *pd2, vlib_buffer_t *b, u16 *next, int is_ip6, int is_tun, int is_async)
vnet_crypto_op_id_t crypto_dec_op_id
vlib_node_registration_t esp6_decrypt_node
(constructor) VLIB_REGISTER_NODE (esp6_decrypt_node)
static vlib_buffer_t * vlib_get_buffer(vlib_main_t *vm, u32 buffer_index)
Translate buffer index into buffer pointer.
static u16 ip_csum_fold(ip_csum_t c)
static void esp_set_next_index(int is_async, u32 *from, u16 *nexts, u32 bi, u16 *drop_index, u16 drop_next, u16 *next)
ipsec_integ_alg_t integ_alg
vlib_node_registration_t esp4_decrypt_node
(constructor) VLIB_REGISTER_NODE (esp4_decrypt_node)
static_always_inline void esp_decrypt_prepare_sync_op(vlib_main_t *vm, vlib_node_runtime_t *node, ipsec_per_thread_data_t *ptd, vnet_crypto_op_t ***crypto_ops, vnet_crypto_op_t ***integ_ops, vnet_crypto_op_t *op, ipsec_sa_t *sa0, u8 *payload, u16 len, u8 icv_sz, u8 iv_sz, esp_decrypt_packet_data_t *pd, esp_decrypt_packet_data2_t *pd2, vlib_buffer_t *b, u16 *next, u32 index)