/*
 * FD.io VPP v21.10.1-2-g0a485f517 (Vector Packet Processing)
 * ESP decrypt node - abridged source listing; elided regions are marked
 * with ellipsis comments.
 */
#define foreach_esp_decrypt_next                                              \
  _ (DROP, "error-drop")                                                      \
  _ (IP4_INPUT, "ip4-input-no-checksum")                                      \
  _ (IP6_INPUT, "ip6-input")                                                  \
  _ (L2_INPUT, "l2-input")                                                    \
  _ (MPLS_INPUT, "mpls-input")                                                \
  _ (HANDOFF, "handoff")

#define _(v, s) ESP_DECRYPT_NEXT_##v,
typedef enum
{
  foreach_esp_decrypt_next
#undef _
    ESP_DECRYPT_N_NEXT,
} esp_decrypt_next_t;
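/* Illustrative expansion (not part of the file): with _() defined as above,
 * the typedef expands to
 *
 *   typedef enum {
 *     ESP_DECRYPT_NEXT_DROP,
 *     ESP_DECRYPT_NEXT_IP4_INPUT,
 *     ESP_DECRYPT_NEXT_IP6_INPUT,
 *     ESP_DECRYPT_NEXT_L2_INPUT,
 *     ESP_DECRYPT_NEXT_MPLS_INPUT,
 *     ESP_DECRYPT_NEXT_HANDOFF,
 *     ESP_DECRYPT_N_NEXT,
 *   } esp_decrypt_next_t;
 *
 * The string in each _() entry names the graph node that the index maps to
 * in the node registrations at the bottom of this file. */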
#define foreach_esp_decrypt_post_next                                         \
  _ (DROP, "error-drop")                                                      \
  _ (IP4_INPUT, "ip4-input-no-checksum")                                      \
  _ (IP6_INPUT, "ip6-input")                                                  \
  _ (MPLS_INPUT, "mpls-input")                                                \
  _ (L2_INPUT, "l2-input")

#define _(v, s) ESP_DECRYPT_POST_NEXT_##v,
typedef enum
{
  foreach_esp_decrypt_post_next
#undef _
    ESP_DECRYPT_POST_N_NEXT,
} esp_decrypt_post_next_t;
#define foreach_esp_decrypt_error                                             \
  _ (RX_PKTS, "ESP pkts received")                                            \
  _ (RX_POST_PKTS, "ESP-POST pkts received")                                  \
  _ (HANDOFF, "hand-off")                                                     \
  _ (DECRYPTION_FAILED, "ESP decryption failed")                              \
  _ (INTEG_ERROR, "Integrity check failed")                                   \
  _ (CRYPTO_ENGINE_ERROR, "crypto engine error (packet dropped)")             \
  _ (REPLAY, "SA replayed packet")                                            \
  _ (RUNT, "undersized packet")                                               \
  _ (NO_BUFFERS, "no buffers (packet dropped)")                               \
  _ (OVERSIZED_HEADER, "buffer with oversized header (dropped)")              \
  _ (NO_TAIL_SPACE, "not enough buffer tail space (dropped)")                 \
  _ (TUN_NO_PROTO, "no tunnel protocol")                                      \
  _ (UNSUP_PAYLOAD, "unsupported payload")

typedef enum
{
#define _(sym, str) ESP_DECRYPT_ERROR_##sym,
  foreach_esp_decrypt_error
#undef _
    ESP_DECRYPT_N_ERROR,
} esp_decrypt_error_t;

static char *esp_decrypt_error_strings[] = {
#define _(sym, string) string,
  foreach_esp_decrypt_error
#undef _
};
/* size of the high 32 bits of an extended sequence number (ESN) */
#define N_HI_ESN_BYTES 4
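/* Sketch (assumed from esp_insert_esn () below, not verbatim): with ESN the
 * high 32 bits of the sequence number are appended behind the ciphertext so
 * the ICV computation covers them, then removed again after verification:
 *
 *   u32 seq_hi = clib_host_to_net_u32 (pd->seq_hi);
 *   clib_memcpy_fast (payload + data_len, &seq_hi, N_HI_ESN_BYTES);
 *   data_len += N_HI_ESN_BYTES;
 */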
112 "esp: crypto %U integrity %U pkt-seq %d sa-seq %u sa-seq-hi %u "
119 #define ESP_ENCRYPT_PD_F_FD_TRANSPORT (1 << 2)
/* esp_process_ops (), map failed crypto ops back onto their buffers: */
      ASSERT (op - ops < n_ops);
      if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
	{
	  u32 err, bi = op->user_data;
	  if (op->status == VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC)
	    err = e;
	  else
	    err = ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR;
	  b[bi]->error = node->errors[err];
	  nexts[bi] = ESP_DECRYPT_NEXT_DROP;
	}
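/* Context sketch (assumed, not verbatim from this file): the per-op check
 * above runs inside a scan over the op vector returned by the crypto layer,
 * stopping once all failures have been accounted for:
 *
 *   u32 n_fail, n_ops = vec_len (ops);
 *   if (n_ops == 0)
 *     return;
 *   n_fail = n_ops - vnet_crypto_process_ops (vm, ops, n_ops);
 *   while (n_fail)
 *     {
 *       ... per-op status check as above, n_fail-- on each failure ...
 *       op++;
 *     }
 */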
/* esp_process_chained_ops (), the same failure scan for chained-buffer ops
 * (processed via vnet_crypto_process_chained_ops ()): */
      ASSERT (op - ops < n_ops);
      if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
	{
	  u32 err, bi = op->user_data;
	  if (op->status == VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC)
	    err = e;
	  else
	    err = ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR;
	  b[bi]->error = node->errors[err];
	  nexts[bi] = ESP_DECRYPT_NEXT_DROP;
	}
/* esp_remove_tail (), strip `tail' bytes (padding + footer + ICV) from the
 * end of a buffer chain: */
  if (last->current_length > tail)
    {
      last->current_length -= tail;
      return;
    }
  /* the tail reaches into the previous buffer: walk to the end of the
   * chain, then unlink the last buffer entirely */
  while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
    {
      before_last = b;
      b = vlib_get_buffer (vm, b->next_buffer);
    }
  /* ... */
  before_last->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
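/* Worked example (illustrative): with tail = 16 (a 16-byte ICV) and a last
 * buffer holding only 10 bytes, the remaining 6 bytes are trimmed from the
 * second-to-last buffer, the last buffer is freed (vlib_buffer_free_one ()),
 * and VLIB_BUFFER_NEXT_PRESENT is cleared as shown above. */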
/* esp_move_icv (), the ICV straddles the last two buffers of the chain;
 * rebuild it contiguously in the last buffer: */
  u16 first_sz = icv_sz - last_sz;

  bp = before_last = first;
  while (bp->flags & VLIB_BUFFER_NEXT_PRESENT)
    {
      before_last = bp;
      bp = vlib_get_buffer (vm, bp->next_buffer);
    }

  /* shift the last buffer's bytes up by first_sz, then pull the leading
   * ICV bytes out of the second-to-last buffer in front of them */
  memmove (lb_curr + first_sz, lb_curr, last_sz);
  /* ... */
  if (before_last == first)
    /* ... */
  pd2->lb = before_last;
  /* ... */
  before_last->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
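/* Worked sketch (assumed from the helper's visible logic, not verbatim):
 * with icv_sz = 16 and only last_sz = 6 ICV bytes in the last buffer,
 * first_sz = 10 bytes sit at the tail of the buffer before it.  The memmove
 * above makes room, then something like
 *
 *   clib_memcpy_fast (lb_curr, vlib_buffer_get_tail (before_last) - first_sz,
 *                     first_sz);
 *   before_last->current_length -= first_sz;
 *
 * completes a contiguous 16-byte ICV at lb_curr. */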
/* esp_insert_esn (), append the high sequence bits for the ICV check: */
  if (!ipsec_sa_is_set_USE_ESN (sa))
    return 0;
  /* ... */
  u32 seq_hi = clib_host_to_net_u32 (pd->seq_hi);

/* esp_move_icv_esn (), the same ESN handling when the ICV had to move: */
  if (ipsec_sa_is_set_USE_ESN (sa))
    {
      u32 seq_hi = clib_host_to_net_u32 (pd->seq_hi);
      /* ... */
    }
/* esp_decrypt_chain_integ (), build the chunk list for the integrity check
 * over a chained buffer (signature restored from the declaration): */
static_always_inline int
esp_decrypt_chain_integ (vlib_main_t *vm, ipsec_per_thread_data_t *ptd,
			 const esp_decrypt_packet_data_t *pd,
			 esp_decrypt_packet_data2_t *pd2, ipsec_sa_t *sa0,
			 vlib_buffer_t *b, u8 icv_sz, u8 *start_src,
			 u32 start_len, u8 **digest, u16 *n_ch,
			 u32 *integ_total_len)
{
  /* first chunk covers the part already in the head buffer */
  total_len = ch->len = start_len;
  /* ... walk the chain, one chunk per buffer ... */
  if (ipsec_sa_is_set_USE_ESN (sa0))
    {
      u32 seq_hi = clib_host_to_net_u32 (pd->seq_hi);
      /* ... include seq_hi behind the data of the last chunk ... */
    }
  /* ... */
  total_len += ch->len;

  if (!(cb->flags & VLIB_BUFFER_NEXT_PRESENT))
    break;
  /* ... */
  *integ_total_len = total_len;
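/* Sketch of the per-buffer chunk append used by both chain helpers
 * (assumed, following the vec_add2 () pattern referenced by this file):
 *
 *   vnet_crypto_op_chunk_t *ch;
 *   vec_add2 (ptd->chunks, ch, 1);          // grow the chunk vector by one
 *   ch->src = vlib_buffer_get_current (cb); // this buffer's data
 *   ch->len = cb->current_length;
 *   total_len += ch->len;
 *   n_chunks += 1;
 */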
/* esp_decrypt_chain_crypto (), chunk list for in-place decryption
 * (signature restored from the declaration): */
static_always_inline u32
esp_decrypt_chain_crypto (vlib_main_t *vm, ipsec_per_thread_data_t *ptd,
			  esp_decrypt_packet_data_t *pd,
			  esp_decrypt_packet_data2_t *pd2, ipsec_sa_t *sa0,
			  vlib_buffer_t *b, u8 icv_sz, u8 *start,
			  u32 start_len, u8 **tag, u16 *n_ch)
{
  /* first chunk decrypts in place in the head buffer */
  total_len = ch->len = start_len;
  ch->src = ch->dst = start;
  /* ... */
  if (ipsec_sa_is_set_IS_AEAD (sa0))
    {
      /* ... AEAD: the trailing ICV is the tag; locate it in the chain ... */
    }
  /* ... */
  if (!(cb->flags & VLIB_BUFFER_NEXT_PRESENT))
    break;
/* esp_decrypt_prepare_sync_op (), fragments: with ESN the ICV may have to
 * be relocated first (only the tail of that call survives this listing): */
      /* ... */ &extra_esn, &op->len);
      /* ... on buffer allocation failure: */
      b->error = node->errors[ESP_DECRYPT_ERROR_NO_BUFFERS];
      next[0] = ESP_DECRYPT_NEXT_DROP;
      if (ipsec_sa_is_set_IS_CTR (sa0))
	{
	  /* construct the nonce in scratch space in front of the headers */
	  esp_ctr_nonce_t *nonce =
	    (esp_ctr_nonce_t *) (payload - esp_sz - pd->hdr_sz -
				 sizeof (*nonce));
	  if (ipsec_sa_is_set_IS_AEAD (sa0))
	    {
	      /* ... AEAD (e.g. AES-GCM): fill the AAD and point the op at
	       * the trailing tag; the engine manages the counter ... */
	    }
	  else
	    {
	      nonce->ctr = clib_host_to_net_u32 (1);
	    }
	  nonce->salt = sa0->salt;
	  nonce->iv = *(u64 *) op->iv;
	  op->iv = (u8 *) nonce;
	}
      op->src = op->dst = payload += iv_sz;
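/* Layout assumed for esp_ctr_nonce_t (defined in esp.h, not in this file):
 * an RFC 3686 style counter block, salt | iv | ctr:
 *
 *   typedef struct
 *   {
 *     u32 salt;   // from the SA
 *     u64 iv;     // per-packet IV taken from the ESP payload
 *     u32 ctr;    // 1 for plain CTR; AEAD engines manage it themselves
 *   } __clib_packed esp_ctr_nonce_t;
 */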
/* esp_decrypt_prepare_async_frame (), fragments: */
  u8 *tag = payload + len, *iv = payload + esp_sz, *aad = 0;
  u32 crypto_len, integ_len = 0;
  i16 crypto_start_offset, integ_start_offset = 0;

  if (!ipsec_sa_is_set_IS_AEAD (sa0))
    {
      /* linked algorithms: the integrity check also covers the ESP header */
      integ_start_offset = payload - b->data;
      /* ... ESN / moved-ICV handling; only the call tail survives: */
      /* ... */ &extra_esn, &integ_len);
    }
  /* ... if no buffers could be allocated for the scratch space: */
  return (ESP_DECRYPT_ERROR_NO_BUFFERS);
  if (ipsec_sa_is_set_IS_CTR (sa0))
    {
      /* same nonce construction as the sync path */
      esp_ctr_nonce_t *nonce =
	(esp_ctr_nonce_t *) (payload - esp_sz - pd->hdr_sz - sizeof (*nonce));
      if (ipsec_sa_is_set_IS_AEAD (sa0))
	{
	  /* ... AAD and tag setup ... */
	}
      else
	{
	  nonce->ctr = clib_host_to_net_u32 (1);
	}
      nonce->salt = sa0->salt;
      nonce->iv = *(u64 *) iv;
      iv = (u8 *) nonce;
    }

  crypto_start_offset = (payload += iv_sz) - b->data;
  crypto_len = len - iv_sz;
  /* enqueue the buffer into the shared async frame */
  vnet_crypto_async_add_to_frame (
    vm, f, key_index, crypto_len, integ_len - crypto_len, crypto_start_offset,
    integ_start_offset, bi, async_next, iv, tag, aad, flags);

  return (ESP_DECRYPT_ERROR_RX_PKTS);
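/* Completion context (assumed, consistent with the registrations below):
 * when the engine finishes a frame, each buffer re-enters the graph at
 * `async_next', one of the *-post nodes registered at the bottom of this
 * file.  Those nodes run esp_decrypt_post_inline (), which only performs
 * the header fix-ups in esp_decrypt_post_crypto (); no crypto is redone. */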
/* esp_decrypt_post_crypto (), fragments: runs once the ICV has verified: */
  const u8 tun_flags = IPSEC_SA_FLAG_IS_TUNNEL | IPSEC_SA_FLAG_IS_TUNNEL_V6;
  u8 pad_length = 0, next_header = 0;

  /* ... redo the anti-replay check: the pre-decrypt check ran on a
   * then-unverified sequence number: */
  b->error = node->errors[ESP_DECRYPT_ERROR_REPLAY];
  next[0] = ESP_DECRYPT_NEXT_DROP;
  /* locate the ESP footer (pad_length / next_header); in a chain it sits in
   * the last buffer but can straddle the last two buffers */
  while (bp->flags & VLIB_BUFFER_NEXT_PRESENT)
    {
      /* ... walk to the last buffer ... */
    }

  /* footer entirely within one buffer: */
  pad_length = f->pad_length;
  next_header = f->next_header;

  /* footer straddling two buffers: the bytes are read individually: */
  pad_length = (bt - 1)[0];
  /* ... */

  /* the remaining (ESN / moved-ICV) variants read the same two fields: */
  pad_length = f->pad_length;
  next_header = f->next_header;
  /* ... */
  pad_length = f->pad_length;
  next_header = f->next_header;
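/* Footer layout assumed above (esp_footer_t, defined in esp.h): the last
 * two bytes of the decrypted plaintext, immediately before the ICV:
 *
 *   typedef struct
 *   {
 *     u8 pad_length;    // number of padding bytes to strip
 *     u8 next_header;   // inner protocol: IP4 / IP6 / MPLS / GRE / ...
 *   } __clib_packed esp_footer_t;
 */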
  b->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;

  if ((pd->flags & tun_flags) == 0 && !is_tun)
    {
      /* transport mode: slide the saved IP (and optional UDP) header
       * forward so it lines up with the decrypted inner payload */
      u8 udp_sz = (is_ip6 == 0 && pd->flags & IPSEC_SA_FLAG_UDP_ENCAP) ?
	sizeof (udp_header_t) : 0;
      /* ... */
      u8 *ip = old_ip + adv + udp_sz;

      if (is_ip6 && ip_hdr_sz > 64)
	memmove (ip, old_ip, ip_hdr_sz);
      else
	clib_memcpy_le64 (ip, old_ip, ip_hdr_sz);
      /* ... */
      if (is_ip6)
	{
	  u16 len = clib_net_to_host_u16 (ip6->payload_length);
	  len -= adv + tail_orig;
	  ip6->payload_length = clib_host_to_net_u16 (len);
	  ip6->protocol = next_header;
	  next[0] = ESP_DECRYPT_NEXT_IP6_INPUT;
	}
      else
	{
	  u16 len = clib_net_to_host_u16 (ip4->length);
	  len = clib_host_to_net_u16 (len - adv - tail_orig - udp_sz);
	  /* ... update ip4->length and incrementally fix the checksum
	   * (ip_csum_update / ip_csum_fold) ... */
	  ip4->protocol = next_header;
	  next[0] = ESP_DECRYPT_NEXT_IP4_INPUT;
	}
    }
  else
    {
      /* tunnel mode: dispatch on the inner protocol from the ESP footer */
      if (next_header == IP_PROTOCOL_IP_IN_IP)
	next[0] = ESP_DECRYPT_NEXT_IP4_INPUT;
      else if (next_header == IP_PROTOCOL_IPV6)
	next[0] = ESP_DECRYPT_NEXT_IP6_INPUT;
      else if (next_header == IP_PROTOCOL_MPLS_IN_IP)
	next[0] = ESP_DECRYPT_NEXT_MPLS_INPUT;
      else if (is_tun && next_header == IP_PROTOCOL_GRE)
	{
	  /* ... locate the GRE header in the decrypted payload ... */
	  switch (clib_net_to_host_u16 (gre->protocol))
	    {
	    case GRE_PROTOCOL_teb:
	      /* ... */
	      next[0] = ESP_DECRYPT_NEXT_L2_INPUT;
	      break;
	    case GRE_PROTOCOL_ip4:
	      next[0] = ESP_DECRYPT_NEXT_IP4_INPUT;
	      break;
	    case GRE_PROTOCOL_ip6:
	      next[0] = ESP_DECRYPT_NEXT_IP6_INPUT;
	      break;
	    default:
	      b->error = node->errors[ESP_DECRYPT_ERROR_UNSUP_PAYLOAD];
	      next[0] = ESP_DECRYPT_NEXT_DROP;
	      break;
	    }
	}
      else
	{
	  /* unknown inner protocol */
	  next[0] = ESP_DECRYPT_NEXT_DROP;
	  b->error = node->errors[ESP_DECRYPT_ERROR_UNSUP_PAYLOAD];
	}
  if (ipsec_sa_is_set_IS_PROTECT (sa0))
    {
      /* protected tunnel: check the inner addresses against the tunnel
       * endpoints so a decapsulated packet cannot spoof the tunnel */
      if (next_header == IP_PROTOCOL_IP_IN_IP)
	{
	  /* ... */
	  if (!ip46_address_is_equal_v4 (/* ... */,
					 &ip4->dst_address) ||
	      /* ... */)
	    {
	      next[0] = ESP_DECRYPT_NEXT_DROP;
	      b->error = node->errors[ESP_DECRYPT_ERROR_TUN_NO_PROTO];
	    }
	}
      else if (next_header == IP_PROTOCOL_IPV6)
	{
	  /* ... */
	  if (!ip46_address_is_equal_v6 (/* ... */,
					 &ip6->dst_address) ||
	      /* ... */)
	    {
	      next[0] = ESP_DECRYPT_NEXT_DROP;
	      b->error = node->errors[ESP_DECRYPT_ERROR_TUN_NO_PROTO];
	    }
	}
    }
/* esp_decrypt_inline (), the main dispatch loop (signature restored from
 * the declaration): */
static uword
esp_decrypt_inline (vlib_main_t *vm, vlib_node_runtime_t *node,
		    vlib_frame_t *from_frame, int is_ip6, int is_tun,
		    u16 async_next_node)
{
  /* ... */
  u32 current_sa_index = ~0, current_sa_bytes = 0, current_sa_pkts = 0;
  /* ... */
  int is_async = im->async_mode;
  /* ... */
  clib_memset (sync_nexts, -1, sizeof (sync_nexts));
  clib_memset (async_frames, 0, sizeof (async_frames));

  while (n_left > 0)
    {
      /* ... */
      err = ESP_DECRYPT_ERROR_RX_PKTS;
      /* ... if vlib_buffer_chain_linearize () could not get buffers: */
      err = ESP_DECRYPT_ERROR_NO_BUFFERS;
      esp_set_next_index (b[0], node, err, n_noop, noop_nexts,
			  ESP_DECRYPT_NEXT_DROP);
      if (vnet_buffer (b[0])->ipsec.sad_index != current_sa_index)
	{
	  /* new SA: flush the cached per-SA counters first */
	  if (current_sa_pkts)
	    vlib_increment_combined_counter (&ipsec_sa_counters, thread_index,
					     current_sa_index, current_sa_pkts,
					     current_sa_bytes);
	  current_sa_bytes = current_sa_pkts = 0;

	  current_sa_index = vnet_buffer (b[0])->ipsec.sad_index;
	  sa0 = ipsec_sa_get (current_sa_index);
	  /* ... */
	  is_async = im->async_mode | ipsec_sa_is_set_IS_ASYNC (sa0);
	}

      /* packets for an SA owned by another thread are handed off: */
      err = ESP_DECRYPT_ERROR_HANDOFF;
      esp_set_next_index (b[0], node, err, n_noop, noop_nexts,
			  ESP_DECRYPT_NEXT_HANDOFF);
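/* Handoff sketch (assumed; the exact fields are not shown in this listing):
 * each SA is pinned to one worker thread, and packets arriving on any other
 * thread are redirected through the handoff next:
 *
 *   if (PREDICT_FALSE (thread_index != sa0->thread_index))
 *     {
 *       err = ESP_DECRYPT_ERROR_HANDOFF;
 *       esp_set_next_index (b[0], node, err, n_noop, noop_nexts,
 *                           ESP_DECRYPT_NEXT_HANDOFF);
 *     }
 *
 * ipsec_sa_assign_thread () picks the owning thread the first time an SA
 * is used. */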
      /* find the last buffer of the chain */
      while (pd2->lb->flags & VLIB_BUFFER_NEXT_PRESENT)
	pd2->lb = vlib_get_buffer (vm, pd2->lb->next_buffer);

      /* anti-replay check on the (still unverified) sequence number: */
      err = ESP_DECRYPT_ERROR_REPLAY;
      esp_set_next_index (b[0], node, err, n_noop, noop_nexts,
			  ESP_DECRYPT_NEXT_DROP);
      /* packet too small to hold the ESP header plus ICV: */
      err = ESP_DECRYPT_ERROR_RUNT;
      esp_set_next_index (b[0], node, err, n_noop, noop_nexts,
			  ESP_DECRYPT_NEXT_DROP);
      current_sa_pkts += 1;
      current_sa_bytes += vlib_buffer_length_in_chain (vm, b[0]);

      if (is_async)
	{
	  /* reuse the open frame for this op type, or start a new one */
	  if (NULL == async_frames[async_op] ||
	      vnet_crypto_async_frame_is_full (async_frames[async_op]))
	    {
	      async_frames[async_op] =
		vnet_crypto_async_get_frame (vm, async_op);
	      vec_add1 (ptd->async_frames, async_frames[async_op]);
	    }
	  err = esp_decrypt_prepare_async_frame (
	    vm, node, ptd, async_frames[async_op], sa0, payload, len,
	    /* ... */);
	  if (ESP_DECRYPT_ERROR_RX_PKTS != err)
	    esp_set_next_index (b[0], node, err, n_noop, noop_nexts,
				ESP_DECRYPT_NEXT_DROP);
	}
      else
	{
	  esp_decrypt_prepare_sync_op (
	    vm, node, ptd, &crypto_ops, &integ_ops, op, sa0, payload, len,
	    /* ... */);
	  sync_bufs[n_sync] = b[0];
	}

  /* after the loop: flush the per-SA counters one last time */
  vlib_increment_combined_counter (&ipsec_sa_counters, thread_index,
				   current_sa_index, current_sa_pkts,
				   current_sa_bytes);

  /* submit every open async frame; recycle its buffers to drop on failure */
  if (vnet_crypto_async_submit_open_frame (vm, *async_frame) < 0)
    n_noop += esp_async_recycle_failed_submit (
      vm, *async_frame, node, ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR,
      n_sync, noop_bi, noop_nexts, ESP_DECRYPT_NEXT_DROP);

  /* sync path: run integrity checks first, then decryption */
  esp_process_ops (vm, node, ptd->integ_ops, sync_bufs, sync_nexts,
		   ESP_DECRYPT_ERROR_INTEG_ERROR);
  esp_process_chained_ops (/* ... */ ESP_DECRYPT_ERROR_INTEG_ERROR);
  esp_process_ops (vm, node, ptd->crypto_ops, sync_bufs, sync_nexts,
		   ESP_DECRYPT_ERROR_DECRYPTION_FAILED);
  esp_process_chained_ops (/* ... */ ESP_DECRYPT_ERROR_DECRYPTION_FAILED);

  sync_next = sync_nexts;
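/* Enqueue sketch (assumed, matching the vlib_buffer_enqueue_to_next ()
 * pattern this file references): dropped/no-op buffers and prepared sync
 * buffers each leave the node in one batch:
 *
 *   vlib_buffer_enqueue_to_next (vm, node, noop_bi, noop_nexts, n_noop);
 *   vlib_buffer_enqueue_to_next (vm, node, sync_bi, sync_nexts, n_sync);
 */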
      /* trace loop: record the SA used for each traced packet */
      current_sa_index = vnet_buffer (b[0])->ipsec.sad_index;

/* esp_decrypt_post_inline (), the *-post nodes count packets coming back
 * from the async crypto engine: */
  vlib_node_increment_counter (vm, node->node_index,
			       ESP_DECRYPT_ERROR_RX_POST_PKTS, n_left);
VLIB_REGISTER_NODE (esp4_decrypt_node) = {
  .name = "esp4-decrypt",
  .vector_size = sizeof (u32),
  /* ... */
  .next_nodes = {
    [ESP_DECRYPT_NEXT_DROP] = "ip4-drop",
    [ESP_DECRYPT_NEXT_IP4_INPUT] = "ip4-input-no-checksum",
    [ESP_DECRYPT_NEXT_IP6_INPUT] = "ip6-input",
    [ESP_DECRYPT_NEXT_MPLS_INPUT] = "mpls-drop",
    [ESP_DECRYPT_NEXT_L2_INPUT] = "l2-input",
    [ESP_DECRYPT_NEXT_HANDOFF] = "esp4-decrypt-handoff",
  },
};

VLIB_REGISTER_NODE (esp4_decrypt_post_node) = {
  .name = "esp4-decrypt-post",
  .vector_size = sizeof (u32),
  /* ... */
  .sibling_of = "esp4-decrypt",
};

VLIB_REGISTER_NODE (esp6_decrypt_node) = {
  .name = "esp6-decrypt",
  .vector_size = sizeof (u32),
  /* ... */
  .next_nodes = {
    [ESP_DECRYPT_NEXT_DROP] = "ip6-drop",
    [ESP_DECRYPT_NEXT_IP4_INPUT] = "ip4-input-no-checksum",
    [ESP_DECRYPT_NEXT_IP6_INPUT] = "ip6-input",
    [ESP_DECRYPT_NEXT_MPLS_INPUT] = "mpls-drop",
    [ESP_DECRYPT_NEXT_L2_INPUT] = "l2-input",
    [ESP_DECRYPT_NEXT_HANDOFF] = "esp6-decrypt-handoff",
  },
};

VLIB_REGISTER_NODE (esp6_decrypt_post_node) = {
  .name = "esp6-decrypt-post",
  .vector_size = sizeof (u32),
  /* ... */
  .sibling_of = "esp6-decrypt",
};

VLIB_REGISTER_NODE (esp4_decrypt_tun_node) = {
  .name = "esp4-decrypt-tun",
  .vector_size = sizeof (u32),
  /* ... */
  .next_nodes = {
    [ESP_DECRYPT_NEXT_DROP] = "ip4-drop",
    [ESP_DECRYPT_NEXT_IP4_INPUT] = "ip4-input-no-checksum",
    [ESP_DECRYPT_NEXT_IP6_INPUT] = "ip6-input",
    [ESP_DECRYPT_NEXT_MPLS_INPUT] = "mpls-input",
    [ESP_DECRYPT_NEXT_L2_INPUT] = "l2-input",
    [ESP_DECRYPT_NEXT_HANDOFF] = "esp4-decrypt-tun-handoff",
  },
};

VLIB_REGISTER_NODE (esp4_decrypt_tun_post_node) = {
  .name = "esp4-decrypt-tun-post",
  .vector_size = sizeof (u32),
  /* ... */
  .sibling_of = "esp4-decrypt-tun",
};

VLIB_REGISTER_NODE (esp6_decrypt_tun_node) = {
  .name = "esp6-decrypt-tun",
  .vector_size = sizeof (u32),
  /* ... */
  .next_nodes = {
    [ESP_DECRYPT_NEXT_DROP] = "ip6-drop",
    [ESP_DECRYPT_NEXT_IP4_INPUT] = "ip4-input-no-checksum",
    [ESP_DECRYPT_NEXT_IP6_INPUT] = "ip6-input",
    [ESP_DECRYPT_NEXT_MPLS_INPUT] = "mpls-input",
    [ESP_DECRYPT_NEXT_L2_INPUT] = "l2-input",
    [ESP_DECRYPT_NEXT_HANDOFF] = "esp6-decrypt-tun-handoff",
  },
};

VLIB_REGISTER_NODE (esp6_decrypt_tun_post_node) = {
  .name = "esp6-decrypt-tun-post",
  .vector_size = sizeof (u32),
  /* ... */
  .sibling_of = "esp6-decrypt-tun",
};
#ifndef CLIB_MARCH_VARIANT
/* esp_decrypt_init (), frame queues used by the decrypt handoff nodes: */
  im->esp4_dec_fq_index =
    vlib_frame_queue_main_init (esp4_decrypt_node.index, 0);
  im->esp6_dec_fq_index =
    vlib_frame_queue_main_init (esp6_decrypt_node.index, 0);
  im->esp4_dec_tun_fq_index =
    vlib_frame_queue_main_init (esp4_decrypt_tun_node.index, 0);
  im->esp6_dec_tun_fq_index =
    vlib_frame_queue_main_init (esp6_decrypt_tun_node.index, 0);