43 }) ip4_and_esp_header_t;
51 }) ip4_and_udp_and_esp_header_t;
58 }) ip6_and_esp_header_t;
75 #define ESP_SEQ_MAX (4294967295UL) 76 #define ESP_MAX_BLOCK_SIZE (16) 77 #define ESP_MAX_IV_SIZE (16) 78 #define ESP_MAX_ICV_SIZE (32) 125 if (ipsec_sa_is_set_USE_ESN (sa))
127 u32 seq_hi = clib_host_to_net_u32 (sa->
seq_hi);
143 aad = (esp_aead_t *) op->
aad;
146 if (ipsec_sa_is_set_USE_ESN (sa))
149 aad->
data[1] = clib_host_to_net_u32 (sa->
seq_hi);
u32 vnet_crypto_process_ops(vlib_main_t *vm, vnet_crypto_op_t ops[], u32 n_ops)
struct esp_aead_t_ esp_aead_t
AES GCM Additional Authentication data.
AES GCM Additional Authentication data.
vnet_crypto_op_id_t integ_op_id
#define clib_memcpy(d, s, n)
static int esp_seq_advance(ipsec_sa_t *sa)
u32 data[3]
for GCM: when using ESN it's: SPI, seq-hi, seq-low else SPI, seq-low
static_always_inline void vnet_crypto_op_init(vnet_crypto_op_t *op, vnet_crypto_op_id_t type)
static void esp_aad_fill(vnet_crypto_op_t *op, const esp_header_t *esp, const ipsec_sa_t *sa)
static unsigned int hmac_calc(vlib_main_t *vm, ipsec_sa_t *sa, u8 *data, int data_len, u8 *signature)
typedef CLIB_PACKED(struct { ip4_header_t ip4;esp_header_t esp;}) ip4_and_esp_header_t
u8 * format_esp_header(u8 *s, va_list *args)
vnet_crypto_key_index_t integ_key_index