#ifndef __DPDK_IPSEC_H__
#define __DPDK_IPSEC_H__

#include <rte_config.h>
#include <rte_crypto.h>
#include <rte_cryptodev.h>

#if CLIB_DEBUG > 0
#define always_inline static inline
#else
#define always_inline static inline __attribute__ ((__always_inline__))
#endif

#define DPDK_CRYPTO_N_QUEUE_DESC  2048
#define DPDK_CRYPTO_NB_SESS_OBJS  20000

#define foreach_dpdk_crypto_input_next \
  _(DROP, "error-drop") \
  _(IP4_LOOKUP, "ip4-lookup") \
  _(IP6_LOOKUP, "ip6-lookup") \
  _(INTERFACE_OUTPUT, "interface-output") \
  _(DECRYPT4_POST, "dpdk-esp4-decrypt-post") \
  _(DECRYPT6_POST, "dpdk-esp6-decrypt-post")

typedef enum
{
#define _(f,s) DPDK_CRYPTO_INPUT_NEXT_##f,
  foreach_dpdk_crypto_input_next
#undef _
    DPDK_CRYPTO_INPUT_N_NEXT,
} dpdk_crypto_input_next_t;

#define MAX_QP_PER_LCORE 16

/* The resource/algorithm bookkeeping struct definitions are elided from
 * this extract; these are the member fragments it preserved: */
  struct rte_crypto_op **ops;
  enum rte_crypto_sym_xform_type type;
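/*
 * For reference, instantiating `_' as above mechanically expands
 * foreach_dpdk_crypto_input_next into the enumerators
 *
 *   DPDK_CRYPTO_INPUT_NEXT_DROP,
 *   DPDK_CRYPTO_INPUT_NEXT_IP4_LOOKUP,
 *   DPDK_CRYPTO_INPUT_NEXT_IP6_LOOKUP,
 *   DPDK_CRYPTO_INPUT_NEXT_INTERFACE_OUTPUT,
 *   DPDK_CRYPTO_INPUT_NEXT_DECRYPT4_POST,
 *   DPDK_CRYPTO_INPUT_NEXT_DECRYPT6_POST,
 *
 * followed by DPDK_CRYPTO_INPUT_N_NEXT as the count.
 */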
static const u8 pad_data[] =
  { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0 };
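/*
 * Illustrative sketch (hypothetical helper, not in the original header):
 * RFC 4303 ESP padding is the monotonic byte sequence 1, 2, 3, ..., so a
 * trailer of up to 15 pad bytes is a single copy out of pad_data.
 */
static inline void
example_esp_write_pad (u8 * pad_start, u8 pad_bytes)
{
  /* pad_bytes is at most block_size - 1, i.e. <= 15 here */
  clib_memcpy (pad_start, pad_data, pad_bytes);
}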
static_always_inline u32
crypto_op_len (void)
{
  /* must round the same way as crypto_op_get_priv_offset () below */
  const u32 align = 16;
  u32 op_size =
    sizeof (struct rte_crypto_op) + sizeof (struct rte_crypto_sym_op);

  return ((op_size + align - 1) & ~(align - 1)) + sizeof (dpdk_op_priv_t);
}

static_always_inline u32
crypto_op_get_priv_offset (void)
{
  const u32 align = 16;
  u32 offset;

  offset = sizeof (struct rte_crypto_op) + sizeof (struct rte_crypto_sym_op);
  offset = (offset + align - 1) & ~(align - 1);

  return offset;
}
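/*
 * Companion accessor, sketched here for completeness from the offset
 * helper above: the per-op private area sits right behind the generic +
 * symmetric op headers, at the aligned offset just computed.
 */
static_always_inline dpdk_op_priv_t *
crypto_op_get_priv (struct rte_crypto_op * op)
{
  return (dpdk_op_priv_t *) (((u8 *) op) + crypto_op_get_priv_offset ());
}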
static_always_inline clib_error_t *
crypto_get_session (struct rte_cryptodev_sym_session **session,
                    u32 sa_idx, crypto_resource_t *res,
                    crypto_worker_main_t *cwm, u8 is_outbound)
{
  /* body largely elided from this extract */
  struct rte_cryptodev_sym_session *sess;
  /* ... */
}
static_always_inline u16
get_resource (crypto_worker_main_t *cwm, ipsec_sa_t *sa)
{
  u16 cipher_res = cwm->cipher_resource_idx[sa->crypto_alg];
  u16 auth_res = cwm->auth_resource_idx[sa->integ_alg];
  u8 is_aead;

  /* AEAD algorithms do cipher and auth in one pass on one device */
  is_aead = ((sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_128) ||
             (sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_192) ||
             (sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_256));

  /* use the shared resource when one queue pair handles both */
  if (cipher_res == auth_res)
    return cipher_res;
  /* ... remaining cases elided from this extract ... */
}
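/*
 * Illustrative usage (hypothetical helper): an SA can only be offloaded
 * when a single queue pair covers both of its algorithms; get_resource ()
 * signals the impossible cases with (u16) ~0.
 */
static inline int
example_sa_is_offloadable (crypto_worker_main_t * cwm, ipsec_sa_t * sa)
{
  return get_resource (cwm, sa) != (u16) ~0;
}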
static_always_inline i32
crypto_alloc_ops (u8 numa, struct rte_crypto_op **ops, u32 n)
{
  dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
  crypto_data_t *data = vec_elt_at_index (dcm->data, numa);
  i32 ret;

  /* all-or-nothing: on failure no ops are taken from the pool */
  ret = rte_mempool_get_bulk (data->crypto_op, (void **) ops, n);

  return ret;
}
static_always_inline void
crypto_free_ops (u8 numa, struct rte_crypto_op **ops, u32 n)
{
  dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
  crypto_data_t *data = vec_elt_at_index (dcm->data, numa);

  if (!n)
    return;

  rte_mempool_put_bulk (data->crypto_op, (void **) ops, n);
}
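/*
 * Illustrative pattern (hypothetical helper): rte_mempool_get_bulk () is
 * all-or-nothing, so a failed crypto_alloc_ops () leaves nothing to clean
 * up, while a burst that cannot be used must go back via crypto_free_ops ().
 */
static inline i32
example_alloc_burst (u8 numa, struct rte_crypto_op **ops, u32 n)
{
  if (crypto_alloc_ops (numa, ops, n))
    return -1;                  /* pool exhausted; no ops were taken */

  /* ... set the ops up; on any failure before enqueue:
     crypto_free_ops (numa, ops, n);  */
  return 0;
}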
static_always_inline void
crypto_enqueue_ops (vlib_main_t *vm, crypto_worker_main_t *cwm,
                    u32 node_index, u32 error, u8 numa, u8 encrypt)
{
  dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
  crypto_resource_t *res;
  u16 *res_idx;

  vec_foreach (res_idx, cwm->resource_idx)
  {
    u16 enq, n_ops;
    res = vec_elt_at_index (dcm->resource, res_idx[0]);

    /* ... compute the queue-pair headroom into n_ops, then clamp it to
     * what is actually pending on this resource ... */
    n_ops = res->n_ops < n_ops ? res->n_ops : n_ops;
    enq = rte_cryptodev_enqueue_burst (res->dev_id, res->qp_id,
                                       res->ops, n_ops);
    /* ... error accounting for ops that did not fit is elided ... */
  }
}
static_always_inline void
crypto_op_setup (u8 is_aead, struct rte_mbuf *mb0,
                 struct rte_crypto_op *op, void *session,
                 u32 cipher_off, u32 cipher_len,
                 u32 auth_off, u32 auth_len,
                 u8 * aad, u8 * digest, u64 digest_paddr)
{
  struct rte_crypto_sym_op *sym_op;

  /* the symmetric op immediately follows the generic op header */
  sym_op = (struct rte_crypto_sym_op *) (op + 1);

  sym_op->m_src = mb0;
  sym_op->session = session;

  if (is_aead)
    {
      sym_op->aead.data.offset = cipher_off;
      sym_op->aead.data.length = cipher_len;

      sym_op->aead.aad.data = aad;
      sym_op->aead.aad.phys_addr =
        op->phys_addr + (uintptr_t) aad - (uintptr_t) op;

      sym_op->aead.digest.data = digest;
      sym_op->aead.digest.phys_addr = digest_paddr;
    }
  else
    {
      sym_op->cipher.data.offset = cipher_off;
      sym_op->cipher.data.length = cipher_len;

      sym_op->auth.data.offset = auth_off;
      sym_op->auth.data.length = auth_len;

      sym_op->auth.digest.data = digest;
      sym_op->auth.digest.phys_addr = digest_paddr;
    }
}
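/*
 * Illustrative caller sketch (hypothetical helper): filling an op for an
 * AEAD (AES-GCM) packet.  Note that aad must point inside the op
 * allocation, since crypto_op_setup () derives its physical address from
 * op->phys_addr.
 */
static inline void
example_setup_gcm_op (struct rte_mbuf * mb, struct rte_crypto_op *op,
                      void *sess, u32 payload_off, u32 payload_len,
                      u8 * aad, u8 * digest, u64 digest_paddr)
{
  /* auth offset/length are unused on the AEAD path; pass zeros */
  crypto_op_setup (1 /* is_aead */ , mb, op, sess,
                   payload_off, payload_len, 0, 0,
                   aad, digest, digest_paddr);
}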
/*
 * Also declared in the full header but elided from this extract: the
 * crypto_worker_main_t, crypto_resource_t, crypto_data_t and
 * dpdk_crypto_main_t definitions (per-worker resource indices
 * cipher_resource_idx[IPSEC_CRYPTO_N_ALG] / auth_resource_idx[IPSEC_INTEG_N_ALG],
 * per-resource ops[VLIB_FRAME_SIZE], per-numa op/session mempools and
 * session-by-driver/SA lookup tables), plus crypto_set_icb (),
 * add_session_by_drv_and_sa_idx (), get_session_by_drv_and_sa_idx (),
 * crypto_auto_placement () and create_sym_session ().
 */

#endif /* __DPDK_IPSEC_H__ */