FD.io VPP v21.06-3-gbb25fbf28: Vector Packet Processing
#include <vpp/app/version.h>

#include <rte_bus_vdev.h>
#include <rte_cryptodev.h>
#include <rte_crypto_sym.h>
#include <rte_crypto.h>
#include <rte_cryptodev_pmd.h>
#include <rte_config.h>
#if CLIB_DEBUG > 0
#define always_inline static inline
#else
#define always_inline static inline __attribute__ ((__always_inline__))
#endif
union rte_crypto_sym_ofs ofs;

*max_end = clib_max (crypto_end, integ_end);

ofs.ofs.cipher.tail = *max_end - crypto_end;
ofs.ofs.auth.tail = *max_end - integ_end;
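These four lines are what survives of compute_ofs_linked_alg() (its full signature appears in the symbol index at the end of this listing). A minimal reconstruction sketch, assuming the frame element carries crypto_start_offset, crypto_total_length, integ_start_offset and integ_length_adj fields as in VPP's async crypto frame layout:

/* Sketch: derive the DPDK cipher/auth head and tail offsets for one frame
 * element, relative to the common [*min_ofs, *max_end) data window. */
static_always_inline u64
compute_ofs_linked_alg (vnet_crypto_async_frame_elt_t *fe, i16 *min_ofs,
                        u32 *max_end)
{
  union rte_crypto_sym_ofs ofs;
  u32 crypto_end = fe->crypto_start_offset + fe->crypto_total_length;
  u32 integ_end = fe->integ_start_offset + fe->crypto_total_length +
                  fe->integ_length_adj;

  *min_ofs = clib_min (fe->crypto_start_offset, fe->integ_start_offset);
  *max_end = clib_max (crypto_end, integ_end);

  /* head = distance from the common start, tail = distance to common end */
  ofs.ofs.cipher.head = fe->crypto_start_offset - *min_ofs;
  ofs.ofs.cipher.tail = *max_end - crypto_end;
  ofs.ofs.auth.head = fe->integ_start_offset - *min_ofs;
  ofs.ofs.auth.tail = *max_end - integ_end;

  return ofs.raw;
}

Packing head and tail offsets into one rte_crypto_sym_ofs lets a single raw enqueue describe cipher and auth regions that start and end at different points within the same data vector.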
static_always_inline int
cryptodev_frame_build_sgl (vlib_main_t *vm, enum rte_iova_mode iova_mode,
                           struct rte_crypto_vec *data_vec, u16 *n_seg,
                           vlib_buffer_t *b, u32 size)

struct rte_crypto_vec *vec = data_vec + 1;

while ((b->flags & VLIB_BUFFER_NEXT_PRESENT) && size)

if (iova_mode == RTE_IOVA_VA)
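The fragments above belong to cryptodev_frame_build_sgl(), whose full signature the symbol index preserves. A sketch of the loop under that signature; data_vec[0] is assumed to hold the head segment already, and the exact length bookkeeping of the original may differ:

static_always_inline int
cryptodev_frame_build_sgl (vlib_main_t *vm, enum rte_iova_mode iova_mode,
                           struct rte_crypto_vec *data_vec, u16 *n_seg,
                           vlib_buffer_t *b, u32 size)
{
  struct rte_crypto_vec *vec = data_vec + 1;

  while ((b->flags & VLIB_BUFFER_NEXT_PRESENT) && size)
    {
      u32 len;

      if (vec == data_vec + CRYPTODEV_MAX_N_SGL)
        return -1; /* chain longer than the device's SGL limit */

      b = vlib_get_buffer (vm, b->next_buffer);
      len = clib_min (b->current_length, size);
      vec->base = vlib_buffer_get_current (b);
      /* under RTE_IOVA_VA the device consumes virtual addresses directly;
         otherwise a physical address is resolved per segment */
      vec->iova = iova_mode == RTE_IOVA_VA ?
        pointer_to_uword (vec->base) : vlib_buffer_get_current_pa (vm, b);
      vec->len = len;
      size -= len;
      vec++;
      *n_seg += 1;
    }
  return 0;
}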
union rte_cryptodev_session_ctx sess_ctx;

cet->ctx, RTE_CRYPTO_OP_WITH_SESSION,
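These two fragments belong to cryptodev_reset_ctx() (signature in the symbol index). A sketch, assuming the engine thread keeps its device and queue pair ids in cryptodev_id/cryptodev_q fields (names assumed) alongside the reset_sess dummy session listed in the index:

static_always_inline void
cryptodev_reset_ctx (cryptodev_engine_thread_t *cet)
{
  union rte_cryptodev_session_ctx sess_ctx;

  ASSERT (cet->reset_sess != 0);

  /* rebind the raw DP context to a known-good dummy session so the
     queue pair is usable again after a failed enqueue or dequeue */
  sess_ctx.crypto_sess = cet->reset_sess;

  rte_cryptodev_configure_raw_dp_ctx (cet->cryptodev_id, cet->cryptodev_q,
                                      cet->ctx, RTE_CRYPTO_OP_WITH_SESSION,
                                      sess_ctx, 0 /* not an update */);
}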
struct rte_crypto_va_iova_ptr iv_vec, digest_vec;

u32 last_key_index = ~0;

n_elts = frame->n_elts;

cryptodev_mark_frame_err_status (frame,
                                 VNET_CRYPTO_OP_STATUS_FAIL_ENGINE_ERR);
union rte_crypto_sym_ofs cofs;

union rte_cryptodev_session_ctx sess_ctx;
status = rte_cryptodev_configure_raw_dp_ctx (
  /* device id, queue pair id, */ cet->ctx,
  RTE_CRYPTO_OP_WITH_SESSION, sess_ctx, is_update);
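This configure call runs whenever a frame element's key index differs from the previous one. A sketch of that key-switch pattern; the per-key session table layout (keys[numa][op_type]) and the cryptodev_key_t name are assumptions modeled on cryptodev_session_create() from the symbol index:

if (PREDICT_FALSE (last_key_index != fe->key_index))
  {
    u8 is_update = last_key_index != ~0; /* first bind vs. rebind */
    cryptodev_key_t *key = vec_elt_at_index (cmt->keys, fe->key_index);
    union rte_cryptodev_session_ctx sess_ctx;

    /* lazily create the DPDK session for this key on this numa node */
    if (PREDICT_FALSE (key->keys[vm->numa_node][op_type] == 0))
      {
        if (PREDICT_FALSE (cryptodev_session_create (vm, fe->key_index, 0) < 0))
          goto error_exit;
      }
    sess_ctx.crypto_sess = key->keys[vm->numa_node][op_type];

    status = rte_cryptodev_configure_raw_dp_ctx (
      cet->cryptodev_id, cet->cryptodev_q, cet->ctx,
      RTE_CRYPTO_OP_WITH_SESSION, sess_ctx, is_update);
    if (PREDICT_FALSE (status < 0))
      goto error_exit;

    last_key_index = fe->key_index;
  }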
vec->len = max_end - min_ofs;

vec[0].base = (void *) (b[0]->data + min_ofs);
iv_vec.va = (void *) fe->iv;
digest_vec.va = (void *) fe->tag;

The same assignments appear a second time because the source branches on the
IOVA mode: once for RTE_IOVA_VA, where virtual addresses double as IOVAs, and
once for the physical-address mode, where each iova field is resolved
separately.

vec[0].base = (void *) (b[0]->data + min_ofs);
iv_vec.va = (void *) fe->iv;
digest_vec.va = (void *) fe->tag;

if (cryptodev_frame_build_sgl (vm, cmt->iova_mode, vec, &n_seg, b[0],
                               max_end - min_ofs - vec->len) < 0)
status = rte_cryptodev_raw_enqueue (cet->ctx, vec, n_seg, cofs, &iv_vec,
                                    &digest_vec, 0, (void *) frame);

status = rte_cryptodev_raw_enqueue_done (cet->ctx, frame->n_elts);
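The two calls above show the raw API's two-phase enqueue: rte_cryptodev_raw_enqueue() only stages a descriptor, and nothing becomes visible to the device until rte_cryptodev_raw_enqueue_done() commits the batch. A sketch of the surrounding control flow:

/* stage one descriptor per frame element */
status = rte_cryptodev_raw_enqueue (cet->ctx, vec, n_seg, cofs, &iv_vec,
                                    &digest_vec, 0 /* no aad */,
                                    (void *) frame);
if (PREDICT_FALSE (status < 0))
  goto error_exit;

/* ... next element ... */

/* commit everything staged for this frame in one shot */
status = rte_cryptodev_raw_enqueue_done (cet->ctx, frame->n_elts);
if (PREDICT_FALSE (status < 0))
  {
    cryptodev_reset_ctx (cet);
    return -1;
  }
cet->inflight += frame->n_elts;

The frame pointer passed as user data is what the dequeue side gets back later, which is how completions are matched to frames without any per-op allocation.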
cryptodev_mark_frame_err_status (frame,
                                 VNET_CRYPTO_OP_STATUS_FAIL_ENGINE_ERR);
union rte_crypto_sym_ofs cofs;

struct rte_crypto_va_iova_ptr iv_vec, digest_vec, aad_vec;
u32 last_key_index = ~0;

n_elts = frame->n_elts;

cryptodev_mark_frame_err_status (frame,
                                 VNET_CRYPTO_OP_STATUS_FAIL_ENGINE_ERR);
union rte_cryptodev_session_ctx sess_ctx;

status = rte_cryptodev_configure_raw_dp_ctx (
  /* device id, queue pair id, */ cet->ctx,
  RTE_CRYPTO_OP_WITH_SESSION, sess_ctx, is_update);
iv_vec.va = (void *) fe->iv;
digest_vec.va = (void *) fe->tag;
aad_vec.va = (void *) (cet->aad_buf + aad_offset);

As in the linked-algorithm path, the block is duplicated across the
RTE_IOVA_VA and physical-address branches; note that the AAD pointer targets
the thread-local staging buffer rather than the frame element itself.

iv_vec.va = (void *) fe->iv;
aad_vec.va = (void *) (cet->aad_buf + aad_offset);
digest_vec.va = (void *) fe->tag;
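aad_vec points into cet->aad_buf because the AAD bytes are first copied into this thread-local, DMA-able staging area. A sketch of that staging step; the aad_index cursor, the 16-byte stride, and the aad_phy_addr field are assumptions based on the CRYPTODEV_AAD_MASK and CRYPTODEV_MAX_AAD_SIZE macros in the symbol index:

/* stage the AAD into contiguous, device-reachable memory */
u32 aad_offset = (cet->aad_index++ & CRYPTODEV_AAD_MASK) << 4;
clib_memcpy_fast (cet->aad_buf + aad_offset, fe->aad, aad_len);

aad_vec.va = (void *) (cet->aad_buf + aad_offset);
aad_vec.iova = cmt->iova_mode == RTE_IOVA_VA ?
  pointer_to_uword (aad_vec.va) :  /* VA doubles as IOVA */
  cet->aad_phy_addr + aad_offset;  /* precomputed PA (field name assumed) */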
rte_cryptodev_raw_enqueue (cet->ctx, vec, n_seg, cofs, &iv_vec,
                           &digest_vec, &aad_vec, (void *) frame);
status = rte_cryptodev_raw_enqueue_done (cet->ctx, frame->n_elts);

cet->inflight += frame->n_elts;
cryptodev_mark_frame_err_status (frame,
                                 VNET_CRYPTO_OP_STATUS_FAIL_ENGINE_ERR);
f->elts[index].status = is_op_success ? VNET_CRYPTO_OP_STATUS_COMPLETED :
                                        VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
#define GET_RING_OBJ(r, pos, f)                                               \
  do                                                                          \
    {                                                                         \
      vnet_crypto_async_frame_t **ring = (void *) &r[1];                      \
      f = ring[(r->cons.head + pos) & r->mask];                               \
    }                                                                         \
  while (0)
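GET_RING_OBJ peeks at the pos-th pending frame without consuming it: the object table of an rte_ring sits immediately after its header, so &r[1] is the first slot and cons.head + pos, masked, walks forward from the oldest entry. A usage sketch:

/* inspect every cached frame, oldest first, without dequeuing */
u32 n_cached_frame = rte_ring_count (cet->cached_frame);
for (u32 i = 0; i < n_cached_frame; i++)
  {
    vnet_crypto_async_frame_t *f;
    GET_RING_OBJ (cet->cached_frame, i, f);
    /* only a fully completed head frame may be popped with
       rte_ring_sc_dequeue (); partially dequeued frames stay cached */
  }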
u32 *enqueue_thread_idx)

u32 n_deq, n_success;
u8 no_job_to_deq = 0;

for (i = 0; i < n_cached_frame; i++)
enum rte_crypto_op_status op_status;

if (i < n_cached_frame - 2)

err = f->state & 0x80;

for (j = f->n_elts - n_left; j < f->n_elts && inflight; j++)

f_ret = rte_cryptodev_raw_dequeue (cet->ctx, &ret, &op_status);
case RTE_CRYPTO_OP_STATUS_SUCCESS:
  f->elts[j].status = VNET_CRYPTO_OP_STATUS_COMPLETED;
  break;
default:
  f->elts[j].status = VNET_CRYPTO_OP_STATUS_FAIL_ENGINE_ERR;

f->state = f->n_elts - j;

f->state = f->n_elts - j;
rte_ring_sc_dequeue (cet->cached_frame, (void **) &frame_ret);
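While a frame sits in the cache ring, its state field is reused as scratch: bit 7 accumulates any per-element error and the low bits count elements still pending. A sketch of the drain step the fragments above come from; the exact branch structure of the original may differ:

u8 err = f->state & 0x80;
u32 n_left = f->state & 0x7f;
int ret;
u16 j;
enum rte_crypto_op_status op_status;

for (j = f->n_elts - n_left; j < f->n_elts && inflight; j++)
  {
    void *f_ret = rte_cryptodev_raw_dequeue (cet->ctx, &ret, &op_status);
    if (!f_ret)
      break; /* device has nothing more for us this round */
    f->elts[j].status = op_status == RTE_CRYPTO_OP_STATUS_SUCCESS ?
      VNET_CRYPTO_OP_STATUS_COMPLETED : VNET_CRYPTO_OP_STATUS_FAIL_ENGINE_ERR;
    err |= op_status != RTE_CRYPTO_OP_STATUS_SUCCESS ? 0x80 : 0;
    inflight--;
  }

if (j == f->n_elts) /* fully drained: pop it and report the final state */
  {
    rte_ring_sc_dequeue (cet->cached_frame, (void **) &frame_ret);
    frame_ret->state = err ? VNET_CRYPTO_FRAME_STATE_ELT_ERROR :
                             VNET_CRYPTO_FRAME_STATE_SUCCESS;
  }
else /* record progress and stop; later frames must wait their turn */
  f->state = err | (f->n_elts - j);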
if (!inflight || no_job_to_deq || !n_room_left)

n_deq = rte_cryptodev_raw_dequeue_burst (
  cet->ctx, /* dequeue-count and post-dequeue callbacks, */
  (void **) &frame, 0, &n_success, &dequeue_status);

no_job_to_deq = n_deq < frame->n_elts;

if (frame_ret || n_cached_frame || no_job_to_deq)

frame->state |= ((n_success < n_deq) << 7);

while (inflight && n_room_left && !no_job_to_deq)

n_deq = rte_cryptodev_raw_dequeue_burst (
  cet->ctx, /* dequeue-count and post-dequeue callbacks, */
  (void **) &frame, 0, &n_success, &dequeue_status);

no_job_to_deq = n_deq < frame->n_elts;
frame->state |= ((n_success < n_deq) << 7);
if (inflight < cet->inflight)
  rte_cryptodev_raw_dequeue_done (cet->ctx, cet->inflight - inflight);

*nb_elts_processed = frame_ret->n_elts;
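Once the cache is drained, completions are pulled in bursts. The frame pointer supplied at enqueue time comes back as user data, cryptodev_post_dequeue() stamps per-element status as each op completes, and rte_cryptodev_raw_dequeue_done() finally releases the queue slots. A sketch of one burst, assuming the raw DP API's callback ordering of this era (dequeue-count callback, then post-dequeue callback):

u32 n_deq, n_success;
int dequeue_status;
vnet_crypto_async_frame_t *frame;

n_deq = rte_cryptodev_raw_dequeue_burst (
  cet->ctx, cryptodev_get_frame_n_elts, cryptodev_post_dequeue,
  (void **) &frame, 0 /* single user-data slot */, &n_success,
  &dequeue_status);

if (n_deq)
  {
    no_job_to_deq = n_deq < frame->n_elts;      /* frame only partly done */
    frame->state |= ((n_success < n_deq) << 7); /* bit 7: element error */
    inflight -= n_deq;
  }

/* acknowledge everything dequeued so the PMD can recycle its slots */
if (inflight < cet->inflight)
  rte_cryptodev_raw_dequeue_done (cet->ctx, cet->inflight - inflight);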
struct rte_cryptodev_info info;
struct rte_cryptodev_sym_capability_idx cap_auth_idx;
struct rte_cryptodev_sym_capability_idx cap_cipher_idx;
struct rte_cryptodev_sym_capability_idx cap_aead_idx;
u32 support_raw_api = 1, max_ctx_size = 0;

rte_cryptodev_info_get (cinst->dev_id, &info);
if (!(info.feature_flags & RTE_CRYPTODEV_FF_SYM_RAW_DP))

ctx_size = rte_cryptodev_get_raw_dp_ctx_size (cinst->dev_id);
max_ctx_size = clib_max (ctx_size, max_ctx_size);

if (!support_raw_api)
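Registration begins by verifying that every selected cryptodev implements the raw data-path API and by sizing the per-thread DP context to the largest requirement. A sketch of that probe loop over cmt->cryptodev_inst (the vec_foreach iterator and the cryptodev_inst vector both appear in the symbol index; the error message text is illustrative):

cryptodev_inst_t *cinst;
u32 ctx_size, max_ctx_size = 0, support_raw_api = 1;

vec_foreach (cinst, cmt->cryptodev_inst)
  {
    struct rte_cryptodev_info info;

    rte_cryptodev_info_get (cinst->dev_id, &info);
    if (!(info.feature_flags & RTE_CRYPTODEV_FF_SYM_RAW_DP))
      {
        support_raw_api = 0; /* one incapable device disables the engine */
        break;
      }

    ctx_size = rte_cryptodev_get_raw_dp_ctx_size (cinst->dev_id);
    max_ctx_size = clib_max (ctx_size, max_ctx_size);
  }

if (!support_raw_api)
  return clib_error_return (0, "no raw data-path API support");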
u8 *name = format (0, "cache_frame_ring_%u_%u", numa, thread_id);

cet->cached_frame =
  rte_ring_create ((char *) name, CRYPTODEV_DEQ_CACHE_SZ, numa,
                   RING_F_SC_DEQ | RING_F_SP_ENQ);

cet->aad_buf = rte_zmalloc_socket (
  0, CRYPTODEV_NB_CRYPTO_OPS * CRYPTODEV_MAX_AAD_SIZE,
  CLIB_CACHE_LINE_BYTES, numa);
#define _(a, b, c, d, e, f, g)                                                \
  cap_aead_idx.type = RTE_CRYPTO_SYM_XFORM_AEAD;                              \
  cap_aead_idx.algo.aead = RTE_CRYPTO_##b##_##c;                              \
  if (cryptodev_check_cap_support (&cap_aead_idx, g, e, f))                   \
    {                                                                         \
      vnet_crypto_register_async_handler (                                    \
        vm, eidx, VNET_CRYPTO_OP_##a##_TAG##e##_AAD##f##_ENC,                 \
        cryptodev_raw_enq_aead_aad_##f##_enc, cryptodev_raw_dequeue);         \
      vnet_crypto_register_async_handler (                                    \
        vm, eidx, VNET_CRYPTO_OP_##a##_TAG##e##_AAD##f##_DEC,                 \
        cryptodev_raw_enq_aead_aad_##f##_dec, cryptodev_raw_dequeue);         \
    }
#define _(a, b, c, d, e)                                                      \
  cap_auth_idx.type = RTE_CRYPTO_SYM_XFORM_AUTH;                              \
  cap_auth_idx.algo.auth = RTE_CRYPTO_AUTH_##d##_HMAC;                        \
  cap_cipher_idx.type = RTE_CRYPTO_SYM_XFORM_CIPHER;                          \
  cap_cipher_idx.algo.cipher = RTE_CRYPTO_CIPHER_##b;                         \
  if (cryptodev_check_cap_support (&cap_cipher_idx, c, -1, -1) &&             \
      cryptodev_check_cap_support (&cap_auth_idx, -1, e, -1))                 \
    {                                                                         \
      vnet_crypto_register_async_handler (                                    \
        vm, eidx, VNET_CRYPTO_OP_##a##_##d##_TAG##e##_ENC,                    \
        cryptodev_raw_enq_linked_alg_enc, cryptodev_raw_dequeue);             \
      vnet_crypto_register_async_handler (                                    \
        vm, eidx, VNET_CRYPTO_OP_##a##_##d##_TAG##e##_DEC,                    \
        cryptodev_raw_enq_linked_alg_dec, cryptodev_raw_dequeue);             \
    }
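For concreteness, here is how one AEAD entry would expand, taking a hypothetical (a, b, c, d, e, f, g) = (AES_128_GCM, AEAD, AES_GCM, 12, 16, 8, 16) tuple (IV, tag, AAD, and key sizes assumed for illustration):

cap_aead_idx.type = RTE_CRYPTO_SYM_XFORM_AEAD;
cap_aead_idx.algo.aead = RTE_CRYPTO_AEAD_AES_GCM;
if (cryptodev_check_cap_support (&cap_aead_idx, 16 /* key */, 16 /* tag */,
                                 8 /* aad */))
  {
    vnet_crypto_register_async_handler (
      vm, eidx, VNET_CRYPTO_OP_AES_128_GCM_TAG16_AAD8_ENC,
      cryptodev_raw_enq_aead_aad_8_enc, cryptodev_raw_dequeue);
    vnet_crypto_register_async_handler (
      vm, eidx, VNET_CRYPTO_OP_AES_128_GCM_TAG16_AAD8_DEC,
      cryptodev_raw_enq_aead_aad_8_dec, cryptodev_raw_dequeue);
  }

The linked-algorithm macro works the same way, except that both a cipher and an auth capability must be present before the pair of handlers is registered.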
Symbols referenced in this file (from the cross-reference index):

u32 next_buffer: Next buffer for this linked-list of buffers.
enum rte_iova_mode iova_mode
static_always_inline vnet_crypto_async_frame_t *cryptodev_raw_dequeue (vlib_main_t *vm, u32 *nb_elts_processed, u32 *enqueue_thread_idx)
#define vlib_prefetch_buffer_header(b, type): Prefetch buffer metadata.
static uword vlib_buffer_get_current_pa (vlib_main_t *vm, vlib_buffer_t *b)
static_always_inline int cryptodev_frame_linked_algs_enqueue (vlib_main_t *vm, vnet_crypto_async_frame_t *frame, cryptodev_op_type_t op_type)
static_always_inline void cryptodev_post_dequeue (void *frame, u32 index, u8 is_op_success)
static vlib_buffer_t *vlib_get_buffer (vlib_main_t *vm, u32 buffer_index): Translate buffer index into buffer pointer.
static uword pointer_to_uword (const void *p)
vlib_get_buffers (vm, from, b, n_left_from)
CRYPTODEV_OP_TYPE_DECRYPT (cryptodev_op_type_t enum value)
static_always_inline int cryptodev_raw_enq_aead_aad_12_enc (vlib_main_t *vm, vnet_crypto_async_frame_t *frame)
#define clib_error_return(e, args...)
static_always_inline void cryptodev_reset_ctx (cryptodev_engine_thread_t *cet)
CRYPTODEV_OP_TYPE_ENCRYPT (cryptodev_op_type_t enum value)
static uword vlib_buffer_get_pa (vlib_main_t *vm, vlib_buffer_t *b)
VNET_CRYPTO_FRAME_STATE_ELT_ERROR (frame state enum value)
#define CRYPTODEV_DEQ_CACHE_SZ
static_always_inline int cryptodev_raw_enq_aead_aad_8_dec (vlib_main_t *vm, vnet_crypto_async_frame_t *frame)
struct rte_cryptodev_sym_session *reset_sess
static_always_inline int cryptodev_frame_build_sgl (vlib_main_t *vm, enum rte_iova_mode iova_mode, struct rte_crypto_vec *data_vec, u16 *n_seg, vlib_buffer_t *b, u32 size)
#define CLIB_PREFETCH(addr, size, type)
static_always_inline int cryptodev_raw_aead_enqueue (vlib_main_t *vm, vnet_crypto_async_frame_t *frame, cryptodev_op_type_t op_type, u8 aad_len)
i16 current_data: signed offset in data[], pre_data[] that we are currently processing.
cryptodev_main_t cryptodev_main
static u64 vlib_physmem_get_pa (vlib_main_t *vm, void *mem)
#define vec_elt_at_index(v, i): Get vector value at index i, checking that i is in bounds.
static void error_exit (int code)
cryptodev_inst_t *cryptodev_inst
#define VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS
#define static_always_inline
#define GET_RING_OBJ(r, pos, f)
cryptodev_engine_thread_t *per_thread_data
static_always_inline int cryptodev_raw_enq_aead_aad_8_enc (vlib_main_t *vm, vnet_crypto_async_frame_t *frame)
void cryptodev_sess_handler (vlib_main_t *vm, vnet_crypto_key_op_t kop, vnet_crypto_key_index_t idx, u32 aad_len)
u8 flags: shares the VNET_CRYPTO_OP_FLAG_* values.
static u32 vlib_buffer_chain_linearize (vlib_main_t *vm, vlib_buffer_t *b)
static_always_inline void cryptodev_mark_frame_err_status (vnet_crypto_async_frame_t *f, vnet_crypto_op_status_t s)
#define CLIB_CACHE_LINE_BYTES
static_always_inline int cryptodev_raw_enq_aead_aad_12_dec (vlib_main_t *vm, vnet_crypto_async_frame_t *frame)
u16 current_length: Number of bytes between current data and the end of this buffer.
#define CRYPTODEV_NB_CRYPTO_OPS
#define vec_free(V): Free vector's memory (no header).
clib_error_t *cryptodev_register_raw_hdl (vlib_main_t *vm, u32 eidx)
VNET_CRYPTO_FRAME_STATE_SUCCESS (frame state enum value)
#define vec_foreach(var, vec): Vector iterator.
static vlib_main_t *vlib_get_main_by_index (u32 thread_index)
#define foreach_vnet_aead_crypto_conversion
#define CRYPTODEV_MAX_AAD_SIZE
static_always_inline int cryptodev_raw_enq_linked_alg_dec (vlib_main_t *vm, vnet_crypto_async_frame_t *frame)
vlib_buffer_t *b[VNET_CRYPTO_FRAME_SIZE]
struct rte_crypto_raw_dp_ctx *ctx
struct rte_ring *cached_frame
static void *vlib_buffer_get_current (vlib_buffer_t *b): Get pointer to current data to process.
#define CRYPTODEV_MAX_INFLIGHT
#define CRYPTODEV_AAD_MASK
#define CRYPTODEV_MAX_N_SGL: maximum number of segments.
clib_error_t *cryptodev_register_cop_hdl (vlib_main_t *vm, u32 eidx)
static_always_inline u32 cryptodev_get_frame_n_elts (void *frame)
static_always_inline u64 compute_ofs_linked_alg (vnet_crypto_async_frame_elt_t *fe, i16 *min_ofs, u32 *max_end)
#define foreach_cryptodev_link_async_alg: crypto (alg, cryptodev_alg, key_size), hash (alg, digest_size).
int cryptodev_session_create (vlib_main_t *vm, vnet_crypto_key_index_t idx, u32 aad_len)
u32 flags: buffer flags; VLIB_BUFFER_FREE_LIST_INDEX_MASK: bits used to store free list index, ...
vlib_buffer_t: VLIB buffer representation.
static_always_inline int cryptodev_raw_enq_linked_alg_enc (vlib_main_t *vm, vnet_crypto_async_frame_t *frame)