FD.io VPP v21.10.1-2-g0a485f517
Vector Packet Processing

Source fragments from the crypto_sw_scheduler plugin (main.c), the "SW Scheduler" software crypto async engine.
#include <vpp/app/version.h>
/* crypto_sw_scheduler_set_worker_crypto (u32 worker_idx, u8 enabled) */
  return VNET_API_ERROR_INVALID_VALUE;
  /* ... */
  ptd = cm->per_thread_data + i;
  /* ... */
  if (enabled || count > 1)
    cm->per_thread_data[vlib_get_worker_thread_index (worker_idx)]
      .self_crypto_enabled = enabled;
  /* ... */
  return VNET_API_ERROR_INVALID_VALUE_2;
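These fragments come from crypto_sw_scheduler_set_worker_crypto (), which toggles whether a given worker runs crypto work itself. The guard is the interesting part: a worker may only be disabled while at least one other self-crypto worker remains, otherwise no thread would drain the queues. A hedged sketch of the whole function, with the counting loop filled in from the signatures listed at the end of this page (the exact per-thread bookkeeping is an assumption):

int
crypto_sw_scheduler_set_worker_crypto (u32 worker_idx, u8 enabled)
{
  crypto_sw_scheduler_main_t *cm = &crypto_sw_scheduler_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  crypto_sw_scheduler_per_thread_data_t *ptd;
  u32 count = 0, i;

  if (worker_idx >= vlib_num_workers ())
    return VNET_API_ERROR_INVALID_VALUE;

  /* count threads that currently run crypto themselves */
  for (i = 0; i < tm->n_vlib_mains; i++)
    {
      ptd = cm->per_thread_data + i;
      count += ptd->self_crypto_enabled;
    }

  /* enabling is always fine; disabling needs another crypto worker left */
  if (enabled || count > 1)
    cm->per_thread_data[vlib_get_worker_thread_index (worker_idx)]
      .self_crypto_enabled = enabled;
  else
    return VNET_API_ERROR_INVALID_VALUE_2;

  return 0;
}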
/* crypto_sw_scheduler_key_handler (): a deleted key is tombstoned */
  cm->keys[idx].index_crypto = UINT32_MAX;
  cm->keys[idx].index_integ = UINT32_MAX;
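The scheduler keeps a shadow key table: for a linked key (VNET_CRYPTO_KEY_TYPE_LINK) it records the indices of the underlying cipher and integrity keys, and a deleted key is marked by resetting both indices to UINT32_MAX. A hedged sketch of the handler; the VNET_CRYPTO_KEY_OP_DEL branch and the vec_validate bookkeeping follow the usual VPP key-handler pattern and are assumptions:

static void
crypto_sw_scheduler_key_handler (vlib_main_t *vm, vnet_crypto_key_op_t kop,
                                 vnet_crypto_key_index_t idx)
{
  crypto_sw_scheduler_main_t *cm = &crypto_sw_scheduler_main;
  vnet_crypto_key_t *key = vnet_crypto_get_key (idx);

  vec_validate (cm->keys, idx); /* assumed: grow the shadow table */

  if (key->type == VNET_CRYPTO_KEY_TYPE_LINK)
    {
      if (kop == VNET_CRYPTO_KEY_OP_DEL)
        {
          /* tombstone: both halves of the linked key become invalid */
          cm->keys[idx].index_crypto = UINT32_MAX;
          cm->keys[idx].index_integ = UINT32_MAX;
        }
      else
        cm->keys[idx] = *key; /* remember cipher + integ key indices */
    }
}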
/* crypto_sw_scheduler_frame_enqueue (): on failure, every element is marked */
  for (i = 0; i < n_elts; i++)
    frame->elts[i].status = VNET_CRYPTO_OP_STATUS_FAIL_ENGINE_ERR;
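The error loop above fires when the ring slot at head is still occupied. A sketch of the enqueue path, assuming the queue layout implied by jobs[0], CRYPTO_SW_SCHEDULER_QUEUE_MASK and CLIB_MEMORY_STORE_BARRIER () listed below (the head field name is an assumption):

static int
crypto_sw_scheduler_frame_enqueue (vlib_main_t *vm,
                                   vnet_crypto_async_frame_t *frame)
{
  crypto_sw_scheduler_main_t *cm = &crypto_sw_scheduler_main;
  crypto_sw_scheduler_per_thread_data_t *ptd =
    cm->per_thread_data + vm->thread_index;
  crypto_sw_scheduler_queue_t *q = ptd->queues[frame->op];
  u32 head = q->head;

  /* slot still holds an unconsumed frame: report per-element failure */
  if (q->jobs[head & CRYPTO_SW_SCHEDULER_QUEUE_MASK])
    {
      u32 n_elts = frame->n_elts, i;
      for (i = 0; i < n_elts; i++)
        frame->elts[i].status = VNET_CRYPTO_OP_STATUS_FAIL_ENGINE_ERR;
      return -1;
    }

  frame->state = VNET_CRYPTO_FRAME_STATE_PENDING;
  frame->enqueue_thread_index = vm->thread_index;
  q->jobs[head & CRYPTO_SW_SCHEDULER_QUEUE_MASK] = frame;
  head += 1;
  CLIB_MEMORY_STORE_BARRIER ();  /* publish the frame before moving head */
  q->head = head;
  return 0;
}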
/* crypto_sw_scheduler_get_pending_frame (): scan the ring from tail to head */
  for (i = tail; i < head; i++)
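head and tail are free-running counters, so each probe masks into the jobs[] ring; a frame is claimed by atomically flipping its state from PENDING to WORK_IN_PROGRESS, which is what lets several worker threads poll the same queue safely. A sketch of the loop body under those assumptions:

for (i = tail; i < head; i++)
  {
    f = q->jobs[i & CRYPTO_SW_SCHEDULER_QUEUE_MASK];
    if (!f)
      continue;
    /* atomically claim the frame; losers just keep scanning */
    if (clib_atomic_bool_cmp_and_swap (
          &f->state, VNET_CRYPTO_FRAME_STATE_PENDING,
          VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS))
      return f;
  }
return 0;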
/* cryptodev_sw_scheduler_sgl (): follow the buffer chain */
  while (len && b->flags & VLIB_BUFFER_NEXT_PRESENT)
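Each link of the chain becomes one vnet_crypto_op_chunk_t; the walk stops when the requested length is consumed or the chain ends. A hedged sketch of the loop, using the names from the cryptodev_sw_scheduler_sgl () signature listed at the end of this page; the chunk field handling is simplified and partly assumed:

vnet_crypto_op_chunk_t *ch;
u16 n_chunks = 0;

while (len && b->flags & VLIB_BUFFER_NEXT_PRESENT)
  {
    vec_add2 (ptd->chunks, ch, 1); /* append one chunk descriptor */
    ch->src = ch->dst = (u8 *) vlib_buffer_get_current (b) + offset;
    ch->len = clib_min (b->current_length - offset, len);
    len -= ch->len;
    offset = 0;                    /* only the first buffer is offset */
    b = vlib_get_buffer (vm, b->next_buffer);
    n_chunks++;
  }

op->flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;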
/* crypto_sw_scheduler_convert_link_crypto (..., u32 digest_len, u8 is_enc):
   one async element fans out into a cipher op and an integrity op */
  crypto_op->op = crypto_op_id;
  crypto_op->iv = fe->iv;
  /* ... */
  integ_op->op = integ_op_id;
  integ_op->digest = fe->digest;
  integ_op->digest_len = digest_len;
  integ_op->key_index = key->index_integ;
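The two halves of a linked key land in separate per-thread batches, processed per synchronous op id. A sketch of the fan-out; judging by the flags listed at the end of this page, VNET_CRYPTO_OP_FLAG_HMAC_CHECK turns the decrypt-side digest into a verification, but that flag handling is an assumption:

vnet_crypto_op_t *crypto_op, *integ_op;

/* cipher and integrity ops go into separate per-thread batches */
vec_add2 (ptd->crypto_ops, crypto_op, 1);
vec_add2 (ptd->integ_ops, integ_op, 1);

crypto_op->op = crypto_op_id;
crypto_op->key_index = key->index_crypto; /* cipher half of the linked key */
integ_op->op = integ_op_id;
integ_op->key_index = key->index_integ;   /* HMAC half of the linked key */

if (!is_enc)
  /* assumption: on decrypt the digest is verified, not produced */
  integ_op->flags |= VNET_CRYPTO_OP_FLAG_HMAC_CHECK;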
/* process_ops () and process_chained_ops (): walk back per-op results */
  ASSERT (op - ops < n_ops);
  if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
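vnet_crypto_process_ops () returns how many ops completed, so the scan above only runs for the failures; a failed op's status is copied back to its frame element and the frame is downgraded to ELT_ERROR. A sketch, assuming op->user_data carries the element index:

static_always_inline void
process_ops (vlib_main_t *vm, vnet_crypto_async_frame_t *f,
             vnet_crypto_op_t *ops, u8 *state)
{
  u32 n_fail, n_ops = vec_len (ops);
  vnet_crypto_op_t *op = ops;

  if (n_ops == 0)
    return;

  n_fail = n_ops - vnet_crypto_process_ops (vm, op, n_ops);

  while (n_fail) /* only failed ops need individual attention */
    {
      ASSERT (op - ops < n_ops);
      if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
        {
          f->elts[op->user_data].status = op->status; /* assumed mapping */
          *state = VNET_CRYPTO_FRAME_STATE_ELT_ERROR;
          n_fail--;
        }
      op++;
    }
}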
/* crypto_sw_scheduler_dequeue_aead (vlib_main_t *vm,
     vnet_crypto_async_op_id_t async_op_id, vnet_crypto_op_id_t sync_op_id,
     u8 tag_len, u8 aad_len, u32 *nb_elts_processed,
     u32 *enqueue_thread_idx) */
  ptd = cm->per_thread_data + i;
  q = ptd->queues[async_op_id];
  /* ... */
  *nb_elts_processed = n_elts = f->n_elts;
  bi = f->buffer_indices;
  /* ... */
  crypto_sw_scheduler_convert_aead (vm, ptd, fe, fe - f->elts, bi[0],
                                    sync_op_id, aad_len, tag_len);
  /* ... */
  *enqueue_thread_idx = f->enqueue_thread_index;
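The dequeue side ties the pieces together: claim a pending frame, convert every element into synchronous ops, run the batches, then hand back whichever frame at the ring tail has completed. An outline under those assumptions; the link variant that follows differs only in converting each element to a cipher plus integrity pair:

/* outline of crypto_sw_scheduler_dequeue_aead (); error paths omitted */
f = crypto_sw_scheduler_get_pending_frame (q);
if (f)
  {
    u8 state = VNET_CRYPTO_FRAME_STATE_SUCCESS;

    *nb_elts_processed = n_elts = f->n_elts;
    fe = f->elts;
    bi = f->buffer_indices;

    vec_reset_length (ptd->crypto_ops);
    vec_reset_length (ptd->chained_crypto_ops);
    vec_reset_length (ptd->chunks);

    while (n_elts--)
      {
        crypto_sw_scheduler_convert_aead (vm, ptd, fe, fe - f->elts, bi[0],
                                          sync_op_id, aad_len, tag_len);
        bi++;
        fe++;
      }

    process_ops (vm, f, ptd->crypto_ops, &state);
    process_chained_ops (vm, f, ptd->chained_crypto_ops, ptd->chunks, &state);
    f->state = state; /* publish the result */
    *enqueue_thread_idx = f->enqueue_thread_index;
  }

return crypto_sw_scheduler_get_completed_frame (q);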
/* crypto_sw_scheduler_dequeue_link (vlib_main_t *vm,
     vnet_crypto_async_op_id_t async_op_id,
     vnet_crypto_op_id_t sync_crypto_op_id,
     vnet_crypto_op_id_t sync_integ_op_id, u16 digest_len, u8 is_enc,
     u32 *nb_elts_processed, u32 *enqueue_thread_idx) */
  ptd = cm->per_thread_data + i;
  q = ptd->queues[async_op_id];
  /* ... */
  *nb_elts_processed = n_elts = f->n_elts;
  bi = f->buffer_indices;
  /* ... */
  crypto_sw_scheduler_convert_link_crypto (vm, ptd, cm->keys + fe->key_index,
                                           fe, fe - f->elts, bi[0],
                                           sync_crypto_op_id,
                                           sync_integ_op_id, digest_len,
                                           is_enc);
  /* ... */
  *enqueue_thread_idx = f->enqueue_thread_index;
/* sw_scheduler_set_worker_crypto (): CLI argument parsing */
  if (unformat (line_input, "worker %u", &worker_index))
    /* ... */
  if (unformat (line_input, "crypto"))
    /* ... */
  else if (unformat (line_input, "off"))
    /* ... */

/* map the return value to a CLI error */
  if (rv == VNET_API_ERROR_INVALID_VALUE)
    /* ... */
  else if (rv == VNET_API_ERROR_INVALID_VALUE_2)
    /* ... */
VLIB_CLI_COMMAND (cmd_set_sw_scheduler_worker_crypto, static) = {
  .path = "set sw_scheduler",
  .short_help = "set sw_scheduler worker <idx> crypto <on|off>",
  .function = sw_scheduler_set_worker_crypto,
};

VLIB_CLI_COMMAND (cmd_show_sw_scheduler_workers, static) = {
  .path = "show sw_scheduler workers",
  .short_help = "show sw_scheduler workers",
  .function = sw_scheduler_show_workers,
};
#define _(n, s, k, t, a)                                                     \
  static vnet_crypto_async_frame_t                                          \
    *crypto_sw_scheduler_frame_dequeue_##n##_TAG_##t##_AAD_##a##_enc (       \
      vlib_main_t *vm, u32 *nb_elts_processed, u32 *thread_idx)              \
  {                                                                          \
    return crypto_sw_scheduler_dequeue_aead (                                \
      vm, VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC,                        \
      VNET_CRYPTO_OP_##n##_ENC, t, a, nb_elts_processed, thread_idx);        \
  }                                                                          \
  static vnet_crypto_async_frame_t                                          \
    *crypto_sw_scheduler_frame_dequeue_##n##_TAG_##t##_AAD_##a##_dec (       \
      vlib_main_t *vm, u32 *nb_elts_processed, u32 *thread_idx)              \
  {                                                                          \
    return crypto_sw_scheduler_dequeue_aead (                                \
      vm, VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC,                        \
      VNET_CRYPTO_OP_##n##_DEC, t, a, nb_elts_processed, thread_idx);        \
  }
foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k, d)                                                     \
  static vnet_crypto_async_frame_t                                          \
    *crypto_sw_scheduler_frame_dequeue_##c##_##h##_TAG##d##_enc (            \
      vlib_main_t *vm, u32 *nb_elts_processed, u32 *thread_idx)              \
  {                                                                          \
    return crypto_sw_scheduler_dequeue_link (                                \
      vm, VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC,                           \
      VNET_CRYPTO_OP_##c##_ENC, VNET_CRYPTO_OP_##h##_HMAC, d, 1,             \
      nb_elts_processed, thread_idx);                                        \
  }                                                                          \
  static vnet_crypto_async_frame_t                                          \
    *crypto_sw_scheduler_frame_dequeue_##c##_##h##_TAG##d##_dec (            \
      vlib_main_t *vm, u32 *nb_elts_processed, u32 *thread_idx)              \
  {                                                                          \
    return crypto_sw_scheduler_dequeue_link (                                \
      vm, VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC,                           \
      VNET_CRYPTO_OP_##c##_DEC, VNET_CRYPTO_OP_##h##_HMAC, d, 0,             \
      nb_elts_processed, thread_idx);                                        \
  }
foreach_crypto_link_async_alg
#undef _
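To make the token pasting concrete, here is one entry of foreach_crypto_aead_async_alg instantiated for AES-128-GCM with a 16-byte tag and 12-byte AAD; the expansion is written out by hand, so treat the exact enum spellings as illustrative:

static vnet_crypto_async_frame_t *
crypto_sw_scheduler_frame_dequeue_AES_128_GCM_TAG_16_AAD_12_enc (
  vlib_main_t *vm, u32 *nb_elts_processed, u32 *thread_idx)
{
  return crypto_sw_scheduler_dequeue_aead (
    vm, VNET_CRYPTO_OP_AES_128_GCM_TAG16_AAD12_ENC,
    VNET_CRYPTO_OP_AES_128_GCM_ENC, 16, 12, nb_elts_processed, thread_idx);
}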
/* crypto_sw_scheduler_init (): register the engine */
  cm->crypto_engine_index =
    vnet_crypto_register_engine (vm, ..., "SW Scheduler Async Engine");
/* one enqueue/dequeue handler pair per AEAD async op id */
#define _(n, s, k, t, a)                                                     \
  vnet_crypto_register_async_handler (                                      \
    vm, cm->crypto_engine_index,                                            \
    VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC,                             \
    crypto_sw_scheduler_frame_enqueue,                                      \
    crypto_sw_scheduler_frame_dequeue_##n##_TAG_##t##_AAD_##a##_enc);       \
  vnet_crypto_register_async_handler (                                      \
    vm, cm->crypto_engine_index,                                            \
    VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC,                             \
    crypto_sw_scheduler_frame_enqueue,                                      \
    crypto_sw_scheduler_frame_dequeue_##n##_TAG_##t##_AAD_##a##_dec);
  foreach_crypto_aead_async_alg
#undef _
/* and per linked cipher+HMAC async op id */
#define _(c, h, s, k, d)                                                     \
  vnet_crypto_register_async_handler (                                      \
    vm, cm->crypto_engine_index, VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC,   \
    crypto_sw_scheduler_frame_enqueue,                                      \
    crypto_sw_scheduler_frame_dequeue_##c##_##h##_TAG##d##_enc);            \
  vnet_crypto_register_async_handler (                                      \
    vm, cm->crypto_engine_index, VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC,   \
    crypto_sw_scheduler_frame_enqueue,                                      \
    crypto_sw_scheduler_frame_dequeue_##c##_##h##_TAG##d##_dec);
  foreach_crypto_link_async_alg
#undef _
VLIB_INIT_FUNCTION (crypto_sw_scheduler_init) = {
  .runs_after = VLIB_INITS ("vnet_crypto_init"),
};

VLIB_PLUGIN_REGISTER () = {
  .version = VPP_BUILD_VER,
  .description = "SW Scheduler Crypto Async Engine plugin",
};
Referenced symbols (Doxygen cross-references for this file):

clib vector and memory helpers:
  #define vec_reset_length(v)
    Reset vector length to zero (NULL-pointer tolerant).
  #define vec_len(v)
    Number of elements in vector (rvalue-only, NULL tolerant).
  #define vec_add2(V, P, N)
    Add N elements to end of vector V, return pointer to new elements in P.
  #define vec_elt_at_index(v, i)
    Get vector value at index i, checking that i is in bounds.
  #define vec_validate(V, I)
    Make sure vector is long enough for given index (no header, unspecified alignment).
  #define vec_validate_aligned(V, I, A)
    Make sure vector is long enough for given index (no header, specified alignment).
  #define vec_free(V)
    Free vector's memory (no header).
  #define vec_foreach(var, vec)
    Vector iterator.
  #define vec_foreach_index(var, v)
    Iterate over vector indices.
  #define clib_error_return(e, args...)
  #define clib_atomic_bool_cmp_and_swap(addr, old, new)
  #define CLIB_MEMORY_STORE_BARRIER()
  #define CLIB_CACHE_LINE_BYTES
  #define static_always_inline
  static void *clib_mem_alloc_aligned(uword size, uword align)
  static_always_inline void clib_memset_u8(void *p, u8 val, uword count)
  static_always_inline void clib_prefetch_load(void *p)

crypto_sw_scheduler plugin:
  #define CRYPTO_SW_SCHEDULER_QUEUE_SIZE
  #define CRYPTO_SW_SCHEDULER_QUEUE_MASK
  crypto_sw_scheduler_main_t crypto_sw_scheduler_main
  crypto_sw_scheduler_queue_t *queues[VNET_CRYPTO_ASYNC_OP_N_IDS]
  vnet_crypto_async_frame_t *jobs[0]
  vnet_crypto_op_t *crypto_ops
  vnet_crypto_op_t *integ_ops
  vnet_crypto_op_t *chained_crypto_ops
  vnet_crypto_op_t *chained_integ_ops
  vnet_crypto_op_chunk_t *chunks
  int crypto_sw_scheduler_set_worker_crypto(u32 worker_idx, u8 enabled)
  static void crypto_sw_scheduler_key_handler(vlib_main_t *vm, vnet_crypto_key_op_t kop, vnet_crypto_key_index_t idx)
  static int crypto_sw_scheduler_frame_enqueue(vlib_main_t *vm, vnet_crypto_async_frame_t *frame)
  static_always_inline vnet_crypto_async_frame_t *crypto_sw_scheduler_get_pending_frame(crypto_sw_scheduler_queue_t *q)
  static_always_inline vnet_crypto_async_frame_t *crypto_sw_scheduler_get_completed_frame(crypto_sw_scheduler_queue_t *q)
  static_always_inline void cryptodev_sw_scheduler_sgl(vlib_main_t *vm, crypto_sw_scheduler_per_thread_data_t *ptd, vlib_buffer_t *b, vnet_crypto_op_t *op, i16 offset, u32 len)
  static_always_inline void crypto_sw_scheduler_convert_aead(vlib_main_t *vm, crypto_sw_scheduler_per_thread_data_t *ptd, vnet_crypto_async_frame_elt_t *fe, u32 index, u32 bi, vnet_crypto_op_id_t op_id, u16 aad_len, u8 tag_len)
  static_always_inline void crypto_sw_scheduler_convert_link_crypto(vlib_main_t *vm, crypto_sw_scheduler_per_thread_data_t *ptd, vnet_crypto_key_t *key, vnet_crypto_async_frame_elt_t *fe, u32 index, u32 bi, vnet_crypto_op_id_t crypto_op_id, vnet_crypto_op_id_t integ_op_id, u32 digest_len, u8 is_enc)
  static_always_inline void process_ops(vlib_main_t *vm, vnet_crypto_async_frame_t *f, vnet_crypto_op_t *ops, u8 *state)
  static_always_inline void process_chained_ops(vlib_main_t *vm, vnet_crypto_async_frame_t *f, vnet_crypto_op_t *ops, vnet_crypto_op_chunk_t *chunks, u8 *state)
  static_always_inline vnet_crypto_async_frame_t *crypto_sw_scheduler_dequeue_aead(vlib_main_t *vm, vnet_crypto_async_op_id_t async_op_id, vnet_crypto_op_id_t sync_op_id, u8 tag_len, u8 aad_len, u32 *nb_elts_processed, u32 *enqueue_thread_idx)
  static_always_inline vnet_crypto_async_frame_t *crypto_sw_scheduler_dequeue_link(vlib_main_t *vm, vnet_crypto_async_op_id_t async_op_id, vnet_crypto_op_id_t sync_crypto_op_id, vnet_crypto_op_id_t sync_integ_op_id, u16 digest_len, u8 is_enc, u32 *nb_elts_processed, u32 *enqueue_thread_idx)
  static clib_error_t *sw_scheduler_set_worker_crypto(vlib_main_t *vm, unformat_input_t *input, vlib_cli_command_t *cmd)
  static clib_error_t *sw_scheduler_show_workers(vlib_main_t *vm, unformat_input_t *input, vlib_cli_command_t *cmd)
  static vlib_cli_command_t cmd_set_sw_scheduler_worker_crypto
    (constructor) VLIB_CLI_COMMAND (cmd_set_sw_scheduler_worker_crypto)
  static vlib_cli_command_t cmd_show_sw_scheduler_workers
    (constructor) VLIB_CLI_COMMAND (cmd_show_sw_scheduler_workers)
  clib_error_t *crypto_sw_scheduler_init(vlib_main_t *vm)
  clib_error_t *crypto_sw_scheduler_api_init(vlib_main_t *vm)
  clib_error_t *sw_scheduler_cli_init(vlib_main_t *vm)

vnet crypto infrastructure:
  #define foreach_crypto_aead_async_alg
    async crypto (AEAD algorithms)
  #define foreach_crypto_link_async_alg
  #define VNET_CRYPTO_OP_FLAG_INIT_IV
  #define VNET_CRYPTO_OP_FLAG_HMAC_CHECK
  #define VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS
  #define VNET_CRYPTO_KEY_TYPE_LINK
  u32 vnet_crypto_key_index_t
  vnet_crypto_async_op_id_t
  @ VNET_CRYPTO_ASYNC_OP_N_IDS
  @ VNET_CRYPTO_FRAME_STATE_SUCCESS
  @ VNET_CRYPTO_FRAME_STATE_PENDING
  @ VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS
  @ VNET_CRYPTO_FRAME_STATE_ELT_ERROR
  vnet_crypto_op_status_t status
  vnet_crypto_async_frame_state_t state
  u8 flags
    share same VNET_CRYPTO_OP_FLAG_* values
  u32 vnet_crypto_register_engine(vlib_main_t *vm, char *name, int prio, char *desc)
  void vnet_crypto_register_key_handler(vlib_main_t *vm, u32 engine_index, vnet_crypto_key_handler_t *key_handler)
  u32 vnet_crypto_process_ops(vlib_main_t *vm, vnet_crypto_op_t ops[], u32 n_ops)
  u32 vnet_crypto_process_chained_ops(vlib_main_t *vm, vnet_crypto_op_t ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops)
  static_always_inline vnet_crypto_key_t *vnet_crypto_get_key(vnet_crypto_key_index_t index)

vlib buffers, threads, CLI and init:
  vlib_buffer_t
    VLIB buffer representation.
  static vlib_buffer_t *vlib_get_buffer(vlib_main_t *vm, u32 buffer_index)
    Translate buffer index into buffer pointer.
  static void *vlib_buffer_get_current(vlib_buffer_t *b)
    Get pointer to current data to process.
  static u32 vlib_buffer_space_left_at_end(vlib_main_t *vm, vlib_buffer_t *b)
  i16 current_data
    Signed offset in data[], pre_data[] that we are currently processing.
  u16 current_length
    Nbytes between current data and the end of this buffer.
  u32 next_buffer
    Next buffer for this linked-list of buffers.
  u32 flags
    Buffer flags (VLIB_BUFFER_FREE_LIST_INDEX_MASK: bits used to store free list index, ...).
  vlib_main_t *vm
  vlib_thread_main_t vlib_thread_main
  vlib_worker_thread_t *vlib_worker_threads
  static vlib_thread_main_t *vlib_get_thread_main()
  static u32 vlib_num_workers()
  static u32 vlib_get_worker_thread_index(u32 worker_index)
  static u32 vlib_get_worker_index(u32 thread_index)
  #define VLIB_CLI_COMMAND(x,...)
  #define VLIB_INIT_FUNCTION(x)
  void vlib_cli_output(vlib_main_t *vm, char *fmt, ...)
  clib_error_t *() vlib_init_function_t(struct vlib_main_t *vm)