FD.io VPP v21.06-3-gbb25fbf28 (Vector Packet Processing)
Annotated source excerpts from the crypto_sw_scheduler plugin.
#include <vpp/app/version.h>

/* crypto_sw_scheduler_set_worker_crypto () - excerpt */
  return VNET_API_ERROR_INVALID_VALUE;
  ...
  ptd = cm->per_thread_data + i;
  ...
  if (enabled || count > 1)
    cm->per_thread_data[vlib_get_worker_thread_index
			(worker_idx)].self_crypto_enabled = enabled;
  ...
  return VNET_API_ERROR_INVALID_VALUE_2;
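The guard above allows disabling a worker's self-crypto only while at least one other worker keeps it enabled. A minimal standalone sketch of the same logic, with stand-in names and error codes (not VPP's):

#include <stdio.h>

#define N_WORKERS 4

static int self_crypto_enabled[N_WORKERS] = { 1, 1, 1, 1 };

static int
set_worker_crypto (unsigned worker, int enabled)
{
  int i, count = 0;

  if (worker >= N_WORKERS)
    return -1;			/* cf. VNET_API_ERROR_INVALID_VALUE */

  for (i = 0; i < N_WORKERS; i++)
    count += self_crypto_enabled[i];

  /* allow the change unless it would disable the last enabled worker */
  if (enabled || count > 1)
    self_crypto_enabled[worker] = enabled;
  else
    return -2;			/* cf. VNET_API_ERROR_INVALID_VALUE_2 */
  return 0;
}

int
main (void)
{
  printf ("%d\n", set_worker_crypto (1, 0));	/* 0: disabled worker 1 */
  printf ("%d\n", set_worker_crypto (9, 1));	/* -1: bad index */
  return 0;
}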
/* crypto_sw_scheduler_key_handler () - excerpt: invalidate cached sync key indices */
  cm->keys[idx].index_crypto = UINT32_MAX;
  cm->keys[idx].index_integ = UINT32_MAX;
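Writing UINT32_MAX marks the key's cached sync slots as unresolved, so they are looked up again on next use. A standalone sketch of this sentinel-based cache invalidation, with illustrative names:

#include <stdint.h>
#include <stdio.h>

typedef struct
{
  uint32_t index_crypto;	/* cached sync crypto key slot */
  uint32_t index_integ;		/* cached sync integrity key slot */
} key_cache_t;

static void
key_cache_invalidate (key_cache_t *k)
{
  k->index_crypto = UINT32_MAX;	/* sentinel: must be re-resolved */
  k->index_integ = UINT32_MAX;
}

static uint32_t
key_cache_get_crypto (key_cache_t *k)
{
  if (k->index_crypto == UINT32_MAX)
    k->index_crypto = 7;	/* stand-in for the real key lookup */
  return k->index_crypto;
}

int
main (void)
{
  key_cache_t k;

  key_cache_invalidate (&k);
  printf ("%u\n", key_cache_get_crypto (&k));	/* 7: resolved lazily */
  return 0;
}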
/* crypto_sw_scheduler_frame_enqueue () - excerpt: fail every element of a frame */
  for (i = 0; i < n_elts; i++)
    frame->elts[i].status = VNET_CRYPTO_OP_STATUS_FAIL_ENGINE_ERR;
/* queue scan between free-running tail and head counters */
  for (i = tail; i < head; i++)
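The queue indices here are free-running counters over a power-of-two ring (CRYPTO_SW_SCHEDULER_QUEUE_SIZE slots, masked with CRYPTO_SW_SCHEDULER_QUEUE_MASK), so a scan can simply run from tail to head and mask each access. A standalone sketch of that indexing scheme, assuming illustrative sizes:

#include <stdio.h>

#define QUEUE_SIZE 64			/* must be a power of two */
#define QUEUE_MASK (QUEUE_SIZE - 1)

typedef struct
{
  unsigned head, tail;			/* free-running counters */
  void *jobs[QUEUE_SIZE];
} ring_t;

static int
ring_enqueue (ring_t *q, void *job)
{
  if (q->head - q->tail == QUEUE_SIZE)
    return -1;				/* ring is full */
  q->jobs[q->head & QUEUE_MASK] = job;	/* mask on every access */
  q->head++;
  return 0;
}

int
main (void)
{
  ring_t q = { 0 };
  int x = 42;
  unsigned i;

  ring_enqueue (&q, &x);
  for (i = q.tail; i < q.head; i++)	/* walk the live slots */
    printf ("slot %u: %p\n", i & QUEUE_MASK, q.jobs[i & QUEUE_MASK]);
  return 0;
}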
/* cryptodev_sw_scheduler_sgl () - excerpt: follow a chained buffer */
  if (nb->flags & VLIB_BUFFER_NEXT_PRESENT)
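VLIB_BUFFER_NEXT_PRESENT signals that the packet continues in another buffer, so the scatter-gather path walks next_buffer links until the flag clears. A standalone sketch of such a chain walk with stand-in types (not VLIB's):

#include <stdio.h>

#define BUF_NEXT_PRESENT (1 << 0)	/* cf. VLIB_BUFFER_NEXT_PRESENT */

typedef struct
{
  unsigned flags;
  unsigned next_buffer;			/* index of the next buffer */
  unsigned short current_length;	/* bytes in this buffer */
} buf_t;

static unsigned
chain_total_length (buf_t *pool, unsigned bi)
{
  buf_t *b = pool + bi;
  unsigned total = b->current_length;

  while (b->flags & BUF_NEXT_PRESENT)	/* follow the linked list */
    {
      b = pool + b->next_buffer;
      total += b->current_length;
    }
  return total;
}

int
main (void)
{
  buf_t pool[2] = {
    { BUF_NEXT_PRESENT, 1, 100 },	/* head buffer, chains to pool[1] */
    { 0, 0, 28 },			/* tail buffer */
  };

  printf ("%u\n", chain_total_length (pool, 0));	/* 128 */
  return 0;
}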
/* crypto_sw_scheduler_convert_link_crypto () - excerpt */
  ... u32 digest_len, u8 is_enc)
  ...
  crypto_op->op = crypto_op_id;
  crypto_op->iv = fe->iv;
  ...
  integ_op->op = integ_op_id;
  integ_op->digest = fe->digest;
  integ_op->digest_len = digest_len;
  integ_op->key_index = key->index_integ;
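Each async frame element is expanded into two synchronous ops sharing the element's data: a cipher op carrying the IV and crypto key index, and an integrity op carrying the digest pointer, digest length, and integrity key index. A standalone sketch of that pairing with illustrative types:

#include <stdint.h>
#include <stdio.h>

typedef struct { int op; uint8_t *iv; uint32_t key_index; } crypto_op_t;
typedef struct { int op; uint8_t *digest; uint32_t digest_len;
		 uint32_t key_index; } integ_op_t;
typedef struct { uint8_t *iv, *digest; } frame_elt_t;

static void
convert_link (frame_elt_t *fe, int crypto_op_id, int integ_op_id,
	      uint32_t index_crypto, uint32_t index_integ,
	      uint32_t digest_len, crypto_op_t *cop, integ_op_t *iop)
{
  cop->op = crypto_op_id;	/* e.g. the cipher enc/dec op id */
  cop->iv = fe->iv;
  cop->key_index = index_crypto;

  iop->op = integ_op_id;	/* e.g. the HMAC op id */
  iop->digest = fe->digest;
  iop->digest_len = digest_len;
  iop->key_index = index_integ;
}

int
main (void)
{
  uint8_t iv[16] = { 0 }, digest[32] = { 0 };
  frame_elt_t fe = { iv, digest };
  crypto_op_t cop;
  integ_op_t iop;

  convert_link (&fe, 1, 2, 5, 6, 12, &cop, &iop);
  printf ("%d %d %u\n", cop.op, iop.op, iop.digest_len);	/* 1 2 12 */
  return 0;
}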
/* process_ops () - excerpt */
  ASSERT (op - ops < n_ops);
  ...
  if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)

/* process_chained_ops () - excerpt: same status scan over chained ops */
  ASSERT (op - ops < n_ops);
  ...
  if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
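Both scans walk the completed sync ops and demote the whole frame if any element failed. A standalone sketch of folding per-op statuses into a single frame state, with stand-in constants:

#include <stdio.h>

enum { OP_COMPLETED, OP_FAIL };			/* stand-in statuses */
enum { FRAME_SUCCESS, FRAME_ELT_ERROR };	/* stand-in frame states */

static int
fold_statuses (const int *status, int n_ops)
{
  int i, state = FRAME_SUCCESS;

  for (i = 0; i < n_ops; i++)
    if (status[i] != OP_COMPLETED)	/* any failed element poisons the frame */
      state = FRAME_ELT_ERROR;
  return state;
}

int
main (void)
{
  int st[3] = { OP_COMPLETED, OP_FAIL, OP_COMPLETED };

  printf ("%d\n", fold_statuses (st, 3));	/* 1: FRAME_ELT_ERROR */
  return 0;
}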
/* crypto_sw_scheduler_dequeue_aead () - excerpt */
  ... u8 aad_len, u32 * nb_elts_processed, u32 * enqueue_thread_idx)
  ...
  ptd = cm->per_thread_data + i;
  q = ptd->queues[async_op_id];
  ...
  *nb_elts_processed = n_elts = f->n_elts;
  ...
  bi = f->buffer_indices;
  ...
  ... sync_op_id, aad_len, tag_len);	/* tail of the per-element conversion call */
  ...
  *enqueue_thread_idx = f->enqueue_thread_index;
/* crypto_sw_scheduler_dequeue_link () - excerpt */
  ... u16 digest_len, u8 is_enc, u32 * nb_elts_processed, u32 * enqueue_thread_idx)
  ...
  ptd = cm->per_thread_data + i;
  q = ptd->queues[async_op_id];
  ...
  *nb_elts_processed = n_elts = f->n_elts;
  ...
  bi = f->buffer_indices;
  ...
  ... fe, fe - f->elts, bi[0], ...	/* arguments of the per-element conversion call */
  ...
  *enqueue_thread_idx = f->enqueue_thread_index;
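Both dequeue helpers share one shape: poll every thread's queue for a pending frame, convert and process its elements, then report the element count and the enqueuing thread back to the caller. A standalone sketch of that polling loop, with illustrative types:

#include <stddef.h>
#include <stdio.h>

typedef struct
{
  unsigned n_elts;
  unsigned enqueue_thread_index;	/* thread that queued the frame */
} frame_t;

typedef struct
{
  frame_t *pending;			/* one outstanding frame per queue */
} queue_t;

static frame_t *
dequeue_one (queue_t *queues, int n_threads,
	     unsigned *nb_elts_processed, unsigned *enqueue_thread_idx)
{
  int i;

  for (i = 0; i < n_threads; i++)	/* poll every thread's queue */
    {
      frame_t *f = queues[i].pending;

      if (!f)
	continue;
      queues[i].pending = NULL;
      /* ... convert the elements to sync ops and process them here ... */
      *nb_elts_processed = f->n_elts;
      *enqueue_thread_idx = f->enqueue_thread_index;
      return f;
    }
  return NULL;
}

int
main (void)
{
  frame_t f = { 4, 2 };
  queue_t queues[3] = { { NULL }, { &f }, { NULL } };
  unsigned n = 0, thread = 0;
  frame_t *done = dequeue_one (queues, 3, &n, &thread);

  printf ("%s n_elts=%u thread=%u\n", done ? "frame" : "none", n, thread);
  return 0;
}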
/* sw_scheduler_set_worker_crypto () - CLI parsing excerpt */
  if (unformat (line_input, "worker %u", &worker_index))
    ...
  if (unformat (line_input, "crypto"))
    ...
  else if (unformat (line_input, "off"))
    ...
  if (rv == VNET_API_ERROR_INVALID_VALUE)
    ...
  else if (rv == VNET_API_ERROR_INVALID_VALUE_2)
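The CLI handler tokenizes the command with unformat () and then maps the two API error codes to human-readable messages. A standalone sketch of the same parse-then-map shape in plain C (sscanf stands in for unformat, and the message strings are invented):

#include <stdio.h>
#include <string.h>

#define API_ERR_INVALID_VALUE	-1	/* cf. VNET_API_ERROR_INVALID_VALUE */
#define API_ERR_INVALID_VALUE_2	-2	/* cf. VNET_API_ERROR_INVALID_VALUE_2 */

int
main (void)
{
  const char *input = "worker 1 crypto off";
  unsigned worker_index;
  char verb[8], toggle[8];
  int rv = 0;

  if (sscanf (input, "worker %u %7s %7s", &worker_index, verb, toggle) != 3
      || strcmp (verb, "crypto"))
    rv = API_ERR_INVALID_VALUE;
  else
    printf ("worker %u crypto %s\n", worker_index,
	    strcmp (toggle, "off") ? "on" : "off");

  if (rv == API_ERR_INVALID_VALUE)
    fprintf (stderr, "invalid worker index\n");
  else if (rv == API_ERR_INVALID_VALUE_2)
    fprintf (stderr, "cannot disable the last enabled worker\n");
  return 0;
}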
VLIB_CLI_COMMAND (cmd_set_sw_scheduler_worker_crypto, static) = {
  .path = "set sw_scheduler",
  .short_help = "set sw_scheduler worker <idx> crypto <on|off>",
  ...
};

VLIB_CLI_COMMAND (cmd_show_sw_scheduler_workers, static) = {
  .path = "show sw_scheduler workers",
  .short_help = "show sw_scheduler workers",
  ...
};
#define _(n, s, k, t, a)                                                      \
  static vnet_crypto_async_frame_t                                            \
    *crypto_sw_scheduler_frame_dequeue_##n##_TAG_##t##_AAD_##a##_enc (        \
      vlib_main_t *vm, u32 *nb_elts_processed, u32 *thread_idx)               \
  {                                                                           \
    return crypto_sw_scheduler_dequeue_aead (                                 \
      vm, VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC,                         \
      VNET_CRYPTO_OP_##n##_ENC, t, a, nb_elts_processed, thread_idx);         \
  }                                                                           \
  static vnet_crypto_async_frame_t                                            \
    *crypto_sw_scheduler_frame_dequeue_##n##_TAG_##t##_AAD_##a##_dec (        \
      vlib_main_t *vm, u32 *nb_elts_processed, u32 *thread_idx)               \
  {                                                                           \
    return crypto_sw_scheduler_dequeue_aead (                                 \
      vm, VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC,                         \
      VNET_CRYPTO_OP_##n##_DEC, t, a, nb_elts_processed, thread_idx);         \
  }
#define _(c, h, s, k, d)                                                      \
  static vnet_crypto_async_frame_t                                            \
    *crypto_sw_scheduler_frame_dequeue_##c##_##h##_TAG##d##_enc (             \
      vlib_main_t *vm, u32 *nb_elts_processed, u32 *thread_idx)               \
  {                                                                           \
    return crypto_sw_scheduler_dequeue_link (                                 \
      vm, VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC,                            \
      VNET_CRYPTO_OP_##c##_ENC, VNET_CRYPTO_OP_##h##_HMAC, d, 1,              \
      nb_elts_processed, thread_idx);                                         \
  }                                                                           \
  static vnet_crypto_async_frame_t                                            \
    *crypto_sw_scheduler_frame_dequeue_##c##_##h##_TAG##d##_dec (             \
      vlib_main_t *vm, u32 *nb_elts_processed, u32 *thread_idx)               \
  {                                                                           \
    return crypto_sw_scheduler_dequeue_link (                                 \
      vm, VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC,                            \
      VNET_CRYPTO_OP_##c##_DEC, VNET_CRYPTO_OP_##h##_HMAC, d, 0,              \
      nb_elts_processed, thread_idx);                                         \
  }
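The `_' macro above is an X-macro: an algorithm list such as foreach_crypto_aead_async_alg invokes it once per entry, stamping out one enc and one dec wrapper per tuple. A standalone sketch of the technique with an invented two-entry list:

#include <stdio.h>

/* invented two-entry algorithm list; the real lists live in vnet */
#define foreach_alg _ (aes_128, 16) _ (aes_256, 16)

#define _(n, t)                                                               \
  static int frame_dequeue_##n##_TAG_##t (void)                               \
  {                                                                           \
    return t;	/* stand-in for the real dequeue call */                      \
  }
foreach_alg
#undef _

int
main (void)
{
  printf ("%d %d\n", frame_dequeue_aes_128_TAG_16 (),
	  frame_dequeue_aes_256_TAG_16 ());
  return 0;
}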
/* crypto_sw_scheduler_init () - excerpt: register this engine with vnet crypto */
  cm->crypto_engine_index =
    vnet_crypto_register_engine (vm, ...,
				 "SW Scheduler Async Engine");
#define _(n, s, k, t, a)                                                      \
  vnet_crypto_register_async_handler (                                        \
    vm, cm->crypto_engine_index,                                              \
    VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC,                               \
    crypto_sw_scheduler_frame_enqueue,                                        \
    crypto_sw_scheduler_frame_dequeue_##n##_TAG_##t##_AAD_##a##_enc);         \
  vnet_crypto_register_async_handler (                                        \
    vm, cm->crypto_engine_index,                                              \
    VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC,                               \
    crypto_sw_scheduler_frame_enqueue,                                        \
    crypto_sw_scheduler_frame_dequeue_##n##_TAG_##t##_AAD_##a##_dec);
#define _(c, h, s, k, d)                                                      \
  vnet_crypto_register_async_handler (                                        \
    vm, cm->crypto_engine_index, VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC,     \
    crypto_sw_scheduler_frame_enqueue,                                        \
    crypto_sw_scheduler_frame_dequeue_##c##_##h##_TAG##d##_enc);              \
  vnet_crypto_register_async_handler (                                        \
    vm, cm->crypto_engine_index, VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC,     \
    crypto_sw_scheduler_frame_enqueue,                                        \
    crypto_sw_scheduler_frame_dequeue_##c##_##h##_TAG##d##_dec);
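Each expansion registers the shared enqueue handler plus the generated per-op dequeue wrapper under its async op id. A standalone sketch of such a per-op dispatch table (types and ids are illustrative, not vnet's):

#include <stdio.h>

typedef int (*enqueue_fn) (void *frame);
typedef void *(*dequeue_fn) (unsigned *n_elts, unsigned *thread_idx);

typedef struct
{
  enqueue_fn enq;
  dequeue_fn deq;
} handlers_t;

enum { OP_AES_128_GCM_ENC, OP_AES_128_GCM_DEC, OP_N_IDS };	/* illustrative ids */

static handlers_t handler_table[OP_N_IDS];

static void
register_async_handler (int op_id, enqueue_fn e, dequeue_fn d)
{
  handler_table[op_id].enq = e;	/* one enqueue/dequeue pair per op id */
  handler_table[op_id].deq = d;
}

static int
enq_stub (void *frame)
{
  (void) frame;
  return 0;
}

static void *
deq_stub (unsigned *n_elts, unsigned *thread_idx)
{
  *n_elts = 0;
  *thread_idx = 0;
  return NULL;
}

int
main (void)
{
  register_async_handler (OP_AES_128_GCM_ENC, enq_stub, deq_stub);
  printf ("%d\n", handler_table[OP_AES_128_GCM_ENC].enq (NULL));	/* 0 */
  return 0;
}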
VLIB_INIT_FUNCTION (crypto_sw_scheduler_init) = {
  .runs_after = VLIB_INITS ("vnet_crypto_init"),
};

/* plugin registration */
VLIB_PLUGIN_REGISTER () = {
  .version = VPP_BUILD_VER,
  .description = "SW Scheduler Crypto Async Engine plugin",
};
Referenced symbols:

#define vec_reset_length(v)
Reset vector length to zero; NULL-pointer tolerant.
vnet_crypto_op_chunk_t * chunks
clib_error_t * sw_scheduler_cli_init(vlib_main_t *vm)
u32 next_buffer
Next buffer for this linked-list of buffers.
static u32 vlib_num_workers()
crypto_sw_scheduler_queue_t * queues[VNET_CRYPTO_ASYNC_OP_N_IDS]
vlib_frame_t * frame
#define foreach_crypto_aead_async_alg
async crypto
#define foreach_crypto_link_async_alg
VNET_CRYPTO_ASYNC_OP_N_IDS (enumerator)
static_always_inline void crypto_sw_scheduler_convert_link_crypto(vlib_main_t *vm, crypto_sw_scheduler_per_thread_data_t *ptd, vnet_crypto_key_t *key, vnet_crypto_async_frame_elt_t *fe, u32 index, u32 bi, vnet_crypto_op_id_t crypto_op_id, vnet_crypto_op_id_t integ_op_id, u32 digest_len, u8 is_enc)
static vlib_buffer_t * vlib_get_buffer(vlib_main_t *vm, u32 buffer_index)
Translate buffer index into buffer pointer.
static u32 vlib_get_worker_thread_index(u32 worker_index)
vnet_crypto_op_status_t status
static u32 vlib_get_worker_index(u32 thread_index)
vnet_crypto_op_t * crypto_ops
#define clib_error_return(e, args...)
typedef u32 vnet_crypto_key_index_t
vlib_main_t * vm
vnet_crypto_async_frame_state_t state
#define VNET_CRYPTO_OP_FLAG_INIT_IV
vlib_worker_thread_t * vlib_worker_threads
crypto_sw_scheduler_main_t crypto_sw_scheduler_main
u32 vnet_crypto_register_engine(vlib_main_t *vm, char *name, int prio, char *desc)
static vlib_cli_command_t cmd_set_sw_scheduler_worker_crypto
(constructor) VLIB_CLI_COMMAND (cmd_set_sw_scheduler_worker_crypto)
VNET_CRYPTO_FRAME_STATE_ELT_ERROR (enumerator)
static_always_inline void process_chained_ops(vlib_main_t *vm, vnet_crypto_async_frame_t *f, vnet_crypto_op_t *ops, vnet_crypto_op_chunk_t *chunks, u8 *state)
static void crypto_sw_scheduler_key_handler(vlib_main_t *vm, vnet_crypto_key_op_t kop, vnet_crypto_key_index_t idx)
static_always_inline vnet_crypto_async_frame_t * crypto_sw_scheduler_dequeue_link(vlib_main_t *vm, vnet_crypto_async_op_id_t async_op_id, vnet_crypto_op_id_t sync_crypto_op_id, vnet_crypto_op_id_t sync_integ_op_id, u16 digest_len, u8 is_enc, u32 *nb_elts_processed, u32 *enqueue_thread_idx)
static_always_inline void process_ops(vlib_main_t *vm, vnet_crypto_async_frame_t *f, vnet_crypto_op_t *ops, u8 *state)
#define CLIB_PREFETCH(addr, size, type)
#define VNET_CRYPTO_OP_FLAG_HMAC_CHECK
i16 current_data
signed offset in data[], pre_data[] that we are currently processing.
u32 vnet_crypto_process_ops(vlib_main_t *vm, vnet_crypto_op_t ops[], u32 n_ops)
#define vec_len(v)
Number of elements in vector (rvalue-only, NULL tolerant)
#define CLIB_MEMORY_STORE_BARRIER()
#define CRYPTO_SW_SCHEDULER_QUEUE_MASK
#define vec_add2(V, P, N)
Add N elements to end of vector V, return pointer to new elements in P.
static clib_error_t * sw_scheduler_set_worker_crypto(vlib_main_t *vm, unformat_input_t *input, vlib_cli_command_t *cmd)
static_always_inline void cryptodev_sw_scheduler_sgl(vlib_main_t *vm, crypto_sw_scheduler_per_thread_data_t *ptd, vlib_buffer_t *b, vnet_crypto_op_t *op, i32 offset, i32 len)
#define vec_elt_at_index(v, i)
Get vector value at index i checking that i is in bounds.
u32 vnet_crypto_process_chained_ops(vlib_main_t *vm, vnet_crypto_op_t ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops)
vnet_crypto_op_t * chained_crypto_ops
#define vec_validate_aligned(V, I, A)
Make sure vector is long enough for given index (no header, specified alignment)
static_always_inline void crypto_sw_scheduler_convert_aead(vlib_main_t *vm, crypto_sw_scheduler_per_thread_data_t *ptd, vnet_crypto_async_frame_elt_t *fe, u32 index, u32 bi, vnet_crypto_op_id_t op_id, u16 aad_len, u8 tag_len)
#define VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS
#define static_always_inline
#define vec_foreach_index(var, v)
Iterate over vector indices.
vnet_crypto_op_t * integ_ops
vlib_thread_main_t vlib_thread_main
#define CRYPTO_SW_SCHEDULER_QUEUE_SIZE
#define vec_validate(V, I)
Make sure vector is long enough for given index (no header, unspecified alignment)
#define VLIB_CLI_COMMAND(x,...)
static_always_inline vnet_crypto_key_t * vnet_crypto_get_key(vnet_crypto_key_index_t index)
u8 flags
share same VNET_CRYPTO_OP_FLAG_* values
#define CLIB_CACHE_LINE_BYTES
static_always_inline vnet_crypto_async_frame_t * crypto_sw_scheduler_get_completed_frame(crypto_sw_scheduler_queue_t *q)
void vlib_cli_output(vlib_main_t *vm, char *fmt,...)
static_always_inline void clib_memset_u8(void *p, u8 val, uword count)
u16 current_length
Nbytes between current data and the end of this buffer.
static u32 vlib_buffer_space_left_at_end(vlib_main_t *vm, vlib_buffer_t *b)
clib_error_t * crypto_sw_scheduler_init(vlib_main_t *vm)
#define vec_free(V)
Free vector's memory (no header).
vnet_crypto_async_frame_t * jobs[0]
void vnet_crypto_register_key_handler(vlib_main_t *vm, u32 engine_index, vnet_crypto_key_handler_t *key_handler)
#define VLIB_INIT_FUNCTION(x)
static int crypto_sw_scheduler_frame_enqueue(vlib_main_t *vm, vnet_crypto_async_frame_t *frame)
VNET_CRYPTO_FRAME_STATE_SUCCESS (enumerator)
#define vec_foreach(var, vec)
Vector iterator.
static_always_inline vnet_crypto_async_frame_t * crypto_sw_scheduler_dequeue_aead(vlib_main_t *vm, vnet_crypto_async_op_id_t async_op_id, vnet_crypto_op_id_t sync_op_id, u8 tag_len, u8 aad_len, u32 *nb_elts_processed, u32 *enqueue_thread_idx)
VNET_CRYPTO_FRAME_STATE_PENDING (enumerator)
static_always_inline vnet_crypto_async_frame_t * crypto_sw_scheduler_get_pending_frame(crypto_sw_scheduler_queue_t *q)
vnet_crypto_async_op_id_t
typedef clib_error_t *(vlib_init_function_t) (struct vlib_main_t *vm)
clib_error_t * crypto_sw_scheduler_api_init(vlib_main_t *vm)
static void * clib_mem_alloc_aligned(uword size, uword align)
int crypto_sw_scheduler_set_worker_crypto(u32 worker_idx, u8 enabled)
static vlib_thread_main_t * vlib_get_thread_main()
static vlib_cli_command_t cmd_show_sw_scheduler_workers
(constructor) VLIB_CLI_COMMAND (cmd_show_sw_scheduler_workers)
static clib_error_t * sw_scheduler_show_workers(vlib_main_t *vm, unformat_input_t *input, vlib_cli_command_t *cmd)
#define clib_atomic_bool_cmp_and_swap(addr, old, new)
#define VNET_CRYPTO_KEY_TYPE_LINK
VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS (enumerator)
u32 flags
buffer flags: VLIB_BUFFER_FREE_LIST_INDEX_MASK: bits used to store free list index,...
VLIB buffer representation.
vnet_crypto_op_t * chained_integ_ops