#include <vpp/app/version.h>

/* crypto_sw_scheduler_set_worker_crypto: validate the worker index and
   refuse to disable the last crypto-enabled worker */
if (worker_idx >= vlib_num_workers ())
  return VNET_API_ERROR_INVALID_VALUE;
if (enabled || count > 1)
  ...
else
  return VNET_API_ERROR_INVALID_VALUE_2;
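A minimal stand-alone sketch of the same check, assuming a plain array of per-worker enable flags; all names below are illustrative, not the plugin's API:

/* reject a bad worker index; refuse to disable the last enabled worker */
#include <stdint.h>

#define ERR_INVALID_VALUE   -1	/* bad worker index */
#define ERR_INVALID_VALUE_2 -2	/* would disable the last crypto worker */

static int
set_worker_crypto (uint8_t *enabled_flags, uint32_t n_workers,
		   uint32_t worker_idx, uint8_t enabled)
{
  uint32_t i, count = 0;

  if (worker_idx >= n_workers)
    return ERR_INVALID_VALUE;

  for (i = 0; i < n_workers; i++)
    count += enabled_flags[i];

  /* allow the change unless it would leave no crypto worker enabled */
  if (enabled || count > 1)
    enabled_flags[worker_idx] = enabled;
  else
    return ERR_INVALID_VALUE_2;

  return 0;
}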
/* crypto_sw_scheduler_frame_enqueue, queue-full path: fail every
   element of the frame */
for (i = 0; i < n_elts; i++)
  frame->elts[i].status = VNET_CRYPTO_OP_STATUS_FAIL_ENGINE_ERR;
/* crypto_sw_scheduler_get_pending_frame: scan the ring from tail to
   head for a frame still in the PENDING state */
for (i = tail; i < head; i++)
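A stand-alone model of that scan: a power-of-two ring indexed through a mask, mirroring CRYPTO_SW_SCHEDULER_QUEUE_SIZE and CRYPTO_SW_SCHEDULER_QUEUE_MASK. The real code claims a frame with clib_atomic_bool_cmp_and_swap; a plain check stands in for it here, and all types are simplified stand-ins:

#include <stddef.h>
#include <stdint.h>

#define QUEUE_SIZE 64u			/* must be a power of two */
#define QUEUE_MASK (QUEUE_SIZE - 1)

enum { JOB_PENDING, JOB_WORK_IN_PROGRESS };

typedef struct { int state; } job_t;

typedef struct
{
  uint32_t head, tail;			/* free-running counters */
  job_t *jobs[QUEUE_SIZE];
} queue_t;

static job_t *
get_pending_job (queue_t *q)
{
  uint32_t i;
  for (i = q->tail; i < q->head; i++)
    {
      job_t *j = q->jobs[i & QUEUE_MASK];	/* mask only on use */
      if (j && j->state == JOB_PENDING)
	{
	  j->state = JOB_WORK_IN_PROGRESS;	/* atomic CAS in the real code */
	  return j;
	}
    }
  return NULL;
}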
/* cryptodev_sw_scheduler_sgl: follow the buffer chain while building
   the scatter-gather list ... */
if (nb->flags & VLIB_BUFFER_NEXT_PRESENT)
  ...
/* ... and by the end both the offset and the length must be consumed */
ASSERT (offset == 0 && len == 0);
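A sketch of that chained-buffer walk: split the byte range [offset, offset+len) of a buffer chain into contiguous chunks. buf_t and chunk_t are stand-ins for vlib_buffer_t and vnet_crypto_op_chunk_t, modeling only the fields needed here:

#include <assert.h>
#include <stdint.h>

typedef struct buf
{
  uint8_t *data;
  uint32_t length;		/* valid bytes in this buffer */
  struct buf *next;		/* next buffer in the chain, if any */
} buf_t;

typedef struct { uint8_t *src; uint32_t len; } chunk_t;

static uint32_t
build_sgl (buf_t *b, int32_t offset, int32_t len,
	   chunk_t *chunks, uint32_t max_chunks)
{
  uint32_t n = 0;

  /* skip whole buffers covered by the starting offset */
  while (b && offset >= (int32_t) b->length)
    {
      offset -= b->length;
      b = b->next;
    }
  while (b && len > 0 && n < max_chunks)
    {
      int32_t avail = b->length - offset;
      chunks[n].src = b->data + offset;
      chunks[n].len = avail < len ? avail : (uint32_t) len;
      len -= chunks[n].len;
      n++;
      offset = 0;		/* only the first chunk starts mid-buffer */
      b = b->next;
    }
  assert (len == 0);		/* mirrors ASSERT (offset == 0 && len == 0) */
  return n;
}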
/* crypto_sw_scheduler_convert_link_crypto (..., u32 digest_len,
   u8 is_enc): one frame element fills a cipher op and an integrity op */
crypto_op->op = crypto_op_id;
crypto_op->iv = fe->iv;
...
integ_op->op = integ_op_id;
integ_op->digest = fe->digest;
integ_op->digest_len = digest_len;
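A sketch of that element-to-ops conversion with simplified stand-in types; field names follow the fragments above, and HMAC_CHECK is an illustrative stand-in for VNET_CRYPTO_OP_FLAG_HMAC_CHECK (assumed here to mark the verify-on-decrypt case):

#include <stdint.h>

#define HMAC_CHECK (1 << 0)	/* stand-in for VNET_CRYPTO_OP_FLAG_HMAC_CHECK */

typedef struct { uint8_t *iv, *digest; } frame_elt_t;
typedef struct
{
  int op;			/* sync op id to run */
  int flags;
  uint8_t *iv, *digest;
  uint32_t digest_len;
} sync_op_t;

static void
convert_link_elt (frame_elt_t *fe, int crypto_op_id, int integ_op_id,
		  uint32_t digest_len, uint8_t is_enc,
		  sync_op_t *crypto_op, sync_op_t *integ_op)
{
  crypto_op->op = crypto_op_id;
  crypto_op->iv = fe->iv;		/* cipher op consumes the element's IV */

  integ_op->op = integ_op_id;
  integ_op->digest = fe->digest;	/* where the tag is written or read */
  integ_op->digest_len = digest_len;
  if (!is_enc)
    integ_op->flags |= HMAC_CHECK;	/* decrypt path verifies the tag */
}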
/* process_ops: check every finished op against its frame element */
ASSERT (op - ops < n_ops);
if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
  ...

/* process_chained_ops performs the same per-op status check */
ASSERT (op - ops < n_ops);
if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
  ...
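A minimal model of that completion scan: copy each op's status back to its frame element and mark the frame if anything failed. All types are stand-ins; elt_index stands in for however the real code maps an op back to its element:

#include <stdint.h>

enum { OP_COMPLETED = 0 };
enum { FRAME_SUCCESS = 0, FRAME_ELT_ERROR = 1 };

typedef struct { int status; uint32_t elt_index; } op_t;
typedef struct { int status; } frame_elt_t;

static void
process_ops (op_t *ops, uint32_t n_ops, frame_elt_t *elts, uint8_t *state)
{
  uint32_t i;
  for (i = 0; i < n_ops; i++)
    {
      if (ops[i].status != OP_COMPLETED)
	*state = FRAME_ELT_ERROR;	/* one bad element taints the frame */
      elts[ops[i].elt_index].status = ops[i].status;
    }
}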
/* crypto_sw_scheduler_dequeue_aead (..., u8 aad_len,
   u32 *nb_elts_processed, u32 *enqueue_thread_idx) */
q = ptd->queues[async_op_id];
...
*nb_elts_processed = n_elts = f->n_elts;
...
crypto_sw_scheduler_convert_aead (vm, ptd, fe, fe - f->elts, bi[0],
				  sync_op_id, aad_len, tag_len);
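A self-contained skeleton of the flow shared by this AEAD dequeue and the linked crypto+integ dequeue below: claim a pending frame, process each element synchronously, then publish the result state. Everything here is a stand-in; the real code claims the frame with an atomic compare-and-swap and runs the elements through vnet_crypto_process_ops:

#include <stddef.h>
#include <stdint.h>

enum { ST_PENDING, ST_WORK_IN_PROGRESS, ST_SUCCESS, ST_ELT_ERROR };

typedef struct { int status; } elt_t;
typedef struct { int state; uint32_t n_elts; elt_t elts[64]; } frame_t;

static int run_elt (elt_t *e) { e->status = 0; return 0; }	/* stub */

static frame_t *
dequeue_one (frame_t *f, uint32_t *nb_elts_processed)
{
  uint32_t i;
  int ok = 1;

  if (!f || f->state != ST_PENDING)
    return NULL;
  f->state = ST_WORK_IN_PROGRESS;	/* atomic CAS in the real code */

  *nb_elts_processed = f->n_elts;
  for (i = 0; i < f->n_elts; i++)
    ok &= (run_elt (&f->elts[i]) == 0);

  f->state = ok ? ST_SUCCESS : ST_ELT_ERROR;
  return f;
}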
/* crypto_sw_scheduler_dequeue_link (..., u16 digest_len, u8 is_enc,
   u32 *nb_elts_processed, u32 *enqueue_thread_idx) */
q = ptd->queues[async_op_id];
...
*nb_elts_processed = n_elts = f->n_elts;
...
crypto_sw_scheduler_convert_link_crypto (vm, ptd, key, fe, fe - f->elts,
					 bi[0], sync_crypto_op_id,
					 sync_integ_op_id, digest_len,
					 is_enc);
/* sw_scheduler_set_worker_crypto: parse the CLI arguments ... */
if (unformat (line_input, "worker %u", &worker_index))
  ...
if (unformat (line_input, "crypto"))
  ...
else if (unformat (line_input, "off"))
  ...

/* ... and map the API errors back to CLI messages */
if (rv == VNET_API_ERROR_INVALID_VALUE)
  ...
else if (rv == VNET_API_ERROR_INVALID_VALUE_2)
  ...
VLIB_CLI_COMMAND (...) = {
  .path = "set sw_scheduler",
  .short_help = "set sw_scheduler worker <idx> crypto <on|off>",
  .function = sw_scheduler_set_worker_crypto,
};

VLIB_CLI_COMMAND (...) = {
  .path = "show sw_scheduler workers",
  .short_help = "show sw_scheduler workers",
  .function = sw_scheduler_show_workers,
};
#define _(n, s, k, t, a)                                                      \
  static vnet_crypto_async_frame_t                                           \
    *crypto_sw_scheduler_frame_dequeue_##n##_TAG_##t##_AAD_##a##_enc (        \
      vlib_main_t *vm, u32 *nb_elts_processed, u32 *thread_idx)               \
  {                                                                           \
    return crypto_sw_scheduler_dequeue_aead (                                 \
      vm, VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC,                         \
      VNET_CRYPTO_OP_##n##_ENC, t, a, nb_elts_processed, thread_idx);         \
  }                                                                           \
  static vnet_crypto_async_frame_t                                           \
    *crypto_sw_scheduler_frame_dequeue_##n##_TAG_##t##_AAD_##a##_dec (        \
      vlib_main_t *vm, u32 *nb_elts_processed, u32 *thread_idx)               \
  {                                                                           \
    return crypto_sw_scheduler_dequeue_aead (                                 \
      vm, VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC,                         \
      VNET_CRYPTO_OP_##n##_DEC, t, a, nb_elts_processed, thread_idx);         \
  }

#define _(c, h, s, k, d)                                                      \
  static vnet_crypto_async_frame_t                                           \
    *crypto_sw_scheduler_frame_dequeue_##c##_##h##_TAG##d##_enc (             \
      vlib_main_t *vm, u32 *nb_elts_processed, u32 *thread_idx)               \
  {                                                                           \
    return crypto_sw_scheduler_dequeue_link (                                 \
      vm, VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC,                            \
      VNET_CRYPTO_OP_##c##_ENC, VNET_CRYPTO_OP_##h##_HMAC, d, 1,              \
      nb_elts_processed, thread_idx);                                         \
  }                                                                           \
  static vnet_crypto_async_frame_t                                           \
    *crypto_sw_scheduler_frame_dequeue_##c##_##h##_TAG##d##_dec (             \
      vlib_main_t *vm, u32 *nb_elts_processed, u32 *thread_idx)               \
  {                                                                           \
    return crypto_sw_scheduler_dequeue_link (                                 \
      vm, VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC,                            \
      VNET_CRYPTO_OP_##c##_DEC, VNET_CRYPTO_OP_##h##_HMAC, d, 0,              \
      nb_elts_processed, thread_idx);                                         \
  }

cm->crypto_engine_index =
  vnet_crypto_register_engine (vm, "sw_scheduler", 100,
			       "SW Scheduler Async Engine");
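The blocks above use VPP's list-macro ("X-macro") idiom: a foreach_* macro expands a user-supplied _() once per algorithm, stamping out one enc/dec dequeue pair per entry. A stand-alone miniature of the pattern, with a hypothetical two-entry algorithm list:

#include <stdio.h>

#define foreach_alg _ (aes_128_gcm) _ (aes_256_gcm)

/* generate one handler per list entry */
#define _(n) static void handle_##n (void) { puts (#n); }
foreach_alg
#undef _

int
main (void)
{
  /* generate one call per list entry */
#define _(n) handle_##n ();
  foreach_alg
#undef _
  return 0;
}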
#define _(n, s, k, t, a)                                                      \
  vnet_crypto_register_async_handler (                                        \
    vm, cm->crypto_engine_index,                                              \
    VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC,                               \
    crypto_sw_scheduler_frame_enqueue,                                        \
    crypto_sw_scheduler_frame_dequeue_##n##_TAG_##t##_AAD_##a##_enc);         \
  vnet_crypto_register_async_handler (                                        \
    vm, cm->crypto_engine_index,                                              \
    VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC,                               \
    crypto_sw_scheduler_frame_enqueue,                                        \
    crypto_sw_scheduler_frame_dequeue_##n##_TAG_##t##_AAD_##a##_dec);

#define _(c, h, s, k, d)                                                      \
  vnet_crypto_register_async_handler (                                        \
    vm, cm->crypto_engine_index, VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC,     \
    crypto_sw_scheduler_frame_enqueue,                                        \
    crypto_sw_scheduler_frame_dequeue_##c##_##h##_TAG##d##_enc);              \
  vnet_crypto_register_async_handler (                                        \
    vm, cm->crypto_engine_index, VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC,     \
    crypto_sw_scheduler_frame_enqueue,                                        \
    crypto_sw_scheduler_frame_dequeue_##c##_##h##_TAG##d##_dec);

VLIB_INIT_FUNCTION (crypto_sw_scheduler_init) =
{
  .runs_after = VLIB_INITS ("vnet_crypto_init"),
};
VLIB_PLUGIN_REGISTER () =
{
  .version = VPP_BUILD_VER,
  .description = "SW Scheduler Crypto Async Engine plugin",
};
u32 vnet_crypto_process_ops(vlib_main_t *vm, vnet_crypto_op_t ops[], u32 n_ops)
#define vec_validate(V, I)
Make sure vector is long enough for given index (no header, unspecified alignment).
u32 flags
Buffer flags: VLIB_BUFFER_FREE_LIST_INDEX_MASK: bits used to store free list index; VLIB_BUFFER_IS_TRACED: trace this buffer.
#define vec_foreach_index(var, v)
Iterate over vector indices.
#define foreach_crypto_link_async_alg
#define VNET_CRYPTO_KEY_TYPE_LINK
static clib_error_t * sw_scheduler_show_workers(vlib_main_t *vm, unformat_input_t *input, vlib_cli_command_t *cmd)
i16 current_data
signed offset in data[], pre_data[] that we are currently processing.
static int crypto_sw_scheduler_frame_enqueue(vlib_main_t *vm, vnet_crypto_async_frame_t *frame)
#define CLIB_MEMORY_STORE_BARRIER()
static_always_inline void process_chained_ops(vlib_main_t *vm, vnet_crypto_async_frame_t *f, vnet_crypto_op_t *ops, vnet_crypto_op_chunk_t *chunks, u8 *state)
clib_error_t * crypto_sw_scheduler_init(vlib_main_t *vm)
#define VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS
#define VNET_CRYPTO_FRAME_STATE_PENDING
static void crypto_sw_scheduler_key_handler(vlib_main_t *vm, vnet_crypto_key_op_t kop, vnet_crypto_key_index_t idx)
u16 current_length
Nbytes between current data and the end of this buffer.
vnet_crypto_op_chunk_t * chunks
static_always_inline void crypto_sw_scheduler_convert_aead(vlib_main_t *vm, crypto_sw_scheduler_per_thread_data_t *ptd, vnet_crypto_async_frame_elt_t *fe, u32 index, u32 bi, vnet_crypto_op_id_t op_id, u16 aad_len, u8 tag_len)
#define vec_add2(V, P, N)
Add N elements to end of vector V, return pointer to new elements in P.
static_always_inline void cryptodev_sw_scheduler_sgl(vlib_main_t *vm, crypto_sw_scheduler_per_thread_data_t *ptd, vlib_buffer_t *b, vnet_crypto_op_t *op, i32 offset, i32 len)
#define vec_validate_aligned(V, I, A)
Make sure vector is long enough for given index (no header, specified alignment)
vnet_crypto_op_t * integ_ops
#define vec_reset_length(v)
Reset vector length to zero; NULL-pointer tolerant.
clib_error_t * sw_scheduler_cli_init(vlib_main_t *vm)
static_always_inline void crypto_sw_scheduler_convert_link_crypto(vlib_main_t *vm, crypto_sw_scheduler_per_thread_data_t *ptd, vnet_crypto_key_t *key, vnet_crypto_async_frame_elt_t *fe, u32 index, u32 bi, vnet_crypto_op_id_t crypto_op_id, vnet_crypto_op_id_t integ_op_id, u32 digest_len, u8 is_enc)
static u32 vlib_get_worker_index(u32 thread_index)
void vnet_crypto_register_key_handler(vlib_main_t *vm, u32 engine_index, vnet_crypto_key_handler_t *key_handler)
#define static_always_inline
#define VLIB_INIT_FUNCTION(x)
static_always_inline vnet_crypto_async_frame_t * crypto_sw_scheduler_get_completed_frame(crypto_sw_scheduler_queue_t *q)
vnet_crypto_op_t * chained_crypto_ops
static_always_inline vnet_crypto_async_frame_t * crypto_sw_scheduler_dequeue_aead(vlib_main_t *vm, vnet_crypto_async_op_id_t async_op_id, vnet_crypto_op_id_t sync_op_id, u8 tag_len, u8 aad_len, u32 *nb_elts_processed, u32 *enqueue_thread_idx)
#define vec_elt_at_index(v, i)
Get vector value at index i checking that i is in bounds.
#define clib_error_return(e, args...)
vnet_crypto_op_t * crypto_ops
vnet_crypto_op_t * chained_integ_ops
vlib_worker_thread_t * vlib_worker_threads
crypto_sw_scheduler_queue_t * queues[VNET_CRYPTO_ASYNC_OP_N_IDS]
int crypto_sw_scheduler_set_worker_crypto(u32 worker_idx, u8 enabled)
#define CRYPTO_SW_SCHEDULER_QUEUE_SIZE
clib_error_t * crypto_sw_scheduler_api_init(vlib_main_t *vm)
vlib_thread_main_t vlib_thread_main
#define VNET_CRYPTO_OP_FLAG_HMAC_CHECK
u32 buffer_indices[VNET_CRYPTO_FRAME_SIZE]
#define VNET_CRYPTO_OP_FLAG_INIT_IV
#define CLIB_PREFETCH(addr, size, type)
static_always_inline vnet_crypto_async_frame_t * crypto_sw_scheduler_dequeue_link(vlib_main_t *vm, vnet_crypto_async_op_id_t async_op_id, vnet_crypto_op_id_t sync_crypto_op_id, vnet_crypto_op_id_t sync_integ_op_id, u16 digest_len, u8 is_enc, u32 *nb_elts_processed, u32 *enqueue_thread_idx)
#define vec_free(V)
Free vector's memory (no header).
#define VNET_CRYPTO_FRAME_STATE_SUCCESS
#define CRYPTO_SW_SCHEDULER_QUEUE_MASK
#define VNET_CRYPTO_FRAME_STATE_ELT_ERROR
#define VLIB_CLI_COMMAND(x,...)
void vlib_cli_output(vlib_main_t *vm, char *fmt,...)
vnet_crypto_async_frame_t * jobs[0]
#define VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS
u32 vnet_crypto_process_chained_ops(vlib_main_t *vm, vnet_crypto_op_t ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops)
u8 flags
Shares the same VNET_CRYPTO_OP_FLAG_* values.
u32 vnet_crypto_key_index_t
crypto_sw_scheduler_per_thread_data_t * per_thread_data
#define foreach_crypto_aead_async_alg
async crypto
static_always_inline void process_ops(vlib_main_t *vm, vnet_crypto_async_frame_t *f, vnet_crypto_op_t *ops, u8 *state)
vnet_crypto_async_op_id_t
static_always_inline void clib_memset_u8(void *p, u8 val, uword count)
vnet_crypto_async_op_id_t op
#define clib_atomic_bool_cmp_and_swap(addr, old, new)
static_always_inline vnet_crypto_key_t * vnet_crypto_get_key(vnet_crypto_key_index_t index)
#define vec_len(v)
Number of elements in vector (rvalue-only, NULL tolerant)
u32 next_buffer
Next buffer for this linked-list of buffers.
vlib_buffer_t
VLIB buffer representation.
crypto_sw_scheduler_main_t crypto_sw_scheduler_main
vnet_crypto_op_status_t status
static void * clib_mem_alloc_aligned(uword size, uword align)
static vlib_thread_main_t * vlib_get_thread_main()
static u32 vlib_num_workers()
#define vec_foreach(var, vec)
Vector iterator.
#define CLIB_CACHE_LINE_BYTES
u32 vnet_crypto_register_engine(vlib_main_t *vm, char *name, int prio, char *desc)
static clib_error_t * sw_scheduler_set_worker_crypto(vlib_main_t *vm, unformat_input_t *input, vlib_cli_command_t *cmd)
static vlib_buffer_t * vlib_get_buffer(vlib_main_t *vm, u32 buffer_index)
Translate buffer index into buffer pointer.
static_always_inline vnet_crypto_async_frame_t * crypto_sw_scheduler_get_pending_frame(crypto_sw_scheduler_queue_t *q)
vnet_crypto_op_status_t status
vnet_crypto_async_frame_elt_t elts[VNET_CRYPTO_FRAME_SIZE]
static u32 vlib_get_worker_thread_index(u32 worker_index)