#include <vpp/app/version.h>

/* crypto_sw_scheduler_set_worker_crypto(): reject an out-of-range worker
   index, and refuse to disable the last crypto-enabled worker. */
if (worker_idx >= vlib_num_workers ())
  return VNET_API_ERROR_INVALID_VALUE;

if (enabled || count > 1)
  /* ... flip self_crypto_enabled for the selected worker ... */;
else
  return VNET_API_ERROR_INVALID_VALUE_2;
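A minimal usage sketch of the setter (error handling elided; the argument comments are illustrative):

int rv = crypto_sw_scheduler_set_worker_crypto (0 /* worker_idx */,
						0 /* disable */);
if (rv == VNET_API_ERROR_INVALID_VALUE_2)
  ;	/* refused: worker 0 was the last crypto-enabled worker */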
/* crypto_sw_scheduler_frame_enqueue(): when the queue is full, fail
   every element of the frame before rejecting it. */
for (i = 0; i < n_elts; i++)
  frame->elts[i].status = VNET_CRYPTO_OP_STATUS_FAIL_ENGINE_ERR;
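The surrounding enqueue logic is not shown on this page; a sketch, assuming the queue keeps head/tail counters alongside the jobs ring listed later:

if (q->jobs[head & CRYPTO_SW_SCHEDULER_QUEUE_MASK])
  return -1;	/* full: the frame's elts were marked failed above */
frame->state = VNET_CRYPTO_FRAME_STATE_PENDING;
q->jobs[head & CRYPTO_SW_SCHEDULER_QUEUE_MASK] = frame;
CLIB_MEMORY_STORE_BARRIER ();	/* publish the slot before moving head */
q->head = head + 1;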
/* crypto_sw_scheduler_get_pending_frame(): scan the ring from tail to head */
for (i = tail; i < head; i++)
/* cryptodev_sw_scheduler_sgl(): follow the buffer chain; on exit the
   requested range must have been fully consumed. */
if (nb->flags & VLIB_BUFFER_NEXT_PRESENT)
  ...
ASSERT (offset == 0 && len == 0);
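A sketch of the chained-buffer walk this function presumably performs (chunk bookkeeping elided):

while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
  {
    b = vlib_get_buffer (vm, b->next_buffer);
    /* append a vnet_crypto_op_chunk_t covering this segment,
       shrinking len until offset and len both reach zero */
  }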
/* tail of the crypto_sw_scheduler_convert_link_crypto() parameter list */
u32 digest_len, u8 is_enc)
/* crypto_sw_scheduler_convert_link_crypto(): populate the paired cipher
   and integrity ops from one frame element */
crypto_op->op = crypto_op_id;
crypto_op->iv = fe->iv;
integ_op->op = integ_op_id;
integ_op->digest = fe->digest;
integ_op->digest_len = digest_len;
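On the decrypt side the integrity op verifies rather than generates the digest; a plausible sketch using the VNET_CRYPTO_OP_FLAG_HMAC_CHECK flag listed later on this page:

if (!is_enc)
  integ_op->flags |= VNET_CRYPTO_OP_FLAG_HMAC_CHECK;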
/* identical failure scan in process_ops() and process_chained_ops() */
ASSERT (op - ops < n_ops);
if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
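A sketch of the failure branch; indexing the frame element via op->user_data is an assumption:

f->elts[op->user_data].status = op->status;	/* record per-elt failure */
*state = VNET_CRYPTO_FRAME_STATE_ELT_ERROR;	/* downgrade whole frame */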
/* crypto_sw_scheduler_dequeue_aead(): parameter-list tail, per-op queue
   lookup, and element count */
u8 aad_len, u32 *nb_elts_processed, u32 *enqueue_thread_idx)

q = ptd->queues[async_op_id];
*nb_elts_processed = n_elts = f->n_elts;

/* tail of the crypto_sw_scheduler_convert_aead() call */
sync_op_id, aad_len, tag_len);
/* crypto_sw_scheduler_dequeue_link(): the linked cipher+integrity
   variant follows the same pattern */
u16 digest_len, u8 is_enc, u32 *nb_elts_processed, u32 *enqueue_thread_idx)

q = ptd->queues[async_op_id];
*nb_elts_processed = n_elts = f->n_elts;

/* part of the crypto_sw_scheduler_convert_link_crypto() call */
fe, fe - f->elts, bi[0],
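Putting the helpers together, both dequeue paths plausibly share this shape (a sketch, not the verbatim implementation):

f = crypto_sw_scheduler_get_pending_frame (q);
if (f)
  {
    /* convert each elt to sync ops, run process_ops() /
       process_chained_ops(), then set f->state */
  }
return crypto_sw_scheduler_get_completed_frame (q);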
/* sw_scheduler_set_worker_crypto() CLI: parse
   "worker <idx> crypto <on|off>", then map the return code to a message */
if (unformat (line_input, "worker %u", &worker_index))
  ...
if (unformat (line_input, "crypto"))
  ...
else if (unformat (line_input, "off"))
  ...

if (rv == VNET_API_ERROR_INVALID_VALUE)
  ...
else if (rv == VNET_API_ERROR_INVALID_VALUE_2)
/* CLI command registrations (VLIB_CLI_COMMAND) */
.path = "set sw_scheduler",
.short_help = "set sw_scheduler worker <idx> crypto <on|off>",

.path = "show sw_scheduler workers",
.short_help = "show sw_scheduler workers",
/* Per-algorithm dequeue handlers are stamped out by macro and expanded
   over foreach_crypto_aead_async_alg / foreach_crypto_link_async_alg. */
#define _(n, s, k, t, a)                                                      \
  static vnet_crypto_async_frame_t                                            \
    *crypto_sw_scheduler_frame_dequeue_##n##_TAG_##t##_AAD_##a##_enc (        \
      vlib_main_t *vm, u32 *nb_elts_processed, u32 *thread_idx)               \
  {                                                                           \
    return crypto_sw_scheduler_dequeue_aead (                                 \
      vm, VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC,                         \
      VNET_CRYPTO_OP_##n##_ENC, t, a, nb_elts_processed, thread_idx);         \
  }                                                                           \
  static vnet_crypto_async_frame_t                                            \
    *crypto_sw_scheduler_frame_dequeue_##n##_TAG_##t##_AAD_##a##_dec (        \
      vlib_main_t *vm, u32 *nb_elts_processed, u32 *thread_idx)               \
  {                                                                           \
    return crypto_sw_scheduler_dequeue_aead (                                 \
      vm, VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC,                         \
      VNET_CRYPTO_OP_##n##_DEC, t, a, nb_elts_processed, thread_idx);         \
  }

#define _(c, h, s, k, d)                                                      \
  static vnet_crypto_async_frame_t                                            \
    *crypto_sw_scheduler_frame_dequeue_##c##_##h##_TAG##d##_enc (             \
      vlib_main_t *vm, u32 *nb_elts_processed, u32 *thread_idx)               \
  {                                                                           \
    return crypto_sw_scheduler_dequeue_link (                                 \
      vm, VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC,                            \
      VNET_CRYPTO_OP_##c##_ENC, VNET_CRYPTO_OP_##h##_HMAC, d, 1,              \
      nb_elts_processed, thread_idx);                                         \
  }                                                                           \
  static vnet_crypto_async_frame_t                                            \
    *crypto_sw_scheduler_frame_dequeue_##c##_##h##_TAG##d##_dec (             \
      vlib_main_t *vm, u32 *nb_elts_processed, u32 *thread_idx)               \
  {                                                                           \
    return crypto_sw_scheduler_dequeue_link (                                 \
      vm, VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC,                            \
      VNET_CRYPTO_OP_##c##_DEC, VNET_CRYPTO_OP_##h##_HMAC, d, 0,              \
      nb_elts_processed, thread_idx);                                         \
  }

/* engine registration in crypto_sw_scheduler_init() */
"SW Scheduler Async Engine");
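For illustration, assuming AES_128_GCM with a 16-byte tag and 12-byte AAD appears in foreach_crypto_aead_async_alg, one expansion of the AEAD macro would read:

static vnet_crypto_async_frame_t *
crypto_sw_scheduler_frame_dequeue_AES_128_GCM_TAG_16_AAD_12_enc (
  vlib_main_t *vm, u32 *nb_elts_processed, u32 *thread_idx)
{
  return crypto_sw_scheduler_dequeue_aead (
    vm, VNET_CRYPTO_OP_AES_128_GCM_TAG16_AAD12_ENC,
    VNET_CRYPTO_OP_AES_128_GCM_ENC, 16, 12, nb_elts_processed, thread_idx);
}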
/* Handler registration: pair the shared enqueue with each generated
   dequeue handler. */
#define _(n, s, k, t, a)                                                      \
  vnet_crypto_register_async_handler (                                        \
    vm, cm->crypto_engine_index,                                              \
    VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC,                               \
    crypto_sw_scheduler_frame_enqueue,                                        \
    crypto_sw_scheduler_frame_dequeue_##n##_TAG_##t##_AAD_##a##_enc);         \
  vnet_crypto_register_async_handler (                                        \
    vm, cm->crypto_engine_index,                                              \
    VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC,                               \
    crypto_sw_scheduler_frame_enqueue,                                        \
    crypto_sw_scheduler_frame_dequeue_##n##_TAG_##t##_AAD_##a##_dec);

#define _(c, h, s, k, d)                                                      \
  vnet_crypto_register_async_handler (                                        \
    vm, cm->crypto_engine_index, VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC,     \
    crypto_sw_scheduler_frame_enqueue,                                        \
    crypto_sw_scheduler_frame_dequeue_##c##_##h##_TAG##d##_enc);              \
  vnet_crypto_register_async_handler (                                        \
    vm, cm->crypto_engine_index, VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC,     \
    crypto_sw_scheduler_frame_enqueue,                                        \
    crypto_sw_scheduler_frame_dequeue_##c##_##h##_TAG##d##_dec);

/* init ordering (VLIB_INIT_FUNCTION) */
.runs_after = VLIB_INITS ("vnet_crypto_init"),

/* plugin registration */
.version = VPP_BUILD_VER,
.description = "SW Scheduler Crypto Async Engine plugin",
u32 vnet_crypto_process_ops(vlib_main_t *vm, vnet_crypto_op_t ops[], u32 n_ops)
#define vec_validate(V, I)
Make sure vector is long enough for given index (no header, unspecified alignment)
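A minimal usage sketch of the vec_validate pattern (the vector v is hypothetical):

u32 *v = 0;
vec_validate (v, 9);	/* v now holds indices 0..9, zero-initialized */
v[9] = 42;
vec_free (v);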
u32 flags
buffer flags: VLIB_BUFFER_FREE_LIST_INDEX_MASK: bits used to store free list index, VLIB_BUFFER_IS_TRACED: trace this buffer.
#define vec_foreach_index(var, v)
Iterate over vector indices.
#define foreach_crypto_link_async_alg
#define VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED
#define VNET_CRYPTO_KEY_TYPE_LINK
static clib_error_t * sw_scheduler_show_workers(vlib_main_t *vm, unformat_input_t *input, vlib_cli_command_t *cmd)
i16 current_data
signed offset in data[], pre_data[] that we are currently processing.
static int crypto_sw_scheduler_frame_enqueue(vlib_main_t *vm, vnet_crypto_async_frame_t *frame)
#define CLIB_MEMORY_STORE_BARRIER()
static_always_inline void process_chained_ops(vlib_main_t *vm, vnet_crypto_async_frame_t *f, vnet_crypto_op_t *ops, vnet_crypto_op_chunk_t *chunks, u8 *state)
clib_error_t * crypto_sw_scheduler_init(vlib_main_t *vm)
#define VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS
#define VNET_CRYPTO_FRAME_STATE_PENDING
static void crypto_sw_scheduler_key_handler(vlib_main_t *vm, vnet_crypto_key_op_t kop, vnet_crypto_key_index_t idx)
u16 current_length
Number of bytes between current data and the end of this buffer.
vnet_crypto_op_chunk_t * chunks
static_always_inline void crypto_sw_scheduler_convert_aead(vlib_main_t *vm, crypto_sw_scheduler_per_thread_data_t *ptd, vnet_crypto_async_frame_elt_t *fe, u32 index, u32 bi, vnet_crypto_op_id_t op_id, u16 aad_len, u8 tag_len)
#define vec_add2(V, P, N)
Add N elements to end of vector V, return pointer to new elements in P.
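A sketch of how the scatter-gather code plausibly uses vec_add2 to append one chunk per buffer segment (the vnet_crypto_op_chunk_t field names are assumptions):

vnet_crypto_op_chunk_t *ch;
vec_add2 (ptd->chunks, ch, 1);	/* append one chunk; ch points at it */
ch->src = ch->dst = b->data + b->current_data;	/* assumed fields */
ch->len = b->current_length;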
static_always_inline void cryptodev_sw_scheduler_sgl(vlib_main_t *vm, crypto_sw_scheduler_per_thread_data_t *ptd, vlib_buffer_t *b, vnet_crypto_op_t *op, i32 offset, i32 len)
#define vec_validate_aligned(V, I, A)
Make sure vector is long enough for given index (no header, specified alignment)
vnet_crypto_op_t * integ_ops
#define vec_reset_length(v)
Reset vector length to zero. NULL-pointer tolerant.
clib_error_t * sw_scheduler_cli_init(vlib_main_t *vm)
static_always_inline void crypto_sw_scheduler_convert_link_crypto(vlib_main_t *vm, crypto_sw_scheduler_per_thread_data_t *ptd, vnet_crypto_key_t *key, vnet_crypto_async_frame_elt_t *fe, u32 index, u32 bi, vnet_crypto_op_id_t crypto_op_id, vnet_crypto_op_id_t integ_op_id, u32 digest_len, u8 is_enc)
static u32 vlib_get_worker_index(u32 thread_index)
void vnet_crypto_register_key_handler(vlib_main_t *vm, u32 engine_index, vnet_crypto_key_handler_t *key_handler)
#define static_always_inline
#define VLIB_INIT_FUNCTION(x)
static_always_inline vnet_crypto_async_frame_t * crypto_sw_scheduler_get_completed_frame(crypto_sw_scheduler_queue_t *q)
vnet_crypto_op_t * chained_crypto_ops
static_always_inline vnet_crypto_async_frame_t * crypto_sw_scheduler_dequeue_aead(vlib_main_t *vm, vnet_crypto_async_op_id_t async_op_id, vnet_crypto_op_id_t sync_op_id, u8 tag_len, u8 aad_len, u32 *nb_elts_processed, u32 *enqueue_thread_idx)
#define vec_elt_at_index(v, i)
Get vector value at index i checking that i is in bounds.
#define clib_error_return(e, args...)
vnet_crypto_op_t * crypto_ops
vnet_crypto_op_t * chained_integ_ops
vlib_worker_thread_t * vlib_worker_threads
crypto_sw_scheduler_queue_t * queues[VNET_CRYPTO_ASYNC_OP_N_IDS]
int crypto_sw_scheduler_set_worker_crypto(u32 worker_idx, u8 enabled)
#define CRYPTO_SW_SCHEDULER_QUEUE_SIZE
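The two queue macros presumably form a power-of-two ring; a sketch of the assumed relationship (the value 64 is illustrative):

#define CRYPTO_SW_SCHEDULER_QUEUE_SIZE 64
#define CRYPTO_SW_SCHEDULER_QUEUE_MASK (CRYPTO_SW_SCHEDULER_QUEUE_SIZE - 1)
/* a monotonically increasing index wraps via: idx & MASK */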
clib_error_t * crypto_sw_scheduler_api_init(vlib_main_t *vm)
vlib_thread_main_t vlib_thread_main
#define VNET_CRYPTO_OP_FLAG_HMAC_CHECK
u32 buffer_indices[VNET_CRYPTO_FRAME_SIZE]
#define VNET_CRYPTO_OP_FLAG_INIT_IV
#define CLIB_PREFETCH(addr, size, type)
static_always_inline vnet_crypto_async_frame_t * crypto_sw_scheduler_dequeue_link(vlib_main_t *vm, vnet_crypto_async_op_id_t async_op_id, vnet_crypto_op_id_t sync_crypto_op_id, vnet_crypto_op_id_t sync_integ_op_id, u16 digest_len, u8 is_enc, u32 *nb_elts_processed, u32 *enqueue_thread_idx)
#define vec_free(V)
Free vector's memory (no header).
#define VNET_CRYPTO_FRAME_STATE_SUCCESS
#define CRYPTO_SW_SCHEDULER_QUEUE_MASK
#define VNET_CRYPTO_FRAME_STATE_ELT_ERROR
#define VLIB_CLI_COMMAND(x,...)
void vlib_cli_output(vlib_main_t *vm, char *fmt,...)
vnet_crypto_async_frame_t * jobs[0]
#define VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS
u32 vnet_crypto_process_chained_ops(vlib_main_t *vm, vnet_crypto_op_t ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops)
u8 flags
shares the same VNET_CRYPTO_OP_FLAG_* values
u32 vnet_crypto_key_index_t
crypto_sw_scheduler_per_thread_data_t * per_thread_data
#define foreach_crypto_aead_async_alg
async crypto
static_always_inline void process_ops(vlib_main_t *vm, vnet_crypto_async_frame_t *f, vnet_crypto_op_t *ops, u8 *state)
vnet_crypto_async_op_id_t
static_always_inline void clib_memset_u8(void *p, u8 val, uword count)
vnet_crypto_async_op_id_t op
#define clib_atomic_bool_cmp_and_swap(addr, old, new)
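A sketch of how crypto_sw_scheduler_get_pending_frame() plausibly claims a frame with this macro:

if (clib_atomic_bool_cmp_and_swap (&f->state,
				   VNET_CRYPTO_FRAME_STATE_PENDING,
				   VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS))
  return f;	/* the CAS winner owns the frame */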
static_always_inline vnet_crypto_key_t * vnet_crypto_get_key(vnet_crypto_key_index_t index)
#define vec_len(v)
Number of elements in vector (rvalue-only, NULL tolerant)
u32 next_buffer
Next buffer for this linked-list of buffers.
vlib_frame_t * frame
vlib_buffer_t
VLIB buffer representation.
crypto_sw_scheduler_main_t crypto_sw_scheduler_main
vnet_crypto_op_status_t status
static void * clib_mem_alloc_aligned(uword size, uword align)
static vlib_thread_main_t * vlib_get_thread_main()
static u32 vlib_num_workers()
#define vec_foreach(var, vec)
Vector iterator.
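A minimal usage sketch of the vector iterator (v and sum are hypothetical):

u32 *elt, sum = 0;
vec_foreach (elt, v)	/* elt points at each element of v in turn */
  sum += *elt;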
#define CLIB_CACHE_LINE_BYTES
u32 vnet_crypto_register_engine(vlib_main_t *vm, char *name, int prio, char *desc)
static clib_error_t * sw_scheduler_set_worker_crypto(vlib_main_t *vm, unformat_input_t *input, vlib_cli_command_t *cmd)
static vlib_buffer_t * vlib_get_buffer(vlib_main_t *vm, u32 buffer_index)
Translate buffer index into buffer pointer.
static_always_inline vnet_crypto_async_frame_t * crypto_sw_scheduler_get_pending_frame(crypto_sw_scheduler_queue_t *q)
vnet_crypto_op_status_t status
vnet_crypto_async_frame_elt_t elts[VNET_CRYPTO_FRAME_SIZE]
static u32 vlib_get_worker_thread_index(u32 worker_index)