FD.io VPP v21.10.1-2-g0a485f517 (Vector Packet Processing): cryptodev.h
#ifndef included_cryptodev_h
#define included_cryptodev_h

#include <rte_cryptodev.h>

#define CRYPTODEV_NB_CRYPTO_OPS    1024
#define CRYPTODEV_CACHE_QUEUE_SIZE VNET_CRYPTO_FRAME_POOL_SIZE
#define CRYPTODEV_CACHE_QUEUE_MASK (VNET_CRYPTO_FRAME_POOL_SIZE - 1)
#define CRYPTODEV_MAX_INFLIGHT     (CRYPTODEV_NB_CRYPTO_OPS - 1)
#define CRYPTODEV_AAD_MASK         (CRYPTODEV_NB_CRYPTO_OPS - 1)
#define CRYPTODEV_DEQ_CACHE_SZ     32
#define CRYPTODEV_NB_SESSION       10240
#define CRYPTODEV_MAX_IV_SIZE      16
#define CRYPTODEV_MAX_AAD_SIZE     16
#define CRYPTODEV_MAX_N_SGL        8

#define CRYPTODEV_IV_OFFSET  (offsetof (cryptodev_op_t, iv))
#define CRYPTODEV_AAD_OFFSET (offsetof (cryptodev_op_t, aad))
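CRYPTODEV_IV_OFFSET and CRYPTODEV_AAD_OFFSET are byte offsets of the per-operation IV and AAD scratch space, measured from the start of cryptodev_op_t, which begins with the DPDK struct rte_crypto_op. The following is a minimal sketch of how such offsets are typically consumed on the DPDK side; the function names and the 12-byte IV length are illustrative assumptions, not taken from this header, and the VPP base types (u8) plus this header are assumed to be in scope.

#include <rte_cryptodev.h>

/* Illustrative only: the xform's iv.offset is interpreted by the PMD as an
 * offset from the start of the rte_crypto_op, so pointing it at the iv[]
 * member of cryptodev_op_t lets the driver find the IV stored in the op. */
static void
example_set_iv_offset (struct rte_crypto_sym_xform *xform)
{
  xform->type = RTE_CRYPTO_SYM_XFORM_AEAD;
  xform->aead.iv.offset = CRYPTODEV_IV_OFFSET; /* offsetof (cryptodev_op_t, iv) */
  xform->aead.iv.length = 12;                  /* e.g. 96-bit GCM IV (assumption) */
}

/* The PMD-side view of the same offset: resolve the IV pointer from the op. */
static inline u8 *
example_op_iv_ptr (struct rte_crypto_op *op)
{
  return rte_crypto_op_ctod_offset (op, u8 *, CRYPTODEV_IV_OFFSET);
}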
#define foreach_vnet_aead_crypto_conversion                                   \
  _ (AES_128_GCM, AEAD, AES_GCM, 12, 16, 8, 16)                               \
  _ (AES_128_GCM, AEAD, AES_GCM, 12, 16, 12, 16)                              \
  _ (AES_192_GCM, AEAD, AES_GCM, 12, 16, 8, 24)                               \
  _ (AES_192_GCM, AEAD, AES_GCM, 12, 16, 12, 24)                              \
  _ (AES_256_GCM, AEAD, AES_GCM, 12, 16, 8, 32)                               \
  _ (AES_256_GCM, AEAD, AES_GCM, 12, 16, 12, 32)
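Each _() entry maps one VPP AEAD algorithm onto its DPDK counterpart; judging by the per-entry arguments, the columns are (VNET algo, xform type, DPDK algo, IV length, tag length, AAD length, key length), all in bytes. A minimal sketch of how a consumer could expand this X-macro list; the function and format string are illustrative, not VPP code.

#include <stdio.h>

static void
example_dump_aead_conversions (void)
{
  /* Temporarily define _() so each list entry becomes one printf call. */
#define _(n, t, alg, iv, tag, aad, key)                                       \
  printf ("VNET %-12s -> DPDK %-8s iv=%u tag=%u aad=%u key=%u\n", #n, #alg,   \
          (unsigned) iv, (unsigned) tag, (unsigned) aad, (unsigned) key);
  foreach_vnet_aead_crypto_conversion
#undef _
}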
#define foreach_cryptodev_link_async_alg                                      \
  _ (AES_128_CBC, AES_CBC, 16, MD5, 12)                                       \
  _ (AES_192_CBC, AES_CBC, 24, MD5, 12)                                       \
  _ (AES_256_CBC, AES_CBC, 32, MD5, 12)                                       \
  _ (AES_128_CBC, AES_CBC, 16, SHA1, 12)                                      \
  _ (AES_192_CBC, AES_CBC, 24, SHA1, 12)                                      \
  _ (AES_256_CBC, AES_CBC, 32, SHA1, 12)                                      \
  _ (AES_128_CBC, AES_CBC, 16, SHA224, 14)                                    \
  _ (AES_192_CBC, AES_CBC, 24, SHA224, 14)                                    \
  _ (AES_256_CBC, AES_CBC, 32, SHA224, 14)                                    \
  _ (AES_128_CBC, AES_CBC, 16, SHA256, 16)                                    \
  _ (AES_192_CBC, AES_CBC, 24, SHA256, 16)                                    \
  _ (AES_256_CBC, AES_CBC, 32, SHA256, 16)                                    \
  _ (AES_128_CBC, AES_CBC, 16, SHA384, 24)                                    \
  _ (AES_192_CBC, AES_CBC, 24, SHA384, 24)                                    \
  _ (AES_256_CBC, AES_CBC, 32, SHA384, 24)                                    \
  _ (AES_128_CBC, AES_CBC, 16, SHA512, 32)                                    \
  _ (AES_192_CBC, AES_CBC, 24, SHA512, 32)                                    \
  _ (AES_256_CBC, AES_CBC, 32, SHA512, 32)
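The linked-algorithm list pairs an AES-CBC cipher (with its key length in bytes) with an integrity algorithm (with its truncated digest length in bytes). Below is a small sketch, again illustrative rather than VPP code, of how the same list could be expanded into a static capability table; the type and array names are hypothetical.

/* Hypothetical lookup table built by expanding the list above once. */
typedef struct
{
  const char *cipher;   /* cipher name */
  const char *auth;     /* integrity algorithm name */
  unsigned key_len;     /* cipher key length, bytes */
  unsigned digest_len;  /* truncated digest length, bytes */
} example_linked_alg_t;

static const example_linked_alg_t example_linked_algs[] = {
#define _(n, c, klen, h, dlen) { #c, #h, klen, dlen },
  foreach_cryptodev_link_async_alg
#undef _
};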
typedef enum
{
  CRYPTODEV_OP_TYPE_ENCRYPT = 0,
  CRYPTODEV_OP_TYPE_DECRYPT,
} cryptodev_op_type_t;

typedef struct
{
  struct rte_cryptodev_sym_session ***keys;
} cryptodev_key_t;

typedef struct
{
  enum rte_crypto_sym_xform_type xform_type;
  union
  {
    struct
    {
      enum rte_crypto_auth_algorithm algo;
      /* ... */
    } auth;
    struct
    {
      enum rte_crypto_cipher_algorithm algo;
      /* ... */
    } cipher;
    struct
    {
      enum rte_crypto_aead_algorithm algo;
      /* ... */
    } aead;
  };
} cryptodev_capability_t;

typedef struct
{
  struct rte_mempool *sess_pool;
  struct rte_mempool *sess_priv_pool;
} cryptodev_numa_data_t;

typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  struct rte_crypto_op op;
  struct rte_crypto_sym_op sop;
  u8 iv[CRYPTODEV_MAX_IV_SIZE];
  u8 aad[CRYPTODEV_MAX_AAD_SIZE];
  vnet_crypto_async_frame_t *frame;
  /* ... */
} cryptodev_op_t;

typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  vlib_buffer_t *b[VNET_CRYPTO_FRAME_SIZE];
  union
  {
    struct
    {
      /* rte_crypto_op based data path */
      struct rte_mempool *cop_pool;
      /* ... */
    };
    struct
    {
      /* raw API based data path */
      struct rte_crypto_raw_dp_ctx *ctx;
      struct rte_ring *cached_frame;
      struct rte_cryptodev_sym_session *reset_sess;
      /* ... */
    };
  };
  /* ... */
} cryptodev_engine_thread_t;

/* ... */

typedef struct
{
  cryptodev_numa_data_t *per_numa_data;
  cryptodev_key_t *keys;
  cryptodev_engine_thread_t *per_thread_data;
  enum rte_iova_mode iova_mode;
  cryptodev_inst_t *cryptodev_inst;
  clib_bitmap_t *active_cdev_inst_mask;
  cryptodev_capability_t *supported_caps;
  /* ... */
} cryptodev_main_t;

extern cryptodev_main_t cryptodev_main;

static_always_inline void
cryptodev_mark_frame_err_status (vnet_crypto_async_frame_t *f,
                                 vnet_crypto_op_status_t s)
{
  u32 n_elts = f->n_elts, i;

  for (i = 0; i < n_elts; i++)
    f->elts[i].status = s;
  f->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
}

int cryptodev_session_create (vlib_main_t *vm, vnet_crypto_key_index_t idx,
                              u32 aad_len);

void cryptodev_sess_handler (vlib_main_t *vm, vnet_crypto_key_op_t kop,
                             vnet_crypto_key_index_t idx, u32 aad_len);

int cryptodev_check_cap_support (struct rte_cryptodev_sym_capability_idx *idx,
                                 u32 key_size, u32 digest_size, u32 aad_size);

clib_error_t *cryptodev_register_cop_hdl (vlib_main_t *vm, u32 eidx);

clib_error_t *cryptodev_register_raw_hdl (vlib_main_t *vm, u32 eidx);

clib_error_t *dpdk_cryptodev_init (vlib_main_t *vm);

#endif
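As a usage illustration, cryptodev_check_cap_support() takes a DPDK symmetric-capability index plus the key, digest and AAD sizes to probe. The caller below is hypothetical, not part of VPP, and the return-value semantics (nonzero taken to mean the combination is supported by the configured cryptodev instances) are an assumption.

#include <rte_cryptodev.h>

/* Hypothetical probe: is AES-GCM with a 16-byte key, 16-byte tag and
 * 8-byte AAD usable on the configured cryptodevs? */
static int
example_probe_aes_128_gcm (void)
{
  struct rte_cryptodev_sym_capability_idx cap_idx = {
    .type = RTE_CRYPTO_SYM_XFORM_AEAD,
    .algo.aead = RTE_CRYPTO_AEAD_AES_GCM,
  };

  return cryptodev_check_cap_support (&cap_idx, 16, 16, 8);
}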