#include <vpp/app/version.h>

#include <rte_bus_vdev.h>
#include <rte_cryptodev.h>
#include <rte_crypto_sym.h>
#include <rte_crypto.h>
#include <rte_cryptodev_pmd.h>
#include <rte_config.h>
#if CLIB_DEBUG > 0
#define always_inline static inline
#else
#define always_inline static inline __attribute__ ((__always_inline__))
#endif
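/* Build the DPDK AEAD transform for an AES-GCM key: the code below zeroes
   the xform, marks it as AEAD, rejects any key that is not AES-GCM, and
   then fills in the GCM parameters. */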
struct rte_crypto_aead_xform *aead_xform = &xform->aead;
memset (xform, 0, sizeof (*xform));
xform->type = RTE_CRYPTO_SYM_XFORM_AEAD;
if (key->alg != VNET_CRYPTO_ALG_AES_128_GCM &&
    key->alg != VNET_CRYPTO_ALG_AES_192_GCM &&
    key->alg != VNET_CRYPTO_ALG_AES_256_GCM)
  return -1;
aead_xform->algo = RTE_CRYPTO_AEAD_AES_GCM;
aead_xform->op = (op_type == CRYPTODEV_OP_TYPE_ENCRYPT) ?
  RTE_CRYPTO_AEAD_OP_ENCRYPT : RTE_CRYPTO_AEAD_OP_DECRYPT;
aead_xform->aad_length = aad_len;
aead_xform->digest_length = 16; /* GCM tag is 16 bytes */
aead_xform->iv.length = 12;	/* GCM IV is 12 bytes */
aead_xform->key.data = key->data;
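/* For linked (cipher + HMAC) keys two chained transforms are prepared:
   encryption runs cipher-then-auth, decryption runs auth-then-cipher, so
   the position of the two xforms in the array depends on the operation
   type. */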
struct rte_crypto_sym_xform *xform_cipher, *xform_auth;
enum rte_crypto_cipher_algorithm cipher_algo = ~0;
enum rte_crypto_auth_algorithm auth_algo = ~0;
if (!key_cipher || !key_auth)
  return -1;
xform_cipher = xforms;
xform_auth = xforms + 1;
xform_cipher->cipher.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT;
xform_auth->auth.op = RTE_CRYPTO_AUTH_OP_GENERATE;
xform_cipher = xforms + 1;
xform_auth = xforms;
xform_cipher->cipher.op = RTE_CRYPTO_CIPHER_OP_DECRYPT;
xform_auth->auth.op = RTE_CRYPTO_AUTH_OP_VERIFY;
xform_cipher->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
xform_auth->type = RTE_CRYPTO_SYM_XFORM_AUTH;
xforms->next = xforms + 1;
switch (key->async_alg)
  {
#define _(a, b, c, d, e)                                                      \
  case VNET_CRYPTO_ALG_##a##_##d##_TAG##e:                                    \
    cipher_algo = RTE_CRYPTO_CIPHER_##b;                                      \
    auth_algo = RTE_CRYPTO_AUTH_##d##_HMAC;                                   \
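/* Illustration (an assumption based on vnet's linked-alg list): for an
   entry like _ (AES_128_CBC, AES_CBC, 16, SHA1, 12) the macro above
   expands to

     case VNET_CRYPTO_ALG_AES_128_CBC_SHA1_TAG12:
       cipher_algo = RTE_CRYPTO_CIPHER_AES_CBC;
       auth_algo = RTE_CRYPTO_AUTH_SHA1_HMAC;

   mapping each linked vnet algorithm to its DPDK cipher/auth pair. */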
xform_cipher->cipher.algo = cipher_algo;
xform_cipher->cipher.key.data = key_cipher->data;
xform_cipher->cipher.key.length = vec_len (key_cipher->data);
xform_cipher->cipher.iv.length = 16;
xform_auth->auth.algo = auth_algo;
xform_auth->auth.digest_length = digest_len;
xform_auth->auth.key.data = key_auth->data;
xform_auth->auth.key.length = vec_len (key_auth->data);
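/* Session teardown: a DPDK symmetric session must be cleared on every
   device that initialized private data for it before the session itself
   can be freed back to its mempool. */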
n_devs = rte_cryptodev_count ();

for (i = 0; i < n_devs; i++)
  rte_cryptodev_sym_session_clear (i, sess);

rte_cryptodev_sym_session_free (sess);
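/* The check_cipher_support / check_auth_support / check_aead_support
   helpers below scan the aggregated capability list (vcap) for an entry
   matching the requested algorithm and parameter sizes. */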
if (vcap->xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
  continue;
if (vcap->cipher.algo != algo)
  continue;
if (vcap->xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
  continue;
if (vcap->auth.algo != algo)
  continue;

if (*s == digest_size)
  return 1;
check_aead_support (enum rte_crypto_aead_algorithm algo, u32 key_size,
		    u32 digest_size, u32 aad_size)
{
  u32 key_match = 0, digest_match = 0, aad_match = 0;
if (vcap->xform_type != RTE_CRYPTO_SYM_XFORM_AEAD)
  continue;
if (vcap->aead.algo != algo)
  continue;

if (*s == digest_size)
  digest_match = 1;

if (key_match == 1 && digest_match == 1 && aad_match == 1)
  return 1;
switch (key->async_alg)
  {
#define _(a, b, c, d, e)                                                      \
  case VNET_CRYPTO_ALG_##a##_##d##_TAG##e:                                    \
    if (check_cipher_support (RTE_CRYPTO_CIPHER_##b, c) &&                    \
	check_auth_support (RTE_CRYPTO_AUTH_##d##_HMAC, e))                   \
#define _(a, b, c, d, e, f, g)                                                \
  if (key->alg == VNET_CRYPTO_ALG_##a)                                        \
    if (check_aead_support (RTE_CRYPTO_AEAD_##c, g, e, f))                    \
if (matched < 2)
  return 0;
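/* Session creation: one pair of chained xforms is prepared per direction
   (xforms_enc for encryption, xforms_dec for decryption) and a DPDK
   symmetric session is allocated from the session mempool for each. */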
struct rte_mempool *sess_pool, *sess_priv_pool;
struct rte_crypto_sym_xform xforms_enc[2] = { { 0 } };
struct rte_crypto_sym_xform xforms_dec[2] = { { 0 } };
/* one session per direction (encrypt / decrypt) */
rte_cryptodev_sym_session_create (sess_pool);

rte_cryptodev_sym_session_create (sess_pool);
struct rte_cryptodev *cdev = rte_cryptodev_pmd_get_dev (dev_id);
u32 driver_id = cdev->driver_id;
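/* Each DPDK driver keeps its own private data area inside a symmetric
   session; rte_cryptodev_sym_session_init () populates that area for a
   given device from the prepared xform chain and the private-data
   mempool. */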
/* sess_enc / sess_dec stand for the per-direction session handles
   created above (hypothetical names) */
ret = rte_cryptodev_sym_session_init (dev_id, sess_enc, xforms_enc,
				      sess_priv_pool);

ret = rte_cryptodev_sym_session_init (dev_id, sess_dec, xforms_dec,
				      sess_priv_pool);
u32 cryptodev_inst_index,

cryptodev_inst_index, 1);
u32 inst = va_arg (*args, u32);

struct rte_cryptodev_info info;

rte_cryptodev_info_get (cit->dev_id, &info);
s = format (s, "%-25s%-10u", info.device->name, cit->q_id);
s = format (s, "%s\n", "free");
.path = "show cryptodev assignment",
.short_help = "show cryptodev assignment",
u32 thread_present = 0, inst_present = 0;

else if (unformat (line_input, "resource %u", &inst_index))

if (!thread_present || !inst_present)
.path = "set cryptodev assignment",
.short_help = "set cryptodev assignment thread <thread_index> "
	      "resource <inst_index>",
struct rte_cryptodev_info info;
u32 n_cryptodev = rte_cryptodev_count ();

for (i = 0; i < n_cryptodev; i++)
  {
    rte_cryptodev_info_get (i, &info);
    q_count += info.max_nb_queue_pairs;
  }
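/* Device bring-up: configure every queue pair of a symmetric-crypto
   capable device on its own NUMA node, then start the device. */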
struct rte_cryptodev_config cfg;
struct rte_cryptodev_info info;

rte_cryptodev_info_get (cryptodev_id, &info);

/* only symmetric-crypto capable devices are usable by this engine */
if (!(info.feature_flags & RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO))
  return -1;

cfg.socket_id = info.device->numa_node;
cfg.nb_queue_pairs = info.max_nb_queue_pairs;

rte_cryptodev_configure (cryptodev_id, &cfg);
for (i = 0; i < info.max_nb_queue_pairs; i++)
  {
    struct rte_cryptodev_qp_conf qp_cfg;

    qp_cfg.mp_session = 0;
    qp_cfg.mp_session_private = 0;

    ret = rte_cryptodev_queue_pair_setup (cryptodev_id, i, &qp_cfg,
					  info.device->numa_node);
    if (ret)
      {
	clib_warning ("Cryptodev: Configure device %u queue %u failed %d",
		      cryptodev_id, i, ret);
	break;
      }
  }
if (i != info.max_nb_queue_pairs)
  return -1;

rte_cryptodev_start (cryptodev_id);
for (i = 0; i < info.max_nb_queue_pairs; i++)
  {
    vec_add2 (cmt->cryptodev_inst, cdev_inst, 1);
    cdev_inst->desc = vec_new (char, strlen (info.device->name) + 10);
    cdev_inst->dev_id = cryptodev_id;

    snprintf (cdev_inst->desc, strlen (info.device->name) + 9,
	      "%s_q%u", info.device->name, i);
  }
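/* One cryptodev_inst entry is created per (device, queue pair); its
   "<pmd-name>_q<n>" description is what the debug CLI prints when showing
   assignments. */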
if (*value == param_value)
u32 key_size, u32 digest_size, u32 aad_size)
if (idx->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
    cap->auth.algo == idx->algo.auth &&

if (idx->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
    cap->cipher.algo == idx->algo.cipher &&

if (idx->type == RTE_CRYPTO_SYM_XFORM_AEAD &&
    cap->aead.algo == idx->algo.aead &&
u32 param_size_max, u32 increment)

for (cap_param_size = param_size_min; cap_param_size <= param_size_max;
     cap_param_size += increment)
  if ((*param_sizes)[i] == cap_param_size)
case RTE_CRYPTO_SYM_XFORM_AUTH:

case RTE_CRYPTO_SYM_XFORM_CIPHER:

case RTE_CRYPTO_SYM_XFORM_AEAD:
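/* The helper below walks one device's capability array and shrinks a
   previously collected capability (temp_cap) so that only parameter sizes
   this device also supports survive; the size-range helper whose
   signature appears above does the per-range filtering. */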
const struct rte_cryptodev_capabilities *dev_caps)

const struct rte_cryptodev_capabilities *cap = &dev_caps[0];

while (cap->op != RTE_CRYPTO_OP_TYPE_UNDEFINED)
  {
    if (cap->sym.xform_type == temp_cap->xform_type)
      switch (cap->sym.xform_type)
	{
	case RTE_CRYPTO_SYM_XFORM_CIPHER:
	  if (cap->sym.cipher.algo == temp_cap->cipher.algo)
	    (&temp_cap->cipher.key_sizes, cap->sym.cipher.key_size.min,
	     cap->sym.cipher.key_size.max,
	     cap->sym.cipher.key_size.increment);
	  break;
	case RTE_CRYPTO_SYM_XFORM_AUTH:
	  if (cap->sym.auth.algo == temp_cap->auth.algo)
	    (&temp_cap->auth.digest_sizes, cap->sym.auth.digest_size.min,
	     cap->sym.auth.digest_size.max,
	     cap->sym.auth.digest_size.increment);
	  break;
	case RTE_CRYPTO_SYM_XFORM_AEAD:
	  if (cap->sym.aead.algo == temp_cap->aead.algo)
	    {
	      (&temp_cap->aead.key_sizes, cap->sym.aead.key_size.min,
	       cap->sym.aead.key_size.max,
	       cap->sym.aead.key_size.increment);
	      (&temp_cap->aead.aad_sizes, cap->sym.aead.aad_size.min,
	       cap->sym.aead.aad_size.max,
	       cap->sym.aead.aad_size.increment);
	      (&temp_cap->aead.digest_sizes, cap->sym.aead.digest_size.min,
	       cap->sym.aead.digest_size.max,
	       cap->sym.aead.digest_size.increment);
	    }
	  break;
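/* Capability discovery: walk each configured device's capability array
   once, recording every advertised cipher/auth/AEAD algorithm together
   with the full list of key, digest and AAD sizes it supports. */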
struct rte_cryptodev_info dev_info;
u32 previous_dev_id, dev_id;

const struct rte_cryptodev_capabilities *cap;
const struct rte_cryptodev_capabilities *dev_caps;

rte_cryptodev_info_get (dev_inst->dev_id, &dev_info);
cap = &dev_info.capabilities[0];
while (cap->op != RTE_CRYPTO_OP_TYPE_UNDEFINED)
  {
    /* skip non-symmetric capability entries */
    if (cap->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
      {
	cap++;
	continue;
      }
switch (cap->sym.xform_type)
  {
  case RTE_CRYPTO_SYM_XFORM_CIPHER:
    tmp_cap.cipher.key_sizes = 0;
    tmp_cap.cipher.algo = cap->sym.cipher.algo;
    /* an increment of 0 means the device supports one fixed size */
    for (param = cap->sym.cipher.key_size.min;
	 param <= cap->sym.cipher.key_size.max;
	 param += cap->sym.cipher.key_size.increment)
      {
	if (cap->sym.cipher.key_size.increment == 0)
	  break;
      }
    break;
  case RTE_CRYPTO_SYM_XFORM_AUTH:
    tmp_cap.auth.algo = cap->sym.auth.algo;
    tmp_cap.auth.digest_sizes = 0;
    for (param = cap->sym.auth.digest_size.min;
	 param <= cap->sym.auth.digest_size.max;
	 param += cap->sym.auth.digest_size.increment)
      {
	if (cap->sym.auth.digest_size.increment == 0)
	  break;
      }
    break;
  case RTE_CRYPTO_SYM_XFORM_AEAD:
    tmp_cap.aead.key_sizes = 0;
    tmp_cap.aead.aad_sizes = 0;
    tmp_cap.aead.digest_sizes = 0;
    tmp_cap.aead.algo = cap->sym.aead.algo;
    for (param = cap->sym.aead.key_size.min;
	 param <= cap->sym.aead.key_size.max;
	 param += cap->sym.aead.key_size.increment)
      {
	if (cap->sym.aead.key_size.increment == 0)
	  break;
      }
    for (param = cap->sym.aead.aad_size.min;
	 param <= cap->sym.aead.aad_size.max;
	 param += cap->sym.aead.aad_size.increment)
      {
	if (cap->sym.aead.aad_size.increment == 0)
	  break;
      }
    for (param = cap->sym.aead.digest_size.min;
	 param <= cap->sym.aead.digest_size.max;
	 param += cap->sym.aead.digest_size.increment)
      {
	if (cap->sym.aead.digest_size.increment == 0)
	  break;
      }
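/* Intersection pass: each recorded capability is then checked against
   every other configured device, re-reading device info only when the
   device id changes; a capability survives only if all devices support
   it. */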
u32 cap_is_supported = 1;

dev_id = dev_inst->dev_id;
if (previous_dev_id != dev_id)
  {
    previous_dev_id = dev_id;
    rte_cryptodev_info_get (dev_id, &dev_info);
    dev_caps = &dev_info.capabilities[0];
  }

if (!cap_is_supported)
  break;

if (cap_is_supported)
if (n_queues < n_workers)
  return -1;

for (i = 0; i < rte_cryptodev_count (); i++)
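/* Pool sizing: take the maximum private session size and raw data-path
   context size over all configured devices, so that one mempool element
   fits any device. */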
u32 max_sess = 0, max_dp = 0;

u32 sess_sz = rte_cryptodev_sym_get_private_session_size (cinst->dev_id);
u32 dp_sz = rte_cryptodev_get_raw_dp_ctx_size (cinst->dev_id);

max_sess = clib_max (sess_sz, max_sess);
max_dp = clib_max (dp_sz, max_dp);

*max_sess_sz = max_sess;
*max_dp_sz = max_dp;
rte_mempool_free (numa_data->sess_pool);

struct rte_mempool *mp;
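/* Per-NUMA mempools: the session pool is created with DPDK's dedicated
   session-pool helper, while the private-data pool is a plain mempool
   sized from the maximum private session size computed above. */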
name = format (0, "vcryptodev_sess_pool_%u%c", numa, 0);
mp = rte_cryptodev_sym_session_pool_create (
name = format (0, "cryptodev_sess_pool_%u%c", numa, 0);

0, 0, NULL, NULL, NULL, NULL, numa, 0);
1142 "DPDK Cryptodev Engine");