#include <fcntl.h>

#include <intel-ipsec-mb.h>

#include <vnet/vnet.h>
#include <vnet/plugin/plugin.h>
#include <vpp/app/version.h>
#include <vnet/crypto/crypto.h>
#include <vppinfra/cpu.h>

typedef struct
{
  MB_MGR *mgr;
  __m128i cbc_iv;
} ipsecmb_per_thread_data_t;

typedef struct ipsecmb_main_t_
{
  ipsecmb_per_thread_data_t *per_thread_data;
} ipsecmb_main_t;

typedef void (*ase_gcm_pre_t) (const void *key,
                               struct gcm_key_data * key_data);

/* AES GCM key-expansion VFT */
typedef struct ipsecmb_gcm_pre_vft_t_
{
  ase_gcm_pre_t ase_gcm_pre_128;
  ase_gcm_pre_t ase_gcm_pre_192;
  ase_gcm_pre_t ase_gcm_pre_256;
} ipsecmb_gcm_pre_vft_t;

static ipsecmb_gcm_pre_vft_t ipsecmb_gcm_pre_vft;
#define INIT_IPSEC_MB_GCM_PRE(_arch)                                   \
  ipsecmb_gcm_pre_vft.ase_gcm_pre_128 = aes_gcm_pre_128_##_arch;       \
  ipsecmb_gcm_pre_vft.ase_gcm_pre_192 = aes_gcm_pre_192_##_arch;       \
  ipsecmb_gcm_pre_vft.ase_gcm_pre_256 = aes_gcm_pre_256_##_arch;

static ipsecmb_main_t ipsecmb_main;

/* (opcode, block-size token, one-block hash fn) */
#define foreach_ipsecmb_hmac_op                                \
  _(SHA1, SHA1, sha1)                                          \
  _(SHA256, SHA_256, sha256)                                   \
  _(SHA384, SHA_384, sha384)                                   \
  _(SHA512, SHA_512, sha512)

/* (opcode, key-len-bits, key-len-bytes, iv-len-bytes) */
#define foreach_ipsecmb_cbc_cipher_op                          \
  _(AES_128_CBC, 128, 16, 16)                                  \
  _(AES_192_CBC, 192, 24, 16)                                  \
  _(AES_256_CBC, 256, 32, 16)

/* (opcode, key-len-bits, key-len-bytes, iv-len-bytes) */
#define foreach_ipsecmb_gcm_cipher_op                          \
  _(AES_128_GCM, 128, 16, 12)                                  \
  _(AES_192_GCM, 192, 24, 12)                                  \
  _(AES_256_GCM, 256, 32, 12)

static_always_inline void
hash_expand_keys (const MB_MGR * mgr,
                  const u8 * key,
                  u32 length,
                  u8 block_size,
                  u8 ipad[256], u8 opad[256], hash_one_block_t fn)
{
  u8 buf[block_size];
  int i = 0;

  if (length > block_size)
    return;

  /* ipad: hash one block of (key XOR 0x36..36) */
  memset (buf, 0x36, sizeof (buf));
  for (i = 0; i < length; i++)
    buf[i] ^= key[i];
  fn (buf, ipad);

  /* opad: hash one block of (key XOR 0x5c..5c) */
  memset (buf, 0x5c, sizeof (buf));
  for (i = 0; i < length; i++)
    buf[i] ^= key[i];
  fn (buf, opad);
}
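/*
 * hash_expand_keys () is the standard RFC 2104 HMAC optimization:
 *   HMAC (K, m) = H ((K ^ opad) || H ((K ^ ipad) || m))
 * Hashing one block of (K ^ 0x36..) and (K ^ 0x5c..) up front lets every
 * job resume from those stored mid-states instead of re-hashing the key
 * per packet.  A minimal usage sketch built from names in this file
 * (SHA-256 picked arbitrarily for illustration):
 *
 *   u8 ipad[256], opad[256];
 *   hash_expand_keys (ptd->mgr, op->key, op->key_len, SHA_256_BLOCK_SIZE,
 *                     ipad, opad, ptd->mgr->sha256_one_block);
 *   job->u.HMAC._hashed_auth_key_xor_ipad = ipad;
 *   job->u.HMAC._hashed_auth_key_xor_opad = opad;
 */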
static_always_inline void
ipsecmb_retire_hmac_job (JOB_AES_HMAC * job, u32 * n_fail)
{
  vnet_crypto_op_t *op = job->user_data;

  if (STS_COMPLETED != job->status)
    {
      op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
      *n_fail = *n_fail + 1;
    }
  else
    op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;

  if (op->flags & VNET_CRYPTO_OP_FLAG_HMAC_CHECK)
    {
      if ((memcmp (op->digest, job->auth_tag_output, op->digest_len)))
        {
          *n_fail = *n_fail + 1;
          op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
        }
    }
  else
    clib_memcpy_fast (op->digest, job->auth_tag_output, op->digest_len);
}
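/*
 * The job-to-op linkage runs through job->user_data, so a retire
 * function can recover its op no matter in which order the multi-buffer
 * manager completes jobs.  For a truncated-HMAC verify op the caller
 * side looks roughly like this (field values are illustrative):
 *
 *   op->digest = received_icv;     // e.g. a 16-byte truncated ICV
 *   op->digest_len = 16;
 *   op->flags |= VNET_CRYPTO_OP_FLAG_HMAC_CHECK;
 *   // the retire path then memcmp()s the first 16 computed bytes
 *   // against received_icv instead of copying the digest out
 */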
static_always_inline u32
ipsecmb_ops_hmac_inline (vlib_main_t * vm,
                         const ipsecmb_per_thread_data_t * ptd,
                         vnet_crypto_op_t * ops[],
                         u32 n_ops,
                         u32 block_size,
                         hash_one_block_t fn, JOB_HASH_ALG alg)
{
  JOB_AES_HMAC *job;
  u32 i, n_fail = 0;
  u8 scratch[n_ops][64];

  /* queue all the jobs first ... */
  for (i = 0; i < n_ops; i++)
    {
      vnet_crypto_op_t *op = ops[i];
      u8 ipad[256], opad[256];

      hash_expand_keys (ptd->mgr, op->key, op->key_len,
                        block_size, ipad, opad, fn);

      job = IMB_GET_NEXT_JOB (ptd->mgr);

      job->src = op->src;
      job->hash_start_src_offset_in_bytes = 0;
      job->msg_len_to_hash_in_bytes = op->len;
      job->hash_alg = alg;
      job->auth_tag_output_len_in_bytes = op->digest_len;
      job->auth_tag_output = scratch[i];

      job->cipher_mode = NULL_CIPHER;
      job->cipher_direction = DECRYPT;
      job->chain_order = HASH_CIPHER;

      job->aes_key_len_in_bytes = op->key_len;

      job->u.HMAC._hashed_auth_key_xor_ipad = ipad;
      job->u.HMAC._hashed_auth_key_xor_opad = opad;
      job->user_data = op;

      job = IMB_SUBMIT_JOB (ptd->mgr);

      if (job)
        ipsecmb_retire_hmac_job (job, &n_fail);
    }

  /* ... then flush (i.e. complete) the rest */
  while ((job = IMB_FLUSH_JOB (ptd->mgr)))
    ipsecmb_retire_hmac_job (job, &n_fail);

  return n_ops - n_fail;
}
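/*
 * This is the canonical intel-ipsec-mb flow: IMB_SUBMIT_JOB () returns
 * NULL while the manager is still filling its SIMD lanes and starts
 * handing back completed jobs (possibly out of order) once they are
 * full; IMB_FLUSH_JOB () forces out the stragglers.  Stripped to its
 * skeleton, with retire () standing in for the retire helpers here:
 *
 *   job = IMB_GET_NEXT_JOB (mgr);
 *   // ... fill in the job fields ...
 *   if ((job = IMB_SUBMIT_JOB (mgr)))
 *     retire (job, &n_fail);           // a job completed early
 *   while ((job = IMB_FLUSH_JOB (mgr)))
 *     retire (job, &n_fail);           // drain the tail
 */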
#define _(a, b, c)                                                     \
static_always_inline u32                                               \
ipsecmb_ops_hmac_##a (vlib_main_t * vm,                                \
                      vnet_crypto_op_t * ops[],                        \
                      u32 n_ops)                                       \
{                                                                      \
  ipsecmb_per_thread_data_t *ptd;                                      \
  ipsecmb_main_t *imbm;                                                \
                                                                       \
  imbm = &ipsecmb_main;                                                \
  ptd = vec_elt_at_index (imbm->per_thread_data, vm->thread_index);    \
                                                                       \
  return ipsecmb_ops_hmac_inline (vm, ptd, ops, n_ops,                 \
                                  b##_BLOCK_SIZE,                      \
                                  ptd->mgr->c##_one_block,             \
                                  b);                                  \
}
foreach_ipsecmb_hmac_op;
#undef _

#define EXPANDED_KEY_N_BYTES (16 * 15)

static_always_inline void
ipsecmb_retire_cipher_job (JOB_AES_HMAC * job, u32 * n_fail)
{
  vnet_crypto_op_t *op = job->user_data;

  if (STS_COMPLETED != job->status)
    {
      op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
      *n_fail = *n_fail + 1;
    }
  else
    op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
}
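/*
 * Note the lock-free threading model shared by all the wrappers the
 * _() macros generate: every VPP worker owns its own MB_MGR, found by
 * indexing the per-thread vector with vm->thread_index, so jobs never
 * cross threads and no locking is needed:
 *
 *   ptd = vec_elt_at_index (imbm->per_thread_data, vm->thread_index);
 *   job = IMB_GET_NEXT_JOB (ptd->mgr);   // thread-local state only
 */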
static_always_inline u32
ipsecmb_ops_cbc_cipher_inline (vlib_main_t * vm,
                               ipsecmb_per_thread_data_t * ptd,
                               vnet_crypto_op_t * ops[],
                               u32 n_ops, u32 key_len, u32 iv_len,
                               keyexp_t fn, JOB_CIPHER_DIRECTION direction)
{
  JOB_AES_HMAC *job;
  u32 i, n_fail = 0;

  /* queue all the jobs first ... */
  for (i = 0; i < n_ops; i++)
    {
      u8 aes_enc_key_expanded[EXPANDED_KEY_N_BYTES];
      u8 aes_dec_key_expanded[EXPANDED_KEY_N_BYTES];
      vnet_crypto_op_t *op = ops[i];
      __m128i iv;

      fn (op->key, aes_enc_key_expanded, aes_dec_key_expanded);

      job = IMB_GET_NEXT_JOB (ptd->mgr);

      job->src = op->src;
      job->dst = op->dst;
      job->msg_len_to_cipher_in_bytes = op->len;
      job->cipher_start_src_offset_in_bytes = 0;

      job->hash_alg = NULL_HASH;
      job->cipher_mode = CBC;
      job->cipher_direction = direction;
      job->chain_order = (direction == ENCRYPT ? CIPHER_HASH : HASH_CIPHER);

      if ((direction == ENCRYPT) &&
          (op->flags & VNET_CRYPTO_OP_FLAG_INIT_IV))
        {
          iv = ptd->cbc_iv;
          _mm_storeu_si128 ((__m128i *) op->iv, iv);
          ptd->cbc_iv = _mm_aesenc_si128 (iv, iv);
        }

      job->aes_key_len_in_bytes = key_len;
      job->aes_enc_key_expanded = aes_enc_key_expanded;
      job->aes_dec_key_expanded = aes_dec_key_expanded;
      job->iv = op->iv;
      job->iv_len_in_bytes = iv_len;

      job->user_data = op;

      job = IMB_SUBMIT_JOB (ptd->mgr);

      if (job)
        ipsecmb_retire_cipher_job (job, &n_fail);
    }

  /* ... then flush (i.e. complete) the rest */
  while ((job = IMB_FLUSH_JOB (ptd->mgr)))
    ipsecmb_retire_cipher_job (job, &n_fail);

  return n_ops - n_fail;
}
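/*
 * Per-packet IVs for VNET_CRYPTO_OP_FLAG_INIT_IV encrypt ops come from
 * a per-thread 128-bit state: the op is handed the current state and
 * the state is then advanced by one AESENC round, which avoids a
 * /dev/urandom read per packet:
 *
 *   iv = ptd->cbc_iv;                           // current state
 *   _mm_storeu_si128 ((__m128i *) op->iv, iv);  // becomes this op's IV
 *   ptd->cbc_iv = _mm_aesenc_si128 (iv, iv);    // advance the state
 *
 * The state itself is seeded from /dev/urandom in
 * crypto_ipsecmb_iv_init () below.
 */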
#define _(a, b, c, d)                                                  \
static_always_inline u32                                               \
ipsecmb_ops_cbc_cipher_enc_##a (vlib_main_t * vm,                      \
                                vnet_crypto_op_t * ops[],              \
                                u32 n_ops)                             \
{                                                                      \
  ipsecmb_per_thread_data_t *ptd;                                      \
  ipsecmb_main_t *imbm;                                                \
                                                                       \
  imbm = &ipsecmb_main;                                                \
  ptd = vec_elt_at_index (imbm->per_thread_data, vm->thread_index);    \
                                                                       \
  return ipsecmb_ops_cbc_cipher_inline (vm, ptd, ops, n_ops, c, d,     \
                                        ptd->mgr->keyexp_##b,          \
                                        ENCRYPT);                      \
}
foreach_ipsecmb_cbc_cipher_op;
#undef _

#define _(a, b, c, d)                                                  \
static_always_inline u32                                               \
ipsecmb_ops_cbc_cipher_dec_##a (vlib_main_t * vm,                      \
                                vnet_crypto_op_t * ops[],              \
                                u32 n_ops)                             \
{                                                                      \
  ipsecmb_per_thread_data_t *ptd;                                      \
  ipsecmb_main_t *imbm;                                                \
                                                                       \
  imbm = &ipsecmb_main;                                                \
  ptd = vec_elt_at_index (imbm->per_thread_data, vm->thread_index);    \
                                                                       \
  return ipsecmb_ops_cbc_cipher_inline (vm, ptd, ops, n_ops, c, d,     \
                                        ptd->mgr->keyexp_##b,          \
                                        DECRYPT);                      \
}
foreach_ipsecmb_cbc_cipher_op;
#undef _

static_always_inline void
ipsecmb_retire_gcm_cipher_job (JOB_AES_HMAC * job,
                               u32 * n_fail, JOB_CIPHER_DIRECTION direction)
{
  vnet_crypto_op_t *op = job->user_data;

  if (STS_COMPLETED != job->status)
    {
      op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
      *n_fail = *n_fail + 1;
    }
  else
    op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;

  /* on decrypt, compare the computed tag against the received one */
  if (DECRYPT == direction)
    {
      if ((memcmp (op->tag, job->auth_tag_output, op->tag_len)))
        {
          *n_fail = *n_fail + 1;
          op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
        }
    }
}
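/*
 * GCM key expansion is routed through ipsecmb_gcm_pre_vft so the ISA
 * decision is made once at init time rather than per op.  For example,
 * on an AVX-512 machine INIT_IPSEC_MB_GCM_PRE (avx_gen4) leaves:
 *
 *   ipsecmb_gcm_pre_vft.ase_gcm_pre_128 == aes_gcm_pre_128_avx_gen4
 *
 * and ipsecmb_ops_gcm_cipher_inline () below simply calls through the
 * fn pointer it is handed.
 */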
static_always_inline u32
ipsecmb_ops_gcm_cipher_inline (vlib_main_t * vm,
                               ipsecmb_per_thread_data_t * ptd,
                               vnet_crypto_op_t * ops[],
                               u32 n_ops, u32 key_len, u32 iv_len,
                               ase_gcm_pre_t fn,
                               JOB_CIPHER_DIRECTION direction)
{
  JOB_AES_HMAC *job;
  u32 i, n_fail = 0;
  u8 scratch[n_ops][64];

  /* queue all the jobs first ... */
  for (i = 0; i < n_ops; i++)
    {
      struct gcm_key_data key_data;
      vnet_crypto_op_t *op = ops[i];
      u32 nonce[3];
      __m128i iv;

      fn (op->key, &key_data);

      job = IMB_GET_NEXT_JOB (ptd->mgr);

      job->src = op->src;
      job->dst = op->dst;
      job->msg_len_to_cipher_in_bytes = op->len;
      job->cipher_start_src_offset_in_bytes = 0;

      job->hash_alg = AES_GMAC;
      job->cipher_mode = GCM;
      job->cipher_direction = direction;
      job->chain_order = (direction == ENCRYPT ? CIPHER_HASH : HASH_CIPHER);

      if (direction == ENCRYPT)
        {
          if (op->flags & VNET_CRYPTO_OP_FLAG_INIT_IV)
            {
              iv = ptd->cbc_iv;
              /* only 8 bytes of the IV are used */
              _mm_storeu_si128 ((__m128i *) op->iv, iv);
              ptd->cbc_iv = _mm_aesenc_si128 (iv, iv);
            }
          /* 12-byte nonce = 4-byte salt + 8-byte IV; the assembly lines
           * are elided in this listing, op->salt is assumed here */
          nonce[0] = op->salt;
          clib_memcpy_fast (nonce + 1, op->iv, 8);
          job->iv = (u8 *) nonce;
        }
      else
        job->iv = op->iv;

      job->aes_key_len_in_bytes = key_len;
      job->aes_enc_key_expanded = &key_data;
      job->aes_dec_key_expanded = &key_data;
      job->iv_len_in_bytes = iv_len;

      job->u.GCM.aad = op->aad;
      job->u.GCM.aad_len_in_bytes = op->aad_len;
      job->auth_tag_output_len_in_bytes = op->tag_len;
      if (DECRYPT == direction)
        job->auth_tag_output = scratch[i];
      else
        job->auth_tag_output = op->tag;
      job->user_data = op;

      job = IMB_SUBMIT_JOB (ptd->mgr);

      if (job)
        ipsecmb_retire_gcm_cipher_job (job, &n_fail, direction);
    }

  /* ... then flush (i.e. complete) the rest */
  while ((job = IMB_FLUSH_JOB (ptd->mgr)))
    ipsecmb_retire_gcm_cipher_job (job, &n_fail, direction);

  return n_ops - n_fail;
}
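/*
 * Tag handling is direction-asymmetric on purpose: encrypt writes the
 * tag straight into op->tag, while decrypt computes it into scratch[i]
 * so ipsecmb_retire_gcm_cipher_job () can compare without clobbering
 * what the peer sent.  A decrypt-verify setup, schematically (values
 * illustrative):
 *
 *   op->tag = tag_from_wire;    op->tag_len = 16;
 *   // engine:  job->auth_tag_output = scratch[i];
 *   // retire:  memcmp (op->tag, scratch[i], 16) == 0
 *   //            ? COMPLETED : FAIL_BAD_HMAC
 */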
#define _(a, b, c, d)                                                  \
static_always_inline u32                                               \
ipsecmb_ops_gcm_cipher_enc_##a (vlib_main_t * vm,                      \
                                vnet_crypto_op_t * ops[],              \
                                u32 n_ops)                             \
{                                                                      \
  ipsecmb_per_thread_data_t *ptd;                                      \
  ipsecmb_main_t *imbm;                                                \
                                                                       \
  imbm = &ipsecmb_main;                                                \
  ptd = vec_elt_at_index (imbm->per_thread_data, vm->thread_index);    \
                                                                       \
  return ipsecmb_ops_gcm_cipher_inline (vm, ptd, ops, n_ops, c, d,     \
                                        ipsecmb_gcm_pre_vft.ase_gcm_pre_##b, \
                                        ENCRYPT);                      \
}
foreach_ipsecmb_gcm_cipher_op;
#undef _

#define _(a, b, c, d)                                                  \
static_always_inline u32                                               \
ipsecmb_ops_gcm_cipher_dec_##a (vlib_main_t * vm,                      \
                                vnet_crypto_op_t * ops[],              \
                                u32 n_ops)                             \
{                                                                      \
  ipsecmb_per_thread_data_t *ptd;                                      \
  ipsecmb_main_t *imbm;                                                \
                                                                       \
  imbm = &ipsecmb_main;                                                \
  ptd = vec_elt_at_index (imbm->per_thread_data, vm->thread_index);    \
                                                                       \
  return ipsecmb_ops_gcm_cipher_inline (vm, ptd, ops, n_ops, c, d,     \
                                        ipsecmb_gcm_pre_vft.ase_gcm_pre_##b, \
                                        DECRYPT);                      \
}
foreach_ipsecmb_gcm_cipher_op;
#undef _

static clib_error_t *
crypto_ipsecmb_iv_init (ipsecmb_main_t * imbm)
{
  ipsecmb_per_thread_data_t *ptd;
  clib_error_t *err = 0;
  int fd;

  if ((fd = open ("/dev/urandom", O_RDONLY)) < 0)
    return clib_error_return_unix (0, "failed to open '/dev/urandom'");

  /* seed each worker's IV state with real randomness */
  vec_foreach (ptd, imbm->per_thread_data)
  {
    if (read (fd, &ptd->cbc_iv, sizeof (ptd->cbc_iv)) !=
        sizeof (ptd->cbc_iv))
      {
        err = clib_error_return_unix (0, "'/dev/urandom' read failure");
        close (fd);
        return (err);
      }
  }

  close (fd);
  return (err);
}
static clib_error_t *
crypto_ipsecmb_init (vlib_main_t * vm)
{
  ipsecmb_main_t *imbm = &ipsecmb_main;
  ipsecmb_per_thread_data_t *ptd;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  clib_error_t *error;
  u32 eidx;

  if ((error = vlib_call_init_function (vm, vnet_crypto_init)))
    return error;

  /* priority above OpenSSL, below VPP's native code; the exact value
   * is elided in this listing, 80 is assumed */
  eidx = vnet_crypto_register_engine (vm, "ipsecmb", 80,
                                      "Intel IPSEC multi-buffer");

  vec_validate (imbm->per_thread_data, tm->n_vlib_mains - 1);

  /* pick the widest SIMD flavour the CPU supports, once, at init */
  if (clib_cpu_supports_avx512f ())
    {
      vec_foreach (ptd, imbm->per_thread_data)
      {
        ptd->mgr = alloc_mb_mgr (0);
        init_mb_mgr_avx512 (ptd->mgr);
      }
      INIT_IPSEC_MB_GCM_PRE (avx_gen4);
    }
  else if (clib_cpu_supports_avx2 ())
    {
      vec_foreach (ptd, imbm->per_thread_data)
      {
        ptd->mgr = alloc_mb_mgr (0);
        init_mb_mgr_avx2 (ptd->mgr);
      }
      INIT_IPSEC_MB_GCM_PRE (avx_gen2);
    }
  else
    {
      vec_foreach (ptd, imbm->per_thread_data)
      {
        ptd->mgr = alloc_mb_mgr (0);
        init_mb_mgr_sse (ptd->mgr);
      }
      INIT_IPSEC_MB_GCM_PRE (sse);
    }

  if (clib_cpu_supports_aes () && (error = crypto_ipsecmb_iv_init (imbm)))
    return (error);

#define _(a, b, c)                                                       \
  vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_HMAC, \
                                    ipsecmb_ops_hmac_##a);
  foreach_ipsecmb_hmac_op;
#undef _

#define _(a, b, c, d)                                                   \
  vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_ENC, \
                                    ipsecmb_ops_cbc_cipher_enc_##a);
  foreach_ipsecmb_cbc_cipher_op;
#undef _

#define _(a, b, c, d)                                                   \
  vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
                                    ipsecmb_ops_cbc_cipher_dec_##a);
  foreach_ipsecmb_cbc_cipher_op;
#undef _

#define _(a, b, c, d)                                                   \
  vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_ENC, \
                                    ipsecmb_ops_gcm_cipher_enc_##a);
  foreach_ipsecmb_gcm_cipher_op;
#undef _

#define _(a, b, c, d)                                                   \
  vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
                                    ipsecmb_ops_gcm_cipher_dec_##a);
  foreach_ipsecmb_gcm_cipher_op;
#undef _

  return (NULL);
}

VLIB_INIT_FUNCTION (crypto_ipsecmb_init);

VLIB_PLUGIN_REGISTER () =
{
  .version = VPP_BUILD_VER,
  .description = "Intel IPSEC Multi-buffer Crypto Engine",
};
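/*
 * End-to-end wiring example: one X-macro entry, _(AES_128_CBC, 128, 16,
 * 16), generates ipsecmb_ops_cbc_cipher_enc_AES_128_CBC (), and the init
 * above registers it for VNET_CRYPTO_OP_AES_128_CBC_ENC on engine
 * "ipsecmb".  A caller then just fills ops and lets the framework pick
 * the highest-priority registered engine (sketch, fields abbreviated):
 *
 *   vnet_crypto_op_t op = { .op = VNET_CRYPTO_OP_AES_128_CBC_ENC, ... };
 *   vnet_crypto_process_ops (vm, &op, 1);   // dispatches to this engine
 */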