#include <intel-ipsec-mb.h>
#include <vpp/app/version.h>

#define HMAC_MAX_BLOCK_SIZE  SHA_512_BLOCK_SIZE
#define EXPANDED_KEY_N_BYTES (16 * 15)
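
/*
 * HMAC ops tuple: (ALG, JOB_HASH_ALG, hash fn, block size, hash size,
 * digest size), sizes in bytes; the last four feed ipsecmb_ops_hmac_inline.
 */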
#define foreach_ipsecmb_hmac_op \
  _ (SHA1, SHA1, sha1, 64, 20, 20) \
  _ (SHA224, SHA_224, sha224, 64, 32, 28) \
  _ (SHA256, SHA_256, sha256, 64, 32, 32) \
  _ (SHA384, SHA_384, sha384, 128, 64, 48) \
  _ (SHA512, SHA_512, sha512, 128, 64, 64)
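
/* Cipher ops tuple: (ALG, key length in bits, JOB_CIPHER_MODE). */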
#define foreach_ipsecmb_cipher_op \
  _ (AES_128_CBC, 128, CBC) \
  _ (AES_192_CBC, 192, CBC) \
  _ (AES_256_CBC, 256, CBC) \
  _ (AES_128_CTR, 128, CNTR) \
  _ (AES_192_CTR, 192, CNTR) \
  _ (AES_256_CTR, 256, CNTR)
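
/* GCM ops tuple: (ALG, key length in bits). */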
#define foreach_ipsecmb_gcm_cipher_op \
  _ (AES_128_GCM, 128) \
  _ (AES_192_GCM, 192) \
  _ (AES_256_GCM, 256)

static_always_inline vnet_crypto_op_status_t
ipsecmb_status_job (JOB_STS status)
{
  switch (status)
    {
    case STS_COMPLETED:
      return VNET_CRYPTO_OP_STATUS_COMPLETED;
    case STS_BEING_PROCESSED:
    case STS_COMPLETED_AES:
    case STS_COMPLETED_HMAC:
      return VNET_CRYPTO_OP_STATUS_WORK_IN_PROGRESS;
    case STS_INVALID_ARGS:
    case STS_INTERNAL_ERROR:
    case STS_ERROR:
      return VNET_CRYPTO_OP_STATUS_FAIL_ENGINE_ERR;
    }
  return VNET_CRYPTO_OP_STATUS_FAIL_ENGINE_ERR;
}

always_inline void
ipsecmb_retire_hmac_job (JOB_AES_HMAC * job, u32 * n_fail, u32 digest_size)
{
  vnet_crypto_op_t *op = job->user_data;
  u32 len = op->digest_len ? op->digest_len : digest_size;

  if (PREDICT_FALSE (STS_COMPLETED != job->status))
    {
      op->status = ipsecmb_status_job (job->status);
      *n_fail = *n_fail + 1;
      return;
    }

  if (op->flags & VNET_CRYPTO_OP_FLAG_HMAC_CHECK)
    {
      if ((memcmp (op->digest, job->auth_tag_output, len)))
	{
	  *n_fail = *n_fail + 1;
	  op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
	  return;
	}
    }
  else if (len == digest_size)
    clib_memcpy_fast (op->digest, job->auth_tag_output, digest_size);
  else
    clib_memcpy_fast (op->digest, job->auth_tag_output, len);

  op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
}

static_always_inline u32
ipsecmb_ops_hmac_inline (vlib_main_t * vm, vnet_crypto_op_t * ops[],
			 u32 n_ops, u32 block_size, u32 hash_size,
			 u32 digest_size, JOB_HASH_ALG alg)
{
  ipsecmb_main_t *imbm = &ipsecmb_main;
  ipsecmb_per_thread_data_t *ptd = vec_elt_at_index (imbm->per_thread_data,
						     vm->thread_index);
  JOB_AES_HMAC *job;
  u32 i, n_fail = 0;
  u8 scratch[n_ops][digest_size];
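
  /*
   * queue all the jobs first; the multi-buffer manager returns a completed
   * job (possibly an earlier one) only once its pipeline fills up
   */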
  for (i = 0; i < n_ops; i++)
    {
      vnet_crypto_op_t *op = ops[i];
      u8 *kd = (u8 *) imbm->key_data[op->key_index];

      job = IMB_GET_NEXT_JOB (ptd->mgr);

      job->src = op->src;
      job->hash_start_src_offset_in_bytes = 0;
      job->msg_len_to_hash_in_bytes = op->len;
      job->hash_alg = alg;
      job->auth_tag_output_len_in_bytes = digest_size;
      job->auth_tag_output = scratch[i];

      job->cipher_mode = NULL_CIPHER;
      job->cipher_direction = DECRYPT;
      job->chain_order = HASH_CIPHER;

      job->u.HMAC._hashed_auth_key_xor_ipad = kd;
      job->u.HMAC._hashed_auth_key_xor_opad = kd + hash_size;
      job->user_data = op;

      job = IMB_SUBMIT_JOB (ptd->mgr);

      if (job)
	ipsecmb_retire_hmac_job (job, &n_fail, digest_size);
    }
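
  /* no more submissions for this batch: drain jobs still in flight */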
  while ((job = IMB_FLUSH_JOB (ptd->mgr)))
    ipsecmb_retire_hmac_job (job, &n_fail, digest_size);

  return n_ops - n_fail;
}

#define _(a, b, c, d, e, f) \
static_always_inline u32 \
ipsecmb_ops_hmac_##a (vlib_main_t * vm, \
		      vnet_crypto_op_t * ops[], \
		      u32 n_ops) \
{ return ipsecmb_ops_hmac_inline (vm, ops, n_ops, d, e, f, b); }

foreach_ipsecmb_hmac_op;
#undef _
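
/*
 * E.g. _(SHA1, SHA1, sha1, 64, 20, 20) expands to ipsecmb_ops_hmac_SHA1 (),
 * which calls ipsecmb_ops_hmac_inline (vm, ops, n_ops, 64, 20, 20, SHA1).
 */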

always_inline void
ipsecmb_retire_cipher_job (JOB_AES_HMAC * job, u32 * n_fail)
{
  vnet_crypto_op_t *op = job->user_data;

  if (PREDICT_FALSE (STS_COMPLETED != job->status))
    {
      op->status = ipsecmb_status_job (job->status);
      *n_fail = *n_fail + 1;
    }
  else
    op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
}

static_always_inline u32
ipsecmb_ops_aes_cipher_inline (vlib_main_t * vm, vnet_crypto_op_t * ops[],
			       u32 n_ops, u32 key_len,
			       JOB_CIPHER_DIRECTION direction,
			       JOB_CIPHER_MODE cipher_mode)
{
  ipsecmb_main_t *imbm = &ipsecmb_main;
  ipsecmb_per_thread_data_t *ptd = vec_elt_at_index (imbm->per_thread_data,
						     vm->thread_index);
  JOB_AES_HMAC *job;
  u32 i, n_fail = 0;

  for (i = 0; i < n_ops; i++)
    {
      ipsecmb_aes_key_data_t *kd;
      vnet_crypto_op_t *op = ops[i];
      __m128i iv;

      kd = (ipsecmb_aes_key_data_t *) imbm->key_data[op->key_index];
      job = IMB_GET_NEXT_JOB (ptd->mgr);

      job->src = op->src;
      job->dst = op->dst;
      job->msg_len_to_cipher_in_bytes = op->len;
      job->cipher_start_src_offset_in_bytes = 0;

      job->hash_alg = NULL_HASH;
      job->cipher_mode = cipher_mode;
      job->cipher_direction = direction;
      job->chain_order = (direction == ENCRYPT ? CIPHER_HASH : HASH_CIPHER);
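
      /* on encrypt, generate a fresh IV when the op requests one */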
      if ((direction == ENCRYPT) && (op->flags & VNET_CRYPTO_OP_FLAG_INIT_IV))
	{
	  iv = ptd->cbc_iv;
	  _mm_storeu_si128 ((__m128i *) op->iv, iv);
	  ptd->cbc_iv = _mm_aesenc_si128 (iv, iv);
	}

      job->aes_key_len_in_bytes = key_len / 8;
      job->aes_enc_key_expanded = kd->enc_key_exp;
      job->aes_dec_key_expanded = kd->dec_key_exp;
      job->iv = op->iv;
      job->iv_len_in_bytes = AES_BLOCK_SIZE;
      job->user_data = op;

      job = IMB_SUBMIT_JOB (ptd->mgr);

      if (job)
	ipsecmb_retire_cipher_job (job, &n_fail);
    }
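
  /* drain jobs the manager is still holding */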
  while ((job = IMB_FLUSH_JOB (ptd->mgr)))
    ipsecmb_retire_cipher_job (job, &n_fail);

  return n_ops - n_fail;
}

#define _(a, b, c) \
  static_always_inline u32 ipsecmb_ops_cipher_enc_##a ( \
    vlib_main_t *vm, vnet_crypto_op_t *ops[], u32 n_ops) \
  { \
    return ipsecmb_ops_aes_cipher_inline (vm, ops, n_ops, b, ENCRYPT, c); \
  } \
 \
  static_always_inline u32 ipsecmb_ops_cipher_dec_##a ( \
    vlib_main_t *vm, vnet_crypto_op_t *ops[], u32 n_ops) \
  { \
    return ipsecmb_ops_aes_cipher_inline (vm, ops, n_ops, b, DECRYPT, c); \
  }

foreach_ipsecmb_cipher_op;
#undef _
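
/*
 * AES-GCM uses intel-ipsec-mb's direct API (init/update/finalize over each
 * chunk) rather than the job manager; one macro instantiates all four
 * handlers per key size.
 */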
#define _(a, b) \
static_always_inline u32 \
ipsecmb_ops_gcm_cipher_enc_##a##_chained (vlib_main_t * vm, \
    vnet_crypto_op_t * ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops) \
{ \
  ipsecmb_main_t *imbm = &ipsecmb_main; \
  ipsecmb_per_thread_data_t *ptd = vec_elt_at_index (imbm->per_thread_data, \
						     vm->thread_index); \
  MB_MGR *m = ptd->mgr; \
  vnet_crypto_op_chunk_t *chp; \
  u32 i, j; \
 \
  for (i = 0; i < n_ops; i++) \
    { \
      struct gcm_key_data *kd; \
      struct gcm_context_data ctx; \
      vnet_crypto_op_t *op = ops[i]; \
 \
      kd = (struct gcm_key_data *) imbm->key_data[op->key_index]; \
      ASSERT (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS); \
      IMB_AES##b##_GCM_INIT (m, kd, &ctx, op->iv, op->aad, op->aad_len); \
      chp = chunks + op->chunk_index; \
      for (j = 0; j < op->n_chunks; j++) \
	{ \
	  IMB_AES##b##_GCM_ENC_UPDATE (m, kd, &ctx, chp->dst, chp->src, \
				       chp->len); \
	  chp += 1; \
	} \
      IMB_AES##b##_GCM_ENC_FINALIZE (m, kd, &ctx, op->tag, op->tag_len); \
 \
      op->status = VNET_CRYPTO_OP_STATUS_COMPLETED; \
    } \
  return n_ops; \
} \
 \
static_always_inline u32 \
ipsecmb_ops_gcm_cipher_enc_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
				u32 n_ops) \
{ \
  ipsecmb_main_t *imbm = &ipsecmb_main; \
  ipsecmb_per_thread_data_t *ptd = vec_elt_at_index (imbm->per_thread_data, \
						     vm->thread_index); \
  MB_MGR *m = ptd->mgr; \
  u32 i; \
 \
  for (i = 0; i < n_ops; i++) \
    { \
      struct gcm_key_data *kd; \
      struct gcm_context_data ctx; \
      vnet_crypto_op_t *op = ops[i]; \
 \
      kd = (struct gcm_key_data *) imbm->key_data[op->key_index]; \
      IMB_AES##b##_GCM_ENC (m, kd, &ctx, op->dst, op->src, op->len, op->iv, \
			    op->aad, op->aad_len, op->tag, op->tag_len); \
 \
      op->status = VNET_CRYPTO_OP_STATUS_COMPLETED; \
    } \
  return n_ops; \
} \
 \
static_always_inline u32 \
ipsecmb_ops_gcm_cipher_dec_##a##_chained (vlib_main_t * vm, \
    vnet_crypto_op_t * ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops) \
{ \
  ipsecmb_main_t *imbm = &ipsecmb_main; \
  ipsecmb_per_thread_data_t *ptd = vec_elt_at_index (imbm->per_thread_data, \
						     vm->thread_index); \
  MB_MGR *m = ptd->mgr; \
  vnet_crypto_op_chunk_t *chp; \
  u32 i, j, n_failed = 0; \
 \
  for (i = 0; i < n_ops; i++) \
    { \
      struct gcm_key_data *kd; \
      struct gcm_context_data ctx; \
      vnet_crypto_op_t *op = ops[i]; \
      u8 scratch[64]; \
 \
      kd = (struct gcm_key_data *) imbm->key_data[op->key_index]; \
      ASSERT (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS); \
      IMB_AES##b##_GCM_INIT (m, kd, &ctx, op->iv, op->aad, op->aad_len); \
      chp = chunks + op->chunk_index; \
      for (j = 0; j < op->n_chunks; j++) \
	{ \
	  IMB_AES##b##_GCM_DEC_UPDATE (m, kd, &ctx, chp->dst, chp->src, \
				       chp->len); \
	  chp += 1; \
	} \
      IMB_AES##b##_GCM_DEC_FINALIZE (m, kd, &ctx, scratch, op->tag_len); \
 \
      if ((memcmp (op->tag, scratch, op->tag_len))) \
	{ \
	  op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC; \
	  n_failed++; \
	} \
      else \
	op->status = VNET_CRYPTO_OP_STATUS_COMPLETED; \
    } \
  return n_ops - n_failed; \
} \
 \
static_always_inline u32 \
ipsecmb_ops_gcm_cipher_dec_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
				u32 n_ops) \
{ \
  ipsecmb_main_t *imbm = &ipsecmb_main; \
  ipsecmb_per_thread_data_t *ptd = vec_elt_at_index (imbm->per_thread_data, \
						     vm->thread_index); \
  MB_MGR *m = ptd->mgr; \
  u32 i, n_failed = 0; \
 \
  for (i = 0; i < n_ops; i++) \
    { \
      struct gcm_key_data *kd; \
      struct gcm_context_data ctx; \
      vnet_crypto_op_t *op = ops[i]; \
      u8 scratch[64]; \
 \
      kd = (struct gcm_key_data *) imbm->key_data[op->key_index]; \
      IMB_AES##b##_GCM_DEC (m, kd, &ctx, op->dst, op->src, op->len, op->iv, \
			    op->aad, op->aad_len, scratch, op->tag_len); \
 \
      if ((memcmp (op->tag, scratch, op->tag_len))) \
	{ \
	  op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC; \
	  n_failed++; \
	} \
      else \
	op->status = VNET_CRYPTO_OP_STATUS_COMPLETED; \
    } \
  return n_ops - n_failed; \
}

foreach_ipsecmb_gcm_cipher_op;
#undef _
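
/*
 * chacha20-poly1305 goes through the job API; the retire helper below
 * verifies the tag on decrypt and copies it out on encrypt.
 */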
always_inline void
ipsecmb_retire_aead_job (JOB_AES_HMAC * job, u32 * n_fail)
{
  vnet_crypto_op_t *op = job->user_data;
  u32 len = op->tag_len;

  if (PREDICT_FALSE (STS_COMPLETED != job->status))
    {
      op->status = ipsecmb_status_job (job->status);
      *n_fail = *n_fail + 1;
      return;
    }

  if (op->flags & VNET_CRYPTO_OP_FLAG_HMAC_CHECK)
    {
      if (memcmp (op->tag, job->auth_tag_output, len))
	{
	  *n_fail = *n_fail + 1;
	  op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
	  return;
	}
    }
  else
    clib_memcpy_fast (op->tag, job->auth_tag_output, len);

  op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
}

static_always_inline u32
ipsecmb_ops_chacha_poly (vlib_main_t * vm, vnet_crypto_op_t * ops[],
			 u32 n_ops, IMB_CIPHER_DIRECTION dir)
{
  ipsecmb_main_t *imbm = &ipsecmb_main;
  ipsecmb_per_thread_data_t *ptd = vec_elt_at_index (imbm->per_thread_data,
						     vm->thread_index);
  struct IMB_JOB *job;
  MB_MGR *m = ptd->mgr;
  u32 i, n_fail = 0, last_key_index = ~0;
  u8 scratch[VLIB_FRAME_SIZE][16];
  u8 iv_data[16];
  u8 *key = 0;
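
  /* consecutive ops often share a key; cache the last lookup */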
  for (i = 0; i < n_ops; i++)
    {
      vnet_crypto_op_t *op = ops[i];
      __m128i iv;

      job = IMB_GET_NEXT_JOB (m);

      if (last_key_index != op->key_index)
	{
	  vnet_crypto_key_t *kd = vnet_crypto_get_key (op->key_index);

	  key = kd->data;
	  last_key_index = op->key_index;
	}

      job->cipher_direction = dir;
      job->chain_order = IMB_ORDER_HASH_CIPHER;
      job->cipher_mode = IMB_CIPHER_CHACHA20_POLY1305;
      job->hash_alg = IMB_AUTH_CHACHA20_POLY1305;
      job->enc_keys = job->dec_keys = key;
      job->key_len_in_bytes = 32;

      job->u.CHACHA20_POLY1305.aad = op->aad;
      job->u.CHACHA20_POLY1305.aad_len_in_bytes = op->aad_len;
      job->src = op->src;
      job->dst = op->dst;
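
      /* on encrypt, generate a fresh IV when the op requests one */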
      if ((dir == IMB_DIR_ENCRYPT) &&
	  (op->flags & VNET_CRYPTO_OP_FLAG_INIT_IV))
	{
	  iv = ptd->cbc_iv;
	  _mm_storeu_si128 ((__m128i *) iv_data, iv);
	  clib_memcpy_fast (op->iv, iv_data, 8);
	  ptd->cbc_iv = _mm_aesenc_si128 (iv, iv);
	}

      job->iv = op->iv;
      job->iv_len_in_bytes = 12;
      job->msg_len_to_cipher_in_bytes = job->msg_len_to_hash_in_bytes =
	op->len;
      job->cipher_start_src_offset_in_bytes =
	job->hash_start_src_offset_in_bytes = 0;

      job->auth_tag_output = scratch[i];
      job->auth_tag_output_len_in_bytes = 16;
      job->user_data = op;
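
      /* the NOCHECK submit variant skips the manager's parameter checks */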
      job = IMB_SUBMIT_JOB_NOCHECK (ptd->mgr);
      if (job)
	ipsecmb_retire_aead_job (job, &n_fail);
    }

  while ((job = IMB_FLUSH_JOB (ptd->mgr)))
    ipsecmb_retire_aead_job (job, &n_fail);

  return n_ops - n_fail;
}
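
/*
 * The chained variant uses the direct chacha20-poly1305 API so each op's
 * chunk list can be walked with init/update/finalize.
 */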
static_always_inline u32
ipsecmb_ops_chacha_poly_chained (vlib_main_t * vm, vnet_crypto_op_t * ops[],
				 vnet_crypto_op_chunk_t * chunks, u32 n_ops,
				 IMB_CIPHER_DIRECTION dir)
{
  ipsecmb_main_t *imbm = &ipsecmb_main;
  ipsecmb_per_thread_data_t *ptd = vec_elt_at_index (imbm->per_thread_data,
						     vm->thread_index);
  MB_MGR *m = ptd->mgr;
  u32 i, n_fail = 0, last_key_index = ~0;
  u8 iv_data[16];
  u8 *key = 0;
  if (dir == IMB_DIR_ENCRYPT)
    {
      for (i = 0; i < n_ops; i++)
	{
	  vnet_crypto_op_t *op = ops[i];
	  struct chacha20_poly1305_context_data ctx;
	  vnet_crypto_op_chunk_t *chp;
	  __m128i iv;
	  u32 j;

	  ASSERT (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS);

	  if (last_key_index != op->key_index)
	    {
	      vnet_crypto_key_t *kd = vnet_crypto_get_key (op->key_index);

	      key = kd->data;
	      last_key_index = op->key_index;
	    }

	  if (op->flags & VNET_CRYPTO_OP_FLAG_INIT_IV)
	    {
	      iv = ptd->cbc_iv;
	      _mm_storeu_si128 ((__m128i *) iv_data, iv);
	      clib_memcpy_fast (op->iv, iv_data, 8);
	      ptd->cbc_iv = _mm_aesenc_si128 (iv, iv);
	    }

	  IMB_CHACHA20_POLY1305_INIT (m, key, &ctx, op->iv, op->aad,
				      op->aad_len);

	  chp = chunks + op->chunk_index;
	  for (j = 0; j < op->n_chunks; j++)
	    {
	      IMB_CHACHA20_POLY1305_ENC_UPDATE (m, key, &ctx, chp->dst,
						chp->src, chp->len);
	      chp += 1;
	    }

	  IMB_CHACHA20_POLY1305_ENC_FINALIZE (m, &ctx, op->tag, op->tag_len);

	  op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
	}
    }
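
  /* decrypt: finalize into a scratch tag and compare against op->tag */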
  else
    {
      for (i = 0; i < n_ops; i++)
	{
	  vnet_crypto_op_t *op = ops[i];
	  struct chacha20_poly1305_context_data ctx;
	  vnet_crypto_op_chunk_t *chp;
	  u8 scratch[16];
	  u32 j;

	  ASSERT (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS);

	  if (last_key_index != op->key_index)
	    {
	      vnet_crypto_key_t *kd = vnet_crypto_get_key (op->key_index);

	      key = kd->data;
	      last_key_index = op->key_index;
	    }

	  IMB_CHACHA20_POLY1305_INIT (m, key, &ctx, op->iv, op->aad,
				      op->aad_len);

	  chp = chunks + op->chunk_index;
	  for (j = 0; j < op->n_chunks; j++)
	    {
	      IMB_CHACHA20_POLY1305_DEC_UPDATE (m, key, &ctx, chp->dst,
						chp->src, chp->len);
	      chp += 1;
	    }

	  IMB_CHACHA20_POLY1305_DEC_FINALIZE (m, &ctx, scratch, op->tag_len);

	  if (memcmp (op->tag, scratch, op->tag_len))
	    {
	      n_fail = n_fail + 1;
	      op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
	    }
	  else
	    op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
	}
    }

  return n_ops - n_fail;
}
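
/* seed the per-thread IV state from /dev/urandom at init time */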
  if ((fd = open ("/dev/urandom", O_RDONLY)) < 0)
    return clib_error_return_unix (0, "failed to open '/dev/urandom'");
      const int block_qw = HMAC_MAX_BLOCK_SIZE / sizeof (u64);
      u64 pad[block_qw], key_hash[block_qw];

      for (i = 0; i < block_qw; i++)
	pad[i] = key_hash[i] ^ 0x3636363636363636;
      ad->hash_one_block (pad, kd);

      for (i = 0; i < block_qw; i++)
	pad[i] = key_hash[i] ^ 0x5c5c5c5c5c5c5c5c;
      ad->hash_one_block (pad, ((u8 *) kd) + (ad->data_size / 2));

  name = format (0, "Intel(R) Multi-Buffer Crypto for IPsec Library %s%c",
		 IMB_VERSION_STR, 0);
      ptd->mgr = alloc_mb_mgr (0);
      if (clib_cpu_supports_avx512f ())
	init_mb_mgr_avx512 (ptd->mgr);
      else if (clib_cpu_supports_avx2 ())
	init_mb_mgr_avx2 (ptd->mgr);
      else
	init_mb_mgr_sse (ptd->mgr);
#define _(a, b, c, d, e, f) \
  vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_HMAC, \
				    ipsecmb_ops_hmac_##a); \
  ad = imbm->alg_data + VNET_CRYPTO_ALG_HMAC_##a; \
  ad->block_size = d; \
  ad->data_size = e * 2; \
  ad->hash_one_block = m->c##_one_block; \
  ad->hash_fn = m->c;

  foreach_ipsecmb_hmac_op;
#undef _
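
/* AES-CBC/CTR: register enc/dec handlers and the key-expansion function */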
#define _(a, b, c) \
  vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_ENC, \
				    ipsecmb_ops_cipher_enc_##a); \
  vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
				    ipsecmb_ops_cipher_dec_##a); \
  ad = imbm->alg_data + VNET_CRYPTO_ALG_##a; \
  ad->data_size = sizeof (ipsecmb_aes_key_data_t); \
  ad->keyexp = m->keyexp_##b;

  foreach_ipsecmb_cipher_op;
#undef _
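
/* AES-GCM: register direct and chained handlers for each key size */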
#define _(a, b) \
  vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_ENC, \
				    ipsecmb_ops_gcm_cipher_enc_##a); \
  vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
				    ipsecmb_ops_gcm_cipher_dec_##a); \
  vnet_crypto_register_chained_ops_handler \
    (vm, eidx, VNET_CRYPTO_OP_##a##_ENC, \
     ipsecmb_ops_gcm_cipher_enc_##a##_chained); \
  vnet_crypto_register_chained_ops_handler \
    (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
     ipsecmb_ops_gcm_cipher_dec_##a##_chained); \
  ad = imbm->alg_data + VNET_CRYPTO_ALG_##a; \
  ad->data_size = sizeof (struct gcm_key_data); \
  ad->aes_gcm_pre = m->gcm##b##_pre;

  foreach_ipsecmb_gcm_cipher_op;
#undef _
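
/* chacha20-poly1305: one handler pair plus the chained variants */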
  vnet_crypto_register_ops_handler (vm, eidx,
				    VNET_CRYPTO_OP_CHACHA20_POLY1305_ENC,
				    ipsecmb_ops_chacha_poly_enc);
  vnet_crypto_register_ops_handler (vm, eidx,
				    VNET_CRYPTO_OP_CHACHA20_POLY1305_DEC,
				    ipsecmb_ops_chacha_poly_dec);
  vnet_crypto_register_chained_ops_handler (
    vm, eidx, VNET_CRYPTO_OP_CHACHA20_POLY1305_ENC,
    ipsecmb_ops_chacha_poly_enc_chained);
  vnet_crypto_register_chained_ops_handler (
    vm, eidx, VNET_CRYPTO_OP_CHACHA20_POLY1305_DEC,
    ipsecmb_ops_chacha_poly_dec_chained);
  ad = imbm->alg_data + VNET_CRYPTO_ALG_CHACHA20_POLY1305;

VLIB_INIT_FUNCTION (crypto_ipsecmb_init) =
{
  .runs_after = VLIB_INITS ("vnet_crypto_init"),
};

VLIB_PLUGIN_REGISTER () =
{
  .version = VPP_BUILD_VER,
  .description = "Intel IPSEC Multi-buffer Crypto Engine",
};