/* FD.io VPP v20.09-64-g4f7b92f0a — Vector Packet Processing — quic_crypto.c */
1 /*
2  * Copyright (c) 2019 Cisco and/or its affiliates.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at:
6  *
7  * http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 #include <vnet/crypto/crypto.h>
16 #include <vppinfra/lock.h>
17 
18 #include <quic/quic.h>
19 #include <quic/quic_crypto.h>
20 
21 #include <quicly.h>
22 #include <picotls/openssl.h>
23 
24 #define QUICLY_EPOCH_1RTT 3
25 
26 extern quic_main_t quic_main;
27 extern quic_ctx_t *quic_get_conn_ctx (quicly_conn_t * conn);
28 
29 typedef void (*quicly_do_transform_fn) (ptls_cipher_context_t *, void *,
30  const void *, size_t);
31 
33 {
34  ptls_cipher_context_t super;
37 };
38 
40 {
41  ptls_aead_context_t super;
44 };
45 
46 static size_t
48  ptls_aead_context_t * _ctx, void *_output,
49  const void *input, size_t inlen,
50  uint64_t decrypted_pn, const void *aad,
51  size_t aadlen);
52 
54 
55 void
57 {
59 
60  if (batch_ctx->nb_tx_packets <= 0)
61  return;
62 
65  batch_ctx->nb_tx_packets);
67 
68  for (int i = 0; i < batch_ctx->nb_tx_packets; i++)
70 
71  batch_ctx->nb_tx_packets = 0;
72 }
73 
74 void
76 {
78 
79  if (batch_ctx->nb_rx_packets <= 0)
80  return;
81 
84  batch_ctx->nb_rx_packets);
86 
87  for (int i = 0; i < batch_ctx->nb_rx_packets; i++)
89 
90  batch_ctx->nb_rx_packets = 0;
91 }
92 
93 void
94 build_iv (ptls_aead_context_t * ctx, uint8_t * iv, uint64_t seq)
95 {
96  size_t iv_size = ctx->algo->iv_size, i;
97  const uint8_t *s = ctx->static_iv;
98  uint8_t *d = iv;
99  /* build iv */
100  for (i = iv_size - 8; i != 0; --i)
101  *d++ = *s++;
102  i = 64;
103  do
104  {
105  i -= 8;
106  *d++ = *s++ ^ (uint8_t) (seq >> i);
107  }
108  while (i != 0);
109 }
110 
/*
 * Apply QUIC header protection to an already AEAD-encrypted datagram:
 * sample the ciphertext with the header-protection cipher @hp, then XOR
 * the protected bits of the first byte and the QUICLY_SEND_PN_SIZE
 * packet-number bytes with the resulting mask.
 */
static void
do_finalize_send_packet (ptls_cipher_context_t * hp,
			 quicly_datagram_t * packet,
			 size_t first_byte_at, size_t payload_from)
{
  uint8_t hpmask[1 + QUICLY_SEND_PN_SIZE] = {
    0
  };
  size_t i;

  /* sample offset assumes a full-length (QUICLY_MAX_PN_SIZE) PN encoding */
  ptls_cipher_init (hp,
		    packet->data.base + payload_from - QUICLY_SEND_PN_SIZE +
		    QUICLY_MAX_PN_SIZE);
  /* encrypting zeroes yields the raw keystream, i.e. the HP mask */
  ptls_cipher_encrypt (hp, hpmask, hpmask, sizeof (hpmask));

  /* long header: low 4 bits are protected; short header: low 5 bits */
  packet->data.base[first_byte_at] ^=
    hpmask[0] &
    (QUICLY_PACKET_IS_LONG_HEADER (packet->data.base[first_byte_at]) ? 0xf :
     0x1f);

  for (i = 0; i != QUICLY_SEND_PN_SIZE; ++i)
    packet->data.base[payload_from + i - QUICLY_SEND_PN_SIZE] ^=
      hpmask[i + 1];
}
135 
136 void
137 quic_crypto_finalize_send_packet (quicly_datagram_t * packet)
138 {
139  quic_encrypt_cb_ctx *encrypt_cb_ctx =
140  (quic_encrypt_cb_ctx *) ((uint8_t *) packet + sizeof (*packet));
141 
142  for (int i = 0; i < encrypt_cb_ctx->snd_ctx_count; i++)
143  {
144  do_finalize_send_packet (encrypt_cb_ctx->snd_ctx[i].hp,
145  packet,
146  encrypt_cb_ctx->snd_ctx[i].first_byte_at,
147  encrypt_cb_ctx->snd_ctx[i].payload_from);
148  }
149  encrypt_cb_ctx->snd_ctx_count = 0;
150 }
151 
/*
 * quicly crypto-engine hook: derive the header-protection cipher (optional,
 * only when @hp_ctx is non-NULL) and the AEAD context for a new epoch from
 * @secret.  Ingress 1-RTT contexts are additionally cached on the quic_ctx
 * so the vpp-side decrypt path can use them; if an ingress AEAD context is
 * already present this is a key update and key_phase_ingress is bumped.
 * Returns 0 on success or a PTLS_ERROR_* code; on failure any context
 * created here is freed and the out-parameters are cleared.
 */
static int
quic_crypto_setup_cipher (quicly_crypto_engine_t * engine,
			  quicly_conn_t * conn, size_t epoch, int is_enc,
			  ptls_cipher_context_t ** hp_ctx,
			  ptls_aead_context_t ** aead_ctx,
			  ptls_aead_algorithm_t * aead,
			  ptls_hash_algorithm_t * hash, const void *secret)
{
  uint8_t hpkey[PTLS_MAX_SECRET_SIZE];
  int ret;

  *aead_ctx = NULL;
  /* generate new header protection key */
  if (hp_ctx != NULL)
    {
      *hp_ctx = NULL;
      ret = ptls_hkdf_expand_label (hash, hpkey, aead->ctr_cipher->key_size,
				    ptls_iovec_init (secret,
						     hash->digest_size),
				    "quic hp", ptls_iovec_init (NULL, 0),
				    NULL);
      if (ret)
	goto Exit;
      *hp_ctx = ptls_cipher_new (aead->ctr_cipher, is_enc, hpkey);
      if (NULL == *hp_ctx)
	{
	  ret = PTLS_ERROR_NO_MEMORY;
	  goto Exit;
	}
    }

  /* generate new AEAD context */
  *aead_ctx = ptls_aead_new (aead, hash, is_enc, secret,
			     QUICLY_AEAD_BASE_LABEL);
  if (NULL == *aead_ctx)
    {
      ret = PTLS_ERROR_NO_MEMORY;
      goto Exit;
    }

  if (epoch == QUICLY_EPOCH_1RTT && !is_enc)
    {
      /* cache ingress 1-RTT keys for the vpp decrypt path; a second
       * install on the same connection means a key update */
      quic_ctx_t *qctx = quic_get_conn_ctx (conn);
      if (qctx->ingress_keys.aead_ctx != NULL)
	qctx->key_phase_ingress++;

      qctx->ingress_keys.aead_ctx = *aead_ctx;
      if (hp_ctx != NULL)
	qctx->ingress_keys.hp_ctx = *hp_ctx;
    }

  ret = 0;

Exit:
  if (ret)
    {
      if (*aead_ctx != NULL)
	{
	  ptls_aead_free (*aead_ctx);
	  *aead_ctx = NULL;
	}
      if (hp_ctx && *hp_ctx != NULL)
	{
	  ptls_cipher_free (*hp_ctx);
	  *hp_ctx = NULL;
	}
    }
  /* never leave key material on the stack */
  ptls_clear_memory (hpkey, sizeof (hpkey));
  return ret;
}
222 
223 void
224 quic_crypto_finalize_send_packet_cb (struct st_quicly_crypto_engine_t
225  *engine, quicly_conn_t * conn,
226  ptls_cipher_context_t * hp,
227  ptls_aead_context_t * aead,
228  quicly_datagram_t * packet,
229  size_t first_byte_at,
230  size_t payload_from, int coalesced)
231 {
232  quic_encrypt_cb_ctx *encrypt_cb_ctx =
233  (quic_encrypt_cb_ctx *) ((uint8_t *) packet + sizeof (*packet));
234 
235  encrypt_cb_ctx->snd_ctx[encrypt_cb_ctx->snd_ctx_count].hp = hp;
236  encrypt_cb_ctx->snd_ctx[encrypt_cb_ctx->snd_ctx_count].first_byte_at =
237  first_byte_at;
238  encrypt_cb_ctx->snd_ctx[encrypt_cb_ctx->snd_ctx_count].payload_from =
239  payload_from;
240  encrypt_cb_ctx->snd_ctx_count++;
241 }
242 
243 void
245 {
246  ptls_cipher_context_t *header_protection = NULL;
247  ptls_aead_context_t *aead = NULL;
248  int pn;
249 
250  /* Long Header packets are not decrypted by vpp */
251  if (QUICLY_PACKET_IS_LONG_HEADER (pctx->packet.octets.base[0]))
252  return;
253 
254  uint64_t next_expected_packet_number =
255  quicly_get_next_expected_packet_number (qctx->conn);
256  if (next_expected_packet_number == UINT64_MAX)
257  return;
258 
259  aead = qctx->ingress_keys.aead_ctx;
260  header_protection = qctx->ingress_keys.hp_ctx;
261 
262  if (!aead || !header_protection)
263  return;
264 
265  size_t encrypted_len = pctx->packet.octets.len - pctx->packet.encrypted_off;
266  uint8_t hpmask[5] = { 0 };
267  uint32_t pnbits = 0;
268  size_t pnlen, ptlen, i;
269 
270  /* decipher the header protection, as well as obtaining pnbits, pnlen */
271  if (encrypted_len < header_protection->algo->iv_size + QUICLY_MAX_PN_SIZE)
272  return;
273  ptls_cipher_init (header_protection,
274  pctx->packet.octets.base + pctx->packet.encrypted_off +
275  QUICLY_MAX_PN_SIZE);
276  ptls_cipher_encrypt (header_protection, hpmask, hpmask, sizeof (hpmask));
277  pctx->packet.octets.base[0] ^=
278  hpmask[0] & (QUICLY_PACKET_IS_LONG_HEADER (pctx->packet.octets.base[0]) ?
279  0xf : 0x1f);
280  pnlen = (pctx->packet.octets.base[0] & 0x3) + 1;
281  for (i = 0; i != pnlen; ++i)
282  {
283  pctx->packet.octets.base[pctx->packet.encrypted_off + i] ^=
284  hpmask[i + 1];
285  pnbits =
286  (pnbits << 8) | pctx->packet.octets.base[pctx->packet.encrypted_off +
287  i];
288  }
289 
290  size_t aead_off = pctx->packet.encrypted_off + pnlen;
291 
292  pn =
293  quicly_determine_packet_number (pnbits, pnlen * 8,
294  next_expected_packet_number);
295 
296  int key_phase_bit =
297  (pctx->packet.octets.base[0] & QUICLY_KEY_PHASE_BIT) != 0;
298 
299  if (key_phase_bit != (qctx->key_phase_ingress & 1))
300  {
301  pctx->packet.octets.base[0] ^=
302  hpmask[0] &
303  (QUICLY_PACKET_IS_LONG_HEADER (pctx->packet.octets.base[0]) ? 0xf :
304  0x1f);
305  for (i = 0; i != pnlen; ++i)
306  {
307  pctx->packet.octets.base[pctx->packet.encrypted_off + i] ^=
308  hpmask[i + 1];
309  }
310  return;
311  }
312 
313  if ((ptlen =
315  pctx->packet.octets.base + aead_off,
316  pctx->packet.octets.base + aead_off,
317  pctx->packet.octets.len - aead_off,
318  pn, pctx->packet.octets.base,
319  aead_off)) == SIZE_MAX)
320  {
321  fprintf (stderr,
322  "%s: aead decryption failure (pn: %d)\n", __FUNCTION__, pn);
323  return;
324  }
325 
326  pctx->packet.encrypted_off = aead_off;
327  pctx->packet.octets.len = ptlen + aead_off;
328 
329  pctx->packet.decrypted.pn = pn;
330  pctx->packet.decrypted.key_phase = qctx->key_phase_ingress;
331 }
332 
333 #ifdef QUIC_HP_CRYPTO
334 static void
335 quic_crypto_cipher_do_init (ptls_cipher_context_t * _ctx, const void *iv)
336 {
337  struct cipher_context_t *ctx = (struct cipher_context_t *) _ctx;
339  if (!strcmp (ctx->super.algo->name, "AES128-CTR"))
340  {
341  id = VNET_CRYPTO_OP_AES_128_CTR_ENC;
342  }
343  else if (!strcmp (ctx->super.algo->name, "AES256-CTR"))
344  {
345  id = VNET_CRYPTO_OP_AES_256_CTR_ENC;
346  }
347  else
348  {
349  QUIC_DBG (1, "%s, Invalid crypto cipher : ", __FUNCTION__,
350  _ctx->algo->name);
351  assert (0);
352  }
353  vnet_crypto_op_init (&ctx->op, id);
354  ctx->op.iv = (u8 *) iv;
355  ctx->op.key_index = ctx->key_index;
356 }
357 
/* ptls dispose hook: nothing to free here — the vnet crypto key outlives
 * the cipher context. */
static void
quic_crypto_cipher_dispose (ptls_cipher_context_t * _ctx)
{
  /* Do nothing */
}
363 
364 static void
365 quic_crypto_cipher_encrypt (ptls_cipher_context_t * _ctx, void *output,
366  const void *input, size_t _len)
367 {
369  struct cipher_context_t *ctx = (struct cipher_context_t *) _ctx;
370 
371  ctx->op.src = (u8 *) input;
372  ctx->op.dst = output;
373  ctx->op.len = _len;
374 
375  vnet_crypto_process_ops (vm, &ctx->op, 1);
376 }
377 
378 static int
379 quic_crypto_cipher_setup_crypto (ptls_cipher_context_t * _ctx, int is_enc,
380  const void *key, const EVP_CIPHER * cipher,
381  quicly_do_transform_fn do_transform)
382 {
383  struct cipher_context_t *ctx = (struct cipher_context_t *) _ctx;
384 
385  ctx->super.do_dispose = quic_crypto_cipher_dispose;
386  ctx->super.do_init = quic_crypto_cipher_do_init;
387  ctx->super.do_transform = do_transform;
388 
390  vnet_crypto_alg_t algo;
391  if (!strcmp (ctx->super.algo->name, "AES128-CTR"))
392  {
393  algo = VNET_CRYPTO_ALG_AES_128_CTR;
394  }
395  else if (!strcmp (ctx->super.algo->name, "AES256-CTR"))
396  {
397  algo = VNET_CRYPTO_ALG_AES_256_CTR;
398  }
399  else
400  {
401  QUIC_DBG (1, "%s, Invalid crypto cipher : ", __FUNCTION__,
402  _ctx->algo->name);
403  assert (0);
404  }
405 
406  ctx->key_index = vnet_crypto_key_add (vm, algo,
407  (u8 *) key, _ctx->algo->key_size);
408 
409  return 0;
410 }
411 
/* AES128-CTR header-protection setup; always encrypt-direction (is_enc
 * ignored) since HP masking only ever encrypts the sample. */
static int
quic_crypto_aes128ctr_setup_crypto (ptls_cipher_context_t * ctx, int is_enc,
				    const void *key)
{
  return quic_crypto_cipher_setup_crypto (ctx, 1, key, EVP_aes_128_ctr (),
					  quic_crypto_cipher_encrypt);
}
419 
/* AES256-CTR header-protection setup; always encrypt-direction (is_enc
 * ignored) since HP masking only ever encrypts the sample. */
static int
quic_crypto_aes256ctr_setup_crypto (ptls_cipher_context_t * ctx, int is_enc,
				    const void *key)
{
  return quic_crypto_cipher_setup_crypto (ctx, 1, key, EVP_aes_256_ctr (),
					  quic_crypto_cipher_encrypt);
}
427 
428 #endif // QUIC_HP_CRYPTO
429 
430 void
431 quic_crypto_aead_encrypt_init (ptls_aead_context_t * _ctx, const void *iv,
432  const void *aad, size_t aadlen)
433 {
434  quic_main_t *qm = &quic_main;
435  u32 thread_index = vlib_get_thread_index ();
436 
437  struct aead_crypto_context_t *ctx = (struct aead_crypto_context_t *) _ctx;
438 
440  if (!strcmp (ctx->super.algo->name, "AES128-GCM"))
441  {
442  id = VNET_CRYPTO_OP_AES_128_GCM_ENC;
443  }
444  else if (!strcmp (ctx->super.algo->name, "AES256-GCM"))
445  {
446  id = VNET_CRYPTO_OP_AES_256_GCM_ENC;
447  }
448  else
449  {
450  assert (0);
451  }
452 
453  quic_crypto_batch_ctx_t *quic_crypto_batch_ctx =
454  &qm->wrk_ctx[thread_index].crypto_context_batch;
455 
456  vnet_crypto_op_t *vnet_op =
457  &quic_crypto_batch_ctx->aead_crypto_tx_packets_ops
458  [quic_crypto_batch_ctx->nb_tx_packets];
459  vnet_crypto_op_init (vnet_op, id);
460  vnet_op->aad = (u8 *) aad;
461  vnet_op->aad_len = aadlen;
462  vnet_op->iv = clib_mem_alloc (PTLS_MAX_IV_SIZE);
463  clib_memcpy (vnet_op->iv, iv, PTLS_MAX_IV_SIZE);
464  vnet_op->key_index = ctx->key_index;
465 }
466 
467 size_t
468 quic_crypto_aead_encrypt_update (ptls_aead_context_t * _ctx, void *output,
469  const void *input, size_t inlen)
470 {
471  struct aead_crypto_context_t *ctx = (struct aead_crypto_context_t *) _ctx;
472 
473  quic_main_t *qm = &quic_main;
474  u32 thread_index = vlib_get_thread_index ();
475  quic_crypto_batch_ctx_t *quic_crypto_batch_ctx =
476  &qm->wrk_ctx[thread_index].crypto_context_batch;
477 
478  vnet_crypto_op_t *vnet_op =
479  &quic_crypto_batch_ctx->aead_crypto_tx_packets_ops
480  [quic_crypto_batch_ctx->nb_tx_packets];
481  vnet_op->src = (u8 *) input;
482  vnet_op->dst = output;
483  vnet_op->len = inlen;
484  vnet_op->tag_len = ctx->super.algo->tag_size;
485 
486  vnet_op->tag = vnet_op->src + inlen;
487 
488  return 0;
489 }
490 
491 size_t
492 quic_crypto_aead_encrypt_final (ptls_aead_context_t * _ctx, void *output)
493 {
494  quic_main_t *qm = &quic_main;
495  u32 thread_index = vlib_get_thread_index ();
496  quic_crypto_batch_ctx_t *quic_crypto_batch_ctx =
497  &qm->wrk_ctx[thread_index].crypto_context_batch;
498 
499  vnet_crypto_op_t *vnet_op =
500  &quic_crypto_batch_ctx->
501  aead_crypto_tx_packets_ops[quic_crypto_batch_ctx->nb_tx_packets];
502  quic_crypto_batch_ctx->nb_tx_packets++;
503  return vnet_op->len + vnet_op->tag_len;
504 }
505 
506 size_t
507 quic_crypto_aead_decrypt (ptls_aead_context_t * _ctx, void *_output,
508  const void *input, size_t inlen, const void *iv,
509  const void *aad, size_t aadlen)
510 {
512  struct aead_crypto_context_t *ctx = (struct aead_crypto_context_t *) _ctx;
513 
515  if (!strcmp (ctx->super.algo->name, "AES128-GCM"))
516  {
517  id = VNET_CRYPTO_OP_AES_128_GCM_DEC;
518  }
519  else if (!strcmp (ctx->super.algo->name, "AES256-GCM"))
520  {
521  id = VNET_CRYPTO_OP_AES_256_GCM_DEC;
522  }
523  else
524  {
525  assert (0);
526  }
527 
528  vnet_crypto_op_init (&ctx->op, id);
529  ctx->op.aad = (u8 *) aad;
530  ctx->op.aad_len = aadlen;
531  ctx->op.iv = (u8 *) iv;
532 
533  ctx->op.src = (u8 *) input;
534  ctx->op.dst = _output;
535  ctx->op.key_index = ctx->key_index;
536  ctx->op.len = inlen - ctx->super.algo->tag_size;
537 
538  ctx->op.tag_len = ctx->super.algo->tag_size;
539  ctx->op.tag = ctx->op.src + ctx->op.len;
540 
541  vnet_crypto_process_ops (vm, &ctx->op, 1);
542 
543  if (ctx->op.status != VNET_CRYPTO_OP_STATUS_COMPLETED)
544  return SIZE_MAX;
545 
546  return ctx->op.len;
547 }
548 
549 static size_t
551  ptls_aead_context_t * _ctx, void *_output,
552  const void *input, size_t inlen,
553  uint64_t decrypted_pn, const void *aad,
554  size_t aadlen)
555 {
556  struct aead_crypto_context_t *ctx = (struct aead_crypto_context_t *) _ctx;
558  if (!strcmp (ctx->super.algo->name, "AES128-GCM"))
559  {
560  id = VNET_CRYPTO_OP_AES_128_GCM_DEC;
561  }
562  else if (!strcmp (ctx->super.algo->name, "AES256-GCM"))
563  {
564  id = VNET_CRYPTO_OP_AES_256_GCM_DEC;
565  }
566  else
567  {
568  return SIZE_MAX;
569  }
570 
571  quic_main_t *qm = &quic_main;
572  quic_crypto_batch_ctx_t *quic_crypto_batch_ctx =
573  &qm->wrk_ctx[qctx->c_thread_index].crypto_context_batch;
574 
575  vnet_crypto_op_t *vnet_op =
576  &quic_crypto_batch_ctx->aead_crypto_rx_packets_ops
577  [quic_crypto_batch_ctx->nb_rx_packets];
578 
579  vnet_crypto_op_init (vnet_op, id);
580  vnet_op->aad = (u8 *) aad;
581  vnet_op->aad_len = aadlen;
582  vnet_op->iv = clib_mem_alloc (PTLS_MAX_IV_SIZE);
583  build_iv (_ctx, vnet_op->iv, decrypted_pn);
584  vnet_op->src = (u8 *) input;
585  vnet_op->dst = _output;
586  vnet_op->key_index = ctx->key_index;
587  vnet_op->len = inlen - ctx->super.algo->tag_size;
588  vnet_op->tag_len = ctx->super.algo->tag_size;
589  vnet_op->tag = vnet_op->src + vnet_op->len;
590  quic_crypto_batch_ctx->nb_rx_packets++;
591  return vnet_op->len;
592 }
593 
/* ptls dispose hook: nothing to free — ops live in the per-thread batch
 * and the vnet crypto key outlives the AEAD context. */
static void
quic_crypto_aead_dispose_crypto (ptls_aead_context_t * _ctx)
{

}
599 
600 static int
601 quic_crypto_aead_setup_crypto (ptls_aead_context_t * _ctx, int is_enc,
602  const void *key, const EVP_CIPHER * cipher)
603 {
605  struct aead_crypto_context_t *ctx = (struct aead_crypto_context_t *) _ctx;
606 
607  vnet_crypto_alg_t algo;
608  if (!strcmp (ctx->super.algo->name, "AES128-GCM"))
609  {
610  algo = VNET_CRYPTO_ALG_AES_128_GCM;
611  }
612  else if (!strcmp (ctx->super.algo->name, "AES256-GCM"))
613  {
614  algo = VNET_CRYPTO_ALG_AES_256_GCM;
615  }
616  else
617  {
618  QUIC_DBG (1, "%s, invalied aead cipher %s", __FUNCTION__,
619  _ctx->algo->name);
620  assert (0);
621  }
622 
623  if (quic_main.vnet_crypto_enabled)
624  {
625  ctx->super.do_decrypt = quic_crypto_aead_decrypt;
626 
627  ctx->super.do_encrypt_init = quic_crypto_aead_encrypt_init;
628  ctx->super.do_encrypt_update = quic_crypto_aead_encrypt_update;
629  ctx->super.do_encrypt_final = quic_crypto_aead_encrypt_final;
630  ctx->super.dispose_crypto = quic_crypto_aead_dispose_crypto;
631 
633  ctx->key_index = vnet_crypto_key_add (vm, algo,
634  (u8 *) key, _ctx->algo->key_size);
636  }
637  else
638  {
639  if (!strcmp (ctx->super.algo->name, "AES128-GCM"))
640  ptls_openssl_aes128gcm.setup_crypto (_ctx, is_enc, key);
641  else if (!strcmp (ctx->super.algo->name, "AES256-GCM"))
642  ptls_openssl_aes256gcm.setup_crypto (_ctx, is_enc, key);
643  }
644 
645  return 0;
646 }
647 
/* setup_crypto entry for the AES128-GCM AEAD algorithm table below. */
static int
quic_crypto_aead_aes128gcm_setup_crypto (ptls_aead_context_t * ctx,
					 int is_enc, const void *key)
{
  return quic_crypto_aead_setup_crypto (ctx, is_enc, key, EVP_aes_128_gcm ());
}
654 
/* setup_crypto entry for the AES256-GCM AEAD algorithm table below. */
static int
quic_crypto_aead_aes256gcm_setup_crypto (ptls_aead_context_t * ctx,
					 int is_enc, const void *key)
{
  return quic_crypto_aead_setup_crypto (ctx, is_enc, key, EVP_aes_256_gcm ());
}
661 
662 #ifdef QUIC_HP_CRYPTO
663 ptls_cipher_algorithm_t quic_crypto_aes128ctr = {
664  "AES128-CTR",
665  PTLS_AES128_KEY_SIZE,
666  1, PTLS_AES_IV_SIZE,
667  sizeof (struct cipher_context_t), aes128ctr_setup_crypto
668 };
669 
670 ptls_cipher_algorithm_t quic_crypto_aes256ctr = {
671  "AES256-CTR", PTLS_AES256_KEY_SIZE, 1 /* block size */ ,
672  PTLS_AES_IV_SIZE, sizeof (struct cipher_context_t), aes256ctr_setup_crypto
673 };
674 #endif
675 
/* AES128-GCM AEAD backed by vnet crypto (quic_crypto_aead_aes128gcm_setup_crypto);
 * header protection uses the openssl CTR cipher unless QUIC_HP_CRYPTO is set. */
ptls_aead_algorithm_t quic_crypto_aes128gcm = {
  "AES128-GCM",
#ifdef QUIC_HP_CRYPTO
  &quic_crypto_aes128ctr,
#else
  &ptls_openssl_aes128ctr,
#endif
  &ptls_openssl_aes128ecb,
  PTLS_AES128_KEY_SIZE,
  PTLS_AESGCM_IV_SIZE,
  PTLS_AESGCM_TAG_SIZE,
  sizeof (struct aead_crypto_context_t),
  quic_crypto_aead_aes128gcm_setup_crypto
};
690 
/* AES256-GCM AEAD backed by vnet crypto (quic_crypto_aead_aes256gcm_setup_crypto);
 * header protection uses the openssl CTR cipher unless QUIC_HP_CRYPTO is set. */
ptls_aead_algorithm_t quic_crypto_aes256gcm = {
  "AES256-GCM",
#ifdef QUIC_HP_CRYPTO
  &quic_crypto_aes256ctr,
#else
  &ptls_openssl_aes256ctr,
#endif
  &ptls_openssl_aes256ecb,
  PTLS_AES256_KEY_SIZE,
  PTLS_AESGCM_IV_SIZE,
  PTLS_AESGCM_TAG_SIZE,
  sizeof (struct aead_crypto_context_t),
  quic_crypto_aead_aes256gcm_setup_crypto
};
705 
/* TLS 1.3 cipher suites pairing the vnet-backed AEADs with openssl hashes. */
ptls_cipher_suite_t quic_crypto_aes128gcmsha256 = {
  PTLS_CIPHER_SUITE_AES_128_GCM_SHA256,
  &quic_crypto_aes128gcm, &ptls_openssl_sha256
};

ptls_cipher_suite_t quic_crypto_aes256gcmsha384 = {
  PTLS_CIPHER_SUITE_AES_256_GCM_SHA384,
  &quic_crypto_aes256gcm, &ptls_openssl_sha384
};
715 
716 ptls_cipher_suite_t *quic_crypto_cipher_suites[] = {
718 };
719 
/* quicly crypto-engine vtable: key derivation + deferred header protection. */
quicly_crypto_engine_t quic_crypto_engine = {
  quic_crypto_setup_cipher, quic_crypto_finalize_send_packet_cb
};
723 
724 int
725 quic_encrypt_ticket_cb (ptls_encrypt_ticket_t * _self, ptls_t * tls,
726  int is_encrypt, ptls_buffer_t * dst, ptls_iovec_t src)
727 {
728  quic_session_cache_t *self = (void *) _self;
729  int ret;
730 
731  if (is_encrypt)
732  {
733 
734  /* replace the cached entry along with a newly generated session id */
735  clib_mem_free (self->data.base);
736  if ((self->data.base = clib_mem_alloc (src.len)) == NULL)
737  return PTLS_ERROR_NO_MEMORY;
738 
739  ptls_get_context (tls)->random_bytes (self->id, sizeof (self->id));
740  clib_memcpy (self->data.base, src.base, src.len);
741  self->data.len = src.len;
742 
743  /* store the session id in buffer */
744  if ((ret = ptls_buffer_reserve (dst, sizeof (self->id))) != 0)
745  return ret;
746  clib_memcpy (dst->base + dst->off, self->id, sizeof (self->id));
747  dst->off += sizeof (self->id);
748 
749  }
750  else
751  {
752 
753  /* check if session id is the one stored in cache */
754  if (src.len != sizeof (self->id))
755  return PTLS_ERROR_SESSION_NOT_FOUND;
756  if (clib_memcmp (self->id, src.base, sizeof (self->id)) != 0)
757  return PTLS_ERROR_SESSION_NOT_FOUND;
758 
759  /* return the cached value */
760  if ((ret = ptls_buffer_reserve (dst, self->data.len)) != 0)
761  return ret;
762  clib_memcpy (dst->base + dst->off, self->data.base, self->data.len);
763  dst->off += self->data.len;
764  }
765 
766  return 0;
767 }
768 
769 /*
770  * fd.io coding-style-patch-verification: ON
771  *
772  * Local Variables:
773  * eval: (c-set-style "gnu")
774  * End:
775  */
u32 vnet_crypto_process_ops(vlib_main_t *vm, vnet_crypto_op_t ops[], u32 n_ops)
Definition: crypto.c:99
static void clib_rwlock_reader_lock(clib_rwlock_t *p)
Definition: lock.h:167
ptls_cipher_context_t super
Definition: quic_crypto.c:34
quic_worker_ctx_t * wrk_ctx
Definition: quic.h:263
static void clib_rwlock_writer_lock(clib_rwlock_t *p)
Definition: lock.h:190
int key_phase_ingress
Definition: quic.h:176
static void do_finalize_send_packet(ptls_cipher_context_t *hp, quicly_datagram_t *packet, size_t first_byte_at, size_t payload_from)
Definition: quic_crypto.c:112
struct quic_ctx_::@685 ingress_keys
ptls_cipher_context_t * hp_ctx
Definition: quic.h:173
size_t snd_ctx_count
Definition: quic.h:222
vl_api_address_t src
Definition: gre.api:54
vlib_main_t * vm
Definition: in2out_ed.c:1582
unsigned char u8
Definition: types.h:56
size_t quic_crypto_aead_decrypt(ptls_aead_context_t *_ctx, void *_output, const void *input, size_t inlen, const void *iv, const void *aad, size_t aadlen)
Definition: quic_crypto.c:507
#define QUIC_DBG(_lvl, _fmt, _args...)
Definition: quic.h:80
u8 id[64]
Definition: dhcp.api:160
size_t quic_crypto_aead_encrypt_update(ptls_aead_context_t *_ctx, void *output, const void *input, size_t inlen)
Definition: quic_crypto.c:468
#define clib_memcpy(d, s, n)
Definition: string.h:180
#define assert(x)
Definition: dlmalloc.c:31
clib_rwlock_t crypto_keys_quic_rw_lock
Definition: quic.h:281
static_always_inline void vnet_crypto_op_init(vnet_crypto_op_t *op, vnet_crypto_op_id_t type)
Definition: crypto.h:496
size_t quic_crypto_aead_encrypt_final(ptls_aead_context_t *_ctx, void *output)
Definition: quic_crypto.c:492
static int quic_crypto_aead_setup_crypto(ptls_aead_context_t *_ctx, int is_enc, const void *key, const EVP_CIPHER *cipher)
Definition: quic_crypto.c:601
static size_t quic_crypto_offload_aead_decrypt(quic_ctx_t *qctx, ptls_aead_context_t *_ctx, void *_output, const void *input, size_t inlen, uint64_t decrypted_pn, const void *aad, size_t aadlen)
Definition: quic_crypto.c:550
struct quic_encrypt_cb_ctx_::quic_finalize_send_packet_cb_ctx_ snd_ctx[QUIC_MAX_COALESCED_PACKET]
unsigned int u32
Definition: types.h:88
vnet_crypto_op_t op
Definition: quic_crypto.c:35
static int quic_crypto_aead_aes256gcm_setup_crypto(ptls_aead_context_t *ctx, int is_enc, const void *key)
Definition: quic_crypto.c:656
vnet_crypto_op_t aead_crypto_rx_packets_ops[QUIC_RCV_MAX_BATCH_PACKETS]
Definition: quic.h:227
u32 vnet_crypto_key_add(vlib_main_t *vm, vnet_crypto_alg_t alg, u8 *data, u16 length)
Definition: crypto.c:345
vnet_crypto_alg_t
Definition: crypto.h:124
quic_main_t quic_main
Definition: quic.c:46
static u8 iv[]
Definition: aes_cbc.c:24
ptls_cipher_suite_t quic_crypto_aes256gcmsha384
Definition: quic_crypto.c:711
static void clib_rwlock_reader_unlock(clib_rwlock_t *p)
Definition: lock.h:182
long ctx[MAX_CONNS]
Definition: main.c:144
void quic_crypto_batch_rx_packets(quic_crypto_batch_ctx_t *batch_ctx)
Definition: quic_crypto.c:75
void quic_crypto_aead_encrypt_init(ptls_aead_context_t *_ctx, const void *iv, const void *aad, size_t aadlen)
Definition: quic_crypto.c:431
static int quic_crypto_aead_aes128gcm_setup_crypto(ptls_aead_context_t *ctx, int is_enc, const void *key)
Definition: quic_crypto.c:649
vl_api_address_t dst
Definition: gre.api:55
static void clib_rwlock_writer_unlock(clib_rwlock_t *p)
Definition: lock.h:204
quicly_conn_t * conn
QUIC ctx case.
Definition: quic.h:146
static_always_inline uword vlib_get_thread_index(void)
Definition: threads.h:219
#define clib_memcmp(s1, s2, m1)
Definition: string.h:720
sll srl srl sll sra u16x4 i
Definition: vector_sse42.h:317
void(* quicly_do_transform_fn)(ptls_cipher_context_t *, void *, const void *, size_t)
Definition: quic_crypto.c:29
void quic_crypto_decrypt_packet(quic_ctx_t *qctx, quic_rx_packet_ctx_t *pctx)
Definition: quic_crypto.c:244
size_t nb_rx_packets
Definition: quic.h:229
ptls_cipher_suite_t quic_crypto_aes128gcmsha256
Definition: quic_crypto.c:706
u8 vnet_crypto_enabled
Definition: quic.h:279
static void clib_mem_free(void *p)
Definition: mem.h:215
#define QUICLY_EPOCH_1RTT
Definition: quic_crypto.c:24
static void * clib_mem_alloc(uword size)
Definition: mem.h:157
static vlib_main_t * vlib_get_main(void)
Definition: global_funcs.h:23
vnet_crypto_op_t aead_crypto_tx_packets_ops[QUIC_SEND_MAX_BATCH_PACKETS]
Definition: quic.h:227
typedef key
Definition: ipsec_types.api:85
static void quic_crypto_aead_dispose_crypto(ptls_aead_context_t *_ctx)
Definition: quic_crypto.c:595
void quic_crypto_batch_tx_packets(quic_crypto_batch_ctx_t *batch_ctx)
Definition: quic_crypto.c:56
ptls_aead_context_t * aead_ctx
Definition: quic.h:174
vnet_crypto_op_t op
Definition: quic_crypto.c:42
quicly_decoded_packet_t packet
Definition: quic.h:245
ptls_aead_algorithm_t quic_crypto_aes256gcm
Definition: quic_crypto.c:691
ptls_aead_context_t super
Definition: quic_crypto.c:41
void quic_crypto_finalize_send_packet_cb(struct st_quicly_crypto_engine_t *engine, quicly_conn_t *conn, ptls_cipher_context_t *hp, ptls_aead_context_t *aead, quicly_datagram_t *packet, size_t first_byte_at, size_t payload_from, int coalesced)
Definition: quic_crypto.c:224
vnet_crypto_op_status_t status
Definition: crypto.h:235
vnet_crypto_op_id_t
Definition: crypto.h:196
static int quic_crypto_setup_cipher(quicly_crypto_engine_t *engine, quicly_conn_t *conn, size_t epoch, int is_enc, ptls_cipher_context_t **hp_ctx, ptls_aead_context_t **aead_ctx, ptls_aead_algorithm_t *aead, ptls_hash_algorithm_t *hash, const void *secret)
Definition: quic_crypto.c:153
void build_iv(ptls_aead_context_t *ctx, uint8_t *iv, uint64_t seq)
Definition: quic_crypto.c:94
quic_crypto_batch_ctx_t crypto_context_batch
Definition: quic.h:240
void quic_crypto_finalize_send_packet(quicly_datagram_t *packet)
Definition: quic_crypto.c:137
int quic_encrypt_ticket_cb(ptls_encrypt_ticket_t *_self, ptls_t *tls, int is_encrypt, ptls_buffer_t *dst, ptls_iovec_t src)
Definition: quic_crypto.c:725
vnet_crypto_main_t crypto_main
Definition: crypto.c:20
ptls_aead_algorithm_t quic_crypto_aes128gcm
Definition: quic_crypto.c:676
quic_ctx_t * quic_get_conn_ctx(quicly_conn_t *conn)
Definition: quic.c:416
size_t nb_tx_packets
Definition: quic.h:229