FD.io VPP v21.01.1
Vector Packet Processing
main.c
/*
 *------------------------------------------------------------------
 * Copyright (c) 2019 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *------------------------------------------------------------------
 */

#include <openssl/evp.h>
#include <openssl/hmac.h>
#include <openssl/rand.h>

#include <vlib/vlib.h>
#include <vnet/plugin/plugin.h>
#include <vnet/crypto/crypto.h>
#include <vpp/app/version.h>

typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  EVP_CIPHER_CTX *evp_cipher_ctx;
  HMAC_CTX *hmac_ctx;
#if OPENSSL_VERSION_NUMBER < 0x10100000L
  HMAC_CTX _hmac_ctx;
#endif
} openssl_per_thread_data_t;

static openssl_per_thread_data_t *per_thread_data = 0;

/* Each row maps a VNET crypto op to an OpenSSL EVP cipher; the first
   column selects the handler template (CTR modes reuse the plain-EVP
   "cbc" template). */
#define foreach_openssl_aes_evp_op \
  _(cbc, DES_CBC, EVP_des_cbc) \
  _(cbc, 3DES_CBC, EVP_des_ede3_cbc) \
  _(cbc, AES_128_CBC, EVP_aes_128_cbc) \
  _(cbc, AES_192_CBC, EVP_aes_192_cbc) \
  _(cbc, AES_256_CBC, EVP_aes_256_cbc) \
  _(gcm, AES_128_GCM, EVP_aes_128_gcm) \
  _(gcm, AES_192_GCM, EVP_aes_192_gcm) \
  _(gcm, AES_256_GCM, EVP_aes_256_gcm) \
  _(cbc, AES_128_CTR, EVP_aes_128_ctr) \
  _(cbc, AES_192_CTR, EVP_aes_192_ctr) \
  _(cbc, AES_256_CTR, EVP_aes_256_ctr) \

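/* Illustration (not part of the original source): with the handler
 * template `_' defined further below, each row of the list above expands
 * into a family of functions; e.g. the AES_128_CBC row yields roughly:
 *
 *   static u32
 *   openssl_ops_enc_AES_128_CBC (vlib_main_t * vm,
 *                                vnet_crypto_op_t * ops[], u32 n_ops)
 *   { return openssl_ops_enc_cbc (vm, ops, 0, n_ops, EVP_aes_128_cbc ()); }
 *
 * plus matching _dec_, _enc_chained_ and _dec_chained_ variants. */
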
#define foreach_openssl_chacha20_evp_op \
  _(chacha20_poly1305, CHACHA20_POLY1305, EVP_chacha20_poly1305) \

#if OPENSSL_VERSION_NUMBER >= 0x10100000L
#define foreach_openssl_evp_op foreach_openssl_aes_evp_op \
  foreach_openssl_chacha20_evp_op
#else
#define foreach_openssl_evp_op foreach_openssl_aes_evp_op
#endif

#ifndef EVP_CTRL_AEAD_GET_TAG
#define EVP_CTRL_AEAD_GET_TAG EVP_CTRL_GCM_GET_TAG
#endif

#ifndef EVP_CTRL_AEAD_SET_TAG
#define EVP_CTRL_AEAD_SET_TAG EVP_CTRL_GCM_SET_TAG
#endif

#define foreach_openssl_hmac_op \
  _(MD5, EVP_md5) \
  _(SHA1, EVP_sha1) \
  _(SHA224, EVP_sha224) \
  _(SHA256, EVP_sha256) \
  _(SHA384, EVP_sha384) \
  _(SHA512, EVP_sha512)

static_always_inline u32
openssl_ops_enc_cbc (vlib_main_t * vm, vnet_crypto_op_t * ops[],
                     vnet_crypto_op_chunk_t * chunks, u32 n_ops,
                     const EVP_CIPHER * cipher)
{
  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
                                                     vm->thread_index);
  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
  vnet_crypto_op_chunk_t *chp;
  u32 i, j, curr_len = 0;
  u8 out_buf[VLIB_BUFFER_DEFAULT_DATA_SIZE * 5];

  for (i = 0; i < n_ops; i++)
    {
      vnet_crypto_op_t *op = ops[i];
      vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
      int out_len = 0;
      int iv_len;

      if (op->op == VNET_CRYPTO_OP_3DES_CBC_ENC
          || op->op == VNET_CRYPTO_OP_DES_CBC_ENC)
        iv_len = 8;
      else
        iv_len = 16;

      if (op->flags & VNET_CRYPTO_OP_FLAG_INIT_IV)
        RAND_bytes (op->iv, iv_len);

      EVP_EncryptInit_ex (ctx, cipher, NULL, key->data, op->iv);

      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
        EVP_CIPHER_CTX_set_padding (ctx, 0);

      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
        {
          /* encrypt into the contiguous staging buffer, then scatter
             the ciphertext back into the chunk list */
          chp = chunks + op->chunk_index;
          u32 offset = 0;
          for (j = 0; j < op->n_chunks; j++)
            {
              EVP_EncryptUpdate (ctx, out_buf + offset, &out_len, chp->src,
                                 chp->len);
              curr_len = chp->len;
              offset += out_len;
              chp += 1;
            }
          if (out_len < curr_len)
            EVP_EncryptFinal_ex (ctx, out_buf + offset, &out_len);

          offset = 0;
          chp = chunks + op->chunk_index;
          for (j = 0; j < op->n_chunks; j++)
            {
              clib_memcpy_fast (chp->dst, out_buf + offset, chp->len);
              offset += chp->len;
              chp += 1;
            }
        }
      else
        {
          EVP_EncryptUpdate (ctx, op->dst, &out_len, op->src, op->len);
          if (out_len < op->len)
            EVP_EncryptFinal_ex (ctx, op->dst + out_len, &out_len);
        }
      op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
    }
  return n_ops;
}
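
/* A minimal caller-side sketch (not part of the original source): it
 * assumes vnet_crypto_op_init () from <vnet/crypto/crypto.h> and uses the
 * vnet_crypto_op_t fields referenced above.  Kept under #if 0 since it is
 * purely illustrative. */
#if 0
static void
example_prepare_aes_cbc_enc_op (vnet_crypto_op_t * op, u32 key_index,
                                u8 * src, u8 * dst, u32 len, u8 * iv16)
{
  vnet_crypto_op_init (op, VNET_CRYPTO_OP_AES_128_CBC_ENC);
  op->key_index = key_index;    /* looked up via vnet_crypto_get_key () */
  op->src = src;
  op->dst = dst;
  op->len = len;                /* CBC expects block-aligned input */
  op->iv = iv16;                /* 16-byte IV buffer for AES */
  op->flags = VNET_CRYPTO_OP_FLAG_INIT_IV;      /* engine randomizes the IV */
}
#endif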

static_always_inline u32
openssl_ops_dec_cbc (vlib_main_t * vm, vnet_crypto_op_t * ops[],
                     vnet_crypto_op_chunk_t * chunks, u32 n_ops,
                     const EVP_CIPHER * cipher)
{
  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
                                                     vm->thread_index);
  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
  vnet_crypto_op_chunk_t *chp;
  u32 i, j, curr_len = 0;
  u8 out_buf[VLIB_BUFFER_DEFAULT_DATA_SIZE * 5];

  for (i = 0; i < n_ops; i++)
    {
      vnet_crypto_op_t *op = ops[i];
      vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
      int out_len = 0;

      EVP_DecryptInit_ex (ctx, cipher, NULL, key->data, op->iv);

      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
        EVP_CIPHER_CTX_set_padding (ctx, 0);

      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
        {
          chp = chunks + op->chunk_index;
          u32 offset = 0;
          for (j = 0; j < op->n_chunks; j++)
            {
              EVP_DecryptUpdate (ctx, out_buf + offset, &out_len, chp->src,
                                 chp->len);
              curr_len = chp->len;
              offset += out_len;
              chp += 1;
            }
          if (out_len < curr_len)
            EVP_DecryptFinal_ex (ctx, out_buf + offset, &out_len);

          offset = 0;
          chp = chunks + op->chunk_index;
          for (j = 0; j < op->n_chunks; j++)
            {
              clib_memcpy_fast (chp->dst, out_buf + offset, chp->len);
              offset += chp->len;
              chp += 1;
            }
        }
      else
        {
          EVP_DecryptUpdate (ctx, op->dst, &out_len, op->src, op->len);
          if (out_len < op->len)
            EVP_DecryptFinal_ex (ctx, op->dst + out_len, &out_len);
        }
      op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
    }
  return n_ops;
}

static_always_inline u32
openssl_ops_enc_aead (vlib_main_t * vm, vnet_crypto_op_t * ops[],
                      vnet_crypto_op_chunk_t * chunks, u32 n_ops,
                      const EVP_CIPHER * cipher, int is_gcm)
{
  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
                                                     vm->thread_index);
  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
  vnet_crypto_op_chunk_t *chp;
  u32 i, j;
  for (i = 0; i < n_ops; i++)
    {
      vnet_crypto_op_t *op = ops[i];
      vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
      int len = 0;

      if (op->flags & VNET_CRYPTO_OP_FLAG_INIT_IV)
        RAND_bytes (op->iv, 8);

      EVP_EncryptInit_ex (ctx, cipher, 0, 0, 0);
      if (is_gcm)
        EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_SET_IVLEN, 12, NULL);
      EVP_EncryptInit_ex (ctx, 0, 0, key->data, op->iv);
      if (op->aad_len)
        EVP_EncryptUpdate (ctx, NULL, &len, op->aad, op->aad_len);
      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
        {
          chp = chunks + op->chunk_index;
          for (j = 0; j < op->n_chunks; j++)
            {
              EVP_EncryptUpdate (ctx, chp->dst, &len, chp->src, chp->len);
              chp += 1;
            }
        }
      else
        EVP_EncryptUpdate (ctx, op->dst, &len, op->src, op->len);
      EVP_EncryptFinal_ex (ctx, op->dst + len, &len);
      EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_AEAD_GET_TAG, op->tag_len, op->tag);
      op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
    }
  return n_ops;
}
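
/* For reference (not part of the original source): the AEAD encrypt path
 * above follows the standard OpenSSL EVP call sequence -- init the cipher,
 * set a 12-byte IV length for GCM, re-init with key/IV, feed AAD with a
 * NULL output pointer, encrypt the payload, finalize, then extract the tag
 * via EVP_CTRL_AEAD_GET_TAG.  chacha20-poly1305 skips the SET_IVLEN step
 * because its EVP implementation already defaults to a 12-byte nonce. */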

static_always_inline u32
openssl_ops_enc_gcm (vlib_main_t * vm, vnet_crypto_op_t * ops[],
                     vnet_crypto_op_chunk_t * chunks, u32 n_ops,
                     const EVP_CIPHER * cipher)
{
  return openssl_ops_enc_aead (vm, ops, chunks, n_ops, cipher,
                               /* is_gcm */ 1);
}

static_always_inline __clib_unused u32
openssl_ops_enc_chacha20_poly1305 (vlib_main_t * vm, vnet_crypto_op_t * ops[],
                                   vnet_crypto_op_chunk_t * chunks, u32 n_ops,
                                   const EVP_CIPHER * cipher)
{
  return openssl_ops_enc_aead (vm, ops, chunks, n_ops, cipher,
                               /* is_gcm */ 0);
}

static_always_inline u32
openssl_ops_dec_aead (vlib_main_t * vm, vnet_crypto_op_t * ops[],
                      vnet_crypto_op_chunk_t * chunks, u32 n_ops,
                      const EVP_CIPHER * cipher, int is_gcm)
{
  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
                                                     vm->thread_index);
  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
  vnet_crypto_op_chunk_t *chp;
  u32 i, j, n_fail = 0;
  for (i = 0; i < n_ops; i++)
    {
      vnet_crypto_op_t *op = ops[i];
      vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
      int len = 0;

      EVP_DecryptInit_ex (ctx, cipher, 0, 0, 0);
      if (is_gcm)
        EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_SET_IVLEN, 12, 0);
      EVP_DecryptInit_ex (ctx, 0, 0, key->data, op->iv);
      if (op->aad_len)
        EVP_DecryptUpdate (ctx, 0, &len, op->aad, op->aad_len);
      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
        {
          chp = chunks + op->chunk_index;
          for (j = 0; j < op->n_chunks; j++)
            {
              EVP_DecryptUpdate (ctx, chp->dst, &len, chp->src, chp->len);
              chp += 1;
            }
        }
      else
        EVP_DecryptUpdate (ctx, op->dst, &len, op->src, op->len);
      EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_AEAD_SET_TAG, op->tag_len, op->tag);

      if (EVP_DecryptFinal_ex (ctx, op->dst + len, &len) > 0)
        op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
      else
        {
          n_fail++;
          op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
        }
    }
  return n_ops - n_fail;
}
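
/* Note (not part of the original source): unlike the encrypt path, decrypt
 * installs the expected tag with EVP_CTRL_AEAD_SET_TAG before
 * EVP_DecryptFinal_ex (), which then performs the authentication check.
 * Ops that fail are marked VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC and the
 * return value drops to n_ops - n_fail, so callers can detect failures by
 * comparing it against n_ops. */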

static_always_inline u32
openssl_ops_dec_gcm (vlib_main_t * vm, vnet_crypto_op_t * ops[],
                     vnet_crypto_op_chunk_t * chunks, u32 n_ops,
                     const EVP_CIPHER * cipher)
{
  return openssl_ops_dec_aead (vm, ops, chunks, n_ops, cipher,
                               /* is_gcm */ 1);
}

static_always_inline __clib_unused u32
openssl_ops_dec_chacha20_poly1305 (vlib_main_t * vm, vnet_crypto_op_t * ops[],
                                   vnet_crypto_op_chunk_t * chunks, u32 n_ops,
                                   const EVP_CIPHER * cipher)
{
  return openssl_ops_dec_aead (vm, ops, chunks, n_ops, cipher,
                               /* is_gcm */ 0);
}

static_always_inline u32
openssl_ops_hmac (vlib_main_t * vm, vnet_crypto_op_t * ops[],
                  vnet_crypto_op_chunk_t * chunks, u32 n_ops,
                  const EVP_MD * md)
{
  u8 buffer[64];
  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
                                                     vm->thread_index);
  HMAC_CTX *ctx = ptd->hmac_ctx;
  vnet_crypto_op_chunk_t *chp;
  u32 i, j, n_fail = 0;
  for (i = 0; i < n_ops; i++)
    {
      vnet_crypto_op_t *op = ops[i];
      vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
      unsigned int out_len = 0;
      size_t sz = op->digest_len ? op->digest_len : EVP_MD_size (md);

      HMAC_Init_ex (ctx, key->data, vec_len (key->data), md, NULL);
      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
        {
          chp = chunks + op->chunk_index;
          for (j = 0; j < op->n_chunks; j++)
            {
              HMAC_Update (ctx, chp->src, chp->len);
              chp += 1;
            }
        }
      else
        HMAC_Update (ctx, op->src, op->len);
      HMAC_Final (ctx, buffer, &out_len);

      if (op->flags & VNET_CRYPTO_OP_FLAG_HMAC_CHECK)
        {
          if ((memcmp (op->digest, buffer, sz)))
            {
              n_fail++;
              op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
              continue;
            }
        }
      else
        clib_memcpy_fast (op->digest, buffer, sz);
      op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
    }
  return n_ops - n_fail;
}
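
/* Example (not part of the original source): op->digest_len enables
 * truncated HMACs.  For HMAC-SHA1, EVP_MD_size () is 20 bytes, but e.g.
 * IPsec's hmac-sha1-96 sets digest_len = 12, so only the first 12 bytes
 * of the computed digest are copied out or compared above. */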

#define _(m, a, b) \
static u32 \
openssl_ops_enc_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops) \
{ return openssl_ops_enc_##m (vm, ops, 0, n_ops, b ()); } \
\
u32 \
openssl_ops_dec_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops) \
{ return openssl_ops_dec_##m (vm, ops, 0, n_ops, b ()); } \
\
static u32 \
openssl_ops_enc_chained_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
                             vnet_crypto_op_chunk_t *chunks, u32 n_ops) \
{ return openssl_ops_enc_##m (vm, ops, chunks, n_ops, b ()); } \
\
static u32 \
openssl_ops_dec_chained_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
                             vnet_crypto_op_chunk_t *chunks, u32 n_ops) \
{ return openssl_ops_dec_##m (vm, ops, chunks, n_ops, b ()); }

foreach_openssl_evp_op;
#undef _

#define _(a, b) \
static u32 \
openssl_ops_hmac_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops) \
{ return openssl_ops_hmac (vm, ops, 0, n_ops, b ()); } \
static u32 \
openssl_ops_hmac_chained_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
                              vnet_crypto_op_chunk_t *chunks, u32 n_ops) \
{ return openssl_ops_hmac (vm, ops, chunks, n_ops, b ()); } \

foreach_openssl_hmac_op;
#undef _


clib_error_t *
crypto_openssl_init (vlib_main_t * vm)
{
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  openssl_per_thread_data_t *ptd;
  u8 *seed_data = 0;
  time_t t;
  pid_t pid;

  u32 eidx = vnet_crypto_register_engine (vm, "openssl", 50, "OpenSSL");

#define _(m, a, b) \
  vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_ENC, \
                                     openssl_ops_enc_##a, \
                                     openssl_ops_enc_chained_##a); \
  vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
                                     openssl_ops_dec_##a, \
                                     openssl_ops_dec_chained_##a); \

  foreach_openssl_evp_op;
#undef _

#define _(a, b) \
  vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_HMAC, \
                                     openssl_ops_hmac_##a, \
                                     openssl_ops_hmac_chained_##a); \

  foreach_openssl_hmac_op;
#undef _

  vec_validate_aligned (per_thread_data, tm->n_vlib_mains - 1,
                        CLIB_CACHE_LINE_BYTES);

  vec_foreach (ptd, per_thread_data)
  {
    ptd->evp_cipher_ctx = EVP_CIPHER_CTX_new ();
#if OPENSSL_VERSION_NUMBER >= 0x10100000L
    ptd->hmac_ctx = HMAC_CTX_new ();
#else
    HMAC_CTX_init (&(ptd->_hmac_ctx));
    ptd->hmac_ctx = &ptd->_hmac_ctx;
#endif
  }

  /* lightly seed OpenSSL's RNG from time-of-day and pid */
  t = time (NULL);
  pid = getpid ();
  vec_add (seed_data, &t, sizeof (t));
  vec_add (seed_data, &pid, sizeof (pid));
  vec_add (seed_data, seed_data, sizeof (seed_data));

  RAND_seed ((const void *) seed_data, vec_len (seed_data));

  vec_free (seed_data);

  return 0;
}
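
/* Usage note (not part of the original source): the engine registers as
 * "openssl" with priority 50.  vnet's crypto layer keeps the
 * highest-priority handler registered for each op, so a faster native
 * engine registered with a higher priority takes precedence where
 * available, leaving this engine as a broadly-compatible fallback for
 * every op in the two foreach_ lists above. */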

/* *INDENT-OFF* */
VLIB_INIT_FUNCTION (crypto_openssl_init) =
{
  .runs_after = VLIB_INITS ("vnet_crypto_init"),
};
/* *INDENT-ON* */


/* *INDENT-OFF* */
VLIB_PLUGIN_REGISTER () = {
  .version = VPP_BUILD_VER,
  .description = "OpenSSL Crypto Engine",
};
/* *INDENT-ON* */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */