/*
 * Copyright (c) 2019 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef included_sha2_h
#define included_sha2_h

#include <vppinfra/clib.h>

#define SHA224_DIGEST_SIZE 28
#define SHA224_BLOCK_SIZE 64

#define SHA256_DIGEST_SIZE 32
#define SHA256_BLOCK_SIZE 64
#define SHA256_ROTR(x, y) ((x >> y) | (x << (32 - y)))
#define SHA256_CH(a, b, c) ((a & b) ^ (~a & c))
#define SHA256_MAJ(a, b, c) ((a & b) ^ (a & c) ^ (b & c))
#define SHA256_CSIGMA0(x) (SHA256_ROTR(x, 2) ^  \
                           SHA256_ROTR(x, 13) ^ \
                           SHA256_ROTR(x, 22))
#define SHA256_CSIGMA1(x) (SHA256_ROTR(x, 6) ^  \
                           SHA256_ROTR(x, 11) ^ \
                           SHA256_ROTR(x, 25))
#define SHA256_SSIGMA0(x) (SHA256_ROTR (x, 7) ^  \
                           SHA256_ROTR (x, 18) ^ \
                           (x >> 3))
#define SHA256_SSIGMA1(x) (SHA256_ROTR (x, 17) ^ \
                           SHA256_ROTR (x, 19) ^ \
                           (x >> 10))

#define SHA256_MSG_SCHED(w, j) \
{ \
  w[j] = w[j - 7] + w[j - 16]; \
  w[j] += SHA256_SSIGMA0 (w[j - 15]); \
  w[j] += SHA256_SSIGMA1 (w[j - 2]); \
}

#define SHA256_TRANSFORM(s, w, i, k) \
{ \
  __typeof__(s[0]) t1, t2; \
  t1 = k + w[i] + s[7]; \
  t1 += SHA256_CSIGMA1 (s[4]); \
  t1 += SHA256_CH (s[4], s[5], s[6]); \
  t2 = SHA256_CSIGMA0 (s[0]); \
  t2 += SHA256_MAJ (s[0], s[1], s[2]); \
  s[7] = s[6]; \
  s[6] = s[5]; \
  s[5] = s[4]; \
  s[4] = s[3] + t1; \
  s[3] = s[2]; \
  s[2] = s[1]; \
  s[1] = s[0]; \
  s[0] = t1 + t2; \
}
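/*
 * Editorial note (not part of the original header): SHA256_MSG_SCHED and
 * SHA256_TRANSFORM implement the FIPS 180-4 message schedule and compression
 * round.  For j >= 16 the schedule word is
 *
 *   W[j] = SSIGMA1 (W[j - 2]) + W[j - 7] + SSIGMA0 (W[j - 15]) + W[j - 16]
 *
 * and each round computes
 *
 *   T1 = h + CSIGMA1 (e) + CH (e, f, g) + K[i] + W[i]
 *   T2 = CSIGMA0 (a) + MAJ (a, b, c)
 *
 * before rotating the working variables a..h, which live in s[0]..s[7].
 */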

#define SHA512_224_DIGEST_SIZE 28
#define SHA512_224_BLOCK_SIZE 128

#define SHA512_256_DIGEST_SIZE 32
#define SHA512_256_BLOCK_SIZE 128

#define SHA384_DIGEST_SIZE 48
#define SHA384_BLOCK_SIZE 128

#define SHA512_DIGEST_SIZE 64
#define SHA512_BLOCK_SIZE 128
#define SHA512_ROTR(x, y) ((x >> y) | (x << (64 - y)))
#define SHA512_CH(a, b, c) ((a & b) ^ (~a & c))
#define SHA512_MAJ(a, b, c) ((a & b) ^ (a & c) ^ (b & c))
#define SHA512_CSIGMA0(x) (SHA512_ROTR (x, 28) ^ \
                           SHA512_ROTR (x, 34) ^ \
                           SHA512_ROTR (x, 39))
#define SHA512_CSIGMA1(x) (SHA512_ROTR (x, 14) ^ \
                           SHA512_ROTR (x, 18) ^ \
                           SHA512_ROTR (x, 41))
#define SHA512_SSIGMA0(x) (SHA512_ROTR (x, 1) ^ \
                           SHA512_ROTR (x, 8) ^ \
                           (x >> 7))
#define SHA512_SSIGMA1(x) (SHA512_ROTR (x, 19) ^ \
                           SHA512_ROTR (x, 61) ^ \
                           (x >> 6))

#define SHA512_MSG_SCHED(w, j) \
{ \
  w[j] = w[j - 7] + w[j - 16]; \
  w[j] += SHA512_SSIGMA0 (w[j - 15]); \
  w[j] += SHA512_SSIGMA1 (w[j - 2]); \
}

#define SHA512_TRANSFORM(s, w, i, k) \
{ \
  __typeof__(s[0]) t1, t2; \
  t1 = k + w[i] + s[7]; \
  t1 += SHA512_CSIGMA1 (s[4]); \
  t1 += SHA512_CH (s[4], s[5], s[6]); \
  t2 = SHA512_CSIGMA0 (s[0]); \
  t2 += SHA512_MAJ (s[0], s[1], s[2]); \
  s[7] = s[6]; \
  s[6] = s[5]; \
  s[5] = s[4]; \
  s[4] = s[3] + t1; \
  s[3] = s[2]; \
  s[2] = s[1]; \
  s[1] = s[0]; \
  s[0] = t1 + t2; \
}
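/*
 * Editorial note (not part of the original header): SHA-384, SHA-512,
 * SHA-512/224 and SHA-512/256 all share this 64-bit, 80-round compression
 * function; they differ only in their initial hash values (below) and in how
 * many bytes of the final state are emitted as the digest.
 */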

static const u32 sha224_h[8] = {
  0xc1059ed8, 0x367cd507, 0x3070dd17, 0xf70e5939,
  0xffc00b31, 0x68581511, 0x64f98fa7, 0xbefa4fa4
};

static const u32 sha256_h[8] = {
  0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a,
  0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19
};

static const u32 sha256_k[64] = {
  0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5,
  0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5,
  0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3,
  0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174,
  0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc,
  0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
  0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7,
  0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967,
  0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13,
  0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85,
  0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3,
  0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
  0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5,
  0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,
  0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208,
  0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2
};

static const u64 sha384_h[8] = {
  0xcbbb9d5dc1059ed8, 0x629a292a367cd507,
  0x9159015a3070dd17, 0x152fecd8f70e5939,
  0x67332667ffc00b31, 0x8eb44a8768581511,
  0xdb0c2e0d64f98fa7, 0x47b5481dbefa4fa4
};

static const u64 sha512_h[8] = {
  0x6a09e667f3bcc908, 0xbb67ae8584caa73b,
  0x3c6ef372fe94f82b, 0xa54ff53a5f1d36f1,
  0x510e527fade682d1, 0x9b05688c2b3e6c1f,
  0x1f83d9abfb41bd6b, 0x5be0cd19137e2179
};

static const u64 sha512_224_h[8] = {
  0x8c3d37c819544da2, 0x73e1996689dcd4d6,
  0x1dfab7ae32ff9c82, 0x679dd514582f9fcf,
  0x0f6d2b697bd44da8, 0x77e36f7304c48942,
  0x3f9d85a86a1d36c8, 0x1112e6ad91d692a1
};

static const u64 sha512_256_h[8] = {
  0x22312194fc2bf72c, 0x9f555fa3c84c64c2,
  0x2393b86b6f53b151, 0x963877195940eabd,
  0x96283ee2a88effe3, 0xbe5e1e2553863992,
  0x2b0199fc2c85b8aa, 0x0eb72ddc81c52ca2
};

static const u64 sha512_k[80] = {
  0x428a2f98d728ae22, 0x7137449123ef65cd,
  0xb5c0fbcfec4d3b2f, 0xe9b5dba58189dbbc,
  0x3956c25bf348b538, 0x59f111f1b605d019,
  0x923f82a4af194f9b, 0xab1c5ed5da6d8118,
  0xd807aa98a3030242, 0x12835b0145706fbe,
  0x243185be4ee4b28c, 0x550c7dc3d5ffb4e2,
  0x72be5d74f27b896f, 0x80deb1fe3b1696b1,
  0x9bdc06a725c71235, 0xc19bf174cf692694,
  0xe49b69c19ef14ad2, 0xefbe4786384f25e3,
  0x0fc19dc68b8cd5b5, 0x240ca1cc77ac9c65,
  0x2de92c6f592b0275, 0x4a7484aa6ea6e483,
  0x5cb0a9dcbd41fbd4, 0x76f988da831153b5,
  0x983e5152ee66dfab, 0xa831c66d2db43210,
  0xb00327c898fb213f, 0xbf597fc7beef0ee4,
  0xc6e00bf33da88fc2, 0xd5a79147930aa725,
  0x06ca6351e003826f, 0x142929670a0e6e70,
  0x27b70a8546d22ffc, 0x2e1b21385c26c926,
  0x4d2c6dfc5ac42aed, 0x53380d139d95b3df,
  0x650a73548baf63de, 0x766a0abb3c77b2a8,
  0x81c2c92e47edaee6, 0x92722c851482353b,
  0xa2bfe8a14cf10364, 0xa81a664bbc423001,
  0xc24b8b70d0f89791, 0xc76c51a30654be30,
  0xd192e819d6ef5218, 0xd69906245565a910,
  0xf40e35855771202a, 0x106aa07032bbd1b8,
  0x19a4c116b8d2d0c8, 0x1e376c085141ab53,
  0x2748774cdf8eeb99, 0x34b0bcb5e19b48a8,
  0x391c0cb3c5c95a63, 0x4ed8aa4ae3418acb,
  0x5b9cca4f7763e373, 0x682e6ff3d6b2b8a3,
  0x748f82ee5defb2fc, 0x78a5636f43172f60,
  0x84c87814a1f0ab72, 0x8cc702081a6439ec,
  0x90befffa23631e28, 0xa4506cebde82bde9,
  0xbef9a3f7b2c67915, 0xc67178f2e372532b,
  0xca273eceea26619c, 0xd186b8c721c0c207,
  0xeada7dd6cde0eb1e, 0xf57d4f7fee6ed178,
  0x06f067aa72176fba, 0x0a637dc5a2c898a6,
  0x113f9804bef90dae, 0x1b710b35131c471b,
  0x28db77f523047d84, 0x32caab7b40c72493,
  0x3c9ebe0a15c9bebc, 0x431d67c49c100d4c,
  0x4cc5d4becb3e42b6, 0x597f299cfc657e2a,
  0x5fcb6fab3ad6faec, 0x6c44198c4a475817
};

typedef enum
{
  CLIB_SHA2_224,
  CLIB_SHA2_256,
  CLIB_SHA2_384,
  CLIB_SHA2_512,
  CLIB_SHA2_512_224,
  CLIB_SHA2_512_256
} clib_sha2_type_t;

#define SHA2_MAX_BLOCK_SIZE SHA512_BLOCK_SIZE
#define SHA2_MAX_DIGEST_SIZE SHA512_DIGEST_SIZE

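/*
 * Editorial note (not part of the original header): the context below holds
 * the running hash state (h32 for the SHA-224/256 family, h64 for the
 * SHA-384/512 family), the number of bytes processed so far, and a one-block
 * buffer for not-yet-processed input; block_size and digest_size are filled
 * in by clib_sha2_init () for the selected variant.
 */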
typedef struct
{
  u64 total_bytes;
  u16 n_pending;
  u8 block_size;
  u8 digest_size;
  union
  {
    u32 h32[8];
    u64 h64[8];
#if defined(__SHA__) && defined (__x86_64__)
    u32x4 h32x4[2];
#endif
  };
  union
  {
    u8 as_u8[SHA2_MAX_BLOCK_SIZE];
    u64 as_u64[SHA2_MAX_BLOCK_SIZE / sizeof (u64)];
    uword as_uword[SHA2_MAX_BLOCK_SIZE / sizeof (uword)];
  }
  pending;
}
clib_sha2_ctx_t;

static_always_inline void
clib_sha2_init (clib_sha2_ctx_t * ctx, clib_sha2_type_t type)
{
  const u32 *h32 = 0;
  const u64 *h64 = 0;

  ctx->total_bytes = 0;
  ctx->n_pending = 0;

  switch (type)
    {
    case CLIB_SHA2_224:
      h32 = sha224_h;
      ctx->block_size = SHA224_BLOCK_SIZE;
      ctx->digest_size = SHA224_DIGEST_SIZE;
      break;
    case CLIB_SHA2_256:
      h32 = sha256_h;
      ctx->block_size = SHA256_BLOCK_SIZE;
      ctx->digest_size = SHA256_DIGEST_SIZE;
      break;
    case CLIB_SHA2_384:
      h64 = sha384_h;
      ctx->block_size = SHA384_BLOCK_SIZE;
      ctx->digest_size = SHA384_DIGEST_SIZE;
      break;
    case CLIB_SHA2_512:
      h64 = sha512_h;
      ctx->block_size = SHA512_BLOCK_SIZE;
      ctx->digest_size = SHA512_DIGEST_SIZE;
      break;
    case CLIB_SHA2_512_224:
      h64 = sha512_224_h;
      ctx->block_size = SHA512_224_BLOCK_SIZE;
      ctx->digest_size = SHA512_224_DIGEST_SIZE;
      break;
    case CLIB_SHA2_512_256:
      h64 = sha512_256_h;
      ctx->block_size = SHA512_256_BLOCK_SIZE;
      ctx->digest_size = SHA512_256_DIGEST_SIZE;
      break;
    }
  if (h32)
    for (int i = 0; i < 8; i++)
      ctx->h32[i] = h32[i];

  if (h64)
    for (int i = 0; i < 8; i++)
      ctx->h64[i] = h64[i];
}

#if defined(__SHA__) && defined (__x86_64__)
static inline void
shani_sha256_cycle_w (u32x4 cw[], u8 a, u8 b, u8 c, u8 d)
{
  cw[a] = (u32x4) _mm_sha256msg1_epu32 ((__m128i) cw[a], (__m128i) cw[b]);
  cw[a] += (u32x4) _mm_alignr_epi8 ((__m128i) cw[d], (__m128i) cw[c], 4);
  cw[a] = (u32x4) _mm_sha256msg2_epu32 ((__m128i) cw[a], (__m128i) cw[d]);
}

static inline void
shani_sha256_4_rounds (u32x4 cw, u8 n, u32x4 s[])
{
  u32x4 r = *(u32x4 *) (sha256_k + 4 * n) + cw;
  s[0] = (u32x4) _mm_sha256rnds2_epu32 ((__m128i) s[0], (__m128i) s[1],
                                        (__m128i) r);
  r = (u32x4) u64x2_interleave_hi ((u64x2) r, (u64x2) r);
  s[1] = (u32x4) _mm_sha256rnds2_epu32 ((__m128i) s[1], (__m128i) s[0],
                                        (__m128i) r);
}

static inline void
shani_sha256_shuffle (u32x4 d[2], u32x4 s[2])
{
  /* {0, 1, 2, 3}, {4, 5, 6, 7} -> {7, 6, 3, 2}, {5, 4, 1, 0} */
  d[0] = (u32x4) _mm_shuffle_ps ((__m128) s[1], (__m128) s[0], 0xbb);
  d[1] = (u32x4) _mm_shuffle_ps ((__m128) s[1], (__m128) s[0], 0x11);
}
#endif

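/*
 * Editorial note (not part of the original header): when the x86 SHA
 * extensions are available, clib_sha256_block () below uses the helpers
 * above: _mm_sha256msg1_epu32 / _mm_sha256msg2_epu32 extend the message
 * schedule four words at a time, _mm_sha256rnds2_epu32 executes two rounds
 * per call (so shani_sha256_4_rounds covers four rounds), and
 * shani_sha256_shuffle converts between the natural word order of the hash
 * state and the lane layout the instructions expect.  Without SHA-NI the
 * function falls back to the scalar FIPS 180-4 rounds.
 */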
void
clib_sha256_block (clib_sha2_ctx_t * ctx, const u8 * msg, uword n_blocks)
{
#if defined(__SHA__) && defined (__x86_64__)
  u32x4 h[2], s[2], w[4];

  shani_sha256_shuffle (h, ctx->h32x4);

  while (n_blocks)
    {
      w[0] = u32x4_byte_swap (u32x4_load_unaligned ((u8 *) msg + 0));
      w[1] = u32x4_byte_swap (u32x4_load_unaligned ((u8 *) msg + 16));
      w[2] = u32x4_byte_swap (u32x4_load_unaligned ((u8 *) msg + 32));
      w[3] = u32x4_byte_swap (u32x4_load_unaligned ((u8 *) msg + 48));

      s[0] = h[0];
      s[1] = h[1];

      shani_sha256_4_rounds (w[0], 0, s);
      shani_sha256_4_rounds (w[1], 1, s);
      shani_sha256_4_rounds (w[2], 2, s);
      shani_sha256_4_rounds (w[3], 3, s);

      shani_sha256_cycle_w (w, 0, 1, 2, 3);
      shani_sha256_4_rounds (w[0], 4, s);
      shani_sha256_cycle_w (w, 1, 2, 3, 0);
      shani_sha256_4_rounds (w[1], 5, s);
      shani_sha256_cycle_w (w, 2, 3, 0, 1);
      shani_sha256_4_rounds (w[2], 6, s);
      shani_sha256_cycle_w (w, 3, 0, 1, 2);
      shani_sha256_4_rounds (w[3], 7, s);

      shani_sha256_cycle_w (w, 0, 1, 2, 3);
      shani_sha256_4_rounds (w[0], 8, s);
      shani_sha256_cycle_w (w, 1, 2, 3, 0);
      shani_sha256_4_rounds (w[1], 9, s);
      shani_sha256_cycle_w (w, 2, 3, 0, 1);
      shani_sha256_4_rounds (w[2], 10, s);
      shani_sha256_cycle_w (w, 3, 0, 1, 2);
      shani_sha256_4_rounds (w[3], 11, s);

      shani_sha256_cycle_w (w, 0, 1, 2, 3);
      shani_sha256_4_rounds (w[0], 12, s);
      shani_sha256_cycle_w (w, 1, 2, 3, 0);
      shani_sha256_4_rounds (w[1], 13, s);
      shani_sha256_cycle_w (w, 2, 3, 0, 1);
      shani_sha256_4_rounds (w[2], 14, s);
      shani_sha256_cycle_w (w, 3, 0, 1, 2);
      shani_sha256_4_rounds (w[3], 15, s);

      h[0] += s[0];
      h[1] += s[1];

      /* next */
      msg += SHA256_BLOCK_SIZE;
      n_blocks--;
    }

  shani_sha256_shuffle (ctx->h32x4, h);
#else
  u32 w[64], s[8], i;

  while (n_blocks)
    {
      for (i = 0; i < 8; i++)
        s[i] = ctx->h32[i];

      for (i = 0; i < 16; i++)
        {
          w[i] = clib_net_to_host_u32 (*((u32 *) msg + i));
          SHA256_TRANSFORM (s, w, i, sha256_k[i]);
        }

      for (i = 16; i < 64; i++)
        {
          SHA256_MSG_SCHED (w, i);
          SHA256_TRANSFORM (s, w, i, sha256_k[i]);
        }

      for (i = 0; i < 8; i++)
        ctx->h32[i] += s[i];

      /* next */
      msg += SHA256_BLOCK_SIZE;
      n_blocks--;
    }
#endif
}

static_always_inline void
clib_sha512_block (clib_sha2_ctx_t * ctx, const u8 * msg, uword n_blocks)
{
  u64 w[80], s[8], i;

  while (n_blocks)
    {
      for (i = 0; i < 8; i++)
        s[i] = ctx->h64[i];

      for (i = 0; i < 16; i++)
        {
          w[i] = clib_net_to_host_u64 (*((u64 *) msg + i));
          SHA512_TRANSFORM (s, w, i, sha512_k[i]);
        }

      for (i = 16; i < 80; i++)
        {
          SHA512_MSG_SCHED (w, i);
          SHA512_TRANSFORM (s, w, i, sha512_k[i]);
        }

      for (i = 0; i < 8; i++)
        ctx->h64[i] += s[i];

      /* next */
      msg += SHA512_BLOCK_SIZE;
      n_blocks--;
    }
}

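/*
 * Editorial note (not part of the original header): clib_sha2_update () may
 * be called repeatedly.  It tops up any partial block buffered in
 * ctx->pending, hands complete blocks to clib_sha256_block () or
 * clib_sha512_block (), and keeps the remaining tail for the next call or
 * for clib_sha2_final ().
 */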
static_always_inline void
clib_sha2_update (clib_sha2_ctx_t * ctx, const u8 * msg, uword n_bytes)
{
  uword n_blocks;
  if (ctx->n_pending)
    {
      uword n_left = ctx->block_size - ctx->n_pending;
      if (n_bytes < n_left)
        {
          clib_memcpy_fast (ctx->pending.as_u8 + ctx->n_pending, msg,
                            n_bytes);
          ctx->n_pending += n_bytes;
          return;
        }
      else
        {
          clib_memcpy_fast (ctx->pending.as_u8 + ctx->n_pending, msg, n_left);
          if (ctx->block_size == SHA512_BLOCK_SIZE)
            clib_sha512_block (ctx, ctx->pending.as_u8, 1);
          else
            clib_sha256_block (ctx, ctx->pending.as_u8, 1);
          ctx->n_pending = 0;
          ctx->total_bytes += ctx->block_size;
          n_bytes -= n_left;
          msg += n_left;
        }
    }

  if ((n_blocks = n_bytes / ctx->block_size))
    {
      if (ctx->block_size == SHA512_BLOCK_SIZE)
        clib_sha512_block (ctx, msg, n_blocks);
      else
        clib_sha256_block (ctx, msg, n_blocks);
      n_bytes -= n_blocks * ctx->block_size;
      msg += n_blocks * ctx->block_size;
      ctx->total_bytes += n_blocks * ctx->block_size;
    }

  if (n_bytes)
    {
      clib_memset_u8 (ctx->pending.as_u8, 0, ctx->block_size);
      clib_memcpy_fast (ctx->pending.as_u8, msg, n_bytes);
      ctx->n_pending = n_bytes;
    }
  else
    ctx->n_pending = 0;
}

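/*
 * Editorial note (not part of the original header): clib_sha2_final ()
 * applies the FIPS 180-4 padding (a 0x80 byte, zeros, then the 64-bit
 * big-endian message length in bits), processes the remaining block(s) and
 * writes the digest out in big-endian byte order.  For SHA-512/224 the
 * digest size is not a multiple of 8 bytes, so the last 4 bytes are taken
 * from the upper half of the next 64-bit state word.
 */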
static_always_inline void
clib_sha2_final (clib_sha2_ctx_t * ctx, u8 * digest)
{
  int i;

  ctx->total_bytes += ctx->n_pending;
  if (ctx->n_pending == 0)
    {
      clib_memset (ctx->pending.as_u8, 0, ctx->block_size);
      ctx->pending.as_u8[0] = 0x80;
    }
  else if (ctx->n_pending + sizeof (u64) + sizeof (u8) > ctx->block_size)
    {
      ctx->pending.as_u8[ctx->n_pending] = 0x80;
      if (ctx->block_size == SHA512_BLOCK_SIZE)
        clib_sha512_block (ctx, ctx->pending.as_u8, 1);
      else
        clib_sha256_block (ctx, ctx->pending.as_u8, 1);
      clib_memset (ctx->pending.as_u8, 0, ctx->block_size);
    }
  else
    ctx->pending.as_u8[ctx->n_pending] = 0x80;

  ctx->pending.as_u64[ctx->block_size / 8 - 1] =
    clib_net_to_host_u64 (ctx->total_bytes * 8);
  if (ctx->block_size == SHA512_BLOCK_SIZE)
    clib_sha512_block (ctx, ctx->pending.as_u8, 1);
  else
    clib_sha256_block (ctx, ctx->pending.as_u8, 1);

  if (ctx->block_size == SHA512_BLOCK_SIZE)
    {
      for (i = 0; i < ctx->digest_size / sizeof (u64); i++)
        *((u64 *) digest + i) = clib_net_to_host_u64 (ctx->h64[i]);

      /* sha512-224 case - write half of u64 */
      if (i * sizeof (u64) < ctx->digest_size)
        *((u32 *) digest + 2 * i) = clib_net_to_host_u32 (ctx->h64[i] >> 32);
    }
  else
    for (i = 0; i < ctx->digest_size / sizeof (u32); i++)
      *((u32 *) digest + i) = clib_net_to_host_u32 (ctx->h32[i]);
}

static_always_inline void
clib_sha2 (clib_sha2_type_t type, const u8 * msg, uword len, u8 * digest)
{
  clib_sha2_ctx_t ctx;
  clib_sha2_init (&ctx, type);
  clib_sha2_update (&ctx, msg, len);
  clib_sha2_final (&ctx, digest);
}

#define clib_sha224(...) clib_sha2 (CLIB_SHA2_224, __VA_ARGS__)
#define clib_sha256(...) clib_sha2 (CLIB_SHA2_256, __VA_ARGS__)
#define clib_sha384(...) clib_sha2 (CLIB_SHA2_384, __VA_ARGS__)
#define clib_sha512(...) clib_sha2 (CLIB_SHA2_512, __VA_ARGS__)
#define clib_sha512_224(...) clib_sha2 (CLIB_SHA2_512_224, __VA_ARGS__)
#define clib_sha512_256(...) clib_sha2 (CLIB_SHA2_512_256, __VA_ARGS__)
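/*
 * Example (editorial sketch, not part of the original header): the one-shot
 * wrappers above expand to clib_sha2 (type, msg, len, digest), so
 *
 *   u8 digest[SHA256_DIGEST_SIZE];
 *   clib_sha256 ((u8 *) "abc", 3, digest);
 *
 * is equivalent to
 *
 *   clib_sha2_ctx_t ctx;
 *   clib_sha2_init (&ctx, CLIB_SHA2_256);
 *   clib_sha2_update (&ctx, (u8 *) "abc", 3);
 *   clib_sha2_final (&ctx, digest);
 */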

static_always_inline void
clib_hmac_sha2 (clib_sha2_type_t type, const u8 * key, uword key_len,
                const u8 * msg, uword len, u8 * digest)
{
  clib_sha2_ctx_t _ctx, *ctx = &_ctx;
  uword key_data[SHA2_MAX_BLOCK_SIZE / sizeof (uword)];
  u8 i_digest[SHA2_MAX_DIGEST_SIZE];
  int i, n_words;

  clib_sha2_init (ctx, type);
  n_words = ctx->block_size / sizeof (uword);

  /* key */
  if (key_len > ctx->block_size)
    {
      /* key is longer than block, calculate hash of key */
      clib_sha2_update (ctx, key, key_len);
      for (i = (ctx->digest_size / sizeof (uword)) / 2; i < n_words; i++)
        key_data[i] = 0;
      clib_sha2_final (ctx, (u8 *) key_data);
      clib_sha2_init (ctx, type);
    }
  else
    {
      for (i = 0; i < n_words; i++)
        key_data[i] = 0;
      clib_memcpy_fast (key_data, key, key_len);
    }

  /* ipad */
  for (i = 0; i < n_words; i++)
    ctx->pending.as_uword[i] = key_data[i] ^ (uword) 0x3636363636363636;
  if (ctx->block_size == SHA512_BLOCK_SIZE)
    clib_sha512_block (ctx, ctx->pending.as_u8, 1);
  else
    clib_sha256_block (ctx, ctx->pending.as_u8, 1);
  ctx->total_bytes += ctx->block_size;

  /* message */
  clib_sha2_update (ctx, msg, len);
  clib_sha2_final (ctx, i_digest);

  /* opad */
  clib_sha2_init (ctx, type);
  for (i = 0; i < n_words; i++)
    ctx->pending.as_uword[i] = key_data[i] ^ (uword) 0x5c5c5c5c5c5c5c5c;
  if (ctx->block_size == SHA512_BLOCK_SIZE)
    clib_sha512_block (ctx, ctx->pending.as_u8, 1);
  else
    clib_sha256_block (ctx, ctx->pending.as_u8, 1);
  ctx->total_bytes += ctx->block_size;

  /* digest */
  clib_sha2_update (ctx, i_digest, ctx->digest_size);
  clib_sha2_final (ctx, digest);
}

#define clib_hmac_sha224(...) clib_hmac_sha2 (CLIB_SHA2_224, __VA_ARGS__)
#define clib_hmac_sha256(...) clib_hmac_sha2 (CLIB_SHA2_256, __VA_ARGS__)
#define clib_hmac_sha384(...) clib_hmac_sha2 (CLIB_SHA2_384, __VA_ARGS__)
#define clib_hmac_sha512(...) clib_hmac_sha2 (CLIB_SHA2_512, __VA_ARGS__)
#define clib_hmac_sha512_224(...) clib_hmac_sha2 (CLIB_SHA2_512_224, __VA_ARGS__)
#define clib_hmac_sha512_256(...) clib_hmac_sha2 (CLIB_SHA2_512_256, __VA_ARGS__)
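/*
 * Example (editorial sketch, not part of the original header): the HMAC
 * wrappers above expand to clib_hmac_sha2 (type, key, key_len, msg, len,
 * digest), e.g.
 *
 *   u8 mac[SHA256_DIGEST_SIZE];
 *   clib_hmac_sha256 (key, key_len, msg, msg_len, mac);
 *
 * where key and msg are u8 pointers and key_len / msg_len are byte counts.
 */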

#endif /* included_sha2_h */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */