FD.io VPP v17.07.01-10-g3be13f0
Vector Packet Processing
vhash.c
/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
  Copyright (c) 2010 Eliot Dresselhaus

  Permission is hereby granted, free of charge, to any person obtaining
  a copy of this software and associated documentation files (the
  "Software"), to deal in the Software without restriction, including
  without limitation the rights to use, copy, modify, merge, publish,
  distribute, sublicense, and/or sell copies of the Software, and to
  permit persons to whom the Software is furnished to do so, subject to
  the following conditions:

  The above copyright notice and this permission notice shall be
  included in all copies or substantial portions of the Software.

  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
  EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
  MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
  NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
  LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
  OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
  WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/

#include <vppinfra/vhash.h>

#ifdef CLIB_HAVE_VEC128

/* Overflow search buckets have an extra u32x4 for saving key_hash data.
   This makes it easier to refill main search bucket from overflow vector. */
typedef struct
{
  /* 4 results for this bucket. */
  u32x4_union_t result;

  /* 4 hash codes for this bucket.  These are used to refill main
     search buckets from overflow buckets when space becomes available. */
  u32x4_union_t key_hash;

  /* n_key_u32s u32x4s of key data follow. */
  u32x4_union_t key[0];
} vhash_overflow_search_bucket_t;

always_inline void
set_overflow_result (vhash_overflow_search_bucket_t * b,
                     u32 i, u32 result, u32 key_hash)
{
  b->result.as_u32[i] = result;
  b->key_hash.as_u32[i] = key_hash;
}

always_inline void
free_overflow_bucket (vhash_overflow_buckets_t * ob,
                      vhash_overflow_search_bucket_t * b, u32 i)
{
  u32 o = (u32x4_union_t *) b - ob->search_buckets;
  ASSERT (o < vec_len (ob->search_buckets));
  vec_add1 (ob->free_indices, 4 * o + i);
}

always_inline vhash_overflow_search_bucket_t *
get_overflow_search_bucket (vhash_overflow_buckets_t * obs, u32 i,
                            u32 n_key_u32s)
{
  return ((vhash_overflow_search_bucket_t *)
          vec_elt_at_index (obs->search_buckets, i));
}

always_inline vhash_overflow_search_bucket_t *
next_overflow_bucket (vhash_overflow_search_bucket_t * b, u32 n_key_u32s)
{
  return (vhash_overflow_search_bucket_t *) & b->key[n_key_u32s];
}

#define foreach_vhash_overflow_bucket(b,ob,n_key_u32s) \
  for ((b) = (vhash_overflow_search_bucket_t *) ob->search_buckets; \
       (u32x4_union_t *) (b) < vec_end (ob->search_buckets); \
       b = next_overflow_bucket (b, n_key_u32s))

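/* Slow-path lookup: walk the overflow buckets selected by key_hash,
   AND together the per-word compares for all 4 slots of each bucket,
   and return the stored result (0 if the key is not in the overflow area). */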
u32
vhash_get_overflow (vhash_t * h, u32 key_hash, u32 vi, u32 n_key_u32s)
{
  vhash_overflow_buckets_t *ob = vhash_get_overflow_buckets (h, key_hash);
  vhash_overflow_search_bucket_t *b;
  u32 i, result = 0;

  foreach_vhash_overflow_bucket (b, ob, n_key_u32s)
  {
    u32x4 r = b->result.as_u32x4;

    for (i = 0; i < n_key_u32s; i++)
      r &= vhash_bucket_compare (h, &b->key[0], i, vi);

    result = vhash_merge_results (r);
    if (result)
      break;
  }

  return result;
}

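/* Insert (or overwrite) a key/result pair in the overflow area.  Returns the
   previous result if the key was already present, otherwise 0.  Grows the
   overflow search-bucket vector when the free list is empty. */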
u32
vhash_set_overflow (vhash_t * h,
                    u32 key_hash, u32 vi, u32 new_result, u32 n_key_u32s)
{
  vhash_overflow_buckets_t *ob = vhash_get_overflow_buckets (h, key_hash);
  vhash_overflow_search_bucket_t *b;
  u32 i_set, i, old_result;

  foreach_vhash_overflow_bucket (b, ob, n_key_u32s)
  {
    u32x4 r;

    r = b->result.as_u32x4;
    for (i = 0; i < n_key_u32s; i++)
      r &= vhash_bucket_compare (h, &b->key[0], i, vi);

    old_result = vhash_merge_results (r);
    if (old_result)
      {
        i_set = vhash_non_empty_result_index (r);
        set_overflow_result (b, i_set, new_result, key_hash);
        return old_result;
      }
  }

  /* Check free list. */
  if (vec_len (ob->free_indices) == 0)
    {
      /* Out of free overflow buckets.  Resize. */
      u32 j, *p;
      i = vec_len (ob->search_buckets);
      vec_resize_aligned (ob->search_buckets,
                          sizeof (b[0]) / sizeof (u32x4) + n_key_u32s,
                          CLIB_CACHE_LINE_BYTES);
      vec_add2 (ob->free_indices, p, 4);
      for (j = 0; j < 4; j++)
        p[j] = 4 * i + j;
    }

  i = vec_pop (ob->free_indices);

  i_set = i & 3;
  b = ((vhash_overflow_search_bucket_t *)
       vec_elt_at_index (ob->search_buckets, i / 4));

  /* Insert result. */
  set_overflow_result (b, i_set, new_result, key_hash);

  /* Insert key. */
  for (i = 0; i < n_key_u32s; i++)
    b->key[i].as_u32[i_set] = vhash_get_key_word (h, i, vi);

  ob->n_overflow++;
  h->n_elts++;

  return /* old result was invalid */ 0;
}

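/* Delete a key from the overflow area.  Returns the old result, or 0 if the
   key was not found.  The freed slot is pushed back onto the free list. */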
u32
vhash_unset_overflow (vhash_t * h, u32 key_hash, u32 vi, u32 n_key_u32s)
{
  vhash_overflow_buckets_t *ob = vhash_get_overflow_buckets (h, key_hash);
  vhash_overflow_search_bucket_t *b;
  u32 i_set, i, old_result;

  foreach_vhash_overflow_bucket (b, ob, n_key_u32s)
  {
    u32x4 r;

    r = b->result.as_u32x4;
    for (i = 0; i < n_key_u32s; i++)
      r &= vhash_bucket_compare (h, &b->key[0], i, vi);

    old_result = vhash_merge_results (r);
    if (old_result)
      {
        i_set = vhash_non_empty_result_index (r);

        /* Invalidate result and invert key hash so that this will
           never match since all keys in this overflow bucket have
           matching key hashes. */
        set_overflow_result (b, i_set, 0, ~key_hash);

        free_overflow_bucket (ob, b, i_set);

        ASSERT (ob->n_overflow > 0);
        ob->n_overflow--;
        h->n_elts--;
        return old_result;
      }
  }

  /* Could not find key. */
  return 0;
}

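/* After an unset frees a slot in main search bucket SB, move one overflow
   entry with the same bucket hash back into that slot, so entries migrate
   out of the overflow vector as space becomes available. */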
void
vhash_unset_refill_from_overflow (vhash_t * h,
                                  vhash_search_bucket_t * sb,
                                  u32 key_hash, u32 n_key_u32s)
{
  vhash_overflow_buckets_t *obs = vhash_get_overflow_buckets (h, key_hash);
  vhash_overflow_search_bucket_t *ob;
  u32 i, j, i_refill, bucket_mask = h->bucket_mask.as_u32[0];

  /* Find overflow element with matching key hash. */
  foreach_vhash_overflow_bucket (ob, obs, n_key_u32s)
  {
    for (i = 0; i < 4; i++)
      {
        if (!ob->result.as_u32[i])
          continue;
        if ((ob->key_hash.as_u32[i] & bucket_mask)
            != (key_hash & bucket_mask))
          continue;

        i_refill = vhash_empty_result_index (sb->result.as_u32x4);
        sb->result.as_u32[i_refill] = ob->result.as_u32[i];
        for (j = 0; j < n_key_u32s; j++)
          sb->key[j].as_u32[i_refill] = ob->key[j].as_u32[i];
        set_overflow_result (ob, i, 0, ~key_hash);
        free_overflow_bucket (obs, ob, i);
        return;
      }
  }
}

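/* Initialize a vhash with 2^log2_n_keys key slots (at least one 4-wide
   search bucket), n_key_u32 32-bit words per key, and the given 32-bit
   hash seeds replicated across all vector lanes. */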
void
vhash_init (vhash_t * h, u32 log2_n_keys, u32 n_key_u32, u32 * hash_seeds)
{
  uword i, j, m;
  vhash_search_bucket_t *b;

  memset (h, 0, sizeof (h[0]));

  /* Must have at least 4 keys (e.g. one search bucket). */
  log2_n_keys = clib_max (log2_n_keys, 2);

  h->log2_n_keys = log2_n_keys;
  h->n_key_u32 = n_key_u32;
  m = pow2_mask (h->log2_n_keys) & ~3;
  for (i = 0; i < VECTOR_WORD_TYPE_LEN (u32); i++)
    h->bucket_mask.as_u32[i] = m;

  /* Allocate and zero search buckets. */
  i = (sizeof (b[0]) / sizeof (u32x4) + n_key_u32) << (log2_n_keys - 2);
  vec_validate_aligned (h->search_buckets, i - 1, CLIB_CACHE_LINE_BYTES);

  for (i = 0; i < ARRAY_LEN (h->find_first_zero_table); i++)
    h->find_first_zero_table[i] = min_log2 (first_set (~i));

  for (i = 0; i < ARRAY_LEN (h->hash_seeds); i++)
    for (j = 0; j < VECTOR_WORD_TYPE_LEN (u32); j++)
      h->hash_seeds[i].as_u32[j] = hash_seeds[i];
}

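/* Callbacks used by the pipeline stages below: the gather functions fetch
   key words from vhash_main_t's flat keys vector (scalar and 4-wide forms),
   and the result functions exchange old/new results with the results vector
   for the set, get and get-4 operations. */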
static_always_inline u32
vhash_main_key_gather (void *_vm, u32 vi, u32 wi, u32 n_key_u32)
{
  vhash_main_t *vm = _vm;
  return vec_elt (vm->keys, vi * n_key_u32 + wi);
}

static_always_inline u32x4
vhash_main_4key_gather (void *_vm, u32 vi, u32 wi, u32 n_key_u32s)
{
  vhash_main_t *vm = _vm;
  u32x4_union_t x;

  ASSERT (n_key_u32s == vm->n_key_u32);
  ASSERT (wi < n_key_u32s);

  x.as_u32[0] = vec_elt (vm->keys, (vi + 0) * n_key_u32s + wi);
  x.as_u32[1] = vec_elt (vm->keys, (vi + 1) * n_key_u32s + wi);
  x.as_u32[2] = vec_elt (vm->keys, (vi + 2) * n_key_u32s + wi);
  x.as_u32[3] = vec_elt (vm->keys, (vi + 3) * n_key_u32s + wi);
  return x.as_u32x4;
}

static_always_inline u32
vhash_main_set_result (void *_vm, u32 vi, u32 old_result, u32 n_key_u32)
{
  vhash_main_t *vm = _vm;
  u32 *p = vec_elt_at_index (vm->results, vi);
  u32 new_result = p[0];
  p[0] = old_result;
  return new_result;
}

static_always_inline u32
vhash_main_get_result (void *_vm, u32 vi, u32 old_result, u32 n_key_u32)
{
  vhash_main_t *vm = _vm;
  vec_elt (vm->results, vi) = old_result;
  return old_result;
}

static_always_inline u32x4
vhash_main_get_4result (void *_vm, u32 vi, u32x4 old_result, u32 n_key_u32)
{
  vhash_main_t *vm = _vm;
  u32x4 *p = (u32x4 *) vec_elt_at_index (vm->results, vi);
  p[0] = old_result;
  return old_result;
}

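/* For each supported key size (1..6 u32s) instantiate the pipeline stages:
   gather keys, finalize the hash, then get/set/unset.  The *_mod_* variants
   handle the 1..3 leftover keys when the total is not a multiple of 4. */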
#define _(N_KEY_U32) \
  static_always_inline u32 \
  vhash_main_key_gather_##N_KEY_U32 (void * _vm, u32 vi, u32 i) \
  { return vhash_main_key_gather (_vm, vi, i, N_KEY_U32); } \
 \
  static_always_inline u32x4 \
  vhash_main_4key_gather_##N_KEY_U32 (void * _vm, u32 vi, u32 i) \
  { return vhash_main_4key_gather (_vm, vi, i, N_KEY_U32); } \
 \
  clib_pipeline_stage_static \
  (vhash_main_gather_keys_stage_##N_KEY_U32, \
   vhash_main_t *, vm, i, \
   { \
     vhash_gather_4key_stage \
       (vm->vhash, \
        /* vector_index */ i, \
        vhash_main_4key_gather_##N_KEY_U32, \
        vm, \
        N_KEY_U32); \
   }) \
 \
  clib_pipeline_stage_no_inline \
  (vhash_main_gather_keys_mod_stage_##N_KEY_U32, \
   vhash_main_t *, vm, i, \
   { \
     vhash_gather_key_stage \
       (vm->vhash, \
        /* vector_index */ vm->n_vectors_div_4, \
        /* n_vectors */ vm->n_vectors_mod_4, \
        vhash_main_key_gather_##N_KEY_U32, \
        vm, \
        N_KEY_U32); \
   }) \
 \
  clib_pipeline_stage \
  (vhash_main_hash_finalize_stage_##N_KEY_U32, \
   vhash_main_t *, vm, i, \
   { \
     vhash_finalize_stage (vm->vhash, i, N_KEY_U32); \
   }) \
 \
  clib_pipeline_stage_no_inline \
  (vhash_main_hash_finalize_mod_stage_##N_KEY_U32, \
   vhash_main_t *, vm, i, \
   { \
     vhash_finalize_stage (vm->vhash, vm->n_vectors_div_4, N_KEY_U32); \
   }) \
 \
  clib_pipeline_stage_static \
  (vhash_main_get_stage_##N_KEY_U32, \
   vhash_main_t *, vm, i, \
   { \
     vhash_get_4_stage (vm->vhash, \
                        /* vector_index */ i, \
                        vhash_main_get_4result, \
                        vm, N_KEY_U32); \
   }) \
 \
  clib_pipeline_stage_no_inline \
  (vhash_main_get_mod_stage_##N_KEY_U32, \
   vhash_main_t *, vm, i, \
   { \
     vhash_get_stage (vm->vhash, \
                      /* vector_index */ vm->n_vectors_div_4, \
                      /* n_vectors */ vm->n_vectors_mod_4, \
                      vhash_main_get_result, \
                      vm, N_KEY_U32); \
   }) \
 \
  clib_pipeline_stage_static \
  (vhash_main_set_stage_##N_KEY_U32, \
   vhash_main_t *, vm, i, \
   { \
     vhash_set_stage (vm->vhash, \
                      /* vector_index */ i, \
                      /* n_vectors */ VECTOR_WORD_TYPE_LEN (u32), \
                      vhash_main_set_result, \
                      vm, N_KEY_U32); \
   }) \
 \
  clib_pipeline_stage_no_inline \
  (vhash_main_set_mod_stage_##N_KEY_U32, \
   vhash_main_t *, vm, i, \
   { \
     vhash_set_stage (vm->vhash, \
                      /* vector_index */ vm->n_vectors_div_4, \
                      /* n_vectors */ vm->n_vectors_mod_4, \
                      vhash_main_set_result, \
                      vm, N_KEY_U32); \
   }) \
 \
  clib_pipeline_stage_static \
  (vhash_main_unset_stage_##N_KEY_U32, \
   vhash_main_t *, vm, i, \
   { \
     vhash_unset_stage (vm->vhash, \
                        /* vector_index */ i, \
                        /* n_vectors */ VECTOR_WORD_TYPE_LEN (u32), \
                        vhash_main_get_result, \
                        vm, N_KEY_U32); \
   }) \
 \
  clib_pipeline_stage_no_inline \
  (vhash_main_unset_mod_stage_##N_KEY_U32, \
   vhash_main_t *, vm, i, \
   { \
     vhash_unset_stage (vm->vhash, \
                        /* vector_index */ vm->n_vectors_div_4, \
                        /* n_vectors */ vm->n_vectors_mod_4, \
                        vhash_main_get_result, \
                        vm, N_KEY_U32); \
   })

_(1);
_(2);
_(3);
_(4);
_(5);
_(6);

#undef _

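/* Keys longer than 3 u32s also get a hash mix stage between gather and
   finalize; instantiate it for 4..6 word keys. */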
#define _(N_KEY_U32) \
  clib_pipeline_stage \
  (vhash_main_hash_mix_stage_##N_KEY_U32, \
   vhash_main_t *, vm, i, \
   { \
     vhash_mix_stage (vm->vhash, i, N_KEY_U32); \
   }) \
 \
  clib_pipeline_stage_no_inline \
  (vhash_main_hash_mix_mod_stage_##N_KEY_U32, \
   vhash_main_t *, vm, i, \
   { \
     vhash_mix_stage (vm->vhash, vm->n_vectors_div_4, N_KEY_U32); \
   })

_(4);
_(5);
_(6);

#undef _

typedef enum
{
  GET, SET, UNSET,
} vhash_main_op_t;

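/* Run one GET/SET/UNSET operation over all keys in vm->keys, four keys per
   pipeline iteration.  Keys of 1..3 u32s use a 3-stage pipeline (gather,
   finalize, operate); keys of 4..6 u32s add a mix stage.  Any remaining
   1..3 keys are handled by the *_mod_* stages in a final pass. */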
static void
vhash_main_op (vhash_main_t * vm, vhash_main_op_t op)
{
  u32 n_keys = vec_len (vm->results);

  vm->n_key_u32 = vm->vhash->n_key_u32;

  vhash_validate_sizes (vm->vhash, vm->n_key_u32, n_keys);

  vm->n_vectors_div_4 = n_keys / 4;
  vm->n_vectors_mod_4 = n_keys % 4;

  if (vm->n_vectors_div_4 > 0)
    {
      switch (vm->n_key_u32)
        {
        default:
          ASSERT (0);
          break;

#define _(N_KEY_U32) \
        case N_KEY_U32: \
          if (op == GET) \
            clib_pipeline_run_3_stage \
              (vm->n_vectors_div_4, \
               vm, \
               vhash_main_gather_keys_stage_##N_KEY_U32, \
               vhash_main_hash_finalize_stage_##N_KEY_U32, \
               vhash_main_get_stage_##N_KEY_U32); \
          else if (op == SET) \
            clib_pipeline_run_3_stage \
              (vm->n_vectors_div_4, \
               vm, \
               vhash_main_gather_keys_stage_##N_KEY_U32, \
               vhash_main_hash_finalize_stage_##N_KEY_U32, \
               vhash_main_set_stage_##N_KEY_U32); \
          else \
            clib_pipeline_run_3_stage \
              (vm->n_vectors_div_4, \
               vm, \
               vhash_main_gather_keys_stage_##N_KEY_U32, \
               vhash_main_hash_finalize_stage_##N_KEY_U32, \
               vhash_main_unset_stage_##N_KEY_U32); \
          break;

          _(1);
          _(2);
          _(3);

#undef _

#define _(N_KEY_U32) \
        case N_KEY_U32: \
          if (op == GET) \
            clib_pipeline_run_4_stage \
              (vm->n_vectors_div_4, \
               vm, \
               vhash_main_gather_keys_stage_##N_KEY_U32, \
               vhash_main_hash_mix_stage_##N_KEY_U32, \
               vhash_main_hash_finalize_stage_##N_KEY_U32, \
               vhash_main_get_stage_##N_KEY_U32); \
          else if (op == SET) \
            clib_pipeline_run_4_stage \
              (vm->n_vectors_div_4, \
               vm, \
               vhash_main_gather_keys_stage_##N_KEY_U32, \
               vhash_main_hash_mix_stage_##N_KEY_U32, \
               vhash_main_hash_finalize_stage_##N_KEY_U32, \
               vhash_main_set_stage_##N_KEY_U32); \
          else \
            clib_pipeline_run_4_stage \
              (vm->n_vectors_div_4, \
               vm, \
               vhash_main_gather_keys_stage_##N_KEY_U32, \
               vhash_main_hash_mix_stage_##N_KEY_U32, \
               vhash_main_hash_finalize_stage_##N_KEY_U32, \
               vhash_main_unset_stage_##N_KEY_U32); \
          break;

          _(4);
          _(5);
          _(6);

#undef _
        }
    }

  if (vm->n_vectors_mod_4 > 0)
    {
      switch (vm->n_key_u32)
        {
        default:
          ASSERT (0);
          break;

#define _(N_KEY_U32) \
        case N_KEY_U32: \
          if (op == GET) \
            clib_pipeline_run_3_stage \
              (1, \
               vm, \
               vhash_main_gather_keys_mod_stage_##N_KEY_U32, \
               vhash_main_hash_finalize_mod_stage_##N_KEY_U32, \
               vhash_main_get_mod_stage_##N_KEY_U32); \
          else if (op == SET) \
            clib_pipeline_run_3_stage \
              (1, \
               vm, \
               vhash_main_gather_keys_mod_stage_##N_KEY_U32, \
               vhash_main_hash_finalize_mod_stage_##N_KEY_U32, \
               vhash_main_set_mod_stage_##N_KEY_U32); \
          else \
            clib_pipeline_run_3_stage \
              (1, \
               vm, \
               vhash_main_gather_keys_mod_stage_##N_KEY_U32, \
               vhash_main_hash_finalize_mod_stage_##N_KEY_U32, \
               vhash_main_unset_mod_stage_##N_KEY_U32); \
          break;

          _(1);
          _(2);
          _(3);

#undef _

#define _(N_KEY_U32) \
        case N_KEY_U32: \
          if (op == GET) \
            clib_pipeline_run_4_stage \
              (1, \
               vm, \
               vhash_main_gather_keys_mod_stage_##N_KEY_U32, \
               vhash_main_hash_mix_mod_stage_##N_KEY_U32, \
               vhash_main_hash_finalize_mod_stage_##N_KEY_U32, \
               vhash_main_get_mod_stage_##N_KEY_U32); \
          else if (op == SET) \
            clib_pipeline_run_4_stage \
              (1, \
               vm, \
               vhash_main_gather_keys_mod_stage_##N_KEY_U32, \
               vhash_main_hash_mix_mod_stage_##N_KEY_U32, \
               vhash_main_hash_finalize_mod_stage_##N_KEY_U32, \
               vhash_main_set_mod_stage_##N_KEY_U32); \
          else \
            clib_pipeline_run_4_stage \
              (1, \
               vm, \
               vhash_main_gather_keys_mod_stage_##N_KEY_U32, \
               vhash_main_hash_mix_mod_stage_##N_KEY_U32, \
               vhash_main_hash_finalize_mod_stage_##N_KEY_U32, \
               vhash_main_unset_mod_stage_##N_KEY_U32); \
          break;

          _(4);
          _(5);
          _(6);

#undef _
        }
    }
}

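/* Public entry points.  The caller fills vm->keys with n_key_u32 32-bit
   words per key and sizes vm->results to one entry per key; results are
   exchanged with the table by the stage callbacks above (new values in,
   old values out for SET; looked-up values out for GET/UNSET). */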
void
vhash_main_get (vhash_main_t * vm)
{
  vhash_main_op (vm, GET);
}

void
vhash_main_set (vhash_main_t * vm)
{
  vhash_main_op (vm, SET);
}

void
vhash_main_unset (vhash_main_t * vm)
{
  vhash_main_op (vm, UNSET);
}

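/* Copy keys from OLD into the (twice as large) new table, at least
   n_keys_this_call keys per call, starting at vector_index.  Returns the
   next vector index to continue from, or ~0 once the main buckets and all
   overflow buckets have been rehashed. */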
u32
vhash_resize_incremental (vhash_resize_t * vr, u32 vector_index,
                          u32 n_keys_this_call)
{
  vhash_t *old = vr->old;
  vhash_main_t *vm = &vr->new;
  vhash_t *new = vm->vhash;
  uword i, j, n_key_u32;

  n_key_u32 = old->n_key_u32;

  if (vector_index == 0)
    {
      u32 hash_seeds[3];
      hash_seeds[0] = old->hash_seeds[0].as_u32[0];
      hash_seeds[1] = old->hash_seeds[1].as_u32[0];
      hash_seeds[2] = old->hash_seeds[2].as_u32[0];
      vhash_init (new, old->log2_n_keys + 1, n_key_u32, hash_seeds);
    }

  vec_reset_length (vm->keys);
  vec_reset_length (vm->results);

  if (0 == (vector_index >> old->log2_n_keys))
    {
      for (i = vector_index; 0 == (i >> (old->log2_n_keys - 2)); i++)
        {
          vhash_search_bucket_t *b =
            vhash_get_search_bucket_with_index (old, 4 * i, n_key_u32);
          u32 r, *k;

#define _(I) \
          if ((r = b->result.as_u32[I]) != 0) \
            { \
              vec_add1 (vm->results, r - 1); \
              vec_add2 (vm->keys, k, n_key_u32); \
              for (j = 0; j < n_key_u32; j++) \
                k[j] = b->key[j].as_u32[I]; \
            }

          _(0);
          _(1);
          _(2);
          _(3);

#undef _

          if (vec_len (vm->results) >= n_keys_this_call)
            {
              vhash_main_op (vm, SET);
              return i;
            }
        }
    }

  /* Add overflow buckets. */
  {
    vhash_overflow_buckets_t *ob;
    vhash_overflow_search_bucket_t *b;

    for (ob = old->overflow_buckets;
         ob < old->overflow_buckets + ARRAY_LEN (old->overflow_buckets); ob++)
      {
        foreach_vhash_overflow_bucket (b, ob, old->n_key_u32)
        {
          u32 r, *k;

#define _(I) \
          if ((r = b->result.as_u32[I]) != 0) \
            { \
              vec_add1 (vm->results, r - 1); \
              vec_add2 (vm->keys, k, n_key_u32); \
              for (j = 0; j < n_key_u32; j++) \
                k[j] = b->key[j].as_u32[I]; \
            }

          _(0);
          _(1);
          _(2);
          _(3);

#undef _
        }
      }
  }

  vhash_main_op (vm, SET);

  /* Let caller know we are done. */
  return ~0;
}

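/* Blocking resize: doubles the table by running vhash_resize_incremental
   to completion, 1024 keys at a time, then frees the old table and copies
   the new one over it. */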
void
vhash_resize (vhash_t * old, u32 log2_n_keys)
{
  static vhash_resize_t vr;
  vhash_t new;
  u32 i = 0;

  vr.old = old;
  vr.new.vhash = &new;

  while (1)
    {
      i = vhash_resize_incremental (&vr, i, 1024);
      if (i == ~0)
        break;
    }

  vhash_free (old);
  *old = new;
}

#endif /* CLIB_HAVE_VEC128 */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */