FD.io VPP  v21.10.1-2-g0a485f517
Vector Packet Processing
valloc.c
/*
 * Copyright (c) 2018 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <vppinfra/valloc.h>

/** @file
    @brief Simple first-fit virtual space allocator
*/

/** Add a chunk of memory to a virtual allocation arena
    @param vam - clib_valloc_main_t * pointer to the allocation arena
    @param template - clib_valloc_chunk_t * pointer to a template chunk which
    describes the virtual address range to add

    @note Only the baseva and size members of the template chunk are
    significant. It's perfectly OK for the new chunk to be discontinuous with
    previous chunks; the chunk fusion algorithm won't merge them.
 */

__clib_export void
clib_valloc_add_chunk (clib_valloc_main_t * vam,
                       clib_valloc_chunk_t * template)
{
  clib_valloc_chunk_t *ch, *new_ch;
  u32 index;

  ASSERT (vam->flags & CLIB_VALLOC_INITIALIZED);

  clib_spinlock_lock_if_init (&vam->lock);

  /* Add at the beginning, or at the end... */
  index = vam->first_index;

  /*
   * Make sure we're not trying to add an overlapping chunk..
   * It's worth checking, because someone will eventually do that.
   */
  if (CLIB_DEBUG > 0 && index != ~0)
    {
      while (index != ~0)
        {
          ch = pool_elt_at_index (vam->chunks, index);
          ASSERT (template->baseva < ch->baseva || template->baseva >=
                  (ch->baseva + ch->size));
          ASSERT (template->baseva + template->size < ch->baseva ||
                  template->baseva + template->size >=
                  (ch->baseva + ch->size));
          index = ch->next;
        }
      index = vam->first_index;
    }

  if (index != ~0)
    ch = pool_elt_at_index (vam->chunks, index);

  if (index == ~0 || template->baseva < ch->baseva)
    {
      pool_get (vam->chunks, new_ch);
      clib_memset (new_ch, 0, sizeof (*new_ch));

      if (index != ~0)
        {
          ch = pool_elt_at_index (vam->chunks, index);

          new_ch->next = index;
          new_ch->prev = ~0;
          ch->prev = new_ch - vam->chunks;
        }
      else
        {
          new_ch->next = new_ch->prev = ~0;
        }

      new_ch->baseva = template->baseva;
      new_ch->size = template->size;

      vam->first_index = new_ch - vam->chunks;

      hash_set (vam->chunk_index_by_baseva, new_ch->baseva, vam->first_index);
    }
  else
    {
      /* Walk to the end of the chunk chain */
      while (index != ~0)
        {
          ch = pool_elt_at_index (vam->chunks, index);
          index = ch->next;
        }
      /* we want the last chunk index */
      index = ch - vam->chunks;

      pool_get (vam->chunks, new_ch);
      clib_memset (new_ch, 0, sizeof (*new_ch));

      ch = pool_elt_at_index (vam->chunks, index);

      new_ch->next = ~0;
      new_ch->prev = index;
      ch->next = new_ch - vam->chunks;

      new_ch->baseva = template->baseva;
      new_ch->size = template->size;

      hash_set (vam->chunk_index_by_baseva, new_ch->baseva,
                new_ch - vam->chunks);
    }

  clib_spinlock_unlock_if_init (&vam->lock);
}
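
/*
 * Illustrative sketch, not part of upstream valloc.c: growing an
 * already-initialized arena with a second, discontiguous range. The helper
 * name and the base address / size below are made-up values.
 */
static __clib_unused void
valloc_add_chunk_example (clib_valloc_main_t * vam)
{
  clib_valloc_chunk_t _template, *template = &_template;

  /* Only the baseva and size members of the template are read */
  template->baseva = 0x200000000ULL;	/* made-up base VA */
  template->size = 1ULL << 20;	/* 1 MB of virtual space */

  clib_valloc_add_chunk (vam, template);
}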

/** Initialize a virtual memory allocation arena
    @param vam - clib_valloc_main_t * pointer to the arena to initialize
    @param template - clib_valloc_chunk_t * pointer to a template chunk which
    describes the initial virtual address range
    @param need_lock - int 1 => protect the arena with a spinlock
*/
__clib_export void
clib_valloc_init (clib_valloc_main_t * vam, clib_valloc_chunk_t * template,
                  int need_lock)
{
  ASSERT (template && template->baseva && template->size);
  clib_memset (vam, 0, sizeof (*vam));
  if (need_lock)
    clib_spinlock_init (&vam->lock);

  vam->chunk_index_by_baseva = hash_create (0, sizeof (uword));
  vam->first_index = ~0;
  vam->flags |= CLIB_VALLOC_INITIALIZED;

  clib_valloc_add_chunk (vam, template);
}
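
/*
 * Illustrative sketch, not part of upstream valloc.c: creating an arena
 * that covers a single made-up address range, with locking enabled. The
 * helper name and the constants are hypothetical.
 */
static clib_valloc_main_t example_arena;

static __clib_unused void
valloc_init_example (void)
{
  clib_valloc_chunk_t _template, *template = &_template;

  template->baseva = 0x100000000ULL;	/* made-up base VA */
  template->size = 1ULL << 30;	/* 1 GB of virtual space */

  clib_valloc_init (&example_arena, template, 1 /* need_lock */ );
}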

/** Allocate virtual space
    @param vam - clib_valloc_main_t * pointer to the allocation arena
    @param size - uword number of bytes to allocate
    @param os_out_of_memory_on_failure - int 1 => panic on allocation failure
    @return uword allocated space, 0 => failure
*/
__clib_export uword
clib_valloc_alloc (clib_valloc_main_t * vam, uword size,
                   int os_out_of_memory_on_failure)
{
  clib_valloc_chunk_t *ch, *new_ch, *next_ch;
  u32 index;

  clib_spinlock_lock_if_init (&vam->lock);

  index = vam->first_index;

  while (index != ~0)
    {
      ch = pool_elt_at_index (vam->chunks, index);
      /* If the chunk is free... */
      if ((ch->flags & CLIB_VALLOC_BUSY) == 0)
        {
          /* Too small? */
          if (ch->size < size)
            goto next_chunk;
          /* Exact match? */
          if (ch->size == size)
            {
              ch->flags |= CLIB_VALLOC_BUSY;
              clib_spinlock_unlock_if_init (&vam->lock);
              return ch->baseva;
            }
          /*
           * The current free chunk is larger than necessary; split the block.
           */
          pool_get (vam->chunks, new_ch);
          /* ch might have just moved */
          ch = pool_elt_at_index (vam->chunks, index);
          clib_memset (new_ch, 0, sizeof (*new_ch));
          new_ch->next = new_ch->prev = ~0;
          new_ch->baseva = ch->baseva + size;
          new_ch->size = ch->size - size;
          ch->size = size;

          /* Insert into doubly-linked list */
          new_ch->next = ch->next;
          new_ch->prev = ch - vam->chunks;

          if (ch->next != ~0)
            {
              next_ch = pool_elt_at_index (vam->chunks, ch->next);
              next_ch->prev = new_ch - vam->chunks;
            }
          ch->next = new_ch - vam->chunks;

          hash_set (vam->chunk_index_by_baseva, new_ch->baseva,
                    new_ch - vam->chunks);

          ch->flags |= CLIB_VALLOC_BUSY;
          clib_spinlock_unlock_if_init (&vam->lock);
          return ch->baseva;
        }

    next_chunk:
      index = ch->next;
    }
  clib_spinlock_unlock_if_init (&vam->lock);

  if (os_out_of_memory_on_failure)
    os_out_of_memory ();

  return 0;
}
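
/*
 * Illustrative sketch, not part of upstream valloc.c: carving two blocks
 * out of an initialized arena. With os_out_of_memory_on_failure set to 0, a
 * zero return value signals exhaustion instead of panicking. The helper
 * name is hypothetical.
 */
static __clib_unused void
valloc_alloc_example (clib_valloc_main_t * vam)
{
  uword va1, va2;

  /* First fit: both blocks are carved from the lowest suitable free chunk */
  va1 = clib_valloc_alloc (vam, 64 << 10, 0 /* don't panic */ );
  va2 = clib_valloc_alloc (vam, 64 << 10, 0 /* don't panic */ );

  /* A zero return means the arena could not satisfy the request */
  ASSERT (va1 != 0 && va2 != 0);
}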

/** Free virtual space
    @param vam - clib_valloc_main_t * pointer to the allocation arena
    @param baseva - uword base virtual address of the returned space
    @return uword - size of the freed virtual chunk
    @note the size is returned since the allocator tracks it, in case the
    caller doesn't record chunk sizes
*/
__clib_export uword
clib_valloc_free (clib_valloc_main_t * vam, uword baseva)
{
  clib_valloc_chunk_t *ch, *prev_ch, *next_ch, *n2_ch;
  u32 index;
  uword return_size = 0;
  uword *p;

  clib_spinlock_lock_if_init (&vam->lock);

  p = hash_get (vam->chunk_index_by_baseva, baseva);

  /* Check even in production images */
  if (p == 0)
    os_panic ();

  index = p[0];

  ch = pool_elt_at_index (vam->chunks, index);

  return_size = ch->size;

  ASSERT (ch->baseva == baseva);
  ASSERT ((ch->flags & CLIB_VALLOC_BUSY) != 0);

  ch->flags &= ~CLIB_VALLOC_BUSY;

  /* combine with previous entry? */
  if (ch->prev != ~0)
    {
      prev_ch = pool_elt_at_index (vam->chunks, ch->prev);
      /*
       * Previous item must be free as well, and
       * tangent to this block.
       */
      if ((prev_ch->flags & CLIB_VALLOC_BUSY) == 0
          && ((prev_ch->baseva + prev_ch->size) == ch->baseva))
        {
          hash_unset (vam->chunk_index_by_baseva, baseva);
          prev_ch->size += ch->size;
          prev_ch->next = ch->next;
          if (ch->next != ~0)
            {
              next_ch = pool_elt_at_index (vam->chunks, ch->next);
              next_ch->prev = ch->prev;
            }
          ASSERT (ch - vam->chunks != vam->first_index);
          clib_memset (ch, 0xfe, sizeof (*ch));
          pool_put (vam->chunks, ch);
          /* See about combining with next elt */
          ch = prev_ch;
        }
    }

  /* Combine with next entry? */
  if (ch->next != ~0)
    {
      next_ch = pool_elt_at_index (vam->chunks, ch->next);

      if ((next_ch->flags & CLIB_VALLOC_BUSY) == 0
          && ((ch->baseva + ch->size) == next_ch->baseva))
        {
          hash_unset (vam->chunk_index_by_baseva, next_ch->baseva);
          ch->size += next_ch->size;
          ch->next = next_ch->next;
          if (ch->next != ~0)
            {
              n2_ch = pool_elt_at_index (vam->chunks, next_ch->next);
              n2_ch->prev = ch - vam->chunks;
            }
          ASSERT (next_ch - vam->chunks != vam->first_index);
          clib_memset (next_ch, 0xfe, sizeof (*ch));
          pool_put (vam->chunks, next_ch);
        }
    }

  clib_spinlock_unlock_if_init (&vam->lock);
  return return_size;
}
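
/*
 * Illustrative sketch, not part of upstream valloc.c: returning a block
 * and checking the size reported back. Freeing a baseva that was never
 * handed out by clib_valloc_alloc triggers os_panic(), even in production
 * images. The helper name is hypothetical.
 */
static __clib_unused void
valloc_free_example (clib_valloc_main_t * vam)
{
  uword va, size;

  va = clib_valloc_alloc (vam, 4 << 10, 0 /* don't panic */ );
  if (va == 0)
    return;

  /* The allocator remembers the chunk size and hands it back */
  size = clib_valloc_free (vam, va);
  ASSERT (size == 4 << 10);
}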

/** Format a virtual allocation arena (varargs)
    @param vam - clib_valloc_main_t * pointer to the arena to format
    @param verbose - int - verbosity level
    @return u8 vector
*/
__clib_export u8 *
format_valloc (u8 *s, va_list *va)
{
  clib_valloc_main_t *vam = va_arg (*va, clib_valloc_main_t *);
  int verbose = va_arg (*va, int);
  clib_valloc_chunk_t *ch;
  u32 index;
  uword *p;

  clib_spinlock_lock_if_init (&vam->lock);

  s = format (s, "%d chunks, first index %d\n",
              pool_elts (vam->chunks), vam->first_index);

  if (verbose)
    {
      index = vam->first_index;
      while (index != ~0)
        {
          ch = pool_elt_at_index (vam->chunks, index);

          s = format (s, "[%d] base %llx size %llx (%lld) prev %d %s\n",
                      index, ch->baseva, ch->size, ch->size, ch->prev,
                      (ch->flags & CLIB_VALLOC_BUSY) ? "busy" : "free");

          p = hash_get (vam->chunk_index_by_baseva, ch->baseva);
          if (p == 0)
            {
              s = format (s, "   BUG: baseva not in hash table!\n");
            }
          else if (p[0] != index)
            {
              s = format (s, "   BUG: baseva in hash table %d not %d!\n",
                          p[0], index);
            }
          index = ch->next;
        }
    }

  clib_spinlock_unlock_if_init (&vam->lock);

  return s;
}
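
/*
 * Illustrative sketch, not part of upstream valloc.c: dumping an arena.
 * format_valloc follows the usual vppinfra format-function convention, so
 * it is invoked through %U with its arguments; this assumes fformat from
 * <vppinfra/format.h> is reachable through the include chain. The helper
 * name is hypothetical.
 */
static __clib_unused void
valloc_dump_example (clib_valloc_main_t * vam)
{
  fformat (stdout, "%U", format_valloc, vam, 1 /* verbose */ );
}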

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */