32 n_chunk = c->
length - tail_idx;
38 while ((to_copy -= n_chunk))
62 n_chunk = c->
length - head_idx;
68 while ((to_copy -= n_chunk))
83 #ifndef CLIB_MARCH_VARIANT 176 next->prev = cur->
prev;
186 f->ooos_list_head = cur->
next;
200 u32 new_index, s_end_pos, s_index;
201 u32 offset_pos, offset_end_pos;
205 offset_pos = (tail +
offset) % f->size;
206 offset_end_pos = (tail + offset + length) % f->size;
213 f->ooos_list_head = s - f->ooo_segments;
214 f->ooos_newest = f->ooos_list_head;
238 s_index = s - f->ooo_segments;
245 new_index = new_s - f->ooo_segments;
253 prev->
next = new_index;
258 f->ooos_list_head = new_index;
261 new_s->
next = s_index;
263 f->ooos_newest = new_index;
267 else if (
position_gt (f, offset_pos, s_end_pos, tail))
270 new_index = new_s - f->ooo_segments;
278 new_s->
prev = s_index;
280 f->ooos_newest = new_index;
292 s->
start = offset_pos;
294 f->ooos_newest = s - f->ooo_segments;
300 if (
position_gt (f, offset_end_pos, s_end_pos, tail))
307 offset_end_pos, tail))
321 f->ooos_newest = s - f->ooo_segments;
332 u32 s_index, bytes = 0;
339 ASSERT (diff != n_bytes_enqueued);
341 if (diff > n_bytes_enqueued)
345 while (0 <= diff && diff < n_bytes_enqueued)
347 s_index = s - f->ooo_segments;
353 *tail = (*tail + bytes) % f->size;
373 ASSERT (bytes <= f->nitems);
399 f->nitems = f->size - 1;
403 f->head = f->tail = f->flags = 0;
404 f->head_chunk = f->tail_chunk = f->ooo_enq = f->ooo_deq = f->start_chunk;
412 if (f->start_chunk->next == f->start_chunk)
419 f->start_chunk->start_byte = 0;
420 prev = f->start_chunk;
423 while (c != f->start_chunk)
438 u32 rounded_data_size;
449 rounded_data_size = (1 << (
max_log2 (data_size_in_bytes)));
460 c->
length = data_size_in_bytes;
461 f->start_chunk = f->end_chunk =
c;
477 rounded_size = (1 << (
max_log2 (size)));
510 while (pos != cur->
key)
554 f->end_chunk->
next =
c;
562 prev->
next = f->start_chunk;
563 f->size += add_bytes;
564 f->nitems = f->size - 1;
571 if (new_head > f->tail)
587 ASSERT (f->start_chunk->next == f->start_chunk);
600 prev = f->new_chunks;
627 if (!f->new_chunks && f->head_chunk != f->tail_chunk)
629 u32 head = 0, tail = 0;
634 prev = f->tail_chunk;
637 while (cur != f->start_chunk)
645 f->tail_chunk->
next =
c;
660 while (cur != f->start_chunk)
669 f->size += add_bytes;
670 f->nitems = f->size - 1;
686 prev = f->new_chunks;
721 list = f->new_chunks;
736 u32 len_to_shrink = 0, tail_pos,
len, last_pos;
746 if (f->size_decrement)
752 f->size_decrement -=
len;
759 if (tail_pos >= head && tail_pos < f->end_chunk->start_byte)
764 last_pos = tail_pos > 0 ? tail_pos - 1 : tail_pos;
777 while (next != f->start_chunk)
781 len_to_shrink += cur->
length;
785 f->size -= len_to_shrink;
787 prev->
next = f->start_chunk;
789 cur->
next = f->new_chunks;
790 f->new_chunks = start;
794 if (!f->size_decrement && f->size == f->nitems + 1)
798 if (f->start_chunk == f->start_chunk->next)
814 if (len >= f->size || f->size > f->nitems + 1
823 while (cur != f->start_chunk)
825 actual_len += cur->
length;
829 ASSERT (actual_len <= len);
833 f->size_decrement = actual_len;
857 if (--f->refcnt == 0)
869 u32 head, tail, head_idx;
872 ASSERT (len <= f->nitems);
877 n_chunk = c->
length - head_idx;
890 u32 tail, head, free_count;
905 tail = (tail +
len) % f->size;
933 u32 tail, head, free_count, tail_idx;
944 if ((len + offset) > free_count)
950 tail_idx = (tail +
offset) % f->size;
971 tail = (tail +
len) % f->size;
983 u32 tail, head, cursize;
995 head = (head +
len) % f->size;
1009 u32 tail, head, cursize, head_idx;
1019 len =
clib_min (cursize - offset, len);
1020 head_idx = (head +
offset) % f->size;
1031 u32 total_drop_bytes, tail, head, cursize;
1041 total_drop_bytes =
clib_min (cursize, len);
1046 head = (head + total_drop_bytes) % f->size;
1057 return total_drop_bytes;
1079 u32 cursize, head, tail, head_idx;
1093 fs[0].
len = f->size - head_idx;
1094 fs[0].
data = f->head_chunk->data + head_idx;
1095 fs[1].
len = cursize - fs[0].
len;
1096 fs[1].
data = f->head_chunk->data;
1100 fs[0].
len = cursize;
1101 fs[0].
data = f->head_chunk->data + head_idx;
1116 ASSERT (fs[0].
data == f->head_chunk->data + head);
1117 head = (head + fs[0].
len + fs[1].
len) % f->size;
1158 head = head % f->size;
1159 tail = tail % f->size;
1167 f->head_chunk = f->ooo_deq =
c;
1170 f->tail_chunk = f->ooo_enq =
c;
1179 f->subscribers[f->n_subscribers++] = subscriber;
1187 for (i = 0; i < f->n_subscribers; i++)
1189 if (f->subscribers[i] != subscriber)
1191 f->subscribers[
i] = f->subscribers[f->n_subscribers - 1];
1207 if (f->start_chunk->next != f->start_chunk)
1227 while (c != f->start_chunk);
1229 if (size != f->size)
1260 u32 normalized_start = (seg->
start + f->nitems - f->tail) % f->size;
1261 s =
format (s,
"[%u, %u], len %u, next %d, prev %d", normalized_start,
1262 (normalized_start + seg->
length) % f->size, seg->
length,
1312 clib_memset (f->head_chunk->data, 0xFF, f->nitems);
1314 for (i = 0; i <
vec_len (data); i++)
1317 for (i = 0; i < trace_len; i++)
1320 if (trace[i].action == 1)
1323 s =
format (s,
"adding [%u, %u]:", trace[i].offset,
1324 (trace[i].offset + trace[i].
len) % dummy_fifo->size);
1326 trace[i].len, &data[offset]);
1328 else if (trace[i].action == 2)
1331 s =
format (s,
"adding [%u, %u]:", 0, trace[i].
len);
1337 s =
format (s,
"read: %u", trace[i].
len);
1353 u32 indent = va_arg (*args,
u32);
1354 u32 ooo_segment_index = f->ooos_list_head;
1362 ooo_segment_index = seg->
next;
1372 int verbose = va_arg (*args,
int);
1379 s =
format (s,
"cursize %u nitems %u has_event %d\n",
1382 indent, (f->head % f->size), (f->tail % f->size),
1383 f->segment_manager);
1386 s =
format (s,
"%Uvpp session %d thread %d app session %d thread %d\n",
1388 f->master_thread_index, f->client_session_index,
1389 f->client_thread_index);
1393 s =
format (s,
"%Uooo pool %d active elts newest %u\n",
u32 length
length of chunk in bytes
#define vec_validate(V, I)
Make sure vector is long enough for given index (no header, unspecified alignment) ...
static void svm_fifo_copy_to_chunk(svm_fifo_t *f, svm_fifo_chunk_t *c, u32 tail_idx, const u8 *src, u32 len, svm_fifo_chunk_t **last)
rb_node_t * rb_tree_predecessor(rb_tree_t *rt, rb_node_t *x)
static void f_load_head_tail_all_acq(svm_fifo_t *f, u32 *head, u32 *tail)
Load head and tail independent of producer/consumer role.
static vlib_cli_command_t trace
(constructor) VLIB_CLI_COMMAND (trace)
svm_fifo_chunk_t * svm_fifo_collect_chunks(svm_fifo_t *f)
Removes chunks that are after fifo end byte.
static int ooo_segment_try_collect(svm_fifo_t *f, u32 n_bytes_enqueued, u32 *tail)
Removes segments that can now be enqueued because the fifo's tail has advanced.
int svm_fifo_segments(svm_fifo_t *f, svm_fifo_seg_t *fs)
static u32 svm_fifo_max_enqueue_prod(svm_fifo_t *f)
Maximum number of bytes that can be enqueued into fifo.
static ooo_segment_t * ooo_segment_next(svm_fifo_t *f, ooo_segment_t *s)
static rb_node_t * rb_node_left(rb_tree_t *rt, rb_node_t *n)
void svm_fifo_free_chunk_lookup(svm_fifo_t *f)
Cleanup fifo chunk lookup rb tree.
static u8 svm_fifo_has_ooo_data(svm_fifo_t *f)
Check if fifo has out-of-order data.
void svm_fifo_init_pointers(svm_fifo_t *f, u32 head, u32 tail)
Set fifo pointers to requested offset.
static u32 f_free_count(svm_fifo_t *f, u32 head, u32 tail)
Fifo free bytes, i.e., number of free bytes.
void svm_fifo_segments_free(svm_fifo_t *f, svm_fifo_seg_t *fs)
static u8 position_leq(svm_fifo_t *f, u32 a, u32 b, u32 tail)
void svm_fifo_free(svm_fifo_t *f)
Free fifo and associated state.
#define clib_memcpy_fast(a, b, c)
u32 prev
Previous linked-list element pool index.
clib_memset(h->entries, 0, sizeof(h->entries[0]) *entries)
static void f_load_head_tail_cons(svm_fifo_t *f, u32 *head, u32 *tail)
Load head and tail optimized for consumer.
static u8 position_gt(svm_fifo_t *f, u32 a, u32 b, u32 tail)
static void svm_fifo_copy_from_chunk(svm_fifo_t *f, svm_fifo_chunk_t *c, u32 head_idx, u8 *dst, u32 len, svm_fifo_chunk_t **last)
#define CLIB_MARCH_FN_SELECT(fn)
static heap_elt_t * last(heap_header_t *h)
int svm_fifo_peek(svm_fifo_t *f, u32 offset, u32 len, u8 *dst)
Peek data from fifo.
void svm_fifo_enqueue_nocopy(svm_fifo_t *f, u32 len)
Advance tail.
void svm_fifo_init(svm_fifo_t *f, u32 size)
Initialize fifo.
static rb_node_t * rb_node(rb_tree_t *rt, rb_node_index_t ri)
ooo_segment_t * svm_fifo_first_ooo_segment(svm_fifo_t *f)
First out-of-order segment for fifo.
void svm_fifo_dequeue_drop_all(svm_fifo_t *f)
Dequeue and drop all bytes from fifo.
static ooo_segment_t * ooo_segment_last(svm_fifo_t *f)
static rb_node_t * rb_node_right(rb_tree_t *rt, rb_node_t *n)
void svm_fifo_overwrite_head(svm_fifo_t *f, u8 *src, u32 len)
Overwrite fifo head with new data.
#define SVM_FIFO_INVALID_INDEX
void rb_tree_free_nodes(rb_tree_t *rt)
#define pool_get(P, E)
Allocate an object E from a pool P (unspecified alignment).
rb_node_index_t rb_tree_add2(rb_tree_t *rt, u32 key, uword opaque)
u8 svm_fifo_set_single_thread_owned(svm_fifo_t *f)
Declare this fifo is used by only a single thread.
struct _svm_fifo svm_fifo_t
void svm_fifo_init_chunks(svm_fifo_t *f)
Initialize fifo chunks and rbtree.
static ooo_segment_t * ooo_segment_alloc(svm_fifo_t *f, u32 start, u32 length)
static svm_fifo_chunk_t * svm_fifo_find_chunk(svm_fifo_t *f, u32 pos)
Find chunk for given byte position.
void svm_fifo_clone(svm_fifo_t *df, svm_fifo_t *sf)
Clones fifo.
static u32 svm_fifo_max_dequeue(svm_fifo_t *f)
Fifo max bytes to dequeue.
svm_fifo_chunk_t * svm_fifo_chunk_alloc(u32 size)
Creates a fifo chunk in the current heap.
u8 * format_ooo_list(u8 *s, va_list *args)
void svm_fifo_free_ooo_data(svm_fifo_t *f)
Cleanup fifo ooo data.
static void ooo_segment_free(svm_fifo_t *f, u32 index)
int svm_fifo_dequeue(svm_fifo_t *f, u32 len, u8 *dst)
Dequeue data from fifo.
void rb_tree_del(rb_tree_t *rt, u32 key)
#define pool_elt_at_index(p, i)
Returns pointer to element at given index.
uword opaque
value stored by node
static ooo_segment_t * ooo_segment_prev(svm_fifo_t *f, ooo_segment_t *s)
struct svm_fifo_chunk_ * next
pointer to next chunk in linked-lists
static void svm_fifo_grow(svm_fifo_t *f, svm_fifo_chunk_t *c)
#define pool_put(P, E)
Free an object E in pool P.
int svm_fifo_enqueue(svm_fifo_t *f, u32 len, const u8 *src)
Enqueue data to fifo.
void rb_tree_init(rb_tree_t *rt)
#define svm_fifo_trace_add(_f, _s, _l, _t)
u8 * svm_fifo_replay(u8 *s, svm_fifo_t *f, u8 no_read, u8 verbose)
CLIB_MARCH_FN(svm_fifo_copy_to_chunk, void, svm_fifo_t *f, svm_fifo_chunk_t *c, u32 tail_idx, const u8 *src, u32 len, svm_fifo_chunk_t **last)
void svm_fifo_add_subscriber(svm_fifo_t *f, u8 subscriber)
Add io events subscriber to list.
#define pool_free(p)
Free a pool.
#define SVM_FIFO_MAX_EVT_SUBSCRIBERS
static u32 f_cursize(svm_fifo_t *f, u32 head, u32 tail)
Fifo current size, i.e., number of bytes enqueued.
static u32 position_diff(svm_fifo_t *f, u32 a, u32 b, u32 tail)
int svm_fifo_reduce_size(svm_fifo_t *f, u32 len, u8 try_shrink)
Request to reduce fifo size by amount of bytes.
void svm_fifo_add_chunk(svm_fifo_t *f, svm_fifo_chunk_t *c)
Grow fifo size by adding chunk to chunk list.
static void f_load_head_tail_prod(svm_fifo_t *f, u32 *head, u32 *tail)
Load head and tail optimized for producer.
u32 start_byte
chunk start byte
u8 svm_fifo_is_sane(svm_fifo_t *f)
Check if fifo is sane.
#define OOO_SEGMENT_INVALID_INDEX
u8 * format_ooo_segment(u8 *s, va_list *args)
u32 svm_fifo_n_ooo_segments(svm_fifo_t *f)
Number of out-of-order segments for fifo.
static void * clib_mem_alloc_aligned_or_null(uword size, uword align)
#define uword_to_pointer(u, type)
u8 * format_svm_fifo(u8 *s, va_list *args)
static void clib_mem_free(void *p)
u8 data[0]
start of chunk data
void svm_fifo_del_subscriber(svm_fifo_t *f, u8 subscriber)
Remove io events subscriber from list.
int svm_fifo_enqueue_with_offset(svm_fifo_t *f, u32 offset, u32 len, u8 *src)
Enqueue a future segment.
static u8 svm_fifo_is_wrapped(svm_fifo_t *f)
Check if fifo is wrapped.
static uword pointer_to_uword(const void *p)
static u8 svm_fifo_chunk_includes_pos(svm_fifo_chunk_t *c, u32 pos)
#define clib_atomic_store_rel_n(a, b)
static u32 f_distance_to(svm_fifo_t *f, u32 a, u32 b)
Distance to a from b, i.e., a - b in the fifo.
u32 length
Length of segment.
u8 * svm_fifo_dump_trace(u8 *s, svm_fifo_t *f)
u32 next
Next linked-list element pool index.
template key/value backing page structure
static u8 rb_node_is_tnil(rb_tree_t *rt, rb_node_t *n)
static u32 ooo_segment_end_pos(svm_fifo_t *f, ooo_segment_t *s)
#define vec_len(v)
Number of elements in vector (rvalue-only, NULL tolerant)
static uword max_log2(uword x)
static void svm_fifo_try_grow(svm_fifo_t *f, u32 new_head)
static u32 f_distance_from(svm_fifo_t *f, u32 a, u32 b)
Distance from a to b, i.e., b - a in the fifo.
static_always_inline uword os_get_thread_index(void)
void svm_fifo_try_shrink(svm_fifo_t *f, u32 head, u32 tail)
Try to shrink fifo size.
struct clib_bihash_value offset
template key/value backing page structure
#define vec_foreach(var, vec)
Vector iterator.
static u8 position_lt(svm_fifo_t *f, u32 a, u32 b, u32 tail)
#define CLIB_CACHE_LINE_BYTES
#define clib_atomic_load_acq_n(a)
int svm_fifo_dequeue_drop(svm_fifo_t *f, u32 len)
Dequeue and drop bytes from fifo.
rb_node_index_t root
root index
u32 start
Start of segment, normalized.
static void ooo_segment_add(svm_fifo_t *f, u32 offset, u32 head, u32 tail, u32 length)
Add segment to fifo's out-of-order segment list.
svm_fifo_t * svm_fifo_create(u32 data_size_in_bytes)
Creates a fifo in the current heap.
static uword pool_elts(void *v)
Number of active elements in a pool.