/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2020 Intel Corporation
 */

#include "test_ring_stress.h"

/**
 * Stress test for ring enqueue/dequeue operations.
 * Each worker performs the following pattern:
 * dequeue / read-write the data of the dequeued objects / enqueue.
 * Serves as both a functional and a performance test of ring
 * enqueue/dequeue operations under high contention
 * (for both over-committed and non-over-committed scenarios).
 */

#define RING_NAME	"RING_STRESS"
#define BULK_NUM	32
#define RING_SIZE	(2 * BULK_NUM * RTE_MAX_LCORE)

enum {
	WRK_CMD_STOP,
	WRK_CMD_RUN,
};

static volatile uint32_t wrk_cmd __rte_cache_aligned;

/* test run-time in seconds */
static const uint32_t run_time = 60;
static const uint32_t verbose;

struct lcore_stat {
	uint64_t nb_cycle;
	struct {
		uint64_t nb_call;
		uint64_t nb_obj;
		uint64_t nb_cycle;
		uint64_t max_cycle;
		uint64_t min_cycle;
	} op;
};

struct lcore_arg {
	struct rte_ring *rng;
	struct lcore_stat stats;
} __rte_cache_aligned;

struct ring_elem {
	uint32_t cnt[RTE_CACHE_LINE_SIZE / sizeof(uint32_t)];
} __rte_cache_aligned;

/*
 * redefinable functions: provided by the test file that includes this header
 */
static uint32_t
_st_ring_dequeue_bulk(struct rte_ring *r, void **obj, uint32_t n,
	uint32_t *avail);

static uint32_t
_st_ring_enqueue_bulk(struct rte_ring *r, void * const *obj, uint32_t n,
	uint32_t *free);

static int
_st_ring_init(struct rte_ring *r, const char *name, uint32_t num);
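/*
 * For reference, a minimal sketch (kept in this comment, not compiled here)
 * of how an including test file might provide the three functions above,
 * assuming the plain MP/MC rte_ring API; the real definitions are supplied
 * by the .c file that includes this header.
 *
 *	static uint32_t
 *	_st_ring_dequeue_bulk(struct rte_ring *r, void **obj, uint32_t n,
 *		uint32_t *avail)
 *	{
 *		return rte_ring_mc_dequeue_bulk(r, obj, n, avail);
 *	}
 *
 *	static uint32_t
 *	_st_ring_enqueue_bulk(struct rte_ring *r, void * const *obj, uint32_t n,
 *		uint32_t *free)
 *	{
 *		return rte_ring_mp_enqueue_bulk(r, obj, n, free);
 *	}
 *
 *	static int
 *	_st_ring_init(struct rte_ring *r, const char *name, uint32_t num)
 *	{
 *		return rte_ring_init(r, name, num, 0);
 *	}
 */

/*
 * Accumulate per-worker operation statistics; min/max cycles per call are
 * tracked only when precise timing (prcs != 0) is requested.
 */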
static void
lcore_stat_update(struct lcore_stat *ls, uint64_t call, uint64_t obj,
	uint64_t tm, int32_t prcs)
{
	ls->op.nb_call += call;
	ls->op.nb_obj += obj;
	ls->op.nb_cycle += tm;
	if (prcs) {
		ls->op.max_cycle = RTE_MAX(ls->op.max_cycle, tm);
		ls->op.min_cycle = RTE_MIN(ls->op.min_cycle, tm);
	}
}
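/* merge the per-operation counters of one worker into the aggregate */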
static void
lcore_op_stat_aggr(struct lcore_stat *ms, const struct lcore_stat *ls)
{
	ms->op.nb_call += ls->op.nb_call;
	ms->op.nb_obj += ls->op.nb_obj;
	ms->op.nb_cycle += ls->op.nb_cycle;
	ms->op.max_cycle = RTE_MAX(ms->op.max_cycle, ls->op.max_cycle);
	ms->op.min_cycle = RTE_MIN(ms->op.min_cycle, ls->op.min_cycle);
}
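/*
 * Aggregate one worker's stats into the master copy: total cycles combine
 * as a maximum (run time of the slowest worker), per-operation counters
 * are summed by lcore_op_stat_aggr().
 */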
static void
lcore_stat_aggr(struct lcore_stat *ms, const struct lcore_stat *ls)
{
	ms->nb_cycle = RTE_MAX(ms->nb_cycle, ls->nb_cycle);
	lcore_op_stat_aggr(ms, ls);
}
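/* print the collected statistics for one lcore (or the aggregate summary) */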
static void
lcore_stat_dump(FILE *f, uint32_t lc, const struct lcore_stat *ls)
{
	long double st;

	st = (long double)rte_get_timer_hz() / US_PER_S;

	if (lc == UINT32_MAX)
		fprintf(f, "%s(AGGREGATE)={\n", __func__);
	else
		fprintf(f, "%s(lcore=%u)={\n", __func__, lc);

	fprintf(f, "\tnb_cycle=%" PRIu64 "(%.2Lf usec),\n",
		ls->nb_cycle, (long double)ls->nb_cycle / st);

	fprintf(f, "\tDEQ+ENQ={\n");

	fprintf(f, "\t\tnb_call=%" PRIu64 ",\n", ls->op.nb_call);
	fprintf(f, "\t\tnb_obj=%" PRIu64 ",\n", ls->op.nb_obj);
	fprintf(f, "\t\tnb_cycle=%" PRIu64 ",\n", ls->op.nb_cycle);
	fprintf(f, "\t\tobj/call(avg): %.2Lf\n",
		(long double)ls->op.nb_obj / ls->op.nb_call);
	fprintf(f, "\t\tcycles/obj(avg): %.2Lf\n",
		(long double)ls->op.nb_cycle / ls->op.nb_obj);
	fprintf(f, "\t\tcycles/call(avg): %.2Lf\n",
		(long double)ls->op.nb_cycle / ls->op.nb_call);

	/* if min/max cycles per call stats were collected */
	if (ls->op.min_cycle != UINT64_MAX) {
		fprintf(f, "\t\tmax cycles/call=%" PRIu64 "(%.2Lf usec),\n",
			ls->op.max_cycle,
			(long double)ls->op.max_cycle / st);
		fprintf(f, "\t\tmin cycles/call=%" PRIu64 "(%.2Lf usec),\n",
			ls->op.min_cycle,
			(long double)ls->op.min_cycle / st);
	}

	fprintf(f, "\t},\n");
	fprintf(f, "};\n");
}
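/* fill every counter of a ring element with the given 32-bit pattern */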
static void
fill_ring_elm(struct ring_elem *elm, uint32_t fill)
{
	uint32_t i;

	for (i = 0; i != RTE_DIM(elm->cnt); i++)
		elm->cnt[i] = fill;
}
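/*
 * Verify that each dequeued object still holds the expected pattern, then
 * overwrite it with the new fill pattern; on mismatch, dump both buffers
 * under a lock and return -EINVAL.
 */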
static int32_t
check_updt_elem(struct ring_elem *elm[], uint32_t num,
	const struct ring_elem *check, const struct ring_elem *fill)
{
	uint32_t i;

	static rte_spinlock_t dump_lock;

	for (i = 0; i != num; i++) {
		if (memcmp(check, elm[i], sizeof(*check)) != 0) {
			rte_spinlock_lock(&dump_lock);
			printf("%s(lc=%u, num=%u) failed at %u-th iter, "
				"offending object: %p\n",
				__func__, rte_lcore_id(), num, i, elm[i]);
			rte_memdump(stdout, "expected", check, sizeof(*check));
			rte_memdump(stdout, "result", elm[i], sizeof(*elm[i]));
			rte_spinlock_unlock(&dump_lock);
			return -EINVAL;
		}
		memcpy(elm[i], fill, sizeof(*elm[i]));
	}

	return 0;
}
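/* report a failure if a ring operation handled fewer objects than requested */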
static int
check_ring_op(uint32_t exp, uint32_t res, uint32_t lc,
	const char *fname, const char *opname)
{
	if (exp != res) {
		printf("%s(lc=%u) failure: %s expected: %u, returned %u\n",
			fname, lc, opname, exp, res);
		return -ENOSPC;
	}
	return 0;
}
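/*
 * Worker loop: wait for the start command, then repeatedly dequeue a
 * pseudo-random number of objects, verify and rewrite their contents, and
 * enqueue them back, until the stop command is observed. With prcs != 0
 * every dequeue/enqueue pair is timed individually, otherwise only the
 * overall run time is recorded.
 */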
static int
test_worker(void *arg, const char *fname, int32_t prcs)
{
	int32_t rc;
	uint32_t lc, n, num;
	uint64_t cl, tm0, tm1;
	struct lcore_arg *la;
	struct ring_elem def_elm, loc_elm;
	struct ring_elem *obj[2 * BULK_NUM];

	la = arg;
	lc = rte_lcore_id();

	fill_ring_elm(&def_elm, UINT32_MAX);
	fill_ring_elm(&loc_elm, lc);

	while (wrk_cmd != WRK_CMD_RUN) {
		rte_smp_rmb();
		rte_pause();
	}

	cl = rte_rdtsc_precise();

	do {
		/* num in interval [7/8, 11/8) of BULK_NUM */
		num = 7 * BULK_NUM / 8 + rte_rand() % (BULK_NUM / 2);

		/* reset all pointer values */
		memset(obj, 0, sizeof(obj));

		/* dequeue num elems */
		tm0 = (prcs != 0) ? rte_rdtsc_precise() : 0;
		n = _st_ring_dequeue_bulk(la->rng, (void **)obj, num, NULL);
		tm0 = (prcs != 0) ? rte_rdtsc_precise() - tm0 : 0;

		/* check return value and objects */
		rc = check_ring_op(num, n, lc, fname,
			RTE_STR(_st_ring_dequeue_bulk));
		if (rc == 0)
			rc = check_updt_elem(obj, num, &def_elm, &loc_elm);
		if (rc != 0)
			break;

		/* enqueue num elems */
		rte_compiler_barrier();
		rc = check_updt_elem(obj, num, &loc_elm, &def_elm);
		if (rc != 0)
			break;

		tm1 = (prcs != 0) ? rte_rdtsc_precise() : 0;
		n = _st_ring_enqueue_bulk(la->rng, (void **)obj, num, NULL);
		tm1 = (prcs != 0) ? rte_rdtsc_precise() - tm1 : 0;

		/* check return value */
		rc = check_ring_op(num, n, lc, fname,
			RTE_STR(_st_ring_enqueue_bulk));
		if (rc != 0)
			break;

		lcore_stat_update(&la->stats, 1, num, tm0 + tm1, prcs);

	} while (wrk_cmd == WRK_CMD_RUN);

	cl = rte_rdtsc_precise() - cl;
	if (prcs == 0)
		lcore_stat_update(&la->stats, 0, 0, cl, 0);
	la->stats.nb_cycle = cl;
	return rc;
}
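
/* worker flavours: precise per-call timing vs. average-only accounting */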
static int
test_worker_prcs(void *arg)
{
	return test_worker(arg, __func__, 1);
}

static int
test_worker_avg(void *arg)
{
	return test_worker(arg, __func__, 0);
}
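/* free the ring and the object array allocated by mt1_init() */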
static void
mt1_fini(struct rte_ring *rng, void *data)
{
	rte_free(rng);
	rte_free(data);
}
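/*
 * Allocate the object array and a ring sized to twice the number of
 * objects (so enqueues never fail for lack of free space), initialise the
 * ring and pre-populate it with all objects filled with the default pattern.
 */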
static int
mt1_init(struct rte_ring **rng, void **data, uint32_t num)
{
	int32_t rc;
	size_t sz;
	uint32_t i, nr;
	struct rte_ring *r;
	struct ring_elem *elm;
	void *p;

	*rng = NULL;
	*data = NULL;

	sz = num * sizeof(*elm);
	elm = rte_zmalloc(NULL, sz, __alignof__(*elm));
	if (elm == NULL) {
		printf("%s: alloc(%zu) for %u elems data failed\n",
			__func__, sz, num);
		return -ENOMEM;
	}

	*data = elm;

	/* alloc ring */
	nr = 2 * num;
	sz = rte_ring_get_memsize(nr);
	r = rte_zmalloc(NULL, sz, __alignof__(*r));
	if (r == NULL) {
		printf("%s: alloc(%zu) for FIFO with %u elems failed\n",
			__func__, sz, nr);
		return -ENOMEM;
	}

	*rng = r;

	rc = _st_ring_init(r, RING_NAME, nr);
	if (rc != 0) {
		printf("%s: _st_ring_init(%p, %u) failed, error: %d(%s)\n",
			__func__, r, nr, rc, strerror(-rc));
		return rc;
	}

	for (i = 0; i != num; i++) {
		fill_ring_elm(elm + i, UINT32_MAX);
		p = elm + i;
		if (_st_ring_enqueue_bulk(r, &p, 1, NULL) != 1)
			break;
	}

	if (i != num) {
		printf("%s: _st_ring_enqueue(%p, %u) returned %u\n",
			__func__, r, num, i);
		return -ENOSPC;
	}

	return 0;
}
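/*
 * Test driver: launch the given worker function on every worker lcore,
 * let it run for run_time seconds, then stop the workers, aggregate and
 * dump their statistics.
 */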
static int
test_mt1(int (*test)(void *))
{
	int32_t rc;
	uint32_t lc, mc;
	struct rte_ring *r;
	void *data;
	struct lcore_arg arg[RTE_MAX_LCORE];

	static const struct lcore_stat init_stat = {
		.op.min_cycle = UINT64_MAX,
	};

	rc = mt1_init(&r, &data, RING_SIZE);
	if (rc != 0) {
		mt1_fini(r, data);
		return rc;
	}

	memset(arg, 0, sizeof(arg));

	/* launch on all workers */
	RTE_LCORE_FOREACH_WORKER(lc) {
		arg[lc].rng = r;
		arg[lc].stats = init_stat;
		rte_eal_remote_launch(test, &arg[lc], lc);
	}

	/* signal workers to start the test */
	wrk_cmd = WRK_CMD_RUN;
	rte_smp_wmb();

	usleep(run_time * US_PER_S);

	/* signal workers to stop the test */
	wrk_cmd = WRK_CMD_STOP;
	rte_smp_wmb();

	/* wait for workers and collect stats. */
	mc = rte_lcore_id();
	arg[mc].stats = init_stat;

	rc = 0;
	RTE_LCORE_FOREACH_WORKER(lc) {
		rc |= rte_eal_wait_lcore(lc);
		lcore_stat_aggr(&arg[mc].stats, &arg[lc].stats);
		if (verbose != 0)
			lcore_stat_dump(stdout, lc, &arg[lc].stats);
	}

	lcore_stat_dump(stdout, UINT32_MAX, &arg[mc].stats);
	mt1_fini(r, data);
	return rc;
}
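/* test cases exposed to the common stress-test driver */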
static const struct test_case tests[] = {
	{
		.name = "MT-WRK_ENQ_DEQ-MST_NONE-PRCS",
		.func = test_mt1,
		.wfunc = test_worker_prcs,
	},
	{
		.name = "MT-WRK_ENQ_DEQ-MST_NONE-AVG",
		.func = test_mt1,
		.wfunc = test_worker_avg,
	},
};
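/*
 * A rough sketch (an assumption about the harness in test_ring_stress.h,
 * not its actual code) of how this table is expected to be consumed:
 *
 *	for (i = 0; i != RTE_DIM(tests); i++) {
 *		rc = tests[i].func(tests[i].wfunc);
 *		printf("TEST %s %s\n", tests[i].name,
 *			rc == 0 ? "OK" : "FAILED");
 *	}
 */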