/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2019 Intel Corporation
 */

#ifndef _RTE_STACK_STD_H_
#define _RTE_STACK_STD_H_

#include <rte_branch_prediction.h>

/**
 * @internal Push several objects on the stack (MT-safe).
 *
 * @param s
 *   A pointer to the stack structure.
 * @param obj_table
 *   A pointer to a table of void * pointers (objects).
 * @param n
 *   The number of objects to push on the stack from the obj_table.
 * @return
 *   Actual number of objects pushed (either 0 or *n*).
 */
__rte_experimental
static __rte_always_inline unsigned int
__rte_stack_std_push(struct rte_stack *s, void * const *obj_table,
		     unsigned int n)
{
	struct rte_stack_std *stack = &s->stack_std;
	unsigned int index;
	void **cache_objs;

	rte_spinlock_lock(&stack->lock);
	cache_objs = &stack->objs[stack->len];

	/* Is there sufficient space in the stack? */
	if ((stack->len + n) > s->capacity) {
		rte_spinlock_unlock(&stack->lock);
		return 0;
	}

	/* Add the objects to the top of the stack */
	for (index = 0; index < n; ++index, obj_table++)
		cache_objs[index] = *obj_table;

	stack->len += n;

	rte_spinlock_unlock(&stack->lock);
	return n;
}
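
/*
 * Illustrative sketch (not part of the original header): how a batch push
 * through this helper might look. The stack name, size, and object values
 * are assumptions made for the example only; rte_stack_create() and
 * rte_socket_id() are the regular DPDK calls used to obtain a stack and the
 * caller's NUMA socket. Applications normally go through rte_stack_push(),
 * which dispatches to this internal helper for standard (non-LF) stacks.
 *
 *   struct rte_stack *s;
 *   void *objs[4] = { obj0, obj1, obj2, obj3 };
 *
 *   s = rte_stack_create("example", 1024, rte_socket_id(), 0);
 *   if (s == NULL)
 *       rte_panic("cannot create stack\n");
 *
 *   // Pushes all four pointers, or none if fewer than four slots are free.
 *   if (__rte_stack_std_push(s, objs, 4) == 0)
 *       printf("stack full\n");
 */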

/**
 * @internal Pop several objects from the stack (MT-safe).
 *
 * @param s
 *   A pointer to the stack structure.
 * @param obj_table
 *   A pointer to a table of void * pointers (objects).
 * @param n
 *   The number of objects to pop from the stack.
 * @return
 *   Actual number of objects popped (either 0 or *n*).
 */
__rte_experimental
static __rte_always_inline unsigned int
__rte_stack_std_pop(struct rte_stack *s, void **obj_table, unsigned int n)
{
	struct rte_stack_std *stack = &s->stack_std;
	unsigned int index, len;
	void **cache_objs;

	rte_spinlock_lock(&stack->lock);

	if (unlikely(n > stack->len)) {
		rte_spinlock_unlock(&stack->lock);
		return 0;
	}

	cache_objs = stack->objs;

	for (index = 0, len = stack->len - 1; index < n;
			++index, len--, obj_table++)
		*obj_table = cache_objs[len];

	stack->len -= n;
	rte_spinlock_unlock(&stack->lock);

	return n;
}
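
/*
 * Illustrative sketch (not part of the original header): popping a fixed-size
 * batch. Objects come back in LIFO order, i.e. the most recently pushed
 * pointer lands in objs[0]. The batch size of 4 is an assumption made for the
 * example only.
 *
 *   void *objs[4];
 *
 *   if (__rte_stack_std_pop(s, objs, 4) == 0)
 *       printf("fewer than 4 objects on the stack, nothing popped\n");
 */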

/**
 * @internal Return the number of used entries in a stack.
 *
 * @param s
 *   A pointer to the stack structure.
 * @return
 *   The number of used entries in the stack.
 */
__rte_experimental
static __rte_always_inline unsigned int
__rte_stack_std_count(struct rte_stack *s)
{
	return (unsigned int)s->stack_std.len;
}
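
/*
 * Illustrative sketch (not part of the original header): draining whatever is
 * currently on the stack one object at a time. Because the count is sampled
 * outside the lock, it is only a snapshot; concurrent producers or consumers
 * can change it before the pops below run, which is why each pop's return
 * value is still checked.
 *
 *   unsigned int avail = __rte_stack_std_count(s);
 *   void *obj;
 *
 *   while (avail-- > 0 && __rte_stack_std_pop(s, &obj, 1) == 1)
 *       process(obj);   // process() is a hypothetical per-object handler
 */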

/**
 * @internal Initialize a standard stack.
 *
 * @param s
 *   A pointer to the stack structure.
 */
void
rte_stack_std_init(struct rte_stack *s);

/**
 * @internal Return the memory required for a standard stack.
 *
 * @param count
 *   The maximum number of objects the stack can hold.
 * @return
 *   The bytes to allocate for a standard stack.
 */
ssize_t
rte_stack_std_get_memsize(unsigned int count);
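
/*
 * Illustrative sketch (not part of the original header): how the two helpers
 * above fit together when a standard stack is set up by hand. The memzone
 * name, size, and socket choice are assumptions for the example only; the
 * normal entry point for applications is rte_stack_create(), which performs
 * these steps (plus flag handling and memzone bookkeeping) internally.
 *
 *   ssize_t sz = rte_stack_std_get_memsize(1024);
 *   const struct rte_memzone *mz;
 *   struct rte_stack *s;
 *
 *   mz = rte_memzone_reserve("example_stack", sz, rte_socket_id(), 0);
 *   if (mz == NULL)
 *       return -ENOMEM;
 *
 *   s = mz->addr;
 *   s->capacity = 1024;
 *   rte_stack_std_init(s);
 */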

#endif /* _RTE_STACK_STD_H_ */