/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2019 Intel Corporation
 */

#ifndef _RTE_STACK_STD_H_
#define _RTE_STACK_STD_H_

#include <rte_branch_prediction.h>

/**
 * @internal Push several objects on the stack (MT-safe).
 *
 * @param s
 *   A pointer to the stack structure.
 * @param obj_table
 *   A pointer to a table of void * pointers (objects).
 * @param n
 *   The number of objects to push on the stack from the obj_table.
 * @return
 *   Actual number of objects pushed (either 0 or *n*).
 */
static __rte_always_inline unsigned int
__rte_stack_std_push(struct rte_stack *s, void * const *obj_table,
		     unsigned int n)
{
	struct rte_stack_std *stack = &s->stack_std;
	unsigned int index;
	void **cache_objs;

	rte_spinlock_lock(&stack->lock);
	cache_objs = &stack->objs[stack->len];

	/* Is there sufficient space in the stack? */
	if ((stack->len + n) > s->capacity) {
		rte_spinlock_unlock(&stack->lock);
		return 0;
	}

	/* Add elements back into the cache */
	for (index = 0; index < n; ++index, obj_table++)
		cache_objs[index] = *obj_table;

	stack->len += n;

	rte_spinlock_unlock(&stack->lock);
	return n;
}
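
/*
 * A caller-side usage sketch, assuming *s* is an rte_stack created without
 * RTE_STACK_F_LF so that the public rte_stack_push() wrapper in rte_stack.h
 * reaches this function. The push is all-or-nothing: either every object is
 * stored, or 0 is returned and the stack is left unchanged.
 *
 *	int a = 1, b = 2;
 *	void *objs[2] = { &a, &b };
 *	unsigned int pushed = rte_stack_push(s, objs, 2);
 *	// pushed == 2 on success, 0 if the stack lacked room for both
 */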

/**
 * @internal Pop several objects from the stack (MT-safe).
 *
 * @param s
 *   A pointer to the stack structure.
 * @param obj_table
 *   A pointer to a table of void * pointers (objects).
 * @param n
 *   The number of objects to pull from the stack.
 * @return
 *   Actual number of objects popped (either 0 or *n*).
 */
static __rte_always_inline unsigned int
__rte_stack_std_pop(struct rte_stack *s, void **obj_table, unsigned int n)
{
	struct rte_stack_std *stack = &s->stack_std;
	unsigned int index, len;
	void **cache_objs;

	rte_spinlock_lock(&stack->lock);

	if (unlikely(n > stack->len)) {
		rte_spinlock_unlock(&stack->lock);
		return 0;
	}

	cache_objs = stack->objs;

	for (index = 0, len = stack->len - 1; index < n;
			++index, len--, obj_table++)
		*obj_table = cache_objs[len];

	stack->len -= n;
	rte_spinlock_unlock(&stack->lock);

	return n;
}
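
/*
 * A matching caller-side sketch for the pop path, under the same assumption
 * (a standard, spinlock-based stack reached via rte_stack_pop() from
 * rte_stack.h). Pop is also all-or-nothing, and objects are returned in
 * LIFO order: objs[0] receives the most recently pushed pointer.
 *
 *	void *objs[2];
 *	unsigned int popped = rte_stack_pop(s, objs, 2);
 *	// popped == 2 on success; objs[0] was the top of the stack
 */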

/**
 * @internal Return the number of used entries in a stack.
 *
 * @param s
 *   A pointer to the stack structure.
 * @return
 *   The number of used entries in the stack.
 */
static __rte_always_inline unsigned int
__rte_stack_std_count(struct rte_stack *s)
{
	return (unsigned int)s->stack_std.len;
}

/**
 * @internal Initialize a standard stack.
 *
 * @param s
 *   A pointer to the stack structure.
 */
void
rte_stack_std_init(struct rte_stack *s);

/**
 * @internal Return the memory required for a standard stack.
 *
 * @param count
 *   The size of the stack.
 * @return
 *   The bytes to allocate for a standard stack.
 */
ssize_t
rte_stack_std_get_memsize(unsigned int count);
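
/*
 * An end-to-end sketch through the public API in rte_stack.h; passing
 * flags == 0 to rte_stack_create() selects this standard (spinlock-based)
 * implementation rather than the lock-free RTE_STACK_F_LF variant.
 *
 *	#include <rte_stack.h>
 *
 *	struct rte_stack *s;
 *	int x = 42;
 *	void *in = &x, *out = NULL;
 *
 *	s = rte_stack_create("std_example", 1024, SOCKET_ID_ANY, 0);
 *	if (s != NULL && rte_stack_push(s, &in, 1) == 1)
 *		rte_stack_pop(s, &out, 1);	// out == &x on success
 *	rte_stack_free(s);
 */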

#endif /* _RTE_STACK_STD_H_ */