lib/librte_stack/rte_stack_lf_generic.h
/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2019 Intel Corporation
 */

#ifndef _RTE_STACK_LF_GENERIC_H_
#define _RTE_STACK_LF_GENERIC_H_

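/*
 * Generic (full-barrier) flavor of the internal lock-free stack helpers.
 * The list head packs the top pointer and a modification counter into a
 * single 128-bit word; push and pop both replace it with a 128-bit
 * compare-and-swap, and the incrementing counter guards against ABA when
 * the same element address reappears at the top of the list. This path is
 * only implemented for RTE_ARCH_X86_64; on other architectures the helpers
 * below compile to empty stubs.
 */
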
#include <rte_branch_prediction.h>
#include <rte_prefetch.h>

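/**
 * Return the number of used entries in the lock-free stack.
 *
 * @param s
 *   A pointer to the stack structure.
 * @return
 *   The approximate number of used entries; see the comment below on why
 *   the value may under-state, but never over-state, the true count.
 */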
static __rte_always_inline unsigned int
__rte_stack_lf_count(struct rte_stack *s)
{
	/* stack_lf_push() and stack_lf_pop() do not update the list's contents
	 * and stack_lf->len atomically, which can cause the list to appear
	 * shorter than it actually is if this function is called while other
	 * threads are modifying the list.
	 *
	 * However, given the inherently approximate nature of the get_count
	 * callback -- even if the list and its size were updated atomically,
	 * the size could change between when get_count executes and when the
	 * value is returned to the caller -- this is acceptable.
	 *
	 * The stack_lf->len updates are placed such that the list may appear to
	 * have fewer elements than it does, but will never appear to have more
	 * elements. If the mempool is near-empty to the point that this is a
	 * concern, the user should consider increasing the mempool size.
	 */
	return (unsigned int)rte_atomic64_read(&s->stack_lf.used.len);
}

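/**
 * Push a pre-linked chain of elements onto a lock-free list.
 *
 * @param list
 *   A pointer to the lock-free list.
 * @param first
 *   The first element of the chain to push.
 * @param last
 *   The last element of the chain to push.
 * @param num
 *   The number of elements in the chain.
 */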
static __rte_always_inline void
__rte_stack_lf_push_elems(struct rte_stack_lf_list *list,
			  struct rte_stack_lf_elem *first,
			  struct rte_stack_lf_elem *last,
			  unsigned int num)
{
#ifndef RTE_ARCH_X86_64
	RTE_SET_USED(first);
	RTE_SET_USED(last);
	RTE_SET_USED(list);
	RTE_SET_USED(num);
#else
	struct rte_stack_lf_head old_head;
	int success;

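	/* Take a snapshot of the current head; rte_atomic128_cmp_exchange()
	 * below refreshes old_head whenever the compare-and-swap fails.
	 */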
	old_head = list->head;

	do {
		struct rte_stack_lf_head new_head;

		/* An acquire fence (or stronger) is needed for weak memory
		 * models to establish a synchronized-with relationship between
		 * the list->head load and store-release operations (as part of
		 * the rte_atomic128_cmp_exchange()).
		 */
		rte_smp_mb();

		/* Swing the top pointer to the first element in the list and
		 * make the last element point to the old top.
		 */
		new_head.top = first;
		new_head.cnt = old_head.cnt + 1;

		last->next = old_head.top;

		/* old_head is updated on failure */
		success = rte_atomic128_cmp_exchange(
				(rte_int128_t *)&list->head,
				(rte_int128_t *)&old_head,
				(rte_int128_t *)&new_head,
				1, __ATOMIC_RELEASE,
				__ATOMIC_RELAXED);
	} while (success == 0);

	rte_atomic64_add(&list->len, num);
#endif
}

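/**
 * Pop a chain of num elements from a lock-free list.
 *
 * @param list
 *   A pointer to the lock-free list.
 * @param num
 *   The number of elements to pop.
 * @param obj_table
 *   If non-NULL, an array that receives the data pointer of each popped
 *   element.
 * @param last
 *   If non-NULL, set to the last element popped.
 * @return
 *   The first popped element (the old list head), or NULL if fewer than
 *   num elements are on the list or the architecture is unsupported.
 */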
static __rte_always_inline struct rte_stack_lf_elem *
__rte_stack_lf_pop_elems(struct rte_stack_lf_list *list,
			 unsigned int num,
			 void **obj_table,
			 struct rte_stack_lf_elem **last)
{
#ifndef RTE_ARCH_X86_64
	RTE_SET_USED(obj_table);
	RTE_SET_USED(last);
	RTE_SET_USED(list);
	RTE_SET_USED(num);

	return NULL;
#else
	struct rte_stack_lf_head old_head;
	int success;

	/* Reserve num elements, if available */
	while (1) {
		uint64_t len = rte_atomic64_read(&list->len);

		/* Does the list contain enough elements? */
		if (unlikely(len < num))
			return NULL;

		if (rte_atomic64_cmpset((volatile uint64_t *)&list->len,
					len, len - num))
			break;
	}
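	/* A successful reservation guarantees that at least num elements are
	 * linked on the list: push only adds to list->len after its elements
	 * are visible, so len never over-states the list's length.
	 */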

	old_head = list->head;

	/* Pop num elements */
	do {
		struct rte_stack_lf_head new_head;
		struct rte_stack_lf_elem *tmp;
		unsigned int i;

		/* An acquire fence (or stronger) is needed for weak memory
		 * models to ensure the LF LIFO element reads are properly
		 * ordered with respect to the head pointer read.
		 */
		rte_smp_mb();

		rte_prefetch0(old_head.top);

		tmp = old_head.top;

		/* Traverse the list to find the new head. A next pointer will
		 * either point to another element or NULL; if a thread
		 * encounters a pointer that has already been popped, the CAS
		 * will fail.
		 */
		for (i = 0; i < num && tmp != NULL; i++) {
			rte_prefetch0(tmp->next);
			if (obj_table)
				obj_table[i] = tmp->data;
			if (last)
				*last = tmp;
			tmp = tmp->next;
		}

		/* If NULL was encountered, the list was modified while
		 * traversing it. Retry.
		 */
		if (i != num)
			continue;

		new_head.top = tmp;
		new_head.cnt = old_head.cnt + 1;

		/* old_head is updated on failure */
		success = rte_atomic128_cmp_exchange(
				(rte_int128_t *)&list->head,
				(rte_int128_t *)&old_head,
				(rte_int128_t *)&new_head,
				1, __ATOMIC_RELEASE,
				__ATOMIC_RELAXED);
	} while (success == 0);

	return old_head.top;
#endif
}
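
/* Illustrative usage sketch (not part of this header): applications do not
 * call these helpers directly, but reach them through the public rte_stack
 * API, roughly as follows. Error handling is omitted and objs[] is assumed
 * to be filled with object pointers before the push; see rte_stack.h for
 * the exact prototypes and flags.
 *
 *	struct rte_stack *s = rte_stack_create("lf_stack", 1024,
 *					       rte_socket_id(),
 *					       RTE_STACK_F_LF);
 *	void *objs[32];
 *	unsigned int n;
 *
 *	n = rte_stack_push(s, objs, 32);
 *	n = rte_stack_pop(s, objs, n);
 *	rte_stack_free(s);
 */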

#endif /* _RTE_STACK_LF_GENERIC_H_ */