/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2017-2018 Intel Corporation
 */

#include <stdlib.h>
#include <string.h>

#include <rte_errno.h>
#include <rte_fbarray.h>
#include <rte_memzone.h>
#include <rte_memory.h>
#include <rte_string_fns.h>
#include <rte_rwlock.h>

#include "eal_private.h"
#include "eal_internal_cfg.h"
#include "eal_memalloc.h"
struct mem_event_callback_entry {
	TAILQ_ENTRY(mem_event_callback_entry) next;
	char name[RTE_MEM_EVENT_CALLBACK_NAME_LEN];
	rte_mem_event_callback_t clb;
	void *arg;
};

struct mem_alloc_validator_entry {
	TAILQ_ENTRY(mem_alloc_validator_entry) next;
	char name[RTE_MEM_ALLOC_VALIDATOR_NAME_LEN];
	rte_mem_alloc_validator_t clb;
	int socket_id;
	size_t limit;
};
/** Double linked list of actions. */
TAILQ_HEAD(mem_event_callback_entry_list, mem_event_callback_entry);
TAILQ_HEAD(mem_alloc_validator_entry_list, mem_alloc_validator_entry);

static struct mem_event_callback_entry_list mem_event_callback_list =
	TAILQ_HEAD_INITIALIZER(mem_event_callback_list);
static rte_rwlock_t mem_event_rwlock = RTE_RWLOCK_INITIALIZER;

static struct mem_alloc_validator_entry_list mem_alloc_validator_list =
	TAILQ_HEAD_INITIALIZER(mem_alloc_validator_list);
static rte_rwlock_t mem_alloc_validator_rwlock = RTE_RWLOCK_INITIALIZER;
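
/*
 * Note on locking: registration and unregistration take the write lock on the
 * respective rwlock, while notification and validation walk the lists under
 * the read lock, so callbacks never run concurrently with list modification.
 */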
static struct mem_event_callback_entry *
find_mem_event_callback(const char *name, void *arg)
{
	struct mem_event_callback_entry *r;

	TAILQ_FOREACH(r, &mem_event_callback_list, next) {
		if (!strcmp(r->name, name) && r->arg == arg)
			break;
	}
	return r;
}

static struct mem_alloc_validator_entry *
find_mem_alloc_validator(const char *name, int socket_id)
{
	struct mem_alloc_validator_entry *r;

	TAILQ_FOREACH(r, &mem_alloc_validator_list, next) {
		if (!strcmp(r->name, name) && r->socket_id == socket_id)
			break;
	}
	return r;
}
bool
eal_memalloc_is_contig(const struct rte_memseg_list *msl, void *start,
		size_t len)
{
	void *end, *aligned_start, *aligned_end;
	size_t pgsz = (size_t)msl->page_sz;
	const struct rte_memseg *ms;
	const struct internal_config *internal_conf =
		eal_get_internal_configuration();

	/* for IOVA_VA, it's always contiguous */
	if (rte_eal_iova_mode() == RTE_IOVA_VA && !msl->external)
		return true;

	/* for legacy memory, it's always contiguous */
	if (internal_conf->legacy_mem)
		return true;

	end = RTE_PTR_ADD(start, len);

	/* for nohuge, we check pagemap, otherwise check memseg */
	if (!rte_eal_has_hugepages()) {
		rte_iova_t cur, expected;

		aligned_start = RTE_PTR_ALIGN_FLOOR(start, pgsz);
		aligned_end = RTE_PTR_ALIGN_CEIL(end, pgsz);

		/* if start and end are on the same page, bail out early */
		if (RTE_PTR_DIFF(aligned_end, aligned_start) == pgsz)
			return true;

		/* skip first iteration */
		cur = rte_mem_virt2iova(aligned_start);
		expected = cur + pgsz;
		aligned_start = RTE_PTR_ADD(aligned_start, pgsz);

		/* check that each page's IOVA follows the previous one */
		while (aligned_start < aligned_end) {
			cur = rte_mem_virt2iova(aligned_start);
			if (cur != expected)
				return false;
			aligned_start = RTE_PTR_ADD(aligned_start, pgsz);
			expected += pgsz;
		}
	} else {
		int start_seg, end_seg, cur_seg;
		rte_iova_t cur, expected;

		aligned_start = RTE_PTR_ALIGN_FLOOR(start, pgsz);
		aligned_end = RTE_PTR_ALIGN_CEIL(end, pgsz);

		start_seg = RTE_PTR_DIFF(aligned_start, msl->base_va) /
				pgsz;
		end_seg = RTE_PTR_DIFF(aligned_end, msl->base_va) /
				pgsz;

		/* if start and end are on the same page, bail out early */
		if (RTE_PTR_DIFF(aligned_end, aligned_start) == pgsz)
			return true;

		/* skip first iteration */
		ms = rte_fbarray_get(&msl->memseg_arr, start_seg);
		cur = ms->iova;
		expected = cur + pgsz;

		/* if we can't access IOVA addresses, assume non-contiguous */
		if (cur == RTE_BAD_IOVA)
			return false;

		for (cur_seg = start_seg + 1; cur_seg < end_seg;
				cur_seg++, expected += pgsz) {
			ms = rte_fbarray_get(&msl->memseg_arr, cur_seg);

			if (ms->iova != expected)
				return false;
		}
	}
	return true;
}
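
/*
 * Illustrative sketch (not part of this file): eal_memalloc_is_contig() is
 * consulted when an allocation must be IOVA-contiguous. From application code
 * such a request would typically go through the memzone API, roughly as below;
 * the zone name and size are made up for the example.
 *
 *	#include <rte_memzone.h>
 *	#include <rte_lcore.h>
 *
 *	static const struct rte_memzone *
 *	reserve_iova_contig_zone(void)
 *	{
 *		return rte_memzone_reserve("example_mz", 2 * 1024 * 1024,
 *				rte_socket_id(), RTE_MEMZONE_IOVA_CONTIG);
 *	}
 */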
int
eal_memalloc_mem_event_callback_register(const char *name,
		rte_mem_event_callback_t clb, void *arg)
{
	struct mem_event_callback_entry *entry;
	int ret, len;

	if (name == NULL || clb == NULL) {
		rte_errno = EINVAL;
		return -1;
	}
	len = strnlen(name, RTE_MEM_EVENT_CALLBACK_NAME_LEN);
	if (len == 0) {
		rte_errno = EINVAL;
		return -1;
	} else if (len == RTE_MEM_EVENT_CALLBACK_NAME_LEN) {
		rte_errno = ENAMETOOLONG;
		return -1;
	}
	rte_rwlock_write_lock(&mem_event_rwlock);

	entry = find_mem_event_callback(name, arg);
	if (entry != NULL) {
		rte_errno = EEXIST;
		ret = -1;
		goto unlock;
	}

	entry = malloc(sizeof(*entry));
	if (entry == NULL) {
		rte_errno = ENOMEM;
		ret = -1;
		goto unlock;
	}

	/* callback successfully created and is valid, add it to the list */
	entry->clb = clb;
	entry->arg = arg;
	strlcpy(entry->name, name, RTE_MEM_EVENT_CALLBACK_NAME_LEN);
	TAILQ_INSERT_TAIL(&mem_event_callback_list, entry, next);

	ret = 0;

	RTE_LOG(DEBUG, EAL, "Mem event callback '%s:%p' registered\n",
			name, arg);

unlock:
	rte_rwlock_write_unlock(&mem_event_rwlock);
	return ret;
}
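
/*
 * Illustrative usage sketch (not part of this file): applications reach this
 * code through the public rte_mem_event_callback_register() API. The callback
 * body and the "example" name below are made up for the example.
 *
 *	#include <stdio.h>
 *	#include <rte_memory.h>
 *
 *	static void
 *	example_mem_event_cb(enum rte_mem_event event, const void *addr,
 *			size_t len, void *arg __rte_unused)
 *	{
 *		if (event == RTE_MEM_EVENT_ALLOC)
 *			printf("mapped %zu bytes at %p\n", len, addr);
 *		else
 *			printf("unmapped %zu bytes at %p\n", len, addr);
 *	}
 *
 *	...
 *	rte_mem_event_callback_register("example", example_mem_event_cb, NULL);
 */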
int
eal_memalloc_mem_event_callback_unregister(const char *name, void *arg)
{
	struct mem_event_callback_entry *entry;
	int ret, len;

	if (name == NULL) {
		rte_errno = EINVAL;
		return -1;
	}
	len = strnlen(name, RTE_MEM_EVENT_CALLBACK_NAME_LEN);
	if (len == 0) {
		rte_errno = EINVAL;
		return -1;
	} else if (len == RTE_MEM_EVENT_CALLBACK_NAME_LEN) {
		rte_errno = ENAMETOOLONG;
		return -1;
	}
	rte_rwlock_write_lock(&mem_event_rwlock);

	entry = find_mem_event_callback(name, arg);
	if (entry == NULL) {
		rte_errno = ENOENT;
		ret = -1;
		goto unlock;
	}
	TAILQ_REMOVE(&mem_event_callback_list, entry, next);
	free(entry);

	ret = 0;

	RTE_LOG(DEBUG, EAL, "Mem event callback '%s:%p' unregistered\n",
			name, arg);

unlock:
	rte_rwlock_write_unlock(&mem_event_rwlock);
	return ret;
}
void
eal_memalloc_mem_event_notify(enum rte_mem_event event, const void *start,
		size_t len)
{
	struct mem_event_callback_entry *entry;

	rte_rwlock_read_lock(&mem_event_rwlock);

	TAILQ_FOREACH(entry, &mem_event_callback_list, next) {
		RTE_LOG(DEBUG, EAL, "Calling mem event callback '%s:%p'\n",
			entry->name, entry->arg);
		entry->clb(event, start, len, entry->arg);
	}

	rte_rwlock_read_unlock(&mem_event_rwlock);
}
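
/*
 * Note (assumption based on how these callbacks are used elsewhere in EAL):
 * the allocator calls eal_memalloc_mem_event_notify() with RTE_MEM_EVENT_ALLOC
 * after new pages are mapped and with RTE_MEM_EVENT_FREE before pages are
 * unmapped, so subscribers such as the VFIO DMA mapping code can keep their
 * view of the address space in sync.
 */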
int
eal_memalloc_mem_alloc_validator_register(const char *name,
		rte_mem_alloc_validator_t clb, int socket_id, size_t limit)
{
	struct mem_alloc_validator_entry *entry;
	int ret, len;

	if (name == NULL || clb == NULL || socket_id < 0) {
		rte_errno = EINVAL;
		return -1;
	}
	len = strnlen(name, RTE_MEM_ALLOC_VALIDATOR_NAME_LEN);
	if (len == 0) {
		rte_errno = EINVAL;
		return -1;
	} else if (len == RTE_MEM_ALLOC_VALIDATOR_NAME_LEN) {
		rte_errno = ENAMETOOLONG;
		return -1;
	}
	rte_rwlock_write_lock(&mem_alloc_validator_rwlock);

	entry = find_mem_alloc_validator(name, socket_id);
	if (entry != NULL) {
		rte_errno = EEXIST;
		ret = -1;
		goto unlock;
	}

	entry = malloc(sizeof(*entry));
	if (entry == NULL) {
		rte_errno = ENOMEM;
		ret = -1;
		goto unlock;
	}

	/* callback successfully created and is valid, add it to the list */
	entry->clb = clb;
	entry->socket_id = socket_id;
	entry->limit = limit;
	strlcpy(entry->name, name, RTE_MEM_ALLOC_VALIDATOR_NAME_LEN);
	TAILQ_INSERT_TAIL(&mem_alloc_validator_list, entry, next);

	ret = 0;

	RTE_LOG(DEBUG, EAL, "Mem alloc validator '%s' on socket %i with limit %zu registered\n",
		name, socket_id, limit);

unlock:
	rte_rwlock_write_unlock(&mem_alloc_validator_rwlock);
	return ret;
}
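
/*
 * Illustrative usage sketch (not part of this file): a validator is installed
 * through the public rte_mem_alloc_validator_register() API and is consulted
 * once total socket memory would reach the given limit; returning a negative
 * value rejects the allocation. The validator body and the limit below are
 * made up for the example.
 *
 *	#include <rte_memory.h>
 *
 *	static int
 *	example_validator(int socket_id, size_t cur_limit, size_t new_len)
 *	{
 *		(void)socket_id;
 *		return new_len > cur_limit ? -1 : 0;
 *	}
 *
 *	...
 *	rte_mem_alloc_validator_register("example", example_validator, 0,
 *			1UL << 30);
 */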
int
eal_memalloc_mem_alloc_validator_unregister(const char *name, int socket_id)
{
	struct mem_alloc_validator_entry *entry;
	int ret, len;

	if (name == NULL || socket_id < 0) {
		rte_errno = EINVAL;
		return -1;
	}
	len = strnlen(name, RTE_MEM_ALLOC_VALIDATOR_NAME_LEN);
	if (len == 0) {
		rte_errno = EINVAL;
		return -1;
	} else if (len == RTE_MEM_ALLOC_VALIDATOR_NAME_LEN) {
		rte_errno = ENAMETOOLONG;
		return -1;
	}
	rte_rwlock_write_lock(&mem_alloc_validator_rwlock);

	entry = find_mem_alloc_validator(name, socket_id);
	if (entry == NULL) {
		rte_errno = ENOENT;
		ret = -1;
		goto unlock;
	}
	TAILQ_REMOVE(&mem_alloc_validator_list, entry, next);
	free(entry);

	ret = 0;

	RTE_LOG(DEBUG, EAL, "Mem alloc validator '%s' on socket %i unregistered\n",
		name, socket_id);

unlock:
	rte_rwlock_write_unlock(&mem_alloc_validator_rwlock);
	return ret;
}
int
eal_memalloc_mem_alloc_validate(int socket_id, size_t new_len)
{
	struct mem_alloc_validator_entry *entry;
	int ret = 0;

	rte_rwlock_read_lock(&mem_alloc_validator_rwlock);

	TAILQ_FOREACH(entry, &mem_alloc_validator_list, next) {
		if (entry->socket_id != socket_id || entry->limit > new_len)
			continue;
		RTE_LOG(DEBUG, EAL, "Calling mem alloc validator '%s' on socket %i\n",
			entry->name, entry->socket_id);
		if (entry->clb(socket_id, entry->limit, new_len) < 0)
			ret = -1;
	}

	rte_rwlock_read_unlock(&mem_alloc_validator_rwlock);

	return ret;
}