/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2017-2018 Intel Corporation
 */
#include <stdlib.h>
#include <string.h>

#include <rte_errno.h>
#include <rte_fbarray.h>
#include <rte_log.h>
#include <rte_memory.h>
#include <rte_rwlock.h>
#include <rte_string_fns.h>

#include "eal_private.h"
#include "eal_internal_cfg.h"
#include "eal_memalloc.h"
16 struct mem_event_callback_entry {
17 TAILQ_ENTRY(mem_event_callback_entry) next;
18 char name[RTE_MEM_EVENT_CALLBACK_NAME_LEN];
19 rte_mem_event_callback_t clb;
23 struct mem_alloc_validator_entry {
24 TAILQ_ENTRY(mem_alloc_validator_entry) next;
25 char name[RTE_MEM_ALLOC_VALIDATOR_NAME_LEN];
26 rte_mem_alloc_validator_t clb;
/** Double linked list of actions. */
TAILQ_HEAD(mem_event_callback_entry_list, mem_event_callback_entry);
TAILQ_HEAD(mem_alloc_validator_entry_list, mem_alloc_validator_entry);

/* registered mem event callbacks; guarded by mem_event_rwlock */
static struct mem_event_callback_entry_list mem_event_callback_list =
	TAILQ_HEAD_INITIALIZER(mem_event_callback_list);
static rte_rwlock_t mem_event_rwlock = RTE_RWLOCK_INITIALIZER;

/* registered alloc validators; guarded by mem_alloc_validator_rwlock */
static struct mem_alloc_validator_entry_list mem_alloc_validator_list =
	TAILQ_HEAD_INITIALIZER(mem_alloc_validator_list);
static rte_rwlock_t mem_alloc_validator_rwlock = RTE_RWLOCK_INITIALIZER;
43 static struct mem_event_callback_entry *
44 find_mem_event_callback(const char *name, void *arg)
46 struct mem_event_callback_entry *r;
48 TAILQ_FOREACH(r, &mem_event_callback_list, next) {
49 if (!strcmp(r->name, name) && r->arg == arg)
55 static struct mem_alloc_validator_entry *
56 find_mem_alloc_validator(const char *name, int socket_id)
58 struct mem_alloc_validator_entry *r;
60 TAILQ_FOREACH(r, &mem_alloc_validator_list, next) {
61 if (!strcmp(r->name, name) && r->socket_id == socket_id)
68 eal_memalloc_is_contig(const struct rte_memseg_list *msl, void *start,
71 void *end, *aligned_start, *aligned_end;
72 size_t pgsz = (size_t)msl->page_sz;
73 const struct rte_memseg *ms;
74 const struct internal_config *internal_conf =
75 eal_get_internal_configuration();
77 /* for IOVA_VA, it's always contiguous */
78 if (rte_eal_iova_mode() == RTE_IOVA_VA && !msl->external)
81 /* for legacy memory, it's always contiguous */
82 if (internal_conf->legacy_mem)
85 end = RTE_PTR_ADD(start, len);
87 /* for nohuge, we check pagemap, otherwise check memseg */
88 if (!rte_eal_has_hugepages()) {
89 rte_iova_t cur, expected;
91 aligned_start = RTE_PTR_ALIGN_FLOOR(start, pgsz);
92 aligned_end = RTE_PTR_ALIGN_CEIL(end, pgsz);
94 /* if start and end are on the same page, bail out early */
95 if (RTE_PTR_DIFF(aligned_end, aligned_start) == pgsz)
98 /* skip first iteration */
99 cur = rte_mem_virt2iova(aligned_start);
100 expected = cur + pgsz;
101 aligned_start = RTE_PTR_ADD(aligned_start, pgsz);
103 while (aligned_start < aligned_end) {
104 cur = rte_mem_virt2iova(aligned_start);
107 aligned_start = RTE_PTR_ADD(aligned_start, pgsz);
111 int start_seg, end_seg, cur_seg;
112 rte_iova_t cur, expected;
114 aligned_start = RTE_PTR_ALIGN_FLOOR(start, pgsz);
115 aligned_end = RTE_PTR_ALIGN_CEIL(end, pgsz);
117 start_seg = RTE_PTR_DIFF(aligned_start, msl->base_va) /
119 end_seg = RTE_PTR_DIFF(aligned_end, msl->base_va) /
122 /* if start and end are on the same page, bail out early */
123 if (RTE_PTR_DIFF(aligned_end, aligned_start) == pgsz)
126 /* skip first iteration */
127 ms = rte_fbarray_get(&msl->memseg_arr, start_seg);
129 expected = cur + pgsz;
131 /* if we can't access IOVA addresses, assume non-contiguous */
132 if (cur == RTE_BAD_IOVA)
135 for (cur_seg = start_seg + 1; cur_seg < end_seg;
136 cur_seg++, expected += pgsz) {
137 ms = rte_fbarray_get(&msl->memseg_arr, cur_seg);
139 if (ms->iova != expected)
147 eal_memalloc_mem_event_callback_register(const char *name,
148 rte_mem_event_callback_t clb, void *arg)
150 struct mem_event_callback_entry *entry;
152 if (name == NULL || clb == NULL) {
156 len = strnlen(name, RTE_MEM_EVENT_CALLBACK_NAME_LEN);
160 } else if (len == RTE_MEM_EVENT_CALLBACK_NAME_LEN) {
161 rte_errno = ENAMETOOLONG;
164 rte_rwlock_write_lock(&mem_event_rwlock);
166 entry = find_mem_event_callback(name, arg);
173 entry = malloc(sizeof(*entry));
180 /* callback successfully created and is valid, add it to the list */
183 strlcpy(entry->name, name, RTE_MEM_EVENT_CALLBACK_NAME_LEN);
184 TAILQ_INSERT_TAIL(&mem_event_callback_list, entry, next);
188 RTE_LOG(DEBUG, EAL, "Mem event callback '%s:%p' registered\n",
192 rte_rwlock_write_unlock(&mem_event_rwlock);
197 eal_memalloc_mem_event_callback_unregister(const char *name, void *arg)
199 struct mem_event_callback_entry *entry;
206 len = strnlen(name, RTE_MEM_EVENT_CALLBACK_NAME_LEN);
210 } else if (len == RTE_MEM_EVENT_CALLBACK_NAME_LEN) {
211 rte_errno = ENAMETOOLONG;
214 rte_rwlock_write_lock(&mem_event_rwlock);
216 entry = find_mem_event_callback(name, arg);
222 TAILQ_REMOVE(&mem_event_callback_list, entry, next);
227 RTE_LOG(DEBUG, EAL, "Mem event callback '%s:%p' unregistered\n",
231 rte_rwlock_write_unlock(&mem_event_rwlock);
236 eal_memalloc_mem_event_notify(enum rte_mem_event event, const void *start,
239 struct mem_event_callback_entry *entry;
241 rte_rwlock_read_lock(&mem_event_rwlock);
243 TAILQ_FOREACH(entry, &mem_event_callback_list, next) {
244 RTE_LOG(DEBUG, EAL, "Calling mem event callback '%s:%p'\n",
245 entry->name, entry->arg);
246 entry->clb(event, start, len, entry->arg);
249 rte_rwlock_read_unlock(&mem_event_rwlock);
253 eal_memalloc_mem_alloc_validator_register(const char *name,
254 rte_mem_alloc_validator_t clb, int socket_id, size_t limit)
256 struct mem_alloc_validator_entry *entry;
258 if (name == NULL || clb == NULL || socket_id < 0) {
262 len = strnlen(name, RTE_MEM_ALLOC_VALIDATOR_NAME_LEN);
266 } else if (len == RTE_MEM_ALLOC_VALIDATOR_NAME_LEN) {
267 rte_errno = ENAMETOOLONG;
270 rte_rwlock_write_lock(&mem_alloc_validator_rwlock);
272 entry = find_mem_alloc_validator(name, socket_id);
279 entry = malloc(sizeof(*entry));
286 /* callback successfully created and is valid, add it to the list */
288 entry->socket_id = socket_id;
289 entry->limit = limit;
290 strlcpy(entry->name, name, RTE_MEM_ALLOC_VALIDATOR_NAME_LEN);
291 TAILQ_INSERT_TAIL(&mem_alloc_validator_list, entry, next);
295 RTE_LOG(DEBUG, EAL, "Mem alloc validator '%s' on socket %i with limit %zu registered\n",
296 name, socket_id, limit);
299 rte_rwlock_write_unlock(&mem_alloc_validator_rwlock);
304 eal_memalloc_mem_alloc_validator_unregister(const char *name, int socket_id)
306 struct mem_alloc_validator_entry *entry;
309 if (name == NULL || socket_id < 0) {
313 len = strnlen(name, RTE_MEM_ALLOC_VALIDATOR_NAME_LEN);
317 } else if (len == RTE_MEM_ALLOC_VALIDATOR_NAME_LEN) {
318 rte_errno = ENAMETOOLONG;
321 rte_rwlock_write_lock(&mem_alloc_validator_rwlock);
323 entry = find_mem_alloc_validator(name, socket_id);
329 TAILQ_REMOVE(&mem_alloc_validator_list, entry, next);
334 RTE_LOG(DEBUG, EAL, "Mem alloc validator '%s' on socket %i unregistered\n",
338 rte_rwlock_write_unlock(&mem_alloc_validator_rwlock);
343 eal_memalloc_mem_alloc_validate(int socket_id, size_t new_len)
345 struct mem_alloc_validator_entry *entry;
348 rte_rwlock_read_lock(&mem_alloc_validator_rwlock);
350 TAILQ_FOREACH(entry, &mem_alloc_validator_list, next) {
351 if (entry->socket_id != socket_id || entry->limit > new_len)
353 RTE_LOG(DEBUG, EAL, "Calling mem alloc validator '%s' on socket %i\n",
354 entry->name, entry->socket_id);
355 if (entry->clb(socket_id, entry->limit, new_len) < 0)
359 rte_rwlock_read_unlock(&mem_alloc_validator_rwlock);