#include <rte_common.h>
#include <rte_spinlock.h>
+#include "eal_private.h"
#include "eal_internal_cfg.h"
#include "eal_memalloc.h"
#include "malloc_elem.h"
#include "malloc_heap.h"
+/*
+ * If debugging is enabled, freed memory is set to poison value
+ * to catch buggy programs. Otherwise, freed memory is set to zero
+ * to avoid having to zero in zmalloc
+ */
+#ifdef RTE_MALLOC_DEBUG
+#define MALLOC_POISON 0x6b
+#else
+#define MALLOC_POISON 0
+#endif
+
size_t
malloc_elem_find_max_iova_contig(struct malloc_elem *elem, size_t align)
{
rte_iova_t expected_iova;
struct rte_memseg *ms;
size_t page_sz, cur, max;
+ const struct internal_config *internal_conf =
+ eal_get_internal_configuration();
page_sz = (size_t)elem->msl->page_sz;
data_start = RTE_PTR_ADD(elem, MALLOC_ELEM_HEADER_LEN);
/* segment must start after header and with specified alignment */
contig_seg_start = RTE_PTR_ALIGN_CEIL(data_start, align);
+ /* return if aligned address is already out of malloc element */
+ if (contig_seg_start > data_end)
+ return 0;
+
/* if we're in IOVA as VA mode, or if we're in legacy mode with
- * hugepages, all elements are IOVA-contiguous.
+ * hugepages, all elements are IOVA-contiguous. however, we can only
+ * make these assumptions about internal memory - externally allocated
+ * segments have to be checked.
*/
- if (rte_eal_iova_mode() == RTE_IOVA_VA ||
- (internal_config.legacy_mem && rte_eal_has_hugepages()))
+ if (!elem->msl->external &&
+ (rte_eal_iova_mode() == RTE_IOVA_VA ||
+ (internal_conf->legacy_mem &&
+ rte_eal_has_hugepages())))
return RTE_PTR_DIFF(data_end, contig_seg_start);
cur_page = RTE_PTR_ALIGN_FLOOR(contig_seg_start, page_sz);
*/
/*
 * Initialize a malloc element header in place.
 *
 * Resolved from leftover diff markers: the post-patch signature takes the
 * originally-allocated element and size so that elements split off a larger
 * allocation can later be re-joined/matched (used by --match-allocations).
 *
 * @param elem      element header to initialize (memory already reserved)
 * @param heap      owning heap
 * @param msl       memseg list backing this element's memory
 * @param size      total size of this element, header included
 * @param orig_elem element this one was originally carved from (itself if whole)
 * @param orig_size size of that original allocation
 */
void
malloc_elem_init(struct malloc_elem *elem, struct malloc_heap *heap,
		struct rte_memseg_list *msl, size_t size,
		struct malloc_elem *orig_elem, size_t orig_size)
{
	elem->heap = heap;
	elem->msl = msl;
	elem->state = ELEM_FREE;
	elem->size = size;
	elem->pad = 0;
	/* remember the allocation this element was carved from */
	elem->orig_elem = orig_elem;
	elem->orig_size = orig_size;
	set_header(elem);
	set_trailer(elem);
}
next_elem = NULL;
heap->last = elem;
} else {
- /* the new memory is somewhere inbetween start and end */
+ /* the new memory is somewhere between start and end */
uint64_t dist_from_start, dist_from_end;
dist_from_end = RTE_PTR_DIFF(heap->last, elem);
const size_t old_elem_size = (uintptr_t)split_pt - (uintptr_t)elem;
const size_t new_elem_size = elem->size - old_elem_size;
- malloc_elem_init(split_pt, elem->heap, elem->msl, new_elem_size);
+ malloc_elem_init(split_pt, elem->heap, elem->msl, new_elem_size,
+ elem->orig_elem, elem->orig_size);
split_pt->prev = elem;
split_pt->next = next_elem;
if (next_elem)
elem->next = split_pt;
elem->size = old_elem_size;
set_trailer(elem);
+ if (elem->pad) {
+ /* Update inner padding inner element size. */
+ elem = RTE_PTR_ADD(elem, elem->pad);
+ elem->size = old_elem_size - elem->pad;
+ }
}
/*
static int
next_elem_is_adjacent(struct malloc_elem *elem)
{
- return elem->next == RTE_PTR_ADD(elem, elem->size);
+ const struct internal_config *internal_conf =
+ eal_get_internal_configuration();
+
+ return elem->next == RTE_PTR_ADD(elem, elem->size) &&
+ elem->next->msl == elem->msl &&
+ (!internal_conf->match_allocations ||
+ elem->orig_elem == elem->next->orig_elem);
}
static int
prev_elem_is_adjacent(struct malloc_elem *elem)
{
- return elem == RTE_PTR_ADD(elem->prev, elem->prev->size);
+ const struct internal_config *internal_conf =
+ eal_get_internal_configuration();
+
+ return elem == RTE_PTR_ADD(elem->prev, elem->prev->size) &&
+ elem->prev->msl == elem->msl &&
+ (!internal_conf->match_allocations ||
+ elem->orig_elem == elem->prev->orig_elem);
}
/*
else
elem1->heap->last = elem1;
elem1->next = next;
+ if (elem1->pad) {
+ struct malloc_elem *inner = RTE_PTR_ADD(elem1, elem1->pad);
+ inner->size = elem1->size - elem1->pad;
+ }
}
struct malloc_elem *
join_elem(elem, elem->next);
/* erase header, trailer and pad */
- memset(erase, 0, erase_len);
+ memset(erase, MALLOC_POISON, erase_len);
}
/*
join_elem(new_elem, elem);
/* erase header, trailer and pad */
- memset(erase, 0, erase_len);
+ memset(erase, MALLOC_POISON, erase_len);
elem = new_elem;
}
/* decrease heap's count of allocated elements */
elem->heap->alloc_count--;
- memset(ptr, 0, data_len);
+ /* poison memory */
+ memset(ptr, MALLOC_POISON, data_len);
return elem;
}