#include <rte_common.h>
#include <rte_spinlock.h>
+#include "eal_private.h"
#include "eal_internal_cfg.h"
#include "eal_memalloc.h"
#include "malloc_elem.h"
#include "malloc_heap.h"
+/*
+ * If debugging is enabled, freed memory is set to a poison value
+ * to catch buggy programs. Otherwise, freed memory is set to zero
+ * so that it does not need to be zeroed again in rte_zmalloc().
+ */
+#ifdef RTE_MALLOC_DEBUG
+#define MALLOC_POISON 0x6b
+#else
+#define MALLOC_POISON 0
+#endif
+
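/*
 * A minimal standalone sketch (not part of the patch) of what the
 * poison value buys: with RTE_MALLOC_DEBUG, freed memory is filled
 * with 0x6b, so a use-after-free read shows an obvious repeating
 * pattern instead of plausible stale data. toy_free() below is a
 * hypothetical stand-in for the poisoning done in malloc_elem_free().
 */
#include <stdio.h>
#include <string.h>

#define TOY_POISON 0x6b

static void
toy_free(void *data, size_t len)
{
    /* fill the payload with the poison pattern before recycling it */
    memset(data, TOY_POISON, len);
}

int
main(void)
{
    char buf[8] = "abcdefg";

    toy_free(buf, sizeof(buf));
    /* a buggy read-after-free now sees 0x6b in every byte */
    printf("%02x %02x\n", (unsigned char)buf[0], (unsigned char)buf[7]);
    return 0;
}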
size_t
malloc_elem_find_max_iova_contig(struct malloc_elem *elem, size_t align)
{
void *data_start, *data_end, *contig_seg_start;
rte_iova_t expected_iova;
struct rte_memseg *ms;
size_t page_sz, cur, max;
+ const struct internal_config *internal_conf =
+ eal_get_internal_configuration();
page_sz = (size_t)elem->msl->page_sz;
data_start = RTE_PTR_ADD(elem, MALLOC_ELEM_HEADER_LEN);
data_end = RTE_PTR_ADD(elem, elem->size - MALLOC_ELEM_TRAILER_LEN);
/* segment must start after header and with specified alignment */
contig_seg_start = RTE_PTR_ALIGN_CEIL(data_start, align);
+ /* return if aligned address is already out of malloc element */
+ if (contig_seg_start > data_end)
+ return 0;
+
/* if we're in IOVA as VA mode, or if we're in legacy mode with
 * hugepages, all elements are IOVA-contiguous. however, we can only
 * make these assumptions about internal memory - externally allocated
 * segments have to be checked.
 */
if (!elem->msl->external &&
(rte_eal_iova_mode() == RTE_IOVA_VA ||
- (internal_config.legacy_mem &&
+ (internal_conf->legacy_mem &&
rte_eal_has_hugepages())))
return RTE_PTR_DIFF(data_end, contig_seg_start);
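/*
 * A standalone sketch of the fast path above, with made-up addresses:
 * in RTE_IOVA_VA mode a VA-contiguous element is also IOVA-contiguous,
 * so the answer is simply the distance from the aligned segment start
 * to data_end. Plain uintptr_t arithmetic stands in for the
 * RTE_PTR_ALIGN_CEIL() and RTE_PTR_DIFF() macros.
 */
#include <stdint.h>
#include <stdio.h>

int
main(void)
{
    uintptr_t data_start = 0x1008; /* payload start (example value) */
    uintptr_t data_end = 0x2000;   /* payload end (example value) */
    size_t align = 64;

    /* RTE_PTR_ALIGN_CEIL: round the start up to the alignment */
    uintptr_t seg_start = (data_start + align - 1) & ~((uintptr_t)align - 1);

    if (seg_start > data_end)
        return 0; /* aligned start fell outside the element */

    /* RTE_PTR_DIFF: byte distance between the two addresses */
    printf("max contig = %zu bytes\n", (size_t)(data_end - seg_start));
    return 0;
}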
next_elem = NULL;
heap->last = elem;
} else {
- /* the new memory is somewhere inbetween start and end */
+ /* the new memory is somewhere between start and end */
uint64_t dist_from_start, dist_from_end;
dist_from_end = RTE_PTR_DIFF(heap->last, elem);
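/*
 * A standalone sketch, with made-up addresses, of the distance check
 * above: when inserting a freed element back into the heap's element
 * list, compare its distance from both ends and scan from whichever
 * end is nearer, halving the worst-case walk.
 */
#include <stdint.h>
#include <stdio.h>

int
main(void)
{
    uintptr_t first = 0x100000; /* heap->first (example value) */
    uintptr_t last = 0x500000;  /* heap->last (example value) */
    uintptr_t elem = 0x480000;  /* element being inserted (example) */

    uint64_t dist_from_start = elem - first;
    uint64_t dist_from_end = last - elem;

    /* here 0x80000 < 0x380000, so scan backwards from the end */
    printf("scan from the %s\n",
        dist_from_start < dist_from_end ? "start" : "end");
    return 0;
}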
elem->next = split_pt;
elem->size = old_elem_size;
set_trailer(elem);
+ if (elem->pad) {
+ /* Update the size of the inner, padded element. */
+ elem = RTE_PTR_ADD(elem, elem->pad);
+ elem->size = old_elem_size - elem->pad;
+ }
}
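/*
 * A simplified sketch (toy_elem is not the real struct malloc_elem) of
 * the bookkeeping the hunk above performs: a padded element carries a
 * second, inner header at elem + pad, so whenever a split shrinks the
 * outer size, the inner size must shrink by the same amount or the
 * free path would later look for the trailer in the wrong place.
 */
#include <assert.h>
#include <stddef.h>
#include <stdint.h>

struct toy_elem {
    size_t size; /* total size, including headers and pad */
    size_t pad;  /* front padding, 0 if unpadded */
};

static void
toy_resize_padded(struct toy_elem *elem, size_t new_size)
{
    elem->size = new_size;
    if (elem->pad) {
        /* the inner header lives `pad` bytes into the element */
        struct toy_elem *inner =
            (struct toy_elem *)((uintptr_t)elem + elem->pad);
        inner->size = new_size - elem->pad;
    }
}

int
main(void)
{
    static struct toy_elem mem[256];
    struct toy_elem *outer = &mem[0];
    size_t pad = 4 * sizeof(*outer); /* inner header at mem[4] */
    struct toy_elem *inner = &mem[4];

    outer->pad = pad;
    toy_resize_padded(outer, 1024); /* e.g. a split shrank the element */
    assert(inner->size == 1024 - pad);
    return 0;
}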
/*
static int
next_elem_is_adjacent(struct malloc_elem *elem)
{
+ const struct internal_config *internal_conf =
+ eal_get_internal_configuration();
+
return elem->next == RTE_PTR_ADD(elem, elem->size) &&
elem->next->msl == elem->msl &&
- (!internal_config.match_allocations ||
+ (!internal_conf->match_allocations ||
elem->orig_elem == elem->next->orig_elem);
}
static int
prev_elem_is_adjacent(struct malloc_elem *elem)
{
+ const struct internal_config *internal_conf =
+ eal_get_internal_configuration();
+
return elem == RTE_PTR_ADD(elem->prev, elem->prev->size) &&
elem->prev->msl == elem->msl &&
- (!internal_config.match_allocations ||
+ (!internal_conf->match_allocations ||
elem->orig_elem == elem->prev->orig_elem);
}
return 0;
/* Find next power of 2 >= size. */
- log2 = sizeof(size) * 8 - __builtin_clzl(size-1);
+ log2 = sizeof(size) * 8 - __builtin_clzl(size - 1);
/* Compute freelist index, based on log2(size). */
index = (log2 - MALLOC_MINSIZE_LOG2 + MALLOC_LOG2_INCREMENT - 1) /
- MALLOC_LOG2_INCREMENT;
+ MALLOC_LOG2_INCREMENT;
- return index <= RTE_HEAP_NUM_FREELISTS-1?
- index: RTE_HEAP_NUM_FREELISTS-1;
+ return index <= RTE_HEAP_NUM_FREELISTS - 1 ?
+ index : RTE_HEAP_NUM_FREELISTS - 1;
}
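/*
 * A worked example of the index computation above, assuming the
 * constants this file uses (MALLOC_MINSIZE_LOG2 == 8 and
 * MALLOC_LOG2_INCREMENT == 2) and RTE_HEAP_NUM_FREELISTS == 13:
 * a 3000-byte element rounds up to 4096 = 2^12, so it lands in
 * free list (12 - 8 + 2 - 1) / 2 = 2, the (2^10, 2^12] bucket.
 */
#include <stdio.h>

#define TOY_MINSIZE_LOG2 8
#define TOY_LOG2_INCREMENT 2
#define TOY_NUM_FREELISTS 13

static size_t
toy_free_list_index(size_t size)
{
    size_t log2, index;

    if (size <= (1UL << TOY_MINSIZE_LOG2))
        return 0;
    log2 = sizeof(size) * 8 - __builtin_clzl(size - 1);
    index = (log2 - TOY_MINSIZE_LOG2 + TOY_LOG2_INCREMENT - 1) /
        TOY_LOG2_INCREMENT;
    return index <= TOY_NUM_FREELISTS - 1 ?
        index : TOY_NUM_FREELISTS - 1;
}

int
main(void)
{
    printf("%zu %zu %zu\n",
        toy_free_list_index(200),      /* 0: at or below 2^8 */
        toy_free_list_index(3000),     /* 2: rounds up to 2^12 */
        toy_free_list_index(1 << 20)); /* 6: rounds up to 2^20 */
    return 0;
}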
/*
else
elem1->heap->last = elem1;
elem1->next = next;
+ if (elem1->pad) {
+ /* keep the inner padded element's size in sync after the join */
+ struct malloc_elem *inner = RTE_PTR_ADD(elem1, elem1->pad);
+ inner->size = elem1->size - elem1->pad;
+ }
}
struct malloc_elem *
join_elem(elem, elem->next);
/* erase header, trailer and pad */
- memset(erase, 0, erase_len);
+ memset(erase, MALLOC_POISON, erase_len);
}
/*
join_elem(new_elem, elem);
/* erase header, trailer and pad */
- memset(erase, 0, erase_len);
+ memset(erase, MALLOC_POISON, erase_len);
elem = new_elem;
}
/* decrease heap's count of allocated elements */
elem->heap->alloc_count--;
- memset(ptr, 0, data_len);
+ /* poison memory */
+ memset(ptr, MALLOC_POISON, data_len);
return elem;
}