X-Git-Url: http://git.droids-corp.org/?a=blobdiff_plain;f=lib%2Flibrte_eal%2Fcommon%2Fmalloc_elem.c;h=c2c9461f1dfa636fb589436fc562cb316d3c67cb;hb=6a17919b0e2a482e0b3f505a048a2a3383e979e3;hp=fcdb18120adb6f11feaf2a0ad80d4ed41b4e6086;hpb=476c847ab6755d233dd786e8b87970c7105fbce2;p=dpdk.git

diff --git a/lib/librte_eal/common/malloc_elem.c b/lib/librte_eal/common/malloc_elem.c
index fcdb18120a..c2c9461f1d 100644
--- a/lib/librte_eal/common/malloc_elem.c
+++ b/lib/librte_eal/common/malloc_elem.c
@@ -18,11 +18,23 @@
 #include <rte_common.h>
 #include <rte_spinlock.h>
 
+#include "eal_private.h"
 #include "eal_internal_cfg.h"
 #include "eal_memalloc.h"
 #include "malloc_elem.h"
 #include "malloc_heap.h"
 
+/*
+ * If debugging is enabled, freed memory is set to a poison value
+ * to catch buggy programs. Otherwise, freed memory is set to zero
+ * to avoid having to zero it in zmalloc.
+ */
+#ifdef RTE_MALLOC_DEBUG
+#define MALLOC_POISON	0x6b
+#else
+#define MALLOC_POISON	0
+#endif
+
 size_t
 malloc_elem_find_max_iova_contig(struct malloc_elem *elem, size_t align)
 {
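Why poison instead of just zeroing? A stale pointer into zeroed memory
often keeps "working" with plausible-looking values, whereas 0x6b-filled
memory fails fast; 0x6b is also the Linux kernel's POISON_FREE byte, so
0x6b6b6b6b patterns are instantly recognizable in a debugger. Below is a
standalone sketch of the idea (not part of the patch; pool_free() and
struct conn are hypothetical stand-ins for malloc_elem_free() and a user
structure):

	#include <assert.h>
	#include <stdlib.h>
	#include <string.h>

	#define POISON 0x6b	/* same byte the patch uses */

	struct conn {
		int fd;
		int in_use;
	};

	/* Stand-in for the allocator's free path: fill the payload so
	 * any later read through a stale pointer sees 0x6b6b...
	 */
	static void
	pool_free(void *ptr, size_t len)
	{
		memset(ptr, POISON, len);
	}

	int
	main(void)
	{
		struct conn *c = malloc(sizeof(*c));

		if (c == NULL)
			return 1;
		c->fd = 42;
		c->in_use = 1;
		pool_free(c, sizeof(*c));

		/* A buggy read-after-free now sees 0x6b6b6b6b rather
		 * than a plausible 0 or a stale 1, so it trips at once.
		 */
		assert(c->in_use != 1);

		free(c);
		return 0;
	}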
@@ -31,6 +43,8 @@ malloc_elem_find_max_iova_contig(struct malloc_elem *elem, size_t align)
 	rte_iova_t expected_iova;
 	struct rte_memseg *ms;
 	size_t page_sz, cur, max;
+	const struct internal_config *internal_conf =
+		eal_get_internal_configuration();
 
 	page_sz = (size_t)elem->msl->page_sz;
 	data_start = RTE_PTR_ADD(elem, MALLOC_ELEM_HEADER_LEN);
@@ -38,6 +52,10 @@ malloc_elem_find_max_iova_contig(struct malloc_elem *elem, size_t align)
 	/* segment must start after header and with specified alignment */
 	contig_seg_start = RTE_PTR_ALIGN_CEIL(data_start, align);
 
+	/* return if aligned address is already out of malloc element */
+	if (contig_seg_start > data_end)
+		return 0;
+
 	/* if we're in IOVA as VA mode, or if we're in legacy mode with
 	 * hugepages, all elements are IOVA-contiguous. however, we can only
 	 * make these assumptions about internal memory - externally allocated
@@ -45,7 +63,7 @@ malloc_elem_find_max_iova_contig(struct malloc_elem *elem, size_t align)
 	 */
 	if (!elem->msl->external &&
 			(rte_eal_iova_mode() == RTE_IOVA_VA ||
-				(internal_config.legacy_mem &&
+				(internal_conf->legacy_mem &&
 					rte_eal_has_hugepages())))
 		return RTE_PTR_DIFF(data_end, contig_seg_start);
 
@@ -156,7 +174,7 @@ malloc_elem_insert(struct malloc_elem *elem)
 		next_elem = NULL;
 		heap->last = elem;
 	} else {
-		/* the new memory is somewhere inbetween start and end */
+		/* the new memory is somewhere between start and end */
 		uint64_t dist_from_start, dist_from_end;
 
 		dist_from_end = RTE_PTR_DIFF(heap->last, elem);
@@ -292,6 +310,11 @@ split_elem(struct malloc_elem *elem, struct malloc_elem *split_pt)
 	elem->next = split_pt;
 	elem->size = old_elem_size;
 	set_trailer(elem);
+	if (elem->pad) {
+		/* Update the inner padded element's size. */
+		elem = RTE_PTR_ADD(elem, elem->pad);
+		elem->size = old_elem_size - elem->pad;
+	}
 }
 
 /*
@@ -320,18 +343,24 @@ remove_elem(struct malloc_elem *elem)
 static int
 next_elem_is_adjacent(struct malloc_elem *elem)
 {
+	const struct internal_config *internal_conf =
+		eal_get_internal_configuration();
+
 	return elem->next == RTE_PTR_ADD(elem, elem->size) &&
 			elem->next->msl == elem->msl &&
-			(!internal_config.match_allocations ||
+			(!internal_conf->match_allocations ||
 			 elem->orig_elem == elem->next->orig_elem);
 }
 
 static int
 prev_elem_is_adjacent(struct malloc_elem *elem)
 {
+	const struct internal_config *internal_conf =
+		eal_get_internal_configuration();
+
 	return elem == RTE_PTR_ADD(elem->prev, elem->prev->size) &&
 			elem->prev->msl == elem->msl &&
-			(!internal_config.match_allocations ||
+			(!internal_conf->match_allocations ||
 			 elem->orig_elem == elem->prev->orig_elem);
 }
 
@@ -362,14 +391,14 @@ malloc_elem_free_list_index(size_t size)
 		return 0;
 
 	/* Find next power of 2 >= size. */
-	log2 = sizeof(size) * 8 - __builtin_clzl(size-1);
+	log2 = sizeof(size) * 8 - __builtin_clzl(size - 1);
 
 	/* Compute freelist index, based on log2(size). */
 	index = (log2 - MALLOC_MINSIZE_LOG2 + MALLOC_LOG2_INCREMENT - 1) /
-		MALLOC_LOG2_INCREMENT;
+			MALLOC_LOG2_INCREMENT;
 
-	return index <= RTE_HEAP_NUM_FREELISTS-1?
-			index: RTE_HEAP_NUM_FREELISTS-1;
+	return index <= RTE_HEAP_NUM_FREELISTS - 1 ?
+			index : RTE_HEAP_NUM_FREELISTS - 1;
 }
 
 /*
@@ -467,6 +496,10 @@ join_elem(struct malloc_elem *elem1, struct malloc_elem *elem2)
 	else
 		elem1->heap->last = elem1;
 	elem1->next = next;
+	if (elem1->pad) {
+		struct malloc_elem *inner = RTE_PTR_ADD(elem1, elem1->pad);
+		inner->size = elem1->size - elem1->pad;
+	}
 }
 
 struct malloc_elem *
@@ -490,7 +523,7 @@ malloc_elem_join_adjacent_free(struct malloc_elem *elem)
 		join_elem(elem, elem->next);
 
 		/* erase header, trailer and pad */
-		memset(erase, 0, erase_len);
+		memset(erase, MALLOC_POISON, erase_len);
 	}
 
 	/*
@@ -514,7 +547,7 @@ malloc_elem_join_adjacent_free(struct malloc_elem *elem)
 		join_elem(new_elem, elem);
 
 		/* erase header, trailer and pad */
-		memset(erase, 0, erase_len);
+		memset(erase, MALLOC_POISON, erase_len);
 
 		elem = new_elem;
 	}
@@ -545,7 +578,8 @@ malloc_elem_free(struct malloc_elem *elem)
 	/* decrease heap's count of allocated elements */
 	elem->heap->alloc_count--;
 
-	memset(ptr, 0, data_len);
+	/* poison memory */
+	memset(ptr, MALLOC_POISON, data_len);
 
 	return elem;
 }
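For reference, the free-list index computation touched up above maps an
element size to one of RTE_HEAP_NUM_FREELISTS buckets, where each bucket
past the first spans MALLOC_LOG2_INCREMENT powers of two above the
MALLOC_MINSIZE_LOG2 floor, and oversized requests are clamped into the
last bucket. A minimal sketch with stand-in constants (assumed values
for illustration; DPDK's real ones live in its malloc headers):

	#include <stdio.h>
	#include <stddef.h>

	#define MINSIZE_LOG2	6	/* bucket 0: sizes up to 2^6 */
	#define LOG2_INCREMENT	2	/* each bucket spans 2 powers of two */
	#define NUM_FREELISTS	13

	static size_t
	free_list_index(size_t size)
	{
		size_t log2, index;

		if (size <= (1UL << MINSIZE_LOG2))
			return 0;

		/* Find next power of 2 >= size. */
		log2 = sizeof(size) * 8 - __builtin_clzl(size - 1);

		/* Compute freelist index based on log2(size). */
		index = (log2 - MINSIZE_LOG2 + LOG2_INCREMENT - 1) /
				LOG2_INCREMENT;

		return index <= NUM_FREELISTS - 1 ?
				index : NUM_FREELISTS - 1;
	}

	int
	main(void)
	{
		/* 64 -> 0 (fits the floor), 65 -> 1 (rounds up to 2^7),
		 * 4096 -> 3, 1 GiB -> 12 (clamped to the last bucket).
		 */
		printf("%zu %zu %zu %zu\n",
				free_list_index(64), free_list_index(65),
				free_list_index(4096),
				free_list_index(1UL << 30));
		return 0;
	}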