diff --git a/lib/librte_eal/common/malloc_elem.c b/lib/librte_eal/common/malloc_elem.c
index e0a8ed15b4..885d00424b 100644
--- a/lib/librte_eal/common/malloc_elem.c
+++ b/lib/librte_eal/common/malloc_elem.c
@@ -23,6 +23,17 @@
 #include "malloc_elem.h"
 #include "malloc_heap.h"
 
+/*
+ * If debugging is enabled, freed memory is set to a poison value
+ * to catch buggy programs. Otherwise, freed memory is set to zero
+ * to avoid having to zero it again in rte_zmalloc().
+ */
+#ifdef RTE_MALLOC_DEBUG
+#define MALLOC_POISON 0x6b
+#else
+#define MALLOC_POISON 0
+#endif
+
 size_t
 malloc_elem_find_max_iova_contig(struct malloc_elem *elem, size_t align)
 {
@@ -38,11 +49,19 @@ malloc_elem_find_max_iova_contig(struct malloc_elem *elem, size_t align)
 	/* segment must start after header and with specified alignment */
 	contig_seg_start = RTE_PTR_ALIGN_CEIL(data_start, align);
 
+	/* return if the aligned address is already beyond the malloc element */
+	if (contig_seg_start > data_end)
+		return 0;
+
 	/* if we're in IOVA as VA mode, or if we're in legacy mode with
-	 * hugepages, all elements are IOVA-contiguous.
+	 * hugepages, all elements are IOVA-contiguous. However, we can only
+	 * make this assumption about internal memory; externally allocated
+	 * segments have to be checked.
 	 */
-	if (rte_eal_iova_mode() == RTE_IOVA_VA ||
-			(internal_config.legacy_mem && rte_eal_has_hugepages()))
+	if (!elem->msl->external &&
+			(rte_eal_iova_mode() == RTE_IOVA_VA ||
+			 (internal_config.legacy_mem &&
+			  rte_eal_has_hugepages())))
 		return RTE_PTR_DIFF(data_end, contig_seg_start);
 
 	cur_page = RTE_PTR_ALIGN_FLOOR(contig_seg_start, page_sz);
@@ -106,7 +125,8 @@
  */
 void
 malloc_elem_init(struct malloc_elem *elem, struct malloc_heap *heap,
-		struct rte_memseg_list *msl, size_t size)
+		struct rte_memseg_list *msl, size_t size,
+		struct malloc_elem *orig_elem, size_t orig_size)
 {
 	elem->heap = heap;
 	elem->msl = msl;
@@ -116,6 +136,8 @@ malloc_elem_init(struct malloc_elem *elem, struct malloc_heap *heap,
 	elem->state = ELEM_FREE;
 	elem->size = size;
 	elem->pad = 0;
+	elem->orig_elem = orig_elem;
+	elem->orig_size = orig_size;
 	set_header(elem);
 	set_trailer(elem);
 }
@@ -274,7 +296,8 @@ split_elem(struct malloc_elem *elem, struct malloc_elem *split_pt)
 	const size_t old_elem_size = (uintptr_t)split_pt - (uintptr_t)elem;
 	const size_t new_elem_size = elem->size - old_elem_size;
 
-	malloc_elem_init(split_pt, elem->heap, elem->msl, new_elem_size);
+	malloc_elem_init(split_pt, elem->heap, elem->msl, new_elem_size,
+			elem->orig_elem, elem->orig_size);
 	split_pt->prev = elem;
 	split_pt->next = next_elem;
 	if (next_elem)
@@ -284,6 +307,11 @@ split_elem(struct malloc_elem *elem, struct malloc_elem *split_pt)
 	elem->next = split_pt;
 	elem->size = old_elem_size;
 	set_trailer(elem);
+	if (elem->pad) {
+		/* Update the inner element's size to match the new padding. */
+		elem = RTE_PTR_ADD(elem, elem->pad);
+		elem->size = old_elem_size - elem->pad;
+	}
 }
 
 /*
@@ -312,13 +340,19 @@ remove_elem(struct malloc_elem *elem)
 static int
 next_elem_is_adjacent(struct malloc_elem *elem)
 {
-	return elem->next == RTE_PTR_ADD(elem, elem->size);
+	return elem->next == RTE_PTR_ADD(elem, elem->size) &&
+			elem->next->msl == elem->msl &&
+			(!internal_config.match_allocations ||
+			 elem->orig_elem == elem->next->orig_elem);
 }
 
 static int
 prev_elem_is_adjacent(struct malloc_elem *elem)
 {
-	return elem == RTE_PTR_ADD(elem->prev, elem->prev->size);
+	return elem == RTE_PTR_ADD(elem->prev, elem->prev->size) &&
+			elem->prev->msl == elem->msl &&
+			(!internal_config.match_allocations ||
+			 elem->orig_elem == elem->prev->orig_elem);
 }
 
 /*
@@ -453,6 +487,10 @@ join_elem(struct malloc_elem *elem1, struct malloc_elem *elem2)
 	else
 		elem1->heap->last = elem1;
 	elem1->next = next;
+	if (elem1->pad) {
+		struct malloc_elem *inner = RTE_PTR_ADD(elem1, elem1->pad);
+		inner->size = elem1->size - elem1->pad;
+	}
 }
 
 struct malloc_elem *
@@ -476,7 +514,7 @@ malloc_elem_join_adjacent_free(struct malloc_elem *elem)
 		join_elem(elem, elem->next);
 
 		/* erase header, trailer and pad */
-		memset(erase, 0, erase_len);
+		memset(erase, MALLOC_POISON, erase_len);
 	}
 
 	/*
@@ -500,7 +538,7 @@ malloc_elem_join_adjacent_free(struct malloc_elem *elem)
 		join_elem(new_elem, elem);
 
 		/* erase header, trailer and pad */
-		memset(erase, 0, erase_len);
+		memset(erase, MALLOC_POISON, erase_len);
 
 		elem = new_elem;
 	}
@@ -531,7 +569,8 @@ malloc_elem_free(struct malloc_elem *elem)
 	/* decrease heap's count of allocated elements */
 	elem->heap->alloc_count--;
 
-	memset(ptr, 0, data_len);
+	/* poison memory */
+	memset(ptr, MALLOC_POISON, data_len);
 
 	return elem;
 }
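
For readers outside the DPDK tree, the following is a minimal standalone sketch of the poison-on-free idea that the MALLOC_POISON hunks implement; it is not part of the patch, and every name in it (POISON_BYTE, toy_free, toy_zmalloc, the fixed arena buffer) is illustrative. Freed bytes are filled with 0x6b in a debug build so use-after-free reads stand out; in a normal build they are zeroed, which lets a zmalloc-style allocation hand the memory back without an extra memset.

#include <stdio.h>
#include <string.h>

/* Illustrative poison value, mirroring MALLOC_POISON above. */
#ifdef DEBUG_MALLOC
#define POISON_BYTE 0x6b
#else
#define POISON_BYTE 0
#endif

/* Stand-in for a heap element's data area; DPDK manages these on
 * per-heap free lists instead of using a single static buffer. */
static unsigned char arena[64];

/* "Free" a region: fill it with the poison (or zero) byte. */
static void
toy_free(void *ptr, size_t len)
{
	memset(ptr, POISON_BYTE, len);
}

/* A zmalloc-style allocation only needs to zero explicitly when
 * freed memory was poisoned rather than zeroed. */
static void *
toy_zmalloc(size_t len)
{
	if (POISON_BYTE != 0)
		memset(arena, 0, len);	/* debug build: must zero */
	return arena;
}

int
main(void)
{
	memset(arena, 0xaa, sizeof(arena));	/* pretend the region was used */
	toy_free(arena, sizeof(arena));
	printf("first freed byte:  0x%02x\n", arena[0]);	/* 0x6b or 0x00 */

	unsigned char *p = toy_zmalloc(16);
	printf("first zmalloc byte: 0x%02x\n", p[0]);	/* always 0x00 */
	return 0;
}

Built with -DDEBUG_MALLOC, the first print shows 0x6b and toy_zmalloc pays for an explicit memset; built without it, both prints show 0x00 and the zeroing done at free time is reused, which is exactly the trade-off described in the comment added at the top of the diff.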