From: Anatoly Burakov
Date: Fri, 10 Sep 2021 12:30:08 +0000 (+0000)
Subject: mem: promote DMA mask API to stable
X-Git-Url: http://git.droids-corp.org/?a=commitdiff_plain;h=27e7e2509c357e746b4dbb30cd42a432eea601d4;p=dpdk.git

mem: promote DMA mask API to stable

As per ABI policy, move the formerly experimental APIs to the stable
section.

Signed-off-by: Anatoly Burakov
Acked-by: Ray Kinsella
---

diff --git a/lib/eal/include/rte_memory.h b/lib/eal/include/rte_memory.h
index c68b9d5e62..6d018629ae 100644
--- a/lib/eal/include/rte_memory.h
+++ b/lib/eal/include/rte_memory.h
@@ -553,22 +553,15 @@ unsigned rte_memory_get_nchannel(void);
 unsigned rte_memory_get_nrank(void);
 
 /**
- * @warning
- * @b EXPERIMENTAL: this API may change without prior notice
- *
  * Check if all currently allocated memory segments are compliant with
  * supplied DMA address width.
  *
  * @param maskbits
  *  Address width to check against.
  */
-__rte_experimental
 int rte_mem_check_dma_mask(uint8_t maskbits);
 
 /**
- * @warning
- * @b EXPERIMENTAL: this API may change without prior notice
- *
  * Check if all currently allocated memory segments are compliant with
  * supplied DMA address width. This function will use
  * rte_memseg_walk_thread_unsafe instead of rte_memseg_walk implying
@@ -581,18 +574,13 @@ int rte_mem_check_dma_mask(uint8_t maskbits);
  * @param maskbits
  *  Address width to check against.
  */
-__rte_experimental
 int rte_mem_check_dma_mask_thread_unsafe(uint8_t maskbits);
 
 /**
- * @warning
- * @b EXPERIMENTAL: this API may change without prior notice
- *
  * Set dma mask to use once memory initialization is done. Previous functions
  * rte_mem_check_dma_mask and rte_mem_check_dma_mask_thread_unsafe can not be
  * used safely until memory has been initialized.
  */
-__rte_experimental
 void rte_mem_set_dma_mask(uint8_t maskbits);
 
 /**
diff --git a/lib/eal/version.map b/lib/eal/version.map
index 54a3f02dc2..7e692a35f5 100644
--- a/lib/eal/version.map
+++ b/lib/eal/version.map
@@ -174,10 +174,13 @@ DPDK_22 {
 	rte_mcfg_tailq_write_unlock;
 	rte_mem_alloc_validator_register;
 	rte_mem_alloc_validator_unregister;
+	rte_mem_check_dma_mask;
+	rte_mem_check_dma_mask_thread_unsafe;
 	rte_mem_event_callback_register;
 	rte_mem_event_callback_unregister;
 	rte_mem_iova2virt;
 	rte_mem_lock_page;
+	rte_mem_set_dma_mask;
 	rte_mem_virt2iova;
 	rte_mem_virt2memseg;
 	rte_mem_virt2memseg_list;
@@ -293,7 +296,6 @@ EXPERIMENTAL {
 	rte_dev_event_monitor_start; # WINDOWS_NO_EXPORT
 	rte_dev_event_monitor_stop; # WINDOWS_NO_EXPORT
 	rte_log_register_type_and_pick_level;
-	rte_mem_check_dma_mask;
 
 	# added in 18.08
 	rte_class_find;
@@ -308,8 +310,6 @@ EXPERIMENTAL {
 	rte_dev_event_callback_process;
 	rte_dev_hotplug_handle_disable; # WINDOWS_NO_EXPORT
 	rte_dev_hotplug_handle_enable; # WINDOWS_NO_EXPORT
-	rte_mem_check_dma_mask_thread_unsafe;
-	rte_mem_set_dma_mask;
 
 	# added in 19.05
 	rte_dev_dma_map;
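
For reference, below is a minimal usage sketch (not part of the patch
itself) showing how an application could call the now-stable API after
EAL initialization. The 31-bit mask value, the program structure and the
error handling are illustrative assumptions, not taken from this commit:

	#include <stdio.h>
	#include <stdlib.h>

	#include <rte_eal.h>
	#include <rte_memory.h>

	/* Hypothetical addressing limit of a device that can only DMA to
	 * 31-bit IOVAs; pick the width your hardware actually supports.
	 */
	#define EXAMPLE_DMA_MASK_BITS 31

	int
	main(int argc, char **argv)
	{
		if (rte_eal_init(argc, argv) < 0) {
			fprintf(stderr, "Cannot initialize EAL\n");
			return EXIT_FAILURE;
		}

		/* Returns 0 when every allocated memory segment is
		 * addressable within the supplied mask; a non-zero return
		 * is treated here as "memory does not fit".
		 */
		if (rte_mem_check_dma_mask(EXAMPLE_DMA_MASK_BITS) != 0) {
			fprintf(stderr,
				"Allocated memory exceeds a %d-bit DMA mask\n",
				EXAMPLE_DMA_MASK_BITS);
			rte_eal_cleanup();
			return EXIT_FAILURE;
		}

		printf("All memory segments fit within %d bits\n",
				EXAMPLE_DMA_MASK_BITS);

		rte_eal_cleanup();
		return EXIT_SUCCESS;
	}

rte_mem_set_dma_mask(), by contrast, is intended for code that runs
before memory initialization completes (for example a bus driver that
detects a device with limited addressing), where the two check functions
cannot yet be used safely, as the header comment above notes.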