unsigned rte_memory_get_nrank(void);
/**
- * @warning
- * @b EXPERIMENTAL: this API may change without prior notice
- *
* Check if all currently allocated memory segments are compliant with
* supplied DMA address width.
*
* @param maskbits
* Address width to check against.
*/
-__rte_experimental
int rte_mem_check_dma_mask(uint8_t maskbits);
/**
- * @warning
- * @b EXPERIMENTAL: this API may change without prior notice
- *
* Check if all currently allocated memory segments are compliant with
* supplied DMA address width. This function will use
* rte_memseg_walk_thread_unsafe instead of rte_memseg_walk, implying it is
* not thread-safe: the caller must ensure the memory map cannot change
* concurrently (e.g. call only during initialization).
*
* @param maskbits
* Address width to check against.
*/
-__rte_experimental
int rte_mem_check_dma_mask_thread_unsafe(uint8_t maskbits);
/**
- * @warning
- * @b EXPERIMENTAL: this API may change without prior notice
- *
* Set the DMA mask to use once memory initialization is done. The previous
* functions, rte_mem_check_dma_mask and rte_mem_check_dma_mask_thread_unsafe,
* cannot be used safely until memory has been initialized.
*/
-__rte_experimental
void rte_mem_set_dma_mask(uint8_t maskbits);
/**
rte_mcfg_tailq_write_unlock;
rte_mem_alloc_validator_register;
rte_mem_alloc_validator_unregister;
+ rte_mem_check_dma_mask;
+ rte_mem_check_dma_mask_thread_unsafe;
rte_mem_event_callback_register;
rte_mem_event_callback_unregister;
rte_mem_iova2virt;
rte_mem_lock_page;
+ rte_mem_set_dma_mask;
rte_mem_virt2iova;
rte_mem_virt2memseg;
rte_mem_virt2memseg_list;
rte_dev_event_monitor_start; # WINDOWS_NO_EXPORT
rte_dev_event_monitor_stop; # WINDOWS_NO_EXPORT
rte_log_register_type_and_pick_level;
- rte_mem_check_dma_mask;
# added in 18.08
rte_class_find;
rte_dev_event_callback_process;
rte_dev_hotplug_handle_disable; # WINDOWS_NO_EXPORT
rte_dev_hotplug_handle_enable; # WINDOWS_NO_EXPORT
- rte_mem_check_dma_mask_thread_unsafe;
- rte_mem_set_dma_mask;
# added in 19.05
rte_dev_dma_map;