1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2021 HiSilicon Limited
7 #include <rte_cycles.h>
8 #include <rte_malloc.h>
10 #include <rte_dmadev.h>
12 extern int test_dma_api(uint16_t dev_id);
14 #define DMA_TEST_API_RUN(test) \
15 testsuite_run_test(test, #test)
17 #define TEST_MEMCPY_SIZE 1024
18 #define TEST_WAIT_US_VAL 50000
20 #define TEST_SUCCESS 0
21 #define TEST_FAILED -1
23 static int16_t test_dev_id;
24 static int16_t invalid_dev_id;
/* Suite-wide setup: allocate the copy-test buffers and quiet dmadev logging.
 * NOTE(review): several original lines are elided from this view (return
 * type, allocation-failure checks, closing brace); comments cover only what
 * is visible.
 */
34 testsuite_setup(int16_t dev_id)
/* Source buffer for the DMA copy tests (TEST_MEMCPY_SIZE bytes). */
39 src = rte_malloc("dmadev_test_src", TEST_MEMCPY_SIZE, 0);
/* Destination buffer the completed copies are verified against. */
42 dst = rte_malloc("dmadev_test_dst", TEST_MEMCPY_SIZE, 0);
53 /* Set dmadev log level to critical to suppress unnecessary output
56 rte_log_set_level_pattern("lib.dmadev", RTE_LOG_CRIT);
/* Suite-wide teardown: stop the device under test and restore the dmadev
 * log level raised to CRIT in testsuite_setup().
 * NOTE(review): buffer-free lines are elided from this view.
 */
62 testsuite_teardown(void)
68 /* Ensure the dmadev is stopped. */
69 rte_dma_stop(test_dev_id);
/* Undo the RTE_LOG_CRIT suppression applied during setup. */
71 rte_log_set_level_pattern("lib.dmadev", RTE_LOG_INFO);
/* Run a single test case and report pass/fail by name; invoked via the
 * DMA_TEST_API_RUN() macro which stringifies the test function name.
 * NOTE(review): the counter updates (total/passed/failed) and branch logic
 * are on elided lines.
 */
75 testsuite_run_test(int (*test)(void), const char *name)
83 printf("%s Failed\n", name);
86 printf("%s Passed\n", name);
/* Negative test: looking up a device name that does not exist must fail
 * with -EINVAL.
 */
94 test_dma_get_dev_id_by_name(void)
96 int ret = rte_dma_get_dev_id_by_name("invalid_dmadev_device");
97 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
/* rte_dma_is_valid() must reject the invalid device id and accept the
 * device id chosen during setup.
 */
102 test_dma_is_valid_dev(void)
105 ret = rte_dma_is_valid(invalid_dev_id);
106 RTE_TEST_ASSERT(ret == false, "Expected false for invalid dev id");
107 ret = rte_dma_is_valid(test_dev_id);
108 RTE_TEST_ASSERT(ret == true, "Expected true for valid dev id");
/* Body of test_dma_count (function signature is on an elided line --
 * presumed from the DMA_TEST_API_RUN(test_dma_count) entry below): with a
 * test device present, at least one dmadev must be counted as available.
 */
115 uint16_t count = rte_dma_count_avail();
116 RTE_TEST_ASSERT(count > 0, "Invalid dmadev count %u", count);
/* rte_dma_info_get(): invalid device id and NULL info pointer must both
 * return -EINVAL; a valid query must succeed.
 */
121 test_dma_info_get(void)
123 struct rte_dma_info info = { 0 };
126 ret = rte_dma_info_get(invalid_dev_id, &info);
127 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
128 ret = rte_dma_info_get(test_dev_id, NULL);
129 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
130 ret = rte_dma_info_get(test_dev_id, &info);
131 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to obtain device info");
/* rte_dma_configure() parameter validation and a successful configure:
 * bad dev id, NULL conf, zero vchans, too many vchans, and silent mode
 * must all be rejected with -EINVAL; then a valid configuration is applied
 * and read back via rte_dma_info_get().
 */
137 test_dma_configure(void)
139 struct rte_dma_conf conf = { 0 };
140 struct rte_dma_info info = { 0 };
143 /* Check for invalid parameters */
144 ret = rte_dma_configure(invalid_dev_id, &conf);
145 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
146 ret = rte_dma_configure(test_dev_id, NULL);
147 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
149 /* Check for nb_vchans == 0 */
150 memset(&conf, 0, sizeof(conf));
151 ret = rte_dma_configure(test_dev_id, &conf);
152 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
154 /* Check for conf.nb_vchans > info.max_vchans */
155 ret = rte_dma_info_get(test_dev_id, &info);
156 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to obtain device info");
157 memset(&conf, 0, sizeof(conf));
158 conf.nb_vchans = info.max_vchans + 1;
159 ret = rte_dma_configure(test_dev_id, &conf);
160 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
162 /* Check enable silent mode */
/* NOTE(review): assumes the test device does not advertise silent-mode
 * capability, so enabling it must fail -- confirm against the device used.
 */
163 memset(&conf, 0, sizeof(conf));
164 conf.nb_vchans = info.max_vchans;
165 conf.enable_silent = true;
166 ret = rte_dma_configure(test_dev_id, &conf);
167 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
169 /* Configure success */
170 memset(&conf, 0, sizeof(conf));
171 conf.nb_vchans = info.max_vchans;
172 ret = rte_dma_configure(test_dev_id, &conf);
173 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to configure dmadev, %d", ret);
175 /* Check configure success */
/* Read the info back and verify the configured vchan count took effect. */
176 ret = rte_dma_info_get(test_dev_id, &info);
177 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to obtain device info");
178 RTE_TEST_ASSERT_EQUAL(conf.nb_vchans, info.nb_vchans,
179 "Configure nb_vchans not match");
/* Helper for test_dma_vchan_setup(): every invalid transfer direction --
 * out-of-range values on both sides of the enum, and valid directions the
 * device does not advertise in its capabilities -- must be rejected by
 * rte_dma_vchan_setup() with -EINVAL.
 * NOTE(review): assumes the test device only supports MEM_TO_MEM, so the
 * MEM_TO_DEV/DEV_TO_MEM/DEV_TO_DEV setups must fail -- confirm for the
 * device under test.
 */
185 check_direction(void)
187 struct rte_dma_vchan_conf vchan_conf;
190 /* Check for direction */
/* One past the last enum value ... */
191 memset(&vchan_conf, 0, sizeof(vchan_conf));
192 vchan_conf.direction = RTE_DMA_DIR_DEV_TO_DEV + 1;
193 ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
194 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
/* ... and one before the first. */
195 vchan_conf.direction = RTE_DMA_DIR_MEM_TO_MEM - 1;
196 ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
197 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
199 /* Check for direction and dev_capa combination */
200 memset(&vchan_conf, 0, sizeof(vchan_conf));
201 vchan_conf.direction = RTE_DMA_DIR_MEM_TO_DEV;
202 ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
203 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
204 vchan_conf.direction = RTE_DMA_DIR_DEV_TO_MEM;
205 ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
206 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
207 vchan_conf.direction = RTE_DMA_DIR_DEV_TO_DEV;
208 ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
209 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
/* Helper for test_dma_vchan_setup(): a MEM_TO_MEM vchan must not carry a
 * PCIe port type on either endpoint; rte_dma_vchan_setup() must return
 * -EINVAL for both the src and dst port misconfiguration.
 */
215 check_port_type(struct rte_dma_info *dev_info)
217 struct rte_dma_vchan_conf vchan_conf;
220 /* Check src port type validation */
221 memset(&vchan_conf, 0, sizeof(vchan_conf));
222 vchan_conf.direction = RTE_DMA_DIR_MEM_TO_MEM;
223 vchan_conf.nb_desc = dev_info->min_desc;
224 vchan_conf.src_port.port_type = RTE_DMA_PORT_PCIE;
225 ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
226 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
228 /* Check dst port type validation */
229 memset(&vchan_conf, 0, sizeof(vchan_conf));
230 vchan_conf.direction = RTE_DMA_DIR_MEM_TO_MEM;
231 vchan_conf.nb_desc = dev_info->min_desc;
232 vchan_conf.dst_port.port_type = RTE_DMA_PORT_PCIE;
233 ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
234 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
/* rte_dma_vchan_setup() validation: bad dev id / NULL conf / unconfigured
 * device, out-of-range vchan index, invalid directions (check_direction),
 * nb_desc outside [min_desc, max_desc], bad port types (check_port_type),
 * and finally a successful MEM_TO_MEM vchan setup.
 */
240 test_dma_vchan_setup(void)
242 struct rte_dma_vchan_conf vchan_conf = { 0 };
243 struct rte_dma_conf dev_conf = { 0 };
244 struct rte_dma_info dev_info = { 0 };
247 /* Check for invalid parameters */
248 ret = rte_dma_vchan_setup(invalid_dev_id, 0, &vchan_conf);
249 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
250 ret = rte_dma_vchan_setup(test_dev_id, 0, NULL);
251 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
/* Device not configured yet, so even a non-NULL conf must fail here. */
252 ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
253 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
255 /* Make sure configure success */
256 ret = rte_dma_info_get(test_dev_id, &dev_info);
257 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to obtain device info");
258 dev_conf.nb_vchans = dev_info.max_vchans;
259 ret = rte_dma_configure(test_dev_id, &dev_conf);
260 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to configure dmadev, %d", ret);
262 /* Check for invalid vchan */
/* Valid vchan indices are [0, nb_vchans); nb_vchans itself is out of range. */
263 ret = rte_dma_vchan_setup(test_dev_id, dev_conf.nb_vchans, &vchan_conf);
264 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
266 /* Check for direction */
267 ret = check_direction();
268 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to check direction");
270 /* Check for nb_desc validation */
/* One below the device minimum and one above the maximum must both fail. */
271 memset(&vchan_conf, 0, sizeof(vchan_conf));
272 vchan_conf.direction = RTE_DMA_DIR_MEM_TO_MEM;
273 vchan_conf.nb_desc = dev_info.min_desc - 1;
274 ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
275 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
276 vchan_conf.nb_desc = dev_info.max_desc + 1;
277 ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
278 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
280 /* Check port type */
281 ret = check_port_type(&dev_info);
282 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to check port type");
284 /* Check vchan setup success */
285 memset(&vchan_conf, 0, sizeof(vchan_conf));
286 vchan_conf.direction = RTE_DMA_DIR_MEM_TO_MEM;
287 vchan_conf.nb_desc = dev_info.min_desc;
288 ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
289 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to setup vchan, %d", ret);
/* Shared fixture: configure the test device with its maximum vchan count
 * and set up vchan 0 as MEM_TO_MEM with the minimum descriptor count.
 * Used by the start/stop, stats and completion tests below.
 */
295 setup_one_vchan(void)
297 struct rte_dma_vchan_conf vchan_conf = { 0 };
298 struct rte_dma_info dev_info = { 0 };
299 struct rte_dma_conf dev_conf = { 0 };
302 ret = rte_dma_info_get(test_dev_id, &dev_info);
303 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to obtain device info, %d", ret);
304 dev_conf.nb_vchans = dev_info.max_vchans;
305 ret = rte_dma_configure(test_dev_id, &dev_conf);
306 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to configure, %d", ret);
307 vchan_conf.direction = RTE_DMA_DIR_MEM_TO_MEM;
308 vchan_conf.nb_desc = dev_info.min_desc;
309 ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
310 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to setup vchan, %d", ret);
/* Start/stop lifecycle: invalid dev ids must fail with -EINVAL; once the
 * device is started, reconfiguration and vchan setup must be refused with
 * -EBUSY; stop must then succeed.
 */
316 test_dma_start_stop(void)
318 struct rte_dma_vchan_conf vchan_conf = { 0 };
319 struct rte_dma_conf dev_conf = { 0 };
322 /* Check for invalid parameters */
323 ret = rte_dma_start(invalid_dev_id);
324 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
325 ret = rte_dma_stop(invalid_dev_id);
326 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
328 /* Setup one vchan for later test */
329 ret = setup_one_vchan();
330 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to setup one vchan, %d", ret);
332 ret = rte_dma_start(test_dev_id);
333 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to start, %d", ret);
335 /* Check reconfigure and vchan setup when device started */
336 ret = rte_dma_configure(test_dev_id, &dev_conf);
337 RTE_TEST_ASSERT(ret == -EBUSY, "Failed to configure, %d", ret);
338 ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
339 RTE_TEST_ASSERT(ret == -EBUSY, "Failed to setup vchan, %d", ret);
341 ret = rte_dma_stop(test_dev_id);
342 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to stop, %d", ret);
/* Body of test_dma_stats (function signature is on an elided line --
 * presumed from the DMA_TEST_API_RUN(test_dma_stats) entry below):
 * stats get/reset must reject invalid dev ids and out-of-range vchans,
 * and succeed for vchan 0 and the RTE_DMA_ALL_VCHAN wildcard.
 */
350 struct rte_dma_info dev_info = { 0 };
351 struct rte_dma_stats stats = { 0 };
354 /* Check for invalid parameters */
355 ret = rte_dma_stats_get(invalid_dev_id, 0, &stats);
356 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
357 ret = rte_dma_stats_get(invalid_dev_id, 0, NULL);
358 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
359 ret = rte_dma_stats_reset(invalid_dev_id, 0);
360 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
362 /* Setup one vchan for later test */
363 ret = setup_one_vchan();
364 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to setup one vchan, %d", ret);
366 /* Check for invalid vchan */
/* max_vchans is one past the last valid vchan index. */
367 ret = rte_dma_info_get(test_dev_id, &dev_info);
368 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to obtain device info, %d", ret);
369 ret = rte_dma_stats_get(test_dev_id, dev_info.max_vchans, &stats);
370 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
371 ret = rte_dma_stats_reset(test_dev_id, dev_info.max_vchans);
372 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
374 /* Check for valid vchan */
375 ret = rte_dma_stats_get(test_dev_id, 0, &stats);
376 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to get stats, %d", ret);
377 ret = rte_dma_stats_get(test_dev_id, RTE_DMA_ALL_VCHAN, &stats);
378 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to get all stats, %d", ret);
379 ret = rte_dma_stats_reset(test_dev_id, 0);
380 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to reset stats, %d", ret);
381 ret = rte_dma_stats_reset(test_dev_id, RTE_DMA_ALL_VCHAN);
382 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to reset all stats, %d", ret);
/* Body of the rte_dma_dump() negative-parameter test (function signature is
 * on an elided line): an unknown device id and a NULL output stream must
 * both be rejected with -EINVAL.
 * Fix: the assertion messages said "Excepted -EINVAL" -- corrected to
 * "Expected -EINVAL" to match every other assertion message in this file.
 */
392 /* Check for invalid parameters */
393 ret = rte_dma_dump(invalid_dev_id, stderr);
394 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
395 ret = rte_dma_dump(test_dev_id, NULL);
396 RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
/* Memory-helper fragments (function signatures on elided lines): the first
 * loop + memset presumably belong to a setup helper that fills src and
 * zeroes dst before a copy; the comparison loop belongs to verify_memory(),
 * which the completion tests call to check the copy byte-for-byte.
 * NOTE(review): the fill expression (line 407) and the loop's `continue`
 * (line 418) are elided -- verify against the full file.
 */
406 for (i = 0; i < TEST_MEMCPY_SIZE; i++)
408 memset(dst, 0, TEST_MEMCPY_SIZE);
/* Compare src and dst; mismatching bytes fail the assertion below. */
416 for (i = 0; i < TEST_MEMCPY_SIZE; i++) {
417 if (src[i] == dst[i])
419 RTE_TEST_ASSERT_EQUAL(src[i], dst[i],
420 "Failed to copy memory, %d %d", src[i], dst[i]);
/* Copy + completion-polling flow: an enqueue without RTE_DMA_OP_FLAG_SUBMIT
 * must not complete until rte_dma_submit() is called; an enqueue with the
 * submit flag completes on its own. Ring indices and the copied memory are
 * verified after each completion.
 */
427 test_dma_completed(void)
/* Deliberately non-zero initial values so the completed-path writes are
 * observable. */
429 uint16_t last_idx = 1;
430 bool has_error = true;
434 /* Setup one vchan for later test */
435 ret = setup_one_vchan();
436 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to setup one vchan, %d", ret);
438 ret = rte_dma_start(test_dev_id);
439 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to start, %d", ret);
443 /* Check enqueue without submit */
/* First enqueue returns ring index 0. */
444 ret = rte_dma_copy(test_dev_id, 0, (rte_iova_t)src, (rte_iova_t)dst,
445 TEST_MEMCPY_SIZE, 0);
446 RTE_TEST_ASSERT_EQUAL(ret, 0, "Failed to enqueue copy, %d", ret);
447 rte_delay_us_sleep(TEST_WAIT_US_VAL);
/* Not submitted yet, so nothing may have completed. */
448 cpl_ret = rte_dma_completed(test_dev_id, 0, 1, &last_idx, &has_error);
449 RTE_TEST_ASSERT_EQUAL(cpl_ret, 0, "Failed to get completed");
451 /* Check add submit */
452 ret = rte_dma_submit(test_dev_id, 0);
453 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to submit, %d", ret);
454 rte_delay_us_sleep(TEST_WAIT_US_VAL);
455 cpl_ret = rte_dma_completed(test_dev_id, 0, 1, &last_idx, &has_error);
456 RTE_TEST_ASSERT_EQUAL(cpl_ret, 1, "Failed to get completed");
457 RTE_TEST_ASSERT_EQUAL(last_idx, 0, "Last idx should be zero, %u",
459 RTE_TEST_ASSERT_EQUAL(has_error, false, "Should have no error");
460 ret = verify_memory();
461 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to verify memory");
465 /* Check for enqueue with submit */
/* Second enqueue returns ring index 1; the submit flag makes it complete
 * without an explicit rte_dma_submit(). */
466 ret = rte_dma_copy(test_dev_id, 0, (rte_iova_t)src, (rte_iova_t)dst,
467 TEST_MEMCPY_SIZE, RTE_DMA_OP_FLAG_SUBMIT);
468 RTE_TEST_ASSERT_EQUAL(ret, 1, "Failed to enqueue copy, %d", ret);
469 rte_delay_us_sleep(TEST_WAIT_US_VAL);
470 cpl_ret = rte_dma_completed(test_dev_id, 0, 1, &last_idx, &has_error);
471 RTE_TEST_ASSERT_EQUAL(cpl_ret, 1, "Failed to get completed");
472 RTE_TEST_ASSERT_EQUAL(last_idx, 1, "Last idx should be 1, %u",
474 RTE_TEST_ASSERT_EQUAL(has_error, false, "Should have no error");
475 ret = verify_memory();
476 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to verify memory");
478 /* Stop dmadev to make sure dmadev to a known state */
479 ret = rte_dma_stop(test_dev_id);
480 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to stop, %d", ret);
/* rte_dma_completed_status() flow: after each submitted copy, one
 * completion with a zero (success) status code must be reported, a second
 * poll with nothing outstanding must return 0, and ring indices must
 * advance 0 -> 1 across the two copies.
 */
486 test_dma_completed_status(void)
/* Seed status with a non-zero value so the driver write is observable. */
488 enum rte_dma_status_code status[1] = { 1 };
489 uint16_t last_idx = 1;
493 /* Setup one vchan for later test */
494 ret = setup_one_vchan();
495 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to setup one vchan, %d", ret);
497 ret = rte_dma_start(test_dev_id);
498 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to start, %d", ret);
500 /* Check for enqueue with submit */
501 ret = rte_dma_copy(test_dev_id, 0, (rte_iova_t)src, (rte_iova_t)dst,
502 TEST_MEMCPY_SIZE, RTE_DMA_OP_FLAG_SUBMIT);
503 RTE_TEST_ASSERT_EQUAL(ret, 0, "Failed to enqueue copy, %d", ret);
504 rte_delay_us_sleep(TEST_WAIT_US_VAL);
505 cpl_ret = rte_dma_completed_status(test_dev_id, 0, 1, &last_idx,
507 RTE_TEST_ASSERT_EQUAL(cpl_ret, 1, "Failed to completed status");
508 RTE_TEST_ASSERT_EQUAL(last_idx, 0, "Last idx should be zero, %u",
/* Every reported status code must be 0 (success). */
510 for (i = 0; i < RTE_DIM(status); i++)
511 RTE_TEST_ASSERT_EQUAL(status[i], 0,
512 "Failed to completed status, %d", status[i]);
514 /* Check do completed status again */
/* Nothing outstanding now, so the poll must report zero completions. */
515 cpl_ret = rte_dma_completed_status(test_dev_id, 0, 1, &last_idx,
517 RTE_TEST_ASSERT_EQUAL(cpl_ret, 0, "Failed to completed status");
519 /* Check for enqueue with submit again */
520 ret = rte_dma_copy(test_dev_id, 0, (rte_iova_t)src, (rte_iova_t)dst,
521 TEST_MEMCPY_SIZE, RTE_DMA_OP_FLAG_SUBMIT);
522 RTE_TEST_ASSERT_EQUAL(ret, 1, "Failed to enqueue copy, %d", ret);
523 rte_delay_us_sleep(TEST_WAIT_US_VAL);
524 cpl_ret = rte_dma_completed_status(test_dev_id, 0, 1, &last_idx,
526 RTE_TEST_ASSERT_EQUAL(cpl_ret, 1, "Failed to completed status");
527 RTE_TEST_ASSERT_EQUAL(last_idx, 1, "Last idx should be 1, %u",
529 for (i = 0; i < RTE_DIM(status); i++)
530 RTE_TEST_ASSERT_EQUAL(status[i], 0,
531 "Failed to completed status, %d", status[i]);
533 /* Stop dmadev to make sure dmadev to a known state */
534 ret = rte_dma_stop(test_dev_id);
535 RTE_TEST_ASSERT_SUCCESS(ret, "Failed to stop, %d", ret);
/* Public entry point (declared extern above): run the whole dmadev API
 * suite against dev_id, then tear down and print a pass/fail summary.
 * NOTE(review): the setup-failure return and final return value are on
 * elided lines.
 */
541 test_dma_api(uint16_t dev_id)
543 int ret = testsuite_setup(dev_id);
545 printf("testsuite setup fail!\n");
549 /* If the testcase exit successfully, ensure that the test dmadev exist
550 * and the dmadev is in the stopped state.
/* Each DMA_TEST_API_RUN() invocation executes one test case and records
 * the result via testsuite_run_test(). */
552 DMA_TEST_API_RUN(test_dma_get_dev_id_by_name);
553 DMA_TEST_API_RUN(test_dma_is_valid_dev);
554 DMA_TEST_API_RUN(test_dma_count);
555 DMA_TEST_API_RUN(test_dma_info_get);
556 DMA_TEST_API_RUN(test_dma_configure);
557 DMA_TEST_API_RUN(test_dma_vchan_setup);
558 DMA_TEST_API_RUN(test_dma_start_stop);
559 DMA_TEST_API_RUN(test_dma_stats);
560 DMA_TEST_API_RUN(test_dma_dump);
561 DMA_TEST_API_RUN(test_dma_completed);
562 DMA_TEST_API_RUN(test_dma_completed_status);
564 testsuite_teardown();
566 printf("Total tests : %d\n", total);
567 printf("Passed : %d\n", passed);
568 printf("Failed : %d\n", failed);