Lines Matching refs:dev_id (each entry below gives the source line number, the matching code, and the enclosing function)
39 runtest(const char *printable, int (*test_fn)(int16_t dev_id, uint16_t vchan), int iterations, in runtest() argument
40 int16_t dev_id, uint16_t vchan, bool check_err_stats) in runtest()
45 rte_dma_stats_reset(dev_id, vchan); in runtest()
46 printf("DMA Dev %d: Running %s Tests %s\n", dev_id, printable, in runtest()
49 if (test_fn(dev_id, vchan) < 0) in runtest()
52 rte_dma_stats_get(dev_id, 0, &stats); in runtest()
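The runtest() references above come from DPDK's dmadev self-test code: each test callback runs between rte_dma_stats_reset() and rte_dma_stats_get() so per-vchan counters can be checked afterwards. A minimal sketch of that wrapper pattern, assuming only the rte_dma_stats_* calls visible in the listing (the function name and the zero-errors policy here are illustrative, not the file's):

#include <rte_dmadev.h>

/* Sketch, not the file's runtest(): reset stats, run a caller-supplied test
 * a number of times, then require that the vchan reports zero errors. */
static int
run_with_stats_check(int16_t dev_id, uint16_t vchan,
		int (*test_fn)(int16_t dev_id, uint16_t vchan), int iterations)
{
	struct rte_dma_stats stats;
	int i;

	rte_dma_stats_reset(dev_id, vchan);
	for (i = 0; i < iterations; i++)
		if (test_fn(dev_id, vchan) < 0)
			return -1;

	if (rte_dma_stats_get(dev_id, vchan, &stats) != 0)
		return -1;
	return stats.errors == 0 ? 0 : -1;
}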
67 await_hw(int16_t dev_id, uint16_t vchan) in await_hw() argument
71 if (rte_dma_vchan_status(dev_id, vchan, &st) < 0) { in await_hw()
81 rte_dma_vchan_status(dev_id, vchan, &st); in await_hw()
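await_hw() polls rte_dma_vchan_status() until the virtual channel stops reporting work in flight. A hedged sketch of such a helper, using rte_delay_us_sleep() as a fallback when the driver cannot report status (the helper name and the 100 us delay are assumptions, not the test's values):

#include <rte_cycles.h>
#include <rte_dmadev.h>

/* Sketch of a wait-for-hardware helper: poll the vchan status until it is
 * no longer active; fall back to a fixed sleep if status is unsupported. */
static void
wait_for_idle(int16_t dev_id, uint16_t vchan)
{
	enum rte_dma_vchan_status st;

	if (rte_dma_vchan_status(dev_id, vchan, &st) < 0) {
		rte_delay_us_sleep(100); /* driver cannot report status */
		return;
	}
	while (st == RTE_DMA_VCHAN_ACTIVE)
		rte_dma_vchan_status(dev_id, vchan, &st);
}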
87 do_multi_copies(int16_t dev_id, uint16_t vchan, in do_multi_copies() argument
102 rte_dma_submit(dev_id, vchan); in do_multi_copies()
113 if (rte_dma_copy(dev_id, vchan, srcs[i]->buf_iova + srcs[i]->data_off, in do_multi_copies()
117 rte_dma_submit(dev_id, vchan); in do_multi_copies()
119 await_hw(dev_id, vchan); in do_multi_copies()
124 int ret = rte_dma_completed(dev_id, vchan, half_len, NULL, &dma_err); in do_multi_copies()
129 ret = rte_dma_completed(dev_id, vchan, half_len, NULL, &dma_err); in do_multi_copies()
138 int n = rte_dma_completed(dev_id, vchan, RTE_DIM(srcs), NULL, &dma_err); in do_multi_copies()
143 int n = rte_dma_completed_status(dev_id, vchan, RTE_DIM(srcs), NULL, sc); in do_multi_copies()
157 rte_dma_completed_status(dev_id, vchan, RTE_DIM(srcs), NULL, sc) : in do_multi_copies()
158 rte_dma_completed(dev_id, vchan, RTE_DIM(srcs), NULL, &dma_err); in do_multi_copies()
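do_multi_copies() exercises the basic enqueue/submit/complete flow in whole-batch and split-batch variants. A simplified sketch of the core flow, assuming pre-populated mbuf arrays and the wait_for_idle() helper sketched above; the split-completion and completed_status variants the test also covers are omitted:

#include <stdbool.h>

#include <rte_dmadev.h>
#include <rte_mbuf.h>

/* Sketch: enqueue n copies, submit them as one batch, wait, and reap all
 * completions, failing if any job reported an error. */
static int
copy_burst(int16_t dev_id, uint16_t vchan,
		struct rte_mbuf **srcs, struct rte_mbuf **dsts, uint16_t n)
{
	bool dma_err = false;
	uint16_t i;

	for (i = 0; i < n; i++)
		if (rte_dma_copy(dev_id, vchan, rte_pktmbuf_iova(srcs[i]),
				rte_pktmbuf_iova(dsts[i]),
				rte_pktmbuf_data_len(srcs[i]), 0) < 0)
			return -1;
	rte_dma_submit(dev_id, vchan);

	wait_for_idle(dev_id, vchan); /* helper sketched earlier */
	if (rte_dma_completed(dev_id, vchan, n, NULL, &dma_err) != n || dma_err)
		return -1;
	return 0;
}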
178 test_enqueue_copies(int16_t dev_id, uint16_t vchan) in test_enqueue_copies() argument
196 id = rte_dma_copy(dev_id, vchan, rte_pktmbuf_iova(src), rte_pktmbuf_iova(dst), in test_enqueue_copies()
203 await_hw(dev_id, vchan); in test_enqueue_copies()
211 if (rte_dma_completed(dev_id, vchan, 1, &id, NULL) != 1) in test_enqueue_copies()
222 if (rte_dma_completed(dev_id, 0, 1, NULL, NULL) != 0) in test_enqueue_copies()
246 if (rte_dma_copy(dev_id, vchan, in test_enqueue_copies()
252 await_hw(dev_id, vchan); in test_enqueue_copies()
254 count = rte_dma_completed(dev_id, vchan, max_ops * 2, &id, NULL); in test_enqueue_copies()
272 return do_multi_copies(dev_id, vchan, 0, 0, 0) /* enqueue and complete 1 batch at a time */ in test_enqueue_copies()
274 || do_multi_copies(dev_id, vchan, 1, 0, 0) in test_enqueue_copies()
276 || do_multi_copies(dev_id, vchan, 0, 1, 0) in test_enqueue_copies()
278 || do_multi_copies(dev_id, vchan, 0, 0, 1); in test_enqueue_copies()
286 test_failure_in_full_burst(int16_t dev_id, uint16_t vchan, bool fence, in test_failure_in_full_burst() argument
299 rte_dma_stats_get(dev_id, vchan, &baseline); /* get a baseline set of stats */ in test_failure_in_full_burst()
301 int id = rte_dma_copy(dev_id, vchan, in test_failure_in_full_burst()
310 rte_dma_submit(dev_id, vchan); in test_failure_in_full_burst()
311 rte_dma_stats_get(dev_id, vchan, &stats); in test_failure_in_full_burst()
316 await_hw(dev_id, vchan); in test_failure_in_full_burst()
318 count = rte_dma_completed(dev_id, vchan, COMP_BURST_SZ, &idx, &error); in test_failure_in_full_burst()
331 if (rte_dma_completed(dev_id, vchan, COMP_BURST_SZ, &idx, &error) != 0 in test_failure_in_full_burst()
336 status_count = rte_dma_completed_status(dev_id, vchan, COMP_BURST_SZ, in test_failure_in_full_burst()
343 await_hw(dev_id, vchan); in test_failure_in_full_burst()
344 status_count += rte_dma_completed_status(dev_id, vchan, COMP_BURST_SZ - 1, in test_failure_in_full_burst()
365 rte_dma_stats_get(dev_id, vchan, &stats); in test_failure_in_full_burst()
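test_failure_in_full_burst() checks how a failed job inside a burst is reported and how the stats counters move. A sketch of the two-step drain the references above rely on: rte_dma_completed() stops at the first failed job, then rte_dma_completed_status() returns the remaining jobs together with per-job status codes (the helper name and the simplified retry handling are assumptions):

#include <stdbool.h>

#include <rte_dmadev.h>

/* Sketch: reap a burst that may contain one failed job. */
static int
drain_with_errors(int16_t dev_id, uint16_t vchan, uint16_t burst_sz)
{
	enum rte_dma_status_code status[burst_sz];
	uint16_t idx, done;
	bool error = false;

	/* completed() returns only the jobs before the failure, sets *has_error */
	done = rte_dma_completed(dev_id, vchan, burst_sz, &idx, &error);
	if (!error)
		return done == burst_sz ? 0 : -1;

	/* let hardware finish the jobs queued behind the failed one */
	wait_for_idle(dev_id, vchan); /* helper sketched earlier */

	/* completed_status() also returns the failed job, with its status code */
	done += rte_dma_completed_status(dev_id, vchan, burst_sz - done,
			&idx, &status[done]);
	return done == burst_sz ? 0 : -1;
}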
379 test_individual_status_query_with_failure(int16_t dev_id, uint16_t vchan, bool fence, in test_individual_status_query_with_failure() argument
391 int id = rte_dma_copy(dev_id, vchan, in test_individual_status_query_with_failure()
400 rte_dma_submit(dev_id, vchan); in test_individual_status_query_with_failure()
401 await_hw(dev_id, vchan); in test_individual_status_query_with_failure()
405 uint16_t n = rte_dma_completed(dev_id, vchan, 1, &idx, &error); in test_individual_status_query_with_failure()
419 uint16_t n = rte_dma_completed_status(dev_id, vchan, 1, &idx, in test_individual_status_query_with_failure()
421 await_hw(dev_id, vchan); /* allow delay to ensure jobs are completed */ in test_individual_status_query_with_failure()
440 test_single_item_status_query_with_failure(int16_t dev_id, uint16_t vchan, in test_single_item_status_query_with_failure() argument
454 int id = rte_dma_copy(dev_id, vchan, in test_single_item_status_query_with_failure()
463 rte_dma_submit(dev_id, vchan); in test_single_item_status_query_with_failure()
464 await_hw(dev_id, vchan); in test_single_item_status_query_with_failure()
467 count = rte_dma_completed(dev_id, vchan, COMP_BURST_SZ, &idx, &error); in test_single_item_status_query_with_failure()
479 status_count = rte_dma_completed_status(dev_id, vchan, 1, &idx, &status); in test_single_item_status_query_with_failure()
488 await_hw(dev_id, vchan); in test_single_item_status_query_with_failure()
491 count2 = rte_dma_completed(dev_id, vchan, COMP_BURST_SZ, &idx, &error); in test_single_item_status_query_with_failure()
503 test_multi_failure(int16_t dev_id, uint16_t vchan, struct rte_mbuf **srcs, struct rte_mbuf **dsts, in test_multi_failure() argument
520 int id = rte_dma_copy(dev_id, vchan, in test_multi_failure()
526 rte_dma_submit(dev_id, vchan); in test_multi_failure()
527 await_hw(dev_id, vchan); in test_multi_failure()
529 count = rte_dma_completed_status(dev_id, vchan, COMP_BURST_SZ, NULL, status); in test_multi_failure()
531 await_hw(dev_id, vchan); in test_multi_failure()
533 uint16_t ret = rte_dma_completed_status(dev_id, vchan, COMP_BURST_SZ - count, in test_multi_failure()
556 int id = rte_dma_copy(dev_id, vchan, in test_multi_failure()
562 rte_dma_submit(dev_id, vchan); in test_multi_failure()
563 await_hw(dev_id, vchan); in test_multi_failure()
568 count += rte_dma_completed(dev_id, vchan, COMP_BURST_SZ, NULL, &error); in test_multi_failure()
570 uint16_t ret = rte_dma_completed_status(dev_id, vchan, 1, in test_multi_failure()
575 await_hw(dev_id, vchan); in test_multi_failure()
586 test_completion_status(int16_t dev_id, uint16_t vchan, bool fence) in test_completion_status() argument
598 if (test_failure_in_full_burst(dev_id, vchan, fence, srcs, dsts, fail[i]) < 0) in test_completion_status()
601 if (test_individual_status_query_with_failure(dev_id, vchan, fence, in test_completion_status()
606 if (test_single_item_status_query_with_failure(dev_id, vchan, in test_completion_status()
611 if (test_multi_failure(dev_id, vchan, srcs, dsts, fail, RTE_DIM(fail)) < 0) in test_completion_status()
622 test_completion_handling(int16_t dev_id, uint16_t vchan) in test_completion_handling() argument
624 return test_completion_status(dev_id, vchan, false) /* without fences */ in test_completion_handling()
625 || test_completion_status(dev_id, vchan, true); /* with fences */ in test_completion_handling()
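test_completion_handling() runs the completion-status tests twice, without and with fences. A sketch of what a fenced enqueue looks like: RTE_DMA_OP_FLAG_FENCE makes the flagged job wait for all previously enqueued jobs, and RTE_DMA_OP_FLAG_SUBMIT flushes the batch without a separate rte_dma_submit() call (the function and its placeholder IOVA parameters are illustrative):

#include <rte_dmadev.h>

/* Sketch: enqueue two copies where the second must not start before the
 * first has finished. */
static int
fenced_pair(int16_t dev_id, uint16_t vchan,
		rte_iova_t src1, rte_iova_t dst1,
		rte_iova_t src2, rte_iova_t dst2, uint32_t len)
{
	if (rte_dma_copy(dev_id, vchan, src1, dst1, len, 0) < 0)
		return -1;
	return rte_dma_copy(dev_id, vchan, src2, dst2, len,
			RTE_DMA_OP_FLAG_FENCE | RTE_DMA_OP_FLAG_SUBMIT) < 0 ? -1 : 0;
}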
629 test_enqueue_fill(int16_t dev_id, uint16_t vchan) in test_enqueue_fill() argument
647 int id = rte_dma_fill(dev_id, vchan, pattern, in test_enqueue_fill()
651 await_hw(dev_id, vchan); in test_enqueue_fill()
653 if (rte_dma_completed(dev_id, vchan, 1, NULL, NULL) != 1) in test_enqueue_fill()
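test_enqueue_fill() checks rte_dma_fill(), which writes a repeating 64-bit pattern into the destination. A sketch of a single fill-and-wait, with an arbitrary pattern and a placeholder destination IOVA, reusing the wait_for_idle() helper sketched earlier:

#include <rte_dmadev.h>

/* Sketch: fill a buffer with a pattern and wait for the one completion. */
static int
fill_buffer(int16_t dev_id, uint16_t vchan, rte_iova_t dst_iova, uint32_t len)
{
	const uint64_t pattern = 0xfedcba9876543210ULL; /* arbitrary pattern */

	if (rte_dma_fill(dev_id, vchan, pattern, dst_iova, len,
			RTE_DMA_OP_FLAG_SUBMIT) < 0)
		return -1;
	wait_for_idle(dev_id, vchan); /* helper sketched earlier */
	return rte_dma_completed(dev_id, vchan, 1, NULL, NULL) == 1 ? 0 : -1;
}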
674 test_burst_capacity(int16_t dev_id, uint16_t vchan) in test_burst_capacity() argument
677 const int ring_space = rte_dma_burst_capacity(dev_id, vchan); in test_burst_capacity()
695 cap = rte_dma_burst_capacity(dev_id, vchan); in test_burst_capacity()
698 ret = rte_dma_copy(dev_id, vchan, rte_pktmbuf_iova(src), in test_burst_capacity()
703 if (rte_dma_burst_capacity(dev_id, vchan) != cap - (j + 1)) in test_burst_capacity()
706 if (rte_dma_submit(dev_id, vchan) < 0) in test_burst_capacity()
709 if (cap < rte_dma_burst_capacity(dev_id, vchan)) in test_burst_capacity()
712 await_hw(dev_id, vchan); in test_burst_capacity()
715 ret = rte_dma_completed(dev_id, vchan, in test_burst_capacity()
720 rte_dma_completed_status(dev_id, vchan, 1, NULL, &status); in test_burst_capacity()
725 cap = rte_dma_burst_capacity(dev_id, vchan); in test_burst_capacity()
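test_burst_capacity() watches the free ring space reported by rte_dma_burst_capacity() shrink as jobs are enqueued and recover as they complete. A sketch of using that value for back-pressure before submitting; enqueue_one() is a placeholder callback, not something from the test:

#include <rte_dmadev.h>

/* Sketch: keep enqueuing while the vchan still has descriptor space,
 * then ring the doorbell once. */
static int
enqueue_with_backpressure(int16_t dev_id, uint16_t vchan,
		int (*enqueue_one)(int16_t dev_id, uint16_t vchan))
{
	while (rte_dma_burst_capacity(dev_id, vchan) > 0)
		if (enqueue_one(dev_id, vchan) < 0)
			return -1;
	return rte_dma_submit(dev_id, vchan);
}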
738 test_dmadev_instance(int16_t dev_id) in test_dmadev_instance() argument
752 ret = rte_dma_info_get(dev_id, &info); in test_dmadev_instance()
757 dev_id, info.dev_name); in test_dmadev_instance()
760 ERR_RETURN("Error, no channels available on device id %u\n", dev_id); in test_dmadev_instance()
762 if (rte_dma_configure(dev_id, &conf) != 0) in test_dmadev_instance()
765 if (rte_dma_vchan_setup(dev_id, vchan, &qconf) < 0) in test_dmadev_instance()
768 ret = rte_dma_info_get(dev_id, &info); in test_dmadev_instance()
770 ERR_RETURN("Error, no configured queues reported on device id %u\n", dev_id); in test_dmadev_instance()
772 if (rte_dma_start(dev_id) != 0) in test_dmadev_instance()
775 if (rte_dma_stats_get(dev_id, vchan, &stats) != 0) in test_dmadev_instance()
778 if (rte_dma_burst_capacity(dev_id, vchan) < 32) in test_dmadev_instance()
798 if (runtest("copy", test_enqueue_copies, 640, dev_id, vchan, CHECK_ERRS) < 0) in test_dmadev_instance()
802 if (rte_dma_burst_capacity(dev_id, vchan) < 64) in test_dmadev_instance()
804 dev_id); in test_dmadev_instance()
805 else if (runtest("burst capacity", test_burst_capacity, 1, dev_id, vchan, CHECK_ERRS) < 0) in test_dmadev_instance()
813 printf("DMA Dev %u: DPDK not in VA mode, skipping error handling tests\n", dev_id); in test_dmadev_instance()
816 dev_id); in test_dmadev_instance()
818 dev_id, vchan, !CHECK_ERRS) < 0) in test_dmadev_instance()
822 printf("DMA Dev %u: No device fill support, skipping fill tests\n", dev_id); in test_dmadev_instance()
823 else if (runtest("fill", test_enqueue_fill, 1, dev_id, vchan, CHECK_ERRS) < 0) in test_dmadev_instance()
827 rte_dma_stop(dev_id); in test_dmadev_instance()
828 rte_dma_stats_reset(dev_id, vchan); in test_dmadev_instance()
833 rte_dma_stop(dev_id); in test_dmadev_instance()
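test_dmadev_instance() wraps the individual tests in the full device lifecycle: rte_dma_info_get(), rte_dma_configure(), rte_dma_vchan_setup(), rte_dma_start(), and finally rte_dma_stop() as the last lines above show. A sketch of that bring-up order for a single mem-to-mem vchan; the ring size of 1024 is an illustrative choice, not the test's configuration:

#include <rte_dmadev.h>

/* Sketch: query capabilities, configure one vchan for memory-to-memory
 * copies, and start the device; pair with rte_dma_stop() when done. */
static int
setup_dma_device(int16_t dev_id, uint16_t vchan)
{
	struct rte_dma_info info;
	const struct rte_dma_conf conf = { .nb_vchans = 1 };
	const struct rte_dma_vchan_conf qconf = {
		.direction = RTE_DMA_DIR_MEM_TO_MEM,
		.nb_desc = 1024, /* illustrative ring size */
	};

	if (rte_dma_info_get(dev_id, &info) != 0 || info.max_vchans < 1)
		return -1;
	if (rte_dma_configure(dev_id, &conf) != 0)
		return -1;
	if (rte_dma_vchan_setup(dev_id, vchan, &qconf) < 0)
		return -1;
	return rte_dma_start(dev_id);
}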