Cross-reference hits for the identifier processed_pkts across the f-stack DPDK tree. The number before each fragment is the line number in the listed source file; "in foo()" names the enclosing function, and "(member)" marks a struct member declaration.

/f-stack/dpdk/drivers/baseband/null/bbdev_null.c
     50  struct rte_ring *processed_pkts; /* Ring for processed packets */  (member)
     90  rte_ring_free(q->processed_pkts);  in q_release()
    118  q->processed_pkts = rte_ring_create(ring_name, queue_conf->queue_size,  in q_setup()
    120  if (q->processed_pkts == NULL) {  in q_setup()
    146  uint16_t nb_enqueued = rte_ring_enqueue_burst(q->processed_pkts,  in enqueue_dec_ops()
    161  uint16_t nb_enqueued = rte_ring_enqueue_burst(q->processed_pkts,  in enqueue_enc_ops()
    176  uint16_t nb_dequeued = rte_ring_dequeue_burst(q->processed_pkts,  in dequeue_dec_ops()
    189  uint16_t nb_dequeued = rte_ring_dequeue_burst(q->processed_pkts,  in dequeue_enc_ops()
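Taken together, these hits trace the whole lifecycle of a completion ring inside a bbdev queue: created in q_setup(), fed by the enqueue paths, drained by the dequeue paths, freed in q_release(). A minimal sketch of that lifecycle, assuming a reduced struct my_queue and hypothetical wrapper names (the rte_ring_* calls are the real DPDK API):

    #include <rte_ring.h>
    #include <rte_errno.h>

    struct my_queue {
            struct rte_ring *processed_pkts; /* completed ops parked here */
    };

    static int
    my_q_setup(struct my_queue *q, const char *ring_name,
               unsigned int queue_size, int socket_id)
    {
            /* One producer (enqueue path) and one consumer (dequeue path)
             * per queue, so SP/SC flags are safe; queue_size must be a
             * power of two for rte_ring_create(). */
            q->processed_pkts = rte_ring_create(ring_name, queue_size,
                            socket_id, RING_F_SP_ENQ | RING_F_SC_DEQ);
            if (q->processed_pkts == NULL)
                    return -rte_errno;
            return 0;
    }

    static void
    my_q_release(struct my_queue *q)
    {
            rte_ring_free(q->processed_pkts); /* NULL-safe in DPDK */
            q->processed_pkts = NULL;
    }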
|
/f-stack/dpdk/app/test-eventdev/test_pipeline_atq.c
     32  w->processed_pkts++;  in pipeline_atq_worker_single_stage_tx()
     55  w->processed_pkts++;  in pipeline_atq_worker_single_stage_fwd()
     81  w->processed_pkts += nb_rx;  in pipeline_atq_worker_single_stage_burst_tx()
    110  w->processed_pkts += nb_rx;  in pipeline_atq_worker_single_stage_burst_fwd()
    133  w->processed_pkts++;  in pipeline_atq_worker_multi_stage_tx()
    164  w->processed_pkts++;  in pipeline_atq_worker_multi_stage_fwd()
    197  w->processed_pkts++;  in pipeline_atq_worker_multi_stage_burst_tx()
    231  w->processed_pkts++;  in pipeline_atq_worker_multi_stage_burst_fwd()
|
/f-stack/dpdk/app/test-eventdev/test_pipeline_queue.c
     33  w->processed_pkts++;  in pipeline_queue_worker_single_stage_tx()
     62  w->processed_pkts++;  in pipeline_queue_worker_single_stage_fwd()
     87  w->processed_pkts++;  in pipeline_queue_worker_single_stage_burst_tx()
    124  w->processed_pkts += nb_rx;  in pipeline_queue_worker_single_stage_burst_fwd()
    149  w->processed_pkts++;  in pipeline_queue_worker_multi_stage_tx()
    183  w->processed_pkts++;  in pipeline_queue_worker_multi_stage_fwd()
    217  w->processed_pkts++;  in pipeline_queue_worker_multi_stage_burst_tx()
    257  w->processed_pkts++;  in pipeline_queue_worker_multi_stage_burst_fwd()
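Here processed_pkts is a plain per-worker counter, not a ring: scalar worker variants bump it once per event, burst variants add the whole burst size. Each worker owns its own slot, so no atomics are needed. A minimal sketch of the two increment forms, with the per-worker struct reduced to this single field:

    #include <stdint.h>

    struct worker_data {
            uint64_t processed_pkts; /* per-worker throughput counter */
    };

    /* Scalar worker: one event handled per iteration. */
    static inline void
    count_one(struct worker_data *w)
    {
            w->processed_pkts++;
    }

    /* Burst worker: nb_rx events handled per iteration. */
    static inline void
    count_burst(struct worker_data *w, uint16_t nb_rx)
    {
            w->processed_pkts += nb_rx;
    }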
|
/f-stack/dpdk/app/test-eventdev/test_pipeline_common.c
     18  total += t->worker[i].processed_pkts;  in pipeline_test_result()
     22  t->worker[i].processed_pkts,  in pipeline_test_result()
     23  (((double)t->worker[i].processed_pkts)/total)  in pipeline_test_result()
     42  processed_pkts(struct test_pipeline *t)  (function definition)
     49  total += t->worker[i].processed_pkts;  in processed_pkts()
     88  const uint64_t curr_pkts = processed_pkts(t);  in pipeline_launch_lcores()
    288  w->processed_pkts = 0;  in pipeline_event_port_setup()
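pipeline_test_result() and the processed_pkts() helper both reduce the per-worker counters to one total. A sketch of that reduction, where struct test_ctx and NB_WORKERS are hypothetical stand-ins for the app's test_pipeline bookkeeping:

    #include <stdint.h>

    #define NB_WORKERS 4 /* stand-in for the app's worker count */

    struct worker_data {
            uint64_t processed_pkts;
    };

    struct test_ctx {
            struct worker_data worker[NB_WORKERS];
    };

    /* Sum every worker's counter, mirroring the processed_pkts() helper. */
    static uint64_t
    total_processed_pkts(const struct test_ctx *t)
    {
            uint64_t total = 0;
            unsigned int i;

            for (i = 0; i < NB_WORKERS; i++)
                    total += t->worker[i].processed_pkts;
            return total;
    }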
|
/f-stack/dpdk/app/test-eventdev/test_perf_common.h
     29  uint64_t processed_pkts;  (member)
    100  w->processed_pkts++;  in perf_process_last_stage()
    119  w->processed_pkts++;  in perf_process_last_stage_latency()
|
/f-stack/dpdk/app/test-eventdev/test_perf_common.c
     17  total += t->worker[i].processed_pkts;  in perf_test_result()
     21  t->worker[i].processed_pkts,  in perf_test_result()
     22  (((double)t->worker[i].processed_pkts)/total)  in perf_test_result()
    222  processed_pkts(struct test_perf *t)  (function definition)
    229  total += t->worker[i].processed_pkts;  in processed_pkts()
    298  int64_t remaining = t->outstand_pkts - processed_pkts(t);  in perf_launch_lcores()
    305  const uint64_t pkts = processed_pkts(t);  in perf_launch_lcores()
    339  remaining = t->outstand_pkts - processed_pkts(t);  in perf_launch_lcores()
    478  w->processed_pkts = 0;  in perf_event_dev_port_setup()
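perf_launch_lcores() uses the same reduction to decide when the run is done: it subtracts the aggregate from the number of injected packets and polls until nothing is outstanding. A sketch of that loop, reusing struct test_ctx and total_processed_pkts() from the sketch above; the one-second poll interval is illustrative:

    #include <stdint.h>
    #include <rte_cycles.h>

    /* Spin until every injected packet has been counted as processed. */
    static void
    wait_for_completion(struct test_ctx *t, int64_t outstand_pkts)
    {
            int64_t remaining;

            do {
                    rte_delay_ms(1000); /* give workers time to drain */
                    remaining = outstand_pkts -
                                    (int64_t)total_processed_pkts(t);
            } while (remaining > 0);
    }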
|
/f-stack/dpdk/app/test-eventdev/test_pipeline_common.h
     33  uint64_t processed_pkts;  (member)
|
/f-stack/dpdk/drivers/compress/octeontx/otx_zip_pmd.c
    340  if (qp->processed_pkts)  in zip_pmd_qp_release()
    341  rte_ring_free(qp->processed_pkts);  in zip_pmd_qp_release()
    408  qp->processed_pkts = zip_pmd_qp_create_processed_pkts_ring(qp,  in zip_pmd_qp_setup()
    410  if (qp->processed_pkts == NULL)  in zip_pmd_qp_setup()
    426  if (qp->processed_pkts)  in zip_pmd_qp_setup()
    427  rte_ring_free(qp->processed_pkts);  in zip_pmd_qp_setup()
    508  ret = rte_ring_enqueue(qp->processed_pkts, (void *)op);  in zip_pmd_enqueue_burst_sync()
    529  nb_dequeued = rte_ring_dequeue_burst(qp->processed_pkts,  in zip_pmd_dequeue_burst_sync()
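The *_burst_sync() pair shows the synchronous flavor of the completion-ring pattern, which the zlib and null crypto PMDs below share: each op is processed inline during enqueue and parked on processed_pkts, so dequeue only drains finished ops. A sketch under those assumptions (struct my_qp and process_one_op() are hypothetical; the ring calls are the real API):

    #include <rte_ring.h>

    struct my_qp {
            struct rte_ring *processed_pkts; /* completed ops */
    };

    /* Stand-in for the real compression/crypto job. */
    static void
    process_one_op(void *op)
    {
            (void)op;
    }

    static uint16_t
    my_enqueue_burst_sync(struct my_qp *qp, void **ops, uint16_t nb_ops)
    {
            uint16_t i;

            for (i = 0; i < nb_ops; i++) {
                    process_one_op(ops[i]);
                    /* Park the finished op; stop early if the ring fills. */
                    if (rte_ring_enqueue(qp->processed_pkts, ops[i]) != 0)
                            break;
            }
            return i; /* number of ops accepted */
    }

    static uint16_t
    my_dequeue_burst_sync(struct my_qp *qp, void **ops, uint16_t nb_ops)
    {
            return rte_ring_dequeue_burst(qp->processed_pkts,
                            (void **)ops, nb_ops, NULL);
    }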
|
/f-stack/dpdk/drivers/compress/octeontx/otx_zip.h
    132  struct rte_ring *processed_pkts;  (member)
|
/f-stack/dpdk/drivers/compress/zlib/zlib_pmd_ops.c
    132  rte_ring_free(qp->processed_pkts);  in zlib_pmd_qp_release()
    159  struct rte_ring *r = qp->processed_pkts;  in zlib_pmd_qp_create_processed_pkts_ring()
    200  qp->processed_pkts = zlib_pmd_qp_create_processed_pkts_ring(qp,  in zlib_pmd_qp_setup()
    202  if (qp->processed_pkts == NULL)  in zlib_pmd_qp_setup()
|
/f-stack/dpdk/drivers/compress/zlib/zlib_pmd_private.h
     34  struct rte_ring *processed_pkts;  (member)
|
/f-stack/dpdk/drivers/compress/zlib/zlib_pmd.c
    215  return rte_ring_enqueue(qp->processed_pkts, (void *)op);  in process_zlib_op()
    347  nb_dequeued = rte_ring_dequeue_burst(qp->processed_pkts,  in zlib_pmd_dequeue_burst()
|
/f-stack/dpdk/drivers/crypto/null/null_crypto_pmd_ops.c
    138  if (qp->processed_pkts)  in null_crypto_pmd_qp_release()
    139  rte_ring_free(qp->processed_pkts);  in null_crypto_pmd_qp_release()
    228  qp->processed_pkts = null_crypto_pmd_qp_create_processed_pkts_ring(qp,  in null_crypto_pmd_qp_setup()
    230  if (qp->processed_pkts == NULL) {  in null_crypto_pmd_qp_setup()
|
/f-stack/dpdk/drivers/crypto/null/null_crypto_pmd_private.h
     30  struct rte_ring *processed_pkts;  (member)
|
/f-stack/dpdk/drivers/crypto/null/null_crypto_pmd.c
     69  return rte_ring_enqueue(qp->processed_pkts, (void *)op);  in process_op()
    150  nb_dequeued = rte_ring_dequeue_burst(qp->processed_pkts,  in null_crypto_pmd_dequeue_burst()
|
/f-stack/dpdk/drivers/crypto/aesni_gcm/aesni_gcm_pmd_ops.c
    158  if (qp->processed_pkts)  in aesni_gcm_pmd_qp_release()
    159  rte_ring_free(qp->processed_pkts);  in aesni_gcm_pmd_qp_release()
    232  qp->processed_pkts = aesni_gcm_pmd_qp_create_processed_pkts_ring(qp,  in aesni_gcm_pmd_qp_setup()
    234  if (qp->processed_pkts == NULL)  in aesni_gcm_pmd_qp_setup()
|
/f-stack/dpdk/drivers/crypto/aesni_gcm/aesni_gcm_pmd_private.h
     47  struct rte_ring *processed_pkts;  (member)
|
/f-stack/dpdk/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c
    735  nb_dequeued = rte_ring_dequeue_burst(qp->processed_pkts,  in aesni_gcm_pmd_dequeue_burst()
    770  nb_enqueued = rte_ring_enqueue_burst(qp->processed_pkts,  in aesni_gcm_pmd_enqueue_burst()
|
/f-stack/dpdk/drivers/compress/isal/isal_compress_pmd_ops.c
    178  rte_ring_free(qp->processed_pkts);  in isal_comp_pmd_qp_release()
    284  qp->processed_pkts = isal_comp_pmd_qp_create_processed_pkts_ring(qp,  in isal_comp_pmd_qp_setup()
    286  if (qp->processed_pkts == NULL) {  in isal_comp_pmd_qp_setup()
    292  qp->num_free_elements = rte_ring_free_count(qp->processed_pkts);  in isal_comp_pmd_qp_setup()
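Line 292 is a detail the other drivers lack: isal caches the fresh ring's spare capacity at setup so the fast path can bound its bursts. A sketch of that bookkeeping, with the surrounding struct assumed (num_free_elements matches the driver's field name):

    #include <stdint.h>
    #include <rte_ring.h>

    struct my_comp_qp {
            struct rte_ring *processed_pkts;
            uint32_t num_free_elements; /* room left on the completion ring */
    };

    /* Snapshot the ring's capacity once the queue pair is set up. */
    static void
    cache_ring_capacity(struct my_comp_qp *qp)
    {
            qp->num_free_elements = rte_ring_free_count(qp->processed_pkts);
    }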
|
/f-stack/dpdk/drivers/compress/isal/isal_compress_pmd_private.h
     28  struct rte_ring *processed_pkts;  (member)
|
/f-stack/dpdk/drivers/compress/isal/isal_compress_pmd.c
    651  retval = rte_ring_enqueue_burst(qp->processed_pkts, (void *)ops,  in isal_comp_pmd_enqueue_burst()
    667  nb_dequeued = rte_ring_dequeue_burst(qp->processed_pkts, (void **)ops,  in isal_comp_pmd_dequeue_burst()
|
/f-stack/dpdk/drivers/crypto/ccp/ccp_pmd_private.h
     77  struct rte_ring *processed_pkts;  (member)
|
/f-stack/dpdk/drivers/crypto/ccp/ccp_pmd_ops.c
    639  rte_ring_free(qp->processed_pkts);  in ccp_pmd_qp_release()
    722  qp->processed_pkts = ccp_pmd_qp_create_batch_info_ring(qp,  in ccp_pmd_qp_setup()
    724  if (qp->processed_pkts == NULL) {  in ccp_pmd_qp_setup()
|
/f-stack/dpdk/drivers/crypto/ccp/rte_ccp_pmd.c
    209  if (unlikely(rte_ring_full(qp->processed_pkts) != 0))  in ccp_pmd_enqueue_burst()
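ccp guards its enqueue path with rte_ring_full() instead of checking per op: if the completion ring has no room, the whole burst is refused up front. A minimal sketch of that guard (the helper name is hypothetical):

    #include <rte_ring.h>
    #include <rte_branch_prediction.h>

    /* Returns 0 when the completion ring cannot absorb more ops, in
     * which case the caller reports 0 ops enqueued. */
    static inline int
    completion_ring_has_room(struct rte_ring *processed_pkts)
    {
            if (unlikely(rte_ring_full(processed_pkts) != 0))
                    return 0;
            return 1;
    }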
|
/f-stack/dpdk/drivers/baseband/turbo_sw/bbdev_turbo_software.c
     78  struct rte_ring *processed_pkts;  (member)
    265  rte_ring_free(q->processed_pkts);  in q_release()
    454  q->processed_pkts = rte_ring_create(name, queue_conf->queue_size,  in q_setup()
    456  if (q->processed_pkts == NULL) {  in q_setup()
    469  rte_ring_free(q->processed_pkts);  in q_setup()
   1166  return rte_ring_enqueue_burst(q->processed_pkts, (void **)ops, nb_ops,  in enqueue_enc_all_ops()
   1183  return rte_ring_enqueue_burst(q->processed_pkts, (void **)ops, nb_ops,  in enqueue_ldpc_enc_all_ops()
   1736  return rte_ring_enqueue_burst(q->processed_pkts, (void **)ops, nb_ops,  in enqueue_dec_all_ops()
   1753  return rte_ring_enqueue_burst(q->processed_pkts, (void **)ops, nb_ops,  in enqueue_ldpc_dec_all_ops()
   1833  uint16_t nb_dequeued = rte_ring_dequeue_burst(q->processed_pkts,  in dequeue_dec_ops()
   [all …]
|