
Searched refs:nb_ops (Results 1 – 25 of 72) sorted by relevance

/dpdk/drivers/crypto/scheduler/
scheduler_failover.c 29 for (i = 0; i < nb_ops && i < 4; i++) in failover_worker_enqueue()
33 worker->qp_id, ops, nb_ops); in failover_worker_enqueue()
46 if (unlikely(nb_ops == 0)) in schedule_enqueue()
50 ops, nb_ops); in schedule_enqueue()
52 if (enqueued_ops < nb_ops) in schedule_enqueue()
56 nb_ops - enqueued_ops); in schedule_enqueue()
64 uint16_t nb_ops) in schedule_enqueue_ordering() argument
69 nb_ops); in schedule_enqueue_ordering()
90 worker->qp_id, ops, nb_ops); in schedule_dequeue()
96 if (nb_deq_ops == nb_ops) in schedule_dequeue()
[all …]
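The failover logic visible above reduces to: attempt the whole burst on the primary worker, then push whatever was not accepted to the secondary. A minimal sketch of that pattern, assuming two configured cryptodevs (primary_id and secondary_id are hypothetical names; rte_cryptodev_enqueue_burst() is the real fast-path API):

#include <rte_branch_prediction.h>
#include <rte_cryptodev.h>

static uint16_t
failover_enqueue(uint8_t primary_id, uint8_t secondary_id, uint16_t qp_id,
                 struct rte_crypto_op **ops, uint16_t nb_ops)
{
        uint16_t enqueued;

        if (unlikely(nb_ops == 0))
                return 0;

        /* First try to enqueue the full burst on the primary device. */
        enqueued = rte_cryptodev_enqueue_burst(primary_id, qp_id, ops, nb_ops);

        /* Retry whatever the primary could not accept on the secondary. */
        if (enqueued < nb_ops)
                enqueued += rte_cryptodev_enqueue_burst(secondary_id, qp_id,
                                ops + enqueued, nb_ops - enqueued);

        return enqueued;
}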
scheduler_pkt_size_distr.c 53 if (unlikely(nb_ops == 0)) in schedule_enqueue()
61 for (i = 0; (i < (nb_ops - 8)) && (nb_ops > 8); i += 4) { in schedule_enqueue()
85 i = nb_ops; in schedule_enqueue()
99 i = nb_ops; in schedule_enqueue()
113 i = nb_ops; in schedule_enqueue()
127 i = nb_ops; in schedule_enqueue()
135 for (; i < nb_ops; i++) { in schedule_enqueue()
143 i = nb_ops; in schedule_enqueue()
175 uint16_t nb_ops) in schedule_enqueue_ordering() argument
180 nb_ops); in schedule_enqueue_ordering()
[all …]
scheduler_roundrobin.c 20 schedule_enqueue(void *qp, struct rte_crypto_op **ops, uint16_t nb_ops) in schedule_enqueue() argument
28 if (unlikely(nb_ops == 0)) in schedule_enqueue()
31 for (i = 0; i < nb_ops && i < 4; i++) in schedule_enqueue()
35 worker->qp_id, ops, nb_ops); in schedule_enqueue()
47 uint16_t nb_ops) in schedule_enqueue_ordering() argument
52 nb_ops); in schedule_enqueue_ordering()
63 schedule_dequeue(void *qp, struct rte_crypto_op **ops, uint16_t nb_ops) in schedule_dequeue() argument
88 worker->qp_id, ops, nb_ops); in schedule_dequeue()
102 uint16_t nb_ops) in schedule_dequeue_ordering() argument
107 schedule_dequeue(qp, ops, nb_ops); in schedule_dequeue_ordering()
[all …]
scheduler_pmd_private.h 68 get_max_enqueue_order_count(struct rte_ring *order_ring, uint16_t nb_ops) in get_max_enqueue_order_count() argument
72 return count > nb_ops ? nb_ops : count; in get_max_enqueue_order_count()
77 struct rte_crypto_op **ops, uint16_t nb_ops) in scheduler_order_insert() argument
79 rte_ring_sp_enqueue_burst(order_ring, (void **)ops, nb_ops, NULL); in scheduler_order_insert()
84 struct rte_crypto_op **ops, uint16_t nb_ops) in scheduler_order_drain() argument
90 nb_ops, NULL); in scheduler_order_drain()
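scheduler_pmd_private.h is the one file in this list where the nb_ops handling is fully visible: clamp a burst to the ordering ring's capacity, insert ops in submission order, and drain completed ops. A self-contained sketch of the same clamp-and-burst idea on a plain rte_ring; the count computation is a plausible reconstruction using rte_ring_free_count(), and the status check the real scheduler_order_drain() performs before releasing ops is omitted:

#include <rte_ring.h>
#include <rte_crypto.h>

static inline uint16_t
order_max_enqueue(struct rte_ring *order_ring, uint16_t nb_ops)
{
        uint32_t count = rte_ring_free_count(order_ring);

        /* Never accept more ops than the ordering ring can hold. */
        return count > nb_ops ? nb_ops : count;
}

static inline uint16_t
order_insert(struct rte_ring *order_ring,
             struct rte_crypto_op **ops, uint16_t nb_ops)
{
        /* Single-producer enqueue in submission order. */
        return rte_ring_sp_enqueue_burst(order_ring, (void **)ops,
                        nb_ops, NULL);
}

static inline uint16_t
order_drain(struct rte_ring *order_ring,
            struct rte_crypto_op **ops, uint16_t nb_ops)
{
        /* Single-consumer dequeue of up to nb_ops completed ops. */
        return rte_ring_sc_dequeue_burst(order_ring, (void **)ops,
                        nb_ops, NULL);
}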
scheduler_multicore.c 47 if (unlikely(nb_ops == 0)) in schedule_enqueue()
50 for (i = 0; i < mc_ctx->num_workers && nb_ops != 0; i++) { in schedule_enqueue()
53 (void *)(&ops[processed_ops]), nb_ops, NULL); in schedule_enqueue()
55 nb_ops -= nb_queue_ops; in schedule_enqueue()
68 uint16_t nb_ops) in schedule_enqueue_ordering() argument
73 nb_ops); in schedule_enqueue_ordering()
92 for (i = 0; i < mc_ctx->num_workers && nb_ops != 0; i++) { in schedule_dequeue()
95 (void *)(&ops[processed_ops]), nb_ops, NULL); in schedule_dequeue()
97 nb_ops -= nb_deq_ops; in schedule_dequeue()
111 uint16_t nb_ops) in schedule_dequeue_ordering() argument
[all …]
/dpdk/lib/compressdev/
rte_comp.c 90 struct rte_comp_op **ops, uint16_t nb_ops) in rte_comp_op_raw_bulk_alloc() argument
92 if (rte_mempool_get_bulk(mempool, (void **)ops, nb_ops) == 0) in rte_comp_op_raw_bulk_alloc()
93 return nb_ops; in rte_comp_op_raw_bulk_alloc()
186 struct rte_comp_op **ops, uint16_t nb_ops) in rte_comp_op_bulk_alloc() argument
191 retval = rte_comp_op_raw_bulk_alloc(mempool, ops, nb_ops); in rte_comp_op_bulk_alloc()
192 if (unlikely(retval != nb_ops)) in rte_comp_op_bulk_alloc()
195 for (i = 0; i < nb_ops; i++) in rte_comp_op_bulk_alloc()
198 return nb_ops; in rte_comp_op_bulk_alloc()
217 rte_comp_op_bulk_free(struct rte_comp_op **ops, uint16_t nb_ops) in rte_comp_op_bulk_free() argument
221 for (i = 0; i < nb_ops; i++) { in rte_comp_op_bulk_free()
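Together, rte_comp_op_raw_bulk_alloc() and rte_comp_op_bulk_alloc() give all-or-nothing allocation: either all nb_ops come out of the mempool, or none do. A minimal usage sketch, assuming op_pool was created with rte_comp_op_pool_create() (the burst size of 32 is arbitrary):

#include <rte_comp.h>

#define BURST 32

static int
alloc_then_free(struct rte_mempool *op_pool)
{
        struct rte_comp_op *ops[BURST];

        /* All-or-nothing: any return other than BURST means no ops were taken. */
        if (rte_comp_op_bulk_alloc(op_pool, ops, BURST) != BURST)
                return -1;

        /* ... attach src/dst mbufs and enqueue to a compressdev here ... */

        rte_comp_op_bulk_free(ops, BURST);
        return 0;
}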
/dpdk/lib/cryptodev/
rte_crypto.h 239 struct rte_crypto_op **ops, uint16_t nb_ops) in __rte_crypto_op_raw_bulk_alloc() argument
248 if (rte_mempool_get_bulk(mempool, (void **)ops, nb_ops) == 0) in __rte_crypto_op_raw_bulk_alloc()
249 return nb_ops; in __rte_crypto_op_raw_bulk_alloc()
297 struct rte_crypto_op **ops, uint16_t nb_ops) in rte_crypto_op_bulk_alloc() argument
301 if (unlikely(__rte_crypto_op_raw_bulk_alloc(mempool, type, ops, nb_ops) in rte_crypto_op_bulk_alloc()
302 != nb_ops)) in rte_crypto_op_bulk_alloc()
305 for (i = 0; i < nb_ops; i++) in rte_crypto_op_bulk_alloc()
308 return nb_ops; in rte_crypto_op_bulk_alloc()
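The cryptodev side follows the same all-or-nothing contract, with the op type stamped on each op after the raw bulk get. A minimal sketch, assuming op_pool came from rte_crypto_op_pool_create() with type RTE_CRYPTO_OP_TYPE_SYMMETRIC:

#include <rte_crypto.h>

#define BURST 32

static uint16_t
alloc_sym_ops(struct rte_mempool *op_pool, struct rte_crypto_op **ops)
{
        /* Returns BURST on success, 0 if the pool cannot satisfy the burst. */
        return rte_crypto_op_bulk_alloc(op_pool,
                        RTE_CRYPTO_OP_TYPE_SYMMETRIC, ops, BURST);
}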
rte_cryptodev_trace_fp.h 17 uint16_t nb_ops),
21 rte_trace_point_emit_u16(nb_ops);
27 uint16_t nb_ops),
31 rte_trace_point_emit_u16(nb_ops);
rte_cryptodev.h 567 struct rte_crypto_op **ops, uint16_t nb_ops, void *user_param);
1854 struct rte_crypto_op **ops, uint16_t nb_ops) in rte_cryptodev_dequeue_burst() argument
1859 rte_cryptodev_trace_dequeue_burst(dev_id, qp_id, (void **)ops, nb_ops); in rte_cryptodev_dequeue_burst()
1864 nb_ops = fp_ops->dequeue_burst(qp, ops, nb_ops); in rte_cryptodev_dequeue_burst()
1882 nb_ops = cb->fn(dev_id, qp_id, ops, nb_ops, in rte_cryptodev_dequeue_burst()
1890 return nb_ops; in rte_cryptodev_dequeue_burst()
1926 struct rte_crypto_op **ops, uint16_t nb_ops) in rte_cryptodev_enqueue_burst() argument
1949 nb_ops = cb->fn(dev_id, qp_id, ops, nb_ops, in rte_cryptodev_enqueue_burst()
1958 rte_cryptodev_trace_enqueue_burst(dev_id, qp_id, (void **)ops, nb_ops); in rte_cryptodev_enqueue_burst()
1959 return fp_ops->enqueue_burst(qp, ops, nb_ops); in rte_cryptodev_enqueue_burst()
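rte_cryptodev_enqueue_burst() and rte_cryptodev_dequeue_burst() are the two fast-path entry points through which every nb_ops in the driver files below ultimately flows. Both may process fewer ops than requested, so callers loop on the return value. A minimal sketch of that canonical loop (op setup and error handling elided):

#include <rte_cryptodev.h>

static void
run_burst(uint8_t dev_id, uint16_t qp_id,
          struct rte_crypto_op **ops, uint16_t nb_ops)
{
        uint16_t enq = 0, deq = 0;

        /* The PMD may accept only part of the burst; retry the rest. */
        while (enq < nb_ops)
                enq += rte_cryptodev_enqueue_burst(dev_id, qp_id,
                                ops + enq, nb_ops - enq);

        /* Completions may likewise arrive over several polls. */
        while (deq < nb_ops)
                deq += rte_cryptodev_dequeue_burst(dev_id, qp_id,
                                ops + deq, nb_ops - deq);
}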
rte_cryptodev_core.h 21 struct rte_crypto_op **ops, uint16_t nb_ops);
25 struct rte_crypto_op **ops, uint16_t nb_ops);
/dpdk/drivers/crypto/ccp/
rte_ccp_pmd.c 93 uint16_t nb_ops) in ccp_pmd_enqueue_burst() argument
100 uint16_t tmp_ops = nb_ops, b_idx, cur_ops = 0; in ccp_pmd_enqueue_burst()
102 if (nb_ops == 0) in ccp_pmd_enqueue_burst()
108 cur_ops = nb_ops / cryptodev_cnt + (nb_ops)%cryptodev_cnt; in ccp_pmd_enqueue_burst()
112 b_idx = nb_ops - tmp_ops; in ccp_pmd_enqueue_burst()
136 nb_ops, slots_req, b_idx); in ccp_pmd_enqueue_burst()
146 uint16_t nb_ops) in ccp_pmd_dequeue_burst() argument
151 nb_dequeued = process_ops_to_dequeue(qp, ops, nb_ops, &total_nb_ops); in ccp_pmd_dequeue_burst()
156 ops, nb_ops, &total_nb_ops); in ccp_pmd_dequeue_burst()
/dpdk/drivers/baseband/null/
bbdev_null.c 149 struct rte_bbdev_dec_op **ops, uint16_t nb_ops) in enqueue_dec_ops() argument
153 (void **)ops, nb_ops, NULL); in enqueue_dec_ops()
155 q_data->queue_stats.enqueue_err_count += nb_ops - nb_enqueued; in enqueue_dec_ops()
164 struct rte_bbdev_enc_op **ops, uint16_t nb_ops) in enqueue_enc_ops() argument
168 (void **)ops, nb_ops, NULL); in enqueue_enc_ops()
170 q_data->queue_stats.enqueue_err_count += nb_ops - nb_enqueued; in enqueue_enc_ops()
179 struct rte_bbdev_dec_op **ops, uint16_t nb_ops) in dequeue_dec_ops() argument
183 (void **)ops, nb_ops, NULL); in dequeue_dec_ops()
192 struct rte_bbdev_enc_op **ops, uint16_t nb_ops) in dequeue_enc_ops() argument
196 (void **)ops, nb_ops, NULL); in dequeue_enc_ops()
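bbdev_null forwards each burst straight to an rte_ring and counts the shortfall as enqueue errors, the simplest possible nb_ops contract: accept what fits, report the rest via stats. A sketch of that accounting, assuming a queue backed by a plain rte_ring (err_count is a hypothetical stand-in for queue_stats.enqueue_err_count):

#include <rte_ring.h>

static uint16_t
ring_enqueue_with_stats(struct rte_ring *q, void **ops, uint16_t nb_ops,
                        uint64_t *err_count)
{
        uint16_t nb_enqueued;

        /* Burst enqueue may accept fewer than nb_ops on a full ring. */
        nb_enqueued = rte_ring_enqueue_burst(q, ops, nb_ops, NULL);

        /* Ops that did not fit count as enqueue errors. */
        *err_count += nb_ops - nb_enqueued;
        return nb_enqueued;
}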
/dpdk/drivers/crypto/bcmfs/
bcmfs_sym_pmd.c 248 uint16_t nb_ops) in bcmfs_sym_pmd_enqueue_op_burst() argument
257 if (nb_ops == 0) in bcmfs_sym_pmd_enqueue_op_burst()
260 if (nb_ops > BCMFS_MAX_REQS_BUFF) in bcmfs_sym_pmd_enqueue_op_burst()
261 nb_ops = BCMFS_MAX_REQS_BUFF; in bcmfs_sym_pmd_enqueue_op_burst()
264 if (nb_ops > (qp->nb_descriptors - qp->nb_pending_requests)) in bcmfs_sym_pmd_enqueue_op_burst()
265 nb_ops = qp->nb_descriptors - qp->nb_pending_requests; in bcmfs_sym_pmd_enqueue_op_burst()
267 for (i = 0; i < nb_ops; i++) { in bcmfs_sym_pmd_enqueue_op_burst()
316 uint16_t nb_ops) in bcmfs_sym_pmd_dequeue_op_burst() argument
324 if (nb_ops > BCMFS_MAX_REQS_BUFF) in bcmfs_sym_pmd_dequeue_op_burst()
325 nb_ops = BCMFS_MAX_REQS_BUFF; in bcmfs_sym_pmd_dequeue_op_burst()
[all …]
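The bcmfs enqueue path shows the other common nb_ops convention: silently clamp the burst to what the hardware can take and let the caller notice via the return value. The same clamp in isolation (fake_qp and MAX_REQS are hypothetical stand-ins for the driver's queue-pair struct and BCMFS_MAX_REQS_BUFF):

#include <stdint.h>

#define MAX_REQS 64

struct fake_qp {
        uint16_t nb_descriptors;
        uint16_t nb_pending_requests;
};

static uint16_t
clamp_burst(const struct fake_qp *qp, uint16_t nb_ops)
{
        /* Cap at the driver's static request buffer... */
        if (nb_ops > MAX_REQS)
                nb_ops = MAX_REQS;

        /* ...and at the free descriptors left on the queue pair. */
        if (nb_ops > qp->nb_descriptors - qp->nb_pending_requests)
                nb_ops = qp->nb_descriptors - qp->nb_pending_requests;

        return nb_ops;
}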
bcmfs_qp.h 119 uint16_t nb_ops);
127 bcmfs_enqueue_op_burst(void *qp, void **ops, uint16_t nb_ops);
129 bcmfs_dequeue_op_burst(void *qp, void **ops, uint16_t nb_ops);
/dpdk/drivers/crypto/cnxk/
cn10k_cryptodev_ops.c 189 cn10k_cpt_enqueue_burst(void *qptr, struct rte_crypto_op **ops, uint16_t nb_ops) in cn10k_cpt_enqueue_burst() argument
207 nb_ops = RTE_MIN(nb_ops, nb_allowed); in cn10k_cpt_enqueue_burst()
209 if (unlikely(nb_ops == 0)) in cn10k_cpt_enqueue_burst()
219 for (i = 0; i < RTE_MIN(PKTS_PER_LOOP, nb_ops); i++) { in cn10k_cpt_enqueue_burst()
250 if (nb_ops - i > 0 && i == PKTS_PER_LOOP) { in cn10k_cpt_enqueue_burst()
251 nb_ops -= i; in cn10k_cpt_enqueue_burst()
497 cn10k_cpt_dequeue_burst(void *qptr, struct rte_crypto_op **ops, uint16_t nb_ops) in cn10k_cpt_dequeue_burst() argument
513 nb_ops = RTE_MIN(nb_ops, infl_cnt); in cn10k_cpt_dequeue_burst()
518 for (i = 0; i < nb_ops; i++) { in cn10k_cpt_dequeue_burst()
cn9k_cryptodev_ops.c 213 cn9k_cpt_enqueue_burst(void *qptr, struct rte_crypto_op **ops, uint16_t nb_ops) in cn9k_cpt_enqueue_burst() argument
245 nb_ops = RTE_MIN(nb_ops, nb_allowed); in cn9k_cpt_enqueue_burst()
247 if (unlikely(nb_ops & 1)) { in cn9k_cpt_enqueue_burst()
267 while (count < nb_ops) { in cn9k_cpt_enqueue_burst()
519 cn9k_cpt_dequeue_burst(void *qptr, struct rte_crypto_op **ops, uint16_t nb_ops) in cn9k_cpt_dequeue_burst() argument
535 nb_ops = RTE_MIN(nb_ops, infl_cnt); in cn9k_cpt_dequeue_burst()
540 for (i = 0; i < nb_ops; i++) { in cn9k_cpt_dequeue_burst()
/dpdk/app/test-crypto-perf/
cperf_ops.c 26 for (i = 0; i < nb_ops; i++) { in cperf_set_ops_asym()
67 for (i = 0; i < nb_ops; i++) { in cperf_set_ops_security()
147 for (i = 0; i < nb_ops; i++) { in cperf_set_ops_security_ipsec()
173 for (i = 0; i < nb_ops; i++) { in cperf_set_ops_security_ipsec()
199 for (i = 0; i < nb_ops; i++) { in cperf_set_ops_null_cipher()
239 for (i = 0; i < nb_ops; i++) { in cperf_set_ops_null_auth()
279 for (i = 0; i < nb_ops; i++) { in cperf_set_ops_cipher()
312 for (i = 0; i < nb_ops; i++) { in cperf_set_ops_cipher()
336 for (i = 0; i < nb_ops; i++) { in cperf_set_ops_auth()
437 for (i = 0; i < nb_ops; i++) { in cperf_set_ops_cipher_auth()
[all …]
/dpdk/drivers/crypto/mvsam/
rte_mrvl_pmd.c 866 uint16_t nb_ops) in mrvl_crypto_pmd_enqueue_burst() argument
876 uint16_t indx_map_crp[nb_ops]; in mrvl_crypto_pmd_enqueue_burst()
877 uint16_t indx_map_sec[nb_ops]; in mrvl_crypto_pmd_enqueue_burst()
886 if (nb_ops == 0) in mrvl_crypto_pmd_enqueue_burst()
895 for (; iter_ops < nb_ops; ++iter_ops) { in mrvl_crypto_pmd_enqueue_burst()
996 uint16_t nb_ops) in mrvl_crypto_pmd_dequeue_burst() argument
1008 qp->stats.dequeue_err_count += nb_ops; in mrvl_crypto_pmd_dequeue_burst()
1011 qp->stats.dequeued_count += nb_ops; in mrvl_crypto_pmd_dequeue_burst()
1017 for (i = 0; i < nb_ops; ++i) { in mrvl_crypto_pmd_dequeue_burst()
1046 qp->stats.dequeued_count += nb_ops; in mrvl_crypto_pmd_dequeue_burst()
[all …]
/dpdk/drivers/baseband/turbo_sw/
bbdev_turbo_software.c 1172 for (i = 0; i < nb_ops; ++i) in enqueue_enc_all_ops()
1189 for (i = 0; i < nb_ops; ++i) in enqueue_ldpc_enc_all_ops()
1750 for (i = 0; i < nb_ops; ++i) in enqueue_dec_all_ops()
1767 for (i = 0; i < nb_ops; ++i) in enqueue_ldpc_dec_all_ops()
1777 struct rte_bbdev_enc_op **ops, uint16_t nb_ops) in enqueue_enc_ops() argument
1794 struct rte_bbdev_enc_op **ops, uint16_t nb_ops) in enqueue_ldpc_enc_ops() argument
1801 q, ops, nb_ops, &q_data->queue_stats); in enqueue_ldpc_enc_ops()
1847 struct rte_bbdev_dec_op **ops, uint16_t nb_ops) in dequeue_dec_ops() argument
1851 (void **)ops, nb_ops, NULL); in dequeue_dec_ops()
1860 struct rte_bbdev_enc_op **ops, uint16_t nb_ops) in dequeue_enc_ops() argument
[all …]
/dpdk/drivers/crypto/octeontx/
otx_cryptodev_ops.c 625 otx_cpt_pkt_enqueue(void *qptr, struct rte_crypto_op **ops, uint16_t nb_ops, in otx_cpt_pkt_enqueue() argument
636 if (nb_ops > free_slots) in otx_cpt_pkt_enqueue()
637 nb_ops = free_slots; in otx_cpt_pkt_enqueue()
640 while (likely(count < nb_ops)) { in otx_cpt_pkt_enqueue()
662 return otx_cpt_pkt_enqueue(qptr, ops, nb_ops, OP_TYPE_ASYM); in otx_cpt_enqueue_asym()
666 otx_cpt_enqueue_sym(void *qptr, struct rte_crypto_op **ops, uint16_t nb_ops) in otx_cpt_enqueue_sym() argument
668 return otx_cpt_pkt_enqueue(qptr, ops, nb_ops, OP_TYPE_SYM); in otx_cpt_enqueue_sym()
940 uint8_t cc[nb_ops]; in otx_cpt_pkt_dequeue()
951 count = (nb_ops > pcount) ? pcount : nb_ops; in otx_cpt_pkt_dequeue()
990 return otx_cpt_pkt_dequeue(qptr, ops, nb_ops, OP_TYPE_ASYM); in otx_cpt_dequeue_asym()
[all …]
/dpdk/drivers/regex/mlx5/
mlx5_regex_fastpath.c 273 size_t nb_ops) in prep_regex_umr_wqe_set() argument
277 size_t left_ops = nb_ops; in prep_regex_umr_wqe_set()
286 left_ops = nb_ops; in prep_regex_umr_wqe_set()
371 struct rte_regex_ops **ops, uint16_t nb_ops) in mlx5_regexdev_enqueue_gga() argument
376 size_t hw_qpid, nb_left = nb_ops, nb_desc; in mlx5_regexdev_enqueue_gga()
397 nb_ops -= nb_left; in mlx5_regexdev_enqueue_gga()
398 queue->pi += nb_ops; in mlx5_regexdev_enqueue_gga()
399 return nb_ops; in mlx5_regexdev_enqueue_gga()
405 struct rte_regex_ops **ops, uint16_t nb_ops) in mlx5_regexdev_enqueue() argument
421 if (unlikely(i == nb_ops)) { in mlx5_regexdev_enqueue()
[all …]
mlx5_regex.h 92 struct rte_regex_ops **ops, uint16_t nb_ops);
94 struct rte_regex_ops **ops, uint16_t nb_ops);
96 struct rte_regex_ops **ops, uint16_t nb_ops);
/dpdk/drivers/common/qat/
qat_qp.c 551 void **ops, uint16_t nb_ops) in qat_enqueue_op_burst() argument
557 uint16_t nb_ops_possible = nb_ops; in qat_enqueue_op_burst()
561 if (unlikely(nb_ops == 0)) in qat_enqueue_op_burst()
584 if ((inflights + nb_ops) > tmp_qp->max_inflights) { in qat_enqueue_op_burst()
637 qat_enqueue_comp_op_burst(void *qp, void **ops, uint16_t nb_ops) in qat_enqueue_comp_op_burst() argument
643 uint16_t nb_ops_possible = nb_ops; in qat_enqueue_comp_op_burst()
651 if (unlikely(nb_ops == 0)) in qat_enqueue_comp_op_burst()
675 overflow = (inflights + nb_ops) - tmp_qp->max_inflights; in qat_enqueue_comp_op_burst()
677 nb_ops_possible = nb_ops - overflow; in qat_enqueue_comp_op_burst()
704 nb_ops, nb_remaining_descriptors); in qat_enqueue_comp_op_burst()
[all …]
/dpdk/lib/vhost/
rte_vhost_crypto.h 116 struct rte_crypto_op **ops, uint16_t nb_ops);
137 uint16_t nb_ops, int *callfds, uint16_t *nb_callfds);
/dpdk/drivers/crypto/null/
null_crypto_pmd.c 113 uint16_t nb_ops) in null_crypto_pmd_enqueue_burst() argument
120 for (i = 0; i < nb_ops; i++) { in null_crypto_pmd_enqueue_burst()
144 uint16_t nb_ops) in null_crypto_pmd_dequeue_burst() argument
151 (void **)ops, nb_ops, NULL); in null_crypto_pmd_dequeue_burst()
