| /f-stack/dpdk/drivers/crypto/bcmfs/ |
| bcmfs_qp.c |
|   109  queue_size_bytes = qp_conf->nb_descriptors *  in bcmfs_queue_create()
|   201  uint32_t nb_descriptors = qp_conf->nb_descriptors;  in bcmfs_qp_setup() local
|   205  if (nb_descriptors < FS_RM_MIN_REQS) {  in bcmfs_qp_setup()
|   207  nb_descriptors);  in bcmfs_qp_setup()
|   211  if (nb_descriptors > FS_RM_MAX_REQS)  in bcmfs_qp_setup()
|   212  nb_descriptors = FS_RM_MAX_REQS;  in bcmfs_qp_setup()
|   229  qp->nb_descriptors = nb_descriptors;  in bcmfs_qp_setup()
|   252  bmp_size = rte_bitmap_get_memory_footprint(nb_descriptors);  in bcmfs_qp_setup()
|   263  qp->ctx_bmp = rte_bitmap_init(nb_descriptors, qp->ctx_bmp_mem,  in bcmfs_qp_setup()
|   271  for (i = 0; i < nb_descriptors; i++)  in bcmfs_qp_setup()
|   [all …]
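The matches above show bcmfs_qp_setup() clamping the requested depth to the ring manager's limits and then sizing one context-tracking bit per descriptor. A condensed sketch of that pattern; the FS_RM_* values below are placeholders, the real limits and the qp structure live in the bcmfs headers:

#include <stdint.h>

#include <rte_bitmap.h>
#include <rte_common.h>
#include <rte_malloc.h>

/* Placeholder limits; the real FS_RM_MIN_REQS/FS_RM_MAX_REQS come from the
 * bcmfs ring-manager headers. */
#define FS_RM_MIN_REQS 32
#define FS_RM_MAX_REQS 1024

/* Clamp the requested depth, then allocate a bitmap with one bit per
 * descriptor to track in-flight contexts. */
static struct rte_bitmap *
ctx_bitmap_create(uint32_t *nb_descriptors, int socket_id)
{
	uint32_t bmp_size;
	void *bmp_mem;
	struct rte_bitmap *bmp;

	if (*nb_descriptors < FS_RM_MIN_REQS)
		return NULL;			/* too shallow: reject */
	if (*nb_descriptors > FS_RM_MAX_REQS)
		*nb_descriptors = FS_RM_MAX_REQS;	/* too deep: cap */

	bmp_size = rte_bitmap_get_memory_footprint(*nb_descriptors);
	bmp_mem = rte_zmalloc_socket("ctx_bmp", bmp_size,
				     RTE_CACHE_LINE_SIZE, socket_id);
	if (bmp_mem == NULL)
		return NULL;

	bmp = rte_bitmap_init(*nb_descriptors, bmp_mem, bmp_size);
	if (bmp == NULL)
		rte_free(bmp_mem);
	return bmp;
}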
|
| bcmfs_qp.h |
|   51  uint16_t nb_descriptors;  member
|   89  uint32_t nb_descriptors;  member
|
| bcmfs_sym_pmd.c |
|   199  bcmfs_qp_conf.nb_descriptors = qp_conf->nb_descriptors;  in bcmfs_sym_qp_setup()
|   211  qp->sr_mp = bcmfs_sym_req_pool_create(cdev, qp_conf->nb_descriptors,  in bcmfs_sym_qp_setup()
|   264  if (nb_ops > (qp->nb_descriptors - qp->nb_pending_requests))  in bcmfs_sym_pmd_enqueue_op_burst()
|   265  nb_ops = qp->nb_descriptors - qp->nb_pending_requests;  in bcmfs_sym_pmd_enqueue_op_burst()
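The last two matches are the enqueue-side back-pressure check: a burst is trimmed so that no more operations are in flight than there are descriptors. A minimal sketch, with an illustrative queue-pair struct standing in for the driver's own:

#include <stdint.h>

/* Illustrative bookkeeping; the real fields live in the bcmfs qp struct. */
struct qp_state {
	uint32_t nb_descriptors;	/* ring depth fixed at setup time */
	uint32_t nb_pending_requests;	/* enqueued but not yet dequeued */
};

/* Trim a burst so in-flight requests never exceed the ring depth, as
 * bcmfs_sym_pmd_enqueue_op_burst() does above. */
static uint16_t
clamp_burst(const struct qp_state *qp, uint16_t nb_ops)
{
	uint32_t room = qp->nb_descriptors - qp->nb_pending_requests;

	if (nb_ops > room)
		nb_ops = (uint16_t)room;
	return nb_ops;
}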
|
| /f-stack/dpdk/drivers/common/qat/ |
| qat_qp.c |
|   204  if ((qat_qp_conf->nb_descriptors > ADF_MAX_DESC) ||  in qat_qp_setup()
|   205  (qat_qp_conf->nb_descriptors < ADF_MIN_DESC)) {  in qat_qp_setup()
|   207  qat_qp_conf->nb_descriptors);  in qat_qp_setup()
|   225  qp->nb_descriptors = qat_qp_conf->nb_descriptors;  in qat_qp_setup()
|   227  qat_qp_conf->nb_descriptors * sizeof(*qp->op_cookies),  in qat_qp_setup()
|   275  qp->nb_descriptors,  in qat_qp_setup()
|   286  for (i = 0; i < qp->nb_descriptors; i++) {  in qat_qp_setup()
|   336  for (i = 0; i < qp->nb_descriptors; i++)  in qat_qp_release()
|   424  if (adf_verify_queue_size(desc_size, qp_conf->nb_descriptors,  in qat_queue_create()
|   456  qp_conf->nb_descriptors, desc_size,  in qat_queue_create()
|   [all …]
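qat_qp_setup() rejects depths outside the ADF hardware limits and then allocates one op cookie per descriptor, while qat_queue_create() additionally checks the resulting queue size via adf_verify_queue_size(). A sketch of the bound check plus cookie array; the ADF_* values below are illustrative, the real ones come from the QAT transport headers:

#include <stddef.h>
#include <stdint.h>

#include <rte_common.h>
#include <rte_malloc.h>

/* Illustrative bounds; see the QAT adf headers for the real limits. */
#define ADF_MIN_DESC 128
#define ADF_MAX_DESC 4096

/* Reject out-of-range depths, then allocate one opaque cookie slot per
 * descriptor, following the qat_qp_setup() matches above. */
static void **
op_cookies_create(uint32_t nb_descriptors, int socket_id)
{
	void **cookies;
	uint32_t i;

	if (nb_descriptors < ADF_MIN_DESC || nb_descriptors > ADF_MAX_DESC)
		return NULL;

	cookies = rte_zmalloc_socket("qp_op_cookies",
				     nb_descriptors * sizeof(*cookies),
				     RTE_CACHE_LINE_SIZE, socket_id);
	if (cookies == NULL)
		return NULL;

	for (i = 0; i < nb_descriptors; i++)
		cookies[i] = NULL;	/* later points at per-request state */
	return cookies;
}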
|
| qat_qp.h |
|   34  uint32_t nb_descriptors;  member
|   69  uint32_t nb_descriptors;  member
|
| /f-stack/dpdk/drivers/crypto/nitrox/ |
| nitrox_qp.c |
|   72  uint32_t nb_descriptors, uint8_t instr_size, int socket_id)  in nitrox_qp_setup() argument
|   77  count = rte_align32pow2(nb_descriptors);  in nitrox_qp_setup()
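nitrox_qp_setup() does not use the requested depth directly; it rounds it up to the next power of two with rte_align32pow2(), so a request for 1000 descriptors yields a 1024-entry command queue. A one-line sketch of that rounding:

#include <stdint.h>

#include <rte_common.h>

/* Power-of-two command-queue depth, e.g. rte_align32pow2(1000) == 1024. */
static uint32_t
cmdq_depth(uint32_t nb_descriptors)
{
	return rte_align32pow2(nb_descriptors);
}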
|
| nitrox_qp.h | 100  const char *dev_name, uint32_t nb_descriptors,
|
| /f-stack/dpdk/app/test-crypto-perf/ |
| cperf_test_pmd_cyclecount.c |
|   106  options->nb_descriptors;  in cperf_pmd_cyclecount_test_constructor()
|   156  RTE_MIN(state->opts->nb_descriptors, iter_ops_left);  in pmd_cyclecount_bench_ops()
|   320  cur_op += state->opts->nb_descriptors) {  in pmd_cyclecount_bench_burst_sz()
|   344  state->opts->nb_descriptors, iter_ops_left);  in pmd_cyclecount_bench_burst_sz()
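The cycle-count benchmark walks its operation set in chunks of at most nb_descriptors, so the device queue is never oversubscribed within one iteration. A bare-bones version of that batching loop, with the enqueue/dequeue work elided:

#include <stdint.h>

#include <rte_common.h>	/* RTE_MIN */

/* Process total_ops in bursts bounded by the descriptor count, the same
 * RTE_MIN() bound visible in pmd_cyclecount_bench_ops() above. */
static void
bench_iteration(uint32_t nb_descriptors, uint32_t total_ops)
{
	uint32_t done = 0;

	while (done < total_ops) {
		uint32_t iter_ops_left = total_ops - done;
		uint32_t burst = RTE_MIN(nb_descriptors, iter_ops_left);

		/* build, enqueue and drain 'burst' operations here */
		done += burst;
	}
}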
|
| cperf_options.h | 97  uint32_t nb_descriptors;  member
|
| cperf_options_parsing.c |
|   402   int ret = parse_uint32_t(&opts->nb_descriptors, arg);  in parse_desc_nb()
|   409   if (opts->nb_descriptors == 0) {  in parse_desc_nb()
|   864   opts->nb_descriptors = 2048;  in cperf_options_default()
|   1179  options->pool_sz < options->nb_descriptors) {  in cperf_options_check()
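Taken together, these matches describe the --desc-nb option: it is parsed as an unsigned 32-bit value, zero is rejected, the default is 2048, and the final option check requires the operation pool to be at least as deep as the descriptor ring. A condensed sketch with stand-in names for the cperf struct and helpers:

#include <stdint.h>

/* Stand-in for the cperf option fields used by the matches above. */
struct desc_opts {
	uint32_t nb_descriptors;	/* --desc-nb, default 2048 */
	uint32_t pool_sz;		/* crypto-op pool size */
};

static void
desc_opts_default(struct desc_opts *opts)
{
	opts->nb_descriptors = 2048;
}

static int
desc_opts_check(const struct desc_opts *opts)
{
	if (opts->nb_descriptors == 0)
		return -1;	/* parse_desc_nb() rejects a zero depth */
	if (opts->pool_sz < opts->nb_descriptors)
		return -1;	/* pool must cover the descriptor ring */
	return 0;
}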
|
| main.c | 221  .nb_descriptors = opts->nb_descriptors  in cperf_initialize_cryptodev()
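This is where the option reaches the public API: the value is copied into rte_cryptodev_qp_conf.nb_descriptors before rte_cryptodev_queue_pair_setup() is called. A minimal application-side sketch, assuming the rte_cryptodev_qp_conf layout of this DPDK snapshot; the single session_pool parameter is a placeholder, a real application creates separate session and private-data mempools:

#include <stdint.h>

#include <rte_cryptodev.h>
#include <rte_mempool.h>

/* Hand the requested ring depth to the cryptodev layer for one queue pair. */
static int
setup_one_qp(uint8_t dev_id, uint16_t qp_id, uint32_t nb_descriptors,
	     struct rte_mempool *session_pool, int socket_id)
{
	struct rte_cryptodev_qp_conf qp_conf = {
		.nb_descriptors = nb_descriptors,
		.mp_session = session_pool,
		.mp_session_private = session_pool,	/* placeholder pool */
	};

	return rte_cryptodev_queue_pair_setup(dev_id, qp_id, &qp_conf,
					      socket_id);
}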
|
| /f-stack/dpdk/drivers/crypto/qat/ |
| qat_asym_pmd.c |
|   149  qat_qp_conf.nb_descriptors = qp_conf->nb_descriptors;  in qat_asym_qp_setup()
|   164  for (i = 0; i < qp->nb_descriptors; i++) {  in qat_asym_qp_setup()
|
| qat_sym_pmd.c |
|   185  qat_qp_conf.nb_descriptors = qp_conf->nb_descriptors;  in qat_sym_qp_setup()
|   200  for (i = 0; i < qp->nb_descriptors; i++) {  in qat_sym_qp_setup()
|
| /f-stack/dpdk/lib/librte_cryptodev/ |
| rte_cryptodev_trace.h | 57  rte_trace_point_emit_u32(conf->nb_descriptors);
|
| /f-stack/dpdk/examples/ip_pipeline/ |
| cryptodev.c | 102  queue_conf.nb_descriptors = params->queue_size;  in cryptodev_create()
|
| /f-stack/dpdk/drivers/net/softnic/ |
| rte_eth_softnic_cryptodev.c | 111  queue_conf.nb_descriptors = params->queue_size;  in softnic_cryptodev_create()
|
| /f-stack/dpdk/drivers/compress/qat/ |
| qat_comp_pmd.c |
|   85   for (i = 0; i < qp->nb_descriptors; i++) {  in qat_comp_qp_release()
|   128  qat_qp_conf.nb_descriptors = max_inflight_ops;  in qat_comp_qp_setup()
|   143  for (i = 0; i < qp->nb_descriptors; i++) {  in qat_comp_qp_setup()
|
| /f-stack/dpdk/drivers/crypto/ccp/ |
| ccp_pmd_ops.c |
|   723  qp_conf->nb_descriptors, socket_id);  in ccp_pmd_qp_setup()
|   735  qp_conf->nb_descriptors,  in ccp_pmd_qp_setup()
|
| /f-stack/dpdk/drivers/crypto/armv8/ |
| rte_armv8_pmd_ops.c | 242  qp_conf->nb_descriptors, socket_id);  in armv8_crypto_pmd_qp_setup()
|
| /f-stack/dpdk/drivers/crypto/kasumi/ |
| rte_kasumi_pmd_ops.c | 217  qp_conf->nb_descriptors, socket_id);  in kasumi_pmd_qp_setup()
|
| /f-stack/dpdk/drivers/crypto/zuc/ |
| rte_zuc_pmd_ops.c | 223  qp_conf->nb_descriptors, socket_id);  in zuc_pmd_qp_setup()
|
| /f-stack/dpdk/drivers/crypto/snow3g/ |
| rte_snow3g_pmd_ops.c | 223  qp_conf->nb_descriptors, socket_id);  in snow3g_pmd_qp_setup()
|
| /f-stack/dpdk/drivers/crypto/null/ |
| null_crypto_pmd_ops.c | 229  qp_conf->nb_descriptors, socket_id);  in null_crypto_pmd_qp_setup()
|
| /f-stack/dpdk/drivers/crypto/aesni_gcm/ |
| aesni_gcm_pmd_ops.c | 233  qp_conf->nb_descriptors, socket_id);  in aesni_gcm_pmd_qp_setup()
|
| /f-stack/dpdk/drivers/crypto/octeontx/ |
| otx_cryptodev_ops.c |
|   148  if (qp_conf->nb_descriptors > DEFAULT_CMD_QLEN) {  in otx_cpt_que_pair_setup()
|   150  "queue length of %d", qp_conf->nb_descriptors,  in otx_cpt_que_pair_setup()
|