Search results for refs:rte_crypto_op (results 1 – 25 of 97), sorted by relevance

/f-stack/dpdk/lib/librte_cryptodev/
rte_crypto.h 78 struct rte_crypto_op { struct
220 struct rte_crypto_op **ops, uint16_t nb_ops) in __rte_crypto_op_raw_bulk_alloc()
245 static inline struct rte_crypto_op *
248 struct rte_crypto_op *op = NULL; in rte_crypto_op_alloc()
278 struct rte_crypto_op **ops, uint16_t nb_ops) in rte_crypto_op_bulk_alloc()
334 rte_crypto_op_free(struct rte_crypto_op *op) in rte_crypto_op_free()
351 static inline struct rte_crypto_op *
361 if (unlikely(m->priv_size < (sizeof(struct rte_crypto_op) + in rte_crypto_sym_op_alloc_from_mbuf_priv_data()
366 struct rte_crypto_op *op = (struct rte_crypto_op *)(m + 1); in rte_crypto_sym_op_alloc_from_mbuf_priv_data()
412 rte_crypto_op_attach_sym_session(struct rte_crypto_op *op, in rte_crypto_op_attach_sym_session()
[all …]
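
(A minimal sketch of how the rte_crypto.h helpers indexed above fit together, assuming an op mempool and a symmetric session created elsewhere; the helper and variable names are illustrative, not taken from this file.)

#include <rte_crypto.h>
#include <rte_mbuf.h>

/* Take one op from "op_pool", point it at an mbuf and attach a symmetric
 * session. The op returns to its pool via rte_crypto_op_free() once the
 * device has processed it. */
static struct rte_crypto_op *
build_sym_op(struct rte_mempool *op_pool,
	     struct rte_cryptodev_sym_session *sess,
	     struct rte_mbuf *m)
{
	struct rte_crypto_op *op =
		rte_crypto_op_alloc(op_pool, RTE_CRYPTO_OP_TYPE_SYMMETRIC);

	if (op == NULL)
		return NULL;		/* mempool exhausted */

	op->sym->m_src = m;		/* packet the PMD will operate on */
	rte_crypto_op_attach_sym_session(op, sess);
	return op;
}
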
/f-stack/dpdk/examples/ipsec-secgw/
esp.h 12 struct rte_crypto_op *cop);
16 struct rte_crypto_op *cop);
20 struct rte_crypto_op *cop);
24 struct rte_crypto_op *cop);
ipsec_process.c 24 free_cops(struct rte_crypto_op *cop[], uint32_t n) in free_cops()
34 enqueue_cop_bulk(struct cdev_qp *cqp, struct rte_crypto_op *cop[], uint32_t num) in enqueue_cop_bulk()
186 struct rte_crypto_op *cop[cnt]; in ipsec_prepare_crypto_group()
333 cqp_dequeue(struct cdev_qp *cqp, struct rte_crypto_op *cop[], uint32_t num) in cqp_dequeue()
348 ctx_dequeue(struct ipsec_ctx *ctx, struct rte_crypto_op *cop[], uint32_t num) in ctx_dequeue()
375 struct rte_crypto_op *cop[RTE_DIM(trf->ipsec.pkts)]; in ipsec_cqp_process()
389 ng = rte_ipsec_pkt_crypto_group((const struct rte_crypto_op **) in ipsec_cqp_process()
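
(The enqueue_cop_bulk()/cqp_dequeue() pairs hit here ultimately wrap the generic cryptodev burst calls. A bare-bones sketch of that pattern, assuming dev_id and qp_id were configured elsewhere; the function name is illustrative.)

#include <rte_cryptodev.h>

/* Push a burst of crypto ops to a queue pair and poll completions back into
 * the same array. Ops the PMD does not accept stay owned by the caller, and
 * each completed op reports its outcome in op->status. */
static uint16_t
run_cops(uint8_t dev_id, uint16_t qp_id,
	 struct rte_crypto_op *cop[], uint16_t n)
{
	uint16_t enq = rte_cryptodev_enqueue_burst(dev_id, qp_id, cop, n);

	return rte_cryptodev_dequeue_burst(dev_id, qp_id, cop, enq);
}
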
/f-stack/dpdk/app/test/
test_cryptodev_asym_util.h 11 struct rte_crypto_op *result_op) in rsa_verify()
21 struct rte_crypto_op *result_op) in verify_modinv()
30 struct rte_crypto_op *result_op) in verify_modexp()
39 uint8_t *sign_s, struct rte_crypto_op *result_op) in verify_ecdsa_sign()
50 struct rte_crypto_op *result_op) in verify_ecpm()
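
(The verify_*() helpers above all start from a dequeued rte_crypto_op; independent of the algorithm, the op carries its outcome in op->status. A minimal check, for illustration only.)

#include <rte_crypto.h>

/* Only when a dequeued op reports success are its result fields meaningful. */
static int
op_succeeded(const struct rte_crypto_op *result_op)
{
	return result_op->status == RTE_CRYPTO_OP_STATUS_SUCCESS;
}
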
/f-stack/dpdk/drivers/crypto/qat/
qat_sym.h 82 struct rte_crypto_op *op) in qat_bpicipher_postprocess()
140 qat_crc_verify(struct qat_sym_session *ctx, struct rte_crypto_op *op) in qat_crc_verify()
164 struct rte_crypto_op *op) in qat_crc_generate()
186 struct rte_crypto_op *op; in qat_sym_preprocess_requests()
191 op = (struct rte_crypto_op *)ops[i]; in qat_sym_preprocess_requests()
219 struct rte_crypto_op *rx_op = (struct rte_crypto_op *)(uintptr_t) in qat_sym_process_response()
qat_asym_pmd.h 35 qat_asym_pmd_enqueue_op_burst(void *qp, struct rte_crypto_op **ops,
39 qat_asym_pmd_dequeue_op_burst(void *qp, struct rte_crypto_op **ops,
/f-stack/dpdk/app/test-crypto-perf/
cperf_test_pmd_cyclecount.c 27 struct rte_crypto_op **ops;
28 struct rte_crypto_op **ops_processed;
57 sizeof(struct rte_crypto_op) + sizeof(struct rte_crypto_sym_op);
105 size_t alloc_sz = sizeof(struct rte_crypto_op *) * in cperf_pmd_cyclecount_test_constructor()
120 uint16_t iv_offset = sizeof(struct rte_crypto_op) + in cperf_pmd_cyclecount_test_constructor()
164 struct rte_crypto_op **ops = &state->ctx->ops[cur_iter_op]; in pmd_cyclecount_bench_ops()
215 struct rte_crypto_op **ops = &state->ctx->ops[cur_iter_op]; in pmd_cyclecount_build_ops()
250 struct rte_crypto_op **ops = &state->ctx->ops[cur_iter_op]; in pmd_cyclecount_bench_enq()
280 struct rte_crypto_op **ops_processed = in pmd_cyclecount_bench_deq()
cperf_test_throughput.c 81 uint16_t iv_offset = sizeof(struct rte_crypto_op) + in cperf_throughput_test_constructor()
111 struct rte_crypto_op *ops[ctx->options->max_burst_size]; in cperf_throughput_test_runner()
112 struct rte_crypto_op *ops_processed[ctx->options->max_burst_size]; in cperf_throughput_test_runner()
142 uint16_t iv_offset = sizeof(struct rte_crypto_op) + in cperf_throughput_test_runner()
190 struct rte_crypto_op *); in cperf_throughput_test_runner()
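
(Both perf tests compute iv_offset as sizeof(struct rte_crypto_op) + sizeof(struct rte_crypto_sym_op): the per-op IV lives in the op's private area right behind the symmetric op. A sketch of reading it back, assuming the op mempool was created with enough private space; the helper name is illustrative.)

#include <rte_crypto.h>

/* Return a pointer to the IV stored in the op's private data. */
static uint8_t *
op_iv_ptr(struct rte_crypto_op *op)
{
	uint16_t iv_offset = sizeof(struct rte_crypto_op) +
			sizeof(struct rte_crypto_sym_op);

	return rte_crypto_op_ctod_offset(op, uint8_t *, iv_offset);
}
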
/f-stack/dpdk/lib/librte_port/
rte_port_sym_crypto.c 33 struct rte_crypto_op *ops[RTE_PORT_IN_BURST_SIZE_MAX];
79 struct rte_crypto_op *op = p->ops[i]; in rte_port_sym_crypto_reader_rx()
147 struct rte_crypto_op *tx_buf[2 * RTE_PORT_IN_BURST_SIZE_MAX];
216 p->tx_buf[p->tx_buf_count++] = (struct rte_crypto_op *) in rte_port_sym_crypto_writer_tx()
244 p->tx_buf[p->tx_buf_count++] = (struct rte_crypto_op *) in rte_port_sym_crypto_writer_tx_bulk()
256 p->tx_buf[tx_buf_count++] = (struct rte_crypto_op *) in rte_port_sym_crypto_writer_tx_bulk()
333 struct rte_crypto_op *tx_buf[2 * RTE_PORT_IN_BURST_SIZE_MAX];
427 p->tx_buf[p->tx_buf_count++] = (struct rte_crypto_op *) in rte_port_sym_crypto_writer_nodrop_tx()
456 p->tx_buf[p->tx_buf_count++] = (struct rte_crypto_op *) in rte_port_sym_crypto_writer_nodrop_tx_bulk()
468 p->tx_buf[tx_buf_count++] = (struct rte_crypto_op *) in rte_port_sym_crypto_writer_nodrop_tx_bulk()
/f-stack/dpdk/drivers/crypto/scheduler/
scheduler_failover.c 25 struct rte_crypto_op **ops, uint16_t nb_ops) in failover_worker_enqueue()
40 schedule_enqueue(void *qp, struct rte_crypto_op **ops, uint16_t nb_ops) in schedule_enqueue()
63 schedule_enqueue_ordering(void *qp, struct rte_crypto_op **ops, in schedule_enqueue_ordering()
79 schedule_dequeue(void *qp, struct rte_crypto_op **ops, uint16_t nb_ops) in schedule_dequeue()
111 schedule_dequeue_ordering(void *qp, struct rte_crypto_op **ops, in schedule_dequeue_ordering()
scheduler_pmd_private.h 77 struct rte_crypto_op **ops, uint16_t nb_ops) in scheduler_order_insert()
84 struct rte_crypto_op **ops, uint16_t nb_ops) in scheduler_order_drain()
86 struct rte_crypto_op *op; in scheduler_order_drain()
scheduler_multicore.c 39 schedule_enqueue(void *qp, struct rte_crypto_op **ops, uint16_t nb_ops) in schedule_enqueue()
67 schedule_enqueue_ordering(void *qp, struct rte_crypto_op **ops, in schedule_enqueue_ordering()
84 schedule_dequeue(void *qp, struct rte_crypto_op **ops, uint16_t nb_ops) in schedule_dequeue()
110 schedule_dequeue_ordering(void *qp, struct rte_crypto_op **ops, in schedule_dequeue_ordering()
115 struct rte_crypto_op *op; in schedule_dequeue_ordering()
158 struct rte_crypto_op *enq_ops[MC_SCHED_BUFFER_SIZE]; in mc_scheduler_worker()
159 struct rte_crypto_op *deq_ops[MC_SCHED_BUFFER_SIZE]; in mc_scheduler_worker()
scheduler_roundrobin.c 20 schedule_enqueue(void *qp, struct rte_crypto_op **ops, uint16_t nb_ops) in schedule_enqueue()
46 schedule_enqueue_ordering(void *qp, struct rte_crypto_op **ops, in schedule_enqueue_ordering()
63 schedule_dequeue(void *qp, struct rte_crypto_op **ops, uint16_t nb_ops) in schedule_dequeue()
101 schedule_dequeue_ordering(void *qp, struct rte_crypto_op **ops, in schedule_dequeue_ordering()
/f-stack/dpdk/drivers/crypto/ccp/
ccp_pmd_private.h 50 struct rte_crypto_op *op[CCP_MAX_BURST];
104 struct rte_crypto_op **ops,
108 struct rte_crypto_op **ops,
/f-stack/dpdk/drivers/crypto/nitrox/
nitrox_sym_reqmgr.h 13 int nitrox_process_se_req(uint16_t qno, struct rte_crypto_op *op,
16 int nitrox_check_se_req(struct nitrox_softreq *sr, struct rte_crypto_op **op);
/f-stack/dpdk/drivers/crypto/kasumi/
rte_kasumi_pmd.c 131 kasumi_get_session(struct kasumi_qp *qp, struct rte_crypto_op *op) in kasumi_get_session()
173 process_kasumi_cipher_op(struct kasumi_qp *qp, struct rte_crypto_op **ops, in process_kasumi_cipher_op()
209 process_kasumi_cipher_op_bit(struct kasumi_qp *qp, struct rte_crypto_op *op, in process_kasumi_cipher_op_bit()
236 process_kasumi_hash_op(struct kasumi_qp *qp, struct rte_crypto_op **ops, in process_kasumi_hash_op()
286 process_ops(struct rte_crypto_op **ops, struct kasumi_session *session, in process_ops()
346 process_op_bit(struct rte_crypto_op *op, struct kasumi_session *session, in process_op_bit()
397 kasumi_pmd_enqueue_burst(void *queue_pair, struct rte_crypto_op **ops, in kasumi_pmd_enqueue_burst()
400 struct rte_crypto_op *c_ops[nb_ops]; in kasumi_pmd_enqueue_burst()
401 struct rte_crypto_op *curr_c_op; in kasumi_pmd_enqueue_burst()
514 struct rte_crypto_op **c_ops, uint16_t nb_ops) in kasumi_pmd_dequeue_burst()
/f-stack/dpdk/lib/librte_vhost/
rte_vhost_crypto.h 108 struct rte_crypto_op **ops, uint16_t nb_ops);
129 rte_vhost_crypto_finalize_requests(struct rte_crypto_op **ops,
/f-stack/dpdk/drivers/crypto/snow3g/
rte_snow3g_pmd.c 141 snow3g_get_session(struct snow3g_qp *qp, struct rte_crypto_op *op) in snow3g_get_session()
184 process_snow3g_cipher_op(struct snow3g_qp *qp, struct rte_crypto_op **ops, in process_snow3g_cipher_op()
219 struct rte_crypto_op *op, in process_snow3g_cipher_op_bit()
246 process_snow3g_hash_op(struct snow3g_qp *qp, struct rte_crypto_op **ops, in process_snow3g_hash_op()
296 process_ops(struct rte_crypto_op **ops, struct snow3g_session *session, in process_ops()
371 process_op_bit(struct rte_crypto_op *op, struct snow3g_session *session, in process_op_bit()
422 snow3g_pmd_enqueue_burst(void *queue_pair, struct rte_crypto_op **ops, in snow3g_pmd_enqueue_burst()
425 struct rte_crypto_op *c_ops[SNOW3G_MAX_BURST]; in snow3g_pmd_enqueue_burst()
426 struct rte_crypto_op *curr_c_op; in snow3g_pmd_enqueue_burst()
526 struct rte_crypto_op **c_ops, uint16_t nb_ops) in snow3g_pmd_dequeue_burst()
/f-stack/dpdk/drivers/crypto/virtio/
virtio_cryptodev.h 59 struct rte_crypto_op **tx_pkts,
63 struct rte_crypto_op **tx_pkts,
/f-stack/dpdk/drivers/crypto/octeontx/
otx_cryptodev_ops.c 446 struct rte_crypto_op *op, in otx_cpt_enq_single_asym()
527 struct rte_crypto_op *op, in otx_cpt_enq_single_sym()
571 struct rte_crypto_op *op, in otx_cpt_enq_single_sym_sessless()
610 struct rte_crypto_op *op, in otx_cpt_enq_single()
634 otx_cpt_pkt_enqueue(void *qptr, struct rte_crypto_op **ops, uint16_t nb_ops, in otx_cpt_pkt_enqueue()
668 otx_cpt_enqueue_sym(void *qptr, struct rte_crypto_op **ops, uint16_t nb_ops) in otx_cpt_enqueue_sym()
674 otx_cpt_asym_rsa_op(struct rte_crypto_op *cop, struct cpt_request_info *req, in otx_cpt_asym_rsa_op()
762 otx_cpt_asym_post_process(struct rte_crypto_op *cop, in otx_cpt_asym_post_process()
794 otx_cpt_dequeue_post_process(struct rte_crypto_op *cop, uintptr_t *rsp, in otx_cpt_dequeue_post_process()
820 otx_cpt_pkt_dequeue(void *qptr, struct rte_crypto_op **ops, uint16_t nb_ops, in otx_cpt_pkt_dequeue()
[all …]
/f-stack/dpdk/drivers/crypto/null/
null_crypto_pmd.c 51 process_op(const struct null_crypto_qp *qp, struct rte_crypto_op *op, in process_op()
73 get_session(struct null_crypto_qp *qp, struct rte_crypto_op *op) in get_session()
112 null_crypto_pmd_enqueue_burst(void *queue_pair, struct rte_crypto_op **ops, in null_crypto_pmd_enqueue_burst()
143 null_crypto_pmd_dequeue_burst(void *queue_pair, struct rte_crypto_op **ops, in null_crypto_pmd_dequeue_burst()
/f-stack/dpdk/drivers/crypto/zuc/
rte_zuc_pmd.c 131 zuc_get_session(struct zuc_qp *qp, struct rte_crypto_op *op) in zuc_get_session()
173 process_zuc_cipher_op(struct zuc_qp *qp, struct rte_crypto_op **ops, in process_zuc_cipher_op()
235 process_zuc_hash_op(struct zuc_qp *qp, struct rte_crypto_op **ops, in process_zuc_hash_op()
299 process_ops(struct rte_crypto_op **ops, enum zuc_operation op_type, in process_ops()
359 zuc_pmd_enqueue_burst(void *queue_pair, struct rte_crypto_op **ops, in zuc_pmd_enqueue_burst()
362 struct rte_crypto_op *c_ops[ZUC_MAX_BURST]; in zuc_pmd_enqueue_burst()
363 struct rte_crypto_op *curr_c_op; in zuc_pmd_enqueue_burst()
450 struct rte_crypto_op **c_ops, uint16_t nb_ops) in zuc_pmd_dequeue_burst()
/f-stack/dpdk/lib/librte_ipsec/
rte_ipsec.h 39 struct rte_crypto_op *cop[],
119 struct rte_mbuf *mb[], struct rte_crypto_op *cop[], uint16_t num) in rte_ipsec_pkt_crypto_prepare()
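
(rte_ipsec_pkt_crypto_prepare() fills already-allocated crypto ops for a burst of mbufs belonging to one IPsec session; the filled ops are then submitted through the cryptodev burst API. A sketch under those assumptions, with ss, dev_id and qp_id set up elsewhere and an illustrative helper name.)

#include <rte_ipsec.h>
#include <rte_cryptodev.h>

/* cop[] must already hold "num" ops taken from an op mempool; prepare()
 * binds them to the mbufs and the session, and the enqueue return value
 * tells how many of them the PMD accepted. */
static uint16_t
ipsec_submit(const struct rte_ipsec_session *ss,
	     uint8_t dev_id, uint16_t qp_id,
	     struct rte_mbuf *mb[], struct rte_crypto_op *cop[], uint16_t num)
{
	uint16_t k = rte_ipsec_pkt_crypto_prepare(ss, mb, cop, num);

	return rte_cryptodev_enqueue_burst(dev_id, qp_id, cop, k);
}
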
/f-stack/dpdk/drivers/crypto/bcmfs/
bcmfs_sym_session.h 89 bcmfs_process_crypto_op(struct rte_crypto_op *op,
107 bcmfs_sym_get_session(struct rte_crypto_op *op);
/f-stack/dpdk/drivers/event/octeontx2/
otx2_evdev_crypto_adptr_dp.h 22 struct rte_crypto_op *cop, uintptr_t *rsp, in otx2_ca_deq_post_process()
57 struct rte_crypto_op *cop; in otx2_handle_crypto_event()
