Lines matching refs: sr
DPDK Nitrox symmetric crypto PMD request manager (nitrox_sym_reqmgr.c). Only source lines containing the identifier sr are listed, grouped under their enclosing function; original source line numbers are preserved, so gaps in the numbering mark lines that do not mention sr.

softreq_init():
167  softreq_init(struct nitrox_softreq *sr, rte_iova_t iova)
169          memset(sr, 0, sizeof(*sr));
170          sr->iova = iova;

create_se_instr():
189  create_se_instr(struct nitrox_softreq *sr, uint8_t qno)
191          struct nitrox_crypto_ctx *ctx = sr->ctx;
196          sr->instr.dptr0 = rte_cpu_to_be_64(sr->dptr);
199          sr->instr.ih.value = 0;
200          sr->instr.ih.s.g = 1;
201          sr->instr.ih.s.gsz = sr->in.map_bufs_cnt;
202          sr->instr.ih.s.ssz = sr->out.map_bufs_cnt;
203          sr->instr.ih.s.fsz = FDATA_SIZE + sizeof(struct gphdr);
204          sr->instr.ih.s.tlen = sr->instr.ih.s.fsz + sr->in.total_bytes;
205          sr->instr.ih.value = rte_cpu_to_be_64(sr->instr.ih.value);
208          sr->instr.irh.value[0] = 0;
209          sr->instr.irh.s.uddl = MIN_UDD_LEN;
211          sr->instr.irh.s.ctxl = RTE_ALIGN_MUL_CEIL(sizeof(ctx->fctx), 8) / 8;
213          sr->instr.irh.s.destport = SOLICIT_BASE_DPORT + qno;
215          sr->instr.irh.s.ctxc = 0x3;
216          sr->instr.irh.s.arg = ctx->req_op;
217          sr->instr.irh.s.opcode = ctx->opcode;
218          sr->instr.irh.value[0] = rte_cpu_to_be_64(sr->instr.irh.value[0]);
222          sr->instr.irh.s.ctxp = rte_cpu_to_be_64(ctx_handle);
225          sr->instr.slc.value[0] = 0;
226          sr->instr.slc.s.ssz = sr->out.map_bufs_cnt;
227          sr->instr.slc.value[0] = rte_cpu_to_be_64(sr->instr.slc.value[0]);
230          sr->instr.slc.s.rptr = rte_cpu_to_be_64(sr->rptr);
236          memcpy(&sr->instr.fdata[0], &sr->gph, sizeof(sr->instr.fdata[0]));
237          sr->instr.fdata[1] = 0;
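
create_se_instr() shows the driver's recurring pattern for device-visible words: clear the 64-bit value, pack the hardware bitfields in CPU byte order, then convert the whole word once with rte_cpu_to_be_64(), since the engine expects big-endian. Below is a minimal, self-contained sketch of that clear/pack/swap sequence. The union, field names, and widths are illustrative stand-ins, not the Nitrox layout, and a production driver would also pin down the implementation-defined bitfield ordering per endianness.

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical 64-bit instruction word; the real Nitrox ih/irh/slc
     * words carry more fields than this. */
    union demo_hdr {
            uint64_t value;
            struct {
                    uint64_t gsz  : 16;   /* gather list size (illustrative) */
                    uint64_t ssz  : 16;   /* scatter list size (illustrative) */
                    uint64_t tlen : 32;   /* total length (illustrative) */
            } s;
    };

    /* Stand-in for rte_cpu_to_be_64(); uses the GCC/Clang bswap builtin. */
    static uint64_t
    demo_cpu_to_be_64(uint64_t v)
    {
            const union { uint16_t u16; uint8_t u8[2]; } probe = { .u16 = 1 };

            return probe.u8[0] ? __builtin_bswap64(v) : v;
    }

    int
    main(void)
    {
            union demo_hdr h;

            h.value = 0;                  /* 1. clear the whole word */
            h.s.gsz = 3;                  /* 2. pack fields in CPU order */
            h.s.ssz = 4;
            h.s.tlen = 512;
            h.value = demo_cpu_to_be_64(h.value);  /* 3. one swap at the end */
            printf("on-the-wire header: 0x%016llx\n",
                   (unsigned long long)h.value);
            return 0;
    }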

softreq_copy_iv():
241  softreq_copy_iv(struct nitrox_softreq *sr, uint8_t salt_size)
243          uint16_t offset = sr->ctx->iv.offset + salt_size;
245          sr->iv.virt = rte_crypto_op_ctod_offset(sr->op, uint8_t *, offset);
246          sr->iv.iova = rte_crypto_op_ctophys_offset(sr->op, offset);
247          sr->iv.len = sr->ctx->iv.length - salt_size;
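
softreq_copy_iv() does not copy IV bytes; it records where the IV already lives inside the rte_crypto_op, using the session-configured offset (sr->ctx->iv.offset) with rte_crypto_op_ctod_offset() and rte_crypto_op_ctophys_offset() for the virtual and bus addresses. The salt_size parameter lets AES-GCM callers skip the salt that prefixes the per-op IV (process_combined_data() below passes AES_GCM_SALT_SIZE). A small sketch of just the offset arithmetic, with a plain byte buffer standing in for the crypto op:

    #include <stdint.h>
    #include <stdio.h>

    struct iv_ref {
            const uint8_t *virt;   /* points into the op, no copy made */
            uint16_t len;
    };

    /* Mirror of the arithmetic in softreq_copy_iv(): skip salt_size
     * bytes at iv_offset and shrink the length accordingly. */
    static struct iv_ref
    locate_iv(const uint8_t *op_base, uint16_t iv_offset,
              uint16_t iv_length, uint8_t salt_size)
    {
            struct iv_ref iv;

            iv.virt = op_base + iv_offset + salt_size;
            iv.len = iv_length - salt_size;
            return iv;
    }

    int
    main(void)
    {
            /* A 4-byte salt followed by an 8-byte per-op IV, the usual
             * 12-byte GCM nonce split. */
            uint8_t op_area[12] = { 0xAA, 0xBB, 0xCC, 0xDD,
                                    1, 2, 3, 4, 5, 6, 7, 8 };
            struct iv_ref iv = locate_iv(op_area, 0, sizeof(op_area), 4);

            printf("iv.len=%u first=%u\n", iv.len, iv.virt[0]);  /* 8, 1 */
            return 0;
    }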

create_cipher_inbuf():
329  create_cipher_inbuf(struct nitrox_softreq *sr)
332          struct rte_crypto_op *op = sr->op;
334          fill_sglist(&sr->in, sr->iv.len, sr->iv.iova, sr->iv.virt);
335          err = create_sglist_from_mbuf(&sr->in, op->sym->m_src,
341          create_sgcomp(&sr->in);
342          sr->dptr = sr->iova + offsetof(struct nitrox_softreq, in.sgcomp);
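
Note how sr->dptr is formed: softreq_init() stored the softreq's own bus address in sr->iova, so the DMA address of any member embedded in the struct, here the in.sgcomp scatter-gather components, is that base plus offsetof(). This only works because the softreq sits in physically contiguous, DMA-able memory. A minimal sketch of the pattern over a hypothetical request layout:

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    typedef uint64_t demo_iova_t;   /* stand-in for rte_iova_t */

    /* Hypothetical request with DMA-visible members embedded in it. */
    struct demo_req {
            uint8_t in_sgcomp[64];
            uint8_t out_sgcomp[64];
            demo_iova_t iova;   /* bus address of this struct, set at init */
    };

    int
    main(void)
    {
            struct demo_req r = { .iova = 0x10000000ULL };

            /* Same shape as sr->dptr / sr->rptr in the driver. */
            demo_iova_t dptr = r.iova + offsetof(struct demo_req, in_sgcomp);
            demo_iova_t rptr = r.iova + offsetof(struct demo_req, out_sgcomp);

            printf("dptr=0x%llx rptr=0x%llx\n",
                   (unsigned long long)dptr, (unsigned long long)rptr);
            return 0;
    }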

create_cipher_outbuf():
348  create_cipher_outbuf(struct nitrox_softreq *sr)
350          struct rte_crypto_op *op = sr->op;
355          sr->resp.orh = PENDING_SIG;
356          sr->out.sglist[cnt].len = sizeof(sr->resp.orh);
357          sr->out.sglist[cnt].iova = sr->iova + offsetof(struct nitrox_softreq,
359          sr->out.sglist[cnt].virt = &sr->resp.orh;
362          sr->out.map_bufs_cnt = cnt;
363          fill_sglist(&sr->out, sr->iv.len, sr->iv.iova, sr->iv.virt);
364          err = create_sglist_from_mbuf(&sr->out, m_dst,
370          cnt = sr->out.map_bufs_cnt;
371          sr->resp.completion = PENDING_SIG;
372          sr->out.sglist[cnt].len = sizeof(sr->resp.completion);
373          sr->out.sglist[cnt].iova = sr->iova + offsetof(struct nitrox_softreq,
375          sr->out.sglist[cnt].virt = &sr->resp.completion;
379          sr->out.map_bufs_cnt = cnt;
381          create_sgcomp(&sr->out);
382          sr->rptr = sr->iova + offsetof(struct nitrox_softreq, out.sgcomp);
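
create_cipher_outbuf() brackets the output scatter list with two 8-byte words that live inside the softreq itself: an entry for the ORH (output request header) goes first and one for the completion word goes last, both pre-filled with the PENDING_SIG sentinel so that nitrox_check_se_req() can detect completion by watching them change. A compact sketch of building such a bracketed list; the struct shapes and the sentinel value are illustrative:

    #include <stdint.h>
    #include <stdio.h>

    #define PENDING_SIG 0xFFFFFFFFFFFFFFFFULL   /* illustrative sentinel */

    struct demo_sgentry {
            uint64_t iova;
            const void *virt;
            uint16_t len;
    };

    struct demo_out {
            uint64_t orh;          /* written by hardware with the result */
            uint64_t completion;   /* written by hardware last */
            struct demo_sgentry sglist[8];
            int cnt;
    };

    static void
    add_entry(struct demo_out *o, uint64_t iova, const void *virt, uint16_t len)
    {
            o->sglist[o->cnt++] = (struct demo_sgentry){ iova, virt, len };
    }

    int
    main(void)
    {
            struct demo_out o = { .cnt = 0 };

            o.orh = PENDING_SIG;                   /* first entry: ORH */
            add_entry(&o, 0x1000, &o.orh, sizeof(o.orh));
            add_entry(&o, 0x2000, "payload", 7);   /* data entries in between */
            o.completion = PENDING_SIG;            /* last entry: completion */
            add_entry(&o, 0x1008, &o.completion, sizeof(o.completion));
            printf("out entries: %d\n", o.cnt);    /* 3 */
            return 0;
    }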

process_cipher_data():
397  process_cipher_data(struct nitrox_softreq *sr)
399          struct rte_crypto_op *op = sr->op;
402          softreq_copy_iv(sr, 0);
403          err = create_cipher_inbuf(sr);
407          err = create_cipher_outbuf(sr);
411          create_cipher_gph(op->sym->cipher.data.length, sr->iv.len, &sr->gph);

extract_cipher_auth_digest():
417  extract_cipher_auth_digest(struct nitrox_softreq *sr,
420          struct rte_crypto_op *op = sr->op;
424          if (sr->ctx->req_op == NITROX_OP_DECRYPT &&
428          digest->len = sr->ctx->digest_length;

create_cipher_auth_sglist():
449  create_cipher_auth_sglist(struct nitrox_softreq *sr,
452          struct rte_crypto_op *op = sr->op;
456          fill_sglist(sgtbl, sr->iv.len, sr->iv.iova, sr->iv.virt);

create_combined_sglist():
482  create_combined_sglist(struct nitrox_softreq *sr, struct nitrox_sgtable *sgtbl,
485          struct rte_crypto_op *op = sr->op;
487          fill_sglist(sgtbl, sr->iv.len, sr->iv.iova, sr->iv.virt);
488          fill_sglist(sgtbl, sr->ctx->aad_length, op->sym->aead.aad.phys_addr,

create_aead_sglist():
495  create_aead_sglist(struct nitrox_softreq *sr, struct nitrox_sgtable *sgtbl,
500          switch (sr->ctx->nitrox_chain) {
503                  err = create_cipher_auth_sglist(sr, sgtbl, mbuf);
506                  err = create_combined_sglist(sr, sgtbl, mbuf);

create_aead_inbuf():
517  create_aead_inbuf(struct nitrox_softreq *sr, struct nitrox_sglist *digest)
520          struct nitrox_crypto_ctx *ctx = sr->ctx;
522          err = create_aead_sglist(sr, &sr->in, sr->op->sym->m_src);
527          fill_sglist(&sr->in, digest->len, digest->iova, digest->virt);
529          create_sgcomp(&sr->in);
530          sr->dptr = sr->iova + offsetof(struct nitrox_softreq, in.sgcomp);

create_aead_oop_outbuf():
535  create_aead_oop_outbuf(struct nitrox_softreq *sr, struct nitrox_sglist *digest)
538          struct nitrox_crypto_ctx *ctx = sr->ctx;
540          err = create_aead_sglist(sr, &sr->out, sr->op->sym->m_dst);
545          fill_sglist(&sr->out, digest->len, digest->iova, digest->virt);

create_aead_inplace_outbuf():
551  create_aead_inplace_outbuf(struct nitrox_softreq *sr,
555          struct nitrox_crypto_ctx *ctx = sr->ctx;
557          cnt = sr->out.map_bufs_cnt;
558          for (i = 0; i < sr->in.map_bufs_cnt; i++, cnt++) {
559                  sr->out.sglist[cnt].len = sr->in.sglist[i].len;
560                  sr->out.sglist[cnt].iova = sr->in.sglist[i].iova;
561                  sr->out.sglist[cnt].virt = sr->in.sglist[i].virt;
564          sr->out.map_bufs_cnt = cnt;
566          fill_sglist(&sr->out, digest->len, digest->iova,
569          sr->out.map_bufs_cnt--;
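
For in-place AEAD the output list reuses the input entries one-for-one, since source and destination are the same mbuf. The lines between 566 and 569 carry no sr of their own and so are absent here, but the visible pair suggests the usual arrangement: append a digest entry when encrypting, and when decrypt-verifying drop the trailing digest entry with map_bufs_cnt-- so the device cannot overwrite the value being checked. A self-contained sketch of that copy-then-adjust step, with illustrative types and operation names:

    #include <stdint.h>
    #include <stdio.h>

    struct sg { uint64_t iova; uint16_t len; };

    struct sgtable {
            struct sg sglist[8];
            int map_bufs_cnt;
    };

    enum demo_op { DEMO_ENCRYPT, DEMO_DECRYPT };  /* stand-ins for NITROX_OP_* */

    /* Copy every input entry to the output, then append the digest on
     * encrypt or drop the trailing (digest) entry on decrypt-verify. */
    static void
    build_inplace_out(struct sgtable *out, const struct sgtable *in,
                      struct sg digest, enum demo_op op)
    {
            int i, cnt = out->map_bufs_cnt;

            for (i = 0; i < in->map_bufs_cnt; i++, cnt++)
                    out->sglist[cnt] = in->sglist[i];
            out->map_bufs_cnt = cnt;
            if (op == DEMO_ENCRYPT)
                    out->sglist[out->map_bufs_cnt++] = digest;
            else
                    out->map_bufs_cnt--;  /* device must not write the digest */
    }

    int
    main(void)
    {
            struct sgtable in = {
                    .sglist = { { 0x1000, 64 }, { 0x2000, 16 } }, /* data, digest */
                    .map_bufs_cnt = 2,
            };
            struct sgtable out = { .map_bufs_cnt = 0 };

            build_inplace_out(&out, &in, (struct sg){ 0x3000, 16 }, DEMO_DECRYPT);
            printf("out entries: %d\n", out.map_bufs_cnt);  /* 1 */
            return 0;
    }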

create_aead_outbuf():
574  create_aead_outbuf(struct nitrox_softreq *sr, struct nitrox_sglist *digest)
576          struct rte_crypto_op *op = sr->op;
579          sr->resp.orh = PENDING_SIG;
580          sr->out.sglist[cnt].len = sizeof(sr->resp.orh);
581          sr->out.sglist[cnt].iova = sr->iova + offsetof(struct nitrox_softreq,
583          sr->out.sglist[cnt].virt = &sr->resp.orh;
585          sr->out.map_bufs_cnt = cnt;
589                  err = create_aead_oop_outbuf(sr, digest);
593                  create_aead_inplace_outbuf(sr, digest);
596          cnt = sr->out.map_bufs_cnt;
597          sr->resp.completion = PENDING_SIG;
598          sr->out.sglist[cnt].len = sizeof(sr->resp.completion);
599          sr->out.sglist[cnt].iova = sr->iova + offsetof(struct nitrox_softreq,
601          sr->out.sglist[cnt].virt = &sr->resp.completion;
604          sr->out.map_bufs_cnt = cnt;
606          create_sgcomp(&sr->out);
607          sr->rptr = sr->iova + offsetof(struct nitrox_softreq, out.sgcomp);

process_cipher_auth_data():
641  process_cipher_auth_data(struct nitrox_softreq *sr)
643          struct rte_crypto_op *op = sr->op;
647          softreq_copy_iv(sr, 0);
648          err = extract_cipher_auth_digest(sr, &digest);
652          err = create_aead_inbuf(sr, &digest);
656          err = create_aead_outbuf(sr, &digest);
660          create_aead_gph(op->sym->cipher.data.length, sr->iv.len,
661                          op->sym->auth.data.length, &sr->gph);

softreq_copy_salt():
666  softreq_copy_salt(struct nitrox_softreq *sr)
668          struct nitrox_crypto_ctx *ctx = sr->ctx;
676          addr = rte_crypto_op_ctod_offset(sr->op, uint8_t *, ctx->iv.offset);

extract_combined_digest():
686  extract_combined_digest(struct nitrox_softreq *sr, struct nitrox_sglist *digest)
688          struct rte_crypto_op *op = sr->op;
692          digest->len = sr->ctx->digest_length;

process_combined_data():
715  process_combined_data(struct nitrox_softreq *sr)
719          struct rte_crypto_op *op = sr->op;
721          err = softreq_copy_salt(sr);
725          softreq_copy_iv(sr, AES_GCM_SALT_SIZE);
726          err = extract_combined_digest(sr, &digest);
730          err = create_aead_inbuf(sr, &digest);
734          err = create_aead_outbuf(sr, &digest);
738          create_aead_gph(op->sym->aead.data.length, sr->iv.len,
739                          op->sym->aead.data.length + sr->ctx->aad_length,
740                          &sr->gph);

process_softreq():
746  process_softreq(struct nitrox_softreq *sr)
748          struct nitrox_crypto_ctx *ctx = sr->ctx;
753                  err = process_cipher_data(sr);
757                  err = process_cipher_auth_data(sr);
760                  err = process_combined_data(sr);
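
process_softreq() is the dispatcher: it switches on the session's chain type and routes to one of the three builders above. The listing shows only the three sr-bearing calls, so the sketch below reconstructs the switch shape with stand-in labels; the DEMO_CHAIN_* names merely echo the driver's NITROX_CHAIN_* convention and are assumptions, not quotes:

    #include <errno.h>
    #include <stdio.h>

    /* Stand-ins for the driver's chain types and per-chain builders. */
    enum demo_chain {
            DEMO_CHAIN_CIPHER_ONLY,
            DEMO_CHAIN_CIPHER_AUTH,
            DEMO_CHAIN_COMBINED,
    };

    static int process_cipher(void)      { puts("cipher only");   return 0; }
    static int process_cipher_auth(void) { puts("cipher + auth"); return 0; }
    static int process_combined(void)    { puts("AEAD combined"); return 0; }

    /* Mirror of the dispatch shape in process_softreq(). */
    static int
    dispatch(enum demo_chain chain)
    {
            switch (chain) {
            case DEMO_CHAIN_CIPHER_ONLY:
                    return process_cipher();
            case DEMO_CHAIN_CIPHER_AUTH:
                    return process_cipher_auth();
            case DEMO_CHAIN_COMBINED:
                    return process_combined();
            default:
                    return -ENOTSUP;   /* unknown chain type */
            }
    }

    int
    main(void)
    {
            return dispatch(DEMO_CHAIN_COMBINED);
    }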

nitrox_process_se_req():
773                        struct nitrox_softreq *sr)
777          softreq_init(sr, sr->iova);
778          sr->ctx = ctx;
779          sr->op = op;
780          err = process_softreq(sr);
784          create_se_instr(sr, qno);
785          sr->timeout = rte_get_timer_cycles() + CMD_TIMEOUT * rte_get_timer_hz();
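
Submission ends by stamping a deadline rather than arming a timer: sr->timeout is the current cycle count plus CMD_TIMEOUT seconds scaled by the timer frequency, so the poll path needs a single comparison and no per-request timer state. A self-contained sketch with a monotonic clock standing in for rte_get_timer_cycles()/rte_get_timer_hz(); the CMD_TIMEOUT value is illustrative:

    #define _POSIX_C_SOURCE 199309L
    #include <stdint.h>
    #include <stdio.h>
    #include <time.h>

    #define CMD_TIMEOUT 2   /* seconds; illustrative value */

    /* Monotonic "cycle" counter standing in for rte_get_timer_cycles();
     * here one cycle is one nanosecond. */
    static uint64_t
    timer_cycles(void)
    {
            struct timespec ts;

            clock_gettime(CLOCK_MONOTONIC, &ts);
            return (uint64_t)ts.tv_sec * 1000000000ULL + ts.tv_nsec;
    }

    /* Stand-in for rte_get_timer_hz(). */
    static uint64_t
    timer_hz(void)
    {
            return 1000000000ULL;
    }

    int
    main(void)
    {
            /* Deadline computed once at submit time, same shape as
             * sr->timeout in nitrox_process_se_req(). */
            uint64_t timeout = timer_cycles() + CMD_TIMEOUT * timer_hz();

            printf("timed out yet? %s\n",
                   timer_cycles() >= timeout ? "yes" : "no");
            return 0;
    }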

nitrox_check_se_req():
790  nitrox_check_se_req(struct nitrox_softreq *sr, struct rte_crypto_op **op)
796          cc = *(volatile uint64_t *)(&sr->resp.completion);
797          orh = *(volatile uint64_t *)(&sr->resp.orh);
802          else if (rte_get_timer_cycles() >= sr->timeout)
809                          sr->resp.orh);
811          *op = sr->op;
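
nitrox_check_se_req() reads the bracketing words through volatile-qualified loads, forcing the compiler to fetch fresh values from memory on every poll rather than caching them in registers. The request is finished once the device overwrites the PENDING_SIG sentinels, the ORH carries the status, and the precomputed deadline bounds the wait. A simplified, single-threaded sketch of the volatile-read shape (the real check also decodes an error code from the ORH and hands the op back via *op):

    #include <stdint.h>
    #include <stdio.h>

    #define PENDING_SIG 0xFFFFFFFFFFFFFFFFULL   /* illustrative sentinel */

    struct demo_resp {
            uint64_t orh;
            uint64_t completion;
    };

    /* One poll step: a forced memory read, same shape as the loads in
     * nitrox_check_se_req(). */
    static int
    check_done(const struct demo_resp *resp)
    {
            uint64_t cc = *(volatile const uint64_t *)&resp->completion;

            return cc != PENDING_SIG;   /* device wrote the completion word */
    }

    int
    main(void)
    {
            struct demo_resp r = { PENDING_SIG, PENDING_SIG };

            printf("done? %d\n", check_done(&r));  /* 0: still pending */
            r.orh = 0;                             /* device writes result... */
            r.completion = 0;                      /* ...then completion word */
            printf("done? %d\n", check_done(&r));  /* 1: complete */
            return 0;
    }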

nitrox_sym_instr_addr():
816  nitrox_sym_instr_addr(struct nitrox_softreq *sr)
818          return &sr->instr;