Lines Matching refs:resid

108 	int i, blks, inlen, ivlen, outlen, resid;  in swcr_encdec()  local
166 resid = crp->crp_payload_length; in swcr_encdec()
175 while (resid >= blks) { in swcr_encdec()
246 resid -= blks; in swcr_encdec()
250 if (resid > 0) { in swcr_encdec()
257 KASSERT(resid < blks, ("%s: partial block too big", __func__)); in swcr_encdec()
261 if (inlen < resid) { in swcr_encdec()
262 crypto_cursor_copydata(&cc_in, resid, blk); in swcr_encdec()
266 if (outlen < resid) in swcr_encdec()
272 resid); in swcr_encdec()
275 resid); in swcr_encdec()
276 if (outlen < resid) in swcr_encdec()
277 crypto_cursor_copyback(&cc_out, resid, blk); in swcr_encdec()
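These matches trace the block/residual split in swcr_encdec() (the listing appears to come from FreeBSD's sys/opencrypto/cryptosoft.c): whole cipher blocks are consumed in the loop at line 175, and the leftover resid bytes (always smaller than a block, per the KASSERT at line 257) are staged through a bounce buffer and finished with the cipher's *_last hook; the inlen/outlen checks at lines 261-277 cover the case where the tail straddles a cursor segment. A minimal flat-buffer sketch of the same pattern, with hypothetical toy_* names standing in for the enc_xform callbacks:

#include <stddef.h>
#include <string.h>

/*
 * Hypothetical per-block cipher hooks; in cryptosoft.c these roles are
 * played by the enc_xform callbacks (encrypt/encrypt_last and friends).
 */
struct toy_cipher {
	size_t	blocksize;
	void	(*encrypt)(void *ctx, const unsigned char *in,
		    unsigned char *out);
	void	(*encrypt_last)(void *ctx, const unsigned char *in,
		    unsigned char *out, size_t len);
};

/*
 * Encrypt a contiguous buffer: whole blocks first, then the short tail.
 * swcr_encdec() has the same shape, but walks the request through
 * crypto cursors instead of flat pointers.
 */
void
toy_encdec(const struct toy_cipher *xf, void *ctx, const unsigned char *in,
    unsigned char *out, size_t resid)
{
	const size_t blks = xf->blocksize;

	/* Bulk pass: consume input while at least one full block remains. */
	while (resid >= blks) {
		xf->encrypt(ctx, in, out);
		in += blks;
		out += blks;
		resid -= blks;
	}

	/*
	 * Trailing partial block (resid < blks): stage it in a bounce
	 * buffer so the cipher's "last" hook sees a well-defined short
	 * length, then copy the result back out.
	 */
	if (resid > 0) {
		unsigned char blk[16];		/* assumes blocksize <= 16 */

		memcpy(blk, in, resid);
		xf->encrypt_last(ctx, blk, blk, resid);
		memcpy(out, blk, resid);
	}
}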
400 int blksz, error, ivlen, len, resid; in swcr_gmac() local
417 for (resid = crp->crp_payload_length; resid >= blksz; resid -= len) { in swcr_gmac()
421 len = rounddown(MIN(len, resid), blksz); in swcr_gmac()
430 if (resid > 0) { in swcr_gmac()
432 crypto_cursor_copydata(&cc, resid, blk); in swcr_gmac()
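swcr_gmac() is the authentication-only (GMAC) path: the loop at line 417 feeds block-aligned runs of the payload to the hash Update hook, and the leftover resid bytes at line 430 are copied into a scratch block for a final Update. A sketch under two assumptions not visible in the matches above: a flat input buffer, and a hash that wants its final short block zero-padded (typical for GHASH-style MACs):

#include <stddef.h>
#include <string.h>

/* Hypothetical MAC update hook; GHASH fills this role in swcr_gmac(). */
typedef void (*toy_update_t)(void *ctx, const unsigned char *data, size_t len);

/*
 * Feed a buffer to a block-oriented MAC: block-aligned runs straight
 * from the input, then a zero-padded final partial block.
 */
void
toy_gmac_update(toy_update_t update, void *ctx, const unsigned char *in,
    size_t resid, size_t blksz)
{
	size_t len;

	for (; resid >= blksz; resid -= len, in += len) {
		/*
		 * rounddown(resid, blksz): only pass whole blocks.  With a
		 * flat buffer this loop runs once; the real code also caps
		 * len per cursor segment, hence the MIN(len, resid) at
		 * line 421.
		 */
		len = resid - (resid % blksz);
		update(ctx, in, len);
	}
	if (resid > 0) {
		unsigned char blk[16];		/* assumes blksz <= 16 */

		memset(blk, 0, sizeof(blk));
		memcpy(blk, in, resid);
		update(ctx, blk, blksz);	/* zero-padded tail (assumed) */
	}
}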
480 int blksz, error, ivlen, len, r, resid; in swcr_gcm() local
519 for (resid = crp->crp_aad_length; resid >= blksz; in swcr_gcm()
520 resid -= len) { in swcr_gcm()
524 len = rounddown(MIN(len, resid), blksz); in swcr_gcm()
533 if (resid > 0) { in swcr_gcm()
535 crypto_cursor_copydata(&cc_in, resid, blk); in swcr_gcm()
550 for (resid = crp->crp_payload_length; resid >= blksz; resid -= blksz) { in swcr_gcm()
573 if (resid > 0) { in swcr_gcm()
574 crypto_cursor_copydata(&cc_in, resid, blk); in swcr_gcm()
576 exf->encrypt_last(swe->sw_kschedule, blk, blk, resid); in swcr_gcm()
577 crypto_cursor_copyback(&cc_out, resid, blk); in swcr_gcm()
579 axf->Update(&ctx, blk, resid); in swcr_gcm()
610 for (resid = crp->crp_payload_length; resid > blksz; in swcr_gcm()
611 resid -= blksz) { in swcr_gcm()
629 if (resid > 0) { in swcr_gcm()
630 crypto_cursor_copydata(&cc_in, resid, blk); in swcr_gcm()
631 exf->decrypt_last(swe->sw_kschedule, blk, blk, resid); in swcr_gcm()
632 crypto_cursor_copyback(&cc_out, resid, blk); in swcr_gcm()
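swcr_gcm() applies the same split three times: the AAD is hashed block-by-block with a copied-in tail (lines 519-535, same shape as the GMAC sketch above), the encrypt pass encrypts each block and hashes the resulting ciphertext, with encrypt_last finishing the resid-byte tail (lines 550-579), and the decrypt pass mirrors it with decrypt/decrypt_last (lines 610-632), typically only after the tag has been verified. A flat-buffer sketch of the encrypt pass, again with hypothetical toy_* hooks in place of exf/axf:

#include <stddef.h>
#include <string.h>

/* Hypothetical cipher/hash hooks standing in for exf/axf in swcr_gcm(). */
struct toy_gcm_ops {
	size_t	blksz;
	void	(*encrypt)(void *kctx, const unsigned char *in,
		    unsigned char *out);		/* one full block */
	void	(*encrypt_last)(void *kctx, const unsigned char *in,
		    unsigned char *out, size_t len);	/* len <= blksz */
	void	(*hash_update)(void *hctx, const unsigned char *data,
		    size_t len);
};

/*
 * GCM-style encrypt pass over a contiguous payload: encrypt each full
 * block, feed the ciphertext to the hash, and finish the resid-byte
 * tail with the cipher's "last" hook.
 */
void
toy_gcm_encrypt(const struct toy_gcm_ops *ops, void *kctx, void *hctx,
    const unsigned char *in, unsigned char *out, size_t resid)
{
	const size_t blksz = ops->blksz;

	for (; resid >= blksz; resid -= blksz, in += blksz, out += blksz) {
		ops->encrypt(kctx, in, out);
		ops->hash_update(hctx, out, blksz);	/* hash ciphertext */
	}
	if (resid > 0) {
		unsigned char blk[16];		/* assumes blksz <= 16 */

		memcpy(blk, in, resid);
		ops->encrypt_last(kctx, blk, blk, resid);
		memcpy(out, blk, resid);
		ops->hash_update(hctx, blk, resid);	/* hash the short tail */
	}
}

Note that in the matches above the decrypt-side loop runs while resid > blksz (strictly greater), so the final block, whether full or partial, always goes through decrypt_last.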
717 int blksz, error, ivlen, r, resid; in swcr_ccm() local
768 for (resid = crp->crp_payload_length; resid >= blksz; resid -= blksz) { in swcr_ccm()
800 if (resid > 0) { in swcr_ccm()
801 crypto_cursor_copydata(&cc_in, resid, blk); in swcr_ccm()
803 axf->Update(&ctx, blk, resid); in swcr_ccm()
804 exf->encrypt_last(swe->sw_kschedule, blk, blk, resid); in swcr_ccm()
805 crypto_cursor_copyback(&cc_out, resid, blk); in swcr_ccm()
807 exf->decrypt_last(swe->sw_kschedule, blk, blk, resid); in swcr_ccm()
808 axf->Update(&ctx, blk, resid); in swcr_ccm()
834 for (resid = crp->crp_payload_length; resid > blksz; in swcr_ccm()
835 resid -= blksz) { in swcr_ccm()
853 if (resid > 0) { in swcr_ccm()
854 crypto_cursor_copydata(&cc_in, resid, blk); in swcr_ccm()
855 exf->decrypt_last(swe->sw_kschedule, blk, blk, resid); in swcr_ccm()
856 crypto_cursor_copyback(&cc_out, resid, blk); in swcr_ccm()
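swcr_ccm() shows why the ordering around the tail matters: CBC-MAC covers the plaintext, so on encryption the resid bytes are hashed before encrypt_last (lines 803-805), while on the decrypt side they are run through decrypt_last first and hashed afterwards (lines 807-808). The separate loop at lines 834-856 then performs the actual decryption and copy-back, presumably after the tag has been verified. A sketch of just the tail handling, with hypothetical hooks and a flat buffer:

#include <stdbool.h>
#include <stddef.h>
#include <string.h>

/* Hypothetical hooks mirroring the exf/axf callbacks used by swcr_ccm(). */
struct toy_ccm_ops {
	void	(*encrypt_last)(void *kctx, const unsigned char *in,
		    unsigned char *out, size_t len);
	void	(*decrypt_last)(void *kctx, const unsigned char *in,
		    unsigned char *out, size_t len);
	void	(*cbcmac_update)(void *hctx, const unsigned char *data,
		    size_t len);
};

/*
 * Partial-block tail for a CCM-style mode.  CBC-MAC covers plaintext,
 * so the MAC update happens before encryption and after decryption.
 */
void
toy_ccm_tail(const struct toy_ccm_ops *ops, void *kctx, void *hctx,
    const unsigned char *in, unsigned char *out, size_t resid, bool encrypt)
{
	unsigned char blk[16];		/* assumes block size <= 16 */

	memcpy(blk, in, resid);
	if (encrypt) {
		ops->cbcmac_update(hctx, blk, resid);	/* MAC the plaintext */
		ops->encrypt_last(kctx, blk, blk, resid);
	} else {
		ops->decrypt_last(kctx, blk, blk, resid);
		ops->cbcmac_update(hctx, blk, resid);	/* MAC the plaintext */
	}
	memcpy(out, blk, resid);
}

In the sketch the result is always copied out; in the matches above, the first decrypt-side pass keeps the recovered plaintext only for the MAC (no copyback at lines 807-808) and leaves writing it out to the second loop.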