Home
last modified time | relevance | path

Searched refs: cryptlen (results 1–25 of 113), sorted by relevance

Pages: 1 2 3 4 5

/linux-6.15/crypto/
authencesn.c — line 94: unsigned int cryptlen = req->cryptlen; in crypto_authenc_esn_genicv_tail() local
126 unsigned int cryptlen = req->cryptlen; in crypto_authenc_esn_genicv() local
142 ahash_request_set_crypt(ahreq, dst, hash, assoclen + cryptlen); in crypto_authenc_esn_genicv()
184 unsigned int cryptlen = req->cryptlen; in crypto_authenc_esn_encrypt() local
204 skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv); in crypto_authenc_esn_encrypt()
224 unsigned int cryptlen = req->cryptlen - authsize; in crypto_authenc_esn_decrypt_tail() local
249 skcipher_request_set_crypt(skreq, dst, dst, cryptlen, req->iv); in crypto_authenc_esn_decrypt_tail()
272 unsigned int cryptlen = req->cryptlen; in crypto_authenc_esn_decrypt() local
278 cryptlen -= authsize; in crypto_authenc_esn_decrypt()
281 err = crypto_authenc_esn_copy(req, assoclen + cryptlen); in crypto_authenc_esn_decrypt()
[all …]
aegis128-core.c — line 344: u64 assoclen, u64 cryptlen) in crypto_aegis128_final() argument
347 u64 cryptbits = cryptlen * 8; in crypto_aegis128_final()
392 unsigned int cryptlen = req->cryptlen; in crypto_aegis128_encrypt_generic() local
401 crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen); in crypto_aegis128_encrypt_generic()
403 scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen, in crypto_aegis128_encrypt_generic()
414 unsigned int cryptlen = req->cryptlen - authsize; in crypto_aegis128_decrypt_generic() local
427 crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen); in crypto_aegis128_decrypt_generic()
453 unsigned int cryptlen = req->cryptlen; in crypto_aegis128_encrypt_simd() local
465 crypto_aegis128_final_simd(&state, &tag, req->assoclen, cryptlen, 0); in crypto_aegis128_encrypt_simd()
477 unsigned int cryptlen = req->cryptlen - authsize; in crypto_aegis128_decrypt_simd() local
[all …]
chacha20poly1305.c — line 39: __le64 cryptlen; member
59 unsigned int cryptlen; member
131 if (rctx->cryptlen == 0) in chacha_decrypt()
145 rctx->cryptlen, creq->iv); in chacha_decrypt()
158 if (rctx->cryptlen == req->cryptlen) /* encrypting */ in poly_tail_continue()
178 preq->tail.cryptlen = cpu_to_le64(rctx->cryptlen); in poly_tail()
236 if (rctx->cryptlen == req->cryptlen) /* encrypting */ in poly_cipher()
407 if (req->cryptlen == 0) in chacha_encrypt()
421 req->cryptlen, creq->iv); in chacha_encrypt()
434 rctx->cryptlen = req->cryptlen; in chachapoly_encrypt()
[all …]
ccm.c — line 129: unsigned int cryptlen) in format_input() argument
170 unsigned int cryptlen) in crypto_ccm_auth() argument
183 err = format_input(odata, req, cryptlen); in crypto_ccm_auth()
219 cryptlen += ilen; in crypto_ccm_auth()
237 req->assoclen + req->cryptlen, in crypto_ccm_encrypt_done()
293 unsigned int cryptlen = req->cryptlen; in crypto_ccm_encrypt() local
330 unsigned int cryptlen = req->cryptlen - authsize; in crypto_ccm_decrypt_done() local
338 err = crypto_ccm_auth(req, dst, cryptlen); in crypto_ccm_decrypt_done()
353 unsigned int cryptlen = req->cryptlen; in crypto_ccm_decrypt() local
359 cryptlen -= authsize; in crypto_ccm_decrypt()
[all …]
gcm.c — line 58: unsigned int cryptlen; member
178 unsigned int cryptlen) in crypto_gcm_init_crypt() argument
320 if (gctx->cryptlen) in gcm_hash_assoc_remain_continue()
417 req->assoclen + req->cryptlen, in gcm_enc_copy_hash()
428 gctx->cryptlen = req->cryptlen; in gcm_encrypt_continue()
470 unsigned int cryptlen = req->cryptlen - authsize; in crypto_gcm_verify() local
505 unsigned int cryptlen = req->cryptlen; in crypto_gcm_decrypt() local
508 cryptlen -= authsize; in crypto_gcm_decrypt()
513 gctx->cryptlen = cryptlen; in crypto_gcm_decrypt()
753 req->cryptlen, iv); in crypto_rfc4106_crypt()
[all …]
echainiv.c — line 37: if (req->cryptlen < ivsize) in echainiv_encrypt()
51 req->assoclen + req->cryptlen, in echainiv_encrypt()
62 req->cryptlen, info); in echainiv_encrypt()
94 if (req->cryptlen < ivsize) in echainiv_decrypt()
104 req->cryptlen - ivsize, req->iv); in echainiv_decrypt()
seqiv.c — line 58: if (req->cryptlen < ivsize) in seqiv_aead_encrypt()
74 req->assoclen + req->cryptlen, in seqiv_aead_encrypt()
96 req->cryptlen - ivsize, info); in seqiv_aead_encrypt()
117 if (req->cryptlen < ivsize + crypto_aead_authsize(geniv)) in seqiv_aead_decrypt()
127 req->cryptlen - ivsize, req->iv); in seqiv_aead_decrypt()
xts.c — line 87: const bool cts = (req->cryptlen % XTS_BLOCK_SIZE); in xts_xor_tweak()
164 int offset = req->cryptlen & ~(XTS_BLOCK_SIZE - 1); in xts_cts_final()
167 int tail = req->cryptlen % XTS_BLOCK_SIZE; in xts_cts_final()
209 if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) { in xts_encrypt_done()
229 if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) { in xts_decrypt_done()
247 if (req->cryptlen < XTS_BLOCK_SIZE) in xts_init_crypt()
253 req->cryptlen & ~(XTS_BLOCK_SIZE - 1), NULL); in xts_init_crypt()
272 if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0)) in xts_encrypt()
289 if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0)) in xts_decrypt()
/linux-6.15/arch/x86/crypto/
aegis128-aesni-glue.c — line 62: unsigned int cryptlen);
172 unsigned int cryptlen, bool enc) in crypto_aegis128_aesni_crypt() argument
189 aegis128_aesni_final(&state, tag_xor, req->assoclen, cryptlen); in crypto_aegis128_aesni_crypt()
199 unsigned int cryptlen = req->cryptlen; in crypto_aegis128_aesni_encrypt() local
201 crypto_aegis128_aesni_crypt(req, &tag, cryptlen, true); in crypto_aegis128_aesni_encrypt()
204 req->assoclen + cryptlen, authsize, 1); in crypto_aegis128_aesni_encrypt()
215 unsigned int cryptlen = req->cryptlen - authsize; in crypto_aegis128_aesni_decrypt() local
218 req->assoclen + cryptlen, authsize, 0); in crypto_aegis128_aesni_decrypt()
220 crypto_aegis128_aesni_crypt(req, &tag, cryptlen, false); in crypto_aegis128_aesni_decrypt()
/linux-6.15/drivers/crypto/allwinner/sun8i-ce/
sun8i-ce-cipher.c — line 45: if (areq->cryptlen == 0) { in sun8i_ce_cipher_need_fallback()
50 if (areq->cryptlen % 16) { in sun8i_ce_cipher_need_fallback()
55 len = areq->cryptlen; in sun8i_ce_cipher_need_fallback()
71 len = areq->cryptlen; in sun8i_ce_cipher_need_fallback()
112 areq->cryptlen, areq->iv); in sun8i_ce_cipher_fallback()
145 areq->cryptlen, in sun8i_ce_cipher_prepare()
199 offset = areq->cryptlen - ivsize; in sun8i_ce_cipher_prepare()
237 len = areq->cryptlen; in sun8i_ce_cipher_prepare()
252 len = areq->cryptlen; in sun8i_ce_cipher_prepare()
267 chan->timeout = areq->cryptlen; in sun8i_ce_cipher_prepare()
[all …]
/linux-6.15/arch/arm/crypto/
aes-ce-glue.c — line 282: if (req->cryptlen <= AES_BLOCK_SIZE) { in cts_cbc_encrypt()
283 if (req->cryptlen < AES_BLOCK_SIZE) in cts_cbc_encrypt()
298 if (req->cryptlen == AES_BLOCK_SIZE) in cts_cbc_encrypt()
304 subreq.cryptlen); in cts_cbc_encrypt()
340 if (req->cryptlen <= AES_BLOCK_SIZE) { in cts_cbc_decrypt()
341 if (req->cryptlen < AES_BLOCK_SIZE) in cts_cbc_decrypt()
356 if (req->cryptlen == AES_BLOCK_SIZE) in cts_cbc_decrypt()
362 subreq.cryptlen); in cts_cbc_decrypt()
449 int tail = req->cryptlen % AES_BLOCK_SIZE; in xts_encrypt()
455 if (req->cryptlen < AES_BLOCK_SIZE) in xts_encrypt()
[all …]
/linux-6.15/drivers/crypto/qce/
aead.c — line 71: totallen = req->cryptlen + req->assoclen; in qce_aead_done()
124 totallen = rctx->cryptlen + assoclen; in qce_aead_prepare_dst_buf()
198 cryptlen = rctx->cryptlen + ctx->authsize; in qce_aead_ccm_prepare_buf_assoclen()
200 cryptlen = rctx->cryptlen; in qce_aead_ccm_prepare_buf_assoclen()
201 totallen = cryptlen + req->assoclen; in qce_aead_ccm_prepare_buf_assoclen()
278 totallen = cryptlen + rctx->assoclen; in qce_aead_ccm_prepare_buf_assoclen()
336 unsigned int cryptlen; in qce_aead_ccm_prepare_buf() local
344 cryptlen = rctx->cryptlen + ctx->authsize; in qce_aead_ccm_prepare_buf()
514 rctx->cryptlen = req->cryptlen; in qce_aead_crypt()
516 rctx->cryptlen = req->cryptlen - ctx->authsize; in qce_aead_crypt()
[all …]
skcipher.c — line 79: rctx->cryptlen = req->cryptlen; in qce_skcipher_async_req_handle()
85 rctx->src_nents = sg_nents_for_len(req->src, req->cryptlen); in qce_skcipher_async_req_handle()
87 rctx->dst_nents = sg_nents_for_len(req->dst, req->cryptlen); in qce_skcipher_async_req_handle()
110 sg = qce_sgtable_add(&rctx->dst_tbl, req->dst, req->cryptlen); in qce_skcipher_async_req_handle()
271 if (!req->cryptlen) in qce_skcipher_crypt()
279 if (!IS_ALIGNED(req->cryptlen, blocksize)) in qce_skcipher_crypt()
292 (IS_XTS(rctx->flags) && ((req->cryptlen <= aes_sw_max_len) || in qce_skcipher_crypt()
293 (req->cryptlen > QCE_SECTOR_SIZE && in qce_skcipher_crypt()
294 req->cryptlen % QCE_SECTOR_SIZE))))) { in qce_skcipher_crypt()
301 req->dst, req->cryptlen, req->iv); in qce_skcipher_crypt()
/linux-6.15/drivers/crypto/allwinner/sun8i-ss/
sun8i-ss-cipher.c — line 33: if (areq->cryptlen == 0 || areq->cryptlen % 16) { in sun8i_ss_need_fallback()
39 sg_nents_for_len(areq->dst, areq->cryptlen) > 8) { in sun8i_ss_need_fallback()
44 len = areq->cryptlen; in sun8i_ss_need_fallback()
59 len = areq->cryptlen; in sun8i_ss_need_fallback()
112 areq->cryptlen, areq->iv); in sun8i_ss_cipher_fallback()
128 unsigned int len = areq->cryptlen; in sun8i_ss_setup_ivs()
137 offset = areq->cryptlen - ivsize; in sun8i_ss_setup_ivs()
205 areq->cryptlen, in sun8i_ss_cipher()
253 len = areq->cryptlen; in sun8i_ss_cipher()
275 len = areq->cryptlen; in sun8i_ss_cipher()
[all …]
/linux-6.15/drivers/crypto/
omap-aes-gcm.c — line 94: int alen, clen, cryptlen, assoclen, ret; in omap_aes_gcm_copy_buffers() local
102 cryptlen = req->cryptlen; in omap_aes_gcm_copy_buffers()
108 cryptlen -= authlen; in omap_aes_gcm_copy_buffers()
111 clen = ALIGN(cryptlen, AES_BLOCK_SIZE); in omap_aes_gcm_copy_buffers()
113 nsg = !!(assoclen && cryptlen); in omap_aes_gcm_copy_buffers()
131 if (cryptlen) { in omap_aes_gcm_copy_buffers()
137 ret = omap_crypto_align_sg(&tmp, cryptlen, in omap_aes_gcm_copy_buffers()
149 dd->total = cryptlen; in omap_aes_gcm_copy_buffers()
162 if (cryptlen) { in omap_aes_gcm_copy_buffers()
163 ret = omap_crypto_align_sg(&dd->out_sg, cryptlen, in omap_aes_gcm_copy_buffers()
[all …]
/linux-6.15/drivers/crypto/allwinner/sun4i-ss/
sun4i-ss-cipher.c — line 30: unsigned int ileft = areq->cryptlen; in sun4i_ss_opti_poll()
31 unsigned int oleft = areq->cryptlen; in sun4i_ss_opti_poll()
41 if (!areq->cryptlen) in sun4i_ss_opti_poll()
57 algt->stat_bytes += areq->cryptlen; in sun4i_ss_opti_poll()
74 ileft = areq->cryptlen / 4; in sun4i_ss_opti_poll()
75 oleft = areq->cryptlen / 4; in sun4i_ss_opti_poll()
166 areq->cryptlen, areq->iv); in sun4i_ss_cipher_poll_fallback()
196 unsigned int ileft = areq->cryptlen; in sun4i_ss_cipher_poll()
209 if (!areq->cryptlen) in sun4i_ss_cipher_poll()
265 ileft = areq->cryptlen; in sun4i_ss_cipher_poll()
[all …]
/linux-6.15/drivers/crypto/cavium/nitrox/
nitrox_aead.c — line 165: creq->gph.param0 = cpu_to_be16(rctx->cryptlen); in nitrox_set_creq()
166 creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen); in nitrox_set_creq()
227 rctx->cryptlen = areq->cryptlen; in nitrox_aes_gcm_enc()
229 rctx->srclen = areq->assoclen + areq->cryptlen; in nitrox_aes_gcm_enc()
261 rctx->cryptlen = areq->cryptlen - aead->authsize; in nitrox_aes_gcm_dec()
263 rctx->srclen = areq->cryptlen + areq->assoclen; in nitrox_aes_gcm_dec()
449 aead_rctx->cryptlen = areq->cryptlen; in nitrox_rfc4106_enc()
451 aead_rctx->srclen = aead_rctx->assoclen + aead_rctx->cryptlen; in nitrox_rfc4106_enc()
481 aead_rctx->cryptlen = areq->cryptlen - aead->authsize; in nitrox_rfc4106_dec()
484 areq->cryptlen - GCM_RFC4106_IV_SIZE + areq->assoclen; in nitrox_rfc4106_dec()
/linux-6.15/drivers/crypto/tegra/
tegra-se-aes.c — line 60: unsigned int cryptlen; member
273 rctx->len = req->cryptlen; in tegra_aes_do_one_req()
498 if (!req->cryptlen) in tegra_aes_crypt()
1103 rctx->cryptlen, 0); in tegra_ccm_compute_auth()
1104 offset += rctx->cryptlen; in tegra_ccm_compute_auth()
1139 if (rctx->cryptlen) { in tegra_ccm_do_ctr()
1170 rctx->cryptlen = req->cryptlen; in tegra_ccm_crypt_init()
1172 rctx->cryptlen = req->cryptlen - rctx->authsize; in tegra_ccm_crypt_init()
1283 rctx->cryptlen = req->cryptlen; in tegra_gcm_do_one_req()
1285 rctx->cryptlen = req->cryptlen - ctx->authsize; in tegra_gcm_do_one_req()
[all …]
/linux-6.15/drivers/crypto/gemini/
sl3516-ce-cipher.c — line 34: if (areq->cryptlen == 0 || areq->cryptlen % 16) { in sl3516_ce_need_fallback()
119 areq->cryptlen, areq->iv); in sl3516_ce_cipher_fallback()
147 areq->cryptlen, in sl3516_ce_cipher()
179 len = areq->cryptlen; in sl3516_ce_cipher()
189 areq->cryptlen, i, rctx->t_src[i].len, sg->offset, todo); in sl3516_ce_cipher()
196 dev_err(ce->dev, "remaining len %d/%u nr_sgs=%d\n", len, areq->cryptlen, nr_sgs); in sl3516_ce_cipher()
201 len = areq->cryptlen; in sl3516_ce_cipher()
211 areq->cryptlen, i, rctx->t_dst[i].len, sg->offset, todo); in sl3516_ce_cipher()
234 ecb->cipher.algorithm_len = areq->cryptlen; in sl3516_ce_cipher()
/linux-6.15/arch/arm64/crypto/
aes-glue.c — line 292: if (req->cryptlen <= AES_BLOCK_SIZE) { in cts_cbc_encrypt()
293 if (req->cryptlen < AES_BLOCK_SIZE) in cts_cbc_encrypt()
308 if (req->cryptlen == AES_BLOCK_SIZE) in cts_cbc_encrypt()
314 subreq.cryptlen); in cts_cbc_encrypt()
349 if (req->cryptlen <= AES_BLOCK_SIZE) { in cts_cbc_decrypt()
350 if (req->cryptlen < AES_BLOCK_SIZE) in cts_cbc_decrypt()
365 if (req->cryptlen == AES_BLOCK_SIZE) in cts_cbc_decrypt()
371 subreq.cryptlen); in cts_cbc_decrypt()
546 int tail = req->cryptlen % AES_BLOCK_SIZE; in xts_encrypt()
552 if (req->cryptlen < AES_BLOCK_SIZE) in xts_encrypt()
[all …]
/linux-6.15/drivers/crypto/intel/keembay/
keembay-ocs-aes-core.c — line 289: if (req->cryptlen < AES_BLOCK_SIZE) in kmb_ocs_sk_validate_input()
326 req->cryptlen, req->iv); in kmb_ocs_sk_common()
434 req->cryptlen, 0); in kmb_ocs_sk_prepare_inplace()
469 req->cryptlen, 0); in kmb_ocs_sk_prepare_notinplace()
484 req->cryptlen, 0); in kmb_ocs_sk_prepare_notinplace()
551 req->cryptlen, req->iv, iv_size); in kmb_ocs_sk_run()
571 req->cryptlen - iv_size, in kmb_ocs_sk_run()
580 req->cryptlen - iv_size, in kmb_ocs_sk_run()
652 req->cryptlen, req->iv); in kmb_ocs_aead_common()
745 in_size = req->cryptlen - tag_size; in kmb_ocs_aead_dma_prepare()
[all …]
/linux-6.15/drivers/crypto/xilinx/
zynqmp-aes-gcm.c — line 93: dma_size = req->cryptlen + ZYNQMP_AES_KEY_SIZE in zynqmp_aes_aead_cipher()
96 dma_size = req->cryptlen + GCM_AES_IV_SIZE; in zynqmp_aes_aead_cipher()
109 data_size = req->cryptlen; in zynqmp_aes_aead_cipher()
110 scatterwalk_map_and_copy(kbuf, req->src, 0, req->cryptlen, 0); in zynqmp_aes_aead_cipher()
191 req->cryptlen < ZYNQMP_AES_MIN_INPUT_BLK_SIZE) { in zynqmp_fallback_check()
194 if ((req->cryptlen % ZYNQMP_AES_WORD_LEN) != 0) in zynqmp_fallback_check()
198 req->cryptlen <= ZYNQMP_AES_AUTH_SIZE) { in zynqmp_fallback_check()
224 areq->cryptlen, areq->iv); in zynqmp_handle_aes_req()
/linux-6.15/drivers/crypto/aspeed/
aspeed-hace-crypto.c — line 38: areq->cryptlen, areq->iv); in aspeed_crypto_do_fallback()
52 if (areq->cryptlen == 0) in aspeed_crypto_need_fallback()
56 !IS_ALIGNED(areq->cryptlen, DES_BLOCK_SIZE)) in aspeed_crypto_need_fallback()
60 !IS_ALIGNED(areq->cryptlen, AES_BLOCK_SIZE)) in aspeed_crypto_need_fallback()
168 "nbytes", nbytes, "cryptlen", req->cryptlen); in aspeed_sk_transfer()
193 crypto_engine->cipher_addr, req->cryptlen); in aspeed_sk_start()
202 "nbytes", nbytes, "cryptlen", req->cryptlen); in aspeed_sk_start()
269 total = req->cryptlen; in aspeed_sk_start_sg()
300 total = req->cryptlen; in aspeed_sk_start_sg()
417 if (!IS_ALIGNED(req->cryptlen, DES_BLOCK_SIZE)) in aspeed_des_crypt()
[all …]
/linux-6.15/drivers/crypto/virtio/
virtio_crypto_skcipher_algs.c — line 341: src_nents = sg_nents_for_len(req->src, req->cryptlen); in __virtio_crypto_skcipher_do_req()
383 cpu_to_le32(req->cryptlen); in __virtio_crypto_skcipher_do_req()
392 dst_len = min_t(unsigned int, req->cryptlen, dst_len); in __virtio_crypto_skcipher_do_req()
394 req->cryptlen, dst_len); in __virtio_crypto_skcipher_do_req()
396 if (unlikely(req->cryptlen + dst_len + ivsize + in __virtio_crypto_skcipher_do_req()
425 req->cryptlen - AES_BLOCK_SIZE, in __virtio_crypto_skcipher_do_req()
475 if (!req->cryptlen) in virtio_crypto_skcipher_encrypt()
477 if (req->cryptlen % AES_BLOCK_SIZE) in virtio_crypto_skcipher_encrypt()
498 if (!req->cryptlen) in virtio_crypto_skcipher_decrypt()
500 if (req->cryptlen % AES_BLOCK_SIZE) in virtio_crypto_skcipher_decrypt()
[all …]
/linux-6.15/drivers/crypto/amlogic/
amlogic-gxl-cipher.c — line 30: if (areq->cryptlen == 0) in meson_cipher_need_fallback()
75 areq->cryptlen, areq->iv); in meson_cipher_do_fallback()
108 areq->cryptlen, in meson_cipher()
131 if (ivsize > areq->cryptlen) { in meson_cipher()
132 dev_err(mc->dev, "invalid ivsize=%d vs len=%d\n", ivsize, areq->cryptlen); in meson_cipher()
144 offset = areq->cryptlen - ivsize; in meson_cipher()
205 len = areq->cryptlen; in meson_cipher()
250 areq->cryptlen - ivsize, in meson_cipher()

Pages: 1 2 3 4 5