/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2018 Intel Corporation
 */

#ifndef _CRYPTO_H_
#define _CRYPTO_H_

/**
 * @file crypto.h
 * Contains crypto-specific functions/structures/macros used internally
 * by the ipsec library.
 */

/*
 * AES-CTR counter block format.
 */

struct aesctr_cnt_blk {
	uint32_t nonce;
	uint64_t iv;
	uint32_t cnt;
} __rte_packed;

/*
 * CHACHA20-POLY1305 devices have some specific requirements
 * for IV and AAD formats.
 * Ideally this would be done by the driver itself.
 */

struct aead_chacha20_poly1305_iv {
	uint32_t salt;
	uint64_t iv;
	uint32_t cnt;
} __rte_packed;

struct aead_chacha20_poly1305_aad {
	uint32_t spi;
	/*
	 * RFC 4106, section 5:
	 * Two formats of the AAD are defined:
	 * one for 32-bit sequence numbers, and one for 64-bit ESN.
	 */
	union {
		uint32_t u32[2];
		uint64_t u64;
	} sqn;
	uint32_t align0; /* align to 16B boundary */
} __rte_packed;

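/*
 * ESP header immediately followed by the 8B IV, as they appear at the
 * start of the ESP payload for ChaCha20-Poly1305.
 */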
struct chacha20_poly1305_esph_iv {
	struct rte_esp_hdr esph;
	uint64_t iv;
} __rte_packed;

/*
 * AES-GCM devices have some specific requirements for IV and AAD formats.
 * Ideally this would be done by the driver itself.
 */

struct aead_gcm_iv {
	uint32_t salt;
	uint64_t iv;
	uint32_t cnt;
} __rte_packed;

struct aead_gcm_aad {
	uint32_t spi;
	/*
	 * RFC 4106, section 5:
	 * Two formats of the AAD are defined:
	 * one for 32-bit sequence numbers, and one for 64-bit ESN.
	 */
	union {
		uint32_t u32[2];
		uint64_t u64;
	} sqn;
	uint32_t align0; /* align to 16B boundary */
} __rte_packed;

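/*
 * ESP header immediately followed by the 8B IV, as they appear at the
 * start of the ESP payload for AES-GCM.
 */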
struct gcm_esph_iv {
	struct rte_esp_hdr esph;
	uint64_t iv;
} __rte_packed;

/*
 * AES-CCM devices have some specific requirements for IV and AAD formats.
 * Ideally this would be done by the driver itself.
 */
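/*
 * Overlay used to split the SA's 32-bit salt value into the 3-byte
 * CCM salt and the CCM flags byte.
 */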
union aead_ccm_salt {
	uint32_t salt;
	struct inner {
		uint8_t salt8[3];
		uint8_t ccm_flags;
	} inner;
} __rte_packed;

struct aead_ccm_iv {
	uint8_t ccm_flags;
	uint8_t salt[3];
	uint64_t iv;
	uint32_t cnt;
} __rte_packed;

struct aead_ccm_aad {
	uint8_t padding[18];
	uint32_t spi;
	/*
	 * RFC 4309, section 5:
	 * Two formats of the AAD are defined:
	 * one for 32-bit sequence numbers, and one for 64-bit ESN.
	 */
	union {
		uint32_t u32[2];
		uint64_t u64;
	} sqn;
	uint32_t align0; /* align to 16B boundary */
} __rte_packed;

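/*
 * ESP header immediately followed by the 8B IV, as they appear at the
 * start of the ESP payload for AES-CCM.
 */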
struct ccm_esph_iv {
	struct rte_esp_hdr esph;
	uint64_t iv;
} __rte_packed;

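/*
 * Fill the AES-CTR counter block: nonce, IV and an initial block
 * counter of 1 (big-endian).
 */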
static inline void
aes_ctr_cnt_blk_fill(struct aesctr_cnt_blk *ctr, uint64_t iv, uint32_t nonce)
{
	ctr->nonce = nonce;
	ctr->iv = iv;
	ctr->cnt = rte_cpu_to_be_32(1);
}

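/*
 * Fill the ChaCha20-Poly1305 IV block: salt, IV and an initial counter
 * of 1 (big-endian).
 */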
static inline void
aead_chacha20_poly1305_iv_fill(struct aead_chacha20_poly1305_iv
			       *chacha20_poly1305,
			       uint64_t iv, uint32_t salt)
{
	chacha20_poly1305->salt = salt;
	chacha20_poly1305->iv = iv;
	chacha20_poly1305->cnt = rte_cpu_to_be_32(1);
}

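/*
 * Fill the AES-GCM IV block: salt, IV and an initial counter of 1
 * (big-endian).
 */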
static inline void
aead_gcm_iv_fill(struct aead_gcm_iv *gcm, uint64_t iv, uint32_t salt)
{
	gcm->salt = salt;
	gcm->iv = iv;
	gcm->cnt = rte_cpu_to_be_32(1);
}

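/*
 * Fill the AES-CCM IV block: split the 32-bit salt into the CCM flags
 * byte and the 3B salt, then set the IV and an initial counter of 1
 * (big-endian).
 */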
static inline void
aead_ccm_iv_fill(struct aead_ccm_iv *ccm, uint64_t iv, uint32_t salt)
{
	union aead_ccm_salt tsalt;

	tsalt.salt = salt;
	ccm->ccm_flags = tsalt.inner.ccm_flags;
	ccm->salt[0] = tsalt.inner.salt8[0];
	ccm->salt[1] = tsalt.inner.salt8[1];
	ccm->salt[2] = tsalt.inner.salt8[2];
	ccm->iv = iv;
	ccm->cnt = rte_cpu_to_be_32(1);
}

/*
 * RFC 4106, section 5: AAD Construction.
 * spi and sqn should already be converted to network byte order.
 * Make sure that unused bytes are zeroed.
 */
static inline void
aead_gcm_aad_fill(struct aead_gcm_aad *aad, rte_be32_t spi, rte_be64_t sqn,
	int esn)
{
	aad->spi = spi;
	if (esn)
		aad->sqn.u64 = sqn;
	else {
		aad->sqn.u32[0] = sqn_low32(sqn);
		aad->sqn.u32[1] = 0;
	}
	aad->align0 = 0;
}

/*
 * RFC 4309, section 5: AAD Construction.
 * spi and sqn should already be converted to network byte order.
 * Make sure that unused bytes are zeroed.
 */
static inline void
aead_ccm_aad_fill(struct aead_ccm_aad *aad, rte_be32_t spi, rte_be64_t sqn,
	int esn)
{
	aad->spi = spi;
	if (esn)
		aad->sqn.u64 = sqn;
	else {
		aad->sqn.u32[0] = sqn_low32(sqn);
		aad->sqn.u32[1] = 0;
	}
	aad->align0 = 0;
}

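/*
 * Generate the 16B per-packet IV: the low qword holds the (big-endian)
 * sequence number, the high qword is zeroed.
 */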
static inline void
gen_iv(uint64_t iv[IPSEC_MAX_IV_QWORD], rte_be64_t sqn)
{
	iv[0] = sqn;
	iv[1] = 0;
}

/*
 * RFC 7634, section 2.1: AAD Construction.
 * spi and sqn should already be converted to network byte order.
 * Make sure that unused bytes are zeroed.
 */
static inline void
aead_chacha20_poly1305_aad_fill(struct aead_chacha20_poly1305_aad *aad,
					rte_be32_t spi, rte_be64_t sqn,
					int esn)
{
	aad->spi = spi;
	if (esn)
		aad->sqn.u64 = sqn;
	else {
		aad->sqn.u32[0] = sqn_low32(sqn);
		aad->sqn.u32[1] = 0;
	}
	aad->align0 = 0;
}

/*
 * Helper routine to copy IV.
 * Right now we support only algorithms with IV length equal to 0/8/16 bytes.
 */
static inline void
copy_iv(uint64_t dst[IPSEC_MAX_IV_QWORD],
	const uint64_t src[IPSEC_MAX_IV_QWORD], uint32_t len)
{
	RTE_BUILD_BUG_ON(IPSEC_MAX_IV_SIZE != 2 * sizeof(uint64_t));

	switch (len) {
	case IPSEC_MAX_IV_SIZE:
		dst[1] = src[1];
		/* fallthrough */
	case sizeof(uint64_t):
		dst[0] = src[0];
		/* fallthrough */
	case 0:
		break;
	default:
		/* should never happen */
		RTE_ASSERT(NULL);
	}
}

/*
 * From RFC 4303, section 3.3.2.1.4:
 * If the ESN option is enabled for the SA, the high-order 32
 * bits of the sequence number are appended after the Next Header field
 * for purposes of this computation, but are not transmitted.
 */

/*
 * Helper function that moves the ICV down by 4B and inserts SQN.hibits
 * in front of it. The icv parameter points to the new start of the ICV.
 * Note that i equals -1 when the shift loop below terminates, so sqh is
 * written into the 4B slot freed up just before the relocated ICV.
 */
static inline void
insert_sqh(uint32_t sqh, void *picv, uint32_t icv_len)
{
	uint32_t *icv;
	int32_t i;

	RTE_ASSERT(icv_len % sizeof(uint32_t) == 0);

	icv = picv;
	icv_len = icv_len / sizeof(uint32_t);
	for (i = icv_len; i-- != 0; icv[i] = icv[i - 1])
		;

	icv[i] = sqh;
}

/*
 * Helper function that moves the ICV up by 4B and removes SQN.hibits.
 * The icv parameter points to the new start of the ICV.
 */
static inline void
remove_sqh(void *picv, uint32_t icv_len)
{
	uint32_t i, *icv;

	RTE_ASSERT(icv_len % sizeof(uint32_t) == 0);

	icv = picv;
	icv_len = icv_len / sizeof(uint32_t);
	for (i = 0; i != icv_len; i++)
		icv[i] = icv[i + 1];
}

/*
 * Set up crypto ops for LOOKASIDE_NONE (pure crypto) type of devices.
 */
static inline void
lksd_none_cop_prepare(struct rte_crypto_op *cop,
	struct rte_cryptodev_sym_session *cs, struct rte_mbuf *mb)
{
	struct rte_crypto_sym_op *sop;

	sop = cop->sym;
	cop->type = RTE_CRYPTO_OP_TYPE_SYMMETRIC;
	cop->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
	cop->sess_type = RTE_CRYPTO_OP_WITH_SESSION;
	sop->m_src = mb;
	__rte_crypto_sym_op_attach_sym_session(sop, cs);
}

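/*
 * Usage sketch (illustrative only, not part of the library API): for an
 * outbound AES-GCM operation a caller would typically combine the helpers
 * above roughly as follows, where spi, sqn, salt and is_esn are hypothetical
 * caller-side values taken from the SA and the packet:
 *
 *	uint64_t iv[IPSEC_MAX_IV_QWORD];
 *	struct aead_gcm_iv giv;
 *	struct aead_gcm_aad gaad;
 *
 *	gen_iv(iv, sqn);
 *	aead_gcm_iv_fill(&giv, iv[0], salt);
 *	aead_gcm_aad_fill(&gaad, spi, sqn, is_esn);
 */
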
#endif /* _CRYPTO_H_ */