/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2016-2020 Intel Corporation
 */

#ifndef _AESNI_GCM_PMD_PRIVATE_H_
#define _AESNI_GCM_PMD_PRIVATE_H_

#include "aesni_gcm_ops.h"

/*
 * The IMB_VERSION_NUM macro was introduced in Multi-buffer version 0.50,
 * so if the macro is not defined, the library version is 0.49.
 */
#if !defined(IMB_VERSION_NUM)
#define IMB_VERSION(a, b, c) (((a) << 16) + ((b) << 8) + (c))
#define IMB_VERSION_NUM IMB_VERSION(0, 49, 0)
#endif
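
/*
 * Illustrative sketch (not from the original header): with the fallback
 * above in place, version-dependent code paths can be guarded the same way
 * whether or not the intel-ipsec-mb library itself defines IMB_VERSION_NUM,
 * e.g.:
 *
 *   #if IMB_VERSION_NUM >= IMB_VERSION(0, 52, 0)
 *   ...code that needs the newer multi-buffer API...
 *   #else
 *   ...fallback for the 0.49/0.50 code path...
 *   #endif
 */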

#define CRYPTODEV_NAME_AESNI_GCM_PMD	crypto_aesni_gcm
/**< AES-NI GCM PMD device name */

/** AES-NI GCM PMD LOGTYPE DRIVER */
extern int aesni_gcm_logtype_driver;
#define AESNI_GCM_LOG(level, fmt, ...) \
	rte_log(RTE_LOG_ ## level, aesni_gcm_logtype_driver,	\
			"%s() line %u: " fmt "\n", __func__, __LINE__,	\
					## __VA_ARGS__)
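
/*
 * Usage sketch (message text and variable are hypothetical): the macro wraps
 * rte_log() with the driver's dynamic logtype and prefixes each message with
 * the calling function and line number, e.g.:
 *
 *   AESNI_GCM_LOG(ERR, "Unsupported digest length %u", digest_len);
 */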

/* Maximum length for digest */
#define DIGEST_LENGTH_MAX 16

/** Private data structure for each virtual AESNI GCM device */
struct aesni_gcm_private {
	enum aesni_gcm_vector_mode vector_mode;
	/**< Vector mode */
	unsigned int max_nb_queue_pairs;
	/**< Max number of queue pairs supported by device */
	MB_MGR *mb_mgr;
	/**< Multi-buffer instance */
	struct aesni_gcm_ops ops[GCM_KEY_NUM];
	/**< Function pointer table of the gcm APIs */
};
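
/*
 * Selection sketch (assumed usage, names illustrative): the ops[] table above
 * is sized by GCM_KEY_NUM and is expected to be indexed by the session's
 * key-size enum, e.g. internals->ops[session->key], where session->key is the
 * enum aesni_gcm_key value stored in struct aesni_gcm_session below.
 */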

struct aesni_gcm_qp {
	const struct aesni_gcm_ops *ops;
	/**< Function pointer table of the gcm APIs */
	struct rte_ring *processed_pkts;
	/**< Ring for placing processed packets */
	struct gcm_context_data gdata_ctx; /* (16 * 5) + 8 = 88 B */
	/**< GCM parameters */
	struct rte_cryptodev_stats qp_stats; /* 8 * 4 = 32 B */
	/**< Queue pair statistics */
	struct rte_mempool *sess_mp;
	/**< Session Mempool */
	struct rte_mempool *sess_mp_priv;
	/**< Session Private Data Mempool */
	uint16_t id;
	/**< Queue Pair Identifier */
	char name[RTE_CRYPTODEV_NAME_MAX_LEN];
	/**< Unique Queue Pair Name */
	uint8_t temp_digest[DIGEST_LENGTH_MAX];
	/**< Buffer used to store the digest generated
	 * by the driver when verifying a digest provided
	 * by the user (using authentication verify operation)
	 */
} __rte_cache_aligned;
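
/*
 * Sketch of the verify pattern that temp_digest supports (assumed driver
 * behaviour, variable names are illustrative): on an authentication-verify
 * operation the driver writes the digest it computes into temp_digest and
 * compares it with the digest supplied by the user, e.g.:
 *
 *   if (memcmp(qp->temp_digest, user_digest, sess->req_digest_length) != 0)
 *           op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
 */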

/** GCM and GMAC operation types supported by the PMD */
enum aesni_gcm_operation {
	AESNI_GCM_OP_AUTHENTICATED_ENCRYPTION,
	AESNI_GCM_OP_AUTHENTICATED_DECRYPTION,
	AESNI_GMAC_OP_GENERATE,
	AESNI_GMAC_OP_VERIFY
};
75 
76 /** AESNI GCM private session structure */
77 struct aesni_gcm_session {
78 	struct {
79 		uint16_t length;
80 		uint16_t offset;
81 	} iv;
82 	/**< IV parameters */
83 	uint16_t aad_length;
84 	/**< AAD length */
85 	uint16_t req_digest_length;
86 	/**< Requested digest length */
87 	uint16_t gen_digest_length;
88 	/**< Generated digest length */
89 	enum aesni_gcm_operation op;
90 	/**< GCM operation type */
91 	enum aesni_gcm_key key;
92 	/**< GCM key type */
93 	struct gcm_key_data gdata_key;
94 	/**< GCM parameters */
95 	struct aesni_gcm_session_ops ops;
96 	/**< Session handlers */
97 };

/**
 * Set up GCM session parameters
 * @param	ops	gcm function pointer table
 * @param	sess	aesni gcm session structure
 * @param	xform	crypto transform chain
 *
 * @return
 * - On success returns 0
 * - On failure returns error code < 0
 */
extern int
aesni_gcm_set_session_parameters(const struct aesni_gcm_ops *ops,
		struct aesni_gcm_session *sess,
		const struct rte_crypto_sym_xform *xform);
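
/*
 * Call sketch (assumed caller context, names hypothetical): the PMD's session
 * configure path is expected to pass the device's per-key ops table together
 * with the session private data and the transform chain, e.g.:
 *
 *   struct aesni_gcm_private *internals = dev->data->dev_private;
 *   ret = aesni_gcm_set_session_parameters(internals->ops, sess_priv, xform);
 *   if (ret < 0)
 *           return ret;
 */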

/* Device specific operations function pointer structure */
extern struct rte_cryptodev_ops *rte_aesni_gcm_pmd_ops;

/** CPU crypto bulk process handler */
uint32_t
aesni_gcm_pmd_cpu_crypto_process(struct rte_cryptodev *dev,
	struct rte_cryptodev_sym_session *sess, union rte_crypto_sym_ofs ofs,
	struct rte_crypto_sym_vec *vec);

#endif /* _AESNI_GCM_PMD_PRIVATE_H_ */