xref: /linux-6.15/include/crypto/algapi.h (revision 65775cf3)
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <[email protected]>
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <crypto/utils.h>
#include <linux/align.h>
#include <linux/cache.h>
#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/types.h>
#include <linux/workqueue.h>

/*
 * Maximum values for blocksize and alignmask, used to allocate
 * static buffers that are big enough for any combination of
 * algs and architectures. Ciphers have a lower maximum size.
 */
#define MAX_ALGAPI_BLOCKSIZE		160
#define MAX_ALGAPI_ALIGNMASK		127
#define MAX_CIPHER_BLOCKSIZE		16
#define MAX_CIPHER_ALIGNMASK		15

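/*
 * Example (a minimal sketch, not part of this header): code that must
 * hold one block of any algorithm in a stack buffer can combine the
 * limits above and align the pointer by hand:
 *
 *	u8 buf[MAX_ALGAPI_BLOCKSIZE + MAX_ALGAPI_ALIGNMASK];
 *	u8 *block = PTR_ALIGN(buf, alignmask + 1);
 *
 * where "alignmask" stands for the algorithm's cra_alignmask. The extra
 * MAX_ALGAPI_ALIGNMASK bytes leave room for the manual alignment.
 */
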
#ifdef ARCH_DMA_MINALIGN
#define CRYPTO_DMA_ALIGN ARCH_DMA_MINALIGN
#else
#define CRYPTO_DMA_ALIGN CRYPTO_MINALIGN
#endif

#define CRYPTO_DMA_PADDING ((CRYPTO_DMA_ALIGN - 1) & ~(CRYPTO_MINALIGN - 1))

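/*
 * Worked example (assumed values, for illustration only): with
 * ARCH_DMA_MINALIGN == 128 and CRYPTO_MINALIGN == 8,
 *
 *	CRYPTO_DMA_PADDING = (128 - 1) & ~(8 - 1) = 127 & ~7 = 120
 *
 * i.e. up to 120 extra bytes are reserved so that a context allocated
 * with only CRYPTO_MINALIGN alignment can be realigned to a DMA-safe
 * boundary.
 */
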
/*
 * Autoloaded crypto modules should only use a prefixed name to avoid allowing
 * arbitrary modules to be loaded. Loading from userspace may still need the
 * unprefixed names, so those aliases are retained as well.
 * This uses __MODULE_INFO directly instead of MODULE_ALIAS because pre-4.3
 * gcc (e.g. the avr32 toolchain) uses __LINE__ for uniqueness, and this macro
 * expands twice on the same line. Instead, use a separate base name for the
 * alias.
 */
#define MODULE_ALIAS_CRYPTO(name)	\
		__MODULE_INFO(alias, alias_userspace, name);	\
		__MODULE_INFO(alias, alias_crypto, "crypto-" name)

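/*
 * Example: a module implementing "aes" would declare
 *
 *	MODULE_ALIAS_CRYPTO("aes");
 *
 * which creates both the plain alias "aes" (for userspace requests) and
 * the prefixed alias "crypto-aes" (for autoloading by the crypto API).
 */
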
struct crypto_aead;
struct crypto_instance;
struct module;
struct notifier_block;
struct rtattr;
struct scatterlist;
struct seq_file;
struct sk_buff;
union crypto_no_such_thing;

struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;

	union {
		/* Node in list of instances after registration. */
		struct hlist_node list;
		/* List of attached spawns before registration. */
		struct crypto_spawn *spawns;
	};

	struct work_struct free_work;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

	char name[CRYPTO_MAX_ALG_NAME];
};

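/*
 * Example (hypothetical names, for illustration): a template is usually
 * defined as a static object with a create() callback and registered
 * from module init code:
 *
 *	static int example_create(struct crypto_template *tmpl,
 *				  struct rtattr **tb)
 *	{
 *		// parse tb, then allocate and register a crypto_instance
 *		return -ENOSYS;
 *	}
 *
 *	static struct crypto_template example_tmpl = {
 *		.name = "example",
 *		.create = example_create,
 *		.module = THIS_MODULE,
 *	};
 *
 *	crypto_register_template(&example_tmpl);
 */
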
struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	union {
		/* Back pointer to instance after registration. */
		struct crypto_instance *inst;
		/* Spawn list pointer prior to registration. */
		struct crypto_spawn *next;
	};
	const struct crypto_type *frontend;
	u32 mask;
	bool dead;
	bool registered;
};

struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};

struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
	union {
		void *const addr;

		/* Private API field, do not touch. */
		union crypto_no_such_thing *__addr;
	};
};

struct crypto_attr_alg {
	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_attr_type {
	u32 type;
	u32 mask;
};

/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
void crypto_unregister_alg(struct crypto_alg *alg);
int crypto_register_algs(struct crypto_alg *algs, int count);
void crypto_unregister_algs(struct crypto_alg *algs, int count);

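/*
 * Example (a sketch with assumed driver names): a driver exposing
 * several algorithms typically registers them as an array from module
 * init and unregisters them on exit:
 *
 *	static struct crypto_alg my_algs[2];	// hypothetical array
 *
 *	static int __init my_init(void)
 *	{
 *		return crypto_register_algs(my_algs, ARRAY_SIZE(my_algs));
 *	}
 *
 *	static void __exit my_exit(void)
 *	{
 *		crypto_unregister_algs(my_algs, ARRAY_SIZE(my_algs));
 *	}
 */
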
void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
int crypto_register_templates(struct crypto_template *tmpls, int count);
void crypto_unregister_template(struct crypto_template *tmpl);
void crypto_unregister_templates(struct crypto_template *tmpls, int count);
struct crypto_template *crypto_lookup_template(const char *name);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst);
void crypto_unregister_instance(struct crypto_instance *inst);

int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask);
void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);

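/*
 * Example (a minimal sketch, with hypothetical names): inside a
 * template's create() callback, the spawn is typically stored in the
 * instance context, bound with crypto_grab_spawn(), and dropped on the
 * error path:
 *
 *	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
 *	int err;
 *
 *	err = crypto_grab_spawn(spawn, inst, cipher_name, 0, mask);
 *	if (err)
 *		goto err_free_inst;	// path that calls crypto_drop_spawn()
 */
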
struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret);
const char *crypto_attr_alg_name(struct rtattr *rta);
int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
void crypto_enqueue_request_head(struct crypto_queue *queue,
				 struct crypto_async_request *request);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
static inline unsigned int crypto_queue_len(struct crypto_queue *queue)
{
	return queue->qlen;
}

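/*
 * Example (a sketch; dev->queue and the max_qlen of 50 are assumed):
 * a driver sizes the queue once and then feeds requests through it:
 *
 *	crypto_init_queue(&dev->queue, 50);
 *
 *	// submission path
 *	err = crypto_enqueue_request(&dev->queue, &req->base);
 *
 *	// processing path
 *	req = crypto_dequeue_request(&dev->queue);
 *
 * crypto_enqueue_request() returns -ENOSPC when the queue is full, or
 * -EBUSY when the request was placed on the backlog instead (only done
 * when the request sets CRYPTO_TFM_REQ_MAY_BACKLOG).
 */
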
void crypto_inc(u8 *a, unsigned int size);

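/*
 * crypto_inc() treats the buffer as a big-endian integer of the given
 * size and increments it by one, e.g. to step a CTR-mode counter block:
 *
 *	crypto_inc(iv, 16);	// 16-byte counter, e.g. an AES block
 *
 * ("iv" here is illustrative; any big-endian counter buffer works.)
 */
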
static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
	return tfm->__crt_ctx;
}

static inline void *crypto_tfm_ctx_align(struct crypto_tfm *tfm,
					 unsigned int align)
{
	if (align <= crypto_tfm_ctx_alignment())
		align = 1;

	return PTR_ALIGN(crypto_tfm_ctx(tfm), align);
}

static inline unsigned int crypto_dma_align(void)
{
	return CRYPTO_DMA_ALIGN;
}

static inline unsigned int crypto_dma_padding(void)
{
	return (crypto_dma_align() - 1) & ~(crypto_tfm_ctx_alignment() - 1);
}

static inline void *crypto_tfm_ctx_dma(struct crypto_tfm *tfm)
{
	return crypto_tfm_ctx_align(tfm, crypto_dma_align());
}

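/*
 * Example (a sketch; struct my_dma_ctx is hypothetical): a driver that
 * DMAs into its tfm context reserves the extra padding when declaring
 * cra_ctxsize, then fetches the aligned pointer at runtime:
 *
 *	.cra_ctxsize = sizeof(struct my_dma_ctx) + crypto_dma_padding(),
 *
 *	struct my_dma_ctx *ctx = crypto_tfm_ctx_dma(tfm);
 *
 * The padding guarantees the returned pointer is aligned to
 * crypto_dma_align() even though the raw context only has
 * crypto_tfm_ctx_alignment().
 */
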
static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}

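/*
 * Example (a common driver pattern, sketched): when pulling work off the
 * queue, peek at the backlog first so its owner can be told the request
 * has moved from "backlogged" to "in progress":
 *
 *	backlog = crypto_get_backlog(&dev->queue);
 *	req = crypto_dequeue_request(&dev->queue);
 *	if (backlog)
 *		crypto_request_complete(backlog, -EINPROGRESS);
 *
 * (dev->queue is illustrative; locking around the queue is the caller's
 * responsibility.)
 */
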
static inline u32 crypto_requires_off(struct crypto_attr_type *algt, u32 off)
{
	return (algt->type ^ off) & algt->mask & off;
}

/*
 * When an algorithm uses another algorithm (e.g., if it's an instance of a
 * template), these are the flags that should always be set on the "outer"
 * algorithm if any "inner" algorithm has them set.
 */
#define CRYPTO_ALG_INHERITED_FLAGS	\
	(CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK |	\
	 CRYPTO_ALG_ALLOCATES_MEMORY)

/*
 * Given the type and mask that specify the flags restrictions on a template
 * instance being created, return the mask that should be passed to
 * crypto_grab_*() (along with type=0) to honor any request the user made to
 * have any of the CRYPTO_ALG_INHERITED_FLAGS clear.
 */
static inline u32 crypto_algt_inherited_mask(struct crypto_attr_type *algt)
{
	return crypto_requires_off(algt, CRYPTO_ALG_INHERITED_FLAGS);
}

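/*
 * Example (a sketch of the usual template-create flow; names are
 * illustrative): honor the caller's flag restrictions when grabbing the
 * inner algorithm, then propagate the inherited flags to the instance:
 *
 *	algt = crypto_get_attr_type(tb);
 *	mask = crypto_algt_inherited_mask(algt);
 *
 *	err = crypto_grab_spawn(spawn, inst, name, 0, mask);
 *	...
 *	inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS;
 *
 * (type-specific grab helpers follow the same pattern.)
 */
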
int crypto_register_notifier(struct notifier_block *nb);
int crypto_unregister_notifier(struct notifier_block *nb);

/* Crypto notification events. */
enum {
	CRYPTO_MSG_ALG_REQUEST,
	CRYPTO_MSG_ALG_REGISTER,
	CRYPTO_MSG_ALG_LOADED,
};

static inline void crypto_request_complete(struct crypto_async_request *req,
					   int err)
{
	req->complete(req->data, err);
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline bool crypto_request_chained(struct crypto_async_request *req)
{
	return !list_empty(&req->list);
}

static inline bool crypto_tfm_req_chain(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_REQ_CHAIN;
}

#endif	/* _CRYPTO_ALGAPI_H */