/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <[email protected]>
 * Copyright (c) 2002 David S. Miller ([email protected])
 * Copyright (c) 2005 Herbert Xu <[email protected]>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <[email protected]>
 * and Nettle, by Niels Möller.
 */
#ifndef _LINUX_CRYPTO_H
#define _LINUX_CRYPTO_H

#include <linux/atomic.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/bug.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/uaccess.h>
#include <linux/completion.h>

/*
 * Autoloaded crypto modules should only use a prefixed name to avoid allowing
 * arbitrary modules to be loaded. Loading from userspace may still need the
 * unprefixed names, so those aliases are retained as well.
 * This uses __MODULE_INFO directly instead of MODULE_ALIAS because pre-4.3
 * gcc (e.g. the avr32 toolchain) uses __LINE__ for uniqueness, and this macro
 * would expand twice on the same line. Instead, use a separate base name for
 * the alias.
 */
#define MODULE_ALIAS_CRYPTO(name)	\
		__MODULE_INFO(alias, alias_userspace, name);	\
		__MODULE_INFO(alias, alias_crypto, "crypto-" name)
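
/*
 * For example, a module implementing AES would typically declare (an
 * illustrative sketch, not a definition from this header):
 *
 *	MODULE_ALIAS_CRYPTO("aes");
 *
 * so that it can be autoloaded via the restricted "crypto-aes" alias while
 * remaining loadable from userspace under the plain name "aes".
 */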

/*
 * Algorithm masks and types.
 */
#define CRYPTO_ALG_TYPE_MASK		0x0000000f
#define CRYPTO_ALG_TYPE_CIPHER		0x00000001
#define CRYPTO_ALG_TYPE_COMPRESS	0x00000002
#define CRYPTO_ALG_TYPE_AEAD		0x00000003
#define CRYPTO_ALG_TYPE_SKCIPHER	0x00000005
#define CRYPTO_ALG_TYPE_KPP		0x00000008
#define CRYPTO_ALG_TYPE_ACOMPRESS	0x0000000a
#define CRYPTO_ALG_TYPE_SCOMPRESS	0x0000000b
#define CRYPTO_ALG_TYPE_RNG		0x0000000c
#define CRYPTO_ALG_TYPE_AKCIPHER	0x0000000d
#define CRYPTO_ALG_TYPE_HASH		0x0000000e
#define CRYPTO_ALG_TYPE_SHASH		0x0000000e
#define CRYPTO_ALG_TYPE_AHASH		0x0000000f

#define CRYPTO_ALG_TYPE_HASH_MASK	0x0000000e
#define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000e
#define CRYPTO_ALG_TYPE_ACOMPRESS_MASK	0x0000000e

#define CRYPTO_ALG_LARVAL		0x00000010
#define CRYPTO_ALG_DEAD			0x00000020
#define CRYPTO_ALG_DYING		0x00000040
#define CRYPTO_ALG_ASYNC		0x00000080

/*
 * Set this bit if and only if the algorithm requires another algorithm of
 * the same type to handle corner cases.
 */
#define CRYPTO_ALG_NEED_FALLBACK	0x00000100

/*
 * Set if the algorithm has passed automated run-time testing.  Note that
 * if there is no run-time testing for a given algorithm it is considered
 * to have passed.
 */
#define CRYPTO_ALG_TESTED		0x00000400

/*
 * Set if the algorithm is an instance that is built from templates.
 */
#define CRYPTO_ALG_INSTANCE		0x00000800

/*
 * Set this bit if the algorithm provided is hardware accelerated but is not
 * available to userspace via an instruction set or similar means.
 */
#define CRYPTO_ALG_KERN_DRIVER_ONLY	0x00001000

/*
 * Mark a cipher as a service implementation only usable by another
 * cipher and never by a normal user of the kernel crypto API.
 */
#define CRYPTO_ALG_INTERNAL		0x00002000

/*
 * Set if the algorithm has a ->setkey() method but can be used without
 * calling it first, i.e. there is a default key.
 */
#define CRYPTO_ALG_OPTIONAL_KEY		0x00004000

/*
 * Don't trigger module loading.
 */
#define CRYPTO_NOLOAD			0x00008000
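
/*
 * The type and mask arguments of the allocation functions below select
 * among these flags. For instance, a purely synchronous implementation of
 * an algorithm can be requested by masking out CRYPTO_ALG_ASYNC. An
 * illustrative sketch (crypto_alloc_base() is declared later in this
 * header):
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_base("aes", 0, CRYPTO_ALG_ASYNC);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 */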

/*
 * Transform masks and values (for crt_flags).
 */
#define CRYPTO_TFM_NEED_KEY		0x00000001

#define CRYPTO_TFM_REQ_MASK		0x000fff00
#define CRYPTO_TFM_RES_MASK		0xfff00000

#define CRYPTO_TFM_REQ_FORBID_WEAK_KEYS	0x00000100
#define CRYPTO_TFM_REQ_MAY_SLEEP	0x00000200
#define CRYPTO_TFM_REQ_MAY_BACKLOG	0x00000400
#define CRYPTO_TFM_RES_WEAK_KEY		0x00100000
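
/*
 * The CRYPTO_TFM_REQ_* flags are set on a transform by its user, e.g. to
 * have known-weak keys rejected by ->setkey(). A sketch, assuming a
 * previously allocated cipher handle (crypto_cipher_set_flags() and
 * crypto_cipher_setkey() appear later in this header):
 *
 *	crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
 *	err = crypto_cipher_setkey(tfm, key, keylen);
 */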

/*
 * Miscellaneous stuff.
 */
#define CRYPTO_MAX_ALG_NAME		128

/*
 * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
 * declaration) is used to ensure that the crypto_tfm context structure is
 * aligned correctly for the given architecture so that there are no alignment
 * faults for C data types.  In particular, this is required on platforms such
 * as arm where pointers are 32-bit aligned but there are data types such as
 * u64 which require 64-bit alignment.
 */
#define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN

#define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))

struct scatterlist;
struct crypto_async_request;
struct crypto_tfm;
struct crypto_type;

typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);

/**
 * DOC: Block Cipher Context Data Structures
 *
 * These data structures define the operating context for each block cipher
 * type.
 */

struct crypto_async_request {
	struct list_head list;
	crypto_completion_t complete;
	void *data;
	struct crypto_tfm *tfm;

	u32 flags;
};
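
/*
 * The completion callback receives the error status of the request. A
 * typical callback ignores -EINPROGRESS, which only signals that a
 * backlogged request has moved into the processing queue. An illustrative
 * sketch, where struct my_request_ctx is a hypothetical caller-defined
 * context:
 *
 *	static void my_crypto_done(struct crypto_async_request *req, int err)
 *	{
 *		struct my_request_ctx *ctx = req->data;
 *
 *		if (err == -EINPROGRESS)
 *			return;
 *		complete(&ctx->done);
 *	}
 */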

/**
 * DOC: Block Cipher Algorithm Definitions
 *
 * These data structures define modular crypto algorithm implementations,
 * managed via crypto_register_alg() and crypto_unregister_alg().
 */

/**
 * struct cipher_alg - single-block symmetric ciphers definition
 * @cia_min_keysize: Minimum key size supported by the transformation. This is
 *		     the smallest key length supported by this transformation
 *		     algorithm. This must be set to one of the pre-defined
 *		     values as this is not hardware specific. Possible values
 *		     for this field can be found via git grep "_MIN_KEY_SIZE"
 *		     include/crypto/
 * @cia_max_keysize: Maximum key size supported by the transformation. This is
 *		     the largest key length supported by this transformation
 *		     algorithm. This must be set to one of the pre-defined
 *		     values as this is not hardware specific. Possible values
 *		     for this field can be found via git grep "_MAX_KEY_SIZE"
 *		     include/crypto/
 * @cia_setkey: Set key for the transformation. This function is used to either
 *	        program a supplied key into the hardware or store the key in the
 *	        transformation context for programming it later. Note that this
 *	        function does modify the transformation context. This function
 *	        can be called multiple times during the existence of the
 *	        transformation object, so one must make sure the key is properly
 *	        reprogrammed into the hardware. This function is also
 *	        responsible for checking the key length for validity.
 * @cia_encrypt: Encrypt a single block. This function is used to encrypt a
 *		 single block of data, which must be @cra_blocksize in size.
 *		 This always operates on a full @cra_blocksize and it is not
 *		 possible to encrypt a block of smaller size. The supplied
 *		 buffers must therefore also be at least @cra_blocksize in
 *		 size. Both the input and output buffers are always aligned to
 *		 @cra_alignmask. In case either the input or the output buffer
 *		 supplied by the user of the crypto API is not aligned to
 *		 @cra_alignmask, the crypto API will re-align the buffers. The
 *		 re-alignment means that a new buffer will be allocated, the
 *		 data will be copied into the new buffer, the processing will
 *		 happen on the new buffer, then the data will be copied back
 *		 into the original buffer and finally the new buffer will be
 *		 freed. In case a software fallback was put in place in the
 *		 @cra_init call, this function might need to use the fallback
 *		 if the algorithm doesn't support all of the key sizes. In case
 *		 the key was stored in the transformation context, the key
 *		 might need to be re-programmed into the hardware in this
 *		 function. This function shall not modify the transformation
 *		 context, as this function may be called in parallel with the
 *		 same transformation object.
 * @cia_decrypt: Decrypt a single block. This is a reverse counterpart to
 *		 @cia_encrypt, and the conditions are exactly the same.
 *
 * All fields are mandatory and must be filled.
 */
struct cipher_alg {
	unsigned int cia_min_keysize;
	unsigned int cia_max_keysize;
	int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
	                  unsigned int keylen);
	void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};
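
/*
 * A conforming implementation operates on exactly one @cra_blocksize chunk
 * per call. An illustrative sketch, where struct my_tfm_ctx and
 * my_encrypt_block() are assumed driver code and crypto_tfm_ctx() is
 * declared later in this header:
 *
 *	static void my_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 *	{
 *		const struct my_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
 *
 *		my_encrypt_block(ctx, dst, src);
 *	}
 *
 * Note that, per the rules above, the function must not modify the
 * transformation context.
 */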

/**
 * struct compress_alg - compression/decompression algorithm
 * @coa_compress: Compress a buffer of specified length, storing the resulting
 *		  data in the specified buffer. Return the length of the
 *		  compressed data in dlen.
 * @coa_decompress: Decompress the source buffer, storing the uncompressed
 *		    data in the specified buffer. The length of the data is
 *		    returned in dlen.
 *
 * All fields are mandatory.
 */
struct compress_alg {
	int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
			    unsigned int slen, u8 *dst, unsigned int *dlen);
	int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
			      unsigned int slen, u8 *dst, unsigned int *dlen);
};

#ifdef CONFIG_CRYPTO_STATS
/*
 * struct crypto_istat_aead - statistics for AEAD algorithm
 * @encrypt_cnt:	number of encrypt requests
 * @encrypt_tlen:	total data size handled by encrypt requests
 * @decrypt_cnt:	number of decrypt requests
 * @decrypt_tlen:	total data size handled by decrypt requests
 * @err_cnt:		number of errors for AEAD requests
 */
struct crypto_istat_aead {
	atomic64_t encrypt_cnt;
	atomic64_t encrypt_tlen;
	atomic64_t decrypt_cnt;
	atomic64_t decrypt_tlen;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_akcipher - statistics for akcipher algorithm
 * @encrypt_cnt:	number of encrypt requests
 * @encrypt_tlen:	total data size handled by encrypt requests
 * @decrypt_cnt:	number of decrypt requests
 * @decrypt_tlen:	total data size handled by decrypt requests
 * @verify_cnt:		number of verify operations
 * @sign_cnt:		number of sign requests
 * @err_cnt:		number of errors for akcipher requests
 */
struct crypto_istat_akcipher {
	atomic64_t encrypt_cnt;
	atomic64_t encrypt_tlen;
	atomic64_t decrypt_cnt;
	atomic64_t decrypt_tlen;
	atomic64_t verify_cnt;
	atomic64_t sign_cnt;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_cipher - statistics for cipher algorithm
 * @encrypt_cnt:	number of encrypt requests
 * @encrypt_tlen:	total data size handled by encrypt requests
 * @decrypt_cnt:	number of decrypt requests
 * @decrypt_tlen:	total data size handled by decrypt requests
 * @err_cnt:		number of errors for cipher requests
 */
struct crypto_istat_cipher {
	atomic64_t encrypt_cnt;
	atomic64_t encrypt_tlen;
	atomic64_t decrypt_cnt;
	atomic64_t decrypt_tlen;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_compress - statistics for compress algorithm
 * @compress_cnt:	number of compress requests
 * @compress_tlen:	total data size handled by compress requests
 * @decompress_cnt:	number of decompress requests
 * @decompress_tlen:	total data size handled by decompress requests
 * @err_cnt:		number of errors for compress requests
 */
struct crypto_istat_compress {
	atomic64_t compress_cnt;
	atomic64_t compress_tlen;
	atomic64_t decompress_cnt;
	atomic64_t decompress_tlen;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_hash - statistics for hash algorithm
 * @hash_cnt:		number of hash requests
 * @hash_tlen:		total data size hashed
 * @err_cnt:		number of errors for hash requests
 */
struct crypto_istat_hash {
	atomic64_t hash_cnt;
	atomic64_t hash_tlen;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_kpp - statistics for KPP algorithm
 * @setsecret_cnt:		number of setsecret operations
 * @generate_public_key_cnt:	number of generate_public_key operations
 * @compute_shared_secret_cnt:	number of compute_shared_secret operations
 * @err_cnt:			number of errors for KPP requests
 */
struct crypto_istat_kpp {
	atomic64_t setsecret_cnt;
	atomic64_t generate_public_key_cnt;
	atomic64_t compute_shared_secret_cnt;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_rng - statistics for RNG algorithm
 * @generate_cnt:	number of RNG generate requests
 * @generate_tlen:	total size of data generated by the RNG
 * @seed_cnt:		number of times the RNG was seeded
 * @err_cnt:		number of errors for RNG requests
 */
struct crypto_istat_rng {
	atomic64_t generate_cnt;
	atomic64_t generate_tlen;
	atomic64_t seed_cnt;
	atomic64_t err_cnt;
};
#endif /* CONFIG_CRYPTO_STATS */

#define cra_cipher	cra_u.cipher
#define cra_compress	cra_u.compress

/**
 * struct crypto_alg - definition of a cryptographic cipher algorithm
 * @cra_flags: Flags describing this transformation. See include/linux/crypto.h
 *	       CRYPTO_ALG_* flags for the flags which go in here. Those are
 *	       used for fine-tuning the description of the transformation
 *	       algorithm.
 * @cra_blocksize: Minimum block size of this transformation. The size in bytes
 *		   of the smallest possible unit which can be transformed with
 *		   this algorithm. The users must respect this value.
 *		   In case of HASH transformation, it is possible for a smaller
 *		   block than @cra_blocksize to be passed to the crypto API for
 *		   transformation; in case of any other transformation type, an
 *		   error will be returned upon any attempt to transform smaller
 *		   than @cra_blocksize chunks.
 * @cra_ctxsize: Size of the operational context of the transformation. This
 *		 value informs the kernel crypto API about the memory size
 *		 needed to be allocated for the transformation context.
 * @cra_alignmask: Alignment mask for the input and output data buffer. The
 *		   data buffer containing the input data for the algorithm must
 *		   be aligned to this alignment mask. The data buffer for the
 *		   output data must be aligned to this alignment mask. Note that
 *		   the Crypto API will do the re-alignment in software, but
 *		   only under special conditions and there is a performance hit.
 *		   The re-alignment happens at these occasions for different
 *		   @cra_u types: cipher -- For both input data and output data
 *		   buffer; ahash -- For output hash destination buf; shash --
 *		   For output hash destination buf.
 *		   This is needed on hardware which is flawed by design and
 *		   cannot pick data from arbitrary addresses.
 * @cra_priority: Priority of this transformation implementation. In case
 *		  multiple transformations with same @cra_name are available to
 *		  the Crypto API, the kernel will use the one with highest
 *		  @cra_priority.
 * @cra_name: Generic name (usable by multiple implementations) of the
 *	      transformation algorithm. This is the name of the transformation
 *	      itself. This field is used by the kernel when looking up the
 *	      providers of particular transformation.
 * @cra_driver_name: Unique name of the transformation provider. This is the
 *		     name of the provider of the transformation. This can be any
 *		     arbitrary value, but in the usual case, this contains the
 *		     name of the chip or provider and the name of the
 *		     transformation algorithm.
 * @cra_type: Type of the cryptographic transformation. This is a pointer to
 *	      struct crypto_type, which implements callbacks common for all
 *	      transformation types. There are multiple options, such as
 *	      &crypto_skcipher_type, &crypto_ahash_type, &crypto_rng_type.
 *	      This field might be empty. In that case, there are no common
 *	      callbacks. This is the case for: cipher, compress, shash.
 * @cra_u: Callbacks implementing the transformation. This is a union of
 *	   multiple structures. Depending on the type of transformation selected
 *	   by @cra_type and @cra_flags above, the associated structure must be
 *	   filled with callbacks. This field might be empty. This is the case
 *	   for ahash, shash.
 * @cra_init: Initialize the cryptographic transformation object. This function
 *	      is used to initialize the cryptographic transformation object.
 *	      This function is called only once at the instantiation time, right
 *	      after the transformation context was allocated. In case the
 *	      cryptographic hardware has some special requirements which need to
 *	      be handled by software, this function shall check for the precise
 *	      requirement of the transformation and put any software fallbacks
 *	      in place.
 * @cra_exit: Deinitialize the cryptographic transformation object. This is a
 *	      counterpart to @cra_init, used to remove various changes set in
 *	      @cra_init.
 * @cra_u.cipher: Union member which contains a single-block symmetric cipher
 *		  definition. See struct cipher_alg.
 * @cra_u.compress: Union member which contains a (de)compression algorithm.
 *		    See struct compress_alg.
 * @cra_module: Owner of this transformation implementation. Set to THIS_MODULE.
 * @cra_list: internally used
 * @cra_users: internally used
 * @cra_refcnt: internally used
 * @cra_destroy: internally used
 *
 * @stats: union of all possible crypto_istat_xxx structures
 * @stats.aead:		statistics for AEAD algorithm
 * @stats.akcipher:	statistics for akcipher algorithm
 * @stats.cipher:	statistics for cipher algorithm
 * @stats.compress:	statistics for compress algorithm
 * @stats.hash:		statistics for hash algorithm
 * @stats.rng:		statistics for rng algorithm
 * @stats.kpp:		statistics for KPP algorithm
 *
 * The struct crypto_alg describes a generic Crypto API algorithm and is common
 * for all of the transformations. Any variable not documented here shall not
 * be used by a cipher implementation as it is internal to the Crypto API.
 */
struct crypto_alg {
	struct list_head cra_list;
	struct list_head cra_users;

	u32 cra_flags;
	unsigned int cra_blocksize;
	unsigned int cra_ctxsize;
	unsigned int cra_alignmask;

	int cra_priority;
	refcount_t cra_refcnt;

	char cra_name[CRYPTO_MAX_ALG_NAME];
	char cra_driver_name[CRYPTO_MAX_ALG_NAME];

	const struct crypto_type *cra_type;

	union {
		struct cipher_alg cipher;
		struct compress_alg compress;
	} cra_u;

	int (*cra_init)(struct crypto_tfm *tfm);
	void (*cra_exit)(struct crypto_tfm *tfm);
	void (*cra_destroy)(struct crypto_alg *alg);

	struct module *cra_module;

#ifdef CONFIG_CRYPTO_STATS
	union {
		struct crypto_istat_aead aead;
		struct crypto_istat_akcipher akcipher;
		struct crypto_istat_cipher cipher;
		struct crypto_istat_compress compress;
		struct crypto_istat_hash hash;
		struct crypto_istat_rng rng;
		struct crypto_istat_kpp kpp;
	} stats;
#endif /* CONFIG_CRYPTO_STATS */

} CRYPTO_MINALIGN_ATTR;
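
/*
 * A driver would typically fill in a struct crypto_alg and register it.
 * An illustrative sketch for a hypothetical single-block cipher
 * ("my-cipher", struct my_tfm_ctx and the my_* functions are assumed
 * driver names, not kernel APIs):
 *
 *	static struct crypto_alg my_alg = {
 *		.cra_name	 = "my-cipher",
 *		.cra_driver_name = "my-cipher-generic",
 *		.cra_priority	 = 100,
 *		.cra_flags	 = CRYPTO_ALG_TYPE_CIPHER,
 *		.cra_blocksize	 = 16,
 *		.cra_ctxsize	 = sizeof(struct my_tfm_ctx),
 *		.cra_module	 = THIS_MODULE,
 *		.cra_u		 = { .cipher = {
 *			.cia_min_keysize = 16,
 *			.cia_max_keysize = 32,
 *			.cia_setkey	 = my_setkey,
 *			.cia_encrypt	 = my_encrypt,
 *			.cia_decrypt	 = my_decrypt,
 *		} },
 *	};
 */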

#ifdef CONFIG_CRYPTO_STATS
void crypto_stats_init(struct crypto_alg *alg);
void crypto_stats_get(struct crypto_alg *alg);
void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret);
void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret);
void crypto_stats_ahash_update(unsigned int nbytes, int ret, struct crypto_alg *alg);
void crypto_stats_ahash_final(unsigned int nbytes, int ret, struct crypto_alg *alg);
void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret, struct crypto_alg *alg);
void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret, struct crypto_alg *alg);
void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg);
void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg);
void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg);
void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg);
void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret);
void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret);
void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret);
void crypto_stats_rng_seed(struct crypto_alg *alg, int ret);
void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen, int ret);
void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg);
void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg);
#else
static inline void crypto_stats_init(struct crypto_alg *alg)
{}
static inline void crypto_stats_get(struct crypto_alg *alg)
{}
static inline void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_ahash_update(unsigned int nbytes, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_ahash_final(unsigned int nbytes, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen, int ret)
{}
static inline void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg)
{}
#endif

/*
 * A helper struct for waiting for completion of async crypto ops
 */
struct crypto_wait {
	struct completion completion;
	int err;
};

/*
 * Macro for declaring a crypto op async wait object on stack
 */
#define DECLARE_CRYPTO_WAIT(_wait) \
	struct crypto_wait _wait = { \
		COMPLETION_INITIALIZER_ONSTACK((_wait).completion), 0 }

/*
 * Async ops completion helper functions
 */
void crypto_req_done(struct crypto_async_request *req, int err);

static inline int crypto_wait_req(int err, struct crypto_wait *wait)
{
	switch (err) {
	case -EINPROGRESS:
	case -EBUSY:
		wait_for_completion(&wait->completion);
		reinit_completion(&wait->completion);
		err = wait->err;
		break;
	}

	return err;
}

static inline void crypto_init_wait(struct crypto_wait *wait)
{
	init_completion(&wait->completion);
}
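
/*
 * The typical calling sequence ties an asynchronous request to a
 * crypto_wait via crypto_req_done() and then waits for it. A sketch using
 * the skcipher API from <crypto/skcipher.h> (not declared in this header):
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *				      CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *
 * crypto_wait_req() transparently handles both the synchronous case and
 * the asynchronous -EINPROGRESS/-EBUSY cases.
 */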

/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
void crypto_unregister_alg(struct crypto_alg *alg);
int crypto_register_algs(struct crypto_alg *algs, int count);
void crypto_unregister_algs(struct crypto_alg *algs, int count);
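
/*
 * Registration is typically driven from module init/exit. A sketch,
 * continuing the hypothetical my_alg example above:
 *
 *	static int __init my_mod_init(void)
 *	{
 *		return crypto_register_alg(&my_alg);
 *	}
 *
 *	static void __exit my_mod_exit(void)
 *	{
 *		crypto_unregister_alg(&my_alg);
 *	}
 *
 * Drivers providing several algorithms can pass an array to
 * crypto_register_algs(), which unwinds the registrations on failure.
 */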

/*
 * Algorithm query interface.
 */
int crypto_has_alg(const char *name, u32 type, u32 mask);

/*
 * Transforms: user-instantiated objects which encapsulate algorithms
 * and core processing logic.  Managed via crypto_alloc_*() and
 * crypto_free_*(), as well as the various helpers below.
 */

struct crypto_tfm {

	u32 crt_flags;

	void (*exit)(struct crypto_tfm *tfm);

	struct crypto_alg *__crt_alg;

	void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_cipher {
	struct crypto_tfm base;
};

struct crypto_comp {
	struct crypto_tfm base;
};

enum {
	CRYPTOA_UNSPEC,
	CRYPTOA_ALG,
	CRYPTOA_TYPE,
	CRYPTOA_U32,
	__CRYPTOA_MAX,
};

#define CRYPTOA_MAX (__CRYPTOA_MAX - 1)

/* Maximum number of (rtattr) parameters for each template. */
#define CRYPTO_MAX_ATTRS 32

struct crypto_attr_alg {
	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_attr_type {
	u32 type;
	u32 mask;
};

struct crypto_attr_u32 {
	u32 num;
};

/*
 * Transform user interface.
 */

struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm);

static inline void crypto_free_tfm(struct crypto_tfm *tfm)
{
	return crypto_destroy_tfm(tfm, tfm);
}

int alg_test(const char *driver, const char *alg, u32 type, u32 mask);

/*
 * Transform helpers which query the underlying algorithm.
 */
static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_name;
}

static inline const char *crypto_tfm_alg_driver_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_driver_name;
}

static inline int crypto_tfm_alg_priority(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_priority;
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_blocksize;
}

static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_alignmask;
}

static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm)
{
	return tfm->crt_flags;
}

static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags |= flags;
}

static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags &= ~flags;
}

static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
	return tfm->__crt_ctx;
}

static inline unsigned int crypto_tfm_ctx_alignment(void)
{
	struct crypto_tfm *tfm;
	return __alignof__(tfm->__crt_ctx);
}
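
/*
 * crypto_tfm_ctx() is how an implementation reaches the per-transform
 * context whose size it announced in @cra_ctxsize. A sketch of a
 * hypothetical ->setkey() storing the key in the context (struct
 * my_tfm_ctx is an assumed driver type, not part of this header):
 *
 *	static int my_setkey(struct crypto_tfm *tfm, const u8 *key,
 *			     unsigned int keylen)
 *	{
 *		struct my_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
 *
 *		if (keylen != 16)
 *			return -EINVAL;
 *		memcpy(ctx->key, key, keylen);
 *		return 0;
 *	}
 */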

/**
 * DOC: Single Block Cipher API
 *
 * The single block cipher API is used with the ciphers of type
 * CRYPTO_ALG_TYPE_CIPHER (listed as type "cipher" in /proc/crypto).
 *
 * Using the single block cipher API calls, operations with the basic cipher
 * primitive can be implemented. These cipher primitives exclude any block
 * chaining operations including IV handling.
 *
 * The purpose of this single block cipher API is to support the implementation
 * of templates or other concepts that only need to perform the cipher operation
 * on one block at a time. Templates invoke the underlying cipher primitive
 * block-wise and process either the input or the output data of these cipher
 * operations.
 */

static inline struct crypto_cipher *__crypto_cipher_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_cipher *)tfm;
}

/**
 * crypto_alloc_cipher() - allocate single block cipher handle
 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
 *	      single block cipher
 * @type: specifies the type of the cipher
 * @mask: specifies the mask for the cipher
 *
 * Allocate a cipher handle for a single block cipher. The returned struct
 * crypto_cipher is the cipher handle that is required for any subsequent API
 * invocation for that single block cipher.
 *
 * Return: allocated cipher handle in case of success; IS_ERR() is true in case
 *	   of an error, PTR_ERR() returns the error code.
 */
static inline struct crypto_cipher *crypto_alloc_cipher(const char *alg_name,
							u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_CIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_cipher_tfm(struct crypto_cipher *tfm)
{
	return &tfm->base;
}

/**
 * crypto_free_cipher() - zeroize and free the single block cipher handle
 * @tfm: cipher handle to be freed
 */
static inline void crypto_free_cipher(struct crypto_cipher *tfm)
{
	crypto_free_tfm(crypto_cipher_tfm(tfm));
}

/**
 * crypto_has_cipher() - Search for the availability of a single block cipher
 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
 *	      single block cipher
 * @type: specifies the type of the cipher
 * @mask: specifies the mask for the cipher
 *
 * Return: true when the single block cipher is known to the kernel crypto API;
 *	   false otherwise
 */
static inline int crypto_has_cipher(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_CIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

/**
 * crypto_cipher_blocksize() - obtain block size for cipher
 * @tfm: cipher handle
 *
 * The block size for the single block cipher referenced with the cipher handle
 * tfm is returned. The caller may use that information to allocate appropriate
 * memory for the data returned by the encryption or decryption operation.
 *
 * Return: block size of cipher
 */
static inline unsigned int crypto_cipher_blocksize(struct crypto_cipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_cipher_tfm(tfm));
}

static inline unsigned int crypto_cipher_alignmask(struct crypto_cipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_cipher_tfm(tfm));
}

static inline u32 crypto_cipher_get_flags(struct crypto_cipher *tfm)
{
	return crypto_tfm_get_flags(crypto_cipher_tfm(tfm));
}

static inline void crypto_cipher_set_flags(struct crypto_cipher *tfm,
					   u32 flags)
{
	crypto_tfm_set_flags(crypto_cipher_tfm(tfm), flags);
}

static inline void crypto_cipher_clear_flags(struct crypto_cipher *tfm,
					     u32 flags)
{
	crypto_tfm_clear_flags(crypto_cipher_tfm(tfm), flags);
}

/**
 * crypto_cipher_setkey() - set key for cipher
 * @tfm: cipher handle
 * @key: buffer holding the key
 * @keylen: length of the key in bytes
 *
 * The caller provided key is set for the single block cipher referenced by the
 * cipher handle.
 *
 * Note that the key length determines the cipher variant. Many block ciphers
 * implement different variants depending on the key size, such as AES-128 vs.
 * AES-192 vs. AES-256. When a 16-byte key is provided for an AES cipher
 * handle, AES-128 is performed.
 *
 * Return: 0 if the setting of the key was successful; < 0 if an error occurred
 */
int crypto_cipher_setkey(struct crypto_cipher *tfm,
			 const u8 *key, unsigned int keylen);

/**
 * crypto_cipher_encrypt_one() - encrypt one block of plaintext
 * @tfm: cipher handle
 * @dst: points to the buffer that will be filled with the ciphertext
 * @src: buffer holding the plaintext to be encrypted
 *
 * Invoke the encryption operation of one block. The caller must ensure that
 * the plaintext and ciphertext buffers are at least one block in size.
 */
void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
			       u8 *dst, const u8 *src);

/**
 * crypto_cipher_decrypt_one() - decrypt one block of ciphertext
 * @tfm: cipher handle
 * @dst: points to the buffer that will be filled with the plaintext
 * @src: buffer holding the ciphertext to be decrypted
 *
 * Invoke the decryption operation of one block. The caller must ensure that
 * the plaintext and ciphertext buffers are at least one block in size.
 */
void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
			       u8 *dst, const u8 *src);
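
/*
 * A complete single block cipher round trip, assuming an "aes"
 * implementation is available (illustrative sketch):
 *
 *	struct crypto_cipher *tfm;
 *	u8 block[16];
 *	int err;
 *
 *	tfm = crypto_alloc_cipher("aes", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_cipher_setkey(tfm, key, 16);
 *	if (!err) {
 *		crypto_cipher_encrypt_one(tfm, block, block);
 *		crypto_cipher_decrypt_one(tfm, block, block);
 *	}
 *	crypto_free_cipher(tfm);
 */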

static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_comp *)tfm;
}

static inline struct crypto_comp *crypto_alloc_comp(const char *alg_name,
						    u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_comp_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_comp_tfm(struct crypto_comp *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_comp(struct crypto_comp *tfm)
{
	crypto_free_tfm(crypto_comp_tfm(tfm));
}

static inline int crypto_has_comp(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline const char *crypto_comp_name(struct crypto_comp *tfm)
{
	return crypto_tfm_alg_name(crypto_comp_tfm(tfm));
}

int crypto_comp_compress(struct crypto_comp *tfm,
			 const u8 *src, unsigned int slen,
			 u8 *dst, unsigned int *dlen);

int crypto_comp_decompress(struct crypto_comp *tfm,
			   const u8 *src, unsigned int slen,
			   u8 *dst, unsigned int *dlen);
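
/*
 * Usage mirrors the cipher API; e.g. compressing a buffer with the
 * "deflate" algorithm, assuming it is available (illustrative sketch):
 *
 *	struct crypto_comp *tfm;
 *	unsigned int dlen = sizeof(dst);
 *	int err;
 *
 *	tfm = crypto_alloc_comp("deflate", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_comp_compress(tfm, src, slen, dst, &dlen);
 *	crypto_free_comp(tfm);
 *
 * On success, dlen holds the number of bytes written to dst.
 */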

#endif	/* _LINUX_CRYPTO_H */