xref: /linux-6.15/include/linux/crypto.h (revision c75bec79)
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <[email protected]>
 * Copyright (c) 2002 David S. Miller ([email protected])
 * Copyright (c) 2005 Herbert Xu <[email protected]>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <[email protected]>
 * and Nettle, by Niels Möller.
 */
#ifndef _LINUX_CRYPTO_H
#define _LINUX_CRYPTO_H

#include <linux/atomic.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/bug.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/uaccess.h>
#include <linux/completion.h>

/*
 * Autoloaded crypto modules should only use a prefixed name to avoid allowing
 * arbitrary modules to be loaded. Loading from userspace may still need the
 * unprefixed names, so those aliases are retained as well.
 * This uses __MODULE_INFO directly instead of MODULE_ALIAS because pre-4.3
 * gcc (e.g. avr32 toolchain) uses __LINE__ for uniqueness, and this macro
 * expands twice on the same line. Instead, use a separate base name for the
 * alias.
 */
#define MODULE_ALIAS_CRYPTO(name)	\
		__MODULE_INFO(alias, alias_userspace, name);	\
		__MODULE_INFO(alias, alias_crypto, "crypto-" name)
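/*
 * Example (editorial illustration, not part of the original header): a module
 * implementing the "aes" algorithm would typically declare
 *
 *	MODULE_ALIAS_CRYPTO("aes");
 *	MODULE_ALIAS_CRYPTO("aes-generic");
 *
 * Each invocation creates both the unprefixed alias (for explicit loading from
 * userspace) and the "crypto-" prefixed alias used by in-kernel autoloading.
 */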

/*
 * Algorithm masks and types.
 */
#define CRYPTO_ALG_TYPE_MASK		0x0000000f
#define CRYPTO_ALG_TYPE_CIPHER		0x00000001
#define CRYPTO_ALG_TYPE_COMPRESS	0x00000002
#define CRYPTO_ALG_TYPE_AEAD		0x00000003
#define CRYPTO_ALG_TYPE_SKCIPHER	0x00000005
#define CRYPTO_ALG_TYPE_KPP		0x00000008
#define CRYPTO_ALG_TYPE_ACOMPRESS	0x0000000a
#define CRYPTO_ALG_TYPE_SCOMPRESS	0x0000000b
#define CRYPTO_ALG_TYPE_RNG		0x0000000c
#define CRYPTO_ALG_TYPE_AKCIPHER	0x0000000d
#define CRYPTO_ALG_TYPE_HASH		0x0000000e
#define CRYPTO_ALG_TYPE_SHASH		0x0000000e
#define CRYPTO_ALG_TYPE_AHASH		0x0000000f

#define CRYPTO_ALG_TYPE_HASH_MASK	0x0000000e
#define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000e
#define CRYPTO_ALG_TYPE_ACOMPRESS_MASK	0x0000000e

#define CRYPTO_ALG_LARVAL		0x00000010
#define CRYPTO_ALG_DEAD			0x00000020
#define CRYPTO_ALG_DYING		0x00000040
#define CRYPTO_ALG_ASYNC		0x00000080

/*
 * Set this bit if and only if the algorithm requires another algorithm of
 * the same type to handle corner cases.
 */
#define CRYPTO_ALG_NEED_FALLBACK	0x00000100

/*
 * Set if the algorithm has passed automated run-time testing.  Note that
 * if there is no run-time testing for a given algorithm it is considered
 * to have passed.
 */

#define CRYPTO_ALG_TESTED		0x00000400

/*
 * Set if the algorithm is an instance that is built from templates.
 */
#define CRYPTO_ALG_INSTANCE		0x00000800

/* Set this bit if the algorithm provided is hardware accelerated but
 * is not available to userspace via an instruction set or similar.
 */
#define CRYPTO_ALG_KERN_DRIVER_ONLY	0x00001000

/*
 * Mark a cipher as a service implementation only usable by another
 * cipher and never by a normal user of the kernel crypto API
 */
#define CRYPTO_ALG_INTERNAL		0x00002000

/*
 * Set if the algorithm has a ->setkey() method but can be used without
 * calling it first, i.e. there is a default key.
 */
#define CRYPTO_ALG_OPTIONAL_KEY		0x00004000

/*
 * Don't trigger module loading
 */
#define CRYPTO_NOLOAD			0x00008000

/*
 * Transform masks and values (for crt_flags).
 */
#define CRYPTO_TFM_NEED_KEY		0x00000001

#define CRYPTO_TFM_REQ_MASK		0x000fff00
#define CRYPTO_TFM_REQ_FORBID_WEAK_KEYS	0x00000100
#define CRYPTO_TFM_REQ_MAY_SLEEP	0x00000200
#define CRYPTO_TFM_REQ_MAY_BACKLOG	0x00000400

/*
 * Miscellaneous stuff.
 */
#define CRYPTO_MAX_ALG_NAME		128

/*
 * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
 * declaration) is used to ensure that the crypto_tfm context structure is
 * aligned correctly for the given architecture so that there are no alignment
 * faults for C data types.  In particular, this is required on platforms such
 * as arm where pointers are 32-bit aligned but there are data types such as
 * u64 which require 64-bit alignment.
 */
#define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN

#define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))

struct scatterlist;
struct crypto_async_request;
struct crypto_tfm;
struct crypto_type;

typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);

/**
 * DOC: Block Cipher Context Data Structures
 *
 * These data structures define the operating context for each block cipher
 * type.
 */

struct crypto_async_request {
	struct list_head list;
	crypto_completion_t complete;
	void *data;
	struct crypto_tfm *tfm;

	u32 flags;
};

/**
 * DOC: Block Cipher Algorithm Definitions
 *
 * These data structures define modular crypto algorithm implementations,
 * managed via crypto_register_alg() and crypto_unregister_alg().
 */

/**
 * struct cipher_alg - single-block symmetric ciphers definition
 * @cia_min_keysize: Minimum key size supported by the transformation. This is
 *		     the smallest key length supported by this transformation
 *		     algorithm. This must be set to one of the pre-defined
 *		     values as this is not hardware specific. Possible values
 *		     for this field can be found via git grep "_MIN_KEY_SIZE"
 *		     include/crypto/
 * @cia_max_keysize: Maximum key size supported by the transformation. This is
 *		    the largest key length supported by this transformation
 *		    algorithm. This must be set to one of the pre-defined values
 *		    as this is not hardware specific. Possible values for this
 *		    field can be found via git grep "_MAX_KEY_SIZE"
 *		    include/crypto/
 * @cia_setkey: Set key for the transformation. This function is used to either
 *	        program a supplied key into the hardware or store the key in the
 *	        transformation context for programming it later. Note that this
 *	        function does modify the transformation context. This function
 *	        can be called multiple times during the existence of the
 *	        transformation object, so one must make sure the key is properly
 *	        reprogrammed into the hardware. This function is also
 *	        responsible for checking the key length for validity.
 * @cia_encrypt: Encrypt a single block. This function is used to encrypt a
 *		 single block of data, which must be @cra_blocksize big. This
 *		 always operates on a full @cra_blocksize and it is not possible
 *		 to encrypt a block of smaller size. The supplied buffers must
 *		 therefore also be at least of @cra_blocksize size. Both the
 *		 input and output buffers are always aligned to @cra_alignmask.
 *		 In case either the input or output buffer supplied by a user
 *		 of the crypto API is not aligned to @cra_alignmask, the crypto
 *		 API will re-align the buffers. The re-alignment means that a
 *		 new buffer will be allocated, the data will be copied into the
 *		 new buffer, then the processing will happen on the new buffer,
 *		 then the data will be copied back into the original buffer and
 *		 finally the new buffer will be freed. In case a software
 *		 fallback was put in place in the @cra_init call, this function
 *		 might need to use the fallback if the algorithm doesn't support
 *		 all of the key sizes. In case the key was stored in the
 *		 transformation context, the key might need to be re-programmed
 *		 into the hardware in this function. This function shall not
 *		 modify the transformation context, as this function may be
 *		 called in parallel with the same transformation object.
 * @cia_decrypt: Decrypt a single block. This is a reverse counterpart to
 *		 @cia_encrypt, and the conditions are exactly the same.
 *
 * All fields are mandatory and must be filled.
 */
struct cipher_alg {
	unsigned int cia_min_keysize;
	unsigned int cia_max_keysize;
	int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
	                  unsigned int keylen);
	void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};

/**
 * struct compress_alg - compression/decompression algorithm
 * @coa_compress: Compress a buffer of specified length, storing the resulting
 *		  data in the specified buffer. Return the length of the
 *		  compressed data in dlen.
 * @coa_decompress: Decompress the source buffer, storing the uncompressed
 *		    data in the specified buffer. The length of the data is
 *		    returned in dlen.
 *
 * All fields are mandatory.
 */
struct compress_alg {
	int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
			    unsigned int slen, u8 *dst, unsigned int *dlen);
	int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
			      unsigned int slen, u8 *dst, unsigned int *dlen);
};

#ifdef CONFIG_CRYPTO_STATS
/*
 * struct crypto_istat_aead - statistics for AEAD algorithm
 * @encrypt_cnt:	number of encrypt requests
 * @encrypt_tlen:	total data size handled by encrypt requests
 * @decrypt_cnt:	number of decrypt requests
 * @decrypt_tlen:	total data size handled by decrypt requests
 * @err_cnt:		number of errors for AEAD requests
 */
struct crypto_istat_aead {
	atomic64_t encrypt_cnt;
	atomic64_t encrypt_tlen;
	atomic64_t decrypt_cnt;
	atomic64_t decrypt_tlen;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_akcipher - statistics for akcipher algorithm
 * @encrypt_cnt:	number of encrypt requests
 * @encrypt_tlen:	total data size handled by encrypt requests
 * @decrypt_cnt:	number of decrypt requests
 * @decrypt_tlen:	total data size handled by decrypt requests
 * @verify_cnt:		number of verify operations
 * @sign_cnt:		number of sign requests
 * @err_cnt:		number of errors for akcipher requests
 */
struct crypto_istat_akcipher {
	atomic64_t encrypt_cnt;
	atomic64_t encrypt_tlen;
	atomic64_t decrypt_cnt;
	atomic64_t decrypt_tlen;
	atomic64_t verify_cnt;
	atomic64_t sign_cnt;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_cipher - statistics for cipher algorithm
 * @encrypt_cnt:	number of encrypt requests
 * @encrypt_tlen:	total data size handled by encrypt requests
 * @decrypt_cnt:	number of decrypt requests
 * @decrypt_tlen:	total data size handled by decrypt requests
 * @err_cnt:		number of errors for cipher requests
 */
struct crypto_istat_cipher {
	atomic64_t encrypt_cnt;
	atomic64_t encrypt_tlen;
	atomic64_t decrypt_cnt;
	atomic64_t decrypt_tlen;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_compress - statistics for compress algorithm
 * @compress_cnt:	number of compress requests
 * @compress_tlen:	total data size handled by compress requests
 * @decompress_cnt:	number of decompress requests
 * @decompress_tlen:	total data size handled by decompress requests
 * @err_cnt:		number of errors for compress requests
 */
struct crypto_istat_compress {
	atomic64_t compress_cnt;
	atomic64_t compress_tlen;
	atomic64_t decompress_cnt;
	atomic64_t decompress_tlen;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_hash - statistics for hash algorithm
 * @hash_cnt:		number of hash requests
 * @hash_tlen:		total data size hashed
 * @err_cnt:		number of errors for hash requests
 */
struct crypto_istat_hash {
	atomic64_t hash_cnt;
	atomic64_t hash_tlen;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_kpp - statistics for KPP algorithm
 * @setsecret_cnt:		number of setsecret operations
 * @generate_public_key_cnt:	number of generate_public_key operations
 * @compute_shared_secret_cnt:	number of compute_shared_secret operations
 * @err_cnt:			number of errors for KPP requests
 */
struct crypto_istat_kpp {
	atomic64_t setsecret_cnt;
	atomic64_t generate_public_key_cnt;
	atomic64_t compute_shared_secret_cnt;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_rng: statistics for RNG algorithm
 * @generate_cnt:	number of RNG generate requests
 * @generate_tlen:	total size of data generated by the RNG
 * @seed_cnt:		number of times the RNG was seeded
 * @err_cnt:		number of errors for RNG requests
 */
struct crypto_istat_rng {
	atomic64_t generate_cnt;
	atomic64_t generate_tlen;
	atomic64_t seed_cnt;
	atomic64_t err_cnt;
};
#endif /* CONFIG_CRYPTO_STATS */

#define cra_cipher	cra_u.cipher
#define cra_compress	cra_u.compress

/**
 * struct crypto_alg - definition of a cryptographic cipher algorithm
 * @cra_flags: Flags describing this transformation. See include/linux/crypto.h
 *	       CRYPTO_ALG_* flags for the flags which go in here. Those are
 *	       used for fine-tuning the description of the transformation
 *	       algorithm.
 * @cra_blocksize: Minimum block size of this transformation. The size in bytes
 *		   of the smallest possible unit which can be transformed with
 *		   this algorithm. The users must respect this value.
 *		   In case of HASH transformation, it is possible for a smaller
 *		   block than @cra_blocksize to be passed to the crypto API for
 *		   transformation, in case of any other transformation type, an
 *		   error will be returned upon any attempt to transform smaller
 *		   than @cra_blocksize chunks.
 * @cra_ctxsize: Size of the operational context of the transformation. This
 *		 value informs the kernel crypto API about the memory size
 *		 needed to be allocated for the transformation context.
 * @cra_alignmask: Alignment mask for the input and output data buffer. The data
 *		   buffer containing the input data for the algorithm must be
 *		   aligned to this alignment mask. The data buffer for the
 *		   output data must be aligned to this alignment mask. Note that
 *		   the Crypto API will do the re-alignment in software, but
 *		   only under special conditions and there is a performance hit.
 *		   The re-alignment happens at these occasions for different
 *		   @cra_u types: cipher -- For both input data and output data
 *		   buffer; ahash -- For output hash destination buf; shash --
 *		   For output hash destination buf.
 *		   This is needed on hardware which is flawed by design and
 *		   cannot pick data from arbitrary addresses.
 * @cra_priority: Priority of this transformation implementation. In case
 *		  multiple transformations with the same @cra_name are available
 *		  to the Crypto API, the kernel will use the one with the
 *		  highest @cra_priority.
 * @cra_name: Generic name (usable by multiple implementations) of the
 *	      transformation algorithm. This is the name of the transformation
 *	      itself. This field is used by the kernel when looking up the
 *	      providers of a particular transformation.
 * @cra_driver_name: Unique name of the transformation provider. This is the
 *		     name of the provider of the transformation. This can be any
 *		     arbitrary value, but in the usual case, this contains the
 *		     name of the chip or provider and the name of the
 *		     transformation algorithm.
 * @cra_type: Type of the cryptographic transformation. This is a pointer to
 *	      struct crypto_type, which implements callbacks common for all
 *	      transformation types. There are multiple options, such as
 *	      &crypto_skcipher_type, &crypto_ahash_type, &crypto_rng_type.
 *	      This field might be empty. In that case, there are no common
 *	      callbacks. This is the case for: cipher, compress, shash.
 * @cra_u: Callbacks implementing the transformation. This is a union of
 *	   multiple structures. Depending on the type of transformation selected
 *	   by @cra_type and @cra_flags above, the associated structure must be
 *	   filled with callbacks. This field might be empty. This is the case
 *	   for ahash, shash.
 * @cra_init: Initialize the cryptographic transformation object. This function
 *	      is used to initialize the cryptographic transformation object.
 *	      This function is called only once at the instantiation time, right
 *	      after the transformation context was allocated. In case the
 *	      cryptographic hardware has some special requirements which need to
 *	      be handled by software, this function shall check for the precise
 *	      requirement of the transformation and put any software fallbacks
 *	      in place.
 * @cra_exit: Deinitialize the cryptographic transformation object. This is a
 *	      counterpart to @cra_init, used to remove various changes set in
 *	      @cra_init.
 * @cra_u.cipher: Union member which contains a single-block symmetric cipher
 *		  definition. See @struct @cipher_alg.
 * @cra_u.compress: Union member which contains a (de)compression algorithm.
 *		    See @struct @compress_alg.
 * @cra_module: Owner of this transformation implementation. Set to THIS_MODULE
 * @cra_list: internally used
 * @cra_users: internally used
 * @cra_refcnt: internally used
 * @cra_destroy: internally used
 *
 * @stats: union of all possible crypto_istat_xxx structures
 * @stats.aead:		statistics for AEAD algorithm
 * @stats.akcipher:	statistics for akcipher algorithm
 * @stats.cipher:	statistics for cipher algorithm
 * @stats.compress:	statistics for compress algorithm
 * @stats.hash:		statistics for hash algorithm
 * @stats.rng:		statistics for rng algorithm
 * @stats.kpp:		statistics for KPP algorithm
 *
 * The struct crypto_alg describes a generic Crypto API algorithm and is common
 * for all of the transformations. Any variable not documented here shall not
 * be used by a cipher implementation as it is internal to the Crypto API.
 */
struct crypto_alg {
	struct list_head cra_list;
	struct list_head cra_users;

	u32 cra_flags;
	unsigned int cra_blocksize;
	unsigned int cra_ctxsize;
	unsigned int cra_alignmask;

	int cra_priority;
	refcount_t cra_refcnt;

	char cra_name[CRYPTO_MAX_ALG_NAME];
	char cra_driver_name[CRYPTO_MAX_ALG_NAME];

	const struct crypto_type *cra_type;

	union {
		struct cipher_alg cipher;
		struct compress_alg compress;
	} cra_u;

	int (*cra_init)(struct crypto_tfm *tfm);
	void (*cra_exit)(struct crypto_tfm *tfm);
	void (*cra_destroy)(struct crypto_alg *alg);

	struct module *cra_module;

#ifdef CONFIG_CRYPTO_STATS
	union {
		struct crypto_istat_aead aead;
		struct crypto_istat_akcipher akcipher;
		struct crypto_istat_cipher cipher;
		struct crypto_istat_compress compress;
		struct crypto_istat_hash hash;
		struct crypto_istat_rng rng;
		struct crypto_istat_kpp kpp;
	} stats;
#endif /* CONFIG_CRYPTO_STATS */

} CRYPTO_MINALIGN_ATTR;
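
/*
 * Registration sketch (editorial illustration, not part of the original
 * header).  A toy single-block "cipher" that XORs each block with its key
 * could be described roughly as follows; all names, sizes and the priority
 * value are made up for the example:
 *
 *	#define TOY_BLOCK_SIZE	8
 *
 *	struct toy_ctx {
 *		u8 key[TOY_BLOCK_SIZE];
 *	};
 *
 *	static int toy_setkey(struct crypto_tfm *tfm, const u8 *key,
 *			      unsigned int keylen)
 *	{
 *		struct toy_ctx *ctx = crypto_tfm_ctx(tfm);
 *
 *		if (keylen != TOY_BLOCK_SIZE)
 *			return -EINVAL;
 *		memcpy(ctx->key, key, keylen);
 *		return 0;
 *	}
 *
 *	static void toy_crypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 *	{
 *		struct toy_ctx *ctx = crypto_tfm_ctx(tfm);
 *		int i;
 *
 *		for (i = 0; i < TOY_BLOCK_SIZE; i++)
 *			dst[i] = src[i] ^ ctx->key[i];
 *	}
 *
 *	static struct crypto_alg toy_alg = {
 *		.cra_name		= "toy",
 *		.cra_driver_name	= "toy-generic",
 *		.cra_priority		= 100,
 *		.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
 *		.cra_blocksize		= TOY_BLOCK_SIZE,
 *		.cra_ctxsize		= sizeof(struct toy_ctx),
 *		.cra_module		= THIS_MODULE,
 *		.cra_cipher		= {
 *			.cia_min_keysize	= TOY_BLOCK_SIZE,
 *			.cia_max_keysize	= TOY_BLOCK_SIZE,
 *			.cia_setkey		= toy_setkey,
 *			.cia_encrypt		= toy_crypt,
 *			.cia_decrypt		= toy_crypt,
 *		},
 *	};
 *
 * The module's init and exit hooks would then call
 * crypto_register_alg(&toy_alg) and crypto_unregister_alg(&toy_alg),
 * declared further below in this header.
 */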

#ifdef CONFIG_CRYPTO_STATS
void crypto_stats_init(struct crypto_alg *alg);
void crypto_stats_get(struct crypto_alg *alg);
void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret);
void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret);
void crypto_stats_ahash_update(unsigned int nbytes, int ret, struct crypto_alg *alg);
void crypto_stats_ahash_final(unsigned int nbytes, int ret, struct crypto_alg *alg);
void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret, struct crypto_alg *alg);
void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret, struct crypto_alg *alg);
void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg);
void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg);
void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg);
void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg);
void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret);
void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret);
void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret);
void crypto_stats_rng_seed(struct crypto_alg *alg, int ret);
void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen, int ret);
void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg);
void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg);
#else
static inline void crypto_stats_init(struct crypto_alg *alg)
{}
static inline void crypto_stats_get(struct crypto_alg *alg)
{}
static inline void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_ahash_update(unsigned int nbytes, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_ahash_final(unsigned int nbytes, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen, int ret)
{}
static inline void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg)
{}
#endif
/*
 * A helper struct for waiting for completion of async crypto ops
 */
struct crypto_wait {
	struct completion completion;
	int err;
};

/*
 * Macro for declaring a crypto op async wait object on stack
 */
#define DECLARE_CRYPTO_WAIT(_wait) \
	struct crypto_wait _wait = { \
		COMPLETION_INITIALIZER_ONSTACK((_wait).completion), 0 }

/*
 * Async ops completion helper functions
 */
void crypto_req_done(struct crypto_async_request *req, int err);

static inline int crypto_wait_req(int err, struct crypto_wait *wait)
{
	switch (err) {
	case -EINPROGRESS:
	case -EBUSY:
		wait_for_completion(&wait->completion);
		reinit_completion(&wait->completion);
		err = wait->err;
		break;
	}

	return err;
}

static inline void crypto_init_wait(struct crypto_wait *wait)
{
	init_completion(&wait->completion);
}
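
/*
 * Usage sketch (editorial illustration; assumes the ahash API from
 * <crypto/hash.h>, which is not declared in this header).  The wait object is
 * passed as the request callback data, and crypto_wait_req() converts the
 * asynchronous -EINPROGRESS/-EBUSY return codes into a synchronous result:
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *	int err;
 *
 *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *				   CRYPTO_TFM_REQ_MAY_SLEEP,
 *				   crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_ahash_digest(req), &wait);
 *	if (err)
 *		pr_err("digest failed: %d\n", err);
 */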

/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
void crypto_unregister_alg(struct crypto_alg *alg);
int crypto_register_algs(struct crypto_alg *algs, int count);
void crypto_unregister_algs(struct crypto_alg *algs, int count);

/*
 * Algorithm query interface.
 */
int crypto_has_alg(const char *name, u32 type, u32 mask);

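/*
 * Example (editorial illustration): callers can probe for an implementation
 * before committing to it, e.g.
 *
 *	if (!crypto_has_alg("sha256", 0, 0))
 *		return -ENOENT;
 */
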
/*
 * Transforms: user-instantiated objects which encapsulate algorithms
 * and core processing logic.  Managed via crypto_alloc_*() and
 * crypto_free_*(), as well as the various helpers below.
 */

struct crypto_tfm {

	u32 crt_flags;

	void (*exit)(struct crypto_tfm *tfm);

	struct crypto_alg *__crt_alg;

	void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_cipher {
	struct crypto_tfm base;
};

struct crypto_comp {
	struct crypto_tfm base;
};

enum {
	CRYPTOA_UNSPEC,
	CRYPTOA_ALG,
	CRYPTOA_TYPE,
	CRYPTOA_U32,
	__CRYPTOA_MAX,
};

#define CRYPTOA_MAX (__CRYPTOA_MAX - 1)

/* Maximum number of (rtattr) parameters for each template. */
#define CRYPTO_MAX_ATTRS 32

struct crypto_attr_alg {
	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_attr_type {
	u32 type;
	u32 mask;
};

struct crypto_attr_u32 {
	u32 num;
};

/*
 * Transform user interface.
 */

struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm);

static inline void crypto_free_tfm(struct crypto_tfm *tfm)
{
	return crypto_destroy_tfm(tfm, tfm);
}

int alg_test(const char *driver, const char *alg, u32 type, u32 mask);

/*
 * Transform helpers which query the underlying algorithm.
 */
static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_name;
}

static inline const char *crypto_tfm_alg_driver_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_driver_name;
}

static inline int crypto_tfm_alg_priority(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_priority;
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_blocksize;
}

static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_alignmask;
}

static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm)
{
	return tfm->crt_flags;
}

static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags |= flags;
}

static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags &= ~flags;
}

static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
	return tfm->__crt_ctx;
}

static inline unsigned int crypto_tfm_ctx_alignment(void)
{
	struct crypto_tfm *tfm;
	return __alignof__(tfm->__crt_ctx);
}
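
/*
 * Example (editorial illustration): a driver's ->cra_init() typically uses
 * crypto_tfm_ctx() to reach the per-transform context whose size it announced
 * in @cra_ctxsize.  The context struct and the fallback allocation below are
 * made up for the example; the matching ->cra_exit() would free the fallback:
 *
 *	struct mydrv_ctx {
 *		struct crypto_cipher *fallback;
 *	};
 *
 *	static int mydrv_init_tfm(struct crypto_tfm *tfm)
 *	{
 *		struct mydrv_ctx *ctx = crypto_tfm_ctx(tfm);
 *
 *		ctx->fallback = crypto_alloc_cipher(crypto_tfm_alg_name(tfm),
 *						    0, CRYPTO_ALG_NEED_FALLBACK);
 *		return PTR_ERR_OR_ZERO(ctx->fallback);
 *	}
 */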

/**
 * DOC: Single Block Cipher API
 *
 * The single block cipher API is used with the ciphers of type
 * CRYPTO_ALG_TYPE_CIPHER (listed as type "cipher" in /proc/crypto).
 *
 * Using the single block cipher API calls, operations with the basic cipher
 * primitive can be implemented. These cipher primitives exclude any block
 * chaining operations including IV handling.
 *
 * The purpose of this single block cipher API is to support the implementation
 * of templates or other concepts that only need to perform the cipher operation
 * on one block at a time. Templates invoke the underlying cipher primitive
 * block-wise and process either the input or the output data of these cipher
 * operations.
 */

static inline struct crypto_cipher *__crypto_cipher_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_cipher *)tfm;
}

/**
 * crypto_alloc_cipher() - allocate single block cipher handle
 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
 *	     single block cipher
 * @type: specifies the type of the cipher
 * @mask: specifies the mask for the cipher
 *
 * Allocate a cipher handle for a single block cipher. The returned struct
 * crypto_cipher is the cipher handle that is required for any subsequent API
 * invocation for that single block cipher.
 *
 * Return: allocated cipher handle in case of success; IS_ERR() is true in case
 *	   of an error, PTR_ERR() returns the error code.
 */
static inline struct crypto_cipher *crypto_alloc_cipher(const char *alg_name,
							u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_CIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_cipher_tfm(struct crypto_cipher *tfm)
{
	return &tfm->base;
}

/**
 * crypto_free_cipher() - zeroize and free the single block cipher handle
 * @tfm: cipher handle to be freed
 */
static inline void crypto_free_cipher(struct crypto_cipher *tfm)
{
	crypto_free_tfm(crypto_cipher_tfm(tfm));
}

/**
 * crypto_has_cipher() - Search for the availability of a single block cipher
 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
 *	     single block cipher
 * @type: specifies the type of the cipher
 * @mask: specifies the mask for the cipher
 *
 * Return: true when the single block cipher is known to the kernel crypto API;
 *	   false otherwise
 */
static inline int crypto_has_cipher(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_CIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

/**
 * crypto_cipher_blocksize() - obtain block size for cipher
 * @tfm: cipher handle
 *
 * The block size for the single block cipher referenced with the cipher handle
 * tfm is returned. The caller may use that information to allocate appropriate
 * memory for the data returned by the encryption or decryption operation.
 *
 * Return: block size of cipher
 */
static inline unsigned int crypto_cipher_blocksize(struct crypto_cipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_cipher_tfm(tfm));
}

static inline unsigned int crypto_cipher_alignmask(struct crypto_cipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_cipher_tfm(tfm));
}

static inline u32 crypto_cipher_get_flags(struct crypto_cipher *tfm)
{
	return crypto_tfm_get_flags(crypto_cipher_tfm(tfm));
}

static inline void crypto_cipher_set_flags(struct crypto_cipher *tfm,
					   u32 flags)
{
	crypto_tfm_set_flags(crypto_cipher_tfm(tfm), flags);
}

static inline void crypto_cipher_clear_flags(struct crypto_cipher *tfm,
					     u32 flags)
{
	crypto_tfm_clear_flags(crypto_cipher_tfm(tfm), flags);
}

/**
 * crypto_cipher_setkey() - set key for cipher
 * @tfm: cipher handle
 * @key: buffer holding the key
 * @keylen: length of the key in bytes
 *
 * The caller provided key is set for the single block cipher referenced by the
 * cipher handle.
 *
 * Note that the key length selects the cipher variant. Many block ciphers
 * support several variants depending on the key size, such as AES-128 vs.
 * AES-192 vs. AES-256. When a 16 byte key is provided for an AES cipher
 * handle, AES-128 is performed.
 *
 * Return: 0 if the setting of the key was successful; < 0 if an error occurred
 */
int crypto_cipher_setkey(struct crypto_cipher *tfm,
			 const u8 *key, unsigned int keylen);

/**
 * crypto_cipher_encrypt_one() - encrypt one block of plaintext
 * @tfm: cipher handle
 * @dst: points to the buffer that will be filled with the ciphertext
 * @src: buffer holding the plaintext to be encrypted
 *
 * Invoke the encryption operation of one block. The caller must ensure that
 * the plaintext and ciphertext buffers are at least one block in size.
 */
void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
			       u8 *dst, const u8 *src);

/**
 * crypto_cipher_decrypt_one() - decrypt one block of ciphertext
 * @tfm: cipher handle
 * @dst: points to the buffer that will be filled with the plaintext
 * @src: buffer holding the ciphertext to be decrypted
 *
 * Invoke the decryption operation of one block. The caller must ensure that
 * the plaintext and ciphertext buffers are at least one block in size.
 */
void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
			       u8 *dst, const u8 *src);
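
/*
 * Usage sketch for the single block cipher API (editorial illustration, not
 * part of the original header).  Key and data values are placeholders; a
 * 16 byte key selects AES-128:
 *
 *	struct crypto_cipher *tfm;
 *	u8 key[16], in[16], out[16];
 *	int err;
 *
 *	tfm = crypto_alloc_cipher("aes", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_cipher_setkey(tfm, key, sizeof(key));
 *	if (!err)
 *		crypto_cipher_encrypt_one(tfm, out, in);
 *
 *	crypto_free_cipher(tfm);
 *	return err;
 */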

static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_comp *)tfm;
}

static inline struct crypto_comp *crypto_alloc_comp(const char *alg_name,
						    u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_comp_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_comp_tfm(struct crypto_comp *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_comp(struct crypto_comp *tfm)
{
	crypto_free_tfm(crypto_comp_tfm(tfm));
}

static inline int crypto_has_comp(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline const char *crypto_comp_name(struct crypto_comp *tfm)
{
	return crypto_tfm_alg_name(crypto_comp_tfm(tfm));
}

int crypto_comp_compress(struct crypto_comp *tfm,
			 const u8 *src, unsigned int slen,
			 u8 *dst, unsigned int *dlen);

int crypto_comp_decompress(struct crypto_comp *tfm,
			   const u8 *src, unsigned int slen,
			   u8 *dst, unsigned int *dlen);
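
/*
 * Usage sketch for the synchronous compression interface (editorial
 * illustration).  "deflate" is one possible algorithm name; src/slen and
 * dst/dlen are caller-provided buffers and lengths:
 *
 *	struct crypto_comp *tfm;
 *	unsigned int dlen = dst_size;
 *	int err;
 *
 *	tfm = crypto_alloc_comp("deflate", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_comp_compress(tfm, src, slen, dst, &dlen);
 *
 *	crypto_free_comp(tfm);
 *	return err;
 *
 * On success, dlen is updated to hold the size of the compressed output.
 */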

#endif	/* _LINUX_CRYPTO_H */