xref: /linux-6.15/include/linux/crypto.h (revision 2e4c77be)
1 /*
2  * Scatterlist Cryptographic API.
3  *
4  * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5  * Copyright (c) 2002 David S. Miller (davem@redhat.com)
6  * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
7  *
8  * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
9  * and Nettle, by Niels Möller.
10  *
11  * This program is free software; you can redistribute it and/or modify it
12  * under the terms of the GNU General Public License as published by the Free
13  * Software Foundation; either version 2 of the License, or (at your option)
14  * any later version.
15  *
16  */
17 #ifndef _LINUX_CRYPTO_H
18 #define _LINUX_CRYPTO_H
19 
20 #include <asm/atomic.h>
21 #include <linux/module.h>
22 #include <linux/kernel.h>
23 #include <linux/list.h>
24 #include <linux/slab.h>
25 #include <linux/string.h>
26 #include <linux/uaccess.h>
27 
28 /*
29  * Algorithm masks and types.
30  */
31 #define CRYPTO_ALG_TYPE_MASK		0x0000000f
32 #define CRYPTO_ALG_TYPE_CIPHER		0x00000001
33 #define CRYPTO_ALG_TYPE_COMPRESS	0x00000002
34 #define CRYPTO_ALG_TYPE_AEAD		0x00000003
35 #define CRYPTO_ALG_TYPE_BLKCIPHER	0x00000004
36 #define CRYPTO_ALG_TYPE_ABLKCIPHER	0x00000005
37 #define CRYPTO_ALG_TYPE_GIVCIPHER	0x00000006
38 #define CRYPTO_ALG_TYPE_DIGEST		0x00000008
39 #define CRYPTO_ALG_TYPE_HASH		0x00000008
40 #define CRYPTO_ALG_TYPE_SHASH		0x00000009
41 #define CRYPTO_ALG_TYPE_AHASH		0x0000000a
42 #define CRYPTO_ALG_TYPE_RNG		0x0000000c
43 
44 #define CRYPTO_ALG_TYPE_HASH_MASK	0x0000000e
45 #define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000c
46 #define CRYPTO_ALG_TYPE_BLKCIPHER_MASK	0x0000000c
47 
48 #define CRYPTO_ALG_LARVAL		0x00000010
49 #define CRYPTO_ALG_DEAD			0x00000020
50 #define CRYPTO_ALG_DYING		0x00000040
51 #define CRYPTO_ALG_ASYNC		0x00000080
52 
53 /*
54  * Set this bit if and only if the algorithm requires another algorithm of
55  * the same type to handle corner cases.
56  */
57 #define CRYPTO_ALG_NEED_FALLBACK	0x00000100
58 
59 /*
60  * This bit is set for symmetric key ciphers that have already been wrapped
61  * with a generic IV generator to prevent them from being wrapped again.
62  */
63 #define CRYPTO_ALG_GENIV		0x00000200
64 
65 /*
66  * Set if the algorithm has passed automated run-time testing.  Note that
67  * if there is no run-time testing for a given algorithm it is considered
68  * to have passed.
69  */
70 
71 #define CRYPTO_ALG_TESTED		0x00000400
72 
73 /*
74  * Transform masks and values (for crt_flags).
75  */
76 #define CRYPTO_TFM_REQ_MASK		0x000fff00
77 #define CRYPTO_TFM_RES_MASK		0xfff00000
78 
79 #define CRYPTO_TFM_REQ_WEAK_KEY		0x00000100
80 #define CRYPTO_TFM_REQ_MAY_SLEEP	0x00000200
81 #define CRYPTO_TFM_REQ_MAY_BACKLOG	0x00000400
82 #define CRYPTO_TFM_RES_WEAK_KEY		0x00100000
83 #define CRYPTO_TFM_RES_BAD_KEY_LEN   	0x00200000
84 #define CRYPTO_TFM_RES_BAD_KEY_SCHED 	0x00400000
85 #define CRYPTO_TFM_RES_BAD_BLOCK_LEN 	0x00800000
86 #define CRYPTO_TFM_RES_BAD_FLAGS 	0x01000000
87 
88 /*
89  * Miscellaneous stuff.
90  */
91 #define CRYPTO_MAX_ALG_NAME		64
92 
93 /*
94  * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
95  * declaration) is used to ensure that the crypto_tfm context structure is
96  * aligned correctly for the given architecture so that there are no alignment
97  * faults for C data types.  In particular, this is required on platforms such
98  * as arm where pointers are 32-bit aligned but there are data types such as
99  * u64 which require 64-bit alignment.
100  */
101 #if defined(ARCH_KMALLOC_MINALIGN)
102 #define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN
103 #elif defined(ARCH_SLAB_MINALIGN)
104 #define CRYPTO_MINALIGN ARCH_SLAB_MINALIGN
105 #else
106 #define CRYPTO_MINALIGN __alignof__(unsigned long long)
107 #endif
108 
109 #define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))
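
/*
 * For example, a per-transform context such as
 *
 *	struct example_ctx {
 *		u64 counter;
 *	};
 *
 * laid out in the context area of struct crypto_tfm (and reached via
 * crypto_tfm_ctx() below) can be dereferenced without alignment faults on
 * all architectures, because that area carries CRYPTO_MINALIGN_ATTR.
 * (Illustrative sketch only; example_ctx is not part of this header.)
 */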
110 
111 struct scatterlist;
112 struct crypto_ablkcipher;
113 struct crypto_async_request;
114 struct crypto_aead;
115 struct crypto_blkcipher;
116 struct crypto_hash;
117 struct crypto_ahash;
118 struct crypto_rng;
119 struct crypto_tfm;
120 struct crypto_type;
121 struct aead_givcrypt_request;
122 struct skcipher_givcrypt_request;
123 
124 typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);
125 
126 struct crypto_async_request {
127 	struct list_head list;
128 	crypto_completion_t complete;
129 	void *data;
130 	struct crypto_tfm *tfm;
131 
132 	u32 flags;
133 };
134 
135 struct ablkcipher_request {
136 	struct crypto_async_request base;
137 
138 	unsigned int nbytes;
139 
140 	void *info;
141 
142 	struct scatterlist *src;
143 	struct scatterlist *dst;
144 
145 	void *__ctx[] CRYPTO_MINALIGN_ATTR;
146 };
147 
148 struct ahash_request {
149 	struct crypto_async_request base;
150 
151 	unsigned int nbytes;
152 	struct scatterlist *src;
153 	u8		   *result;
154 
155 	void *__ctx[] CRYPTO_MINALIGN_ATTR;
156 };
157 
158 /**
159  *	struct aead_request - AEAD request
160  *	@base: Common attributes for async crypto requests
161  *	@assoclen: Length in bytes of associated data for authentication
162  *	@cryptlen: Length of data to be encrypted or decrypted
163  *	@iv: Initialisation vector
164  *	@assoc: Associated data
165  *	@src: Source data
166  *	@dst: Destination data
167  *	@__ctx: Start of private context data
168  */
169 struct aead_request {
170 	struct crypto_async_request base;
171 
172 	unsigned int assoclen;
173 	unsigned int cryptlen;
174 
175 	u8 *iv;
176 
177 	struct scatterlist *assoc;
178 	struct scatterlist *src;
179 	struct scatterlist *dst;
180 
181 	void *__ctx[] CRYPTO_MINALIGN_ATTR;
182 };
183 
184 struct blkcipher_desc {
185 	struct crypto_blkcipher *tfm;
186 	void *info;
187 	u32 flags;
188 };
189 
190 struct cipher_desc {
191 	struct crypto_tfm *tfm;
192 	void (*crfn)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
193 	unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst,
194 			     const u8 *src, unsigned int nbytes);
195 	void *info;
196 };
197 
198 struct hash_desc {
199 	struct crypto_hash *tfm;
200 	u32 flags;
201 };
202 
203 /*
204  * Algorithms: modular crypto algorithm implementations, managed
205  * via crypto_register_alg() and crypto_unregister_alg().
206  */
207 struct ablkcipher_alg {
208 	int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
209 	              unsigned int keylen);
210 	int (*encrypt)(struct ablkcipher_request *req);
211 	int (*decrypt)(struct ablkcipher_request *req);
212 	int (*givencrypt)(struct skcipher_givcrypt_request *req);
213 	int (*givdecrypt)(struct skcipher_givcrypt_request *req);
214 
215 	const char *geniv;
216 
217 	unsigned int min_keysize;
218 	unsigned int max_keysize;
219 	unsigned int ivsize;
220 };
221 
222 struct ahash_alg {
223 	int (*init)(struct ahash_request *req);
224 	int (*reinit)(struct ahash_request *req);
225 	int (*update)(struct ahash_request *req);
226 	int (*final)(struct ahash_request *req);
227 	int (*digest)(struct ahash_request *req);
228 	int (*setkey)(struct crypto_ahash *tfm, const u8 *key,
229 			unsigned int keylen);
230 
231 	unsigned int digestsize;
232 };
233 
234 struct aead_alg {
235 	int (*setkey)(struct crypto_aead *tfm, const u8 *key,
236 	              unsigned int keylen);
237 	int (*setauthsize)(struct crypto_aead *tfm, unsigned int authsize);
238 	int (*encrypt)(struct aead_request *req);
239 	int (*decrypt)(struct aead_request *req);
240 	int (*givencrypt)(struct aead_givcrypt_request *req);
241 	int (*givdecrypt)(struct aead_givcrypt_request *req);
242 
243 	const char *geniv;
244 
245 	unsigned int ivsize;
246 	unsigned int maxauthsize;
247 };
248 
249 struct blkcipher_alg {
250 	int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
251 	              unsigned int keylen);
252 	int (*encrypt)(struct blkcipher_desc *desc,
253 		       struct scatterlist *dst, struct scatterlist *src,
254 		       unsigned int nbytes);
255 	int (*decrypt)(struct blkcipher_desc *desc,
256 		       struct scatterlist *dst, struct scatterlist *src,
257 		       unsigned int nbytes);
258 
259 	const char *geniv;
260 
261 	unsigned int min_keysize;
262 	unsigned int max_keysize;
263 	unsigned int ivsize;
264 };
265 
266 struct cipher_alg {
267 	unsigned int cia_min_keysize;
268 	unsigned int cia_max_keysize;
269 	int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
270 	                  unsigned int keylen);
271 	void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
272 	void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
273 };
274 
275 struct digest_alg {
276 	unsigned int dia_digestsize;
277 	void (*dia_init)(struct crypto_tfm *tfm);
278 	void (*dia_update)(struct crypto_tfm *tfm, const u8 *data,
279 			   unsigned int len);
280 	void (*dia_final)(struct crypto_tfm *tfm, u8 *out);
281 	int (*dia_setkey)(struct crypto_tfm *tfm, const u8 *key,
282 	                  unsigned int keylen);
283 };
284 
285 struct hash_alg {
286 	int (*init)(struct hash_desc *desc);
287 	int (*update)(struct hash_desc *desc, struct scatterlist *sg,
288 		      unsigned int nbytes);
289 	int (*final)(struct hash_desc *desc, u8 *out);
290 	int (*digest)(struct hash_desc *desc, struct scatterlist *sg,
291 		      unsigned int nbytes, u8 *out);
292 	int (*setkey)(struct crypto_hash *tfm, const u8 *key,
293 		      unsigned int keylen);
294 
295 	unsigned int digestsize;
296 };
297 
298 struct compress_alg {
299 	int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
300 			    unsigned int slen, u8 *dst, unsigned int *dlen);
301 	int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
302 			      unsigned int slen, u8 *dst, unsigned int *dlen);
303 };
304 
305 struct rng_alg {
306 	int (*rng_make_random)(struct crypto_rng *tfm, u8 *rdata,
307 			       unsigned int dlen);
308 	int (*rng_reset)(struct crypto_rng *tfm, u8 *seed, unsigned int slen);
309 
310 	unsigned int seedsize;
311 };
312 
313 
314 #define cra_ablkcipher	cra_u.ablkcipher
315 #define cra_aead	cra_u.aead
316 #define cra_blkcipher	cra_u.blkcipher
317 #define cra_cipher	cra_u.cipher
318 #define cra_digest	cra_u.digest
319 #define cra_hash	cra_u.hash
320 #define cra_ahash	cra_u.ahash
321 #define cra_compress	cra_u.compress
322 #define cra_rng		cra_u.rng
323 
324 struct crypto_alg {
325 	struct list_head cra_list;
326 	struct list_head cra_users;
327 
328 	u32 cra_flags;
329 	unsigned int cra_blocksize;
330 	unsigned int cra_ctxsize;
331 	unsigned int cra_alignmask;
332 
333 	int cra_priority;
334 	atomic_t cra_refcnt;
335 
336 	char cra_name[CRYPTO_MAX_ALG_NAME];
337 	char cra_driver_name[CRYPTO_MAX_ALG_NAME];
338 
339 	const struct crypto_type *cra_type;
340 
341 	union {
342 		struct ablkcipher_alg ablkcipher;
343 		struct aead_alg aead;
344 		struct blkcipher_alg blkcipher;
345 		struct cipher_alg cipher;
346 		struct digest_alg digest;
347 		struct hash_alg hash;
348 		struct ahash_alg ahash;
349 		struct compress_alg compress;
350 		struct rng_alg rng;
351 	} cra_u;
352 
353 	int (*cra_init)(struct crypto_tfm *tfm);
354 	void (*cra_exit)(struct crypto_tfm *tfm);
355 	void (*cra_destroy)(struct crypto_alg *alg);
356 
357 	struct module *cra_module;
358 };
359 
360 /*
361  * Algorithm registration interface.
362  */
363 int crypto_register_alg(struct crypto_alg *alg);
364 int crypto_unregister_alg(struct crypto_alg *alg);
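
/*
 * Example: one possible registration of a single-block cipher from a
 * module (illustrative sketch only; the "example" algorithm, struct
 * example_ctx and the example_*() callbacks are assumptions, not part
 * of this header):
 *
 *	static struct crypto_alg example_alg = {
 *		.cra_name		= "example",
 *		.cra_driver_name	= "example-generic",
 *		.cra_priority		= 100,
 *		.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
 *		.cra_blocksize		= 16,
 *		.cra_ctxsize		= sizeof(struct example_ctx),
 *		.cra_module		= THIS_MODULE,
 *		.cra_u			= { .cipher = {
 *			.cia_min_keysize	= 16,
 *			.cia_max_keysize	= 32,
 *			.cia_setkey		= example_setkey,
 *			.cia_encrypt		= example_encrypt,
 *			.cia_decrypt		= example_decrypt,
 *		} },
 *	};
 *
 *	static int __init example_mod_init(void)
 *	{
 *		return crypto_register_alg(&example_alg);
 *	}
 *
 *	static void __exit example_mod_exit(void)
 *	{
 *		crypto_unregister_alg(&example_alg);
 *	}
 */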
365 
366 /*
367  * Algorithm query interface.
368  */
369 int crypto_has_alg(const char *name, u32 type, u32 mask);
370 
371 /*
372  * Transforms: user-instantiated objects which encapsulate algorithms
373  * and core processing logic.  Managed via crypto_alloc_*() and
374  * crypto_free_*(), as well as the various helpers below.
375  */
376 
377 struct ablkcipher_tfm {
378 	int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
379 	              unsigned int keylen);
380 	int (*encrypt)(struct ablkcipher_request *req);
381 	int (*decrypt)(struct ablkcipher_request *req);
382 	int (*givencrypt)(struct skcipher_givcrypt_request *req);
383 	int (*givdecrypt)(struct skcipher_givcrypt_request *req);
384 
385 	struct crypto_ablkcipher *base;
386 
387 	unsigned int ivsize;
388 	unsigned int reqsize;
389 };
390 
391 struct aead_tfm {
392 	int (*setkey)(struct crypto_aead *tfm, const u8 *key,
393 	              unsigned int keylen);
394 	int (*encrypt)(struct aead_request *req);
395 	int (*decrypt)(struct aead_request *req);
396 	int (*givencrypt)(struct aead_givcrypt_request *req);
397 	int (*givdecrypt)(struct aead_givcrypt_request *req);
398 
399 	struct crypto_aead *base;
400 
401 	unsigned int ivsize;
402 	unsigned int authsize;
403 	unsigned int reqsize;
404 };
405 
406 struct blkcipher_tfm {
407 	void *iv;
408 	int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
409 		      unsigned int keylen);
410 	int (*encrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
411 		       struct scatterlist *src, unsigned int nbytes);
412 	int (*decrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
413 		       struct scatterlist *src, unsigned int nbytes);
414 };
415 
416 struct cipher_tfm {
417 	int (*cit_setkey)(struct crypto_tfm *tfm,
418 	                  const u8 *key, unsigned int keylen);
419 	void (*cit_encrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
420 	void (*cit_decrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
421 };
422 
423 struct hash_tfm {
424 	int (*init)(struct hash_desc *desc);
425 	int (*update)(struct hash_desc *desc,
426 		      struct scatterlist *sg, unsigned int nsg);
427 	int (*final)(struct hash_desc *desc, u8 *out);
428 	int (*digest)(struct hash_desc *desc, struct scatterlist *sg,
429 		      unsigned int nsg, u8 *out);
430 	int (*setkey)(struct crypto_hash *tfm, const u8 *key,
431 		      unsigned int keylen);
432 	unsigned int digestsize;
433 };
434 
435 struct ahash_tfm {
436 	int (*init)(struct ahash_request *req);
437 	int (*update)(struct ahash_request *req);
438 	int (*final)(struct ahash_request *req);
439 	int (*digest)(struct ahash_request *req);
440 	int (*setkey)(struct crypto_ahash *tfm, const u8 *key,
441 			unsigned int keylen);
442 
443 	unsigned int digestsize;
444 	unsigned int reqsize;
445 };
446 
447 struct compress_tfm {
448 	int (*cot_compress)(struct crypto_tfm *tfm,
449 	                    const u8 *src, unsigned int slen,
450 	                    u8 *dst, unsigned int *dlen);
451 	int (*cot_decompress)(struct crypto_tfm *tfm,
452 	                      const u8 *src, unsigned int slen,
453 	                      u8 *dst, unsigned int *dlen);
454 };
455 
456 struct rng_tfm {
457 	int (*rng_gen_random)(struct crypto_rng *tfm, u8 *rdata,
458 			      unsigned int dlen);
459 	int (*rng_reset)(struct crypto_rng *tfm, u8 *seed, unsigned int slen);
460 };
461 
462 #define crt_ablkcipher	crt_u.ablkcipher
463 #define crt_aead	crt_u.aead
464 #define crt_blkcipher	crt_u.blkcipher
465 #define crt_cipher	crt_u.cipher
466 #define crt_hash	crt_u.hash
467 #define crt_ahash	crt_u.ahash
468 #define crt_compress	crt_u.compress
469 #define crt_rng		crt_u.rng
470 
471 struct crypto_tfm {
472 
473 	u32 crt_flags;
474 
475 	union {
476 		struct ablkcipher_tfm ablkcipher;
477 		struct aead_tfm aead;
478 		struct blkcipher_tfm blkcipher;
479 		struct cipher_tfm cipher;
480 		struct hash_tfm hash;
481 		struct ahash_tfm ahash;
482 		struct compress_tfm compress;
483 		struct rng_tfm rng;
484 	} crt_u;
485 
486 	void (*exit)(struct crypto_tfm *tfm);
487 
488 	struct crypto_alg *__crt_alg;
489 
490 	void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
491 };
492 
493 struct crypto_ablkcipher {
494 	struct crypto_tfm base;
495 };
496 
497 struct crypto_aead {
498 	struct crypto_tfm base;
499 };
500 
501 struct crypto_blkcipher {
502 	struct crypto_tfm base;
503 };
504 
505 struct crypto_cipher {
506 	struct crypto_tfm base;
507 };
508 
509 struct crypto_comp {
510 	struct crypto_tfm base;
511 };
512 
513 struct crypto_hash {
514 	struct crypto_tfm base;
515 };
516 
517 struct crypto_rng {
518 	struct crypto_tfm base;
519 };
520 
521 enum {
522 	CRYPTOA_UNSPEC,
523 	CRYPTOA_ALG,
524 	CRYPTOA_TYPE,
525 	CRYPTOA_U32,
526 	__CRYPTOA_MAX,
527 };
528 
529 #define CRYPTOA_MAX (__CRYPTOA_MAX - 1)
530 
531 /* Maximum number of (rtattr) parameters for each template. */
532 #define CRYPTO_MAX_ATTRS 32
533 
534 struct crypto_attr_alg {
535 	char name[CRYPTO_MAX_ALG_NAME];
536 };
537 
538 struct crypto_attr_type {
539 	u32 type;
540 	u32 mask;
541 };
542 
543 struct crypto_attr_u32 {
544 	u32 num;
545 };
546 
547 /*
548  * Transform user interface.
549  */
550 
551 struct crypto_tfm *crypto_alloc_tfm(const char *alg_name,
552 				    const struct crypto_type *frontend,
553 				    u32 type, u32 mask);
554 struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
555 void crypto_free_tfm(struct crypto_tfm *tfm);
556 
557 int alg_test(const char *driver, const char *alg, u32 type, u32 mask);
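
/*
 * Example: allocating and releasing a bare transform by name
 * (illustrative sketch only; "cbc(aes)" is just an example name):
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_base("cbc(aes)", CRYPTO_ALG_TYPE_BLKCIPHER,
 *				CRYPTO_ALG_TYPE_MASK);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_tfm(tfm);
 *
 * Most users should prefer the type-specific wrappers further down
 * (crypto_alloc_blkcipher(), crypto_alloc_hash(), ...), which fill in
 * the type and mask bits and return the matching handle type.
 */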
558 
559 /*
560  * Transform helpers which query the underlying algorithm.
561  */
562 static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
563 {
564 	return tfm->__crt_alg->cra_name;
565 }
566 
567 static inline const char *crypto_tfm_alg_driver_name(struct crypto_tfm *tfm)
568 {
569 	return tfm->__crt_alg->cra_driver_name;
570 }
571 
572 static inline int crypto_tfm_alg_priority(struct crypto_tfm *tfm)
573 {
574 	return tfm->__crt_alg->cra_priority;
575 }
576 
577 static inline const char *crypto_tfm_alg_modname(struct crypto_tfm *tfm)
578 {
579 	return module_name(tfm->__crt_alg->cra_module);
580 }
581 
582 static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
583 {
584 	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
585 }
586 
587 static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
588 {
589 	return tfm->__crt_alg->cra_blocksize;
590 }
591 
592 static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
593 {
594 	return tfm->__crt_alg->cra_alignmask;
595 }
596 
597 static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm)
598 {
599 	return tfm->crt_flags;
600 }
601 
602 static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags)
603 {
604 	tfm->crt_flags |= flags;
605 }
606 
607 static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags)
608 {
609 	tfm->crt_flags &= ~flags;
610 }
611 
612 static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
613 {
614 	return tfm->__crt_ctx;
615 }
616 
617 static inline unsigned int crypto_tfm_ctx_alignment(void)
618 {
619 	struct crypto_tfm *tfm;
620 	return __alignof__(tfm->__crt_ctx);
621 }
622 
623 /*
624  * API wrappers.
625  */
626 static inline struct crypto_ablkcipher *__crypto_ablkcipher_cast(
627 	struct crypto_tfm *tfm)
628 {
629 	return (struct crypto_ablkcipher *)tfm;
630 }
631 
632 static inline u32 crypto_skcipher_type(u32 type)
633 {
634 	type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
635 	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
636 	return type;
637 }
638 
639 static inline u32 crypto_skcipher_mask(u32 mask)
640 {
641 	mask &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
642 	mask |= CRYPTO_ALG_TYPE_BLKCIPHER_MASK;
643 	return mask;
644 }
645 
646 struct crypto_ablkcipher *crypto_alloc_ablkcipher(const char *alg_name,
647 						  u32 type, u32 mask);
648 
649 static inline struct crypto_tfm *crypto_ablkcipher_tfm(
650 	struct crypto_ablkcipher *tfm)
651 {
652 	return &tfm->base;
653 }
654 
655 static inline void crypto_free_ablkcipher(struct crypto_ablkcipher *tfm)
656 {
657 	crypto_free_tfm(crypto_ablkcipher_tfm(tfm));
658 }
659 
660 static inline int crypto_has_ablkcipher(const char *alg_name, u32 type,
661 					u32 mask)
662 {
663 	return crypto_has_alg(alg_name, crypto_skcipher_type(type),
664 			      crypto_skcipher_mask(mask));
665 }
666 
667 static inline struct ablkcipher_tfm *crypto_ablkcipher_crt(
668 	struct crypto_ablkcipher *tfm)
669 {
670 	return &crypto_ablkcipher_tfm(tfm)->crt_ablkcipher;
671 }
672 
673 static inline unsigned int crypto_ablkcipher_ivsize(
674 	struct crypto_ablkcipher *tfm)
675 {
676 	return crypto_ablkcipher_crt(tfm)->ivsize;
677 }
678 
679 static inline unsigned int crypto_ablkcipher_blocksize(
680 	struct crypto_ablkcipher *tfm)
681 {
682 	return crypto_tfm_alg_blocksize(crypto_ablkcipher_tfm(tfm));
683 }
684 
685 static inline unsigned int crypto_ablkcipher_alignmask(
686 	struct crypto_ablkcipher *tfm)
687 {
688 	return crypto_tfm_alg_alignmask(crypto_ablkcipher_tfm(tfm));
689 }
690 
691 static inline u32 crypto_ablkcipher_get_flags(struct crypto_ablkcipher *tfm)
692 {
693 	return crypto_tfm_get_flags(crypto_ablkcipher_tfm(tfm));
694 }
695 
696 static inline void crypto_ablkcipher_set_flags(struct crypto_ablkcipher *tfm,
697 					       u32 flags)
698 {
699 	crypto_tfm_set_flags(crypto_ablkcipher_tfm(tfm), flags);
700 }
701 
702 static inline void crypto_ablkcipher_clear_flags(struct crypto_ablkcipher *tfm,
703 						 u32 flags)
704 {
705 	crypto_tfm_clear_flags(crypto_ablkcipher_tfm(tfm), flags);
706 }
707 
708 static inline int crypto_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
709 					   const u8 *key, unsigned int keylen)
710 {
711 	struct ablkcipher_tfm *crt = crypto_ablkcipher_crt(tfm);
712 
713 	return crt->setkey(crt->base, key, keylen);
714 }
715 
716 static inline struct crypto_ablkcipher *crypto_ablkcipher_reqtfm(
717 	struct ablkcipher_request *req)
718 {
719 	return __crypto_ablkcipher_cast(req->base.tfm);
720 }
721 
722 static inline int crypto_ablkcipher_encrypt(struct ablkcipher_request *req)
723 {
724 	struct ablkcipher_tfm *crt =
725 		crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
726 	return crt->encrypt(req);
727 }
728 
729 static inline int crypto_ablkcipher_decrypt(struct ablkcipher_request *req)
730 {
731 	struct ablkcipher_tfm *crt =
732 		crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
733 	return crt->decrypt(req);
734 }
735 
736 static inline unsigned int crypto_ablkcipher_reqsize(
737 	struct crypto_ablkcipher *tfm)
738 {
739 	return crypto_ablkcipher_crt(tfm)->reqsize;
740 }
741 
742 static inline void ablkcipher_request_set_tfm(
743 	struct ablkcipher_request *req, struct crypto_ablkcipher *tfm)
744 {
745 	req->base.tfm = crypto_ablkcipher_tfm(crypto_ablkcipher_crt(tfm)->base);
746 }
747 
748 static inline struct ablkcipher_request *ablkcipher_request_cast(
749 	struct crypto_async_request *req)
750 {
751 	return container_of(req, struct ablkcipher_request, base);
752 }
753 
754 static inline struct ablkcipher_request *ablkcipher_request_alloc(
755 	struct crypto_ablkcipher *tfm, gfp_t gfp)
756 {
757 	struct ablkcipher_request *req;
758 
759 	req = kmalloc(sizeof(struct ablkcipher_request) +
760 		      crypto_ablkcipher_reqsize(tfm), gfp);
761 
762 	if (likely(req))
763 		ablkcipher_request_set_tfm(req, tfm);
764 
765 	return req;
766 }
767 
768 static inline void ablkcipher_request_free(struct ablkcipher_request *req)
769 {
770 	kfree(req);
771 }
772 
773 static inline void ablkcipher_request_set_callback(
774 	struct ablkcipher_request *req,
775 	u32 flags, crypto_completion_t complete, void *data)
776 {
777 	req->base.complete = complete;
778 	req->base.data = data;
779 	req->base.flags = flags;
780 }
781 
782 static inline void ablkcipher_request_set_crypt(
783 	struct ablkcipher_request *req,
784 	struct scatterlist *src, struct scatterlist *dst,
785 	unsigned int nbytes, void *iv)
786 {
787 	req->src = src;
788 	req->dst = dst;
789 	req->nbytes = nbytes;
790 	req->info = iv;
791 }
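
/*
 * Example: one possible asynchronous cipher invocation built from the
 * helpers above (illustrative sketch only; "cbc(aes)", the key/iv/buf
 * variables, my_complete() and wait_for_my_completion() are assumptions,
 * not part of this header; sg_init_one() is from <linux/scatterlist.h>):
 *
 *	struct crypto_ablkcipher *tfm;
 *	struct ablkcipher_request *req;
 *	struct scatterlist sg;
 *	int err;
 *
 *	tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_ablkcipher_setkey(tfm, key, 16);
 *	if (err)
 *		goto out;
 *
 *	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
 *	if (!req) {
 *		err = -ENOMEM;
 *		goto out;
 *	}
 *
 *	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *					my_complete, my_data);
 *	sg_init_one(&sg, buf, len);
 *	ablkcipher_request_set_crypt(req, &sg, &sg, len, iv);
 *
 *	err = crypto_ablkcipher_encrypt(req);
 *	if (err == -EINPROGRESS || err == -EBUSY)
 *		err = wait_for_my_completion();
 *
 *	ablkcipher_request_free(req);
 * out:
 *	crypto_free_ablkcipher(tfm);
 *	return err;
 */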
792 
793 static inline struct crypto_aead *__crypto_aead_cast(struct crypto_tfm *tfm)
794 {
795 	return (struct crypto_aead *)tfm;
796 }
797 
798 struct crypto_aead *crypto_alloc_aead(const char *alg_name, u32 type, u32 mask);
799 
800 static inline struct crypto_tfm *crypto_aead_tfm(struct crypto_aead *tfm)
801 {
802 	return &tfm->base;
803 }
804 
805 static inline void crypto_free_aead(struct crypto_aead *tfm)
806 {
807 	crypto_free_tfm(crypto_aead_tfm(tfm));
808 }
809 
810 static inline struct aead_tfm *crypto_aead_crt(struct crypto_aead *tfm)
811 {
812 	return &crypto_aead_tfm(tfm)->crt_aead;
813 }
814 
815 static inline unsigned int crypto_aead_ivsize(struct crypto_aead *tfm)
816 {
817 	return crypto_aead_crt(tfm)->ivsize;
818 }
819 
820 static inline unsigned int crypto_aead_authsize(struct crypto_aead *tfm)
821 {
822 	return crypto_aead_crt(tfm)->authsize;
823 }
824 
825 static inline unsigned int crypto_aead_blocksize(struct crypto_aead *tfm)
826 {
827 	return crypto_tfm_alg_blocksize(crypto_aead_tfm(tfm));
828 }
829 
830 static inline unsigned int crypto_aead_alignmask(struct crypto_aead *tfm)
831 {
832 	return crypto_tfm_alg_alignmask(crypto_aead_tfm(tfm));
833 }
834 
835 static inline u32 crypto_aead_get_flags(struct crypto_aead *tfm)
836 {
837 	return crypto_tfm_get_flags(crypto_aead_tfm(tfm));
838 }
839 
840 static inline void crypto_aead_set_flags(struct crypto_aead *tfm, u32 flags)
841 {
842 	crypto_tfm_set_flags(crypto_aead_tfm(tfm), flags);
843 }
844 
845 static inline void crypto_aead_clear_flags(struct crypto_aead *tfm, u32 flags)
846 {
847 	crypto_tfm_clear_flags(crypto_aead_tfm(tfm), flags);
848 }
849 
850 static inline int crypto_aead_setkey(struct crypto_aead *tfm, const u8 *key,
851 				     unsigned int keylen)
852 {
853 	struct aead_tfm *crt = crypto_aead_crt(tfm);
854 
855 	return crt->setkey(crt->base, key, keylen);
856 }
857 
858 int crypto_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize);
859 
860 static inline struct crypto_aead *crypto_aead_reqtfm(struct aead_request *req)
861 {
862 	return __crypto_aead_cast(req->base.tfm);
863 }
864 
865 static inline int crypto_aead_encrypt(struct aead_request *req)
866 {
867 	return crypto_aead_crt(crypto_aead_reqtfm(req))->encrypt(req);
868 }
869 
870 static inline int crypto_aead_decrypt(struct aead_request *req)
871 {
872 	return crypto_aead_crt(crypto_aead_reqtfm(req))->decrypt(req);
873 }
874 
875 static inline unsigned int crypto_aead_reqsize(struct crypto_aead *tfm)
876 {
877 	return crypto_aead_crt(tfm)->reqsize;
878 }
879 
880 static inline void aead_request_set_tfm(struct aead_request *req,
881 					struct crypto_aead *tfm)
882 {
883 	req->base.tfm = crypto_aead_tfm(crypto_aead_crt(tfm)->base);
884 }
885 
886 static inline struct aead_request *aead_request_alloc(struct crypto_aead *tfm,
887 						      gfp_t gfp)
888 {
889 	struct aead_request *req;
890 
891 	req = kmalloc(sizeof(*req) + crypto_aead_reqsize(tfm), gfp);
892 
893 	if (likely(req))
894 		aead_request_set_tfm(req, tfm);
895 
896 	return req;
897 }
898 
899 static inline void aead_request_free(struct aead_request *req)
900 {
901 	kfree(req);
902 }
903 
904 static inline void aead_request_set_callback(struct aead_request *req,
905 					     u32 flags,
906 					     crypto_completion_t complete,
907 					     void *data)
908 {
909 	req->base.complete = complete;
910 	req->base.data = data;
911 	req->base.flags = flags;
912 }
913 
914 static inline void aead_request_set_crypt(struct aead_request *req,
915 					  struct scatterlist *src,
916 					  struct scatterlist *dst,
917 					  unsigned int cryptlen, u8 *iv)
918 {
919 	req->src = src;
920 	req->dst = dst;
921 	req->cryptlen = cryptlen;
922 	req->iv = iv;
923 }
924 
925 static inline void aead_request_set_assoc(struct aead_request *req,
926 					  struct scatterlist *assoc,
927 					  unsigned int assoclen)
928 {
929 	req->assoc = assoc;
930 	req->assoclen = assoclen;
931 }
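
/*
 * Example: one possible AEAD encryption call built from the helpers
 * above (illustrative sketch only; "gcm(aes)", the scatterlists, key/iv,
 * my_complete() and wait_for_my_completion() are assumptions, not part
 * of this header):
 *
 *	struct crypto_aead *tfm;
 *	struct aead_request *req;
 *	int err;
 *
 *	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_aead_setkey(tfm, key, 16);
 *	if (!err)
 *		err = crypto_aead_setauthsize(tfm, 16);
 *	if (err)
 *		goto out;
 *
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	if (!req) {
 *		err = -ENOMEM;
 *		goto out;
 *	}
 *
 *	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				  my_complete, my_data);
 *	aead_request_set_crypt(req, src_sg, dst_sg, cryptlen, iv);
 *	aead_request_set_assoc(req, assoc_sg, assoclen);
 *
 *	err = crypto_aead_encrypt(req);
 *	if (err == -EINPROGRESS || err == -EBUSY)
 *		err = wait_for_my_completion();
 *
 *	aead_request_free(req);
 * out:
 *	crypto_free_aead(tfm);
 *	return err;
 *
 * On encryption the destination must have room for the authentication
 * tag (crypto_aead_authsize() bytes) in addition to cryptlen.
 */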
932 
933 static inline struct crypto_blkcipher *__crypto_blkcipher_cast(
934 	struct crypto_tfm *tfm)
935 {
936 	return (struct crypto_blkcipher *)tfm;
937 }
938 
939 static inline struct crypto_blkcipher *crypto_blkcipher_cast(
940 	struct crypto_tfm *tfm)
941 {
942 	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_BLKCIPHER);
943 	return __crypto_blkcipher_cast(tfm);
944 }
945 
946 static inline struct crypto_blkcipher *crypto_alloc_blkcipher(
947 	const char *alg_name, u32 type, u32 mask)
948 {
949 	type &= ~CRYPTO_ALG_TYPE_MASK;
950 	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
951 	mask |= CRYPTO_ALG_TYPE_MASK;
952 
953 	return __crypto_blkcipher_cast(crypto_alloc_base(alg_name, type, mask));
954 }
955 
956 static inline struct crypto_tfm *crypto_blkcipher_tfm(
957 	struct crypto_blkcipher *tfm)
958 {
959 	return &tfm->base;
960 }
961 
962 static inline void crypto_free_blkcipher(struct crypto_blkcipher *tfm)
963 {
964 	crypto_free_tfm(crypto_blkcipher_tfm(tfm));
965 }
966 
967 static inline int crypto_has_blkcipher(const char *alg_name, u32 type, u32 mask)
968 {
969 	type &= ~CRYPTO_ALG_TYPE_MASK;
970 	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
971 	mask |= CRYPTO_ALG_TYPE_MASK;
972 
973 	return crypto_has_alg(alg_name, type, mask);
974 }
975 
976 static inline const char *crypto_blkcipher_name(struct crypto_blkcipher *tfm)
977 {
978 	return crypto_tfm_alg_name(crypto_blkcipher_tfm(tfm));
979 }
980 
981 static inline struct blkcipher_tfm *crypto_blkcipher_crt(
982 	struct crypto_blkcipher *tfm)
983 {
984 	return &crypto_blkcipher_tfm(tfm)->crt_blkcipher;
985 }
986 
987 static inline struct blkcipher_alg *crypto_blkcipher_alg(
988 	struct crypto_blkcipher *tfm)
989 {
990 	return &crypto_blkcipher_tfm(tfm)->__crt_alg->cra_blkcipher;
991 }
992 
993 static inline unsigned int crypto_blkcipher_ivsize(struct crypto_blkcipher *tfm)
994 {
995 	return crypto_blkcipher_alg(tfm)->ivsize;
996 }
997 
998 static inline unsigned int crypto_blkcipher_blocksize(
999 	struct crypto_blkcipher *tfm)
1000 {
1001 	return crypto_tfm_alg_blocksize(crypto_blkcipher_tfm(tfm));
1002 }
1003 
1004 static inline unsigned int crypto_blkcipher_alignmask(
1005 	struct crypto_blkcipher *tfm)
1006 {
1007 	return crypto_tfm_alg_alignmask(crypto_blkcipher_tfm(tfm));
1008 }
1009 
1010 static inline u32 crypto_blkcipher_get_flags(struct crypto_blkcipher *tfm)
1011 {
1012 	return crypto_tfm_get_flags(crypto_blkcipher_tfm(tfm));
1013 }
1014 
1015 static inline void crypto_blkcipher_set_flags(struct crypto_blkcipher *tfm,
1016 					      u32 flags)
1017 {
1018 	crypto_tfm_set_flags(crypto_blkcipher_tfm(tfm), flags);
1019 }
1020 
1021 static inline void crypto_blkcipher_clear_flags(struct crypto_blkcipher *tfm,
1022 						u32 flags)
1023 {
1024 	crypto_tfm_clear_flags(crypto_blkcipher_tfm(tfm), flags);
1025 }
1026 
1027 static inline int crypto_blkcipher_setkey(struct crypto_blkcipher *tfm,
1028 					  const u8 *key, unsigned int keylen)
1029 {
1030 	return crypto_blkcipher_crt(tfm)->setkey(crypto_blkcipher_tfm(tfm),
1031 						 key, keylen);
1032 }
1033 
1034 static inline int crypto_blkcipher_encrypt(struct blkcipher_desc *desc,
1035 					   struct scatterlist *dst,
1036 					   struct scatterlist *src,
1037 					   unsigned int nbytes)
1038 {
1039 	desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
1040 	return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
1041 }
1042 
1043 static inline int crypto_blkcipher_encrypt_iv(struct blkcipher_desc *desc,
1044 					      struct scatterlist *dst,
1045 					      struct scatterlist *src,
1046 					      unsigned int nbytes)
1047 {
1048 	return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
1049 }
1050 
1051 static inline int crypto_blkcipher_decrypt(struct blkcipher_desc *desc,
1052 					   struct scatterlist *dst,
1053 					   struct scatterlist *src,
1054 					   unsigned int nbytes)
1055 {
1056 	desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
1057 	return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
1058 }
1059 
1060 static inline int crypto_blkcipher_decrypt_iv(struct blkcipher_desc *desc,
1061 					      struct scatterlist *dst,
1062 					      struct scatterlist *src,
1063 					      unsigned int nbytes)
1064 {
1065 	return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
1066 }
1067 
1068 static inline void crypto_blkcipher_set_iv(struct crypto_blkcipher *tfm,
1069 					   const u8 *src, unsigned int len)
1070 {
1071 	memcpy(crypto_blkcipher_crt(tfm)->iv, src, len);
1072 }
1073 
1074 static inline void crypto_blkcipher_get_iv(struct crypto_blkcipher *tfm,
1075 					   u8 *dst, unsigned int len)
1076 {
1077 	memcpy(dst, crypto_blkcipher_crt(tfm)->iv, len);
1078 }
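
/*
 * Example: one possible synchronous block cipher encryption using the
 * helpers above (illustrative sketch only; "cbc(aes)" and the key/iv/buf
 * variables are assumptions, not part of this header; sg_init_one() is
 * from <linux/scatterlist.h>):
 *
 *	struct crypto_blkcipher *tfm;
 *	struct blkcipher_desc desc;
 *	struct scatterlist sg;
 *	int err;
 *
 *	tfm = crypto_alloc_blkcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_blkcipher_setkey(tfm, key, 16);
 *	if (err)
 *		goto out;
 *
 *	crypto_blkcipher_set_iv(tfm, iv, crypto_blkcipher_ivsize(tfm));
 *
 *	desc.tfm = tfm;
 *	desc.flags = 0;
 *	sg_init_one(&sg, buf, len);
 *
 *	err = crypto_blkcipher_encrypt(&desc, &sg, &sg, len);
 * out:
 *	crypto_free_blkcipher(tfm);
 *	return err;
 *
 * Passing CRYPTO_ALG_ASYNC in the mask (with the bit clear in the type)
 * restricts the lookup to synchronous implementations.
 */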
1079 
1080 static inline struct crypto_cipher *__crypto_cipher_cast(struct crypto_tfm *tfm)
1081 {
1082 	return (struct crypto_cipher *)tfm;
1083 }
1084 
1085 static inline struct crypto_cipher *crypto_cipher_cast(struct crypto_tfm *tfm)
1086 {
1087 	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
1088 	return __crypto_cipher_cast(tfm);
1089 }
1090 
1091 static inline struct crypto_cipher *crypto_alloc_cipher(const char *alg_name,
1092 							u32 type, u32 mask)
1093 {
1094 	type &= ~CRYPTO_ALG_TYPE_MASK;
1095 	type |= CRYPTO_ALG_TYPE_CIPHER;
1096 	mask |= CRYPTO_ALG_TYPE_MASK;
1097 
1098 	return __crypto_cipher_cast(crypto_alloc_base(alg_name, type, mask));
1099 }
1100 
1101 static inline struct crypto_tfm *crypto_cipher_tfm(struct crypto_cipher *tfm)
1102 {
1103 	return &tfm->base;
1104 }
1105 
1106 static inline void crypto_free_cipher(struct crypto_cipher *tfm)
1107 {
1108 	crypto_free_tfm(crypto_cipher_tfm(tfm));
1109 }
1110 
1111 static inline int crypto_has_cipher(const char *alg_name, u32 type, u32 mask)
1112 {
1113 	type &= ~CRYPTO_ALG_TYPE_MASK;
1114 	type |= CRYPTO_ALG_TYPE_CIPHER;
1115 	mask |= CRYPTO_ALG_TYPE_MASK;
1116 
1117 	return crypto_has_alg(alg_name, type, mask);
1118 }
1119 
1120 static inline struct cipher_tfm *crypto_cipher_crt(struct crypto_cipher *tfm)
1121 {
1122 	return &crypto_cipher_tfm(tfm)->crt_cipher;
1123 }
1124 
1125 static inline unsigned int crypto_cipher_blocksize(struct crypto_cipher *tfm)
1126 {
1127 	return crypto_tfm_alg_blocksize(crypto_cipher_tfm(tfm));
1128 }
1129 
1130 static inline unsigned int crypto_cipher_alignmask(struct crypto_cipher *tfm)
1131 {
1132 	return crypto_tfm_alg_alignmask(crypto_cipher_tfm(tfm));
1133 }
1134 
1135 static inline u32 crypto_cipher_get_flags(struct crypto_cipher *tfm)
1136 {
1137 	return crypto_tfm_get_flags(crypto_cipher_tfm(tfm));
1138 }
1139 
1140 static inline void crypto_cipher_set_flags(struct crypto_cipher *tfm,
1141 					   u32 flags)
1142 {
1143 	crypto_tfm_set_flags(crypto_cipher_tfm(tfm), flags);
1144 }
1145 
1146 static inline void crypto_cipher_clear_flags(struct crypto_cipher *tfm,
1147 					     u32 flags)
1148 {
1149 	crypto_tfm_clear_flags(crypto_cipher_tfm(tfm), flags);
1150 }
1151 
1152 static inline int crypto_cipher_setkey(struct crypto_cipher *tfm,
1153                                        const u8 *key, unsigned int keylen)
1154 {
1155 	return crypto_cipher_crt(tfm)->cit_setkey(crypto_cipher_tfm(tfm),
1156 						  key, keylen);
1157 }
1158 
1159 static inline void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
1160 					     u8 *dst, const u8 *src)
1161 {
1162 	crypto_cipher_crt(tfm)->cit_encrypt_one(crypto_cipher_tfm(tfm),
1163 						dst, src);
1164 }
1165 
1166 static inline void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
1167 					     u8 *dst, const u8 *src)
1168 {
1169 	crypto_cipher_crt(tfm)->cit_decrypt_one(crypto_cipher_tfm(tfm),
1170 						dst, src);
1171 }
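
/*
 * Example: encrypting a single block in place with the single-block
 * cipher interface above (illustrative sketch only; "aes" and the
 * key/block buffers are assumptions, not part of this header):
 *
 *	struct crypto_cipher *tfm;
 *	int err;
 *
 *	tfm = crypto_alloc_cipher("aes", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_cipher_setkey(tfm, key, 16);
 *	if (!err)
 *		crypto_cipher_encrypt_one(tfm, block, block);
 *
 *	crypto_free_cipher(tfm);
 *	return err;
 *
 * The source and destination must each be crypto_cipher_blocksize()
 * bytes long; no chaining mode or IV handling is done at this level.
 */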
1172 
1173 static inline struct crypto_hash *__crypto_hash_cast(struct crypto_tfm *tfm)
1174 {
1175 	return (struct crypto_hash *)tfm;
1176 }
1177 
1178 static inline struct crypto_hash *crypto_hash_cast(struct crypto_tfm *tfm)
1179 {
1180 	BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_HASH) &
1181 	       CRYPTO_ALG_TYPE_HASH_MASK);
1182 	return __crypto_hash_cast(tfm);
1183 }
1184 
1185 static inline struct crypto_hash *crypto_alloc_hash(const char *alg_name,
1186 						    u32 type, u32 mask)
1187 {
1188 	type &= ~CRYPTO_ALG_TYPE_MASK;
1189 	mask &= ~CRYPTO_ALG_TYPE_MASK;
1190 	type |= CRYPTO_ALG_TYPE_HASH;
1191 	mask |= CRYPTO_ALG_TYPE_HASH_MASK;
1192 
1193 	return __crypto_hash_cast(crypto_alloc_base(alg_name, type, mask));
1194 }
1195 
1196 static inline struct crypto_tfm *crypto_hash_tfm(struct crypto_hash *tfm)
1197 {
1198 	return &tfm->base;
1199 }
1200 
1201 static inline void crypto_free_hash(struct crypto_hash *tfm)
1202 {
1203 	crypto_free_tfm(crypto_hash_tfm(tfm));
1204 }
1205 
1206 static inline int crypto_has_hash(const char *alg_name, u32 type, u32 mask)
1207 {
1208 	type &= ~CRYPTO_ALG_TYPE_MASK;
1209 	mask &= ~CRYPTO_ALG_TYPE_MASK;
1210 	type |= CRYPTO_ALG_TYPE_HASH;
1211 	mask |= CRYPTO_ALG_TYPE_HASH_MASK;
1212 
1213 	return crypto_has_alg(alg_name, type, mask);
1214 }
1215 
1216 static inline struct hash_tfm *crypto_hash_crt(struct crypto_hash *tfm)
1217 {
1218 	return &crypto_hash_tfm(tfm)->crt_hash;
1219 }
1220 
1221 static inline unsigned int crypto_hash_blocksize(struct crypto_hash *tfm)
1222 {
1223 	return crypto_tfm_alg_blocksize(crypto_hash_tfm(tfm));
1224 }
1225 
1226 static inline unsigned int crypto_hash_alignmask(struct crypto_hash *tfm)
1227 {
1228 	return crypto_tfm_alg_alignmask(crypto_hash_tfm(tfm));
1229 }
1230 
1231 static inline unsigned int crypto_hash_digestsize(struct crypto_hash *tfm)
1232 {
1233 	return crypto_hash_crt(tfm)->digestsize;
1234 }
1235 
1236 static inline u32 crypto_hash_get_flags(struct crypto_hash *tfm)
1237 {
1238 	return crypto_tfm_get_flags(crypto_hash_tfm(tfm));
1239 }
1240 
1241 static inline void crypto_hash_set_flags(struct crypto_hash *tfm, u32 flags)
1242 {
1243 	crypto_tfm_set_flags(crypto_hash_tfm(tfm), flags);
1244 }
1245 
1246 static inline void crypto_hash_clear_flags(struct crypto_hash *tfm, u32 flags)
1247 {
1248 	crypto_tfm_clear_flags(crypto_hash_tfm(tfm), flags);
1249 }
1250 
1251 static inline int crypto_hash_init(struct hash_desc *desc)
1252 {
1253 	return crypto_hash_crt(desc->tfm)->init(desc);
1254 }
1255 
1256 static inline int crypto_hash_update(struct hash_desc *desc,
1257 				     struct scatterlist *sg,
1258 				     unsigned int nbytes)
1259 {
1260 	return crypto_hash_crt(desc->tfm)->update(desc, sg, nbytes);
1261 }
1262 
1263 static inline int crypto_hash_final(struct hash_desc *desc, u8 *out)
1264 {
1265 	return crypto_hash_crt(desc->tfm)->final(desc, out);
1266 }
1267 
1268 static inline int crypto_hash_digest(struct hash_desc *desc,
1269 				     struct scatterlist *sg,
1270 				     unsigned int nbytes, u8 *out)
1271 {
1272 	return crypto_hash_crt(desc->tfm)->digest(desc, sg, nbytes, out);
1273 }
1274 
1275 static inline int crypto_hash_setkey(struct crypto_hash *hash,
1276 				     const u8 *key, unsigned int keylen)
1277 {
1278 	return crypto_hash_crt(hash)->setkey(hash, key, keylen);
1279 }
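
/*
 * Example: one possible digest computation over a single buffer using
 * the synchronous hash interface above (illustrative sketch only; "sha1"
 * and the buffer names are assumptions, not part of this header;
 * sg_init_one() is from <linux/scatterlist.h>):
 *
 *	struct crypto_hash *tfm;
 *	struct hash_desc desc;
 *	struct scatterlist sg;
 *	u8 out[20];
 *	int err;
 *
 *	tfm = crypto_alloc_hash("sha1", 0, CRYPTO_ALG_ASYNC);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	desc.tfm = tfm;
 *	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
 *	sg_init_one(&sg, buf, len);
 *
 *	err = crypto_hash_digest(&desc, &sg, len, out);
 *
 *	crypto_free_hash(tfm);
 *	return err;
 *
 * For incremental hashing use crypto_hash_init(), one or more
 * crypto_hash_update() calls and a final crypto_hash_final() instead of
 * the one-shot crypto_hash_digest().
 */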
1280 
1281 static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm)
1282 {
1283 	return (struct crypto_comp *)tfm;
1284 }
1285 
1286 static inline struct crypto_comp *crypto_comp_cast(struct crypto_tfm *tfm)
1287 {
1288 	BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_COMPRESS) &
1289 	       CRYPTO_ALG_TYPE_MASK);
1290 	return __crypto_comp_cast(tfm);
1291 }
1292 
1293 static inline struct crypto_comp *crypto_alloc_comp(const char *alg_name,
1294 						    u32 type, u32 mask)
1295 {
1296 	type &= ~CRYPTO_ALG_TYPE_MASK;
1297 	type |= CRYPTO_ALG_TYPE_COMPRESS;
1298 	mask |= CRYPTO_ALG_TYPE_MASK;
1299 
1300 	return __crypto_comp_cast(crypto_alloc_base(alg_name, type, mask));
1301 }
1302 
1303 static inline struct crypto_tfm *crypto_comp_tfm(struct crypto_comp *tfm)
1304 {
1305 	return &tfm->base;
1306 }
1307 
1308 static inline void crypto_free_comp(struct crypto_comp *tfm)
1309 {
1310 	crypto_free_tfm(crypto_comp_tfm(tfm));
1311 }
1312 
1313 static inline int crypto_has_comp(const char *alg_name, u32 type, u32 mask)
1314 {
1315 	type &= ~CRYPTO_ALG_TYPE_MASK;
1316 	type |= CRYPTO_ALG_TYPE_COMPRESS;
1317 	mask |= CRYPTO_ALG_TYPE_MASK;
1318 
1319 	return crypto_has_alg(alg_name, type, mask);
1320 }
1321 
1322 static inline const char *crypto_comp_name(struct crypto_comp *tfm)
1323 {
1324 	return crypto_tfm_alg_name(crypto_comp_tfm(tfm));
1325 }
1326 
1327 static inline struct compress_tfm *crypto_comp_crt(struct crypto_comp *tfm)
1328 {
1329 	return &crypto_comp_tfm(tfm)->crt_compress;
1330 }
1331 
1332 static inline int crypto_comp_compress(struct crypto_comp *tfm,
1333                                        const u8 *src, unsigned int slen,
1334                                        u8 *dst, unsigned int *dlen)
1335 {
1336 	return crypto_comp_crt(tfm)->cot_compress(crypto_comp_tfm(tfm),
1337 						  src, slen, dst, dlen);
1338 }
1339 
1340 static inline int crypto_comp_decompress(struct crypto_comp *tfm,
1341                                          const u8 *src, unsigned int slen,
1342                                          u8 *dst, unsigned int *dlen)
1343 {
1344 	return crypto_comp_crt(tfm)->cot_decompress(crypto_comp_tfm(tfm),
1345 						    src, slen, dst, dlen);
1346 }
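
/*
 * Example: compressing a buffer with the compression interface above
 * (illustrative sketch only; "deflate" and the src/dst buffers are
 * assumptions, not part of this header):
 *
 *	struct crypto_comp *tfm;
 *	unsigned int dlen = dst_size;
 *	int err;
 *
 *	tfm = crypto_alloc_comp("deflate", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_comp_compress(tfm, src, slen, dst, &dlen);
 *
 *	crypto_free_comp(tfm);
 *	return err;
 *
 * dlen must be initialised to the capacity of the destination buffer and
 * is updated to the produced length on success; crypto_comp_decompress()
 * follows the same convention in the other direction.
 */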
1347 
1348 #endif	/* _LINUX_CRYPTO_H */