/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <[email protected]>
 * Copyright (c) 2002 David S. Miller ([email protected])
 * Copyright (c) 2005 Herbert Xu <[email protected]>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <[email protected]>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _LINUX_CRYPTO_H
#define _LINUX_CRYPTO_H

#include <asm/atomic.h>
#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/uaccess.h>

/*
 * Algorithm masks and types.
 */
#define CRYPTO_ALG_TYPE_MASK		0x0000000f
#define CRYPTO_ALG_TYPE_CIPHER		0x00000001
#define CRYPTO_ALG_TYPE_COMPRESS	0x00000002
#define CRYPTO_ALG_TYPE_AEAD		0x00000003
#define CRYPTO_ALG_TYPE_BLKCIPHER	0x00000004
#define CRYPTO_ALG_TYPE_ABLKCIPHER	0x00000005
#define CRYPTO_ALG_TYPE_GIVCIPHER	0x00000006
#define CRYPTO_ALG_TYPE_DIGEST		0x00000008
#define CRYPTO_ALG_TYPE_HASH		0x00000009
#define CRYPTO_ALG_TYPE_AHASH		0x0000000a
#define CRYPTO_ALG_TYPE_RNG		0x0000000c

#define CRYPTO_ALG_TYPE_HASH_MASK	0x0000000e
#define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000c
#define CRYPTO_ALG_TYPE_BLKCIPHER_MASK	0x0000000c

#define CRYPTO_ALG_LARVAL		0x00000010
#define CRYPTO_ALG_DEAD			0x00000020
#define CRYPTO_ALG_DYING		0x00000040
#define CRYPTO_ALG_ASYNC		0x00000080

/*
 * Set this bit if and only if the algorithm requires another algorithm of
 * the same type to handle corner cases.
 */
#define CRYPTO_ALG_NEED_FALLBACK	0x00000100

/*
 * This bit is set for symmetric key ciphers that have already been wrapped
 * with a generic IV generator to prevent them from being wrapped again.
 */
#define CRYPTO_ALG_GENIV		0x00000200

/*
 * Set if the algorithm has passed automated run-time testing.  Note that
 * if there is no run-time testing for a given algorithm it is considered
 * to have passed.
 */
#define CRYPTO_ALG_TESTED		0x00000400

/*
 * Transform masks and values (for crt_flags).
 */
#define CRYPTO_TFM_REQ_MASK		0x000fff00
#define CRYPTO_TFM_RES_MASK		0xfff00000

#define CRYPTO_TFM_REQ_WEAK_KEY		0x00000100
#define CRYPTO_TFM_REQ_MAY_SLEEP	0x00000200
#define CRYPTO_TFM_REQ_MAY_BACKLOG	0x00000400
#define CRYPTO_TFM_RES_WEAK_KEY		0x00100000
#define CRYPTO_TFM_RES_BAD_KEY_LEN	0x00200000
#define CRYPTO_TFM_RES_BAD_KEY_SCHED	0x00400000
#define CRYPTO_TFM_RES_BAD_BLOCK_LEN	0x00800000
#define CRYPTO_TFM_RES_BAD_FLAGS	0x01000000

/*
 * Miscellaneous stuff.
 */
#define CRYPTO_MAX_ALG_NAME		64

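/*
 * Illustrative note (not part of the original header): the type/mask pairs
 * above are handed to crypto_alloc_base() and the crypto_alloc_*() wrappers
 * declared later in this file.  Roughly speaking, the bits set in the mask
 * select which cra_flags bits must match the corresponding bits in the type.
 * A minimal sketch, assuming a registered "cbc(aes)" implementation: a caller
 * that cannot handle asynchronous completion can demand a synchronous
 * transform by putting CRYPTO_ALG_ASYNC in the mask while leaving it clear
 * in the type:
 *
 *        struct crypto_blkcipher *tfm;
 *
 *        tfm = crypto_alloc_blkcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
 */
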
/*
 * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
 * declaration) is used to ensure that the crypto_tfm context structure is
 * aligned correctly for the given architecture so that there are no alignment
 * faults for C data types.  In particular, this is required on platforms such
 * as arm where pointers are 32-bit aligned but there are data types such as
 * u64 which require 64-bit alignment.
 */
#if defined(ARCH_KMALLOC_MINALIGN)
#define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN
#elif defined(ARCH_SLAB_MINALIGN)
#define CRYPTO_MINALIGN ARCH_SLAB_MINALIGN
#else
#define CRYPTO_MINALIGN __alignof__(unsigned long long)
#endif

#define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))

struct scatterlist;
struct crypto_ablkcipher;
struct crypto_async_request;
struct crypto_aead;
struct crypto_blkcipher;
struct crypto_hash;
struct crypto_ahash;
struct crypto_rng;
struct crypto_tfm;
struct crypto_type;
struct aead_givcrypt_request;
struct skcipher_givcrypt_request;

typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);

struct crypto_async_request {
        struct list_head list;
        crypto_completion_t complete;
        void *data;
        struct crypto_tfm *tfm;

        u32 flags;
};

struct ablkcipher_request {
        struct crypto_async_request base;

        unsigned int nbytes;

        void *info;

        struct scatterlist *src;
        struct scatterlist *dst;

        void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct ahash_request {
        struct crypto_async_request base;

        unsigned int nbytes;
        struct scatterlist *src;
        u8 *result;

        void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

/**
 * struct aead_request - AEAD request
 * @base: Common attributes for async crypto requests
 * @assoclen: Length in bytes of associated data for authentication
 * @cryptlen: Length of data to be encrypted or decrypted
 * @iv: Initialisation vector
 * @assoc: Associated data
 * @src: Source data
 * @dst: Destination data
 * @__ctx: Start of private context data
 */
struct aead_request {
        struct crypto_async_request base;

        unsigned int assoclen;
        unsigned int cryptlen;

        u8 *iv;

        struct scatterlist *assoc;
        struct scatterlist *src;
        struct scatterlist *dst;

        void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct blkcipher_desc {
        struct crypto_blkcipher *tfm;
        void *info;
        u32 flags;
};

struct cipher_desc {
        struct crypto_tfm *tfm;
        void (*crfn)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
        unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst,
                             const u8 *src, unsigned int nbytes);
        void *info;
};

struct hash_desc {
        struct crypto_hash *tfm;
        u32 flags;
};

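/*
 * Illustrative sketch (not part of the original header): an asynchronous
 * request signals completion through the crypto_completion_t callback in
 * struct crypto_async_request.  A common caller pattern bounces the result
 * into a struct completion; this assumes <linux/completion.h>, and the names
 * my_result/my_complete are made up for the example:
 *
 *        struct my_result {
 *                struct completion done;
 *                int err;
 *        };
 *
 *        static void my_complete(struct crypto_async_request *req, int err)
 *        {
 *                struct my_result *res = req->data;
 *
 *                if (err == -EINPROGRESS)
 *                        return;
 *                res->err = err;
 *                complete(&res->done);
 *        }
 *
 * The -EINPROGRESS notification is only delivered for requests that were
 * previously backlogged; callers typically ignore it and wait for the final
 * completion.
 */
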
/*
 * Algorithms: modular crypto algorithm implementations, managed
 * via crypto_register_alg() and crypto_unregister_alg().
 */
struct ablkcipher_alg {
        int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
                      unsigned int keylen);
        int (*encrypt)(struct ablkcipher_request *req);
        int (*decrypt)(struct ablkcipher_request *req);
        int (*givencrypt)(struct skcipher_givcrypt_request *req);
        int (*givdecrypt)(struct skcipher_givcrypt_request *req);

        const char *geniv;

        unsigned int min_keysize;
        unsigned int max_keysize;
        unsigned int ivsize;
};

struct ahash_alg {
        int (*init)(struct ahash_request *req);
        int (*update)(struct ahash_request *req);
        int (*final)(struct ahash_request *req);
        int (*digest)(struct ahash_request *req);
        int (*setkey)(struct crypto_ahash *tfm, const u8 *key,
                      unsigned int keylen);

        unsigned int digestsize;
};

struct aead_alg {
        int (*setkey)(struct crypto_aead *tfm, const u8 *key,
                      unsigned int keylen);
        int (*setauthsize)(struct crypto_aead *tfm, unsigned int authsize);
        int (*encrypt)(struct aead_request *req);
        int (*decrypt)(struct aead_request *req);
        int (*givencrypt)(struct aead_givcrypt_request *req);
        int (*givdecrypt)(struct aead_givcrypt_request *req);

        const char *geniv;

        unsigned int ivsize;
        unsigned int maxauthsize;
};

struct blkcipher_alg {
        int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
                      unsigned int keylen);
        int (*encrypt)(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes);
        int (*decrypt)(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes);

        const char *geniv;

        unsigned int min_keysize;
        unsigned int max_keysize;
        unsigned int ivsize;
};

struct cipher_alg {
        unsigned int cia_min_keysize;
        unsigned int cia_max_keysize;
        int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
                          unsigned int keylen);
        void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
        void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};

struct digest_alg {
        unsigned int dia_digestsize;
        void (*dia_init)(struct crypto_tfm *tfm);
        void (*dia_update)(struct crypto_tfm *tfm, const u8 *data,
                           unsigned int len);
        void (*dia_final)(struct crypto_tfm *tfm, u8 *out);
        int (*dia_setkey)(struct crypto_tfm *tfm, const u8 *key,
                          unsigned int keylen);
};

struct hash_alg {
        int (*init)(struct hash_desc *desc);
        int (*update)(struct hash_desc *desc, struct scatterlist *sg,
                      unsigned int nbytes);
        int (*final)(struct hash_desc *desc, u8 *out);
        int (*digest)(struct hash_desc *desc, struct scatterlist *sg,
                      unsigned int nbytes, u8 *out);
        int (*setkey)(struct crypto_hash *tfm, const u8 *key,
                      unsigned int keylen);

        unsigned int digestsize;
};

struct compress_alg {
        int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
                            unsigned int slen, u8 *dst, unsigned int *dlen);
        int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
                              unsigned int slen, u8 *dst, unsigned int *dlen);
};

struct rng_alg {
        int (*rng_make_random)(struct crypto_rng *tfm, u8 *rdata,
                               unsigned int dlen);
        int (*rng_reset)(struct crypto_rng *tfm, u8 *seed, unsigned int slen);

        unsigned int seedsize;
};

#define cra_ablkcipher	cra_u.ablkcipher
#define cra_aead	cra_u.aead
#define cra_blkcipher	cra_u.blkcipher
#define cra_cipher	cra_u.cipher
#define cra_digest	cra_u.digest
#define cra_hash	cra_u.hash
#define cra_ahash	cra_u.ahash
#define cra_compress	cra_u.compress
#define cra_rng		cra_u.rng

struct crypto_alg {
        struct list_head cra_list;
        struct list_head cra_users;

        u32 cra_flags;
        unsigned int cra_blocksize;
        unsigned int cra_ctxsize;
        unsigned int cra_alignmask;

        int cra_priority;
        atomic_t cra_refcnt;

        char cra_name[CRYPTO_MAX_ALG_NAME];
        char cra_driver_name[CRYPTO_MAX_ALG_NAME];

        const struct crypto_type *cra_type;

        union {
                struct ablkcipher_alg ablkcipher;
                struct aead_alg aead;
                struct blkcipher_alg blkcipher;
                struct cipher_alg cipher;
                struct digest_alg digest;
                struct hash_alg hash;
                struct ahash_alg ahash;
                struct compress_alg compress;
                struct rng_alg rng;
        } cra_u;

        int (*cra_init)(struct crypto_tfm *tfm);
        void (*cra_exit)(struct crypto_tfm *tfm);
        void (*cra_destroy)(struct crypto_alg *alg);

        struct module *cra_module;
};

/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
int crypto_unregister_alg(struct crypto_alg *alg);

/*
 * Algorithm query interface.
 */
int crypto_has_alg(const char *name, u32 type, u32 mask);

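/*
 * Illustrative sketch (not part of the original header): a module providing
 * a simple single-block cipher fills in cra_cipher and registers the
 * algorithm.  All myalg_* names are hypothetical; myalg_setkey/encrypt/
 * decrypt are assumed to have the cia_* signatures shown above and
 * struct myalg_ctx is assumed to hold the expanded key:
 *
 *        static struct crypto_alg myalg_alg = {
 *                .cra_name        = "myalg",
 *                .cra_driver_name = "myalg-generic",
 *                .cra_priority    = 100,
 *                .cra_flags       = CRYPTO_ALG_TYPE_CIPHER,
 *                .cra_blocksize   = 16,
 *                .cra_ctxsize     = sizeof(struct myalg_ctx),
 *                .cra_module      = THIS_MODULE,
 *                .cra_u           = { .cipher = {
 *                        .cia_min_keysize = 16,
 *                        .cia_max_keysize = 32,
 *                        .cia_setkey      = myalg_setkey,
 *                        .cia_encrypt     = myalg_encrypt,
 *                        .cia_decrypt     = myalg_decrypt,
 *                } },
 *        };
 *
 *        static int __init myalg_init(void)
 *        {
 *                return crypto_register_alg(&myalg_alg);
 *        }
 *
 * crypto_unregister_alg(&myalg_alg) undoes the registration on module exit.
 */
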
/*
 * Transforms: user-instantiated objects which encapsulate algorithms
 * and core processing logic.  Managed via crypto_alloc_*() and
 * crypto_free_*(), as well as the various helpers below.
 */

struct ablkcipher_tfm {
        int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
                      unsigned int keylen);
        int (*encrypt)(struct ablkcipher_request *req);
        int (*decrypt)(struct ablkcipher_request *req);
        int (*givencrypt)(struct skcipher_givcrypt_request *req);
        int (*givdecrypt)(struct skcipher_givcrypt_request *req);

        struct crypto_ablkcipher *base;

        unsigned int ivsize;
        unsigned int reqsize;
};

struct aead_tfm {
        int (*setkey)(struct crypto_aead *tfm, const u8 *key,
                      unsigned int keylen);
        int (*encrypt)(struct aead_request *req);
        int (*decrypt)(struct aead_request *req);
        int (*givencrypt)(struct aead_givcrypt_request *req);
        int (*givdecrypt)(struct aead_givcrypt_request *req);

        struct crypto_aead *base;

        unsigned int ivsize;
        unsigned int authsize;
        unsigned int reqsize;
};

struct blkcipher_tfm {
        void *iv;
        int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
                      unsigned int keylen);
        int (*encrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes);
        int (*decrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes);
};

struct cipher_tfm {
        int (*cit_setkey)(struct crypto_tfm *tfm,
                          const u8 *key, unsigned int keylen);
        void (*cit_encrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
        void (*cit_decrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};

struct hash_tfm {
        int (*init)(struct hash_desc *desc);
        int (*update)(struct hash_desc *desc,
                      struct scatterlist *sg, unsigned int nsg);
        int (*final)(struct hash_desc *desc, u8 *out);
        int (*digest)(struct hash_desc *desc, struct scatterlist *sg,
                      unsigned int nsg, u8 *out);
        int (*setkey)(struct crypto_hash *tfm, const u8 *key,
                      unsigned int keylen);
        unsigned int digestsize;
};

struct ahash_tfm {
        int (*init)(struct ahash_request *req);
        int (*update)(struct ahash_request *req);
        int (*final)(struct ahash_request *req);
        int (*digest)(struct ahash_request *req);
        int (*setkey)(struct crypto_ahash *tfm, const u8 *key,
                      unsigned int keylen);

        unsigned int digestsize;
        unsigned int reqsize;
};

struct compress_tfm {
        int (*cot_compress)(struct crypto_tfm *tfm,
                            const u8 *src, unsigned int slen,
                            u8 *dst, unsigned int *dlen);
        int (*cot_decompress)(struct crypto_tfm *tfm,
                              const u8 *src, unsigned int slen,
                              u8 *dst, unsigned int *dlen);
};

struct rng_tfm {
        int (*rng_gen_random)(struct crypto_rng *tfm, u8 *rdata,
                              unsigned int dlen);
        int (*rng_reset)(struct crypto_rng *tfm, u8 *seed, unsigned int slen);
};

#define crt_ablkcipher	crt_u.ablkcipher
#define crt_aead	crt_u.aead
#define crt_blkcipher	crt_u.blkcipher
#define crt_cipher	crt_u.cipher
#define crt_hash	crt_u.hash
#define crt_ahash	crt_u.ahash
#define crt_compress	crt_u.compress
#define crt_rng		crt_u.rng

struct crypto_tfm {

        u32 crt_flags;

        union {
                struct ablkcipher_tfm ablkcipher;
                struct aead_tfm aead;
                struct blkcipher_tfm blkcipher;
                struct cipher_tfm cipher;
                struct hash_tfm hash;
                struct ahash_tfm ahash;
                struct compress_tfm compress;
                struct rng_tfm rng;
        } crt_u;

        struct crypto_alg *__crt_alg;

        void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
};

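/*
 * Illustrative note (not part of the original header): crt_flags carries
 * both the CRYPTO_TFM_REQ_* bits set by the user and the CRYPTO_TFM_RES_*
 * bits reported back by the algorithm.  A rough sketch, using the
 * crypto_cipher_*() helpers defined further below on an already allocated
 * transform, of asking for weak keys to be rejected:
 *
 *        crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
 *        err = crypto_cipher_setkey(tfm, key, keylen);
 *        if (err && (crypto_cipher_get_flags(tfm) & CRYPTO_TFM_RES_WEAK_KEY))
 *                printk(KERN_INFO "key rejected as weak\n");
 */
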
struct crypto_ablkcipher {
        struct crypto_tfm base;
};

struct crypto_aead {
        struct crypto_tfm base;
};

struct crypto_blkcipher {
        struct crypto_tfm base;
};

struct crypto_cipher {
        struct crypto_tfm base;
};

struct crypto_comp {
        struct crypto_tfm base;
};

struct crypto_hash {
        struct crypto_tfm base;
};

struct crypto_rng {
        struct crypto_tfm base;
};

enum {
        CRYPTOA_UNSPEC,
        CRYPTOA_ALG,
        CRYPTOA_TYPE,
        CRYPTOA_U32,
        __CRYPTOA_MAX,
};

#define CRYPTOA_MAX (__CRYPTOA_MAX - 1)

/* Maximum number of (rtattr) parameters for each template. */
#define CRYPTO_MAX_ATTRS 32

struct crypto_attr_alg {
        char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_attr_type {
        u32 type;
        u32 mask;
};

struct crypto_attr_u32 {
        u32 num;
};

/*
 * Transform user interface.
 */

struct crypto_tfm *crypto_alloc_tfm(const char *alg_name, u32 tfm_flags);
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
void crypto_free_tfm(struct crypto_tfm *tfm);

int alg_test(const char *driver, const char *alg, u32 type, u32 mask);

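/*
 * Illustrative note (not part of the original header): crypto_alloc_base()
 * and the type-specific crypto_alloc_*() wrappers below return an ERR_PTR()
 * value on failure, so callers check the result with IS_ERR()/PTR_ERR()
 * from <linux/err.h>:
 *
 *        struct crypto_tfm *tfm;
 *
 *        tfm = crypto_alloc_base("sha1", 0, 0);
 *        if (IS_ERR(tfm))
 *                return PTR_ERR(tfm);
 *
 * The transform is released again with crypto_free_tfm() (or the matching
 * type-specific crypto_free_*() helper).
 */
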
/*
 * Transform helpers which query the underlying algorithm.
 */
static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_name;
}

static inline const char *crypto_tfm_alg_driver_name(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_driver_name;
}

static inline int crypto_tfm_alg_priority(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_priority;
}

static inline const char *crypto_tfm_alg_modname(struct crypto_tfm *tfm)
{
        return module_name(tfm->__crt_alg->cra_module);
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_blocksize;
}

static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_alignmask;
}

static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm)
{
        return tfm->crt_flags;
}

static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags)
{
        tfm->crt_flags |= flags;
}

static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags)
{
        tfm->crt_flags &= ~flags;
}

static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
        return tfm->__crt_ctx;
}

static inline unsigned int crypto_tfm_ctx_alignment(void)
{
        struct crypto_tfm *tfm;
        return __alignof__(tfm->__crt_ctx);
}

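/*
 * Illustrative sketch (not part of the original header): an algorithm
 * implementation reaches its per-transform state, allocated according to
 * cra_ctxsize, through crypto_tfm_ctx(), typically from cra_init() or its
 * setkey routine.  The myalg_* names are hypothetical:
 *
 *        struct myalg_ctx {
 *                u32 round_keys[60];
 *        };
 *
 *        static int myalg_init_tfm(struct crypto_tfm *tfm)
 *        {
 *                struct myalg_ctx *ctx = crypto_tfm_ctx(tfm);
 *
 *                memset(ctx, 0, sizeof(*ctx));
 *                return 0;
 *        }
 */
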
/*
 * API wrappers.
 */
static inline struct crypto_ablkcipher *__crypto_ablkcipher_cast(
        struct crypto_tfm *tfm)
{
        return (struct crypto_ablkcipher *)tfm;
}

static inline u32 crypto_skcipher_type(u32 type)
{
        type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
        type |= CRYPTO_ALG_TYPE_BLKCIPHER;
        return type;
}

static inline u32 crypto_skcipher_mask(u32 mask)
{
        mask &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
        mask |= CRYPTO_ALG_TYPE_BLKCIPHER_MASK;
        return mask;
}

struct crypto_ablkcipher *crypto_alloc_ablkcipher(const char *alg_name,
                                                  u32 type, u32 mask);

static inline struct crypto_tfm *crypto_ablkcipher_tfm(
        struct crypto_ablkcipher *tfm)
{
        return &tfm->base;
}

static inline void crypto_free_ablkcipher(struct crypto_ablkcipher *tfm)
{
        crypto_free_tfm(crypto_ablkcipher_tfm(tfm));
}

static inline int crypto_has_ablkcipher(const char *alg_name, u32 type,
                                        u32 mask)
{
        return crypto_has_alg(alg_name, crypto_skcipher_type(type),
                              crypto_skcipher_mask(mask));
}

static inline struct ablkcipher_tfm *crypto_ablkcipher_crt(
        struct crypto_ablkcipher *tfm)
{
        return &crypto_ablkcipher_tfm(tfm)->crt_ablkcipher;
}

static inline unsigned int crypto_ablkcipher_ivsize(
        struct crypto_ablkcipher *tfm)
{
        return crypto_ablkcipher_crt(tfm)->ivsize;
}

static inline unsigned int crypto_ablkcipher_blocksize(
        struct crypto_ablkcipher *tfm)
{
        return crypto_tfm_alg_blocksize(crypto_ablkcipher_tfm(tfm));
}

static inline unsigned int crypto_ablkcipher_alignmask(
        struct crypto_ablkcipher *tfm)
{
        return crypto_tfm_alg_alignmask(crypto_ablkcipher_tfm(tfm));
}

static inline u32 crypto_ablkcipher_get_flags(struct crypto_ablkcipher *tfm)
{
        return crypto_tfm_get_flags(crypto_ablkcipher_tfm(tfm));
}

static inline void crypto_ablkcipher_set_flags(struct crypto_ablkcipher *tfm,
                                               u32 flags)
{
        crypto_tfm_set_flags(crypto_ablkcipher_tfm(tfm), flags);
}

static inline void crypto_ablkcipher_clear_flags(struct crypto_ablkcipher *tfm,
                                                 u32 flags)
{
        crypto_tfm_clear_flags(crypto_ablkcipher_tfm(tfm), flags);
}

static inline int crypto_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
                                           const u8 *key, unsigned int keylen)
{
        struct ablkcipher_tfm *crt = crypto_ablkcipher_crt(tfm);

        return crt->setkey(crt->base, key, keylen);
}

static inline struct crypto_ablkcipher *crypto_ablkcipher_reqtfm(
        struct ablkcipher_request *req)
{
        return __crypto_ablkcipher_cast(req->base.tfm);
}

static inline int crypto_ablkcipher_encrypt(struct ablkcipher_request *req)
{
        struct ablkcipher_tfm *crt =
                crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
        return crt->encrypt(req);
}

static inline int crypto_ablkcipher_decrypt(struct ablkcipher_request *req)
{
        struct ablkcipher_tfm *crt =
                crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
        return crt->decrypt(req);
}

static inline unsigned int crypto_ablkcipher_reqsize(
        struct crypto_ablkcipher *tfm)
{
        return crypto_ablkcipher_crt(tfm)->reqsize;
}

static inline void ablkcipher_request_set_tfm(
        struct ablkcipher_request *req, struct crypto_ablkcipher *tfm)
{
        req->base.tfm = crypto_ablkcipher_tfm(crypto_ablkcipher_crt(tfm)->base);
}

static inline struct ablkcipher_request *ablkcipher_request_cast(
        struct crypto_async_request *req)
{
        return container_of(req, struct ablkcipher_request, base);
}

static inline struct ablkcipher_request *ablkcipher_request_alloc(
        struct crypto_ablkcipher *tfm, gfp_t gfp)
{
        struct ablkcipher_request *req;

        req = kmalloc(sizeof(struct ablkcipher_request) +
                      crypto_ablkcipher_reqsize(tfm), gfp);

        if (likely(req))
                ablkcipher_request_set_tfm(req, tfm);

        return req;
}

static inline void ablkcipher_request_free(struct ablkcipher_request *req)
{
        kfree(req);
}

static inline void ablkcipher_request_set_callback(
        struct ablkcipher_request *req,
        u32 flags, crypto_completion_t complete, void *data)
{
        req->base.complete = complete;
        req->base.data = data;
        req->base.flags = flags;
}

static inline void ablkcipher_request_set_crypt(
        struct ablkcipher_request *req,
        struct scatterlist *src, struct scatterlist *dst,
        unsigned int nbytes, void *iv)
{
        req->src = src;
        req->dst = dst;
        req->nbytes = nbytes;
        req->info = iv;
}

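/*
 * Illustrative sketch (not part of the original header): putting the
 * ablkcipher helpers together.  This assumes <linux/scatterlist.h> and
 * <linux/completion.h>, the my_result/my_complete sketch shown earlier,
 * a registered "cbc(aes)" implementation, and caller-provided key, iv and
 * a 16-byte buf:
 *
 *        struct crypto_ablkcipher *tfm;
 *        struct ablkcipher_request *req;
 *        struct scatterlist sg;
 *        struct my_result res;
 *        int err;
 *
 *        tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
 *        if (IS_ERR(tfm))
 *                return PTR_ERR(tfm);
 *
 *        err = crypto_ablkcipher_setkey(tfm, key, 16);
 *        if (err)
 *                goto out;
 *
 *        req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
 *        if (!req) {
 *                err = -ENOMEM;
 *                goto out;
 *        }
 *
 *        init_completion(&res.done);
 *        ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *                                        my_complete, &res);
 *        sg_init_one(&sg, buf, 16);
 *        ablkcipher_request_set_crypt(req, &sg, &sg, 16, iv);
 *
 *        err = crypto_ablkcipher_encrypt(req);
 *        if (err == -EINPROGRESS || err == -EBUSY) {
 *                wait_for_completion(&res.done);
 *                err = res.err;
 *        }
 *
 *        ablkcipher_request_free(req);
 * out:
 *        crypto_free_ablkcipher(tfm);
 *        return err;
 */
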
static inline struct crypto_aead *__crypto_aead_cast(struct crypto_tfm *tfm)
{
        return (struct crypto_aead *)tfm;
}

struct crypto_aead *crypto_alloc_aead(const char *alg_name, u32 type, u32 mask);

static inline struct crypto_tfm *crypto_aead_tfm(struct crypto_aead *tfm)
{
        return &tfm->base;
}

static inline void crypto_free_aead(struct crypto_aead *tfm)
{
        crypto_free_tfm(crypto_aead_tfm(tfm));
}

static inline struct aead_tfm *crypto_aead_crt(struct crypto_aead *tfm)
{
        return &crypto_aead_tfm(tfm)->crt_aead;
}

static inline unsigned int crypto_aead_ivsize(struct crypto_aead *tfm)
{
        return crypto_aead_crt(tfm)->ivsize;
}

static inline unsigned int crypto_aead_authsize(struct crypto_aead *tfm)
{
        return crypto_aead_crt(tfm)->authsize;
}

static inline unsigned int crypto_aead_blocksize(struct crypto_aead *tfm)
{
        return crypto_tfm_alg_blocksize(crypto_aead_tfm(tfm));
}

static inline unsigned int crypto_aead_alignmask(struct crypto_aead *tfm)
{
        return crypto_tfm_alg_alignmask(crypto_aead_tfm(tfm));
}

static inline u32 crypto_aead_get_flags(struct crypto_aead *tfm)
{
        return crypto_tfm_get_flags(crypto_aead_tfm(tfm));
}

static inline void crypto_aead_set_flags(struct crypto_aead *tfm, u32 flags)
{
        crypto_tfm_set_flags(crypto_aead_tfm(tfm), flags);
}

static inline void crypto_aead_clear_flags(struct crypto_aead *tfm, u32 flags)
{
        crypto_tfm_clear_flags(crypto_aead_tfm(tfm), flags);
}

static inline int crypto_aead_setkey(struct crypto_aead *tfm, const u8 *key,
                                     unsigned int keylen)
{
        struct aead_tfm *crt = crypto_aead_crt(tfm);

        return crt->setkey(crt->base, key, keylen);
}

int crypto_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize);

static inline struct crypto_aead *crypto_aead_reqtfm(struct aead_request *req)
{
        return __crypto_aead_cast(req->base.tfm);
}

static inline int crypto_aead_encrypt(struct aead_request *req)
{
        return crypto_aead_crt(crypto_aead_reqtfm(req))->encrypt(req);
}

static inline int crypto_aead_decrypt(struct aead_request *req)
{
        return crypto_aead_crt(crypto_aead_reqtfm(req))->decrypt(req);
}

static inline unsigned int crypto_aead_reqsize(struct crypto_aead *tfm)
{
        return crypto_aead_crt(tfm)->reqsize;
}

static inline void aead_request_set_tfm(struct aead_request *req,
                                        struct crypto_aead *tfm)
{
        req->base.tfm = crypto_aead_tfm(crypto_aead_crt(tfm)->base);
}

static inline struct aead_request *aead_request_alloc(struct crypto_aead *tfm,
                                                      gfp_t gfp)
{
        struct aead_request *req;

        req = kmalloc(sizeof(*req) + crypto_aead_reqsize(tfm), gfp);

        if (likely(req))
                aead_request_set_tfm(req, tfm);

        return req;
}

static inline void aead_request_free(struct aead_request *req)
{
        kfree(req);
}

static inline void aead_request_set_callback(struct aead_request *req,
                                             u32 flags,
                                             crypto_completion_t complete,
                                             void *data)
{
        req->base.complete = complete;
        req->base.data = data;
        req->base.flags = flags;
}

static inline void aead_request_set_crypt(struct aead_request *req,
                                          struct scatterlist *src,
                                          struct scatterlist *dst,
                                          unsigned int cryptlen, u8 *iv)
{
        req->src = src;
        req->dst = dst;
        req->cryptlen = cryptlen;
        req->iv = iv;
}

static inline void aead_request_set_assoc(struct aead_request *req,
                                          struct scatterlist *assoc,
                                          unsigned int assoclen)
{
        req->assoc = assoc;
        req->assoclen = assoclen;
}

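/*
 * Illustrative sketch (not part of the original header): AEAD combines
 * encryption and authentication, so on encryption the destination
 * scatterlist must have room for cryptlen plus the authentication tag.
 * Assuming a registered "gcm(aes)" implementation and caller-prepared
 * sg_src, sg_dst, sg_assoc, iv and key:
 *
 *        struct crypto_aead *tfm;
 *        struct aead_request *req;
 *        int err;
 *
 *        tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *        if (IS_ERR(tfm))
 *                return PTR_ERR(tfm);
 *
 *        err = crypto_aead_setkey(tfm, key, 16);
 *        if (!err)
 *                err = crypto_aead_setauthsize(tfm, 16);
 *
 *        req = aead_request_alloc(tfm, GFP_KERNEL);
 *        if (req) {
 *                aead_request_set_callback(req, 0, my_complete, &res);
 *                aead_request_set_crypt(req, sg_src, sg_dst, cryptlen, iv);
 *                aead_request_set_assoc(req, sg_assoc, assoclen);
 *                err = crypto_aead_encrypt(req);
 *        }
 *
 * Waiting for -EINPROGRESS/-EBUSY and releasing the request and transform
 * follow the same pattern as the ablkcipher sketch above.
 */
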
static inline struct crypto_blkcipher *__crypto_blkcipher_cast(
        struct crypto_tfm *tfm)
{
        return (struct crypto_blkcipher *)tfm;
}

static inline struct crypto_blkcipher *crypto_blkcipher_cast(
        struct crypto_tfm *tfm)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_BLKCIPHER);
        return __crypto_blkcipher_cast(tfm);
}

static inline struct crypto_blkcipher *crypto_alloc_blkcipher(
        const char *alg_name, u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_BLKCIPHER;
        mask |= CRYPTO_ALG_TYPE_MASK;

        return __crypto_blkcipher_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_blkcipher_tfm(
        struct crypto_blkcipher *tfm)
{
        return &tfm->base;
}

static inline void crypto_free_blkcipher(struct crypto_blkcipher *tfm)
{
        crypto_free_tfm(crypto_blkcipher_tfm(tfm));
}

static inline int crypto_has_blkcipher(const char *alg_name, u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_BLKCIPHER;
        mask |= CRYPTO_ALG_TYPE_MASK;

        return crypto_has_alg(alg_name, type, mask);
}

static inline const char *crypto_blkcipher_name(struct crypto_blkcipher *tfm)
{
        return crypto_tfm_alg_name(crypto_blkcipher_tfm(tfm));
}

static inline struct blkcipher_tfm *crypto_blkcipher_crt(
        struct crypto_blkcipher *tfm)
{
        return &crypto_blkcipher_tfm(tfm)->crt_blkcipher;
}

static inline struct blkcipher_alg *crypto_blkcipher_alg(
        struct crypto_blkcipher *tfm)
{
        return &crypto_blkcipher_tfm(tfm)->__crt_alg->cra_blkcipher;
}

static inline unsigned int crypto_blkcipher_ivsize(struct crypto_blkcipher *tfm)
{
        return crypto_blkcipher_alg(tfm)->ivsize;
}

static inline unsigned int crypto_blkcipher_blocksize(
        struct crypto_blkcipher *tfm)
{
        return crypto_tfm_alg_blocksize(crypto_blkcipher_tfm(tfm));
}

static inline unsigned int crypto_blkcipher_alignmask(
        struct crypto_blkcipher *tfm)
{
        return crypto_tfm_alg_alignmask(crypto_blkcipher_tfm(tfm));
}

static inline u32 crypto_blkcipher_get_flags(struct crypto_blkcipher *tfm)
{
        return crypto_tfm_get_flags(crypto_blkcipher_tfm(tfm));
}

static inline void crypto_blkcipher_set_flags(struct crypto_blkcipher *tfm,
                                              u32 flags)
{
        crypto_tfm_set_flags(crypto_blkcipher_tfm(tfm), flags);
}

static inline void crypto_blkcipher_clear_flags(struct crypto_blkcipher *tfm,
                                                u32 flags)
{
        crypto_tfm_clear_flags(crypto_blkcipher_tfm(tfm), flags);
}

static inline int crypto_blkcipher_setkey(struct crypto_blkcipher *tfm,
                                          const u8 *key, unsigned int keylen)
{
        return crypto_blkcipher_crt(tfm)->setkey(crypto_blkcipher_tfm(tfm),
                                                 key, keylen);
}

static inline int crypto_blkcipher_encrypt(struct blkcipher_desc *desc,
                                           struct scatterlist *dst,
                                           struct scatterlist *src,
                                           unsigned int nbytes)
{
        desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
        return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
}

static inline int crypto_blkcipher_encrypt_iv(struct blkcipher_desc *desc,
                                              struct scatterlist *dst,
                                              struct scatterlist *src,
                                              unsigned int nbytes)
{
        return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
}

static inline int crypto_blkcipher_decrypt(struct blkcipher_desc *desc,
                                           struct scatterlist *dst,
                                           struct scatterlist *src,
                                           unsigned int nbytes)
{
        desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
        return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
}

static inline int crypto_blkcipher_decrypt_iv(struct blkcipher_desc *desc,
                                              struct scatterlist *dst,
                                              struct scatterlist *src,
                                              unsigned int nbytes)
{
        return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
}

static inline void crypto_blkcipher_set_iv(struct crypto_blkcipher *tfm,
                                           const u8 *src, unsigned int len)
{
        memcpy(crypto_blkcipher_crt(tfm)->iv, src, len);
}

static inline void crypto_blkcipher_get_iv(struct crypto_blkcipher *tfm,
                                           u8 *dst, unsigned int len)
{
        memcpy(dst, crypto_blkcipher_crt(tfm)->iv, len);
}

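/*
 * Illustrative sketch (not part of the original header): the synchronous
 * blkcipher interface operates on a caller-provided struct blkcipher_desc.
 * Assuming <linux/scatterlist.h>, a registered "cbc(aes)" implementation,
 * and a buf whose len is a multiple of the block size:
 *
 *        struct crypto_blkcipher *tfm;
 *        struct blkcipher_desc desc;
 *        struct scatterlist sg;
 *        int err;
 *
 *        tfm = crypto_alloc_blkcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
 *        if (IS_ERR(tfm))
 *                return PTR_ERR(tfm);
 *
 *        err = crypto_blkcipher_setkey(tfm, key, 16);
 *        if (!err) {
 *                crypto_blkcipher_set_iv(tfm, iv, crypto_blkcipher_ivsize(tfm));
 *                desc.tfm = tfm;
 *                desc.flags = 0;
 *                sg_init_one(&sg, buf, len);
 *                err = crypto_blkcipher_encrypt(&desc, &sg, &sg, len);
 *        }
 *
 *        crypto_free_blkcipher(tfm);
 *        return err;
 */
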
static inline struct crypto_cipher *__crypto_cipher_cast(struct crypto_tfm *tfm)
{
        return (struct crypto_cipher *)tfm;
}

static inline struct crypto_cipher *crypto_cipher_cast(struct crypto_tfm *tfm)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
        return __crypto_cipher_cast(tfm);
}

static inline struct crypto_cipher *crypto_alloc_cipher(const char *alg_name,
                                                        u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_CIPHER;
        mask |= CRYPTO_ALG_TYPE_MASK;

        return __crypto_cipher_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_cipher_tfm(struct crypto_cipher *tfm)
{
        return &tfm->base;
}

static inline void crypto_free_cipher(struct crypto_cipher *tfm)
{
        crypto_free_tfm(crypto_cipher_tfm(tfm));
}

static inline int crypto_has_cipher(const char *alg_name, u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_CIPHER;
        mask |= CRYPTO_ALG_TYPE_MASK;

        return crypto_has_alg(alg_name, type, mask);
}

static inline struct cipher_tfm *crypto_cipher_crt(struct crypto_cipher *tfm)
{
        return &crypto_cipher_tfm(tfm)->crt_cipher;
}

static inline unsigned int crypto_cipher_blocksize(struct crypto_cipher *tfm)
{
        return crypto_tfm_alg_blocksize(crypto_cipher_tfm(tfm));
}

static inline unsigned int crypto_cipher_alignmask(struct crypto_cipher *tfm)
{
        return crypto_tfm_alg_alignmask(crypto_cipher_tfm(tfm));
}

static inline u32 crypto_cipher_get_flags(struct crypto_cipher *tfm)
{
        return crypto_tfm_get_flags(crypto_cipher_tfm(tfm));
}

static inline void crypto_cipher_set_flags(struct crypto_cipher *tfm,
                                           u32 flags)
{
        crypto_tfm_set_flags(crypto_cipher_tfm(tfm), flags);
}

static inline void crypto_cipher_clear_flags(struct crypto_cipher *tfm,
                                             u32 flags)
{
        crypto_tfm_clear_flags(crypto_cipher_tfm(tfm), flags);
}

static inline int crypto_cipher_setkey(struct crypto_cipher *tfm,
                                       const u8 *key, unsigned int keylen)
{
        return crypto_cipher_crt(tfm)->cit_setkey(crypto_cipher_tfm(tfm),
                                                  key, keylen);
}

static inline void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
                                             u8 *dst, const u8 *src)
{
        crypto_cipher_crt(tfm)->cit_encrypt_one(crypto_cipher_tfm(tfm),
                                                dst, src);
}

static inline void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
                                             u8 *dst, const u8 *src)
{
        crypto_cipher_crt(tfm)->cit_decrypt_one(crypto_cipher_tfm(tfm),
                                                dst, src);
}

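/*
 * Illustrative sketch (not part of the original header): the crypto_cipher
 * interface processes exactly one block per call and performs no chaining.
 * Assuming a registered "aes" implementation and a caller-provided 16-byte
 * key:
 *
 *        struct crypto_cipher *tfm;
 *        u8 block[16];
 *        int err;
 *
 *        tfm = crypto_alloc_cipher("aes", 0, 0);
 *        if (IS_ERR(tfm))
 *                return PTR_ERR(tfm);
 *
 *        err = crypto_cipher_setkey(tfm, key, 16);
 *        if (!err)
 *                crypto_cipher_encrypt_one(tfm, block, block);
 *
 *        crypto_free_cipher(tfm);
 *        return err;
 */
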
static inline struct crypto_hash *__crypto_hash_cast(struct crypto_tfm *tfm)
{
        return (struct crypto_hash *)tfm;
}

static inline struct crypto_hash *crypto_hash_cast(struct crypto_tfm *tfm)
{
        BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_HASH) &
               CRYPTO_ALG_TYPE_HASH_MASK);
        return __crypto_hash_cast(tfm);
}

static inline struct crypto_hash *crypto_alloc_hash(const char *alg_name,
                                                    u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        mask &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_HASH;
        mask |= CRYPTO_ALG_TYPE_HASH_MASK;

        return __crypto_hash_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_hash_tfm(struct crypto_hash *tfm)
{
        return &tfm->base;
}

static inline void crypto_free_hash(struct crypto_hash *tfm)
{
        crypto_free_tfm(crypto_hash_tfm(tfm));
}

static inline int crypto_has_hash(const char *alg_name, u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        mask &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_HASH;
        mask |= CRYPTO_ALG_TYPE_HASH_MASK;

        return crypto_has_alg(alg_name, type, mask);
}

static inline struct hash_tfm *crypto_hash_crt(struct crypto_hash *tfm)
{
        return &crypto_hash_tfm(tfm)->crt_hash;
}

static inline unsigned int crypto_hash_blocksize(struct crypto_hash *tfm)
{
        return crypto_tfm_alg_blocksize(crypto_hash_tfm(tfm));
}

static inline unsigned int crypto_hash_alignmask(struct crypto_hash *tfm)
{
        return crypto_tfm_alg_alignmask(crypto_hash_tfm(tfm));
}

static inline unsigned int crypto_hash_digestsize(struct crypto_hash *tfm)
{
        return crypto_hash_crt(tfm)->digestsize;
}

static inline u32 crypto_hash_get_flags(struct crypto_hash *tfm)
{
        return crypto_tfm_get_flags(crypto_hash_tfm(tfm));
}

static inline void crypto_hash_set_flags(struct crypto_hash *tfm, u32 flags)
{
        crypto_tfm_set_flags(crypto_hash_tfm(tfm), flags);
}

static inline void crypto_hash_clear_flags(struct crypto_hash *tfm, u32 flags)
{
        crypto_tfm_clear_flags(crypto_hash_tfm(tfm), flags);
}

static inline int crypto_hash_init(struct hash_desc *desc)
{
        return crypto_hash_crt(desc->tfm)->init(desc);
}

static inline int crypto_hash_update(struct hash_desc *desc,
                                     struct scatterlist *sg,
                                     unsigned int nbytes)
{
        return crypto_hash_crt(desc->tfm)->update(desc, sg, nbytes);
}

static inline int crypto_hash_final(struct hash_desc *desc, u8 *out)
{
        return crypto_hash_crt(desc->tfm)->final(desc, out);
}

static inline int crypto_hash_digest(struct hash_desc *desc,
                                     struct scatterlist *sg,
                                     unsigned int nbytes, u8 *out)
{
        return crypto_hash_crt(desc->tfm)->digest(desc, sg, nbytes, out);
}

static inline int crypto_hash_setkey(struct crypto_hash *hash,
                                     const u8 *key, unsigned int keylen)
{
        return crypto_hash_crt(hash)->setkey(hash, key, keylen);
}

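/*
 * Illustrative sketch (not part of the original header): the synchronous
 * hash interface works on a struct hash_desc and scatterlists.  Assuming
 * <linux/scatterlist.h> and a registered "sha1" implementation:
 *
 *        struct crypto_hash *tfm;
 *        struct hash_desc desc;
 *        struct scatterlist sg;
 *        u8 digest[20];
 *        int err;
 *
 *        tfm = crypto_alloc_hash("sha1", 0, CRYPTO_ALG_ASYNC);
 *        if (IS_ERR(tfm))
 *                return PTR_ERR(tfm);
 *
 *        desc.tfm = tfm;
 *        desc.flags = 0;
 *        sg_init_one(&sg, buf, len);
 *        err = crypto_hash_digest(&desc, &sg, len, digest);
 *
 *        crypto_free_hash(tfm);
 *        return err;
 */
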
static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm)
{
        return (struct crypto_comp *)tfm;
}

static inline struct crypto_comp *crypto_comp_cast(struct crypto_tfm *tfm)
{
        BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_COMPRESS) &
               CRYPTO_ALG_TYPE_MASK);
        return __crypto_comp_cast(tfm);
}

static inline struct crypto_comp *crypto_alloc_comp(const char *alg_name,
                                                    u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_COMPRESS;
        mask |= CRYPTO_ALG_TYPE_MASK;

        return __crypto_comp_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_comp_tfm(struct crypto_comp *tfm)
{
        return &tfm->base;
}

static inline void crypto_free_comp(struct crypto_comp *tfm)
{
        crypto_free_tfm(crypto_comp_tfm(tfm));
}

static inline int crypto_has_comp(const char *alg_name, u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_COMPRESS;
        mask |= CRYPTO_ALG_TYPE_MASK;

        return crypto_has_alg(alg_name, type, mask);
}

static inline const char *crypto_comp_name(struct crypto_comp *tfm)
{
        return crypto_tfm_alg_name(crypto_comp_tfm(tfm));
}

static inline struct compress_tfm *crypto_comp_crt(struct crypto_comp *tfm)
{
        return &crypto_comp_tfm(tfm)->crt_compress;
}

static inline int crypto_comp_compress(struct crypto_comp *tfm,
                                       const u8 *src, unsigned int slen,
                                       u8 *dst, unsigned int *dlen)
{
        return crypto_comp_crt(tfm)->cot_compress(crypto_comp_tfm(tfm),
                                                  src, slen, dst, dlen);
}

static inline int crypto_comp_decompress(struct crypto_comp *tfm,
                                         const u8 *src, unsigned int slen,
                                         u8 *dst, unsigned int *dlen)
{
        return crypto_comp_crt(tfm)->cot_decompress(crypto_comp_tfm(tfm),
                                                    src, slen, dst, dlen);
}

#endif	/* _LINUX_CRYPTO_H */