1 /* 2 * Scatterlist Cryptographic API. 3 * 4 * Copyright (c) 2002 James Morris <[email protected]> 5 * Copyright (c) 2002 David S. Miller ([email protected]) 6 * Copyright (c) 2005 Herbert Xu <[email protected]> 7 * 8 * Portions derived from Cryptoapi, by Alexander Kjeldaas <[email protected]> 9 * and Nettle, by Niels Möller. 10 * 11 * This program is free software; you can redistribute it and/or modify it 12 * under the terms of the GNU General Public License as published by the Free 13 * Software Foundation; either version 2 of the License, or (at your option) 14 * any later version. 15 * 16 */ 17 #ifndef _LINUX_CRYPTO_H 18 #define _LINUX_CRYPTO_H 19 20 #include <asm/atomic.h> 21 #include <linux/module.h> 22 #include <linux/kernel.h> 23 #include <linux/list.h> 24 #include <linux/slab.h> 25 #include <linux/string.h> 26 #include <linux/uaccess.h> 27 28 /* 29 * Algorithm masks and types. 30 */ 31 #define CRYPTO_ALG_TYPE_MASK 0x0000000f 32 #define CRYPTO_ALG_TYPE_CIPHER 0x00000001 33 #define CRYPTO_ALG_TYPE_COMPRESS 0x00000002 34 #define CRYPTO_ALG_TYPE_AEAD 0x00000003 35 #define CRYPTO_ALG_TYPE_BLKCIPHER 0x00000004 36 #define CRYPTO_ALG_TYPE_ABLKCIPHER 0x00000005 37 #define CRYPTO_ALG_TYPE_GIVCIPHER 0x00000006 38 #define CRYPTO_ALG_TYPE_DIGEST 0x00000008 39 #define CRYPTO_ALG_TYPE_HASH 0x00000008 40 #define CRYPTO_ALG_TYPE_SHASH 0x00000009 41 #define CRYPTO_ALG_TYPE_AHASH 0x0000000a 42 #define CRYPTO_ALG_TYPE_RNG 0x0000000c 43 #define CRYPTO_ALG_TYPE_PCOMPRESS 0x0000000f 44 45 #define CRYPTO_ALG_TYPE_HASH_MASK 0x0000000e 46 #define CRYPTO_ALG_TYPE_AHASH_MASK 0x0000000c 47 #define CRYPTO_ALG_TYPE_BLKCIPHER_MASK 0x0000000c 48 49 #define CRYPTO_ALG_LARVAL 0x00000010 50 #define CRYPTO_ALG_DEAD 0x00000020 51 #define CRYPTO_ALG_DYING 0x00000040 52 #define CRYPTO_ALG_ASYNC 0x00000080 53 54 /* 55 * Set this bit if and only if the algorithm requires another algorithm of 56 * the same type to handle corner cases. 57 */ 58 #define CRYPTO_ALG_NEED_FALLBACK 0x00000100 59 60 /* 61 * This bit is set for symmetric key ciphers that have already been wrapped 62 * with a generic IV generator to prevent them from being wrapped again. 63 */ 64 #define CRYPTO_ALG_GENIV 0x00000200 65 66 /* 67 * Set if the algorithm has passed automated run-time testing. Note that 68 * if there is no run-time testing for a given algorithm it is considered 69 * to have passed. 70 */ 71 72 #define CRYPTO_ALG_TESTED 0x00000400 73 74 /* 75 * Transform masks and values (for crt_flags). 76 */ 77 #define CRYPTO_TFM_REQ_MASK 0x000fff00 78 #define CRYPTO_TFM_RES_MASK 0xfff00000 79 80 #define CRYPTO_TFM_REQ_WEAK_KEY 0x00000100 81 #define CRYPTO_TFM_REQ_MAY_SLEEP 0x00000200 82 #define CRYPTO_TFM_REQ_MAY_BACKLOG 0x00000400 83 #define CRYPTO_TFM_RES_WEAK_KEY 0x00100000 84 #define CRYPTO_TFM_RES_BAD_KEY_LEN 0x00200000 85 #define CRYPTO_TFM_RES_BAD_KEY_SCHED 0x00400000 86 #define CRYPTO_TFM_RES_BAD_BLOCK_LEN 0x00800000 87 #define CRYPTO_TFM_RES_BAD_FLAGS 0x01000000 88 89 /* 90 * Miscellaneous stuff. 91 */ 92 #define CRYPTO_MAX_ALG_NAME 64 93 94 /* 95 * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual 96 * declaration) is used to ensure that the crypto_tfm context structure is 97 * aligned correctly for the given architecture so that there are no alignment 98 * faults for C data types. In particular, this is required on platforms such 99 * as arm where pointers are 32-bit aligned but there are data types such as 100 * u64 which require 64-bit alignment. 101 */ 102 #if defined(ARCH_KMALLOC_MINALIGN) 103 #define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN 104 #elif defined(ARCH_SLAB_MINALIGN) 105 #define CRYPTO_MINALIGN ARCH_SLAB_MINALIGN 106 #else 107 #define CRYPTO_MINALIGN __alignof__(unsigned long long) 108 #endif 109 110 #define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN))) 111 112 struct scatterlist; 113 struct crypto_ablkcipher; 114 struct crypto_async_request; 115 struct crypto_aead; 116 struct crypto_blkcipher; 117 struct crypto_hash; 118 struct crypto_rng; 119 struct crypto_tfm; 120 struct crypto_type; 121 struct aead_givcrypt_request; 122 struct skcipher_givcrypt_request; 123 124 typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err); 125 126 struct crypto_async_request { 127 struct list_head list; 128 crypto_completion_t complete; 129 void *data; 130 struct crypto_tfm *tfm; 131 132 u32 flags; 133 }; 134 135 struct ablkcipher_request { 136 struct crypto_async_request base; 137 138 unsigned int nbytes; 139 140 void *info; 141 142 struct scatterlist *src; 143 struct scatterlist *dst; 144 145 void *__ctx[] CRYPTO_MINALIGN_ATTR; 146 }; 147 148 /** 149 * struct aead_request - AEAD request 150 * @base: Common attributes for async crypto requests 151 * @assoclen: Length in bytes of associated data for authentication 152 * @cryptlen: Length of data to be encrypted or decrypted 153 * @iv: Initialisation vector 154 * @assoc: Associated data 155 * @src: Source data 156 * @dst: Destination data 157 * @__ctx: Start of private context data 158 */ 159 struct aead_request { 160 struct crypto_async_request base; 161 162 unsigned int assoclen; 163 unsigned int cryptlen; 164 165 u8 *iv; 166 167 struct scatterlist *assoc; 168 struct scatterlist *src; 169 struct scatterlist *dst; 170 171 void *__ctx[] CRYPTO_MINALIGN_ATTR; 172 }; 173 174 struct blkcipher_desc { 175 struct crypto_blkcipher *tfm; 176 void *info; 177 u32 flags; 178 }; 179 180 struct cipher_desc { 181 struct crypto_tfm *tfm; 182 void (*crfn)(struct crypto_tfm *tfm, u8 *dst, const u8 *src); 183 unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst, 184 const u8 *src, unsigned int nbytes); 185 void *info; 186 }; 187 188 struct hash_desc { 189 struct crypto_hash *tfm; 190 u32 flags; 191 }; 192 193 /* 194 * Algorithms: modular crypto algorithm implementations, managed 195 * via crypto_register_alg() and crypto_unregister_alg(). 196 */ 197 struct ablkcipher_alg { 198 int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key, 199 unsigned int keylen); 200 int (*encrypt)(struct ablkcipher_request *req); 201 int (*decrypt)(struct ablkcipher_request *req); 202 int (*givencrypt)(struct skcipher_givcrypt_request *req); 203 int (*givdecrypt)(struct skcipher_givcrypt_request *req); 204 205 const char *geniv; 206 207 unsigned int min_keysize; 208 unsigned int max_keysize; 209 unsigned int ivsize; 210 }; 211 212 struct aead_alg { 213 int (*setkey)(struct crypto_aead *tfm, const u8 *key, 214 unsigned int keylen); 215 int (*setauthsize)(struct crypto_aead *tfm, unsigned int authsize); 216 int (*encrypt)(struct aead_request *req); 217 int (*decrypt)(struct aead_request *req); 218 int (*givencrypt)(struct aead_givcrypt_request *req); 219 int (*givdecrypt)(struct aead_givcrypt_request *req); 220 221 const char *geniv; 222 223 unsigned int ivsize; 224 unsigned int maxauthsize; 225 }; 226 227 struct blkcipher_alg { 228 int (*setkey)(struct crypto_tfm *tfm, const u8 *key, 229 unsigned int keylen); 230 int (*encrypt)(struct blkcipher_desc *desc, 231 struct scatterlist *dst, struct scatterlist *src, 232 unsigned int nbytes); 233 int (*decrypt)(struct blkcipher_desc *desc, 234 struct scatterlist *dst, struct scatterlist *src, 235 unsigned int nbytes); 236 237 const char *geniv; 238 239 unsigned int min_keysize; 240 unsigned int max_keysize; 241 unsigned int ivsize; 242 }; 243 244 struct cipher_alg { 245 unsigned int cia_min_keysize; 246 unsigned int cia_max_keysize; 247 int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key, 248 unsigned int keylen); 249 void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src); 250 void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src); 251 }; 252 253 struct digest_alg { 254 unsigned int dia_digestsize; 255 void (*dia_init)(struct crypto_tfm *tfm); 256 void (*dia_update)(struct crypto_tfm *tfm, const u8 *data, 257 unsigned int len); 258 void (*dia_final)(struct crypto_tfm *tfm, u8 *out); 259 int (*dia_setkey)(struct crypto_tfm *tfm, const u8 *key, 260 unsigned int keylen); 261 }; 262 263 struct hash_alg { 264 int (*init)(struct hash_desc *desc); 265 int (*update)(struct hash_desc *desc, struct scatterlist *sg, 266 unsigned int nbytes); 267 int (*final)(struct hash_desc *desc, u8 *out); 268 int (*digest)(struct hash_desc *desc, struct scatterlist *sg, 269 unsigned int nbytes, u8 *out); 270 int (*setkey)(struct crypto_hash *tfm, const u8 *key, 271 unsigned int keylen); 272 273 unsigned int digestsize; 274 }; 275 276 struct compress_alg { 277 int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src, 278 unsigned int slen, u8 *dst, unsigned int *dlen); 279 int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src, 280 unsigned int slen, u8 *dst, unsigned int *dlen); 281 }; 282 283 struct rng_alg { 284 int (*rng_make_random)(struct crypto_rng *tfm, u8 *rdata, 285 unsigned int dlen); 286 int (*rng_reset)(struct crypto_rng *tfm, u8 *seed, unsigned int slen); 287 288 unsigned int seedsize; 289 }; 290 291 292 #define cra_ablkcipher cra_u.ablkcipher 293 #define cra_aead cra_u.aead 294 #define cra_blkcipher cra_u.blkcipher 295 #define cra_cipher cra_u.cipher 296 #define cra_digest cra_u.digest 297 #define cra_hash cra_u.hash 298 #define cra_compress cra_u.compress 299 #define cra_rng cra_u.rng 300 301 struct crypto_alg { 302 struct list_head cra_list; 303 struct list_head cra_users; 304 305 u32 cra_flags; 306 unsigned int cra_blocksize; 307 unsigned int cra_ctxsize; 308 unsigned int cra_alignmask; 309 310 int cra_priority; 311 atomic_t cra_refcnt; 312 313 char cra_name[CRYPTO_MAX_ALG_NAME]; 314 char cra_driver_name[CRYPTO_MAX_ALG_NAME]; 315 316 const struct crypto_type *cra_type; 317 318 union { 319 struct ablkcipher_alg ablkcipher; 320 struct aead_alg aead; 321 struct blkcipher_alg blkcipher; 322 struct cipher_alg cipher; 323 struct digest_alg digest; 324 struct hash_alg hash; 325 struct compress_alg compress; 326 struct rng_alg rng; 327 } cra_u; 328 329 int (*cra_init)(struct crypto_tfm *tfm); 330 void (*cra_exit)(struct crypto_tfm *tfm); 331 void (*cra_destroy)(struct crypto_alg *alg); 332 333 struct module *cra_module; 334 }; 335 336 /* 337 * Algorithm registration interface. 338 */ 339 int crypto_register_alg(struct crypto_alg *alg); 340 int crypto_unregister_alg(struct crypto_alg *alg); 341 342 /* 343 * Algorithm query interface. 344 */ 345 int crypto_has_alg(const char *name, u32 type, u32 mask); 346 347 /* 348 * Transforms: user-instantiated objects which encapsulate algorithms 349 * and core processing logic. Managed via crypto_alloc_*() and 350 * crypto_free_*(), as well as the various helpers below. 351 */ 352 353 struct ablkcipher_tfm { 354 int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key, 355 unsigned int keylen); 356 int (*encrypt)(struct ablkcipher_request *req); 357 int (*decrypt)(struct ablkcipher_request *req); 358 int (*givencrypt)(struct skcipher_givcrypt_request *req); 359 int (*givdecrypt)(struct skcipher_givcrypt_request *req); 360 361 struct crypto_ablkcipher *base; 362 363 unsigned int ivsize; 364 unsigned int reqsize; 365 }; 366 367 struct aead_tfm { 368 int (*setkey)(struct crypto_aead *tfm, const u8 *key, 369 unsigned int keylen); 370 int (*encrypt)(struct aead_request *req); 371 int (*decrypt)(struct aead_request *req); 372 int (*givencrypt)(struct aead_givcrypt_request *req); 373 int (*givdecrypt)(struct aead_givcrypt_request *req); 374 375 struct crypto_aead *base; 376 377 unsigned int ivsize; 378 unsigned int authsize; 379 unsigned int reqsize; 380 }; 381 382 struct blkcipher_tfm { 383 void *iv; 384 int (*setkey)(struct crypto_tfm *tfm, const u8 *key, 385 unsigned int keylen); 386 int (*encrypt)(struct blkcipher_desc *desc, struct scatterlist *dst, 387 struct scatterlist *src, unsigned int nbytes); 388 int (*decrypt)(struct blkcipher_desc *desc, struct scatterlist *dst, 389 struct scatterlist *src, unsigned int nbytes); 390 }; 391 392 struct cipher_tfm { 393 int (*cit_setkey)(struct crypto_tfm *tfm, 394 const u8 *key, unsigned int keylen); 395 void (*cit_encrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src); 396 void (*cit_decrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src); 397 }; 398 399 struct hash_tfm { 400 int (*init)(struct hash_desc *desc); 401 int (*update)(struct hash_desc *desc, 402 struct scatterlist *sg, unsigned int nsg); 403 int (*final)(struct hash_desc *desc, u8 *out); 404 int (*digest)(struct hash_desc *desc, struct scatterlist *sg, 405 unsigned int nsg, u8 *out); 406 int (*setkey)(struct crypto_hash *tfm, const u8 *key, 407 unsigned int keylen); 408 unsigned int digestsize; 409 }; 410 411 struct compress_tfm { 412 int (*cot_compress)(struct crypto_tfm *tfm, 413 const u8 *src, unsigned int slen, 414 u8 *dst, unsigned int *dlen); 415 int (*cot_decompress)(struct crypto_tfm *tfm, 416 const u8 *src, unsigned int slen, 417 u8 *dst, unsigned int *dlen); 418 }; 419 420 struct rng_tfm { 421 int (*rng_gen_random)(struct crypto_rng *tfm, u8 *rdata, 422 unsigned int dlen); 423 int (*rng_reset)(struct crypto_rng *tfm, u8 *seed, unsigned int slen); 424 }; 425 426 #define crt_ablkcipher crt_u.ablkcipher 427 #define crt_aead crt_u.aead 428 #define crt_blkcipher crt_u.blkcipher 429 #define crt_cipher crt_u.cipher 430 #define crt_hash crt_u.hash 431 #define crt_compress crt_u.compress 432 #define crt_rng crt_u.rng 433 434 struct crypto_tfm { 435 436 u32 crt_flags; 437 438 union { 439 struct ablkcipher_tfm ablkcipher; 440 struct aead_tfm aead; 441 struct blkcipher_tfm blkcipher; 442 struct cipher_tfm cipher; 443 struct hash_tfm hash; 444 struct compress_tfm compress; 445 struct rng_tfm rng; 446 } crt_u; 447 448 void (*exit)(struct crypto_tfm *tfm); 449 450 struct crypto_alg *__crt_alg; 451 452 void *__crt_ctx[] CRYPTO_MINALIGN_ATTR; 453 }; 454 455 struct crypto_ablkcipher { 456 struct crypto_tfm base; 457 }; 458 459 struct crypto_aead { 460 struct crypto_tfm base; 461 }; 462 463 struct crypto_blkcipher { 464 struct crypto_tfm base; 465 }; 466 467 struct crypto_cipher { 468 struct crypto_tfm base; 469 }; 470 471 struct crypto_comp { 472 struct crypto_tfm base; 473 }; 474 475 struct crypto_hash { 476 struct crypto_tfm base; 477 }; 478 479 struct crypto_rng { 480 struct crypto_tfm base; 481 }; 482 483 enum { 484 CRYPTOA_UNSPEC, 485 CRYPTOA_ALG, 486 CRYPTOA_TYPE, 487 CRYPTOA_U32, 488 __CRYPTOA_MAX, 489 }; 490 491 #define CRYPTOA_MAX (__CRYPTOA_MAX - 1) 492 493 /* Maximum number of (rtattr) parameters for each template. */ 494 #define CRYPTO_MAX_ATTRS 32 495 496 struct crypto_attr_alg { 497 char name[CRYPTO_MAX_ALG_NAME]; 498 }; 499 500 struct crypto_attr_type { 501 u32 type; 502 u32 mask; 503 }; 504 505 struct crypto_attr_u32 { 506 u32 num; 507 }; 508 509 /* 510 * Transform user interface. 511 */ 512 513 struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask); 514 void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm); 515 516 static inline void crypto_free_tfm(struct crypto_tfm *tfm) 517 { 518 return crypto_destroy_tfm(tfm, tfm); 519 } 520 521 int alg_test(const char *driver, const char *alg, u32 type, u32 mask); 522 523 /* 524 * Transform helpers which query the underlying algorithm. 525 */ 526 static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm) 527 { 528 return tfm->__crt_alg->cra_name; 529 } 530 531 static inline const char *crypto_tfm_alg_driver_name(struct crypto_tfm *tfm) 532 { 533 return tfm->__crt_alg->cra_driver_name; 534 } 535 536 static inline int crypto_tfm_alg_priority(struct crypto_tfm *tfm) 537 { 538 return tfm->__crt_alg->cra_priority; 539 } 540 541 static inline const char *crypto_tfm_alg_modname(struct crypto_tfm *tfm) 542 { 543 return module_name(tfm->__crt_alg->cra_module); 544 } 545 546 static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm) 547 { 548 return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK; 549 } 550 551 static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm) 552 { 553 return tfm->__crt_alg->cra_blocksize; 554 } 555 556 static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm) 557 { 558 return tfm->__crt_alg->cra_alignmask; 559 } 560 561 static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm) 562 { 563 return tfm->crt_flags; 564 } 565 566 static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags) 567 { 568 tfm->crt_flags |= flags; 569 } 570 571 static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags) 572 { 573 tfm->crt_flags &= ~flags; 574 } 575 576 static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm) 577 { 578 return tfm->__crt_ctx; 579 } 580 581 static inline unsigned int crypto_tfm_ctx_alignment(void) 582 { 583 struct crypto_tfm *tfm; 584 return __alignof__(tfm->__crt_ctx); 585 } 586 587 /* 588 * API wrappers. 589 */ 590 static inline struct crypto_ablkcipher *__crypto_ablkcipher_cast( 591 struct crypto_tfm *tfm) 592 { 593 return (struct crypto_ablkcipher *)tfm; 594 } 595 596 static inline u32 crypto_skcipher_type(u32 type) 597 { 598 type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV); 599 type |= CRYPTO_ALG_TYPE_BLKCIPHER; 600 return type; 601 } 602 603 static inline u32 crypto_skcipher_mask(u32 mask) 604 { 605 mask &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV); 606 mask |= CRYPTO_ALG_TYPE_BLKCIPHER_MASK; 607 return mask; 608 } 609 610 struct crypto_ablkcipher *crypto_alloc_ablkcipher(const char *alg_name, 611 u32 type, u32 mask); 612 613 static inline struct crypto_tfm *crypto_ablkcipher_tfm( 614 struct crypto_ablkcipher *tfm) 615 { 616 return &tfm->base; 617 } 618 619 static inline void crypto_free_ablkcipher(struct crypto_ablkcipher *tfm) 620 { 621 crypto_free_tfm(crypto_ablkcipher_tfm(tfm)); 622 } 623 624 static inline int crypto_has_ablkcipher(const char *alg_name, u32 type, 625 u32 mask) 626 { 627 return crypto_has_alg(alg_name, crypto_skcipher_type(type), 628 crypto_skcipher_mask(mask)); 629 } 630 631 static inline struct ablkcipher_tfm *crypto_ablkcipher_crt( 632 struct crypto_ablkcipher *tfm) 633 { 634 return &crypto_ablkcipher_tfm(tfm)->crt_ablkcipher; 635 } 636 637 static inline unsigned int crypto_ablkcipher_ivsize( 638 struct crypto_ablkcipher *tfm) 639 { 640 return crypto_ablkcipher_crt(tfm)->ivsize; 641 } 642 643 static inline unsigned int crypto_ablkcipher_blocksize( 644 struct crypto_ablkcipher *tfm) 645 { 646 return crypto_tfm_alg_blocksize(crypto_ablkcipher_tfm(tfm)); 647 } 648 649 static inline unsigned int crypto_ablkcipher_alignmask( 650 struct crypto_ablkcipher *tfm) 651 { 652 return crypto_tfm_alg_alignmask(crypto_ablkcipher_tfm(tfm)); 653 } 654 655 static inline u32 crypto_ablkcipher_get_flags(struct crypto_ablkcipher *tfm) 656 { 657 return crypto_tfm_get_flags(crypto_ablkcipher_tfm(tfm)); 658 } 659 660 static inline void crypto_ablkcipher_set_flags(struct crypto_ablkcipher *tfm, 661 u32 flags) 662 { 663 crypto_tfm_set_flags(crypto_ablkcipher_tfm(tfm), flags); 664 } 665 666 static inline void crypto_ablkcipher_clear_flags(struct crypto_ablkcipher *tfm, 667 u32 flags) 668 { 669 crypto_tfm_clear_flags(crypto_ablkcipher_tfm(tfm), flags); 670 } 671 672 static inline int crypto_ablkcipher_setkey(struct crypto_ablkcipher *tfm, 673 const u8 *key, unsigned int keylen) 674 { 675 struct ablkcipher_tfm *crt = crypto_ablkcipher_crt(tfm); 676 677 return crt->setkey(crt->base, key, keylen); 678 } 679 680 static inline struct crypto_ablkcipher *crypto_ablkcipher_reqtfm( 681 struct ablkcipher_request *req) 682 { 683 return __crypto_ablkcipher_cast(req->base.tfm); 684 } 685 686 static inline int crypto_ablkcipher_encrypt(struct ablkcipher_request *req) 687 { 688 struct ablkcipher_tfm *crt = 689 crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req)); 690 return crt->encrypt(req); 691 } 692 693 static inline int crypto_ablkcipher_decrypt(struct ablkcipher_request *req) 694 { 695 struct ablkcipher_tfm *crt = 696 crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req)); 697 return crt->decrypt(req); 698 } 699 700 static inline unsigned int crypto_ablkcipher_reqsize( 701 struct crypto_ablkcipher *tfm) 702 { 703 return crypto_ablkcipher_crt(tfm)->reqsize; 704 } 705 706 static inline void ablkcipher_request_set_tfm( 707 struct ablkcipher_request *req, struct crypto_ablkcipher *tfm) 708 { 709 req->base.tfm = crypto_ablkcipher_tfm(crypto_ablkcipher_crt(tfm)->base); 710 } 711 712 static inline struct ablkcipher_request *ablkcipher_request_cast( 713 struct crypto_async_request *req) 714 { 715 return container_of(req, struct ablkcipher_request, base); 716 } 717 718 static inline struct ablkcipher_request *ablkcipher_request_alloc( 719 struct crypto_ablkcipher *tfm, gfp_t gfp) 720 { 721 struct ablkcipher_request *req; 722 723 req = kmalloc(sizeof(struct ablkcipher_request) + 724 crypto_ablkcipher_reqsize(tfm), gfp); 725 726 if (likely(req)) 727 ablkcipher_request_set_tfm(req, tfm); 728 729 return req; 730 } 731 732 static inline void ablkcipher_request_free(struct ablkcipher_request *req) 733 { 734 kzfree(req); 735 } 736 737 static inline void ablkcipher_request_set_callback( 738 struct ablkcipher_request *req, 739 u32 flags, crypto_completion_t complete, void *data) 740 { 741 req->base.complete = complete; 742 req->base.data = data; 743 req->base.flags = flags; 744 } 745 746 static inline void ablkcipher_request_set_crypt( 747 struct ablkcipher_request *req, 748 struct scatterlist *src, struct scatterlist *dst, 749 unsigned int nbytes, void *iv) 750 { 751 req->src = src; 752 req->dst = dst; 753 req->nbytes = nbytes; 754 req->info = iv; 755 } 756 757 static inline struct crypto_aead *__crypto_aead_cast(struct crypto_tfm *tfm) 758 { 759 return (struct crypto_aead *)tfm; 760 } 761 762 struct crypto_aead *crypto_alloc_aead(const char *alg_name, u32 type, u32 mask); 763 764 static inline struct crypto_tfm *crypto_aead_tfm(struct crypto_aead *tfm) 765 { 766 return &tfm->base; 767 } 768 769 static inline void crypto_free_aead(struct crypto_aead *tfm) 770 { 771 crypto_free_tfm(crypto_aead_tfm(tfm)); 772 } 773 774 static inline struct aead_tfm *crypto_aead_crt(struct crypto_aead *tfm) 775 { 776 return &crypto_aead_tfm(tfm)->crt_aead; 777 } 778 779 static inline unsigned int crypto_aead_ivsize(struct crypto_aead *tfm) 780 { 781 return crypto_aead_crt(tfm)->ivsize; 782 } 783 784 static inline unsigned int crypto_aead_authsize(struct crypto_aead *tfm) 785 { 786 return crypto_aead_crt(tfm)->authsize; 787 } 788 789 static inline unsigned int crypto_aead_blocksize(struct crypto_aead *tfm) 790 { 791 return crypto_tfm_alg_blocksize(crypto_aead_tfm(tfm)); 792 } 793 794 static inline unsigned int crypto_aead_alignmask(struct crypto_aead *tfm) 795 { 796 return crypto_tfm_alg_alignmask(crypto_aead_tfm(tfm)); 797 } 798 799 static inline u32 crypto_aead_get_flags(struct crypto_aead *tfm) 800 { 801 return crypto_tfm_get_flags(crypto_aead_tfm(tfm)); 802 } 803 804 static inline void crypto_aead_set_flags(struct crypto_aead *tfm, u32 flags) 805 { 806 crypto_tfm_set_flags(crypto_aead_tfm(tfm), flags); 807 } 808 809 static inline void crypto_aead_clear_flags(struct crypto_aead *tfm, u32 flags) 810 { 811 crypto_tfm_clear_flags(crypto_aead_tfm(tfm), flags); 812 } 813 814 static inline int crypto_aead_setkey(struct crypto_aead *tfm, const u8 *key, 815 unsigned int keylen) 816 { 817 struct aead_tfm *crt = crypto_aead_crt(tfm); 818 819 return crt->setkey(crt->base, key, keylen); 820 } 821 822 int crypto_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize); 823 824 static inline struct crypto_aead *crypto_aead_reqtfm(struct aead_request *req) 825 { 826 return __crypto_aead_cast(req->base.tfm); 827 } 828 829 static inline int crypto_aead_encrypt(struct aead_request *req) 830 { 831 return crypto_aead_crt(crypto_aead_reqtfm(req))->encrypt(req); 832 } 833 834 static inline int crypto_aead_decrypt(struct aead_request *req) 835 { 836 return crypto_aead_crt(crypto_aead_reqtfm(req))->decrypt(req); 837 } 838 839 static inline unsigned int crypto_aead_reqsize(struct crypto_aead *tfm) 840 { 841 return crypto_aead_crt(tfm)->reqsize; 842 } 843 844 static inline void aead_request_set_tfm(struct aead_request *req, 845 struct crypto_aead *tfm) 846 { 847 req->base.tfm = crypto_aead_tfm(crypto_aead_crt(tfm)->base); 848 } 849 850 static inline struct aead_request *aead_request_alloc(struct crypto_aead *tfm, 851 gfp_t gfp) 852 { 853 struct aead_request *req; 854 855 req = kmalloc(sizeof(*req) + crypto_aead_reqsize(tfm), gfp); 856 857 if (likely(req)) 858 aead_request_set_tfm(req, tfm); 859 860 return req; 861 } 862 863 static inline void aead_request_free(struct aead_request *req) 864 { 865 kzfree(req); 866 } 867 868 static inline void aead_request_set_callback(struct aead_request *req, 869 u32 flags, 870 crypto_completion_t complete, 871 void *data) 872 { 873 req->base.complete = complete; 874 req->base.data = data; 875 req->base.flags = flags; 876 } 877 878 static inline void aead_request_set_crypt(struct aead_request *req, 879 struct scatterlist *src, 880 struct scatterlist *dst, 881 unsigned int cryptlen, u8 *iv) 882 { 883 req->src = src; 884 req->dst = dst; 885 req->cryptlen = cryptlen; 886 req->iv = iv; 887 } 888 889 static inline void aead_request_set_assoc(struct aead_request *req, 890 struct scatterlist *assoc, 891 unsigned int assoclen) 892 { 893 req->assoc = assoc; 894 req->assoclen = assoclen; 895 } 896 897 static inline struct crypto_blkcipher *__crypto_blkcipher_cast( 898 struct crypto_tfm *tfm) 899 { 900 return (struct crypto_blkcipher *)tfm; 901 } 902 903 static inline struct crypto_blkcipher *crypto_blkcipher_cast( 904 struct crypto_tfm *tfm) 905 { 906 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_BLKCIPHER); 907 return __crypto_blkcipher_cast(tfm); 908 } 909 910 static inline struct crypto_blkcipher *crypto_alloc_blkcipher( 911 const char *alg_name, u32 type, u32 mask) 912 { 913 type &= ~CRYPTO_ALG_TYPE_MASK; 914 type |= CRYPTO_ALG_TYPE_BLKCIPHER; 915 mask |= CRYPTO_ALG_TYPE_MASK; 916 917 return __crypto_blkcipher_cast(crypto_alloc_base(alg_name, type, mask)); 918 } 919 920 static inline struct crypto_tfm *crypto_blkcipher_tfm( 921 struct crypto_blkcipher *tfm) 922 { 923 return &tfm->base; 924 } 925 926 static inline void crypto_free_blkcipher(struct crypto_blkcipher *tfm) 927 { 928 crypto_free_tfm(crypto_blkcipher_tfm(tfm)); 929 } 930 931 static inline int crypto_has_blkcipher(const char *alg_name, u32 type, u32 mask) 932 { 933 type &= ~CRYPTO_ALG_TYPE_MASK; 934 type |= CRYPTO_ALG_TYPE_BLKCIPHER; 935 mask |= CRYPTO_ALG_TYPE_MASK; 936 937 return crypto_has_alg(alg_name, type, mask); 938 } 939 940 static inline const char *crypto_blkcipher_name(struct crypto_blkcipher *tfm) 941 { 942 return crypto_tfm_alg_name(crypto_blkcipher_tfm(tfm)); 943 } 944 945 static inline struct blkcipher_tfm *crypto_blkcipher_crt( 946 struct crypto_blkcipher *tfm) 947 { 948 return &crypto_blkcipher_tfm(tfm)->crt_blkcipher; 949 } 950 951 static inline struct blkcipher_alg *crypto_blkcipher_alg( 952 struct crypto_blkcipher *tfm) 953 { 954 return &crypto_blkcipher_tfm(tfm)->__crt_alg->cra_blkcipher; 955 } 956 957 static inline unsigned int crypto_blkcipher_ivsize(struct crypto_blkcipher *tfm) 958 { 959 return crypto_blkcipher_alg(tfm)->ivsize; 960 } 961 962 static inline unsigned int crypto_blkcipher_blocksize( 963 struct crypto_blkcipher *tfm) 964 { 965 return crypto_tfm_alg_blocksize(crypto_blkcipher_tfm(tfm)); 966 } 967 968 static inline unsigned int crypto_blkcipher_alignmask( 969 struct crypto_blkcipher *tfm) 970 { 971 return crypto_tfm_alg_alignmask(crypto_blkcipher_tfm(tfm)); 972 } 973 974 static inline u32 crypto_blkcipher_get_flags(struct crypto_blkcipher *tfm) 975 { 976 return crypto_tfm_get_flags(crypto_blkcipher_tfm(tfm)); 977 } 978 979 static inline void crypto_blkcipher_set_flags(struct crypto_blkcipher *tfm, 980 u32 flags) 981 { 982 crypto_tfm_set_flags(crypto_blkcipher_tfm(tfm), flags); 983 } 984 985 static inline void crypto_blkcipher_clear_flags(struct crypto_blkcipher *tfm, 986 u32 flags) 987 { 988 crypto_tfm_clear_flags(crypto_blkcipher_tfm(tfm), flags); 989 } 990 991 static inline int crypto_blkcipher_setkey(struct crypto_blkcipher *tfm, 992 const u8 *key, unsigned int keylen) 993 { 994 return crypto_blkcipher_crt(tfm)->setkey(crypto_blkcipher_tfm(tfm), 995 key, keylen); 996 } 997 998 static inline int crypto_blkcipher_encrypt(struct blkcipher_desc *desc, 999 struct scatterlist *dst, 1000 struct scatterlist *src, 1001 unsigned int nbytes) 1002 { 1003 desc->info = crypto_blkcipher_crt(desc->tfm)->iv; 1004 return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes); 1005 } 1006 1007 static inline int crypto_blkcipher_encrypt_iv(struct blkcipher_desc *desc, 1008 struct scatterlist *dst, 1009 struct scatterlist *src, 1010 unsigned int nbytes) 1011 { 1012 return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes); 1013 } 1014 1015 static inline int crypto_blkcipher_decrypt(struct blkcipher_desc *desc, 1016 struct scatterlist *dst, 1017 struct scatterlist *src, 1018 unsigned int nbytes) 1019 { 1020 desc->info = crypto_blkcipher_crt(desc->tfm)->iv; 1021 return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes); 1022 } 1023 1024 static inline int crypto_blkcipher_decrypt_iv(struct blkcipher_desc *desc, 1025 struct scatterlist *dst, 1026 struct scatterlist *src, 1027 unsigned int nbytes) 1028 { 1029 return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes); 1030 } 1031 1032 static inline void crypto_blkcipher_set_iv(struct crypto_blkcipher *tfm, 1033 const u8 *src, unsigned int len) 1034 { 1035 memcpy(crypto_blkcipher_crt(tfm)->iv, src, len); 1036 } 1037 1038 static inline void crypto_blkcipher_get_iv(struct crypto_blkcipher *tfm, 1039 u8 *dst, unsigned int len) 1040 { 1041 memcpy(dst, crypto_blkcipher_crt(tfm)->iv, len); 1042 } 1043 1044 static inline struct crypto_cipher *__crypto_cipher_cast(struct crypto_tfm *tfm) 1045 { 1046 return (struct crypto_cipher *)tfm; 1047 } 1048 1049 static inline struct crypto_cipher *crypto_cipher_cast(struct crypto_tfm *tfm) 1050 { 1051 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER); 1052 return __crypto_cipher_cast(tfm); 1053 } 1054 1055 static inline struct crypto_cipher *crypto_alloc_cipher(const char *alg_name, 1056 u32 type, u32 mask) 1057 { 1058 type &= ~CRYPTO_ALG_TYPE_MASK; 1059 type |= CRYPTO_ALG_TYPE_CIPHER; 1060 mask |= CRYPTO_ALG_TYPE_MASK; 1061 1062 return __crypto_cipher_cast(crypto_alloc_base(alg_name, type, mask)); 1063 } 1064 1065 static inline struct crypto_tfm *crypto_cipher_tfm(struct crypto_cipher *tfm) 1066 { 1067 return &tfm->base; 1068 } 1069 1070 static inline void crypto_free_cipher(struct crypto_cipher *tfm) 1071 { 1072 crypto_free_tfm(crypto_cipher_tfm(tfm)); 1073 } 1074 1075 static inline int crypto_has_cipher(const char *alg_name, u32 type, u32 mask) 1076 { 1077 type &= ~CRYPTO_ALG_TYPE_MASK; 1078 type |= CRYPTO_ALG_TYPE_CIPHER; 1079 mask |= CRYPTO_ALG_TYPE_MASK; 1080 1081 return crypto_has_alg(alg_name, type, mask); 1082 } 1083 1084 static inline struct cipher_tfm *crypto_cipher_crt(struct crypto_cipher *tfm) 1085 { 1086 return &crypto_cipher_tfm(tfm)->crt_cipher; 1087 } 1088 1089 static inline unsigned int crypto_cipher_blocksize(struct crypto_cipher *tfm) 1090 { 1091 return crypto_tfm_alg_blocksize(crypto_cipher_tfm(tfm)); 1092 } 1093 1094 static inline unsigned int crypto_cipher_alignmask(struct crypto_cipher *tfm) 1095 { 1096 return crypto_tfm_alg_alignmask(crypto_cipher_tfm(tfm)); 1097 } 1098 1099 static inline u32 crypto_cipher_get_flags(struct crypto_cipher *tfm) 1100 { 1101 return crypto_tfm_get_flags(crypto_cipher_tfm(tfm)); 1102 } 1103 1104 static inline void crypto_cipher_set_flags(struct crypto_cipher *tfm, 1105 u32 flags) 1106 { 1107 crypto_tfm_set_flags(crypto_cipher_tfm(tfm), flags); 1108 } 1109 1110 static inline void crypto_cipher_clear_flags(struct crypto_cipher *tfm, 1111 u32 flags) 1112 { 1113 crypto_tfm_clear_flags(crypto_cipher_tfm(tfm), flags); 1114 } 1115 1116 static inline int crypto_cipher_setkey(struct crypto_cipher *tfm, 1117 const u8 *key, unsigned int keylen) 1118 { 1119 return crypto_cipher_crt(tfm)->cit_setkey(crypto_cipher_tfm(tfm), 1120 key, keylen); 1121 } 1122 1123 static inline void crypto_cipher_encrypt_one(struct crypto_cipher *tfm, 1124 u8 *dst, const u8 *src) 1125 { 1126 crypto_cipher_crt(tfm)->cit_encrypt_one(crypto_cipher_tfm(tfm), 1127 dst, src); 1128 } 1129 1130 static inline void crypto_cipher_decrypt_one(struct crypto_cipher *tfm, 1131 u8 *dst, const u8 *src) 1132 { 1133 crypto_cipher_crt(tfm)->cit_decrypt_one(crypto_cipher_tfm(tfm), 1134 dst, src); 1135 } 1136 1137 static inline struct crypto_hash *__crypto_hash_cast(struct crypto_tfm *tfm) 1138 { 1139 return (struct crypto_hash *)tfm; 1140 } 1141 1142 static inline struct crypto_hash *crypto_hash_cast(struct crypto_tfm *tfm) 1143 { 1144 BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_HASH) & 1145 CRYPTO_ALG_TYPE_HASH_MASK); 1146 return __crypto_hash_cast(tfm); 1147 } 1148 1149 static inline struct crypto_hash *crypto_alloc_hash(const char *alg_name, 1150 u32 type, u32 mask) 1151 { 1152 type &= ~CRYPTO_ALG_TYPE_MASK; 1153 mask &= ~CRYPTO_ALG_TYPE_MASK; 1154 type |= CRYPTO_ALG_TYPE_HASH; 1155 mask |= CRYPTO_ALG_TYPE_HASH_MASK; 1156 1157 return __crypto_hash_cast(crypto_alloc_base(alg_name, type, mask)); 1158 } 1159 1160 static inline struct crypto_tfm *crypto_hash_tfm(struct crypto_hash *tfm) 1161 { 1162 return &tfm->base; 1163 } 1164 1165 static inline void crypto_free_hash(struct crypto_hash *tfm) 1166 { 1167 crypto_free_tfm(crypto_hash_tfm(tfm)); 1168 } 1169 1170 static inline int crypto_has_hash(const char *alg_name, u32 type, u32 mask) 1171 { 1172 type &= ~CRYPTO_ALG_TYPE_MASK; 1173 mask &= ~CRYPTO_ALG_TYPE_MASK; 1174 type |= CRYPTO_ALG_TYPE_HASH; 1175 mask |= CRYPTO_ALG_TYPE_HASH_MASK; 1176 1177 return crypto_has_alg(alg_name, type, mask); 1178 } 1179 1180 static inline struct hash_tfm *crypto_hash_crt(struct crypto_hash *tfm) 1181 { 1182 return &crypto_hash_tfm(tfm)->crt_hash; 1183 } 1184 1185 static inline unsigned int crypto_hash_blocksize(struct crypto_hash *tfm) 1186 { 1187 return crypto_tfm_alg_blocksize(crypto_hash_tfm(tfm)); 1188 } 1189 1190 static inline unsigned int crypto_hash_alignmask(struct crypto_hash *tfm) 1191 { 1192 return crypto_tfm_alg_alignmask(crypto_hash_tfm(tfm)); 1193 } 1194 1195 static inline unsigned int crypto_hash_digestsize(struct crypto_hash *tfm) 1196 { 1197 return crypto_hash_crt(tfm)->digestsize; 1198 } 1199 1200 static inline u32 crypto_hash_get_flags(struct crypto_hash *tfm) 1201 { 1202 return crypto_tfm_get_flags(crypto_hash_tfm(tfm)); 1203 } 1204 1205 static inline void crypto_hash_set_flags(struct crypto_hash *tfm, u32 flags) 1206 { 1207 crypto_tfm_set_flags(crypto_hash_tfm(tfm), flags); 1208 } 1209 1210 static inline void crypto_hash_clear_flags(struct crypto_hash *tfm, u32 flags) 1211 { 1212 crypto_tfm_clear_flags(crypto_hash_tfm(tfm), flags); 1213 } 1214 1215 static inline int crypto_hash_init(struct hash_desc *desc) 1216 { 1217 return crypto_hash_crt(desc->tfm)->init(desc); 1218 } 1219 1220 static inline int crypto_hash_update(struct hash_desc *desc, 1221 struct scatterlist *sg, 1222 unsigned int nbytes) 1223 { 1224 return crypto_hash_crt(desc->tfm)->update(desc, sg, nbytes); 1225 } 1226 1227 static inline int crypto_hash_final(struct hash_desc *desc, u8 *out) 1228 { 1229 return crypto_hash_crt(desc->tfm)->final(desc, out); 1230 } 1231 1232 static inline int crypto_hash_digest(struct hash_desc *desc, 1233 struct scatterlist *sg, 1234 unsigned int nbytes, u8 *out) 1235 { 1236 return crypto_hash_crt(desc->tfm)->digest(desc, sg, nbytes, out); 1237 } 1238 1239 static inline int crypto_hash_setkey(struct crypto_hash *hash, 1240 const u8 *key, unsigned int keylen) 1241 { 1242 return crypto_hash_crt(hash)->setkey(hash, key, keylen); 1243 } 1244 1245 static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm) 1246 { 1247 return (struct crypto_comp *)tfm; 1248 } 1249 1250 static inline struct crypto_comp *crypto_comp_cast(struct crypto_tfm *tfm) 1251 { 1252 BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_COMPRESS) & 1253 CRYPTO_ALG_TYPE_MASK); 1254 return __crypto_comp_cast(tfm); 1255 } 1256 1257 static inline struct crypto_comp *crypto_alloc_comp(const char *alg_name, 1258 u32 type, u32 mask) 1259 { 1260 type &= ~CRYPTO_ALG_TYPE_MASK; 1261 type |= CRYPTO_ALG_TYPE_COMPRESS; 1262 mask |= CRYPTO_ALG_TYPE_MASK; 1263 1264 return __crypto_comp_cast(crypto_alloc_base(alg_name, type, mask)); 1265 } 1266 1267 static inline struct crypto_tfm *crypto_comp_tfm(struct crypto_comp *tfm) 1268 { 1269 return &tfm->base; 1270 } 1271 1272 static inline void crypto_free_comp(struct crypto_comp *tfm) 1273 { 1274 crypto_free_tfm(crypto_comp_tfm(tfm)); 1275 } 1276 1277 static inline int crypto_has_comp(const char *alg_name, u32 type, u32 mask) 1278 { 1279 type &= ~CRYPTO_ALG_TYPE_MASK; 1280 type |= CRYPTO_ALG_TYPE_COMPRESS; 1281 mask |= CRYPTO_ALG_TYPE_MASK; 1282 1283 return crypto_has_alg(alg_name, type, mask); 1284 } 1285 1286 static inline const char *crypto_comp_name(struct crypto_comp *tfm) 1287 { 1288 return crypto_tfm_alg_name(crypto_comp_tfm(tfm)); 1289 } 1290 1291 static inline struct compress_tfm *crypto_comp_crt(struct crypto_comp *tfm) 1292 { 1293 return &crypto_comp_tfm(tfm)->crt_compress; 1294 } 1295 1296 static inline int crypto_comp_compress(struct crypto_comp *tfm, 1297 const u8 *src, unsigned int slen, 1298 u8 *dst, unsigned int *dlen) 1299 { 1300 return crypto_comp_crt(tfm)->cot_compress(crypto_comp_tfm(tfm), 1301 src, slen, dst, dlen); 1302 } 1303 1304 static inline int crypto_comp_decompress(struct crypto_comp *tfm, 1305 const u8 *src, unsigned int slen, 1306 u8 *dst, unsigned int *dlen) 1307 { 1308 return crypto_comp_crt(tfm)->cot_decompress(crypto_comp_tfm(tfm), 1309 src, slen, dst, dlen); 1310 } 1311 1312 #endif /* _LINUX_CRYPTO_H */ 1313 1314