/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <[email protected]>
 * Copyright (c) 2002 David S. Miller ([email protected])
 * Copyright (c) 2005 Herbert Xu <[email protected]>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <[email protected]>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _LINUX_CRYPTO_H
#define _LINUX_CRYPTO_H

#include <asm/atomic.h>
#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/uaccess.h>

/*
 * Algorithm masks and types.
 */
#define CRYPTO_ALG_TYPE_MASK		0x0000000f
#define CRYPTO_ALG_TYPE_CIPHER		0x00000001
#define CRYPTO_ALG_TYPE_DIGEST		0x00000002
#define CRYPTO_ALG_TYPE_HASH		0x00000003
#define CRYPTO_ALG_TYPE_BLKCIPHER	0x00000004
#define CRYPTO_ALG_TYPE_COMPRESS	0x00000005
#define CRYPTO_ALG_TYPE_AEAD		0x00000006

#define CRYPTO_ALG_TYPE_HASH_MASK	0x0000000e

#define CRYPTO_ALG_LARVAL		0x00000010
#define CRYPTO_ALG_DEAD			0x00000020
#define CRYPTO_ALG_DYING		0x00000040
#define CRYPTO_ALG_ASYNC		0x00000080

/*
 * Set this bit if and only if the algorithm requires another algorithm of
 * the same type to handle corner cases.
 */
#define CRYPTO_ALG_NEED_FALLBACK	0x00000100

/*
 * Transform masks and values (for crt_flags).
 */
#define CRYPTO_TFM_REQ_MASK		0x000fff00
#define CRYPTO_TFM_RES_MASK		0xfff00000

#define CRYPTO_TFM_REQ_WEAK_KEY		0x00000100
#define CRYPTO_TFM_REQ_MAY_SLEEP	0x00000200
#define CRYPTO_TFM_REQ_MAY_BACKLOG	0x00000400
#define CRYPTO_TFM_RES_WEAK_KEY		0x00100000
#define CRYPTO_TFM_RES_BAD_KEY_LEN	0x00200000
#define CRYPTO_TFM_RES_BAD_KEY_SCHED	0x00400000
#define CRYPTO_TFM_RES_BAD_BLOCK_LEN	0x00800000
#define CRYPTO_TFM_RES_BAD_FLAGS	0x01000000

/*
 * Miscellaneous stuff.
 */
#define CRYPTO_MAX_ALG_NAME		64
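/*
 * Example (illustrative, not part of the original header): callers select an
 * algorithm type by putting the desired CRYPTO_ALG_TYPE_* value in the type
 * bits and listing the bits they care about in the mask.  A caller that must
 * have a synchronous block cipher would do something like:
 *
 *	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
 *	u32 mask = CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC;
 *
 *	tfm = crypto_alloc_base("cbc(aes)", type, mask);
 *
 * Including CRYPTO_ALG_ASYNC in the mask while leaving it clear in the type
 * excludes asynchronous implementations; the crypto_alloc_blkcipher() helper
 * further down performs this same adjustment.
 */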
/*
 * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
 * declaration) is used to ensure that the crypto_tfm context structure is
 * aligned correctly for the given architecture so that there are no alignment
 * faults for C data types.  In particular, this is required on platforms such
 * as arm where pointers are 32-bit aligned but there are data types such as
 * u64 which require 64-bit alignment.
 */
#if defined(ARCH_KMALLOC_MINALIGN)
#define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN
#elif defined(ARCH_SLAB_MINALIGN)
#define CRYPTO_MINALIGN ARCH_SLAB_MINALIGN
#endif

#ifdef CRYPTO_MINALIGN
#define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))
#else
#define CRYPTO_MINALIGN_ATTR
#endif

struct scatterlist;
struct crypto_ablkcipher;
struct crypto_async_request;
struct crypto_aead;
struct crypto_blkcipher;
struct crypto_hash;
struct crypto_tfm;
struct crypto_type;

typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);

struct crypto_async_request {
	struct list_head list;
	crypto_completion_t complete;
	void *data;
	struct crypto_tfm *tfm;

	u32 flags;
};

struct ablkcipher_request {
	struct crypto_async_request base;

	unsigned int nbytes;

	void *info;

	struct scatterlist *src;
	struct scatterlist *dst;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

/**
 * struct aead_request - AEAD request
 * @base: Common attributes for async crypto requests
 * @assoclen: Length in bytes of associated data for authentication
 * @cryptlen: Length of data to be encrypted or decrypted
 * @iv: Initialisation vector
 * @assoc: Associated data
 * @src: Source data
 * @dst: Destination data
 * @__ctx: Start of private context data
 */
struct aead_request {
	struct crypto_async_request base;

	unsigned int assoclen;
	unsigned int cryptlen;

	u8 *iv;

	struct scatterlist *assoc;
	struct scatterlist *src;
	struct scatterlist *dst;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct blkcipher_desc {
	struct crypto_blkcipher *tfm;
	void *info;
	u32 flags;
};

struct cipher_desc {
	struct crypto_tfm *tfm;
	void (*crfn)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst,
			     const u8 *src, unsigned int nbytes);
	void *info;
};

struct hash_desc {
	struct crypto_hash *tfm;
	u32 flags;
};
/*
 * Algorithms: modular crypto algorithm implementations, managed
 * via crypto_register_alg() and crypto_unregister_alg().
 */
struct ablkcipher_alg {
	int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct ablkcipher_request *req);
	int (*decrypt)(struct ablkcipher_request *req);

	unsigned int min_keysize;
	unsigned int max_keysize;
	unsigned int ivsize;
};

struct aead_alg {
	int (*setkey)(struct crypto_aead *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct aead_request *req);
	int (*decrypt)(struct aead_request *req);

	unsigned int ivsize;
	unsigned int authsize;
};

struct blkcipher_alg {
	int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes);
	int (*decrypt)(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes);

	unsigned int min_keysize;
	unsigned int max_keysize;
	unsigned int ivsize;
};

struct cipher_alg {
	unsigned int cia_min_keysize;
	unsigned int cia_max_keysize;
	int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
			  unsigned int keylen);
	void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};

struct digest_alg {
	unsigned int dia_digestsize;
	void (*dia_init)(struct crypto_tfm *tfm);
	void (*dia_update)(struct crypto_tfm *tfm, const u8 *data,
			   unsigned int len);
	void (*dia_final)(struct crypto_tfm *tfm, u8 *out);
	int (*dia_setkey)(struct crypto_tfm *tfm, const u8 *key,
			  unsigned int keylen);
};

struct hash_alg {
	int (*init)(struct hash_desc *desc);
	int (*update)(struct hash_desc *desc, struct scatterlist *sg,
		      unsigned int nbytes);
	int (*final)(struct hash_desc *desc, u8 *out);
	int (*digest)(struct hash_desc *desc, struct scatterlist *sg,
		      unsigned int nbytes, u8 *out);
	int (*setkey)(struct crypto_hash *tfm, const u8 *key,
		      unsigned int keylen);

	unsigned int digestsize;
};

struct compress_alg {
	int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
			    unsigned int slen, u8 *dst, unsigned int *dlen);
	int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
			      unsigned int slen, u8 *dst, unsigned int *dlen);
};

#define cra_ablkcipher	cra_u.ablkcipher
#define cra_aead	cra_u.aead
#define cra_blkcipher	cra_u.blkcipher
#define cra_cipher	cra_u.cipher
#define cra_digest	cra_u.digest
#define cra_hash	cra_u.hash
#define cra_compress	cra_u.compress

struct crypto_alg {
	struct list_head cra_list;
	struct list_head cra_users;

	u32 cra_flags;
	unsigned int cra_blocksize;
	unsigned int cra_ctxsize;
	unsigned int cra_alignmask;

	int cra_priority;
	atomic_t cra_refcnt;

	char cra_name[CRYPTO_MAX_ALG_NAME];
	char cra_driver_name[CRYPTO_MAX_ALG_NAME];

	const struct crypto_type *cra_type;

	union {
		struct ablkcipher_alg ablkcipher;
		struct aead_alg aead;
		struct blkcipher_alg blkcipher;
		struct cipher_alg cipher;
		struct digest_alg digest;
		struct hash_alg hash;
		struct compress_alg compress;
	} cra_u;

	int (*cra_init)(struct crypto_tfm *tfm);
	void (*cra_exit)(struct crypto_tfm *tfm);
	void (*cra_destroy)(struct crypto_alg *alg);

	struct module *cra_module;
};

/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
int crypto_unregister_alg(struct crypto_alg *alg);
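/*
 * Example (illustrative sketch only; the foo_* names, struct foo_ctx and the
 * key/block sizes are hypothetical): a module providing a simple single-block
 * cipher fills in cra_cipher and registers the algorithm from its init hook.
 *
 *	static struct crypto_alg foo_alg = {
 *		.cra_name		= "foo",
 *		.cra_driver_name	= "foo-generic",
 *		.cra_priority		= 100,
 *		.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
 *		.cra_blocksize		= 16,
 *		.cra_ctxsize		= sizeof(struct foo_ctx),
 *		.cra_module		= THIS_MODULE,
 *		.cra_u			= { .cipher = {
 *			.cia_min_keysize	= 16,
 *			.cia_max_keysize	= 32,
 *			.cia_setkey		= foo_setkey,
 *			.cia_encrypt		= foo_encrypt,
 *			.cia_decrypt		= foo_decrypt } }
 *	};
 *
 *	static int __init foo_mod_init(void)
 *	{
 *		return crypto_register_alg(&foo_alg);
 *	}
 *
 *	static void __exit foo_mod_exit(void)
 *	{
 *		crypto_unregister_alg(&foo_alg);
 *	}
 */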
/*
 * Algorithm query interface.
 */
#ifdef CONFIG_CRYPTO
int crypto_has_alg(const char *name, u32 type, u32 mask);
#else
static inline int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	return 0;
}
#endif

/*
 * Transforms: user-instantiated objects which encapsulate algorithms
 * and core processing logic.  Managed via crypto_alloc_*() and
 * crypto_free_*(), as well as the various helpers below.
 */

struct ablkcipher_tfm {
	int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct ablkcipher_request *req);
	int (*decrypt)(struct ablkcipher_request *req);
	unsigned int ivsize;
	unsigned int reqsize;
};

struct aead_tfm {
	int (*setkey)(struct crypto_aead *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct aead_request *req);
	int (*decrypt)(struct aead_request *req);
	unsigned int ivsize;
	unsigned int authsize;
	unsigned int reqsize;
};

struct blkcipher_tfm {
	void *iv;
	int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes);
	int (*decrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes);
};

struct cipher_tfm {
	int (*cit_setkey)(struct crypto_tfm *tfm,
			  const u8 *key, unsigned int keylen);
	void (*cit_encrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	void (*cit_decrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};

struct hash_tfm {
	int (*init)(struct hash_desc *desc);
	int (*update)(struct hash_desc *desc,
		      struct scatterlist *sg, unsigned int nsg);
	int (*final)(struct hash_desc *desc, u8 *out);
	int (*digest)(struct hash_desc *desc, struct scatterlist *sg,
		      unsigned int nsg, u8 *out);
	int (*setkey)(struct crypto_hash *tfm, const u8 *key,
		      unsigned int keylen);
	unsigned int digestsize;
};

struct compress_tfm {
	int (*cot_compress)(struct crypto_tfm *tfm,
			    const u8 *src, unsigned int slen,
			    u8 *dst, unsigned int *dlen);
	int (*cot_decompress)(struct crypto_tfm *tfm,
			      const u8 *src, unsigned int slen,
			      u8 *dst, unsigned int *dlen);
};

#define crt_ablkcipher	crt_u.ablkcipher
#define crt_aead	crt_u.aead
#define crt_blkcipher	crt_u.blkcipher
#define crt_cipher	crt_u.cipher
#define crt_hash	crt_u.hash
#define crt_compress	crt_u.compress

struct crypto_tfm {

	u32 crt_flags;

	union {
		struct ablkcipher_tfm ablkcipher;
		struct aead_tfm aead;
		struct blkcipher_tfm blkcipher;
		struct cipher_tfm cipher;
		struct hash_tfm hash;
		struct compress_tfm compress;
	} crt_u;

	struct crypto_alg *__crt_alg;

	void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_ablkcipher {
	struct crypto_tfm base;
};

struct crypto_aead {
	struct crypto_tfm base;
};

struct crypto_blkcipher {
	struct crypto_tfm base;
};

struct crypto_cipher {
	struct crypto_tfm base;
};

struct crypto_comp {
	struct crypto_tfm base;
};

struct crypto_hash {
	struct crypto_tfm base;
};
enum {
	CRYPTOA_UNSPEC,
	CRYPTOA_ALG,
	CRYPTOA_TYPE,
	CRYPTOA_U32,
	__CRYPTOA_MAX,
};

#define CRYPTOA_MAX (__CRYPTOA_MAX - 1)

/* Maximum number of (rtattr) parameters for each template. */
#define CRYPTO_MAX_ATTRS 32

struct crypto_attr_alg {
	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_attr_type {
	u32 type;
	u32 mask;
};

struct crypto_attr_u32 {
	u32 num;
};

/*
 * Transform user interface.
 */

struct crypto_tfm *crypto_alloc_tfm(const char *alg_name, u32 tfm_flags);
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
void crypto_free_tfm(struct crypto_tfm *tfm);

/*
 * Transform helpers which query the underlying algorithm.
 */
static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_name;
}

static inline const char *crypto_tfm_alg_driver_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_driver_name;
}

static inline int crypto_tfm_alg_priority(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_priority;
}

static inline const char *crypto_tfm_alg_modname(struct crypto_tfm *tfm)
{
	return module_name(tfm->__crt_alg->cra_module);
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_blocksize;
}

static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_alignmask;
}

static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm)
{
	return tfm->crt_flags;
}

static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags |= flags;
}

static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags &= ~flags;
}

static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
	return tfm->__crt_ctx;
}

static inline unsigned int crypto_tfm_ctx_alignment(void)
{
	struct crypto_tfm *tfm;
	return __alignof__(tfm->__crt_ctx);
}
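/*
 * Example (illustrative, hypothetical names): the per-transform context
 * returned by crypto_tfm_ctx() starts at __crt_ctx, so a driver context
 * containing u64 members can be used directly thanks to CRYPTO_MINALIGN_ATTR
 * above.  The driver sets .cra_ctxsize = sizeof(struct foo_ctx) and accesses
 * the context from its callbacks:
 *
 *	struct foo_ctx {
 *		u64 counter;
 *		u32 key[8];
 *	};
 *
 *	static int foo_init_tfm(struct crypto_tfm *tfm)
 *	{
 *		struct foo_ctx *ctx = crypto_tfm_ctx(tfm);
 *
 *		ctx->counter = 0;
 *		return 0;
 *	}
 */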
/*
 * API wrappers.
 */
static inline struct crypto_ablkcipher *__crypto_ablkcipher_cast(
	struct crypto_tfm *tfm)
{
	return (struct crypto_ablkcipher *)tfm;
}

static inline struct crypto_ablkcipher *crypto_alloc_ablkcipher(
	const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_ablkcipher_cast(
		crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_ablkcipher_tfm(
	struct crypto_ablkcipher *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_ablkcipher(struct crypto_ablkcipher *tfm)
{
	crypto_free_tfm(crypto_ablkcipher_tfm(tfm));
}

static inline int crypto_has_ablkcipher(const char *alg_name, u32 type,
					u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline struct ablkcipher_tfm *crypto_ablkcipher_crt(
	struct crypto_ablkcipher *tfm)
{
	return &crypto_ablkcipher_tfm(tfm)->crt_ablkcipher;
}

static inline unsigned int crypto_ablkcipher_ivsize(
	struct crypto_ablkcipher *tfm)
{
	return crypto_ablkcipher_crt(tfm)->ivsize;
}

static inline unsigned int crypto_ablkcipher_blocksize(
	struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_ablkcipher_tfm(tfm));
}

static inline unsigned int crypto_ablkcipher_alignmask(
	struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_ablkcipher_tfm(tfm));
}

static inline u32 crypto_ablkcipher_get_flags(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_get_flags(crypto_ablkcipher_tfm(tfm));
}

static inline void crypto_ablkcipher_set_flags(struct crypto_ablkcipher *tfm,
					       u32 flags)
{
	crypto_tfm_set_flags(crypto_ablkcipher_tfm(tfm), flags);
}

static inline void crypto_ablkcipher_clear_flags(struct crypto_ablkcipher *tfm,
						 u32 flags)
{
	crypto_tfm_clear_flags(crypto_ablkcipher_tfm(tfm), flags);
}

static inline int crypto_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
					   const u8 *key, unsigned int keylen)
{
	return crypto_ablkcipher_crt(tfm)->setkey(tfm, key, keylen);
}

static inline struct crypto_ablkcipher *crypto_ablkcipher_reqtfm(
	struct ablkcipher_request *req)
{
	return __crypto_ablkcipher_cast(req->base.tfm);
}

static inline int crypto_ablkcipher_encrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_tfm *crt =
		crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
	return crt->encrypt(req);
}

static inline int crypto_ablkcipher_decrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_tfm *crt =
		crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
	return crt->decrypt(req);
}

static inline unsigned int crypto_ablkcipher_reqsize(
	struct crypto_ablkcipher *tfm)
{
	return crypto_ablkcipher_crt(tfm)->reqsize;
}

static inline void ablkcipher_request_set_tfm(
	struct ablkcipher_request *req, struct crypto_ablkcipher *tfm)
{
	req->base.tfm = crypto_ablkcipher_tfm(tfm);
}

static inline struct ablkcipher_request *ablkcipher_request_cast(
	struct crypto_async_request *req)
{
	return container_of(req, struct ablkcipher_request, base);
}

static inline struct ablkcipher_request *ablkcipher_request_alloc(
	struct crypto_ablkcipher *tfm, gfp_t gfp)
{
	struct ablkcipher_request *req;

	req = kmalloc(sizeof(struct ablkcipher_request) +
		      crypto_ablkcipher_reqsize(tfm), gfp);

	if (likely(req))
		ablkcipher_request_set_tfm(req, tfm);

	return req;
}

static inline void ablkcipher_request_free(struct ablkcipher_request *req)
{
	kfree(req);
}

static inline void ablkcipher_request_set_callback(
	struct ablkcipher_request *req,
	u32 flags, crypto_completion_t complete, void *data)
{
	req->base.complete = complete;
	req->base.data = data;
	req->base.flags = flags;
}

static inline void ablkcipher_request_set_crypt(
	struct ablkcipher_request *req,
	struct scatterlist *src, struct scatterlist *dst,
	unsigned int nbytes, void *iv)
{
	req->src = src;
	req->dst = dst;
	req->nbytes = nbytes;
	req->info = iv;
}
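/*
 * Example (illustrative sketch; error handling is abbreviated and the
 * completion routine my_complete, the scatterlists and the lengths are
 * hypothetical): asynchronous block cipher usage follows the
 * alloc/setkey/request pattern provided by the helpers above.
 *
 *	struct crypto_ablkcipher *tfm;
 *	struct ablkcipher_request *req;
 *	int err;
 *
 *	tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_ablkcipher_setkey(tfm, key, keylen);
 *
 *	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
 *	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *					my_complete, my_data);
 *	ablkcipher_request_set_crypt(req, src_sg, dst_sg, nbytes, iv);
 *
 *	err = crypto_ablkcipher_encrypt(req);
 *
 * A return value of -EINPROGRESS (or -EBUSY when MAY_BACKLOG is set) means
 * my_complete() will report the final status later; the request must stay
 * allocated until then.  Once processing has finished, the caller releases
 * the request with ablkcipher_request_free() and the transform with
 * crypto_free_ablkcipher().
 */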
static inline struct crypto_aead *__crypto_aead_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_aead *)tfm;
}

static inline struct crypto_aead *crypto_alloc_aead(const char *alg_name,
						    u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_AEAD;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_aead_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_aead_tfm(struct crypto_aead *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_aead(struct crypto_aead *tfm)
{
	crypto_free_tfm(crypto_aead_tfm(tfm));
}

static inline struct aead_tfm *crypto_aead_crt(struct crypto_aead *tfm)
{
	return &crypto_aead_tfm(tfm)->crt_aead;
}

static inline unsigned int crypto_aead_ivsize(struct crypto_aead *tfm)
{
	return crypto_aead_crt(tfm)->ivsize;
}

static inline unsigned int crypto_aead_authsize(struct crypto_aead *tfm)
{
	return crypto_aead_crt(tfm)->authsize;
}

static inline unsigned int crypto_aead_blocksize(struct crypto_aead *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_aead_tfm(tfm));
}

static inline unsigned int crypto_aead_alignmask(struct crypto_aead *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_aead_tfm(tfm));
}

static inline u32 crypto_aead_get_flags(struct crypto_aead *tfm)
{
	return crypto_tfm_get_flags(crypto_aead_tfm(tfm));
}

static inline void crypto_aead_set_flags(struct crypto_aead *tfm, u32 flags)
{
	crypto_tfm_set_flags(crypto_aead_tfm(tfm), flags);
}

static inline void crypto_aead_clear_flags(struct crypto_aead *tfm, u32 flags)
{
	crypto_tfm_clear_flags(crypto_aead_tfm(tfm), flags);
}

static inline int crypto_aead_setkey(struct crypto_aead *tfm, const u8 *key,
				     unsigned int keylen)
{
	return crypto_aead_crt(tfm)->setkey(tfm, key, keylen);
}

static inline struct crypto_aead *crypto_aead_reqtfm(struct aead_request *req)
{
	return __crypto_aead_cast(req->base.tfm);
}

static inline int crypto_aead_encrypt(struct aead_request *req)
{
	return crypto_aead_crt(crypto_aead_reqtfm(req))->encrypt(req);
}

static inline int crypto_aead_decrypt(struct aead_request *req)
{
	return crypto_aead_crt(crypto_aead_reqtfm(req))->decrypt(req);
}

static inline unsigned int crypto_aead_reqsize(struct crypto_aead *tfm)
{
	return crypto_aead_crt(tfm)->reqsize;
}

static inline void aead_request_set_tfm(struct aead_request *req,
					struct crypto_aead *tfm)
{
	req->base.tfm = crypto_aead_tfm(tfm);
}

static inline struct aead_request *aead_request_alloc(struct crypto_aead *tfm,
						      gfp_t gfp)
{
	struct aead_request *req;

	req = kmalloc(sizeof(*req) + crypto_aead_reqsize(tfm), gfp);

	if (likely(req))
		aead_request_set_tfm(req, tfm);

	return req;
}

static inline void aead_request_free(struct aead_request *req)
{
	kfree(req);
}

static inline void aead_request_set_callback(struct aead_request *req,
					     u32 flags,
					     crypto_completion_t complete,
					     void *data)
{
	req->base.complete = complete;
	req->base.data = data;
	req->base.flags = flags;
}

static inline void aead_request_set_crypt(struct aead_request *req,
					  struct scatterlist *src,
					  struct scatterlist *dst,
					  unsigned int cryptlen, u8 *iv)
{
	req->src = src;
	req->dst = dst;
	req->cryptlen = cryptlen;
	req->iv = iv;
}

static inline void aead_request_set_assoc(struct aead_request *req,
					  struct scatterlist *assoc,
					  unsigned int assoclen)
{
	req->assoc = assoc;
	req->assoclen = assoclen;
}
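/*
 * Example (illustrative sketch; the "gcm(aes)" name, scatterlists and the
 * completion callback are hypothetical): an AEAD request carries both the
 * data to be encrypted or decrypted and the associated data that is only
 * authenticated.
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	struct aead_request *req;
 *	int err;
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_aead_setkey(tfm, key, keylen);
 *
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				  my_complete, my_data);
 *	aead_request_set_crypt(req, src_sg, dst_sg, cryptlen, iv);
 *	aead_request_set_assoc(req, assoc_sg, assoclen);
 *
 *	err = crypto_aead_encrypt(req);
 *
 * On encryption the destination is expected to provide room for the
 * authentication tag of crypto_aead_authsize(tfm) bytes in addition to
 * cryptlen bytes of ciphertext.
 */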
static inline struct crypto_blkcipher *__crypto_blkcipher_cast(
	struct crypto_tfm *tfm)
{
	return (struct crypto_blkcipher *)tfm;
}

static inline struct crypto_blkcipher *crypto_blkcipher_cast(
	struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_BLKCIPHER);
	return __crypto_blkcipher_cast(tfm);
}

static inline struct crypto_blkcipher *crypto_alloc_blkcipher(
	const char *alg_name, u32 type, u32 mask)
{
	type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC;

	return __crypto_blkcipher_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_blkcipher_tfm(
	struct crypto_blkcipher *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_blkcipher(struct crypto_blkcipher *tfm)
{
	crypto_free_tfm(crypto_blkcipher_tfm(tfm));
}

static inline int crypto_has_blkcipher(const char *alg_name, u32 type, u32 mask)
{
	type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC;

	return crypto_has_alg(alg_name, type, mask);
}

static inline const char *crypto_blkcipher_name(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_alg_name(crypto_blkcipher_tfm(tfm));
}

static inline struct blkcipher_tfm *crypto_blkcipher_crt(
	struct crypto_blkcipher *tfm)
{
	return &crypto_blkcipher_tfm(tfm)->crt_blkcipher;
}

static inline struct blkcipher_alg *crypto_blkcipher_alg(
	struct crypto_blkcipher *tfm)
{
	return &crypto_blkcipher_tfm(tfm)->__crt_alg->cra_blkcipher;
}

static inline unsigned int crypto_blkcipher_ivsize(struct crypto_blkcipher *tfm)
{
	return crypto_blkcipher_alg(tfm)->ivsize;
}

static inline unsigned int crypto_blkcipher_blocksize(
	struct crypto_blkcipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_blkcipher_tfm(tfm));
}

static inline unsigned int crypto_blkcipher_alignmask(
	struct crypto_blkcipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_blkcipher_tfm(tfm));
}

static inline u32 crypto_blkcipher_get_flags(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_get_flags(crypto_blkcipher_tfm(tfm));
}

static inline void crypto_blkcipher_set_flags(struct crypto_blkcipher *tfm,
					      u32 flags)
{
	crypto_tfm_set_flags(crypto_blkcipher_tfm(tfm), flags);
}

static inline void crypto_blkcipher_clear_flags(struct crypto_blkcipher *tfm,
						u32 flags)
{
	crypto_tfm_clear_flags(crypto_blkcipher_tfm(tfm), flags);
}

static inline int crypto_blkcipher_setkey(struct crypto_blkcipher *tfm,
					  const u8 *key, unsigned int keylen)
{
	return crypto_blkcipher_crt(tfm)->setkey(crypto_blkcipher_tfm(tfm),
						 key, keylen);
}

static inline int crypto_blkcipher_encrypt(struct blkcipher_desc *desc,
					   struct scatterlist *dst,
					   struct scatterlist *src,
					   unsigned int nbytes)
{
	desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
	return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
}

static inline int crypto_blkcipher_encrypt_iv(struct blkcipher_desc *desc,
					      struct scatterlist *dst,
					      struct scatterlist *src,
					      unsigned int nbytes)
{
	return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
}

static inline int crypto_blkcipher_decrypt(struct blkcipher_desc *desc,
					   struct scatterlist *dst,
					   struct scatterlist *src,
					   unsigned int nbytes)
{
	desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
	return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
}

static inline int crypto_blkcipher_decrypt_iv(struct blkcipher_desc *desc,
					      struct scatterlist *dst,
					      struct scatterlist *src,
					      unsigned int nbytes)
{
	return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
}

static inline void crypto_blkcipher_set_iv(struct crypto_blkcipher *tfm,
					   const u8 *src, unsigned int len)
{
	memcpy(crypto_blkcipher_crt(tfm)->iv, src, len);
}

static inline void crypto_blkcipher_get_iv(struct crypto_blkcipher *tfm,
					   u8 *dst, unsigned int len)
{
	memcpy(dst, crypto_blkcipher_crt(tfm)->iv, len);
}
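/*
 * Example (illustrative sketch; the buffers, lengths and the "cbc(aes)" name
 * are placeholders): synchronous block cipher usage goes through a
 * blkcipher_desc on the caller's stack.
 *
 *	struct crypto_blkcipher *tfm = crypto_alloc_blkcipher("cbc(aes)", 0, 0);
 *	struct blkcipher_desc desc;
 *	struct scatterlist sg;
 *	int err;
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	crypto_blkcipher_setkey(tfm, key, keylen);
 *	crypto_blkcipher_set_iv(tfm, iv, crypto_blkcipher_ivsize(tfm));
 *
 *	desc.tfm = tfm;
 *	desc.flags = 0;
 *
 *	sg_init_one(&sg, buf, buflen);
 *	err = crypto_blkcipher_encrypt(&desc, &sg, &sg, buflen);
 *
 *	crypto_free_blkcipher(tfm);
 *
 * crypto_blkcipher_encrypt_iv() may be used instead, with desc.info pointing
 * at a caller-supplied IV rather than the one stored in the transform.
 */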
static inline struct crypto_cipher *__crypto_cipher_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_cipher *)tfm;
}

static inline struct crypto_cipher *crypto_cipher_cast(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return __crypto_cipher_cast(tfm);
}

static inline struct crypto_cipher *crypto_alloc_cipher(const char *alg_name,
							u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_CIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_cipher_tfm(struct crypto_cipher *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_cipher(struct crypto_cipher *tfm)
{
	crypto_free_tfm(crypto_cipher_tfm(tfm));
}

static inline int crypto_has_cipher(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_CIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline struct cipher_tfm *crypto_cipher_crt(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->crt_cipher;
}

static inline unsigned int crypto_cipher_blocksize(struct crypto_cipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_cipher_tfm(tfm));
}

static inline unsigned int crypto_cipher_alignmask(struct crypto_cipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_cipher_tfm(tfm));
}

static inline u32 crypto_cipher_get_flags(struct crypto_cipher *tfm)
{
	return crypto_tfm_get_flags(crypto_cipher_tfm(tfm));
}

static inline void crypto_cipher_set_flags(struct crypto_cipher *tfm,
					   u32 flags)
{
	crypto_tfm_set_flags(crypto_cipher_tfm(tfm), flags);
}

static inline void crypto_cipher_clear_flags(struct crypto_cipher *tfm,
					     u32 flags)
{
	crypto_tfm_clear_flags(crypto_cipher_tfm(tfm), flags);
}

static inline int crypto_cipher_setkey(struct crypto_cipher *tfm,
				       const u8 *key, unsigned int keylen)
{
	return crypto_cipher_crt(tfm)->cit_setkey(crypto_cipher_tfm(tfm),
						  key, keylen);
}

static inline void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
					     u8 *dst, const u8 *src)
{
	crypto_cipher_crt(tfm)->cit_encrypt_one(crypto_cipher_tfm(tfm),
						dst, src);
}

static inline void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
					     u8 *dst, const u8 *src)
{
	crypto_cipher_crt(tfm)->cit_decrypt_one(crypto_cipher_tfm(tfm),
						dst, src);
}
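/*
 * Example (illustrative sketch, hypothetical key and buffer): crypto_cipher
 * processes exactly one block per call and is typically used by modes of
 * operation or code that needs raw single-block processing.
 *
 *	struct crypto_cipher *tfm = crypto_alloc_cipher("aes", 0, 0);
 *	u8 block[16];
 *	int err;
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_cipher_setkey(tfm, key, keylen);
 *	crypto_cipher_encrypt_one(tfm, block, block);	// one block, in place
 *	crypto_free_cipher(tfm);
 */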
static inline struct crypto_hash *__crypto_hash_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_hash *)tfm;
}

static inline struct crypto_hash *crypto_hash_cast(struct crypto_tfm *tfm)
{
	BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_HASH) &
	       CRYPTO_ALG_TYPE_HASH_MASK);
	return __crypto_hash_cast(tfm);
}

static inline struct crypto_hash *crypto_alloc_hash(const char *alg_name,
						    u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_HASH;
	mask |= CRYPTO_ALG_TYPE_HASH_MASK;

	return __crypto_hash_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_hash_tfm(struct crypto_hash *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_hash(struct crypto_hash *tfm)
{
	crypto_free_tfm(crypto_hash_tfm(tfm));
}

static inline int crypto_has_hash(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_HASH;
	mask |= CRYPTO_ALG_TYPE_HASH_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline struct hash_tfm *crypto_hash_crt(struct crypto_hash *tfm)
{
	return &crypto_hash_tfm(tfm)->crt_hash;
}

static inline unsigned int crypto_hash_blocksize(struct crypto_hash *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_hash_tfm(tfm));
}

static inline unsigned int crypto_hash_alignmask(struct crypto_hash *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_hash_tfm(tfm));
}

static inline unsigned int crypto_hash_digestsize(struct crypto_hash *tfm)
{
	return crypto_hash_crt(tfm)->digestsize;
}

static inline u32 crypto_hash_get_flags(struct crypto_hash *tfm)
{
	return crypto_tfm_get_flags(crypto_hash_tfm(tfm));
}

static inline void crypto_hash_set_flags(struct crypto_hash *tfm, u32 flags)
{
	crypto_tfm_set_flags(crypto_hash_tfm(tfm), flags);
}

static inline void crypto_hash_clear_flags(struct crypto_hash *tfm, u32 flags)
{
	crypto_tfm_clear_flags(crypto_hash_tfm(tfm), flags);
}

static inline int crypto_hash_init(struct hash_desc *desc)
{
	return crypto_hash_crt(desc->tfm)->init(desc);
}

static inline int crypto_hash_update(struct hash_desc *desc,
				     struct scatterlist *sg,
				     unsigned int nbytes)
{
	return crypto_hash_crt(desc->tfm)->update(desc, sg, nbytes);
}

static inline int crypto_hash_final(struct hash_desc *desc, u8 *out)
{
	return crypto_hash_crt(desc->tfm)->final(desc, out);
}

static inline int crypto_hash_digest(struct hash_desc *desc,
				     struct scatterlist *sg,
				     unsigned int nbytes, u8 *out)
{
	return crypto_hash_crt(desc->tfm)->digest(desc, sg, nbytes, out);
}

static inline int crypto_hash_setkey(struct crypto_hash *hash,
				     const u8 *key, unsigned int keylen)
{
	return crypto_hash_crt(hash)->setkey(hash, key, keylen);
}
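/*
 * Example (illustrative sketch; "sha1", buf and buflen are placeholders):
 * hashing a single buffer with the one-shot digest helper.  A keyed variant
 * (e.g. "hmac(sha1)" combined with crypto_hash_setkey()) works the same way,
 * and init/update/final may be used for incremental hashing.
 *
 *	struct crypto_hash *tfm = crypto_alloc_hash("sha1", 0, 0);
 *	struct hash_desc desc;
 *	struct scatterlist sg;
 *	u8 out[20];
 *	int err;
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	desc.tfm = tfm;
 *	desc.flags = 0;
 *
 *	sg_init_one(&sg, buf, buflen);
 *	err = crypto_hash_digest(&desc, &sg, buflen, out);
 *
 *	crypto_free_hash(tfm);
 */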
static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_comp *)tfm;
}

static inline struct crypto_comp *crypto_comp_cast(struct crypto_tfm *tfm)
{
	BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_COMPRESS) &
	       CRYPTO_ALG_TYPE_MASK);
	return __crypto_comp_cast(tfm);
}

static inline struct crypto_comp *crypto_alloc_comp(const char *alg_name,
						    u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_comp_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_comp_tfm(struct crypto_comp *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_comp(struct crypto_comp *tfm)
{
	crypto_free_tfm(crypto_comp_tfm(tfm));
}

static inline int crypto_has_comp(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline const char *crypto_comp_name(struct crypto_comp *tfm)
{
	return crypto_tfm_alg_name(crypto_comp_tfm(tfm));
}

static inline struct compress_tfm *crypto_comp_crt(struct crypto_comp *tfm)
{
	return &crypto_comp_tfm(tfm)->crt_compress;
}

static inline int crypto_comp_compress(struct crypto_comp *tfm,
				       const u8 *src, unsigned int slen,
				       u8 *dst, unsigned int *dlen)
{
	return crypto_comp_crt(tfm)->cot_compress(crypto_comp_tfm(tfm),
						  src, slen, dst, dlen);
}

static inline int crypto_comp_decompress(struct crypto_comp *tfm,
					 const u8 *src, unsigned int slen,
					 u8 *dst, unsigned int *dlen)
{
	return crypto_comp_crt(tfm)->cot_decompress(crypto_comp_tfm(tfm),
						    src, slen, dst, dlen);
}

#endif	/* _LINUX_CRYPTO_H */