/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <[email protected]>
 * Copyright (c) 2002 David S. Miller ([email protected])
 * Copyright (c) 2005 Herbert Xu <[email protected]>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <[email protected]>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _LINUX_CRYPTO_H
#define _LINUX_CRYPTO_H

#include <linux/atomic.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/bug.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/uaccess.h>

/*
 * Algorithm masks and types.
 */
#define CRYPTO_ALG_TYPE_MASK		0x0000000f
#define CRYPTO_ALG_TYPE_CIPHER		0x00000001
#define CRYPTO_ALG_TYPE_COMPRESS	0x00000002
#define CRYPTO_ALG_TYPE_AEAD		0x00000003
#define CRYPTO_ALG_TYPE_BLKCIPHER	0x00000004
#define CRYPTO_ALG_TYPE_ABLKCIPHER	0x00000005
#define CRYPTO_ALG_TYPE_GIVCIPHER	0x00000006
#define CRYPTO_ALG_TYPE_DIGEST		0x00000008
#define CRYPTO_ALG_TYPE_HASH		0x00000008
#define CRYPTO_ALG_TYPE_SHASH		0x00000009
#define CRYPTO_ALG_TYPE_AHASH		0x0000000a
#define CRYPTO_ALG_TYPE_RNG		0x0000000c
#define CRYPTO_ALG_TYPE_PCOMPRESS	0x0000000f

#define CRYPTO_ALG_TYPE_HASH_MASK	0x0000000e
#define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000c
#define CRYPTO_ALG_TYPE_BLKCIPHER_MASK	0x0000000c

#define CRYPTO_ALG_LARVAL		0x00000010
#define CRYPTO_ALG_DEAD			0x00000020
#define CRYPTO_ALG_DYING		0x00000040
#define CRYPTO_ALG_ASYNC		0x00000080

/*
 * Set this bit if and only if the algorithm requires another algorithm of
 * the same type to handle corner cases.
 */
#define CRYPTO_ALG_NEED_FALLBACK	0x00000100

/*
 * This bit is set for symmetric key ciphers that have already been wrapped
 * with a generic IV generator to prevent them from being wrapped again.
 */
#define CRYPTO_ALG_GENIV		0x00000200

/*
 * Set if the algorithm has passed automated run-time testing.  Note that
 * if there is no run-time testing for a given algorithm it is considered
 * to have passed.
 */
#define CRYPTO_ALG_TESTED		0x00000400

/*
 * Set if the algorithm is an instance that is built from templates.
 */
#define CRYPTO_ALG_INSTANCE		0x00000800

/*
 * Set this bit if the algorithm provided is hardware accelerated but
 * not available to userspace via an instruction set or similar mechanism.
 */
#define CRYPTO_ALG_KERN_DRIVER_ONLY	0x00001000

/*
 * Transform masks and values (for crt_flags).
 */
#define CRYPTO_TFM_REQ_MASK		0x000fff00
#define CRYPTO_TFM_RES_MASK		0xfff00000

#define CRYPTO_TFM_REQ_WEAK_KEY		0x00000100
#define CRYPTO_TFM_REQ_MAY_SLEEP	0x00000200
#define CRYPTO_TFM_REQ_MAY_BACKLOG	0x00000400
#define CRYPTO_TFM_RES_WEAK_KEY		0x00100000
#define CRYPTO_TFM_RES_BAD_KEY_LEN	0x00200000
#define CRYPTO_TFM_RES_BAD_KEY_SCHED	0x00400000
#define CRYPTO_TFM_RES_BAD_BLOCK_LEN	0x00800000
#define CRYPTO_TFM_RES_BAD_FLAGS	0x01000000
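
/*
 * Example (illustrative sketch, not part of the API): the (type, mask) pair
 * passed to the lookup and allocation helpers declared later in this file
 * selects on the algorithm flags above.  A bit set in the mask but clear in
 * the type requires that bit to be clear in the matched algorithm, e.g. to
 * ask whether a purely synchronous implementation is available ("sha1" is
 * just an example name):
 *
 *	if (crypto_has_alg("sha1", 0, CRYPTO_ALG_ASYNC))
 *		pr_debug("synchronous sha1 implementation available\n");
 */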
/*
 * Miscellaneous stuff.
 */
#define CRYPTO_MAX_ALG_NAME		64

/*
 * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
 * declaration) is used to ensure that the crypto_tfm context structure is
 * aligned correctly for the given architecture so that there are no alignment
 * faults for C data types.  In particular, this is required on platforms such
 * as arm where pointers are 32-bit aligned but there are data types such as
 * u64 which require 64-bit alignment.
 */
#define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN

#define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))

struct scatterlist;
struct crypto_ablkcipher;
struct crypto_async_request;
struct crypto_aead;
struct crypto_blkcipher;
struct crypto_hash;
struct crypto_rng;
struct crypto_tfm;
struct crypto_type;
struct aead_givcrypt_request;
struct skcipher_givcrypt_request;

typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);

struct crypto_async_request {
	struct list_head list;
	crypto_completion_t complete;
	void *data;
	struct crypto_tfm *tfm;

	u32 flags;
};

struct ablkcipher_request {
	struct crypto_async_request base;

	unsigned int nbytes;

	void *info;

	struct scatterlist *src;
	struct scatterlist *dst;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

/**
 * struct aead_request - AEAD request
 * @base: Common attributes for async crypto requests
 * @assoclen: Length in bytes of associated data for authentication
 * @cryptlen: Length of data to be encrypted or decrypted
 * @iv: Initialisation vector
 * @assoc: Associated data
 * @src: Source data
 * @dst: Destination data
 * @__ctx: Start of private context data
 */
struct aead_request {
	struct crypto_async_request base;

	unsigned int assoclen;
	unsigned int cryptlen;

	u8 *iv;

	struct scatterlist *assoc;
	struct scatterlist *src;
	struct scatterlist *dst;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct blkcipher_desc {
	struct crypto_blkcipher *tfm;
	void *info;
	u32 flags;
};

struct cipher_desc {
	struct crypto_tfm *tfm;
	void (*crfn)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst,
			     const u8 *src, unsigned int nbytes);
	void *info;
};

struct hash_desc {
	struct crypto_hash *tfm;
	u32 flags;
};
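
/*
 * Example (illustrative sketch): asynchronous requests report their final
 * status through the crypto_completion_t callback stored in
 * struct crypto_async_request.  A common pattern is to complete a
 * struct completion from the callback and wait for it in the caller.
 * The my_result/my_complete names below are invented for this example.
 *
 *	struct my_result {
 *		struct completion completion;
 *		int err;
 *	};
 *
 *	static void my_complete(struct crypto_async_request *req, int err)
 *	{
 *		struct my_result *res = req->data;
 *
 *		if (err == -EINPROGRESS)
 *			return;
 *		res->err = err;
 *		complete(&res->completion);
 *	}
 *
 * A request that was accepted with -EBUSY (possible when
 * CRYPTO_TFM_REQ_MAY_BACKLOG is set) signals the switch to real processing
 * with -EINPROGRESS, which is why the callback above ignores that value and
 * only records the final status.
 */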
/*
 * Algorithms: modular crypto algorithm implementations, managed
 * via crypto_register_alg() and crypto_unregister_alg().
 */
struct ablkcipher_alg {
	int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct ablkcipher_request *req);
	int (*decrypt)(struct ablkcipher_request *req);
	int (*givencrypt)(struct skcipher_givcrypt_request *req);
	int (*givdecrypt)(struct skcipher_givcrypt_request *req);

	const char *geniv;

	unsigned int min_keysize;
	unsigned int max_keysize;
	unsigned int ivsize;
};

struct aead_alg {
	int (*setkey)(struct crypto_aead *tfm, const u8 *key,
		      unsigned int keylen);
	int (*setauthsize)(struct crypto_aead *tfm, unsigned int authsize);
	int (*encrypt)(struct aead_request *req);
	int (*decrypt)(struct aead_request *req);
	int (*givencrypt)(struct aead_givcrypt_request *req);
	int (*givdecrypt)(struct aead_givcrypt_request *req);

	const char *geniv;

	unsigned int ivsize;
	unsigned int maxauthsize;
};

struct blkcipher_alg {
	int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes);
	int (*decrypt)(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes);

	const char *geniv;

	unsigned int min_keysize;
	unsigned int max_keysize;
	unsigned int ivsize;
};

struct cipher_alg {
	unsigned int cia_min_keysize;
	unsigned int cia_max_keysize;
	int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
			  unsigned int keylen);
	void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};

struct compress_alg {
	int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
			    unsigned int slen, u8 *dst, unsigned int *dlen);
	int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
			      unsigned int slen, u8 *dst, unsigned int *dlen);
};

struct rng_alg {
	int (*rng_make_random)(struct crypto_rng *tfm, u8 *rdata,
			       unsigned int dlen);
	int (*rng_reset)(struct crypto_rng *tfm, u8 *seed, unsigned int slen);

	unsigned int seedsize;
};


#define cra_ablkcipher	cra_u.ablkcipher
#define cra_aead	cra_u.aead
#define cra_blkcipher	cra_u.blkcipher
#define cra_cipher	cra_u.cipher
#define cra_compress	cra_u.compress
#define cra_rng		cra_u.rng

struct crypto_alg {
	struct list_head cra_list;
	struct list_head cra_users;

	u32 cra_flags;
	unsigned int cra_blocksize;
	unsigned int cra_ctxsize;
	unsigned int cra_alignmask;

	int cra_priority;
	atomic_t cra_refcnt;

	char cra_name[CRYPTO_MAX_ALG_NAME];
	char cra_driver_name[CRYPTO_MAX_ALG_NAME];

	const struct crypto_type *cra_type;

	union {
		struct ablkcipher_alg ablkcipher;
		struct aead_alg aead;
		struct blkcipher_alg blkcipher;
		struct cipher_alg cipher;
		struct compress_alg compress;
		struct rng_alg rng;
	} cra_u;

	int (*cra_init)(struct crypto_tfm *tfm);
	void (*cra_exit)(struct crypto_tfm *tfm);
	void (*cra_destroy)(struct crypto_alg *alg);

	struct module *cra_module;
};
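
/*
 * Example (illustrative sketch): a minimal single-block cipher declares a
 * struct crypto_alg with CRYPTO_ALG_TYPE_CIPHER and fills in the cipher
 * member of cra_u.  All names here (xor32, my_ctx, my_setkey, my_crypt)
 * are invented for the example and do not exist in the kernel.
 *
 *	static struct crypto_alg my_alg = {
 *		.cra_name		= "xor32",
 *		.cra_driver_name	= "xor32-generic",
 *		.cra_priority		= 100,
 *		.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
 *		.cra_blocksize		= 4,
 *		.cra_ctxsize		= sizeof(struct my_ctx),
 *		.cra_module		= THIS_MODULE,
 *		.cra_u			= { .cipher = {
 *			.cia_min_keysize	= 4,
 *			.cia_max_keysize	= 4,
 *			.cia_setkey		= my_setkey,
 *			.cia_encrypt		= my_crypt,
 *			.cia_decrypt		= my_crypt,
 *		} },
 *	};
 */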
/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
int crypto_unregister_alg(struct crypto_alg *alg);
int crypto_register_algs(struct crypto_alg *algs, int count);
int crypto_unregister_algs(struct crypto_alg *algs, int count);

/*
 * Algorithm query interface.
 */
int crypto_has_alg(const char *name, u32 type, u32 mask);
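
/*
 * Example (illustrative sketch, continuing the invented "xor32" algorithm
 * above): registration is normally done from module init/exit hooks.
 *
 *	static int __init my_module_init(void)
 *	{
 *		return crypto_register_alg(&my_alg);
 *	}
 *
 *	static void __exit my_module_exit(void)
 *	{
 *		crypto_unregister_alg(&my_alg);
 *	}
 *
 *	module_init(my_module_init);
 *	module_exit(my_module_exit);
 *
 * Drivers that provide several algorithms can register them in one go with
 * crypto_register_algs()/crypto_unregister_algs().
 */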
/*
 * Transforms: user-instantiated objects which encapsulate algorithms
 * and core processing logic.  Managed via crypto_alloc_*() and
 * crypto_free_*(), as well as the various helpers below.
 */

struct ablkcipher_tfm {
	int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct ablkcipher_request *req);
	int (*decrypt)(struct ablkcipher_request *req);
	int (*givencrypt)(struct skcipher_givcrypt_request *req);
	int (*givdecrypt)(struct skcipher_givcrypt_request *req);

	struct crypto_ablkcipher *base;

	unsigned int ivsize;
	unsigned int reqsize;
};

struct aead_tfm {
	int (*setkey)(struct crypto_aead *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct aead_request *req);
	int (*decrypt)(struct aead_request *req);
	int (*givencrypt)(struct aead_givcrypt_request *req);
	int (*givdecrypt)(struct aead_givcrypt_request *req);

	struct crypto_aead *base;

	unsigned int ivsize;
	unsigned int authsize;
	unsigned int reqsize;
};

struct blkcipher_tfm {
	void *iv;
	int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes);
	int (*decrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes);
};

struct cipher_tfm {
	int (*cit_setkey)(struct crypto_tfm *tfm,
			  const u8 *key, unsigned int keylen);
	void (*cit_encrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	void (*cit_decrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};

struct hash_tfm {
	int (*init)(struct hash_desc *desc);
	int (*update)(struct hash_desc *desc,
		      struct scatterlist *sg, unsigned int nsg);
	int (*final)(struct hash_desc *desc, u8 *out);
	int (*digest)(struct hash_desc *desc, struct scatterlist *sg,
		      unsigned int nsg, u8 *out);
	int (*setkey)(struct crypto_hash *tfm, const u8 *key,
		      unsigned int keylen);
	unsigned int digestsize;
};

struct compress_tfm {
	int (*cot_compress)(struct crypto_tfm *tfm,
			    const u8 *src, unsigned int slen,
			    u8 *dst, unsigned int *dlen);
	int (*cot_decompress)(struct crypto_tfm *tfm,
			      const u8 *src, unsigned int slen,
			      u8 *dst, unsigned int *dlen);
};

struct rng_tfm {
	int (*rng_gen_random)(struct crypto_rng *tfm, u8 *rdata,
			      unsigned int dlen);
	int (*rng_reset)(struct crypto_rng *tfm, u8 *seed, unsigned int slen);
};

#define crt_ablkcipher	crt_u.ablkcipher
#define crt_aead	crt_u.aead
#define crt_blkcipher	crt_u.blkcipher
#define crt_cipher	crt_u.cipher
#define crt_hash	crt_u.hash
#define crt_compress	crt_u.compress
#define crt_rng		crt_u.rng

struct crypto_tfm {

	u32 crt_flags;

	union {
		struct ablkcipher_tfm ablkcipher;
		struct aead_tfm aead;
		struct blkcipher_tfm blkcipher;
		struct cipher_tfm cipher;
		struct hash_tfm hash;
		struct compress_tfm compress;
		struct rng_tfm rng;
	} crt_u;

	void (*exit)(struct crypto_tfm *tfm);

	struct crypto_alg *__crt_alg;

	void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_ablkcipher {
	struct crypto_tfm base;
};

struct crypto_aead {
	struct crypto_tfm base;
};

struct crypto_blkcipher {
	struct crypto_tfm base;
};

struct crypto_cipher {
	struct crypto_tfm base;
};

struct crypto_comp {
	struct crypto_tfm base;
};

struct crypto_hash {
	struct crypto_tfm base;
};

struct crypto_rng {
	struct crypto_tfm base;
};

enum {
	CRYPTOA_UNSPEC,
	CRYPTOA_ALG,
	CRYPTOA_TYPE,
	CRYPTOA_U32,
	__CRYPTOA_MAX,
};

#define CRYPTOA_MAX (__CRYPTOA_MAX - 1)

/* Maximum number of (rtattr) parameters for each template. */
#define CRYPTO_MAX_ATTRS 32

struct crypto_attr_alg {
	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_attr_type {
	u32 type;
	u32 mask;
};

struct crypto_attr_u32 {
	u32 num;
};

/*
 * Transform user interface.
 */

struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm);

static inline void crypto_free_tfm(struct crypto_tfm *tfm)
{
	return crypto_destroy_tfm(tfm, tfm);
}

int alg_test(const char *driver, const char *alg, u32 type, u32 mask);

/*
 * Transform helpers which query the underlying algorithm.
 */
static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_name;
}

static inline const char *crypto_tfm_alg_driver_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_driver_name;
}

static inline int crypto_tfm_alg_priority(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_priority;
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_blocksize;
}

static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_alignmask;
}

static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm)
{
	return tfm->crt_flags;
}

static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags |= flags;
}

static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags &= ~flags;
}

static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
	return tfm->__crt_ctx;
}

static inline unsigned int crypto_tfm_ctx_alignment(void)
{
	struct crypto_tfm *tfm;
	return __alignof__(tfm->__crt_ctx);
}
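
/*
 * Example (illustrative sketch): allocating a transform through
 * crypto_alloc_base() and querying it with the helpers above.  "aes" is an
 * example name and error handling is abbreviated.
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_base("aes", CRYPTO_ALG_TYPE_CIPHER,
 *				CRYPTO_ALG_TYPE_MASK);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	pr_debug("using %s (block size %u, alignmask 0x%x)\n",
 *		 crypto_tfm_alg_driver_name(tfm),
 *		 crypto_tfm_alg_blocksize(tfm),
 *		 crypto_tfm_alg_alignmask(tfm));
 *
 *	crypto_free_tfm(tfm);
 *
 * In practice the type-specific wrappers below (crypto_alloc_cipher(),
 * crypto_alloc_hash(), ...) are normally used instead of calling
 * crypto_alloc_base() directly.
 */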
/*
 * API wrappers.
 */
static inline struct crypto_ablkcipher *__crypto_ablkcipher_cast(
	struct crypto_tfm *tfm)
{
	return (struct crypto_ablkcipher *)tfm;
}

static inline u32 crypto_skcipher_type(u32 type)
{
	type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
	return type;
}

static inline u32 crypto_skcipher_mask(u32 mask)
{
	mask &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
	mask |= CRYPTO_ALG_TYPE_BLKCIPHER_MASK;
	return mask;
}

struct crypto_ablkcipher *crypto_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask);

static inline struct crypto_tfm *crypto_ablkcipher_tfm(
	struct crypto_ablkcipher *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_ablkcipher(struct crypto_ablkcipher *tfm)
{
	crypto_free_tfm(crypto_ablkcipher_tfm(tfm));
}

static inline int crypto_has_ablkcipher(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_has_alg(alg_name, crypto_skcipher_type(type),
			      crypto_skcipher_mask(mask));
}

static inline struct ablkcipher_tfm *crypto_ablkcipher_crt(
	struct crypto_ablkcipher *tfm)
{
	return &crypto_ablkcipher_tfm(tfm)->crt_ablkcipher;
}

static inline unsigned int crypto_ablkcipher_ivsize(
	struct crypto_ablkcipher *tfm)
{
	return crypto_ablkcipher_crt(tfm)->ivsize;
}

static inline unsigned int crypto_ablkcipher_blocksize(
	struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_ablkcipher_tfm(tfm));
}

static inline unsigned int crypto_ablkcipher_alignmask(
	struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_ablkcipher_tfm(tfm));
}

static inline u32 crypto_ablkcipher_get_flags(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_get_flags(crypto_ablkcipher_tfm(tfm));
}

static inline void crypto_ablkcipher_set_flags(struct crypto_ablkcipher *tfm,
					       u32 flags)
{
	crypto_tfm_set_flags(crypto_ablkcipher_tfm(tfm), flags);
}

static inline void crypto_ablkcipher_clear_flags(struct crypto_ablkcipher *tfm,
						 u32 flags)
{
	crypto_tfm_clear_flags(crypto_ablkcipher_tfm(tfm), flags);
}

static inline int crypto_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
					   const u8 *key, unsigned int keylen)
{
	struct ablkcipher_tfm *crt = crypto_ablkcipher_crt(tfm);

	return crt->setkey(crt->base, key, keylen);
}

static inline struct crypto_ablkcipher *crypto_ablkcipher_reqtfm(
	struct ablkcipher_request *req)
{
	return __crypto_ablkcipher_cast(req->base.tfm);
}

static inline int crypto_ablkcipher_encrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_tfm *crt =
		crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
	return crt->encrypt(req);
}

static inline int crypto_ablkcipher_decrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_tfm *crt =
		crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
	return crt->decrypt(req);
}

static inline unsigned int crypto_ablkcipher_reqsize(
	struct crypto_ablkcipher *tfm)
{
	return crypto_ablkcipher_crt(tfm)->reqsize;
}

static inline void ablkcipher_request_set_tfm(
	struct ablkcipher_request *req, struct crypto_ablkcipher *tfm)
{
	req->base.tfm = crypto_ablkcipher_tfm(crypto_ablkcipher_crt(tfm)->base);
}

static inline struct ablkcipher_request *ablkcipher_request_cast(
	struct crypto_async_request *req)
{
	return container_of(req, struct ablkcipher_request, base);
}

static inline struct ablkcipher_request *ablkcipher_request_alloc(
	struct crypto_ablkcipher *tfm, gfp_t gfp)
{
	struct ablkcipher_request *req;

	req = kmalloc(sizeof(struct ablkcipher_request) +
		      crypto_ablkcipher_reqsize(tfm), gfp);

	if (likely(req))
		ablkcipher_request_set_tfm(req, tfm);

	return req;
}

static inline void ablkcipher_request_free(struct ablkcipher_request *req)
{
	kzfree(req);
}

static inline void ablkcipher_request_set_callback(
	struct ablkcipher_request *req,
	u32 flags, crypto_completion_t complete, void *data)
{
	req->base.complete = complete;
	req->base.data = data;
	req->base.flags = flags;
}

static inline void ablkcipher_request_set_crypt(
	struct ablkcipher_request *req,
	struct scatterlist *src, struct scatterlist *dst,
	unsigned int nbytes, void *iv)
{
	req->src = src;
	req->dst = dst;
	req->nbytes = nbytes;
	req->info = iv;
}
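
/*
 * Example (illustrative sketch): encrypting a scatterlist with an
 * asynchronous block cipher.  "cbc(aes)" and the 16-byte key are example
 * choices; my_result/my_complete are the invented helpers from the
 * completion callback example earlier in this file.
 *
 *	struct crypto_ablkcipher *tfm;
 *	struct ablkcipher_request *req;
 *	struct my_result res;
 *	int err;
 *
 *	tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_ablkcipher_setkey(tfm, key, 16);
 *	if (err)
 *		goto out;
 *
 *	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
 *	if (!req) {
 *		err = -ENOMEM;
 *		goto out;
 *	}
 *
 *	init_completion(&res.completion);
 *	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *					my_complete, &res);
 *	ablkcipher_request_set_crypt(req, sg, sg, nbytes, iv);
 *
 *	err = crypto_ablkcipher_encrypt(req);
 *	if (err == -EINPROGRESS || err == -EBUSY) {
 *		wait_for_completion(&res.completion);
 *		err = res.err;
 *	}
 *
 *	ablkcipher_request_free(req);
 * out:
 *	crypto_free_ablkcipher(tfm);
 *	return err;
 */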
static inline struct crypto_aead *__crypto_aead_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_aead *)tfm;
}

struct crypto_aead *crypto_alloc_aead(const char *alg_name, u32 type, u32 mask);

static inline struct crypto_tfm *crypto_aead_tfm(struct crypto_aead *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_aead(struct crypto_aead *tfm)
{
	crypto_free_tfm(crypto_aead_tfm(tfm));
}

static inline struct aead_tfm *crypto_aead_crt(struct crypto_aead *tfm)
{
	return &crypto_aead_tfm(tfm)->crt_aead;
}

static inline unsigned int crypto_aead_ivsize(struct crypto_aead *tfm)
{
	return crypto_aead_crt(tfm)->ivsize;
}

static inline unsigned int crypto_aead_authsize(struct crypto_aead *tfm)
{
	return crypto_aead_crt(tfm)->authsize;
}

static inline unsigned int crypto_aead_blocksize(struct crypto_aead *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_aead_tfm(tfm));
}

static inline unsigned int crypto_aead_alignmask(struct crypto_aead *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_aead_tfm(tfm));
}

static inline u32 crypto_aead_get_flags(struct crypto_aead *tfm)
{
	return crypto_tfm_get_flags(crypto_aead_tfm(tfm));
}

static inline void crypto_aead_set_flags(struct crypto_aead *tfm, u32 flags)
{
	crypto_tfm_set_flags(crypto_aead_tfm(tfm), flags);
}

static inline void crypto_aead_clear_flags(struct crypto_aead *tfm, u32 flags)
{
	crypto_tfm_clear_flags(crypto_aead_tfm(tfm), flags);
}

static inline int crypto_aead_setkey(struct crypto_aead *tfm, const u8 *key,
				     unsigned int keylen)
{
	struct aead_tfm *crt = crypto_aead_crt(tfm);

	return crt->setkey(crt->base, key, keylen);
}

int crypto_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize);

static inline struct crypto_aead *crypto_aead_reqtfm(struct aead_request *req)
{
	return __crypto_aead_cast(req->base.tfm);
}

static inline int crypto_aead_encrypt(struct aead_request *req)
{
	return crypto_aead_crt(crypto_aead_reqtfm(req))->encrypt(req);
}

static inline int crypto_aead_decrypt(struct aead_request *req)
{
	return crypto_aead_crt(crypto_aead_reqtfm(req))->decrypt(req);
}

static inline unsigned int crypto_aead_reqsize(struct crypto_aead *tfm)
{
	return crypto_aead_crt(tfm)->reqsize;
}

static inline void aead_request_set_tfm(struct aead_request *req,
					struct crypto_aead *tfm)
{
	req->base.tfm = crypto_aead_tfm(crypto_aead_crt(tfm)->base);
}

static inline struct aead_request *aead_request_alloc(struct crypto_aead *tfm,
						      gfp_t gfp)
{
	struct aead_request *req;

	req = kmalloc(sizeof(*req) + crypto_aead_reqsize(tfm), gfp);

	if (likely(req))
		aead_request_set_tfm(req, tfm);

	return req;
}

static inline void aead_request_free(struct aead_request *req)
{
	kzfree(req);
}

static inline void aead_request_set_callback(struct aead_request *req,
					     u32 flags,
					     crypto_completion_t complete,
					     void *data)
{
	req->base.complete = complete;
	req->base.data = data;
	req->base.flags = flags;
}

static inline void aead_request_set_crypt(struct aead_request *req,
					  struct scatterlist *src,
					  struct scatterlist *dst,
					  unsigned int cryptlen, u8 *iv)
{
	req->src = src;
	req->dst = dst;
	req->cryptlen = cryptlen;
	req->iv = iv;
}

static inline void aead_request_set_assoc(struct aead_request *req,
					  struct scatterlist *assoc,
					  unsigned int assoclen)
{
	req->assoc = assoc;
	req->assoclen = assoclen;
}
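
/*
 * Example (illustrative sketch): authenticated encryption with an AEAD
 * transform.  "gcm(aes)" and the sizes are example choices, my_complete()
 * and my_result are the invented helpers from the completion callback
 * example above, and error handling is abbreviated.
 *
 *	struct crypto_aead *tfm;
 *	struct aead_request *req;
 *	struct my_result res;
 *	int err;
 *
 *	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_aead_setkey(tfm, key, 16);
 *	if (!err)
 *		err = crypto_aead_setauthsize(tfm, 16);
 *	if (err)
 *		goto out;
 *
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	if (!req) {
 *		err = -ENOMEM;
 *		goto out;
 *	}
 *
 *	init_completion(&res.completion);
 *	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				  my_complete, &res);
 *	aead_request_set_crypt(req, sg, sg, cryptlen, iv);
 *	aead_request_set_assoc(req, assoc_sg, assoclen);
 *
 *	err = crypto_aead_encrypt(req);
 *	if (err == -EINPROGRESS || err == -EBUSY) {
 *		wait_for_completion(&res.completion);
 *		err = res.err;
 *	}
 *
 *	aead_request_free(req);
 * out:
 *	crypto_free_aead(tfm);
 *	return err;
 *
 * On encryption the destination scatterlist must leave room for the
 * authentication tag (crypto_aead_authsize() bytes) after the ciphertext.
 */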
static inline struct crypto_blkcipher *__crypto_blkcipher_cast(
	struct crypto_tfm *tfm)
{
	return (struct crypto_blkcipher *)tfm;
}

static inline struct crypto_blkcipher *crypto_blkcipher_cast(
	struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_BLKCIPHER);
	return __crypto_blkcipher_cast(tfm);
}

static inline struct crypto_blkcipher *crypto_alloc_blkcipher(
	const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_blkcipher_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_blkcipher_tfm(
	struct crypto_blkcipher *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_blkcipher(struct crypto_blkcipher *tfm)
{
	crypto_free_tfm(crypto_blkcipher_tfm(tfm));
}

static inline int crypto_has_blkcipher(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline const char *crypto_blkcipher_name(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_alg_name(crypto_blkcipher_tfm(tfm));
}

static inline struct blkcipher_tfm *crypto_blkcipher_crt(
	struct crypto_blkcipher *tfm)
{
	return &crypto_blkcipher_tfm(tfm)->crt_blkcipher;
}

static inline struct blkcipher_alg *crypto_blkcipher_alg(
	struct crypto_blkcipher *tfm)
{
	return &crypto_blkcipher_tfm(tfm)->__crt_alg->cra_blkcipher;
}

static inline unsigned int crypto_blkcipher_ivsize(struct crypto_blkcipher *tfm)
{
	return crypto_blkcipher_alg(tfm)->ivsize;
}

static inline unsigned int crypto_blkcipher_blocksize(
	struct crypto_blkcipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_blkcipher_tfm(tfm));
}

static inline unsigned int crypto_blkcipher_alignmask(
	struct crypto_blkcipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_blkcipher_tfm(tfm));
}

static inline u32 crypto_blkcipher_get_flags(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_get_flags(crypto_blkcipher_tfm(tfm));
}

static inline void crypto_blkcipher_set_flags(struct crypto_blkcipher *tfm,
					      u32 flags)
{
	crypto_tfm_set_flags(crypto_blkcipher_tfm(tfm), flags);
}

static inline void crypto_blkcipher_clear_flags(struct crypto_blkcipher *tfm,
						u32 flags)
{
	crypto_tfm_clear_flags(crypto_blkcipher_tfm(tfm), flags);
}

static inline int crypto_blkcipher_setkey(struct crypto_blkcipher *tfm,
					  const u8 *key, unsigned int keylen)
{
	return crypto_blkcipher_crt(tfm)->setkey(crypto_blkcipher_tfm(tfm),
						 key, keylen);
}

static inline int crypto_blkcipher_encrypt(struct blkcipher_desc *desc,
					   struct scatterlist *dst,
					   struct scatterlist *src,
					   unsigned int nbytes)
{
	desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
	return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
}

static inline int crypto_blkcipher_encrypt_iv(struct blkcipher_desc *desc,
					      struct scatterlist *dst,
					      struct scatterlist *src,
					      unsigned int nbytes)
{
	return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
}

static inline int crypto_blkcipher_decrypt(struct blkcipher_desc *desc,
					   struct scatterlist *dst,
					   struct scatterlist *src,
					   unsigned int nbytes)
{
	desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
	return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
}

static inline int crypto_blkcipher_decrypt_iv(struct blkcipher_desc *desc,
					      struct scatterlist *dst,
					      struct scatterlist *src,
					      unsigned int nbytes)
{
	return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
}

static inline void crypto_blkcipher_set_iv(struct crypto_blkcipher *tfm,
					   const u8 *src, unsigned int len)
{
	memcpy(crypto_blkcipher_crt(tfm)->iv, src, len);
}

static inline void crypto_blkcipher_get_iv(struct crypto_blkcipher *tfm,
					   u8 *dst, unsigned int len)
{
	memcpy(dst, crypto_blkcipher_crt(tfm)->iv, len);
}
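
/*
 * Example (illustrative sketch): synchronous block cipher usage.  The
 * "cbc(aes)" name and the key/IV sizes are example choices and error
 * handling is abbreviated.
 *
 *	struct crypto_blkcipher *tfm;
 *	struct blkcipher_desc desc;
 *	int err;
 *
 *	tfm = crypto_alloc_blkcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_blkcipher_setkey(tfm, key, 16);
 *	if (err)
 *		goto out;
 *
 *	crypto_blkcipher_set_iv(tfm, iv, crypto_blkcipher_ivsize(tfm));
 *
 *	desc.tfm = tfm;
 *	desc.flags = 0;
 *	err = crypto_blkcipher_encrypt(&desc, sg, sg, nbytes);
 * out:
 *	crypto_free_blkcipher(tfm);
 *	return err;
 *
 * crypto_blkcipher_encrypt_iv()/crypto_blkcipher_decrypt_iv() are used
 * instead when the caller supplies the IV through desc.info rather than via
 * the per-tfm IV set with crypto_blkcipher_set_iv().
 */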
static inline struct crypto_cipher *__crypto_cipher_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_cipher *)tfm;
}

static inline struct crypto_cipher *crypto_cipher_cast(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return __crypto_cipher_cast(tfm);
}

static inline struct crypto_cipher *crypto_alloc_cipher(const char *alg_name,
							u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_CIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_cipher_tfm(struct crypto_cipher *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_cipher(struct crypto_cipher *tfm)
{
	crypto_free_tfm(crypto_cipher_tfm(tfm));
}

static inline int crypto_has_cipher(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_CIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline struct cipher_tfm *crypto_cipher_crt(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->crt_cipher;
}

static inline unsigned int crypto_cipher_blocksize(struct crypto_cipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_cipher_tfm(tfm));
}

static inline unsigned int crypto_cipher_alignmask(struct crypto_cipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_cipher_tfm(tfm));
}

static inline u32 crypto_cipher_get_flags(struct crypto_cipher *tfm)
{
	return crypto_tfm_get_flags(crypto_cipher_tfm(tfm));
}

static inline void crypto_cipher_set_flags(struct crypto_cipher *tfm,
					   u32 flags)
{
	crypto_tfm_set_flags(crypto_cipher_tfm(tfm), flags);
}

static inline void crypto_cipher_clear_flags(struct crypto_cipher *tfm,
					     u32 flags)
{
	crypto_tfm_clear_flags(crypto_cipher_tfm(tfm), flags);
}

static inline int crypto_cipher_setkey(struct crypto_cipher *tfm,
				       const u8 *key, unsigned int keylen)
{
	return crypto_cipher_crt(tfm)->cit_setkey(crypto_cipher_tfm(tfm),
						  key, keylen);
}

static inline void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
					     u8 *dst, const u8 *src)
{
	crypto_cipher_crt(tfm)->cit_encrypt_one(crypto_cipher_tfm(tfm),
						dst, src);
}

static inline void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
					     u8 *dst, const u8 *src)
{
	crypto_cipher_crt(tfm)->cit_decrypt_one(crypto_cipher_tfm(tfm),
						dst, src);
}
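
/*
 * Example (illustrative sketch): a single-block cipher operates on exactly
 * crypto_cipher_blocksize() bytes at a time, with no chaining mode or IV.
 * "aes" and the 16-byte key/block are example choices.
 *
 *	struct crypto_cipher *tfm;
 *	u8 block[16];
 *	int err;
 *
 *	tfm = crypto_alloc_cipher("aes", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_cipher_setkey(tfm, key, 16);
 *	if (!err)
 *		crypto_cipher_encrypt_one(tfm, block, block);
 *
 *	crypto_free_cipher(tfm);
 *	return err;
 */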
static inline struct crypto_hash *__crypto_hash_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_hash *)tfm;
}

static inline struct crypto_hash *crypto_hash_cast(struct crypto_tfm *tfm)
{
	BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_HASH) &
	       CRYPTO_ALG_TYPE_HASH_MASK);
	return __crypto_hash_cast(tfm);
}

static inline struct crypto_hash *crypto_alloc_hash(const char *alg_name,
						    u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	mask &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_HASH;
	mask |= CRYPTO_ALG_TYPE_HASH_MASK;

	return __crypto_hash_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_hash_tfm(struct crypto_hash *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_hash(struct crypto_hash *tfm)
{
	crypto_free_tfm(crypto_hash_tfm(tfm));
}

static inline int crypto_has_hash(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	mask &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_HASH;
	mask |= CRYPTO_ALG_TYPE_HASH_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline struct hash_tfm *crypto_hash_crt(struct crypto_hash *tfm)
{
	return &crypto_hash_tfm(tfm)->crt_hash;
}

static inline unsigned int crypto_hash_blocksize(struct crypto_hash *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_hash_tfm(tfm));
}

static inline unsigned int crypto_hash_alignmask(struct crypto_hash *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_hash_tfm(tfm));
}

static inline unsigned int crypto_hash_digestsize(struct crypto_hash *tfm)
{
	return crypto_hash_crt(tfm)->digestsize;
}

static inline u32 crypto_hash_get_flags(struct crypto_hash *tfm)
{
	return crypto_tfm_get_flags(crypto_hash_tfm(tfm));
}

static inline void crypto_hash_set_flags(struct crypto_hash *tfm, u32 flags)
{
	crypto_tfm_set_flags(crypto_hash_tfm(tfm), flags);
}

static inline void crypto_hash_clear_flags(struct crypto_hash *tfm, u32 flags)
{
	crypto_tfm_clear_flags(crypto_hash_tfm(tfm), flags);
}

static inline int crypto_hash_init(struct hash_desc *desc)
{
	return crypto_hash_crt(desc->tfm)->init(desc);
}

static inline int crypto_hash_update(struct hash_desc *desc,
				     struct scatterlist *sg,
				     unsigned int nbytes)
{
	return crypto_hash_crt(desc->tfm)->update(desc, sg, nbytes);
}

static inline int crypto_hash_final(struct hash_desc *desc, u8 *out)
{
	return crypto_hash_crt(desc->tfm)->final(desc, out);
}

static inline int crypto_hash_digest(struct hash_desc *desc,
				     struct scatterlist *sg,
				     unsigned int nbytes, u8 *out)
{
	return crypto_hash_crt(desc->tfm)->digest(desc, sg, nbytes, out);
}

static inline int crypto_hash_setkey(struct crypto_hash *hash,
				     const u8 *key, unsigned int keylen)
{
	return crypto_hash_crt(hash)->setkey(hash, key, keylen);
}
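
/*
 * Example (illustrative sketch): computing a digest over a scatterlist.
 * "sha1" and its 20-byte digest are example choices; error handling is
 * abbreviated.
 *
 *	struct crypto_hash *tfm;
 *	struct hash_desc desc;
 *	u8 digest[20];
 *	int err;
 *
 *	tfm = crypto_alloc_hash("sha1", 0, CRYPTO_ALG_ASYNC);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	desc.tfm = tfm;
 *	desc.flags = 0;
 *	err = crypto_hash_digest(&desc, sg, nbytes, digest);
 *
 *	crypto_free_hash(tfm);
 *	return err;
 *
 * For incremental hashing use crypto_hash_init(), any number of
 * crypto_hash_update() calls and a final crypto_hash_final(); keyed hashes
 * such as "hmac(sha1)" additionally require crypto_hash_setkey().
 */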
static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_comp *)tfm;
}

static inline struct crypto_comp *crypto_comp_cast(struct crypto_tfm *tfm)
{
	BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_COMPRESS) &
	       CRYPTO_ALG_TYPE_MASK);
	return __crypto_comp_cast(tfm);
}

static inline struct crypto_comp *crypto_alloc_comp(const char *alg_name,
						    u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_comp_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_comp_tfm(struct crypto_comp *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_comp(struct crypto_comp *tfm)
{
	crypto_free_tfm(crypto_comp_tfm(tfm));
}

static inline int crypto_has_comp(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline const char *crypto_comp_name(struct crypto_comp *tfm)
{
	return crypto_tfm_alg_name(crypto_comp_tfm(tfm));
}

static inline struct compress_tfm *crypto_comp_crt(struct crypto_comp *tfm)
{
	return &crypto_comp_tfm(tfm)->crt_compress;
}

static inline int crypto_comp_compress(struct crypto_comp *tfm,
				       const u8 *src, unsigned int slen,
				       u8 *dst, unsigned int *dlen)
{
	return crypto_comp_crt(tfm)->cot_compress(crypto_comp_tfm(tfm),
						  src, slen, dst, dlen);
}

static inline int crypto_comp_decompress(struct crypto_comp *tfm,
					 const u8 *src, unsigned int slen,
					 u8 *dst, unsigned int *dlen)
{
	return crypto_comp_crt(tfm)->cot_decompress(crypto_comp_tfm(tfm),
						    src, slen, dst, dlen);
}

#endif	/* _LINUX_CRYPTO_H */