/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <[email protected]>
 * Copyright (c) 2002 David S. Miller ([email protected])
 * Copyright (c) 2005 Herbert Xu <[email protected]>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <[email protected]>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _LINUX_CRYPTO_H
#define _LINUX_CRYPTO_H

#include <linux/atomic.h>
#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/uaccess.h>

/*
 * Algorithm masks and types.
 */
#define CRYPTO_ALG_TYPE_MASK		0x0000000f
#define CRYPTO_ALG_TYPE_CIPHER		0x00000001
#define CRYPTO_ALG_TYPE_COMPRESS	0x00000002
#define CRYPTO_ALG_TYPE_AEAD		0x00000003
#define CRYPTO_ALG_TYPE_BLKCIPHER	0x00000004
#define CRYPTO_ALG_TYPE_ABLKCIPHER	0x00000005
#define CRYPTO_ALG_TYPE_GIVCIPHER	0x00000006
#define CRYPTO_ALG_TYPE_DIGEST		0x00000008
#define CRYPTO_ALG_TYPE_HASH		0x00000008
#define CRYPTO_ALG_TYPE_SHASH		0x00000009
#define CRYPTO_ALG_TYPE_AHASH		0x0000000a
#define CRYPTO_ALG_TYPE_RNG		0x0000000c
#define CRYPTO_ALG_TYPE_PCOMPRESS	0x0000000f

#define CRYPTO_ALG_TYPE_HASH_MASK	0x0000000e
#define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000c
#define CRYPTO_ALG_TYPE_BLKCIPHER_MASK	0x0000000c

#define CRYPTO_ALG_LARVAL		0x00000010
#define CRYPTO_ALG_DEAD			0x00000020
#define CRYPTO_ALG_DYING		0x00000040
#define CRYPTO_ALG_ASYNC		0x00000080

/*
 * Set this bit if and only if the algorithm requires another algorithm of
 * the same type to handle corner cases.
 */
#define CRYPTO_ALG_NEED_FALLBACK	0x00000100

/*
 * This bit is set for symmetric key ciphers that have already been wrapped
 * with a generic IV generator to prevent them from being wrapped again.
 */
#define CRYPTO_ALG_GENIV		0x00000200

/*
 * Set if the algorithm has passed automated run-time testing.  Note that
 * if there is no run-time testing for a given algorithm it is considered
 * to have passed.
 */

#define CRYPTO_ALG_TESTED		0x00000400

/*
 * Transform masks and values (for crt_flags).
 */
#define CRYPTO_TFM_REQ_MASK		0x000fff00
#define CRYPTO_TFM_RES_MASK		0xfff00000

#define CRYPTO_TFM_REQ_WEAK_KEY		0x00000100
#define CRYPTO_TFM_REQ_MAY_SLEEP	0x00000200
#define CRYPTO_TFM_REQ_MAY_BACKLOG	0x00000400
#define CRYPTO_TFM_RES_WEAK_KEY		0x00100000
#define CRYPTO_TFM_RES_BAD_KEY_LEN	0x00200000
#define CRYPTO_TFM_RES_BAD_KEY_SCHED	0x00400000
#define CRYPTO_TFM_RES_BAD_BLOCK_LEN	0x00800000
#define CRYPTO_TFM_RES_BAD_FLAGS	0x01000000

/*
 * Miscellaneous stuff.
 */
#define CRYPTO_MAX_ALG_NAME		64

/*
 * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
 * declaration) is used to ensure that the crypto_tfm context structure is
 * aligned correctly for the given architecture so that there are no alignment
 * faults for C data types.  In particular, this is required on platforms such
 * as arm where pointers are 32-bit aligned but there are data types such as
 * u64 which require 64-bit alignment.
 */
#define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN

#define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))
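
/*
 * Illustrative sketch (not part of this header): because the per-transform
 * context obtained via crypto_tfm_ctx() below starts on a CRYPTO_MINALIGN
 * boundary, a hypothetical driver context containing a u64 needs no extra
 * alignment handling.  The structure and function names here are made up
 * for illustration only.
 *
 *	struct example_cipher_ctx {
 *		u64 byte_count;		// 64-bit member is safe to access
 *		u32 key_enc[60];
 *	};
 *
 *	static int example_setkey(struct crypto_tfm *tfm, const u8 *key,
 *				  unsigned int keylen)
 *	{
 *		struct example_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
 *
 *		ctx->byte_count = 0;
 *		return 0;
 *	}
 */
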
struct scatterlist;
struct crypto_ablkcipher;
struct crypto_async_request;
struct crypto_aead;
struct crypto_blkcipher;
struct crypto_hash;
struct crypto_rng;
struct crypto_tfm;
struct crypto_type;
struct aead_givcrypt_request;
struct skcipher_givcrypt_request;

typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);

struct crypto_async_request {
	struct list_head list;
	crypto_completion_t complete;
	void *data;
	struct crypto_tfm *tfm;

	u32 flags;
};

struct ablkcipher_request {
	struct crypto_async_request base;

	unsigned int nbytes;

	void *info;

	struct scatterlist *src;
	struct scatterlist *dst;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

/**
 * struct aead_request - AEAD request
 * @base: Common attributes for async crypto requests
 * @assoclen: Length in bytes of associated data for authentication
 * @cryptlen: Length of data to be encrypted or decrypted
 * @iv: Initialisation vector
 * @assoc: Associated data
 * @src: Source data
 * @dst: Destination data
 * @__ctx: Start of private context data
 */
struct aead_request {
	struct crypto_async_request base;

	unsigned int assoclen;
	unsigned int cryptlen;

	u8 *iv;

	struct scatterlist *assoc;
	struct scatterlist *src;
	struct scatterlist *dst;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct blkcipher_desc {
	struct crypto_blkcipher *tfm;
	void *info;
	u32 flags;
};

struct cipher_desc {
	struct crypto_tfm *tfm;
	void (*crfn)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst,
			     const u8 *src, unsigned int nbytes);
	void *info;
};

struct hash_desc {
	struct crypto_hash *tfm;
	u32 flags;
};
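
/*
 * Illustrative sketch (not part of this header): an asynchronous request
 * completion handler matching crypto_completion_t above.  The handler runs
 * when an asynchronous transform finishes; req->data carries whatever
 * pointer the caller registered via the *_request_set_callback() helpers
 * further down.  The struct and function names are made up, and the
 * completion primitive assumes <linux/completion.h>.
 *
 *	static void example_op_done(struct crypto_async_request *req, int err)
 *	{
 *		struct example_op *op = req->data;
 *
 *		if (err == -EINPROGRESS)
 *			return;		// backlogged request is now in flight
 *
 *		op->err = err;
 *		complete(&op->completion);
 *	}
 */
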
/*
 * Algorithms: modular crypto algorithm implementations, managed
 * via crypto_register_alg() and crypto_unregister_alg().
 */
struct ablkcipher_alg {
	int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct ablkcipher_request *req);
	int (*decrypt)(struct ablkcipher_request *req);
	int (*givencrypt)(struct skcipher_givcrypt_request *req);
	int (*givdecrypt)(struct skcipher_givcrypt_request *req);

	const char *geniv;

	unsigned int min_keysize;
	unsigned int max_keysize;
	unsigned int ivsize;
};

struct aead_alg {
	int (*setkey)(struct crypto_aead *tfm, const u8 *key,
		      unsigned int keylen);
	int (*setauthsize)(struct crypto_aead *tfm, unsigned int authsize);
	int (*encrypt)(struct aead_request *req);
	int (*decrypt)(struct aead_request *req);
	int (*givencrypt)(struct aead_givcrypt_request *req);
	int (*givdecrypt)(struct aead_givcrypt_request *req);

	const char *geniv;

	unsigned int ivsize;
	unsigned int maxauthsize;
};

struct blkcipher_alg {
	int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes);
	int (*decrypt)(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes);

	const char *geniv;

	unsigned int min_keysize;
	unsigned int max_keysize;
	unsigned int ivsize;
};

struct cipher_alg {
	unsigned int cia_min_keysize;
	unsigned int cia_max_keysize;
	int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
			  unsigned int keylen);
	void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};

struct compress_alg {
	int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
			    unsigned int slen, u8 *dst, unsigned int *dlen);
	int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
			      unsigned int slen, u8 *dst, unsigned int *dlen);
};

struct rng_alg {
	int (*rng_make_random)(struct crypto_rng *tfm, u8 *rdata,
			       unsigned int dlen);
	int (*rng_reset)(struct crypto_rng *tfm, u8 *seed, unsigned int slen);

	unsigned int seedsize;
};


#define cra_ablkcipher	cra_u.ablkcipher
#define cra_aead	cra_u.aead
#define cra_blkcipher	cra_u.blkcipher
#define cra_cipher	cra_u.cipher
#define cra_compress	cra_u.compress
#define cra_rng		cra_u.rng

struct crypto_alg {
	struct list_head cra_list;
	struct list_head cra_users;

	u32 cra_flags;
	unsigned int cra_blocksize;
	unsigned int cra_ctxsize;
	unsigned int cra_alignmask;

	int cra_priority;
	atomic_t cra_refcnt;

	char cra_name[CRYPTO_MAX_ALG_NAME];
	char cra_driver_name[CRYPTO_MAX_ALG_NAME];

	const struct crypto_type *cra_type;

	union {
		struct ablkcipher_alg ablkcipher;
		struct aead_alg aead;
		struct blkcipher_alg blkcipher;
		struct cipher_alg cipher;
		struct compress_alg compress;
		struct rng_alg rng;
	} cra_u;

	int (*cra_init)(struct crypto_tfm *tfm);
	void (*cra_exit)(struct crypto_tfm *tfm);
	void (*cra_destroy)(struct crypto_alg *alg);

	struct module *cra_module;
};

/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
int crypto_unregister_alg(struct crypto_alg *alg);
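
/*
 * Illustrative sketch (not part of this header): registering a hypothetical
 * single-block cipher implementation.  All names other than the fields and
 * functions declared above are made up for illustration.
 *
 *	static struct crypto_alg example_alg = {
 *		.cra_name		= "example",
 *		.cra_driver_name	= "example-generic",
 *		.cra_priority		= 100,
 *		.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
 *		.cra_blocksize		= 16,
 *		.cra_ctxsize		= sizeof(struct example_cipher_ctx),
 *		.cra_module		= THIS_MODULE,
 *		.cra_u			= { .cipher = {
 *			.cia_min_keysize	= 16,
 *			.cia_max_keysize	= 32,
 *			.cia_setkey		= example_setkey,
 *			.cia_encrypt		= example_encrypt,
 *			.cia_decrypt		= example_decrypt } }
 *	};
 *
 *	static int __init example_init(void)
 *	{
 *		return crypto_register_alg(&example_alg);
 *	}
 *
 *	static void __exit example_exit(void)
 *	{
 *		crypto_unregister_alg(&example_alg);
 *	}
 */
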
/*
 * Algorithm query interface.
 */
int crypto_has_alg(const char *name, u32 type, u32 mask);

/*
 * Transforms: user-instantiated objects which encapsulate algorithms
 * and core processing logic.  Managed via crypto_alloc_*() and
 * crypto_free_*(), as well as the various helpers below.
 */

struct ablkcipher_tfm {
	int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct ablkcipher_request *req);
	int (*decrypt)(struct ablkcipher_request *req);
	int (*givencrypt)(struct skcipher_givcrypt_request *req);
	int (*givdecrypt)(struct skcipher_givcrypt_request *req);

	struct crypto_ablkcipher *base;

	unsigned int ivsize;
	unsigned int reqsize;
};

struct aead_tfm {
	int (*setkey)(struct crypto_aead *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct aead_request *req);
	int (*decrypt)(struct aead_request *req);
	int (*givencrypt)(struct aead_givcrypt_request *req);
	int (*givdecrypt)(struct aead_givcrypt_request *req);

	struct crypto_aead *base;

	unsigned int ivsize;
	unsigned int authsize;
	unsigned int reqsize;
};

struct blkcipher_tfm {
	void *iv;
	int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes);
	int (*decrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes);
};

struct cipher_tfm {
	int (*cit_setkey)(struct crypto_tfm *tfm,
			  const u8 *key, unsigned int keylen);
	void (*cit_encrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	void (*cit_decrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};

struct hash_tfm {
	int (*init)(struct hash_desc *desc);
	int (*update)(struct hash_desc *desc,
		      struct scatterlist *sg, unsigned int nsg);
	int (*final)(struct hash_desc *desc, u8 *out);
	int (*digest)(struct hash_desc *desc, struct scatterlist *sg,
		      unsigned int nsg, u8 *out);
	int (*setkey)(struct crypto_hash *tfm, const u8 *key,
		      unsigned int keylen);
	unsigned int digestsize;
};

struct compress_tfm {
	int (*cot_compress)(struct crypto_tfm *tfm,
			    const u8 *src, unsigned int slen,
			    u8 *dst, unsigned int *dlen);
	int (*cot_decompress)(struct crypto_tfm *tfm,
			      const u8 *src, unsigned int slen,
			      u8 *dst, unsigned int *dlen);
};

struct rng_tfm {
	int (*rng_gen_random)(struct crypto_rng *tfm, u8 *rdata,
			      unsigned int dlen);
	int (*rng_reset)(struct crypto_rng *tfm, u8 *seed, unsigned int slen);
};

#define crt_ablkcipher	crt_u.ablkcipher
#define crt_aead	crt_u.aead
#define crt_blkcipher	crt_u.blkcipher
#define crt_cipher	crt_u.cipher
#define crt_hash	crt_u.hash
#define crt_compress	crt_u.compress
#define crt_rng		crt_u.rng

struct crypto_tfm {

	u32 crt_flags;

	union {
		struct ablkcipher_tfm ablkcipher;
		struct aead_tfm aead;
		struct blkcipher_tfm blkcipher;
		struct cipher_tfm cipher;
		struct hash_tfm hash;
		struct compress_tfm compress;
		struct rng_tfm rng;
	} crt_u;

	void (*exit)(struct crypto_tfm *tfm);

	struct crypto_alg *__crt_alg;

	void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_ablkcipher {
	struct crypto_tfm base;
};

struct crypto_aead {
	struct crypto_tfm base;
};

struct crypto_blkcipher {
	struct crypto_tfm base;
};

struct crypto_cipher {
	struct crypto_tfm base;
};

struct crypto_comp {
	struct crypto_tfm base;
};

struct crypto_hash {
	struct crypto_tfm base;
};

struct crypto_rng {
	struct crypto_tfm base;
};

enum {
	CRYPTOA_UNSPEC,
	CRYPTOA_ALG,
	CRYPTOA_TYPE,
	CRYPTOA_U32,
	__CRYPTOA_MAX,
};

#define CRYPTOA_MAX (__CRYPTOA_MAX - 1)

/* Maximum number of (rtattr) parameters for each template. */
#define CRYPTO_MAX_ATTRS 32

struct crypto_attr_alg {
	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_attr_type {
	u32 type;
	u32 mask;
};

struct crypto_attr_u32 {
	u32 num;
};

/*
 * Transform user interface.
 */

struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm);

static inline void crypto_free_tfm(struct crypto_tfm *tfm)
{
	return crypto_destroy_tfm(tfm, tfm);
}

int alg_test(const char *driver, const char *alg, u32 type, u32 mask);

/*
 * Transform helpers which query the underlying algorithm.
 */
static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_name;
}

static inline const char *crypto_tfm_alg_driver_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_driver_name;
}

static inline int crypto_tfm_alg_priority(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_priority;
}

static inline const char *crypto_tfm_alg_modname(struct crypto_tfm *tfm)
{
	return module_name(tfm->__crt_alg->cra_module);
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_blocksize;
}

static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_alignmask;
}

static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm)
{
	return tfm->crt_flags;
}

static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags |= flags;
}

static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags &= ~flags;
}

static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
	return tfm->__crt_ctx;
}

static inline unsigned int crypto_tfm_ctx_alignment(void)
{
	struct crypto_tfm *tfm;
	return __alignof__(tfm->__crt_ctx);
}
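
/*
 * Illustrative sketch (not part of this header): allocating a transform by
 * name and querying its properties with the helpers above.  Most users
 * should prefer the typed wrappers below (crypto_alloc_cipher(),
 * crypto_alloc_hash(), ...); this only shows the generic interface.
 * Assumes <linux/err.h> for IS_ERR()/PTR_ERR(); the algorithm name is an
 * example.
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_base("aes", CRYPTO_ALG_TYPE_CIPHER,
 *				CRYPTO_ALG_TYPE_MASK);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	pr_debug("using %s (block size %u, priority %d)\n",
 *		 crypto_tfm_alg_driver_name(tfm),
 *		 crypto_tfm_alg_blocksize(tfm),
 *		 crypto_tfm_alg_priority(tfm));
 *
 *	crypto_free_tfm(tfm);
 */
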
/*
 * API wrappers.
 */
static inline struct crypto_ablkcipher *__crypto_ablkcipher_cast(
	struct crypto_tfm *tfm)
{
	return (struct crypto_ablkcipher *)tfm;
}

static inline u32 crypto_skcipher_type(u32 type)
{
	type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
	return type;
}

static inline u32 crypto_skcipher_mask(u32 mask)
{
	mask &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
	mask |= CRYPTO_ALG_TYPE_BLKCIPHER_MASK;
	return mask;
}

struct crypto_ablkcipher *crypto_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask);

static inline struct crypto_tfm *crypto_ablkcipher_tfm(
	struct crypto_ablkcipher *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_ablkcipher(struct crypto_ablkcipher *tfm)
{
	crypto_free_tfm(crypto_ablkcipher_tfm(tfm));
}

static inline int crypto_has_ablkcipher(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_has_alg(alg_name, crypto_skcipher_type(type),
			      crypto_skcipher_mask(mask));
}

static inline struct ablkcipher_tfm *crypto_ablkcipher_crt(
	struct crypto_ablkcipher *tfm)
{
	return &crypto_ablkcipher_tfm(tfm)->crt_ablkcipher;
}

static inline unsigned int crypto_ablkcipher_ivsize(
	struct crypto_ablkcipher *tfm)
{
	return crypto_ablkcipher_crt(tfm)->ivsize;
}

static inline unsigned int crypto_ablkcipher_blocksize(
	struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_ablkcipher_tfm(tfm));
}

static inline unsigned int crypto_ablkcipher_alignmask(
	struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_ablkcipher_tfm(tfm));
}

static inline u32 crypto_ablkcipher_get_flags(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_get_flags(crypto_ablkcipher_tfm(tfm));
}

static inline void crypto_ablkcipher_set_flags(struct crypto_ablkcipher *tfm,
					       u32 flags)
{
	crypto_tfm_set_flags(crypto_ablkcipher_tfm(tfm), flags);
}

static inline void crypto_ablkcipher_clear_flags(struct crypto_ablkcipher *tfm,
						 u32 flags)
{
	crypto_tfm_clear_flags(crypto_ablkcipher_tfm(tfm), flags);
}

static inline int crypto_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
					   const u8 *key, unsigned int keylen)
{
	struct ablkcipher_tfm *crt = crypto_ablkcipher_crt(tfm);

	return crt->setkey(crt->base, key, keylen);
}

static inline struct crypto_ablkcipher *crypto_ablkcipher_reqtfm(
	struct ablkcipher_request *req)
{
	return __crypto_ablkcipher_cast(req->base.tfm);
}

static inline int crypto_ablkcipher_encrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_tfm *crt =
		crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
	return crt->encrypt(req);
}

static inline int crypto_ablkcipher_decrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_tfm *crt =
		crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
	return crt->decrypt(req);
}

static inline unsigned int crypto_ablkcipher_reqsize(
	struct crypto_ablkcipher *tfm)
{
	return crypto_ablkcipher_crt(tfm)->reqsize;
}

static inline void ablkcipher_request_set_tfm(
	struct ablkcipher_request *req, struct crypto_ablkcipher *tfm)
{
	req->base.tfm = crypto_ablkcipher_tfm(crypto_ablkcipher_crt(tfm)->base);
}

static inline struct ablkcipher_request *ablkcipher_request_cast(
	struct crypto_async_request *req)
{
	return container_of(req, struct ablkcipher_request, base);
}

static inline struct ablkcipher_request *ablkcipher_request_alloc(
	struct crypto_ablkcipher *tfm, gfp_t gfp)
{
	struct ablkcipher_request *req;

	req = kmalloc(sizeof(struct ablkcipher_request) +
		      crypto_ablkcipher_reqsize(tfm), gfp);

	if (likely(req))
		ablkcipher_request_set_tfm(req, tfm);

	return req;
}

static inline void ablkcipher_request_free(struct ablkcipher_request *req)
{
	kzfree(req);
}

static inline void ablkcipher_request_set_callback(
	struct ablkcipher_request *req,
	u32 flags, crypto_completion_t complete, void *data)
{
	req->base.complete = complete;
	req->base.data = data;
	req->base.flags = flags;
}

static inline void ablkcipher_request_set_crypt(
	struct ablkcipher_request *req,
	struct scatterlist *src, struct scatterlist *dst,
	unsigned int nbytes, void *iv)
{
	req->src = src;
	req->dst = dst;
	req->nbytes = nbytes;
	req->info = iv;
}
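
/*
 * Illustrative sketch (not part of this header): one asynchronous
 * encryption through the ablkcipher interface above.  The algorithm name,
 * key/IV/scatterlist setup and the completion handling are examples only;
 * assumes <linux/err.h>, <linux/scatterlist.h> and <linux/completion.h>,
 * and reuses the made-up example_op_done()/example_wait_for_op() helpers.
 *
 *	struct crypto_ablkcipher *tfm;
 *	struct ablkcipher_request *req;
 *	int err;
 *
 *	tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_ablkcipher_setkey(tfm, key, keylen);
 *	if (err)
 *		goto out_free_tfm;
 *
 *	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
 *	if (!req) {
 *		err = -ENOMEM;
 *		goto out_free_tfm;
 *	}
 *
 *	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *					example_op_done, &op);
 *	ablkcipher_request_set_crypt(req, src_sg, dst_sg, nbytes, iv);
 *
 *	err = crypto_ablkcipher_encrypt(req);
 *	if (err == -EINPROGRESS || err == -EBUSY)
 *		err = example_wait_for_op(&op);	// wait for example_op_done()
 *
 *	ablkcipher_request_free(req);
 * out_free_tfm:
 *	crypto_free_ablkcipher(tfm);
 *	return err;
 */
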
static inline struct crypto_aead *__crypto_aead_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_aead *)tfm;
}

struct crypto_aead *crypto_alloc_aead(const char *alg_name, u32 type, u32 mask);

static inline struct crypto_tfm *crypto_aead_tfm(struct crypto_aead *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_aead(struct crypto_aead *tfm)
{
	crypto_free_tfm(crypto_aead_tfm(tfm));
}

static inline struct aead_tfm *crypto_aead_crt(struct crypto_aead *tfm)
{
	return &crypto_aead_tfm(tfm)->crt_aead;
}

static inline unsigned int crypto_aead_ivsize(struct crypto_aead *tfm)
{
	return crypto_aead_crt(tfm)->ivsize;
}

static inline unsigned int crypto_aead_authsize(struct crypto_aead *tfm)
{
	return crypto_aead_crt(tfm)->authsize;
}

static inline unsigned int crypto_aead_blocksize(struct crypto_aead *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_aead_tfm(tfm));
}

static inline unsigned int crypto_aead_alignmask(struct crypto_aead *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_aead_tfm(tfm));
}

static inline u32 crypto_aead_get_flags(struct crypto_aead *tfm)
{
	return crypto_tfm_get_flags(crypto_aead_tfm(tfm));
}

static inline void crypto_aead_set_flags(struct crypto_aead *tfm, u32 flags)
{
	crypto_tfm_set_flags(crypto_aead_tfm(tfm), flags);
}

static inline void crypto_aead_clear_flags(struct crypto_aead *tfm, u32 flags)
{
	crypto_tfm_clear_flags(crypto_aead_tfm(tfm), flags);
}

static inline int crypto_aead_setkey(struct crypto_aead *tfm, const u8 *key,
				     unsigned int keylen)
{
	struct aead_tfm *crt = crypto_aead_crt(tfm);

	return crt->setkey(crt->base, key, keylen);
}

int crypto_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize);

static inline struct crypto_aead *crypto_aead_reqtfm(struct aead_request *req)
{
	return __crypto_aead_cast(req->base.tfm);
}

static inline int crypto_aead_encrypt(struct aead_request *req)
{
	return crypto_aead_crt(crypto_aead_reqtfm(req))->encrypt(req);
}

static inline int crypto_aead_decrypt(struct aead_request *req)
{
	return crypto_aead_crt(crypto_aead_reqtfm(req))->decrypt(req);
}

static inline unsigned int crypto_aead_reqsize(struct crypto_aead *tfm)
{
	return crypto_aead_crt(tfm)->reqsize;
}

static inline void aead_request_set_tfm(struct aead_request *req,
					struct crypto_aead *tfm)
{
	req->base.tfm = crypto_aead_tfm(crypto_aead_crt(tfm)->base);
}

static inline struct aead_request *aead_request_alloc(struct crypto_aead *tfm,
						      gfp_t gfp)
{
	struct aead_request *req;

	req = kmalloc(sizeof(*req) + crypto_aead_reqsize(tfm), gfp);

	if (likely(req))
		aead_request_set_tfm(req, tfm);

	return req;
}

static inline void aead_request_free(struct aead_request *req)
{
	kzfree(req);
}

static inline void aead_request_set_callback(struct aead_request *req,
					     u32 flags,
					     crypto_completion_t complete,
					     void *data)
{
	req->base.complete = complete;
	req->base.data = data;
	req->base.flags = flags;
}

static inline void aead_request_set_crypt(struct aead_request *req,
					  struct scatterlist *src,
					  struct scatterlist *dst,
					  unsigned int cryptlen, u8 *iv)
{
	req->src = src;
	req->dst = dst;
	req->cryptlen = cryptlen;
	req->iv = iv;
}

static inline void aead_request_set_assoc(struct aead_request *req,
					  struct scatterlist *assoc,
					  unsigned int assoclen)
{
	req->assoc = assoc;
	req->assoclen = assoclen;
}
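
/*
 * Illustrative sketch (not part of this header): one AEAD encryption
 * (ciphertext plus authentication tag) using the helpers above.  The
 * algorithm name, buffers and completion handling are examples only;
 * assumes <linux/err.h> and <linux/scatterlist.h>.  On encryption, dst_sg
 * must provide room for cryptlen plus the authentication tag.
 *
 *	struct crypto_aead *tfm;
 *	struct aead_request *req;
 *	int err;
 *
 *	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_aead_setkey(tfm, key, keylen);
 *	if (!err)
 *		err = crypto_aead_setauthsize(tfm, 16);
 *	if (err)
 *		goto out_free_tfm;
 *
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	if (!req) {
 *		err = -ENOMEM;
 *		goto out_free_tfm;
 *	}
 *
 *	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				  example_op_done, &op);
 *	aead_request_set_crypt(req, src_sg, dst_sg, cryptlen, iv);
 *	aead_request_set_assoc(req, assoc_sg, assoclen);
 *
 *	err = crypto_aead_encrypt(req);
 *	if (err == -EINPROGRESS || err == -EBUSY)
 *		err = example_wait_for_op(&op);
 *
 *	aead_request_free(req);
 * out_free_tfm:
 *	crypto_free_aead(tfm);
 *	return err;
 */
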
static inline struct crypto_blkcipher *__crypto_blkcipher_cast(
	struct crypto_tfm *tfm)
{
	return (struct crypto_blkcipher *)tfm;
}

static inline struct crypto_blkcipher *crypto_blkcipher_cast(
	struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_BLKCIPHER);
	return __crypto_blkcipher_cast(tfm);
}

static inline struct crypto_blkcipher *crypto_alloc_blkcipher(
	const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_blkcipher_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_blkcipher_tfm(
	struct crypto_blkcipher *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_blkcipher(struct crypto_blkcipher *tfm)
{
	crypto_free_tfm(crypto_blkcipher_tfm(tfm));
}

static inline int crypto_has_blkcipher(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline const char *crypto_blkcipher_name(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_alg_name(crypto_blkcipher_tfm(tfm));
}

static inline struct blkcipher_tfm *crypto_blkcipher_crt(
	struct crypto_blkcipher *tfm)
{
	return &crypto_blkcipher_tfm(tfm)->crt_blkcipher;
}

static inline struct blkcipher_alg *crypto_blkcipher_alg(
	struct crypto_blkcipher *tfm)
{
	return &crypto_blkcipher_tfm(tfm)->__crt_alg->cra_blkcipher;
}

static inline unsigned int crypto_blkcipher_ivsize(struct crypto_blkcipher *tfm)
{
	return crypto_blkcipher_alg(tfm)->ivsize;
}

static inline unsigned int crypto_blkcipher_blocksize(
	struct crypto_blkcipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_blkcipher_tfm(tfm));
}

static inline unsigned int crypto_blkcipher_alignmask(
	struct crypto_blkcipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_blkcipher_tfm(tfm));
}

static inline u32 crypto_blkcipher_get_flags(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_get_flags(crypto_blkcipher_tfm(tfm));
}

static inline void crypto_blkcipher_set_flags(struct crypto_blkcipher *tfm,
					      u32 flags)
{
	crypto_tfm_set_flags(crypto_blkcipher_tfm(tfm), flags);
}

static inline void crypto_blkcipher_clear_flags(struct crypto_blkcipher *tfm,
						u32 flags)
{
	crypto_tfm_clear_flags(crypto_blkcipher_tfm(tfm), flags);
}

static inline int crypto_blkcipher_setkey(struct crypto_blkcipher *tfm,
					  const u8 *key, unsigned int keylen)
{
	return crypto_blkcipher_crt(tfm)->setkey(crypto_blkcipher_tfm(tfm),
						 key, keylen);
}

static inline int crypto_blkcipher_encrypt(struct blkcipher_desc *desc,
					   struct scatterlist *dst,
					   struct scatterlist *src,
					   unsigned int nbytes)
{
	desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
	return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
}

static inline int crypto_blkcipher_encrypt_iv(struct blkcipher_desc *desc,
					      struct scatterlist *dst,
					      struct scatterlist *src,
					      unsigned int nbytes)
{
	return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
}

static inline int crypto_blkcipher_decrypt(struct blkcipher_desc *desc,
					   struct scatterlist *dst,
					   struct scatterlist *src,
					   unsigned int nbytes)
{
	desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
	return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
}

static inline int crypto_blkcipher_decrypt_iv(struct blkcipher_desc *desc,
					      struct scatterlist *dst,
					      struct scatterlist *src,
					      unsigned int nbytes)
{
	return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
}

static inline void crypto_blkcipher_set_iv(struct crypto_blkcipher *tfm,
					   const u8 *src, unsigned int len)
{
	memcpy(crypto_blkcipher_crt(tfm)->iv, src, len);
}

static inline void crypto_blkcipher_get_iv(struct crypto_blkcipher *tfm,
					   u8 *dst, unsigned int len)
{
	memcpy(dst, crypto_blkcipher_crt(tfm)->iv, len);
}
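
/*
 * Illustrative sketch (not part of this header): synchronous CBC encryption
 * through the blkcipher interface above.  The algorithm name and buffer
 * setup are examples only; assumes <linux/err.h> and <linux/scatterlist.h>.
 * Passing CRYPTO_ALG_ASYNC in the mask requests a synchronous
 * implementation.
 *
 *	struct crypto_blkcipher *tfm;
 *	struct blkcipher_desc desc;
 *	struct scatterlist sg;
 *	int err;
 *
 *	tfm = crypto_alloc_blkcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_blkcipher_setkey(tfm, key, keylen);
 *	if (err)
 *		goto out;
 *
 *	crypto_blkcipher_set_iv(tfm, iv, crypto_blkcipher_ivsize(tfm));
 *
 *	sg_init_one(&sg, buf, len);	// len is a multiple of the block size
 *	desc.tfm = tfm;
 *	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
 *
 *	err = crypto_blkcipher_encrypt(&desc, &sg, &sg, len);
 * out:
 *	crypto_free_blkcipher(tfm);
 *	return err;
 */
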
static inline struct crypto_cipher *__crypto_cipher_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_cipher *)tfm;
}

static inline struct crypto_cipher *crypto_cipher_cast(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return __crypto_cipher_cast(tfm);
}

static inline struct crypto_cipher *crypto_alloc_cipher(const char *alg_name,
							u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_CIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_cipher_tfm(struct crypto_cipher *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_cipher(struct crypto_cipher *tfm)
{
	crypto_free_tfm(crypto_cipher_tfm(tfm));
}

static inline int crypto_has_cipher(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_CIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline struct cipher_tfm *crypto_cipher_crt(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->crt_cipher;
}

static inline unsigned int crypto_cipher_blocksize(struct crypto_cipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_cipher_tfm(tfm));
}

static inline unsigned int crypto_cipher_alignmask(struct crypto_cipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_cipher_tfm(tfm));
}

static inline u32 crypto_cipher_get_flags(struct crypto_cipher *tfm)
{
	return crypto_tfm_get_flags(crypto_cipher_tfm(tfm));
}

static inline void crypto_cipher_set_flags(struct crypto_cipher *tfm,
					   u32 flags)
{
	crypto_tfm_set_flags(crypto_cipher_tfm(tfm), flags);
}

static inline void crypto_cipher_clear_flags(struct crypto_cipher *tfm,
					     u32 flags)
{
	crypto_tfm_clear_flags(crypto_cipher_tfm(tfm), flags);
}

static inline int crypto_cipher_setkey(struct crypto_cipher *tfm,
				       const u8 *key, unsigned int keylen)
{
	return crypto_cipher_crt(tfm)->cit_setkey(crypto_cipher_tfm(tfm),
						  key, keylen);
}

static inline void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
					     u8 *dst, const u8 *src)
{
	crypto_cipher_crt(tfm)->cit_encrypt_one(crypto_cipher_tfm(tfm),
						dst, src);
}

static inline void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
					     u8 *dst, const u8 *src)
{
	crypto_cipher_crt(tfm)->cit_decrypt_one(crypto_cipher_tfm(tfm),
						dst, src);
}
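
/*
 * Illustrative sketch (not part of this header): encrypting a single block
 * in place with the single-block cipher interface above.  The algorithm
 * name and buffers are examples only; assumes <linux/err.h>.
 *
 *	struct crypto_cipher *tfm;
 *	u8 block[16];
 *	int err;
 *
 *	tfm = crypto_alloc_cipher("aes", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_cipher_setkey(tfm, key, keylen);
 *	if (!err)
 *		crypto_cipher_encrypt_one(tfm, block, block);
 *
 *	crypto_free_cipher(tfm);
 *	return err;
 */
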
static inline struct crypto_hash *__crypto_hash_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_hash *)tfm;
}

static inline struct crypto_hash *crypto_hash_cast(struct crypto_tfm *tfm)
{
	BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_HASH) &
	       CRYPTO_ALG_TYPE_HASH_MASK);
	return __crypto_hash_cast(tfm);
}

static inline struct crypto_hash *crypto_alloc_hash(const char *alg_name,
						    u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	mask &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_HASH;
	mask |= CRYPTO_ALG_TYPE_HASH_MASK;

	return __crypto_hash_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_hash_tfm(struct crypto_hash *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_hash(struct crypto_hash *tfm)
{
	crypto_free_tfm(crypto_hash_tfm(tfm));
}

static inline int crypto_has_hash(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	mask &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_HASH;
	mask |= CRYPTO_ALG_TYPE_HASH_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline struct hash_tfm *crypto_hash_crt(struct crypto_hash *tfm)
{
	return &crypto_hash_tfm(tfm)->crt_hash;
}

static inline unsigned int crypto_hash_blocksize(struct crypto_hash *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_hash_tfm(tfm));
}

static inline unsigned int crypto_hash_alignmask(struct crypto_hash *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_hash_tfm(tfm));
}

static inline unsigned int crypto_hash_digestsize(struct crypto_hash *tfm)
{
	return crypto_hash_crt(tfm)->digestsize;
}

static inline u32 crypto_hash_get_flags(struct crypto_hash *tfm)
{
	return crypto_tfm_get_flags(crypto_hash_tfm(tfm));
}

static inline void crypto_hash_set_flags(struct crypto_hash *tfm, u32 flags)
{
	crypto_tfm_set_flags(crypto_hash_tfm(tfm), flags);
}

static inline void crypto_hash_clear_flags(struct crypto_hash *tfm, u32 flags)
{
	crypto_tfm_clear_flags(crypto_hash_tfm(tfm), flags);
}

static inline int crypto_hash_init(struct hash_desc *desc)
{
	return crypto_hash_crt(desc->tfm)->init(desc);
}

static inline int crypto_hash_update(struct hash_desc *desc,
				     struct scatterlist *sg,
				     unsigned int nbytes)
{
	return crypto_hash_crt(desc->tfm)->update(desc, sg, nbytes);
}

static inline int crypto_hash_final(struct hash_desc *desc, u8 *out)
{
	return crypto_hash_crt(desc->tfm)->final(desc, out);
}

static inline int crypto_hash_digest(struct hash_desc *desc,
				     struct scatterlist *sg,
				     unsigned int nbytes, u8 *out)
{
	return crypto_hash_crt(desc->tfm)->digest(desc, sg, nbytes, out);
}

static inline int crypto_hash_setkey(struct crypto_hash *hash,
				     const u8 *key, unsigned int keylen)
{
	return crypto_hash_crt(hash)->setkey(hash, key, keylen);
}
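
/*
 * Illustrative sketch (not part of this header): computing a digest over a
 * single buffer with the hash interface above.  The algorithm name and
 * buffer are examples only; assumes <linux/err.h> and <linux/scatterlist.h>.
 *
 *	struct crypto_hash *tfm;
 *	struct hash_desc desc;
 *	struct scatterlist sg;
 *	u8 out[20];			// SHA-1 digest size
 *	int err;
 *
 *	tfm = crypto_alloc_hash("sha1", 0, CRYPTO_ALG_ASYNC);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	sg_init_one(&sg, buf, len);
 *	desc.tfm = tfm;
 *	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
 *
 *	err = crypto_hash_digest(&desc, &sg, len, out);
 *
 *	crypto_free_hash(tfm);
 *	return err;
 */
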
static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_comp *)tfm;
}

static inline struct crypto_comp *crypto_comp_cast(struct crypto_tfm *tfm)
{
	BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_COMPRESS) &
	       CRYPTO_ALG_TYPE_MASK);
	return __crypto_comp_cast(tfm);
}

static inline struct crypto_comp *crypto_alloc_comp(const char *alg_name,
						    u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_comp_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_comp_tfm(struct crypto_comp *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_comp(struct crypto_comp *tfm)
{
	crypto_free_tfm(crypto_comp_tfm(tfm));
}

static inline int crypto_has_comp(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline const char *crypto_comp_name(struct crypto_comp *tfm)
{
	return crypto_tfm_alg_name(crypto_comp_tfm(tfm));
}

static inline struct compress_tfm *crypto_comp_crt(struct crypto_comp *tfm)
{
	return &crypto_comp_tfm(tfm)->crt_compress;
}

static inline int crypto_comp_compress(struct crypto_comp *tfm,
				       const u8 *src, unsigned int slen,
				       u8 *dst, unsigned int *dlen)
{
	return crypto_comp_crt(tfm)->cot_compress(crypto_comp_tfm(tfm),
						  src, slen, dst, dlen);
}

static inline int crypto_comp_decompress(struct crypto_comp *tfm,
					 const u8 *src, unsigned int slen,
					 u8 *dst, unsigned int *dlen)
{
	return crypto_comp_crt(tfm)->cot_decompress(crypto_comp_tfm(tfm),
						    src, slen, dst, dlen);
}

#endif	/* _LINUX_CRYPTO_H */