/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <[email protected]>
 * Copyright (c) 2002 David S. Miller ([email protected])
 * Copyright (c) 2005 Herbert Xu <[email protected]>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <[email protected]>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _LINUX_CRYPTO_H
#define _LINUX_CRYPTO_H

#include <asm/atomic.h>
#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/uaccess.h>

/*
 * Algorithm masks and types.
 */
#define CRYPTO_ALG_TYPE_MASK		0x0000000f
#define CRYPTO_ALG_TYPE_CIPHER		0x00000001
#define CRYPTO_ALG_TYPE_DIGEST		0x00000002
#define CRYPTO_ALG_TYPE_HASH		0x00000003
#define CRYPTO_ALG_TYPE_BLKCIPHER	0x00000004
#define CRYPTO_ALG_TYPE_ABLKCIPHER	0x00000005
#define CRYPTO_ALG_TYPE_GIVCIPHER	0x00000006
#define CRYPTO_ALG_TYPE_COMPRESS	0x00000008
#define CRYPTO_ALG_TYPE_AEAD		0x00000009

#define CRYPTO_ALG_TYPE_HASH_MASK	0x0000000e
#define CRYPTO_ALG_TYPE_BLKCIPHER_MASK	0x0000000c

#define CRYPTO_ALG_LARVAL		0x00000010
#define CRYPTO_ALG_DEAD			0x00000020
#define CRYPTO_ALG_DYING		0x00000040
#define CRYPTO_ALG_ASYNC		0x00000080

/*
 * Set this bit if and only if the algorithm requires another algorithm of
 * the same type to handle corner cases.
 */
#define CRYPTO_ALG_NEED_FALLBACK	0x00000100

/*
 * This bit is set for symmetric key ciphers that have already been wrapped
 * with a generic IV generator to prevent them from being wrapped again.
 */
#define CRYPTO_ALG_GENIV		0x00000200

/*
 * Transform masks and values (for crt_flags).
 */
#define CRYPTO_TFM_REQ_MASK		0x000fff00
#define CRYPTO_TFM_RES_MASK		0xfff00000

#define CRYPTO_TFM_REQ_WEAK_KEY		0x00000100
#define CRYPTO_TFM_REQ_MAY_SLEEP	0x00000200
#define CRYPTO_TFM_REQ_MAY_BACKLOG	0x00000400
#define CRYPTO_TFM_RES_WEAK_KEY		0x00100000
#define CRYPTO_TFM_RES_BAD_KEY_LEN	0x00200000
#define CRYPTO_TFM_RES_BAD_KEY_SCHED	0x00400000
#define CRYPTO_TFM_RES_BAD_BLOCK_LEN	0x00800000
#define CRYPTO_TFM_RES_BAD_FLAGS	0x01000000

/*
 * Miscellaneous stuff.
 */
#define CRYPTO_MAX_ALG_NAME		64

/*
 * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
 * declaration) is used to ensure that the crypto_tfm context structure is
 * aligned correctly for the given architecture so that there are no alignment
 * faults for C data types.  In particular, this is required on platforms such
 * as arm where pointers are 32-bit aligned but there are data types such as
 * u64 which require 64-bit alignment.
 */
#if defined(ARCH_KMALLOC_MINALIGN)
#define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN
#elif defined(ARCH_SLAB_MINALIGN)
#define CRYPTO_MINALIGN ARCH_SLAB_MINALIGN
#else
#define CRYPTO_MINALIGN __alignof__(unsigned long long)
#endif

#define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))
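/*
 * Illustrative sketch (not part of this header): a transform context that
 * relies on the CRYPTO_MINALIGN_ATTR alignment of the tfm context area.
 * The struct name and fields are hypothetical; the point is that a u64
 * member is safe to access directly because the context returned by
 * crypto_tfm_ctx() (declared below) is at least 64-bit aligned.
 *
 *	struct example_cipher_ctx {
 *		u64 tweak;
 *		u32 key_enc[60];
 *	};
 *
 * A driver storing this in the tfm would set
 * .cra_ctxsize = sizeof(struct example_cipher_ctx) when registering.
 */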
struct scatterlist;
struct crypto_ablkcipher;
struct crypto_async_request;
struct crypto_aead;
struct crypto_blkcipher;
struct crypto_hash;
struct crypto_tfm;
struct crypto_type;
struct aead_givcrypt_request;
struct skcipher_givcrypt_request;

typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);

struct crypto_async_request {
	struct list_head list;
	crypto_completion_t complete;
	void *data;
	struct crypto_tfm *tfm;

	u32 flags;
};
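/*
 * Illustrative sketch (not part of this header): a typical completion
 * callback for asynchronous requests.  The wrapper struct, its name and the
 * use of <linux/completion.h> are assumptions for illustration.  For
 * requests marked CRYPTO_TFM_REQ_MAY_BACKLOG, the callback may first be
 * invoked with err == -EINPROGRESS to signal that a backlogged request has
 * been accepted; the real completion call follows later.
 *
 *	struct example_result {
 *		struct completion completion;
 *		int err;
 *	};
 *
 *	static void example_complete(struct crypto_async_request *req, int err)
 *	{
 *		struct example_result *res = req->data;
 *
 *		if (err == -EINPROGRESS)
 *			return;
 *
 *		res->err = err;
 *		complete(&res->completion);
 *	}
 */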
struct ablkcipher_request {
	struct crypto_async_request base;

	unsigned int nbytes;

	void *info;

	struct scatterlist *src;
	struct scatterlist *dst;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

/**
 * struct aead_request - AEAD request
 * @base: Common attributes for async crypto requests
 * @assoclen: Length in bytes of associated data for authentication
 * @cryptlen: Length of data to be encrypted or decrypted
 * @iv: Initialisation vector
 * @assoc: Associated data
 * @src: Source data
 * @dst: Destination data
 * @__ctx: Start of private context data
 */
struct aead_request {
	struct crypto_async_request base;

	unsigned int assoclen;
	unsigned int cryptlen;

	u8 *iv;

	struct scatterlist *assoc;
	struct scatterlist *src;
	struct scatterlist *dst;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct blkcipher_desc {
	struct crypto_blkcipher *tfm;
	void *info;
	u32 flags;
};

struct cipher_desc {
	struct crypto_tfm *tfm;
	void (*crfn)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst,
			     const u8 *src, unsigned int nbytes);
	void *info;
};

struct hash_desc {
	struct crypto_hash *tfm;
	u32 flags;
};

/*
 * Algorithms: modular crypto algorithm implementations, managed
 * via crypto_register_alg() and crypto_unregister_alg().
 */
struct ablkcipher_alg {
	int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct ablkcipher_request *req);
	int (*decrypt)(struct ablkcipher_request *req);
	int (*givencrypt)(struct skcipher_givcrypt_request *req);
	int (*givdecrypt)(struct skcipher_givcrypt_request *req);

	const char *geniv;

	unsigned int min_keysize;
	unsigned int max_keysize;
	unsigned int ivsize;
};

struct aead_alg {
	int (*setkey)(struct crypto_aead *tfm, const u8 *key,
		      unsigned int keylen);
	int (*setauthsize)(struct crypto_aead *tfm, unsigned int authsize);
	int (*encrypt)(struct aead_request *req);
	int (*decrypt)(struct aead_request *req);
	int (*givencrypt)(struct aead_givcrypt_request *req);
	int (*givdecrypt)(struct aead_givcrypt_request *req);

	const char *geniv;

	unsigned int ivsize;
	unsigned int maxauthsize;
};

struct blkcipher_alg {
	int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes);
	int (*decrypt)(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes);

	const char *geniv;

	unsigned int min_keysize;
	unsigned int max_keysize;
	unsigned int ivsize;
};

struct cipher_alg {
	unsigned int cia_min_keysize;
	unsigned int cia_max_keysize;
	int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
			  unsigned int keylen);
	void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};

struct digest_alg {
	unsigned int dia_digestsize;
	void (*dia_init)(struct crypto_tfm *tfm);
	void (*dia_update)(struct crypto_tfm *tfm, const u8 *data,
			   unsigned int len);
	void (*dia_final)(struct crypto_tfm *tfm, u8 *out);
	int (*dia_setkey)(struct crypto_tfm *tfm, const u8 *key,
			  unsigned int keylen);
};

struct hash_alg {
	int (*init)(struct hash_desc *desc);
	int (*update)(struct hash_desc *desc, struct scatterlist *sg,
		      unsigned int nbytes);
	int (*final)(struct hash_desc *desc, u8 *out);
	int (*digest)(struct hash_desc *desc, struct scatterlist *sg,
		      unsigned int nbytes, u8 *out);
	int (*setkey)(struct crypto_hash *tfm, const u8 *key,
		      unsigned int keylen);

	unsigned int digestsize;
};

struct compress_alg {
	int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
			    unsigned int slen, u8 *dst, unsigned int *dlen);
	int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
			      unsigned int slen, u8 *dst, unsigned int *dlen);
};

#define cra_ablkcipher	cra_u.ablkcipher
#define cra_aead	cra_u.aead
#define cra_blkcipher	cra_u.blkcipher
#define cra_cipher	cra_u.cipher
#define cra_digest	cra_u.digest
#define cra_hash	cra_u.hash
#define cra_compress	cra_u.compress

struct crypto_alg {
	struct list_head cra_list;
	struct list_head cra_users;

	u32 cra_flags;
	unsigned int cra_blocksize;
	unsigned int cra_ctxsize;
	unsigned int cra_alignmask;

	int cra_priority;
	atomic_t cra_refcnt;

	char cra_name[CRYPTO_MAX_ALG_NAME];
	char cra_driver_name[CRYPTO_MAX_ALG_NAME];

	const struct crypto_type *cra_type;

	union {
		struct ablkcipher_alg ablkcipher;
		struct aead_alg aead;
		struct blkcipher_alg blkcipher;
		struct cipher_alg cipher;
		struct digest_alg digest;
		struct hash_alg hash;
		struct compress_alg compress;
	} cra_u;

	int (*cra_init)(struct crypto_tfm *tfm);
	void (*cra_exit)(struct crypto_tfm *tfm);
	void (*cra_destroy)(struct crypto_alg *alg);

	struct module *cra_module;
};

/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
int crypto_unregister_alg(struct crypto_alg *alg);

/*
 * Algorithm query interface.
 */
int crypto_has_alg(const char *name, u32 type, u32 mask);
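/*
 * Illustrative sketch (not part of this header): how a module typically
 * fills in a struct crypto_alg for a single-block cipher and registers it.
 * All names, key/block sizes and the setkey/encrypt/decrypt helpers below
 * are hypothetical; see existing algorithm modules for real examples.
 *
 *	static struct crypto_alg example_alg = {
 *		.cra_name		= "example",
 *		.cra_driver_name	= "example-generic",
 *		.cra_priority		= 100,
 *		.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
 *		.cra_blocksize		= 16,
 *		.cra_ctxsize		= sizeof(struct example_ctx),
 *		.cra_module		= THIS_MODULE,
 *		.cra_list		= LIST_HEAD_INIT(example_alg.cra_list),
 *		.cra_u			= { .cipher = {
 *			.cia_min_keysize	= 16,
 *			.cia_max_keysize	= 32,
 *			.cia_setkey		= example_setkey,
 *			.cia_encrypt		= example_encrypt,
 *			.cia_decrypt		= example_decrypt } }
 *	};
 *
 *	static int __init example_mod_init(void)
 *	{
 *		return crypto_register_alg(&example_alg);
 *	}
 *
 *	static void __exit example_mod_exit(void)
 *	{
 *		crypto_unregister_alg(&example_alg);
 *	}
 */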
/*
 * Transforms: user-instantiated objects which encapsulate algorithms
 * and core processing logic.  Managed via crypto_alloc_*() and
 * crypto_free_*(), as well as the various helpers below.
 */

struct ablkcipher_tfm {
	int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct ablkcipher_request *req);
	int (*decrypt)(struct ablkcipher_request *req);
	int (*givencrypt)(struct skcipher_givcrypt_request *req);
	int (*givdecrypt)(struct skcipher_givcrypt_request *req);

	struct crypto_ablkcipher *base;

	unsigned int ivsize;
	unsigned int reqsize;
};

struct aead_tfm {
	int (*setkey)(struct crypto_aead *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct aead_request *req);
	int (*decrypt)(struct aead_request *req);
	int (*givencrypt)(struct aead_givcrypt_request *req);
	int (*givdecrypt)(struct aead_givcrypt_request *req);

	struct crypto_aead *base;

	unsigned int ivsize;
	unsigned int authsize;
	unsigned int reqsize;
};

struct blkcipher_tfm {
	void *iv;
	int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes);
	int (*decrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes);
};

struct cipher_tfm {
	int (*cit_setkey)(struct crypto_tfm *tfm,
			  const u8 *key, unsigned int keylen);
	void (*cit_encrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	void (*cit_decrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};

struct hash_tfm {
	int (*init)(struct hash_desc *desc);
	int (*update)(struct hash_desc *desc,
		      struct scatterlist *sg, unsigned int nsg);
	int (*final)(struct hash_desc *desc, u8 *out);
	int (*digest)(struct hash_desc *desc, struct scatterlist *sg,
		      unsigned int nsg, u8 *out);
	int (*setkey)(struct crypto_hash *tfm, const u8 *key,
		      unsigned int keylen);
	unsigned int digestsize;
};

struct compress_tfm {
	int (*cot_compress)(struct crypto_tfm *tfm,
			    const u8 *src, unsigned int slen,
			    u8 *dst, unsigned int *dlen);
	int (*cot_decompress)(struct crypto_tfm *tfm,
			      const u8 *src, unsigned int slen,
			      u8 *dst, unsigned int *dlen);
};

#define crt_ablkcipher	crt_u.ablkcipher
#define crt_aead	crt_u.aead
#define crt_blkcipher	crt_u.blkcipher
#define crt_cipher	crt_u.cipher
#define crt_hash	crt_u.hash
#define crt_compress	crt_u.compress

struct crypto_tfm {

	u32 crt_flags;

	union {
		struct ablkcipher_tfm ablkcipher;
		struct aead_tfm aead;
		struct blkcipher_tfm blkcipher;
		struct cipher_tfm cipher;
		struct hash_tfm hash;
		struct compress_tfm compress;
	} crt_u;

	struct crypto_alg *__crt_alg;

	void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
};
struct crypto_ablkcipher {
	struct crypto_tfm base;
};

struct crypto_aead {
	struct crypto_tfm base;
};

struct crypto_blkcipher {
	struct crypto_tfm base;
};

struct crypto_cipher {
	struct crypto_tfm base;
};

struct crypto_comp {
	struct crypto_tfm base;
};

struct crypto_hash {
	struct crypto_tfm base;
};

enum {
	CRYPTOA_UNSPEC,
	CRYPTOA_ALG,
	CRYPTOA_TYPE,
	CRYPTOA_U32,
	__CRYPTOA_MAX,
};

#define CRYPTOA_MAX (__CRYPTOA_MAX - 1)

/* Maximum number of (rtattr) parameters for each template. */
#define CRYPTO_MAX_ATTRS 32

struct crypto_attr_alg {
	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_attr_type {
	u32 type;
	u32 mask;
};

struct crypto_attr_u32 {
	u32 num;
};

/*
 * Transform user interface.
 */

struct crypto_tfm *crypto_alloc_tfm(const char *alg_name, u32 tfm_flags);
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
void crypto_free_tfm(struct crypto_tfm *tfm);

/*
 * Transform helpers which query the underlying algorithm.
 */
static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_name;
}

static inline const char *crypto_tfm_alg_driver_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_driver_name;
}

static inline int crypto_tfm_alg_priority(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_priority;
}

static inline const char *crypto_tfm_alg_modname(struct crypto_tfm *tfm)
{
	return module_name(tfm->__crt_alg->cra_module);
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_blocksize;
}

static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_alignmask;
}

static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm)
{
	return tfm->crt_flags;
}

static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags |= flags;
}

static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags &= ~flags;
}

static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
	return tfm->__crt_ctx;
}

static inline unsigned int crypto_tfm_ctx_alignment(void)
{
	struct crypto_tfm *tfm;
	return __alignof__(tfm->__crt_ctx);
}
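/*
 * Illustrative sketch (not part of this header): algorithm implementations
 * keep their per-transform state in the context area returned by
 * crypto_tfm_ctx().  The context type and setkey routine below are
 * hypothetical; the matching struct crypto_alg would set
 * .cra_ctxsize = sizeof(struct example_ctx).
 *
 *	struct example_ctx {
 *		u32 rounds;
 *		u8 key[32];
 *	};
 *
 *	static int example_setkey(struct crypto_tfm *tfm, const u8 *key,
 *				  unsigned int keylen)
 *	{
 *		struct example_ctx *ctx = crypto_tfm_ctx(tfm);
 *
 *		if (keylen > sizeof(ctx->key)) {
 *			crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
 *			return -EINVAL;
 *		}
 *		memcpy(ctx->key, key, keylen);
 *		return 0;
 *	}
 */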
/*
 * API wrappers.
 */
static inline struct crypto_ablkcipher *__crypto_ablkcipher_cast(
	struct crypto_tfm *tfm)
{
	return (struct crypto_ablkcipher *)tfm;
}

static inline u32 crypto_skcipher_type(u32 type)
{
	type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
	return type;
}

static inline u32 crypto_skcipher_mask(u32 mask)
{
	mask &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
	mask |= CRYPTO_ALG_TYPE_BLKCIPHER_MASK;
	return mask;
}

struct crypto_ablkcipher *crypto_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask);

static inline struct crypto_tfm *crypto_ablkcipher_tfm(
	struct crypto_ablkcipher *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_ablkcipher(struct crypto_ablkcipher *tfm)
{
	crypto_free_tfm(crypto_ablkcipher_tfm(tfm));
}

static inline int crypto_has_ablkcipher(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_has_alg(alg_name, crypto_skcipher_type(type),
			      crypto_skcipher_mask(mask));
}

static inline struct ablkcipher_tfm *crypto_ablkcipher_crt(
	struct crypto_ablkcipher *tfm)
{
	return &crypto_ablkcipher_tfm(tfm)->crt_ablkcipher;
}

static inline unsigned int crypto_ablkcipher_ivsize(
	struct crypto_ablkcipher *tfm)
{
	return crypto_ablkcipher_crt(tfm)->ivsize;
}

static inline unsigned int crypto_ablkcipher_blocksize(
	struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_ablkcipher_tfm(tfm));
}

static inline unsigned int crypto_ablkcipher_alignmask(
	struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_ablkcipher_tfm(tfm));
}

static inline u32 crypto_ablkcipher_get_flags(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_get_flags(crypto_ablkcipher_tfm(tfm));
}

static inline void crypto_ablkcipher_set_flags(struct crypto_ablkcipher *tfm,
					       u32 flags)
{
	crypto_tfm_set_flags(crypto_ablkcipher_tfm(tfm), flags);
}

static inline void crypto_ablkcipher_clear_flags(struct crypto_ablkcipher *tfm,
						 u32 flags)
{
	crypto_tfm_clear_flags(crypto_ablkcipher_tfm(tfm), flags);
}

static inline int crypto_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
					   const u8 *key, unsigned int keylen)
{
	struct ablkcipher_tfm *crt = crypto_ablkcipher_crt(tfm);

	return crt->setkey(crt->base, key, keylen);
}

static inline struct crypto_ablkcipher *crypto_ablkcipher_reqtfm(
	struct ablkcipher_request *req)
{
	return __crypto_ablkcipher_cast(req->base.tfm);
}

static inline int crypto_ablkcipher_encrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_tfm *crt =
		crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
	return crt->encrypt(req);
}

static inline int crypto_ablkcipher_decrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_tfm *crt =
		crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
	return crt->decrypt(req);
}

static inline unsigned int crypto_ablkcipher_reqsize(
	struct crypto_ablkcipher *tfm)
{
	return crypto_ablkcipher_crt(tfm)->reqsize;
}

static inline void ablkcipher_request_set_tfm(
	struct ablkcipher_request *req, struct crypto_ablkcipher *tfm)
{
	req->base.tfm = crypto_ablkcipher_tfm(crypto_ablkcipher_crt(tfm)->base);
}

static inline struct ablkcipher_request *ablkcipher_request_cast(
	struct crypto_async_request *req)
{
	return container_of(req, struct ablkcipher_request, base);
}

static inline struct ablkcipher_request *ablkcipher_request_alloc(
	struct crypto_ablkcipher *tfm, gfp_t gfp)
{
	struct ablkcipher_request *req;

	req = kmalloc(sizeof(struct ablkcipher_request) +
		      crypto_ablkcipher_reqsize(tfm), gfp);

	if (likely(req))
		ablkcipher_request_set_tfm(req, tfm);

	return req;
}

static inline void ablkcipher_request_free(struct ablkcipher_request *req)
{
	kfree(req);
}

static inline void ablkcipher_request_set_callback(
	struct ablkcipher_request *req,
	u32 flags, crypto_completion_t complete, void *data)
{
	req->base.complete = complete;
	req->base.data = data;
	req->base.flags = flags;
}

static inline void ablkcipher_request_set_crypt(
	struct ablkcipher_request *req,
	struct scatterlist *src, struct scatterlist *dst,
	unsigned int nbytes, void *iv)
{
	req->src = src;
	req->dst = dst;
	req->nbytes = nbytes;
	req->info = iv;
}
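/*
 * Illustrative sketch (not part of this header): the usual calling sequence
 * for the asynchronous block cipher interface.  The algorithm name
 * "cbc(aes)", the key/iv/buf buffers, sg_init_one() from
 * <linux/scatterlist.h>, and the example_complete()/example_result helpers
 * from the sketch near the top of this file are assumptions for
 * illustration; error handling is abbreviated.
 *
 *	struct crypto_ablkcipher *tfm;
 *	struct ablkcipher_request *req;
 *	struct scatterlist sg;
 *	int err;
 *
 *	tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_ablkcipher_setkey(tfm, key, keylen);
 *
 *	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
 *	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *					example_complete, &result);
 *	sg_init_one(&sg, buf, buflen);
 *	ablkcipher_request_set_crypt(req, &sg, &sg, buflen, iv);
 *
 *	err = crypto_ablkcipher_encrypt(req);
 *	if (err == -EINPROGRESS || err == -EBUSY)
 *		err = example_wait_for_completion(&result);
 *
 *	ablkcipher_request_free(req);
 *	crypto_free_ablkcipher(tfm);
 */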
static inline struct crypto_aead *__crypto_aead_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_aead *)tfm;
}

struct crypto_aead *crypto_alloc_aead(const char *alg_name, u32 type, u32 mask);

static inline struct crypto_tfm *crypto_aead_tfm(struct crypto_aead *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_aead(struct crypto_aead *tfm)
{
	crypto_free_tfm(crypto_aead_tfm(tfm));
}

static inline struct aead_tfm *crypto_aead_crt(struct crypto_aead *tfm)
{
	return &crypto_aead_tfm(tfm)->crt_aead;
}

static inline unsigned int crypto_aead_ivsize(struct crypto_aead *tfm)
{
	return crypto_aead_crt(tfm)->ivsize;
}

static inline unsigned int crypto_aead_authsize(struct crypto_aead *tfm)
{
	return crypto_aead_crt(tfm)->authsize;
}

static inline unsigned int crypto_aead_blocksize(struct crypto_aead *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_aead_tfm(tfm));
}

static inline unsigned int crypto_aead_alignmask(struct crypto_aead *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_aead_tfm(tfm));
}

static inline u32 crypto_aead_get_flags(struct crypto_aead *tfm)
{
	return crypto_tfm_get_flags(crypto_aead_tfm(tfm));
}

static inline void crypto_aead_set_flags(struct crypto_aead *tfm, u32 flags)
{
	crypto_tfm_set_flags(crypto_aead_tfm(tfm), flags);
}

static inline void crypto_aead_clear_flags(struct crypto_aead *tfm, u32 flags)
{
	crypto_tfm_clear_flags(crypto_aead_tfm(tfm), flags);
}

static inline int crypto_aead_setkey(struct crypto_aead *tfm, const u8 *key,
				     unsigned int keylen)
{
	struct aead_tfm *crt = crypto_aead_crt(tfm);

	return crt->setkey(crt->base, key, keylen);
}

int crypto_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize);

static inline struct crypto_aead *crypto_aead_reqtfm(struct aead_request *req)
{
	return __crypto_aead_cast(req->base.tfm);
}

static inline int crypto_aead_encrypt(struct aead_request *req)
{
	return crypto_aead_crt(crypto_aead_reqtfm(req))->encrypt(req);
}

static inline int crypto_aead_decrypt(struct aead_request *req)
{
	return crypto_aead_crt(crypto_aead_reqtfm(req))->decrypt(req);
}

static inline unsigned int crypto_aead_reqsize(struct crypto_aead *tfm)
{
	return crypto_aead_crt(tfm)->reqsize;
}

static inline void aead_request_set_tfm(struct aead_request *req,
					struct crypto_aead *tfm)
{
	req->base.tfm = crypto_aead_tfm(crypto_aead_crt(tfm)->base);
}

static inline struct aead_request *aead_request_alloc(struct crypto_aead *tfm,
						      gfp_t gfp)
{
	struct aead_request *req;

	req = kmalloc(sizeof(*req) + crypto_aead_reqsize(tfm), gfp);

	if (likely(req))
		aead_request_set_tfm(req, tfm);

	return req;
}

static inline void aead_request_free(struct aead_request *req)
{
	kfree(req);
}

static inline void aead_request_set_callback(struct aead_request *req,
					     u32 flags,
					     crypto_completion_t complete,
					     void *data)
{
	req->base.complete = complete;
	req->base.data = data;
	req->base.flags = flags;
}

static inline void aead_request_set_crypt(struct aead_request *req,
					  struct scatterlist *src,
					  struct scatterlist *dst,
					  unsigned int cryptlen, u8 *iv)
{
	req->src = src;
	req->dst = dst;
	req->cryptlen = cryptlen;
	req->iv = iv;
}

static inline void aead_request_set_assoc(struct aead_request *req,
					  struct scatterlist *assoc,
					  unsigned int assoclen)
{
	req->assoc = assoc;
	req->assoclen = assoclen;
}
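/*
 * Illustrative sketch (not part of this header): setting up an AEAD request.
 * The algorithm name "gcm(aes)", the 16-byte tag length, the buffers and the
 * example_complete()/example_wait_for_completion() helpers are assumptions
 * for illustration.  On encryption the destination scatterlist must have
 * room for cryptlen plus the authentication tag.
 *
 *	struct crypto_aead *tfm;
 *	struct aead_request *req;
 *	struct scatterlist sg, asg;
 *	int err;
 *
 *	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_aead_setkey(tfm, key, keylen);
 *	err = crypto_aead_setauthsize(tfm, 16);
 *
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				  example_complete, &result);
 *	sg_init_one(&sg, data, datalen + crypto_aead_authsize(tfm));
 *	sg_init_one(&asg, assoc, assoclen);
 *	aead_request_set_crypt(req, &sg, &sg, datalen, iv);
 *	aead_request_set_assoc(req, &asg, assoclen);
 *
 *	err = crypto_aead_encrypt(req);
 *	if (err == -EINPROGRESS || err == -EBUSY)
 *		err = example_wait_for_completion(&result);
 *
 *	aead_request_free(req);
 *	crypto_free_aead(tfm);
 */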
static inline struct crypto_blkcipher *__crypto_blkcipher_cast(
	struct crypto_tfm *tfm)
{
	return (struct crypto_blkcipher *)tfm;
}

static inline struct crypto_blkcipher *crypto_blkcipher_cast(
	struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_BLKCIPHER);
	return __crypto_blkcipher_cast(tfm);
}

static inline struct crypto_blkcipher *crypto_alloc_blkcipher(
	const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_blkcipher_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_blkcipher_tfm(
	struct crypto_blkcipher *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_blkcipher(struct crypto_blkcipher *tfm)
{
	crypto_free_tfm(crypto_blkcipher_tfm(tfm));
}

static inline int crypto_has_blkcipher(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline const char *crypto_blkcipher_name(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_alg_name(crypto_blkcipher_tfm(tfm));
}

static inline struct blkcipher_tfm *crypto_blkcipher_crt(
	struct crypto_blkcipher *tfm)
{
	return &crypto_blkcipher_tfm(tfm)->crt_blkcipher;
}

static inline struct blkcipher_alg *crypto_blkcipher_alg(
	struct crypto_blkcipher *tfm)
{
	return &crypto_blkcipher_tfm(tfm)->__crt_alg->cra_blkcipher;
}

static inline unsigned int crypto_blkcipher_ivsize(struct crypto_blkcipher *tfm)
{
	return crypto_blkcipher_alg(tfm)->ivsize;
}

static inline unsigned int crypto_blkcipher_blocksize(
	struct crypto_blkcipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_blkcipher_tfm(tfm));
}

static inline unsigned int crypto_blkcipher_alignmask(
	struct crypto_blkcipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_blkcipher_tfm(tfm));
}

static inline u32 crypto_blkcipher_get_flags(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_get_flags(crypto_blkcipher_tfm(tfm));
}

static inline void crypto_blkcipher_set_flags(struct crypto_blkcipher *tfm,
					      u32 flags)
{
	crypto_tfm_set_flags(crypto_blkcipher_tfm(tfm), flags);
}

static inline void crypto_blkcipher_clear_flags(struct crypto_blkcipher *tfm,
						u32 flags)
{
	crypto_tfm_clear_flags(crypto_blkcipher_tfm(tfm), flags);
}

static inline int crypto_blkcipher_setkey(struct crypto_blkcipher *tfm,
					  const u8 *key, unsigned int keylen)
{
	return crypto_blkcipher_crt(tfm)->setkey(crypto_blkcipher_tfm(tfm),
						 key, keylen);
}

static inline int crypto_blkcipher_encrypt(struct blkcipher_desc *desc,
					   struct scatterlist *dst,
					   struct scatterlist *src,
					   unsigned int nbytes)
{
	desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
	return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
}

static inline int crypto_blkcipher_encrypt_iv(struct blkcipher_desc *desc,
					      struct scatterlist *dst,
					      struct scatterlist *src,
					      unsigned int nbytes)
{
	return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
}

static inline int crypto_blkcipher_decrypt(struct blkcipher_desc *desc,
					   struct scatterlist *dst,
					   struct scatterlist *src,
					   unsigned int nbytes)
{
	desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
	return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
}

static inline int crypto_blkcipher_decrypt_iv(struct blkcipher_desc *desc,
					      struct scatterlist *dst,
					      struct scatterlist *src,
					      unsigned int nbytes)
{
	return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
}

static inline void crypto_blkcipher_set_iv(struct crypto_blkcipher *tfm,
					   const u8 *src, unsigned int len)
{
	memcpy(crypto_blkcipher_crt(tfm)->iv, src, len);
}

static inline void crypto_blkcipher_get_iv(struct crypto_blkcipher *tfm,
					   u8 *dst, unsigned int len)
{
	memcpy(dst, crypto_blkcipher_crt(tfm)->iv, len);
}
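/*
 * Illustrative sketch (not part of this header): synchronous encryption with
 * the blkcipher interface.  "cbc(aes)" and the key/iv/buf buffers are
 * assumptions for illustration.  Passing CRYPTO_ALG_ASYNC in the mask asks
 * for a synchronous implementation; for CBC, nbytes must be a multiple of
 * the cipher block size, and the IV set via crypto_blkcipher_set_iv() is
 * carried across calls to crypto_blkcipher_encrypt().
 *
 *	struct crypto_blkcipher *tfm;
 *	struct blkcipher_desc desc;
 *	struct scatterlist sg;
 *	int err;
 *
 *	tfm = crypto_alloc_blkcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_blkcipher_setkey(tfm, key, keylen);
 *	crypto_blkcipher_set_iv(tfm, iv, crypto_blkcipher_ivsize(tfm));
 *
 *	desc.tfm = tfm;
 *	desc.flags = 0;
 *	sg_init_one(&sg, buf, buflen);
 *	err = crypto_blkcipher_encrypt(&desc, &sg, &sg, buflen);
 *
 *	crypto_free_blkcipher(tfm);
 */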
static inline struct crypto_cipher *__crypto_cipher_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_cipher *)tfm;
}

static inline struct crypto_cipher *crypto_cipher_cast(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return __crypto_cipher_cast(tfm);
}

static inline struct crypto_cipher *crypto_alloc_cipher(const char *alg_name,
							u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_CIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_cipher_tfm(struct crypto_cipher *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_cipher(struct crypto_cipher *tfm)
{
	crypto_free_tfm(crypto_cipher_tfm(tfm));
}

static inline int crypto_has_cipher(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_CIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline struct cipher_tfm *crypto_cipher_crt(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->crt_cipher;
}

static inline unsigned int crypto_cipher_blocksize(struct crypto_cipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_cipher_tfm(tfm));
}

static inline unsigned int crypto_cipher_alignmask(struct crypto_cipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_cipher_tfm(tfm));
}

static inline u32 crypto_cipher_get_flags(struct crypto_cipher *tfm)
{
	return crypto_tfm_get_flags(crypto_cipher_tfm(tfm));
}

static inline void crypto_cipher_set_flags(struct crypto_cipher *tfm,
					   u32 flags)
{
	crypto_tfm_set_flags(crypto_cipher_tfm(tfm), flags);
}

static inline void crypto_cipher_clear_flags(struct crypto_cipher *tfm,
					     u32 flags)
{
	crypto_tfm_clear_flags(crypto_cipher_tfm(tfm), flags);
}

static inline int crypto_cipher_setkey(struct crypto_cipher *tfm,
				       const u8 *key, unsigned int keylen)
{
	return crypto_cipher_crt(tfm)->cit_setkey(crypto_cipher_tfm(tfm),
						  key, keylen);
}

static inline void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
					     u8 *dst, const u8 *src)
{
	crypto_cipher_crt(tfm)->cit_encrypt_one(crypto_cipher_tfm(tfm),
						dst, src);
}

static inline void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
					     u8 *dst, const u8 *src)
{
	crypto_cipher_crt(tfm)->cit_decrypt_one(crypto_cipher_tfm(tfm),
						dst, src);
}
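/*
 * Illustrative sketch (not part of this header): the single-block cipher
 * interface processes exactly one block of crypto_cipher_blocksize() bytes
 * with no chaining mode or IV.  "aes" and the key/block buffers are
 * assumptions for illustration.
 *
 *	struct crypto_cipher *tfm;
 *	u8 block[16];
 *	int err;
 *
 *	tfm = crypto_alloc_cipher("aes", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_cipher_setkey(tfm, key, keylen);
 *	crypto_cipher_encrypt_one(tfm, block, block);
 *
 *	crypto_free_cipher(tfm);
 */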
static inline struct crypto_hash *__crypto_hash_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_hash *)tfm;
}

static inline struct crypto_hash *crypto_hash_cast(struct crypto_tfm *tfm)
{
	BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_HASH) &
	       CRYPTO_ALG_TYPE_HASH_MASK);
	return __crypto_hash_cast(tfm);
}

static inline struct crypto_hash *crypto_alloc_hash(const char *alg_name,
						    u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	mask &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_HASH;
	mask |= CRYPTO_ALG_TYPE_HASH_MASK;

	return __crypto_hash_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_hash_tfm(struct crypto_hash *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_hash(struct crypto_hash *tfm)
{
	crypto_free_tfm(crypto_hash_tfm(tfm));
}

static inline int crypto_has_hash(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	mask &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_HASH;
	mask |= CRYPTO_ALG_TYPE_HASH_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline struct hash_tfm *crypto_hash_crt(struct crypto_hash *tfm)
{
	return &crypto_hash_tfm(tfm)->crt_hash;
}

static inline unsigned int crypto_hash_blocksize(struct crypto_hash *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_hash_tfm(tfm));
}

static inline unsigned int crypto_hash_alignmask(struct crypto_hash *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_hash_tfm(tfm));
}

static inline unsigned int crypto_hash_digestsize(struct crypto_hash *tfm)
{
	return crypto_hash_crt(tfm)->digestsize;
}

static inline u32 crypto_hash_get_flags(struct crypto_hash *tfm)
{
	return crypto_tfm_get_flags(crypto_hash_tfm(tfm));
}

static inline void crypto_hash_set_flags(struct crypto_hash *tfm, u32 flags)
{
	crypto_tfm_set_flags(crypto_hash_tfm(tfm), flags);
}

static inline void crypto_hash_clear_flags(struct crypto_hash *tfm, u32 flags)
{
	crypto_tfm_clear_flags(crypto_hash_tfm(tfm), flags);
}

static inline int crypto_hash_init(struct hash_desc *desc)
{
	return crypto_hash_crt(desc->tfm)->init(desc);
}

static inline int crypto_hash_update(struct hash_desc *desc,
				     struct scatterlist *sg,
				     unsigned int nbytes)
{
	return crypto_hash_crt(desc->tfm)->update(desc, sg, nbytes);
}

static inline int crypto_hash_final(struct hash_desc *desc, u8 *out)
{
	return crypto_hash_crt(desc->tfm)->final(desc, out);
}

static inline int crypto_hash_digest(struct hash_desc *desc,
				     struct scatterlist *sg,
				     unsigned int nbytes, u8 *out)
{
	return crypto_hash_crt(desc->tfm)->digest(desc, sg, nbytes, out);
}

static inline int crypto_hash_setkey(struct crypto_hash *hash,
				     const u8 *key, unsigned int keylen)
{
	return crypto_hash_crt(hash)->setkey(hash, key, keylen);
}
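/*
 * Illustrative sketch (not part of this header): one-shot hashing over a
 * scatterlist with the hash interface.  "hmac(sha1)" and the key/data
 * buffers are assumptions for illustration; crypto_hash_setkey() is only
 * needed for keyed hashes, and out must hold crypto_hash_digestsize() bytes.
 *
 *	struct crypto_hash *tfm;
 *	struct hash_desc desc;
 *	struct scatterlist sg;
 *	u8 out[20];
 *	int err;
 *
 *	tfm = crypto_alloc_hash("hmac(sha1)", 0, CRYPTO_ALG_ASYNC);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_hash_setkey(tfm, key, keylen);
 *
 *	desc.tfm = tfm;
 *	desc.flags = 0;
 *	sg_init_one(&sg, data, datalen);
 *	err = crypto_hash_digest(&desc, &sg, datalen, out);
 *
 *	crypto_free_hash(tfm);
 */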
static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_comp *)tfm;
}

static inline struct crypto_comp *crypto_comp_cast(struct crypto_tfm *tfm)
{
	BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_COMPRESS) &
	       CRYPTO_ALG_TYPE_MASK);
	return __crypto_comp_cast(tfm);
}

static inline struct crypto_comp *crypto_alloc_comp(const char *alg_name,
						    u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_comp_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_comp_tfm(struct crypto_comp *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_comp(struct crypto_comp *tfm)
{
	crypto_free_tfm(crypto_comp_tfm(tfm));
}

static inline int crypto_has_comp(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline const char *crypto_comp_name(struct crypto_comp *tfm)
{
	return crypto_tfm_alg_name(crypto_comp_tfm(tfm));
}

static inline struct compress_tfm *crypto_comp_crt(struct crypto_comp *tfm)
{
	return &crypto_comp_tfm(tfm)->crt_compress;
}

static inline int crypto_comp_compress(struct crypto_comp *tfm,
				       const u8 *src, unsigned int slen,
				       u8 *dst, unsigned int *dlen)
{
	return crypto_comp_crt(tfm)->cot_compress(crypto_comp_tfm(tfm),
						  src, slen, dst, dlen);
}

static inline int crypto_comp_decompress(struct crypto_comp *tfm,
					 const u8 *src, unsigned int slen,
					 u8 *dst, unsigned int *dlen)
{
	return crypto_comp_crt(tfm)->cot_decompress(crypto_comp_tfm(tfm),
						    src, slen, dst, dlen);
}

#endif	/* _LINUX_CRYPTO_H */