/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Software async crypto daemon
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk ([email protected])
 *             Adrian Hoban <[email protected]>
 *             Gabriele Paoloni <[email protected]>
 *             Aidan O'Mahony ([email protected])
 *    Copyright (c) 2010, Intel Corporation.
 */

#ifndef _CRYPTO_CRYPT_H
#define _CRYPTO_CRYPT_H

#include <linux/types.h>

#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>

struct cryptd_skcipher {
	struct crypto_skcipher base;
};

/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask);
struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm);
/* Must be called without moving CPUs. */
bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm);
void cryptd_free_skcipher(struct cryptd_skcipher *tfm);

struct cryptd_ahash {
	struct crypto_ahash base;
};

static inline struct cryptd_ahash *__cryptd_ahash_cast(
	struct crypto_ahash *tfm)
{
	return (struct cryptd_ahash *)tfm;
}

/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask);
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm);
struct shash_desc *cryptd_shash_desc(struct ahash_request *req);
/* Must be called without moving CPUs. */
bool cryptd_ahash_queued(struct cryptd_ahash *tfm);
void cryptd_free_ahash(struct cryptd_ahash *tfm);

struct cryptd_aead {
	struct crypto_aead base;
};

static inline struct cryptd_aead *__cryptd_aead_cast(
	struct crypto_aead *tfm)
{
	return (struct cryptd_aead *)tfm;
}

struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask);

struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm);
/* Must be called without moving CPUs. */
bool cryptd_aead_queued(struct cryptd_aead *tfm);

void cryptd_free_aead(struct cryptd_aead *tfm);

#endif
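
/*
 * Illustrative usage sketch (an assumption, not part of this header): a
 * minimal outline of how a driver might wrap an internal hash algorithm
 * with cryptd so that work which cannot run in the current context is
 * deferred to the cryptd workqueue.  The algorithm name "__example-hash"
 * is a placeholder; real callers also check that the SIMD/FPU context is
 * usable before taking the synchronous path, and request setup and
 * completion are omitted here.
 *
 *	struct cryptd_ahash *cryptd_tfm;
 *
 *	cryptd_tfm = cryptd_alloc_ahash("__example-hash", CRYPTO_ALG_INTERNAL,
 *					CRYPTO_ALG_INTERNAL);
 *	if (IS_ERR(cryptd_tfm))
 *		return PTR_ERR(cryptd_tfm);
 *
 *	if (!cryptd_ahash_queued(cryptd_tfm)) {
 *		// nothing pending: the request may be completed
 *		// synchronously on the underlying child transform
 *		struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);
 *	} else {
 *		// requests already queued: forward this one to the cryptd
 *		// instance so it runs in process context and ordering with
 *		// previously queued requests is preserved
 *	}
 *
 *	cryptd_free_ahash(cryptd_tfm);
 */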