/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Crypto engine API
 *
 * Copyright (c) 2016 Baolin Wang <[email protected]>
 */
#ifndef _CRYPTO_ENGINE_H
#define _CRYPTO_ENGINE_H

#include <crypto/algapi.h>
#include <linux/kthread.h>
#include <linux/spinlock_types.h>
#include <linux/types.h>

struct aead_request;
struct ahash_request;
struct akcipher_request;
struct device;
struct kpp_request;
struct skcipher_request;

#define ENGINE_NAME_LEN	30
/*
 * struct crypto_engine - crypto hardware engine
 * @name: the engine name
 * @idling: the engine is entering idle state
 * @busy: request pump is busy
 * @running: the engine is running and processing requests
 * @retry_support: indication that the hardware allows re-execution
 * of a failed backlog request; such a request is put back on the
 * crypto-engine queue, in head position, to keep ordering
 * @list: link with the global crypto engine list
 * @queue_lock: spinlock to synchronise access to request queue
 * @queue: the crypto queue of the engine
 * @dev: the device backing this engine
 * @rt: whether this queue is set to run as a realtime task
 * @prepare_crypt_hardware: a request will soon arrive from the queue
 * so the subsystem requests the driver to prepare the hardware
 * by issuing this call
 * @unprepare_crypt_hardware: there are currently no more requests on the
 * queue so the subsystem notifies the driver that it may relax the
 * hardware by issuing this call
 * @do_batch_requests: execute a batch of requests; only used when the
 * driver declares support for processing multiple requests at once
 * @kworker: kthread worker struct for request pump
 * @pump_requests: work struct for scheduling work to the request pump
 * @priv_data: the engine private data
 * @cur_req: the request currently being processed
 */
struct crypto_engine {
	char			name[ENGINE_NAME_LEN];
	bool			idling;
	bool			busy;
	bool			running;

	bool			retry_support;

	struct list_head	list;
	spinlock_t		queue_lock;
	struct crypto_queue	queue;
	struct device		*dev;

	bool			rt;

	int (*prepare_crypt_hardware)(struct crypto_engine *engine);
	int (*unprepare_crypt_hardware)(struct crypto_engine *engine);
	int (*do_batch_requests)(struct crypto_engine *engine);

	struct kthread_worker		*kworker;
	struct kthread_work		pump_requests;

	void				*priv_data;
	struct crypto_async_request	*cur_req;
};

/*
 * struct crypto_engine_op - crypto hardware engine operations
 * @do_one_request: process (e.g. encrypt, decrypt or hash) the current request
 */
struct crypto_engine_op {
	int (*do_one_request)(struct crypto_engine *engine,
			      void *areq);
};

struct crypto_engine_ctx {
	struct crypto_engine_op op;
};
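
/*
 * Illustrative sketch only (not part of this header's API): a driver
 * typically embeds struct crypto_engine_ctx at the start of its transform
 * context so the engine core can find the ops, and points do_one_request
 * at its own handler.  The names my_skcipher_ctx, my_do_one_request and
 * my_init_tfm below are hypothetical.
 *
 *	struct my_skcipher_ctx {
 *		struct crypto_engine_ctx enginectx;	// first member, so the
 *							// engine core can find it
 *		// ... driver-specific keys/state ...
 *	};
 *
 *	// The engine pump hands the queued request back as a void pointer;
 *	// the driver recovers the concrete request type it queued.
 *	static int my_do_one_request(struct crypto_engine *engine, void *areq)
 *	{
 *		struct skcipher_request *req =
 *			container_of(areq, struct skcipher_request, base);
 *
 *		// ... program the hardware; the request completes later,
 *		// typically from an interrupt handler ...
 *		return 0;
 *	}
 *
 *	static int my_init_tfm(struct crypto_skcipher *tfm)
 *	{
 *		struct my_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
 *
 *		ctx->enginectx.op.do_one_request = my_do_one_request;
 *		return 0;
 *	}
 */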

int crypto_transfer_aead_request_to_engine(struct crypto_engine *engine,
					   struct aead_request *req);
int crypto_transfer_akcipher_request_to_engine(struct crypto_engine *engine,
					       struct akcipher_request *req);
int crypto_transfer_hash_request_to_engine(struct crypto_engine *engine,
					   struct ahash_request *req);
int crypto_transfer_kpp_request_to_engine(struct crypto_engine *engine,
					  struct kpp_request *req);
int crypto_transfer_skcipher_request_to_engine(struct crypto_engine *engine,
					       struct skcipher_request *req);
void crypto_finalize_aead_request(struct crypto_engine *engine,
				  struct aead_request *req, int err);
void crypto_finalize_akcipher_request(struct crypto_engine *engine,
				      struct akcipher_request *req, int err);
void crypto_finalize_hash_request(struct crypto_engine *engine,
				  struct ahash_request *req, int err);
void crypto_finalize_kpp_request(struct crypto_engine *engine,
				 struct kpp_request *req, int err);
void crypto_finalize_skcipher_request(struct crypto_engine *engine,
				      struct skcipher_request *req, int err);
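
/*
 * Usage sketch (assumptions, not code shipped with this header): the
 * algorithm entry point hands the request over to the engine, and the
 * driver calls the matching crypto_finalize_*_request() once the hardware
 * has finished with it.  struct my_dev, dd->engine, dd->cur_req,
 * my_dev_from_request(), my_encrypt() and my_complete() are hypothetical
 * driver names.
 *
 *	static int my_encrypt(struct skcipher_request *req)
 *	{
 *		struct my_dev *dd = my_dev_from_request(req);	// hypothetical lookup
 *
 *		// Queue the request; the engine pump later calls the
 *		// driver's do_one_request() with it.
 *		return crypto_transfer_skcipher_request_to_engine(dd->engine, req);
 *	}
 *
 *	// Completion path (e.g. interrupt handler or tasklet): tell the
 *	// engine the request is done so it can run the request's completion
 *	// and pump the next queued request.
 *	static void my_complete(struct my_dev *dd, int err)
 *	{
 *		crypto_finalize_skcipher_request(dd->engine, dd->cur_req, err);
 *	}
 */
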
int crypto_engine_start(struct crypto_engine *engine);
int crypto_engine_stop(struct crypto_engine *engine);
struct crypto_engine *crypto_engine_alloc_init(struct device *dev, bool rt);
struct crypto_engine *crypto_engine_alloc_init_and_set(struct device *dev,
						       bool retry_support,
						       int (*cbk_do_batch)(struct crypto_engine *engine),
						       bool rt, int qlen);
int crypto_engine_exit(struct crypto_engine *engine);
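
/*
 * Lifecycle sketch (assumptions, not code shipped with this header): a
 * driver usually allocates and starts one engine per device at probe time
 * and tears it down on remove.  my_probe(), my_remove() and struct my_dev
 * are hypothetical.
 *
 *	static int my_probe(struct platform_device *pdev)
 *	{
 *		struct my_dev *dd = devm_kzalloc(&pdev->dev, sizeof(*dd), GFP_KERNEL);
 *
 *		if (!dd)
 *			return -ENOMEM;
 *		platform_set_drvdata(pdev, dd);
 *
 *		dd->engine = crypto_engine_alloc_init(&pdev->dev, true);
 *		if (!dd->engine)
 *			return -ENOMEM;
 *
 *		return crypto_engine_start(dd->engine);
 *	}
 *
 *	static void my_remove(struct platform_device *pdev)
 *	{
 *		struct my_dev *dd = platform_get_drvdata(pdev);
 *
 *		crypto_engine_stop(dd->engine);
 *		crypto_engine_exit(dd->engine);
 *	}
 *
 * Passing rt = true to crypto_engine_alloc_init() runs the request pump as
 * a realtime kthread; false keeps it at normal priority.
 */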

#endif /* _CRYPTO_ENGINE_H */