// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Handle async block requests by crypto hardware engine.
 *
 * Copyright (C) 2016 Linaro, Inc.
 *
 * Author: Baolin Wang <[email protected]>
 */

#include <crypto/internal/aead.h>
#include <crypto/internal/akcipher.h>
#include <crypto/internal/engine.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/kpp.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/delay.h>
#include <linux/device.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <uapi/linux/sched/types.h>
#include "internal.h"

#define CRYPTO_ENGINE_MAX_QLEN 10

/* Temporary algorithm flag used to indicate an updated driver. */
#define CRYPTO_ALG_ENGINE 0x200

struct crypto_engine_alg {
	struct crypto_alg base;
	struct crypto_engine_op op;
};

/**
 * crypto_finalize_request - finalize one request if the request is done
 * @engine: the hardware engine
 * @req: the request that needs to be finalized
 * @err: error number
 */
static void crypto_finalize_request(struct crypto_engine *engine,
				    struct crypto_async_request *req, int err)
{
	unsigned long flags;

	/*
	 * If the hardware cannot enqueue more requests
	 * and the retry mechanism is not supported,
	 * make sure we are completing the current request.
	 */
	if (!engine->retry_support) {
		spin_lock_irqsave(&engine->queue_lock, flags);
		if (engine->cur_req == req)
			engine->cur_req = NULL;
		spin_unlock_irqrestore(&engine->queue_lock, flags);
	}

	lockdep_assert_in_softirq();
	crypto_request_complete(req, err);

	kthread_queue_work(engine->kworker, &engine->pump_requests);
}

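/*
 * Completion-path sketch (not part of this file): a hypothetical driver
 * that finished a request out of line would hand it back from its
 * completion handler through one of the crypto_finalize_*_request()
 * wrappers below, which funnel into crypto_finalize_request() above.
 * All "mydev" names are assumptions for illustration:
 *
 *	static void mydev_done_tasklet(unsigned long data)
 *	{
 *		struct mydev *dd = (struct mydev *)data;
 *		int err = dd->hw_status ? -EIO : 0;
 *
 *		// Runs in softirq context, as lockdep_assert_in_softirq()
 *		// above expects.
 *		crypto_finalize_skcipher_request(dd->engine, dd->req, err);
 *	}
 */
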
/**
 * crypto_pump_requests - dequeue one request from engine queue to process
 * @engine: the hardware engine
 * @in_kthread: true if we are in the context of the request pump thread
 *
 * This function checks if there is any request in the engine queue that
 * needs processing and, if so, calls out to the driver to initialize the
 * hardware and handle each request.
 */
static void crypto_pump_requests(struct crypto_engine *engine,
				 bool in_kthread)
{
	struct crypto_async_request *async_req, *backlog;
	struct crypto_engine_alg *alg;
	struct crypto_engine_op *op;
	unsigned long flags;
	bool was_busy = false;
	int ret;

	spin_lock_irqsave(&engine->queue_lock, flags);

	/* Make sure we are not already running a request */
	if (!engine->retry_support && engine->cur_req)
		goto out;

	/* If another context is idling then defer */
	if (engine->idling) {
		kthread_queue_work(engine->kworker, &engine->pump_requests);
		goto out;
	}

	/* Check if the engine queue is idle */
	if (!crypto_queue_len(&engine->queue) || !engine->running) {
		if (!engine->busy)
			goto out;

		/* Only do teardown in the thread */
		if (!in_kthread) {
			kthread_queue_work(engine->kworker,
					   &engine->pump_requests);
			goto out;
		}

		engine->busy = false;
		engine->idling = true;
		spin_unlock_irqrestore(&engine->queue_lock, flags);

		if (engine->unprepare_crypt_hardware &&
		    engine->unprepare_crypt_hardware(engine))
			dev_err(engine->dev, "failed to unprepare crypt hardware\n");

		spin_lock_irqsave(&engine->queue_lock, flags);
		engine->idling = false;
		goto out;
	}

start_request:
	/* Get the first request from the engine queue to handle */
	backlog = crypto_get_backlog(&engine->queue);
	async_req = crypto_dequeue_request(&engine->queue);
	if (!async_req)
		goto out;

	/*
	 * If hardware doesn't support the retry mechanism,
	 * keep track of the request we are processing now.
	 * We'll need it on completion (crypto_finalize_request).
	 */
	if (!engine->retry_support)
		engine->cur_req = async_req;

	if (engine->busy)
		was_busy = true;
	else
		engine->busy = true;

	spin_unlock_irqrestore(&engine->queue_lock, flags);

	/* At this point we have successfully dequeued a request to process */
	if (!was_busy && engine->prepare_crypt_hardware) {
		ret = engine->prepare_crypt_hardware(engine);
		if (ret) {
			dev_err(engine->dev, "failed to prepare crypt hardware\n");
			goto req_err_1;
		}
	}

	if (async_req->tfm->__crt_alg->cra_flags & CRYPTO_ALG_ENGINE) {
		alg = container_of(async_req->tfm->__crt_alg,
				   struct crypto_engine_alg, base);
		op = &alg->op;
	} else {
		dev_err(engine->dev, "failed to do request\n");
		ret = -EINVAL;
		goto req_err_1;
	}

	ret = op->do_one_request(engine, async_req);

	/* Request was not executed successfully by the hardware */
	if (ret < 0) {
		/*
		 * If the hardware queue is full (-ENOSPC), requeue the
		 * request regardless of the backlog flag.
		 * Otherwise, unprepare and complete the request.
		 */
		if (!engine->retry_support ||
		    (ret != -ENOSPC)) {
			dev_err(engine->dev,
				"Failed to do one request from queue: %d\n",
				ret);
			goto req_err_1;
		}
		spin_lock_irqsave(&engine->queue_lock, flags);
		/*
		 * If hardware was unable to execute the request, enqueue it
		 * back in front of the crypto-engine queue, to keep the
		 * order of requests.
		 */
		crypto_enqueue_request_head(&engine->queue, async_req);

		kthread_queue_work(engine->kworker, &engine->pump_requests);
		goto out;
	}

	goto retry;

req_err_1:
	crypto_request_complete(async_req, ret);

retry:
	if (backlog)
		crypto_request_complete(backlog, -EINPROGRESS);

	/* If retry mechanism is supported, send new requests to engine */
	if (engine->retry_support) {
		spin_lock_irqsave(&engine->queue_lock, flags);
		goto start_request;
	}
	return;

out:
	spin_unlock_irqrestore(&engine->queue_lock, flags);

	/*
	 * Batching requests is possible only if
	 * the hardware can enqueue multiple requests
	 */
	if (engine->do_batch_requests) {
		ret = engine->do_batch_requests(engine);
		if (ret)
			dev_err(engine->dev, "failed to do batch requests: %d\n",
				ret);
	}

	return;
}

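/*
 * Driver-side sketch of the do_one_request() contract the pump above
 * relies on, for a hypothetical "mydev" driver (mydev_* helpers are
 * assumptions): return 0 once the request has been handed to the
 * hardware, or -ENOSPC when the hardware queue is full, so that an
 * engine created with retry support requeues the request at the head of
 * its queue instead of completing it with an error:
 *
 *	static int mydev_do_one_request(struct crypto_engine *engine,
 *					void *areq)
 *	{
 *		struct skcipher_request *req = skcipher_request_cast(areq);
 *		struct mydev *dd = mydev_from_engine(engine);	// assumed
 *
 *		if (mydev_hw_queue_full(dd))	// assumed helper
 *			return -ENOSPC;		// pump will requeue
 *
 *		return mydev_submit(dd, req);	// 0 on success, -errno on error
 *	}
 */
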
static void crypto_pump_work(struct kthread_work *work)
{
	struct crypto_engine *engine =
		container_of(work, struct crypto_engine, pump_requests);

	crypto_pump_requests(engine, true);
}

/**
 * crypto_transfer_request - transfer the new request into the engine queue
 * @engine: the hardware engine
 * @req: the request that needs to be listed into the engine queue
 * @need_pump: indicates whether to queue the pump of request to kthread_work
 */
static int crypto_transfer_request(struct crypto_engine *engine,
				   struct crypto_async_request *req,
				   bool need_pump)
{
	unsigned long flags;
	int ret;

	spin_lock_irqsave(&engine->queue_lock, flags);

	if (!engine->running) {
		spin_unlock_irqrestore(&engine->queue_lock, flags);
		return -ESHUTDOWN;
	}

	ret = crypto_enqueue_request(&engine->queue, req);

	if (!engine->busy && need_pump)
		kthread_queue_work(engine->kworker, &engine->pump_requests);

	spin_unlock_irqrestore(&engine->queue_lock, flags);
	return ret;
}

/**
 * crypto_transfer_request_to_engine - transfer one request into
 * the engine queue
 * @engine: the hardware engine
 * @req: the request that needs to be listed into the engine queue
 */
static int crypto_transfer_request_to_engine(struct crypto_engine *engine,
					     struct crypto_async_request *req)
{
	return crypto_transfer_request(engine, req, true);
}

/**
 * crypto_transfer_aead_request_to_engine - transfer one aead_request
 * into the engine queue
 * @engine: the hardware engine
 * @req: the request that needs to be listed into the engine queue
 */
int crypto_transfer_aead_request_to_engine(struct crypto_engine *engine,
					   struct aead_request *req)
{
	return crypto_transfer_request_to_engine(engine, &req->base);
}
EXPORT_SYMBOL_GPL(crypto_transfer_aead_request_to_engine);

/**
 * crypto_transfer_akcipher_request_to_engine - transfer one akcipher_request
 * into the engine queue
 * @engine: the hardware engine
 * @req: the request that needs to be listed into the engine queue
 */
int crypto_transfer_akcipher_request_to_engine(struct crypto_engine *engine,
					       struct akcipher_request *req)
{
	return crypto_transfer_request_to_engine(engine, &req->base);
}
EXPORT_SYMBOL_GPL(crypto_transfer_akcipher_request_to_engine);

/**
 * crypto_transfer_hash_request_to_engine - transfer one ahash_request
 * into the engine queue
 * @engine: the hardware engine
 * @req: the request that needs to be listed into the engine queue
 */
int crypto_transfer_hash_request_to_engine(struct crypto_engine *engine,
					   struct ahash_request *req)
{
	return crypto_transfer_request_to_engine(engine, &req->base);
}
EXPORT_SYMBOL_GPL(crypto_transfer_hash_request_to_engine);

/**
 * crypto_transfer_kpp_request_to_engine - transfer one kpp_request
 * into the engine queue
 * @engine: the hardware engine
 * @req: the request that needs to be listed into the engine queue
 */
int crypto_transfer_kpp_request_to_engine(struct crypto_engine *engine,
					  struct kpp_request *req)
{
	return crypto_transfer_request_to_engine(engine, &req->base);
}
EXPORT_SYMBOL_GPL(crypto_transfer_kpp_request_to_engine);

/**
 * crypto_transfer_skcipher_request_to_engine - transfer one skcipher_request
 * into the engine queue
 * @engine: the hardware engine
 * @req: the request that needs to be listed into the engine queue
 */
int crypto_transfer_skcipher_request_to_engine(struct crypto_engine *engine,
					       struct skcipher_request *req)
{
	return crypto_transfer_request_to_engine(engine, &req->base);
}
EXPORT_SYMBOL_GPL(crypto_transfer_skcipher_request_to_engine);

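/*
 * Submission-path sketch: a driver's algorithm entry point typically does
 * no work itself and only queues the request to the engine, e.g. (the
 * mydev_* names are assumptions for illustration):
 *
 *	static int mydev_skcipher_encrypt(struct skcipher_request *req)
 *	{
 *		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 *		struct mydev_ctx *ctx = crypto_skcipher_ctx(tfm);
 *
 *		return crypto_transfer_skcipher_request_to_engine(ctx->engine,
 *								  req);
 *	}
 *
 * The engine then completes the request asynchronously through
 * crypto_finalize_skcipher_request() below.
 */
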
/**
 * crypto_finalize_aead_request - finalize one aead_request if
 * the request is done
 * @engine: the hardware engine
 * @req: the request that needs to be finalized
 * @err: error number
 */
void crypto_finalize_aead_request(struct crypto_engine *engine,
				  struct aead_request *req, int err)
{
	return crypto_finalize_request(engine, &req->base, err);
}
EXPORT_SYMBOL_GPL(crypto_finalize_aead_request);

/**
 * crypto_finalize_akcipher_request - finalize one akcipher_request if
 * the request is done
 * @engine: the hardware engine
 * @req: the request that needs to be finalized
 * @err: error number
 */
void crypto_finalize_akcipher_request(struct crypto_engine *engine,
				      struct akcipher_request *req, int err)
{
	return crypto_finalize_request(engine, &req->base, err);
}
EXPORT_SYMBOL_GPL(crypto_finalize_akcipher_request);

/**
 * crypto_finalize_hash_request - finalize one ahash_request if
 * the request is done
 * @engine: the hardware engine
 * @req: the request that needs to be finalized
 * @err: error number
 */
void crypto_finalize_hash_request(struct crypto_engine *engine,
				  struct ahash_request *req, int err)
{
	return crypto_finalize_request(engine, &req->base, err);
}
EXPORT_SYMBOL_GPL(crypto_finalize_hash_request);

/**
 * crypto_finalize_kpp_request - finalize one kpp_request if the request is done
 * @engine: the hardware engine
 * @req: the request that needs to be finalized
 * @err: error number
 */
void crypto_finalize_kpp_request(struct crypto_engine *engine,
				 struct kpp_request *req, int err)
{
	return crypto_finalize_request(engine, &req->base, err);
}
EXPORT_SYMBOL_GPL(crypto_finalize_kpp_request);

/**
 * crypto_finalize_skcipher_request - finalize one skcipher_request if
 * the request is done
 * @engine: the hardware engine
 * @req: the request that needs to be finalized
 * @err: error number
 */
void crypto_finalize_skcipher_request(struct crypto_engine *engine,
				      struct skcipher_request *req, int err)
{
	return crypto_finalize_request(engine, &req->base, err);
}
EXPORT_SYMBOL_GPL(crypto_finalize_skcipher_request);

/**
 * crypto_engine_start - start the hardware engine
 * @engine: the hardware engine that needs to be started
 *
 * Return: 0 on success, else a negative error code.
 */
int crypto_engine_start(struct crypto_engine *engine)
{
	unsigned long flags;

	spin_lock_irqsave(&engine->queue_lock, flags);

	if (engine->running || engine->busy) {
		spin_unlock_irqrestore(&engine->queue_lock, flags);
		return -EBUSY;
	}

	engine->running = true;
	spin_unlock_irqrestore(&engine->queue_lock, flags);

	kthread_queue_work(engine->kworker, &engine->pump_requests);

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_engine_start);

/**
 * crypto_engine_stop - stop the hardware engine
 * @engine: the hardware engine that needs to be stopped
 *
 * Return: 0 on success, else a negative error code.
 */
int crypto_engine_stop(struct crypto_engine *engine)
{
	unsigned long flags;
	unsigned int limit = 500;
	int ret = 0;

	spin_lock_irqsave(&engine->queue_lock, flags);

	/*
	 * If the engine queue is not empty or the engine is busy, we need to
	 * wait for a while (up to 500 * 20ms here) for the engine queue's
	 * requests to be pumped.
	 */
	while ((crypto_queue_len(&engine->queue) || engine->busy) && limit--) {
		spin_unlock_irqrestore(&engine->queue_lock, flags);
		msleep(20);
		spin_lock_irqsave(&engine->queue_lock, flags);
	}

	if (crypto_queue_len(&engine->queue) || engine->busy)
		ret = -EBUSY;
	else
		engine->running = false;

	spin_unlock_irqrestore(&engine->queue_lock, flags);

	if (ret)
		dev_warn(engine->dev, "could not stop engine\n");

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_engine_stop);

/**
 * crypto_engine_alloc_init_and_set - allocate crypto hardware engine structure
 * and initialize it by setting the maximum number of entries in the software
 * crypto-engine queue.
 * @dev: the device attached with one hardware engine
 * @retry_support: whether hardware has support for retry mechanism
 * @cbk_do_batch: pointer to a callback function to be invoked when executing
 *                a batch of requests.
 *                This has the form:
 *                callback(struct crypto_engine *engine)
 *                where:
 *                engine: the crypto engine structure.
 * @rt: whether this queue is set to run as a realtime task
 * @qlen: maximum size of the crypto-engine queue
 *
 * This must be called from context that can sleep.
 * Return: the crypto engine structure on success, else NULL.
 */
struct crypto_engine *crypto_engine_alloc_init_and_set(struct device *dev,
						       bool retry_support,
						       int (*cbk_do_batch)(struct crypto_engine *engine),
						       bool rt, int qlen)
{
	struct crypto_engine *engine;

	if (!dev)
		return NULL;

	engine = devm_kzalloc(dev, sizeof(*engine), GFP_KERNEL);
	if (!engine)
		return NULL;

	engine->dev = dev;
	engine->rt = rt;
	engine->running = false;
	engine->busy = false;
	engine->idling = false;
	engine->retry_support = retry_support;
	engine->priv_data = dev;
	/*
	 * Batching requests is possible only if
	 * the hardware has support for the retry mechanism.
	 */
	engine->do_batch_requests = retry_support ? cbk_do_batch : NULL;

	snprintf(engine->name, sizeof(engine->name),
		 "%s-engine", dev_name(dev));

	crypto_init_queue(&engine->queue, qlen);
	spin_lock_init(&engine->queue_lock);

	engine->kworker = kthread_run_worker(0, "%s", engine->name);
	if (IS_ERR(engine->kworker)) {
		dev_err(dev, "failed to create crypto request pump task\n");
		return NULL;
	}
	kthread_init_work(&engine->pump_requests, crypto_pump_work);

	if (engine->rt) {
		dev_info(dev, "will run requests pump with realtime priority\n");
		sched_set_fifo(engine->kworker->task);
	}

	return engine;
}
EXPORT_SYMBOL_GPL(crypto_engine_alloc_init_and_set);
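
/*
 * Usage sketch, assuming a driver whose hardware can take several requests
 * and report -ENOSPC when full (the qlen of 128 and the mydev_* names are
 * illustrative assumptions, not values from this file):
 *
 *	dd->engine = crypto_engine_alloc_init_and_set(dev,
 *						      true,	// retry support
 *						      mydev_do_batch,
 *						      false,	// no RT pump
 *						      128);
 *	if (!dd->engine)
 *		return -ENOMEM;
 *	ret = crypto_engine_start(dd->engine);
 */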

/**
 * crypto_engine_alloc_init - allocate crypto hardware engine structure and
 * initialize it.
 * @dev: the device attached with one hardware engine
 * @rt: whether this queue is set to run as a realtime task
 *
 * This must be called from context that can sleep.
 * Return: the crypto engine structure on success, else NULL.
 */
struct crypto_engine *crypto_engine_alloc_init(struct device *dev, bool rt)
{
	return crypto_engine_alloc_init_and_set(dev, false, NULL, rt,
						CRYPTO_ENGINE_MAX_QLEN);
}
EXPORT_SYMBOL_GPL(crypto_engine_alloc_init);

/**
 * crypto_engine_exit - free the resources of the hardware engine on exit
 * @engine: the hardware engine that needs to be freed
 */
void crypto_engine_exit(struct crypto_engine *engine)
{
	int ret;

	ret = crypto_engine_stop(engine);
	if (ret)
		return;

	kthread_destroy_worker(engine->kworker);
}
EXPORT_SYMBOL_GPL(crypto_engine_exit);

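/*
 * Lifecycle sketch for the simple (non-retry) case, with assumed mydev_*
 * and dd names: allocate and start the engine in probe, tear it down in
 * remove.
 *
 *	// probe
 *	dd->engine = crypto_engine_alloc_init(dev, true);
 *	if (!dd->engine)
 *		return -ENOMEM;
 *	ret = crypto_engine_start(dd->engine);
 *	if (ret)
 *		crypto_engine_exit(dd->engine);
 *
 *	// remove
 *	crypto_engine_exit(dd->engine);	// drains the queue, frees the kworker
 */
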
int crypto_engine_register_aead(struct aead_engine_alg *alg)
{
	if (!alg->op.do_one_request)
		return -EINVAL;

	alg->base.base.cra_flags |= CRYPTO_ALG_ENGINE;

	return crypto_register_aead(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_register_aead);

void crypto_engine_unregister_aead(struct aead_engine_alg *alg)
{
	crypto_unregister_aead(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_unregister_aead);

int crypto_engine_register_aeads(struct aead_engine_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_engine_register_aead(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	crypto_engine_unregister_aeads(algs, i);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_engine_register_aeads);

void crypto_engine_unregister_aeads(struct aead_engine_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_engine_unregister_aead(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_engine_unregister_aeads);

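/*
 * Registration sketch: a driver wraps its algorithm in the *_engine_alg
 * type so crypto_pump_requests() can find its do_one_request() op via the
 * CRYPTO_ALG_ENGINE flag set above. Hypothetical example (the mydev_*
 * names are assumptions and the usual algorithm fields are elided):
 *
 *	static struct aead_engine_alg mydev_gcm = {
 *		.base = {
 *			// usual struct aead_alg fields: setkey, encrypt,
 *			// decrypt, base.cra_name, ... (elided)
 *		},
 *		.op = {
 *			.do_one_request = mydev_aead_do_one_request,
 *		},
 *	};
 *
 *	ret = crypto_engine_register_aead(&mydev_gcm);
 */
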
int crypto_engine_register_ahash(struct ahash_engine_alg *alg)
{
	if (!alg->op.do_one_request)
		return -EINVAL;

	alg->base.halg.base.cra_flags |= CRYPTO_ALG_ENGINE;

	return crypto_register_ahash(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_register_ahash);

void crypto_engine_unregister_ahash(struct ahash_engine_alg *alg)
{
	crypto_unregister_ahash(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_unregister_ahash);

int crypto_engine_register_ahashes(struct ahash_engine_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_engine_register_ahash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	crypto_engine_unregister_ahashes(algs, i);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_engine_register_ahashes);

void crypto_engine_unregister_ahashes(struct ahash_engine_alg *algs,
				      int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_engine_unregister_ahash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_engine_unregister_ahashes);

int crypto_engine_register_akcipher(struct akcipher_engine_alg *alg)
{
	if (!alg->op.do_one_request)
		return -EINVAL;

	alg->base.base.cra_flags |= CRYPTO_ALG_ENGINE;

	return crypto_register_akcipher(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_register_akcipher);

void crypto_engine_unregister_akcipher(struct akcipher_engine_alg *alg)
{
	crypto_unregister_akcipher(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_unregister_akcipher);

int crypto_engine_register_kpp(struct kpp_engine_alg *alg)
{
	if (!alg->op.do_one_request)
		return -EINVAL;

	alg->base.base.cra_flags |= CRYPTO_ALG_ENGINE;

	return crypto_register_kpp(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_register_kpp);

void crypto_engine_unregister_kpp(struct kpp_engine_alg *alg)
{
	crypto_unregister_kpp(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_unregister_kpp);

int crypto_engine_register_skcipher(struct skcipher_engine_alg *alg)
{
	if (!alg->op.do_one_request)
		return -EINVAL;

	alg->base.base.cra_flags |= CRYPTO_ALG_ENGINE;

	return crypto_register_skcipher(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_register_skcipher);

void crypto_engine_unregister_skcipher(struct skcipher_engine_alg *alg)
{
	crypto_unregister_skcipher(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_unregister_skcipher);

int crypto_engine_register_skciphers(struct skcipher_engine_alg *algs,
				     int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_engine_register_skcipher(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	crypto_engine_unregister_skciphers(algs, i);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_engine_register_skciphers);

void crypto_engine_unregister_skciphers(struct skcipher_engine_alg *algs,
					int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_engine_unregister_skcipher(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_engine_unregister_skciphers);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Crypto hardware engine framework");