1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2018 Advanced Micro Devices, Inc. All rights reserved.
3 */
4
5 #include <dirent.h>
6 #include <fcntl.h>
7 #include <stdio.h>
8 #include <string.h>
9 #include <sys/mman.h>
10 #include <sys/queue.h>
11 #include <sys/types.h>
12 #include <unistd.h>
13 #include <openssl/sha.h>
14 #include <openssl/cmac.h> /*sub key apis*/
15 #include <openssl/evp.h> /*sub key apis*/
16
17 #include <rte_hexdump.h>
18 #include <rte_memzone.h>
19 #include <rte_malloc.h>
20 #include <rte_memory.h>
21 #include <rte_spinlock.h>
22 #include <rte_string_fns.h>
23 #include <cryptodev_pmd.h>
24
25 #include "ccp_dev.h"
26 #include "ccp_crypto.h"
27 #include "ccp_pci.h"
28 #include "ccp_pmd_private.h"
29
30 #include <openssl/conf.h>
31 #include <openssl/err.h>
32 #include <openssl/hmac.h>
33
34 extern int iommu_mode;
/* Shared SHA context buffer handed to the CCP SHA engine;
 * NOTE(review): allocated/owned outside this file — confirm owner.
 */
void *sha_ctx;
/* SHA initial context values */
/* SHA-1 initial digest words, listed in reverse order (H4 first) and
 * zero-padded up to the common digest size.
 */
uint32_t ccp_sha1_init[SHA_COMMON_DIGEST_SIZE / sizeof(uint32_t)] = {
	SHA1_H4, SHA1_H3,
	SHA1_H2, SHA1_H1,
	SHA1_H0, 0x0U,
	0x0U, 0x0U,
};

/* SHA-224 initial digest words, reverse order (H7 first) */
uint32_t ccp_sha224_init[SHA256_DIGEST_SIZE / sizeof(uint32_t)] = {
	SHA224_H7, SHA224_H6,
	SHA224_H5, SHA224_H4,
	SHA224_H3, SHA224_H2,
	SHA224_H1, SHA224_H0,
};

/* SHA-256 initial digest words, reverse order (H7 first) */
uint32_t ccp_sha256_init[SHA256_DIGEST_SIZE / sizeof(uint32_t)] = {
	SHA256_H7, SHA256_H6,
	SHA256_H5, SHA256_H4,
	SHA256_H3, SHA256_H2,
	SHA256_H1, SHA256_H0,
};

/* SHA-384 initial digest words, reverse order (H7 first) */
uint64_t ccp_sha384_init[SHA512_DIGEST_SIZE / sizeof(uint64_t)] = {
	SHA384_H7, SHA384_H6,
	SHA384_H5, SHA384_H4,
	SHA384_H3, SHA384_H2,
	SHA384_H1, SHA384_H0,
};

/* SHA-512 initial digest words, reverse order (H7 first) */
uint64_t ccp_sha512_init[SHA512_DIGEST_SIZE / sizeof(uint64_t)] = {
	SHA512_H7, SHA512_H6,
	SHA512_H5, SHA512_H4,
	SHA512_H3, SHA512_H2,
	SHA512_H1, SHA512_H0,
};
71
/* MSVC has no u##L-style suffix pasting issue; elsewhere append L so the
 * 64-bit round constants get a wide-enough integer type.
 */
#if defined(_MSC_VER)
#define SHA3_CONST(x) x
#else
#define SHA3_CONST(x) x##L
#endif

/** 'Words' here refers to uint64_t */
#define SHA3_KECCAK_SPONGE_WORDS \
	(((1600) / 8) / sizeof(uint64_t))
/* Keccak-1600 sponge state plus absorb-side bookkeeping. */
typedef struct sha3_context_ {
	uint64_t saved;
	/**
	 * The portion of the input message that we
	 * didn't consume yet (accumulated little-endian)
	 */
	union {
		uint64_t s[SHA3_KECCAK_SPONGE_WORDS];
		/* Keccak's state, viewed as 25 64-bit lanes */
		uint8_t sb[SHA3_KECCAK_SPONGE_WORDS * 8];
		/**total 200 ctx size: same state viewed as raw bytes**/
	};
	unsigned int byteIndex;
	/**
	 * 0..7--the next byte after the set one
	 * (starts from 0; 0--none are buffered)
	 */
	unsigned int wordIndex;
	/**
	 * 0..24--the next word to integrate input
	 * (starts from 0)
	 */
	unsigned int capacityWords;
	/**
	 * the double size of the hash output in
	 * words (e.g. 16 for Keccak 512)
	 */
} sha3_context;

#ifndef SHA3_ROTL64
/* 64-bit rotate-left. NOTE: evaluates both arguments more than once and
 * requires 1 <= y <= 63 (y == 0 would shift by 64, undefined behavior).
 */
#define SHA3_ROTL64(x, y) \
	(((x) << (y)) | ((x) >> ((sizeof(uint64_t)*8) - (y))))
#endif
114
/* Keccak-f[1600] round constants, xored into lane (0,0) by the iota
 * step, one per round.
 */
static const uint64_t keccakf_rndc[24] = {
	SHA3_CONST(0x0000000000000001UL), SHA3_CONST(0x0000000000008082UL),
	SHA3_CONST(0x800000000000808aUL), SHA3_CONST(0x8000000080008000UL),
	SHA3_CONST(0x000000000000808bUL), SHA3_CONST(0x0000000080000001UL),
	SHA3_CONST(0x8000000080008081UL), SHA3_CONST(0x8000000000008009UL),
	SHA3_CONST(0x000000000000008aUL), SHA3_CONST(0x0000000000000088UL),
	SHA3_CONST(0x0000000080008009UL), SHA3_CONST(0x000000008000000aUL),
	SHA3_CONST(0x000000008000808bUL), SHA3_CONST(0x800000000000008bUL),
	SHA3_CONST(0x8000000000008089UL), SHA3_CONST(0x8000000000008003UL),
	SHA3_CONST(0x8000000000008002UL), SHA3_CONST(0x8000000000000080UL),
	SHA3_CONST(0x000000000000800aUL), SHA3_CONST(0x800000008000000aUL),
	SHA3_CONST(0x8000000080008081UL), SHA3_CONST(0x8000000000008080UL),
	SHA3_CONST(0x0000000080000001UL), SHA3_CONST(0x8000000080008008UL)
};

/* Rho-step rotation offsets, stored in pi-permutation walk order */
static const unsigned int keccakf_rotc[24] = {
	1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14, 27, 41, 56, 8, 25, 43, 62,
	18, 39, 61, 20, 44
};

/* Pi-step lane permutation: destination lane index for each walk step */
static const unsigned int keccakf_piln[24] = {
	10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4, 15, 23, 19, 13, 12, 2, 20,
	14, 22, 9, 6, 1
};
139
140 static enum ccp_cmd_order
ccp_get_cmd_id(const struct rte_crypto_sym_xform * xform)141 ccp_get_cmd_id(const struct rte_crypto_sym_xform *xform)
142 {
143 enum ccp_cmd_order res = CCP_CMD_NOT_SUPPORTED;
144
145 if (xform == NULL)
146 return res;
147 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
148 if (xform->next == NULL)
149 return CCP_CMD_AUTH;
150 else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
151 return CCP_CMD_HASH_CIPHER;
152 }
153 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
154 if (xform->next == NULL)
155 return CCP_CMD_CIPHER;
156 else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
157 return CCP_CMD_CIPHER_HASH;
158 }
159 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD)
160 return CCP_CMD_COMBINED;
161 return res;
162 }
163
/* partial hash using openssl */
/*
 * Run one SHA-1 compression round over a single 64-byte block and copy
 * the raw intermediate digest words out of the OpenSSL context; used to
 * precompute HMAC ipad/opad states for the CCP engine.
 * Returns 0 on success, -EFAULT if OpenSSL init fails.
 * NOTE(review): copying from &ctx assumes the h0..h4 state words sit at
 * offset 0 of SHA_CTX — true for OpenSSL's layout, re-check on upgrades.
 */
static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
{
	SHA_CTX ctx;

	if (!SHA1_Init(&ctx))
		return -EFAULT;
	SHA1_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
	return 0;
}
175
/*
 * One SHA-224 compression round over a single 64-byte block.
 * Copies SHA256_DIGEST_LENGTH (32) bytes: SHA-224 keeps a full
 * SHA-256-sized internal state, which the CCP engine needs.
 * Returns 0 on success, -EFAULT if OpenSSL init fails.
 */
static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
{
	SHA256_CTX ctx;

	if (!SHA224_Init(&ctx))
		return -EFAULT;
	SHA256_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx,
		   SHA256_DIGEST_LENGTH);
	return 0;
}
187
/*
 * One SHA-256 compression round over a single 64-byte block; copies the
 * raw 32-byte intermediate state out of the OpenSSL context.
 * Returns 0 on success, -EFAULT if OpenSSL init fails.
 */
static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
{
	SHA256_CTX ctx;

	if (!SHA256_Init(&ctx))
		return -EFAULT;
	SHA256_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx,
		   SHA256_DIGEST_LENGTH);
	return 0;
}
199
/*
 * One SHA-384 compression round over a single 128-byte block.
 * Copies SHA512_DIGEST_LENGTH (64) bytes: SHA-384 keeps a full
 * SHA-512-sized internal state, which the CCP engine needs.
 * Returns 0 on success, -EFAULT if OpenSSL init fails.
 */
static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
{
	SHA512_CTX ctx;

	if (!SHA384_Init(&ctx))
		return -EFAULT;
	SHA512_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx,
		   SHA512_DIGEST_LENGTH);
	return 0;
}
211
/*
 * One SHA-512 compression round over a single 128-byte block; copies the
 * raw 64-byte intermediate state out of the OpenSSL context.
 * Returns 0 on success, -EFAULT if OpenSSL init fails.
 */
static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
{
	SHA512_CTX ctx;

	if (!SHA512_Init(&ctx))
		return -EFAULT;
	SHA512_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx,
		   SHA512_DIGEST_LENGTH);
	return 0;
}
223
224 static void
keccakf(uint64_t s[25])225 keccakf(uint64_t s[25])
226 {
227 int i, j, round;
228 uint64_t t, bc[5];
229 #define KECCAK_ROUNDS 24
230
231 for (round = 0; round < KECCAK_ROUNDS; round++) {
232
233 /* Theta */
234 for (i = 0; i < 5; i++)
235 bc[i] = s[i] ^ s[i + 5] ^ s[i + 10] ^ s[i + 15] ^
236 s[i + 20];
237
238 for (i = 0; i < 5; i++) {
239 t = bc[(i + 4) % 5] ^ SHA3_ROTL64(bc[(i + 1) % 5], 1);
240 for (j = 0; j < 25; j += 5)
241 s[j + i] ^= t;
242 }
243
244 /* Rho Pi */
245 t = s[1];
246 for (i = 0; i < 24; i++) {
247 j = keccakf_piln[i];
248 bc[0] = s[j];
249 s[j] = SHA3_ROTL64(t, keccakf_rotc[i]);
250 t = bc[0];
251 }
252
253 /* Chi */
254 for (j = 0; j < 25; j += 5) {
255 for (i = 0; i < 5; i++)
256 bc[i] = s[j + i];
257 for (i = 0; i < 5; i++)
258 s[j + i] ^= (~bc[(i + 1) % 5]) &
259 bc[(i + 2) % 5];
260 }
261
262 /* Iota */
263 s[0] ^= keccakf_rndc[round];
264 }
265 }
266
267 static void
sha3_Init224(void * priv)268 sha3_Init224(void *priv)
269 {
270 sha3_context *ctx = (sha3_context *) priv;
271
272 memset(ctx, 0, sizeof(*ctx));
273 ctx->capacityWords = 2 * 224 / (8 * sizeof(uint64_t));
274 }
275
276 static void
sha3_Init256(void * priv)277 sha3_Init256(void *priv)
278 {
279 sha3_context *ctx = (sha3_context *) priv;
280
281 memset(ctx, 0, sizeof(*ctx));
282 ctx->capacityWords = 2 * 256 / (8 * sizeof(uint64_t));
283 }
284
285 static void
sha3_Init384(void * priv)286 sha3_Init384(void *priv)
287 {
288 sha3_context *ctx = (sha3_context *) priv;
289
290 memset(ctx, 0, sizeof(*ctx));
291 ctx->capacityWords = 2 * 384 / (8 * sizeof(uint64_t));
292 }
293
294 static void
sha3_Init512(void * priv)295 sha3_Init512(void *priv)
296 {
297 sha3_context *ctx = (sha3_context *) priv;
298
299 memset(ctx, 0, sizeof(*ctx));
300 ctx->capacityWords = 2 * 512 / (8 * sizeof(uint64_t));
301 }
302
303
/*
 * Absorb 'len' bytes of message into the Keccak sponge state.
 *
 * Partial 64-bit lanes are accumulated little-endian in ctx->saved
 * (ctx->byteIndex counts the buffered bytes); each completed lane is
 * xored into the state, and keccakf() runs every time a full rate
 * block (SHA3_KECCAK_SPONGE_WORDS - capacityWords lanes) has been
 * absorbed.  Bytes left over after the last whole lane stay buffered.
 * NOTE(review): the original comment here described the padding block
 * (0x01 || 0x00* || 0x80); padding is not applied by this function —
 * confirm the caller handles finalisation.
 */
static void
sha3_Update(void *priv, void const *bufIn, size_t len)
{
	sha3_context *ctx = (sha3_context *) priv;
	/* bytes still needed to complete the buffered partial lane */
	unsigned int old_tail = (8 - ctx->byteIndex) & 7;
	size_t words;
	unsigned int tail;
	size_t i;
	const uint8_t *buf = bufIn;

	/* not enough input even to finish the partial lane: buffer it */
	if (len < old_tail) {
		while (len--)
			ctx->saved |= (uint64_t) (*(buf++)) <<
				((ctx->byteIndex++) * 8);
		return;
	}

	/* first complete and absorb the buffered partial lane */
	if (old_tail) {
		len -= old_tail;
		while (old_tail--)
			ctx->saved |= (uint64_t) (*(buf++)) <<
				((ctx->byteIndex++) * 8);

		ctx->s[ctx->wordIndex] ^= ctx->saved;
		ctx->byteIndex = 0;
		ctx->saved = 0;
		/* rate block full: run the permutation */
		if (++ctx->wordIndex ==
		    (SHA3_KECCAK_SPONGE_WORDS - ctx->capacityWords)) {
			keccakf(ctx->s);
			ctx->wordIndex = 0;
		}
	}

	/* bulk-absorb all remaining whole lanes */
	words = len / sizeof(uint64_t);
	tail = len - words * sizeof(uint64_t);

	for (i = 0; i < words; i++, buf += sizeof(uint64_t)) {
		/* assemble the lane byte-by-byte: endian- and
		 * alignment-safe on any platform */
		const uint64_t t = (uint64_t) (buf[0]) |
			((uint64_t) (buf[1]) << 8 * 1) |
			((uint64_t) (buf[2]) << 8 * 2) |
			((uint64_t) (buf[3]) << 8 * 3) |
			((uint64_t) (buf[4]) << 8 * 4) |
			((uint64_t) (buf[5]) << 8 * 5) |
			((uint64_t) (buf[6]) << 8 * 6) |
			((uint64_t) (buf[7]) << 8 * 7);
		ctx->s[ctx->wordIndex] ^= t;
		if (++ctx->wordIndex ==
		    (SHA3_KECCAK_SPONGE_WORDS - ctx->capacityWords)) {
			keccakf(ctx->s);
			ctx->wordIndex = 0;
		}
	}

	/* buffer any remaining sub-lane bytes for the next call */
	while (tail--)
		ctx->saved |= (uint64_t) (*(buf++)) << ((ctx->byteIndex++) * 8);
}
364
partial_hash_sha3_224(uint8_t * data_in,uint8_t * data_out)365 int partial_hash_sha3_224(uint8_t *data_in, uint8_t *data_out)
366 {
367 sha3_context *ctx;
368 int i;
369
370 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
371 if (!ctx) {
372 CCP_LOG_ERR("sha3-ctx creation failed");
373 return -ENOMEM;
374 }
375 sha3_Init224(ctx);
376 sha3_Update(ctx, data_in, SHA3_224_BLOCK_SIZE);
377 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
378 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
379 rte_free(ctx);
380
381 return 0;
382 }
383
partial_hash_sha3_256(uint8_t * data_in,uint8_t * data_out)384 int partial_hash_sha3_256(uint8_t *data_in, uint8_t *data_out)
385 {
386 sha3_context *ctx;
387 int i;
388
389 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
390 if (!ctx) {
391 CCP_LOG_ERR("sha3-ctx creation failed");
392 return -ENOMEM;
393 }
394 sha3_Init256(ctx);
395 sha3_Update(ctx, data_in, SHA3_256_BLOCK_SIZE);
396 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
397 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
398 rte_free(ctx);
399
400 return 0;
401 }
402
partial_hash_sha3_384(uint8_t * data_in,uint8_t * data_out)403 int partial_hash_sha3_384(uint8_t *data_in, uint8_t *data_out)
404 {
405 sha3_context *ctx;
406 int i;
407
408 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
409 if (!ctx) {
410 CCP_LOG_ERR("sha3-ctx creation failed");
411 return -ENOMEM;
412 }
413 sha3_Init384(ctx);
414 sha3_Update(ctx, data_in, SHA3_384_BLOCK_SIZE);
415 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
416 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
417 rte_free(ctx);
418
419 return 0;
420 }
421
partial_hash_sha3_512(uint8_t * data_in,uint8_t * data_out)422 int partial_hash_sha3_512(uint8_t *data_in, uint8_t *data_out)
423 {
424 sha3_context *ctx;
425 int i;
426
427 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
428 if (!ctx) {
429 CCP_LOG_ERR("sha3-ctx creation failed");
430 return -ENOMEM;
431 }
432 sha3_Init512(ctx);
433 sha3_Update(ctx, data_in, SHA3_512_BLOCK_SIZE);
434 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
435 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
436 rte_free(ctx);
437
438 return 0;
439 }
440
/*
 * Precompute the HMAC inner (ipad) and outer (opad) partial digests
 * for an auth session and store them in sess->auth.pre_compute.
 *
 * The key (already padded to the block size by the caller) is xored
 * with the HMAC ipad/opad constants; one compression round is run over
 * each via the partial_hash_* helpers.  The ipad state is stored at
 * pre_compute[0] and the opad state at pre_compute + ctx_len.  SHA1/2
 * states are stored with their digest words reversed; SHA3 states are
 * byte-reversed by the helpers themselves — NOTE(review): presumably
 * the CCP engine's expected layout, confirm against the CCP spec.
 * Returns 0 on success, -1 on any failure.
 */
static int generate_partial_hash(struct ccp_session *sess)
{

	uint8_t ipad[sess->auth.block_size];
	uint8_t opad[sess->auth.block_size];
	uint8_t *ipad_t, *opad_t;
	uint32_t *hash_value_be32, hash_temp32[8];
	uint64_t *hash_value_be64, hash_temp64[8];
	int i, count;
	uint8_t *hash_value_sha3;

	opad_t = ipad_t = (uint8_t *)sess->auth.key;

	/* both views point at the start of pre_compute (ipad slot) */
	hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute);
	hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute);

	/* considering key size is always equal to block size of algorithm */
	for (i = 0; i < sess->auth.block_size; i++) {
		ipad[i] = (ipad_t[i] ^ HMAC_IPAD_VALUE);
		opad[i] = (opad_t[i] ^ HMAC_OPAD_VALUE);
	}

	switch (sess->auth.algo) {
	case CCP_AUTH_ALGO_SHA1_HMAC:
		count = SHA1_DIGEST_SIZE >> 2;	/* digest size in words */

		if (partial_hash_sha1(ipad, (uint8_t *)hash_temp32))
			return -1;
		/* store ipad state word-reversed */
		for (i = 0; i < count; i++, hash_value_be32++)
			*hash_value_be32 = hash_temp32[count - 1 - i];

		/* opad state goes at +ctx_len */
		hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
					       + sess->auth.ctx_len);
		if (partial_hash_sha1(opad, (uint8_t *)hash_temp32))
			return -1;
		for (i = 0; i < count; i++, hash_value_be32++)
			*hash_value_be32 = hash_temp32[count - 1 - i];
		return 0;
	case CCP_AUTH_ALGO_SHA224_HMAC:
		count = SHA256_DIGEST_SIZE >> 2;	/* full SHA-256 state */

		if (partial_hash_sha224(ipad, (uint8_t *)hash_temp32))
			return -1;
		for (i = 0; i < count; i++, hash_value_be32++)
			*hash_value_be32 = hash_temp32[count - 1 - i];

		hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
					       + sess->auth.ctx_len);
		if (partial_hash_sha224(opad, (uint8_t *)hash_temp32))
			return -1;
		for (i = 0; i < count; i++, hash_value_be32++)
			*hash_value_be32 = hash_temp32[count - 1 - i];
		return 0;
	case CCP_AUTH_ALGO_SHA3_224_HMAC:
		/* SHA3 helpers write the byte-reversed state directly */
		hash_value_sha3 = sess->auth.pre_compute;
		if (partial_hash_sha3_224(ipad, hash_value_sha3))
			return -1;

		hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
					      + sess->auth.ctx_len);
		if (partial_hash_sha3_224(opad, hash_value_sha3))
			return -1;
		return 0;
	case CCP_AUTH_ALGO_SHA256_HMAC:
		count = SHA256_DIGEST_SIZE >> 2;

		if (partial_hash_sha256(ipad, (uint8_t *)hash_temp32))
			return -1;
		for (i = 0; i < count; i++, hash_value_be32++)
			*hash_value_be32 = hash_temp32[count - 1 - i];

		hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
					       + sess->auth.ctx_len);
		if (partial_hash_sha256(opad, (uint8_t *)hash_temp32))
			return -1;
		for (i = 0; i < count; i++, hash_value_be32++)
			*hash_value_be32 = hash_temp32[count - 1 - i];
		return 0;
	case CCP_AUTH_ALGO_SHA3_256_HMAC:
		hash_value_sha3 = sess->auth.pre_compute;
		if (partial_hash_sha3_256(ipad, hash_value_sha3))
			return -1;

		hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
					      + sess->auth.ctx_len);
		if (partial_hash_sha3_256(opad, hash_value_sha3))
			return -1;
		return 0;
	case CCP_AUTH_ALGO_SHA384_HMAC:
		count = SHA512_DIGEST_SIZE >> 3;	/* 64-bit words */

		if (partial_hash_sha384(ipad, (uint8_t *)hash_temp64))
			return -1;
		for (i = 0; i < count; i++, hash_value_be64++)
			*hash_value_be64 = hash_temp64[count - 1 - i];

		hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute
					       + sess->auth.ctx_len);
		if (partial_hash_sha384(opad, (uint8_t *)hash_temp64))
			return -1;
		for (i = 0; i < count; i++, hash_value_be64++)
			*hash_value_be64 = hash_temp64[count - 1 - i];
		return 0;
	case CCP_AUTH_ALGO_SHA3_384_HMAC:
		hash_value_sha3 = sess->auth.pre_compute;
		if (partial_hash_sha3_384(ipad, hash_value_sha3))
			return -1;

		hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
					      + sess->auth.ctx_len);
		if (partial_hash_sha3_384(opad, hash_value_sha3))
			return -1;
		return 0;
	case CCP_AUTH_ALGO_SHA512_HMAC:
		count = SHA512_DIGEST_SIZE >> 3;

		if (partial_hash_sha512(ipad, (uint8_t *)hash_temp64))
			return -1;
		for (i = 0; i < count; i++, hash_value_be64++)
			*hash_value_be64 = hash_temp64[count - 1 - i];

		hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute
					       + sess->auth.ctx_len);
		if (partial_hash_sha512(opad, (uint8_t *)hash_temp64))
			return -1;
		for (i = 0; i < count; i++, hash_value_be64++)
			*hash_value_be64 = hash_temp64[count - 1 - i];
		return 0;
	case CCP_AUTH_ALGO_SHA3_512_HMAC:
		hash_value_sha3 = sess->auth.pre_compute;
		if (partial_hash_sha3_512(ipad, hash_value_sha3))
			return -1;

		hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
					      + sess->auth.ctx_len);
		if (partial_hash_sha3_512(opad, hash_value_sha3))
			return -1;
		return 0;
	default:
		CCP_LOG_ERR("Invalid auth algo");
		return -1;
	}
}
584
/* prepare temporary keys K1 and K2 */
/*
 * Compute k = (l << 1): shift the bl-byte big-endian block left by one
 * bit; if the original MSB was set, xor the field constant into the
 * last byte (0x87 for 16-byte AES blocks, 0x1b for 8-byte blocks), as
 * in the NIST SP 800-38B CMAC subkey derivation.
 */
static void prepare_key(unsigned char *k, unsigned char *l, int bl)
{
	int i;

	/* shift left one bit, carrying in the MSB of the next byte */
	for (i = 0; i < bl; i++) {
		unsigned int carry = (i + 1 < bl) ? ((l[i + 1] >> 7) & 1) : 0;

		k[i] = (unsigned char)((l[i] << 1) | carry);
	}
	/* If MSB set fixup with R */
	if (l[0] & 0x80)
		k[bl - 1] ^= bl == 16 ? 0x87 : 0x1b;
}
599
600 /* subkeys K1 and K2 generation for CMAC */
601 static int
generate_cmac_subkeys(struct ccp_session * sess)602 generate_cmac_subkeys(struct ccp_session *sess)
603 {
604 const EVP_CIPHER *algo;
605 EVP_CIPHER_CTX *ctx;
606 unsigned char *ccp_ctx;
607 size_t i;
608 int dstlen, totlen;
609 unsigned char zero_iv[AES_BLOCK_SIZE] = {0};
610 unsigned char dst[2 * AES_BLOCK_SIZE] = {0};
611 unsigned char k1[AES_BLOCK_SIZE] = {0};
612 unsigned char k2[AES_BLOCK_SIZE] = {0};
613
614 if (sess->auth.ut.aes_type == CCP_AES_TYPE_128)
615 algo = EVP_aes_128_cbc();
616 else if (sess->auth.ut.aes_type == CCP_AES_TYPE_192)
617 algo = EVP_aes_192_cbc();
618 else if (sess->auth.ut.aes_type == CCP_AES_TYPE_256)
619 algo = EVP_aes_256_cbc();
620 else {
621 CCP_LOG_ERR("Invalid CMAC type length");
622 return -1;
623 }
624
625 ctx = EVP_CIPHER_CTX_new();
626 if (!ctx) {
627 CCP_LOG_ERR("ctx creation failed");
628 return -1;
629 }
630 if (EVP_EncryptInit(ctx, algo, (unsigned char *)sess->auth.key,
631 (unsigned char *)zero_iv) <= 0)
632 goto key_generate_err;
633 if (EVP_CIPHER_CTX_set_padding(ctx, 0) <= 0)
634 goto key_generate_err;
635 if (EVP_EncryptUpdate(ctx, dst, &dstlen, zero_iv,
636 AES_BLOCK_SIZE) <= 0)
637 goto key_generate_err;
638 if (EVP_EncryptFinal_ex(ctx, dst + dstlen, &totlen) <= 0)
639 goto key_generate_err;
640
641 memset(sess->auth.pre_compute, 0, CCP_SB_BYTES * 2);
642
643 ccp_ctx = (unsigned char *)(sess->auth.pre_compute + CCP_SB_BYTES - 1);
644 prepare_key(k1, dst, AES_BLOCK_SIZE);
645 for (i = 0; i < AES_BLOCK_SIZE; i++, ccp_ctx--)
646 *ccp_ctx = k1[i];
647
648 ccp_ctx = (unsigned char *)(sess->auth.pre_compute +
649 (2 * CCP_SB_BYTES) - 1);
650 prepare_key(k2, k1, AES_BLOCK_SIZE);
651 for (i = 0; i < AES_BLOCK_SIZE; i++, ccp_ctx--)
652 *ccp_ctx = k2[i];
653
654 EVP_CIPHER_CTX_free(ctx);
655
656 return 0;
657
658 key_generate_err:
659 CCP_LOG_ERR("CMAC Init failed");
660 return -1;
661 }
662
/* configure session */
/*
 * Parse the cipher xform into the session: direction, key, IV layout,
 * algo/mode/engine selection, and the CCP-format (byte-reversed) key
 * plus its physical/IO addresses.
 * Returns 0 on success, -1 or -ENOTSUP on unsupported parameters.
 */
static int
ccp_configure_session_cipher(struct ccp_session *sess,
			     const struct rte_crypto_sym_xform *xform)
{
	const struct rte_crypto_cipher_xform *cipher_xform = NULL;
	size_t i, j, x;

	cipher_xform = &xform->cipher;

	/* set cipher direction */
	if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
		sess->cipher.dir = CCP_CIPHER_DIR_ENCRYPT;
	else
		sess->cipher.dir = CCP_CIPHER_DIR_DECRYPT;

	/* set cipher key */
	sess->cipher.key_length = cipher_xform->key.length;
	rte_memcpy(sess->cipher.key, cipher_xform->key.data,
		   cipher_xform->key.length);

	/* set iv parameters */
	sess->iv.offset = cipher_xform->iv.offset;
	sess->iv.length = cipher_xform->iv.length;

	switch (cipher_xform->algo) {
	case RTE_CRYPTO_CIPHER_AES_CTR:
		sess->cipher.algo = CCP_CIPHER_ALGO_AES_CTR;
		sess->cipher.um.aes_mode = CCP_AES_MODE_CTR;
		sess->cipher.engine = CCP_ENGINE_AES;
		break;
	case RTE_CRYPTO_CIPHER_AES_ECB:
		/* NOTE(review): ECB reuses the CBC algo id with an ECB
		 * aes_mode — looks intentional, confirm in ccp_crypto.h */
		sess->cipher.algo = CCP_CIPHER_ALGO_AES_CBC;
		sess->cipher.um.aes_mode = CCP_AES_MODE_ECB;
		sess->cipher.engine = CCP_ENGINE_AES;
		break;
	case RTE_CRYPTO_CIPHER_AES_CBC:
		sess->cipher.algo = CCP_CIPHER_ALGO_AES_CBC;
		sess->cipher.um.aes_mode = CCP_AES_MODE_CBC;
		sess->cipher.engine = CCP_ENGINE_AES;
		break;
	case RTE_CRYPTO_CIPHER_3DES_CBC:
		sess->cipher.algo = CCP_CIPHER_ALGO_3DES_CBC;
		sess->cipher.um.des_mode = CCP_DES_MODE_CBC;
		sess->cipher.engine = CCP_ENGINE_3DES;
		break;
	default:
		CCP_LOG_ERR("Unsupported cipher algo");
		return -1;
	}


	switch (sess->cipher.engine) {
	case CCP_ENGINE_AES:
		if (sess->cipher.key_length == 16)
			sess->cipher.ut.aes_type = CCP_AES_TYPE_128;
		else if (sess->cipher.key_length == 24)
			sess->cipher.ut.aes_type = CCP_AES_TYPE_192;
		else if (sess->cipher.key_length == 32)
			sess->cipher.ut.aes_type = CCP_AES_TYPE_256;
		else {
			CCP_LOG_ERR("Invalid cipher key length");
			return -1;
		}
		/* CCP expects the key byte-reversed */
		for (i = 0; i < sess->cipher.key_length ; i++)
			sess->cipher.key_ccp[sess->cipher.key_length - i - 1] =
				sess->cipher.key[i];
		break;
	case CCP_ENGINE_3DES:
		if (sess->cipher.key_length == 16)
			sess->cipher.ut.des_type = CCP_DES_TYPE_128;
		else if (sess->cipher.key_length == 24)
			sess->cipher.ut.des_type = CCP_DES_TYPE_192;
		else {
			CCP_LOG_ERR("Invalid cipher key length");
			return -1;
		}
		/* byte-reverse each 8-byte DES key word independently */
		for (j = 0, x = 0; j < sess->cipher.key_length/8; j++, x += 8)
			for (i = 0; i < 8; i++)
				sess->cipher.key_ccp[(8 + x) - i - 1] =
					sess->cipher.key[i + x];
		break;
	default:
		CCP_LOG_ERR("Invalid CCP Engine");
		return -ENOTSUP;
	}
	/* iommu_mode == 2 means addresses are IOVAs, not physical */
	if (iommu_mode == 2) {
		sess->cipher.nonce_phys = rte_mem_virt2iova(sess->cipher.nonce);
		sess->cipher.key_phys = rte_mem_virt2iova(sess->cipher.key_ccp);
	} else {
		sess->cipher.nonce_phys = rte_mem_virt2phy(sess->cipher.nonce);
		sess->cipher.key_phys = rte_mem_virt2phy(sess->cipher.key_ccp);
	}
	return 0;
}
758
759 static int
ccp_configure_session_auth(struct ccp_session * sess,const struct rte_crypto_sym_xform * xform)760 ccp_configure_session_auth(struct ccp_session *sess,
761 const struct rte_crypto_sym_xform *xform)
762 {
763 const struct rte_crypto_auth_xform *auth_xform = NULL;
764 size_t i;
765
766 auth_xform = &xform->auth;
767
768 sess->auth.digest_length = auth_xform->digest_length;
769 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE)
770 sess->auth.op = CCP_AUTH_OP_GENERATE;
771 else
772 sess->auth.op = CCP_AUTH_OP_VERIFY;
773 switch (auth_xform->algo) {
774 case RTE_CRYPTO_AUTH_MD5_HMAC:
775 if (sess->auth_opt) {
776 sess->auth.algo = CCP_AUTH_ALGO_MD5_HMAC;
777 sess->auth.offset = ((CCP_SB_BYTES << 1) -
778 MD5_DIGEST_SIZE);
779 sess->auth.key_length = auth_xform->key.length;
780 sess->auth.block_size = MD5_BLOCK_SIZE;
781 memset(sess->auth.key, 0, sess->auth.block_size);
782 rte_memcpy(sess->auth.key, auth_xform->key.data,
783 auth_xform->key.length);
784 } else
785 return -1; /* HMAC MD5 not supported on CCP */
786 break;
787 case RTE_CRYPTO_AUTH_SHA1:
788 sess->auth.engine = CCP_ENGINE_SHA;
789 sess->auth.algo = CCP_AUTH_ALGO_SHA1;
790 sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
791 sess->auth.ctx = (void *)ccp_sha1_init;
792 sess->auth.ctx_len = CCP_SB_BYTES;
793 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
794 rte_memcpy(sha_ctx, sess->auth.ctx, SHA_COMMON_DIGEST_SIZE);
795 break;
796 case RTE_CRYPTO_AUTH_SHA1_HMAC:
797 if (sess->auth_opt) {
798 if (auth_xform->key.length > SHA1_BLOCK_SIZE)
799 return -1;
800 sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
801 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
802 sess->auth.block_size = SHA1_BLOCK_SIZE;
803 sess->auth.key_length = auth_xform->key.length;
804 memset(sess->auth.key, 0, sess->auth.block_size);
805 rte_memcpy(sess->auth.key, auth_xform->key.data,
806 auth_xform->key.length);
807 } else {
808 if (auth_xform->key.length > SHA1_BLOCK_SIZE)
809 return -1;
810 sess->auth.engine = CCP_ENGINE_SHA;
811 sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
812 sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
813 sess->auth.ctx_len = CCP_SB_BYTES;
814 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
815 sess->auth.block_size = SHA1_BLOCK_SIZE;
816 sess->auth.key_length = auth_xform->key.length;
817 memset(sess->auth.key, 0, sess->auth.block_size);
818 memset(sess->auth.pre_compute, 0,
819 sess->auth.ctx_len << 1);
820 rte_memcpy(sess->auth.key, auth_xform->key.data,
821 auth_xform->key.length);
822 if (generate_partial_hash(sess))
823 return -1;
824 }
825 break;
826 case RTE_CRYPTO_AUTH_SHA224:
827 sess->auth.algo = CCP_AUTH_ALGO_SHA224;
828 sess->auth.engine = CCP_ENGINE_SHA;
829 sess->auth.ut.sha_type = CCP_SHA_TYPE_224;
830 sess->auth.ctx = (void *)ccp_sha224_init;
831 sess->auth.ctx_len = CCP_SB_BYTES;
832 sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
833 rte_memcpy(sha_ctx, sess->auth.ctx, SHA256_DIGEST_SIZE);
834 break;
835 case RTE_CRYPTO_AUTH_SHA224_HMAC:
836 if (sess->auth_opt) {
837 if (auth_xform->key.length > SHA224_BLOCK_SIZE)
838 return -1;
839 sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
840 sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
841 sess->auth.block_size = SHA224_BLOCK_SIZE;
842 sess->auth.key_length = auth_xform->key.length;
843 memset(sess->auth.key, 0, sess->auth.block_size);
844 rte_memcpy(sess->auth.key, auth_xform->key.data,
845 auth_xform->key.length);
846 } else {
847 if (auth_xform->key.length > SHA224_BLOCK_SIZE)
848 return -1;
849 sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
850 sess->auth.engine = CCP_ENGINE_SHA;
851 sess->auth.ut.sha_type = CCP_SHA_TYPE_224;
852 sess->auth.ctx_len = CCP_SB_BYTES;
853 sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
854 sess->auth.block_size = SHA224_BLOCK_SIZE;
855 sess->auth.key_length = auth_xform->key.length;
856 memset(sess->auth.key, 0, sess->auth.block_size);
857 memset(sess->auth.pre_compute, 0,
858 sess->auth.ctx_len << 1);
859 rte_memcpy(sess->auth.key, auth_xform->key.data,
860 auth_xform->key.length);
861 if (generate_partial_hash(sess))
862 return -1;
863 }
864 break;
865 case RTE_CRYPTO_AUTH_SHA3_224:
866 sess->auth.algo = CCP_AUTH_ALGO_SHA3_224;
867 sess->auth.engine = CCP_ENGINE_SHA;
868 sess->auth.ut.sha_type = CCP_SHA3_TYPE_224;
869 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
870 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA224_DIGEST_SIZE;
871 break;
872 case RTE_CRYPTO_AUTH_SHA3_224_HMAC:
873 if (auth_xform->key.length > SHA3_224_BLOCK_SIZE)
874 return -1;
875 sess->auth.algo = CCP_AUTH_ALGO_SHA3_224_HMAC;
876 sess->auth.engine = CCP_ENGINE_SHA;
877 sess->auth.ut.sha_type = CCP_SHA3_TYPE_224;
878 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
879 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA224_DIGEST_SIZE;
880 sess->auth.block_size = SHA3_224_BLOCK_SIZE;
881 sess->auth.key_length = auth_xform->key.length;
882 memset(sess->auth.key, 0, sess->auth.block_size);
883 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
884 rte_memcpy(sess->auth.key, auth_xform->key.data,
885 auth_xform->key.length);
886 if (generate_partial_hash(sess))
887 return -1;
888 break;
889 case RTE_CRYPTO_AUTH_SHA256:
890 sess->auth.algo = CCP_AUTH_ALGO_SHA256;
891 sess->auth.engine = CCP_ENGINE_SHA;
892 sess->auth.ut.sha_type = CCP_SHA_TYPE_256;
893 sess->auth.ctx = (void *)ccp_sha256_init;
894 sess->auth.ctx_len = CCP_SB_BYTES;
895 sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
896 rte_memcpy(sha_ctx, sess->auth.ctx, SHA256_DIGEST_SIZE);
897 break;
898 case RTE_CRYPTO_AUTH_SHA256_HMAC:
899 if (sess->auth_opt) {
900 if (auth_xform->key.length > SHA256_BLOCK_SIZE)
901 return -1;
902 sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
903 sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
904 sess->auth.block_size = SHA256_BLOCK_SIZE;
905 sess->auth.key_length = auth_xform->key.length;
906 memset(sess->auth.key, 0, sess->auth.block_size);
907 rte_memcpy(sess->auth.key, auth_xform->key.data,
908 auth_xform->key.length);
909 } else {
910 if (auth_xform->key.length > SHA256_BLOCK_SIZE)
911 return -1;
912 sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
913 sess->auth.engine = CCP_ENGINE_SHA;
914 sess->auth.ut.sha_type = CCP_SHA_TYPE_256;
915 sess->auth.ctx_len = CCP_SB_BYTES;
916 sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
917 sess->auth.block_size = SHA256_BLOCK_SIZE;
918 sess->auth.key_length = auth_xform->key.length;
919 memset(sess->auth.key, 0, sess->auth.block_size);
920 memset(sess->auth.pre_compute, 0,
921 sess->auth.ctx_len << 1);
922 rte_memcpy(sess->auth.key, auth_xform->key.data,
923 auth_xform->key.length);
924 if (generate_partial_hash(sess))
925 return -1;
926 }
927 break;
928 case RTE_CRYPTO_AUTH_SHA3_256:
929 sess->auth.algo = CCP_AUTH_ALGO_SHA3_256;
930 sess->auth.engine = CCP_ENGINE_SHA;
931 sess->auth.ut.sha_type = CCP_SHA3_TYPE_256;
932 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
933 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA256_DIGEST_SIZE;
934 break;
935 case RTE_CRYPTO_AUTH_SHA3_256_HMAC:
936 if (auth_xform->key.length > SHA3_256_BLOCK_SIZE)
937 return -1;
938 sess->auth.algo = CCP_AUTH_ALGO_SHA3_256_HMAC;
939 sess->auth.engine = CCP_ENGINE_SHA;
940 sess->auth.ut.sha_type = CCP_SHA3_TYPE_256;
941 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
942 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA256_DIGEST_SIZE;
943 sess->auth.block_size = SHA3_256_BLOCK_SIZE;
944 sess->auth.key_length = auth_xform->key.length;
945 memset(sess->auth.key, 0, sess->auth.block_size);
946 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
947 rte_memcpy(sess->auth.key, auth_xform->key.data,
948 auth_xform->key.length);
949 if (generate_partial_hash(sess))
950 return -1;
951 break;
952 case RTE_CRYPTO_AUTH_SHA384:
953 sess->auth.algo = CCP_AUTH_ALGO_SHA384;
954 sess->auth.engine = CCP_ENGINE_SHA;
955 sess->auth.ut.sha_type = CCP_SHA_TYPE_384;
956 sess->auth.ctx = (void *)ccp_sha384_init;
957 sess->auth.ctx_len = CCP_SB_BYTES << 1;
958 sess->auth.offset = (CCP_SB_BYTES << 1) - SHA384_DIGEST_SIZE;
959 rte_memcpy(sha_ctx, sess->auth.ctx, SHA512_DIGEST_SIZE);
960 break;
961 case RTE_CRYPTO_AUTH_SHA384_HMAC:
962 if (sess->auth_opt) {
963 if (auth_xform->key.length > SHA384_BLOCK_SIZE)
964 return -1;
965 sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
966 sess->auth.offset = ((CCP_SB_BYTES << 1) -
967 SHA384_DIGEST_SIZE);
968 sess->auth.block_size = SHA384_BLOCK_SIZE;
969 sess->auth.key_length = auth_xform->key.length;
970 memset(sess->auth.key, 0, sess->auth.block_size);
971 rte_memcpy(sess->auth.key, auth_xform->key.data,
972 auth_xform->key.length);
973 } else {
974 if (auth_xform->key.length > SHA384_BLOCK_SIZE)
975 return -1;
976 sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
977 sess->auth.engine = CCP_ENGINE_SHA;
978 sess->auth.ut.sha_type = CCP_SHA_TYPE_384;
979 sess->auth.ctx_len = CCP_SB_BYTES << 1;
980 sess->auth.offset = ((CCP_SB_BYTES << 1) -
981 SHA384_DIGEST_SIZE);
982 sess->auth.block_size = SHA384_BLOCK_SIZE;
983 sess->auth.key_length = auth_xform->key.length;
984 memset(sess->auth.key, 0, sess->auth.block_size);
985 memset(sess->auth.pre_compute, 0,
986 sess->auth.ctx_len << 1);
987 rte_memcpy(sess->auth.key, auth_xform->key.data,
988 auth_xform->key.length);
989 if (generate_partial_hash(sess))
990 return -1;
991 }
992 break;
993 case RTE_CRYPTO_AUTH_SHA3_384:
994 sess->auth.algo = CCP_AUTH_ALGO_SHA3_384;
995 sess->auth.engine = CCP_ENGINE_SHA;
996 sess->auth.ut.sha_type = CCP_SHA3_TYPE_384;
997 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
998 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA384_DIGEST_SIZE;
999 break;
1000 case RTE_CRYPTO_AUTH_SHA3_384_HMAC:
1001 if (auth_xform->key.length > SHA3_384_BLOCK_SIZE)
1002 return -1;
1003 sess->auth.algo = CCP_AUTH_ALGO_SHA3_384_HMAC;
1004 sess->auth.engine = CCP_ENGINE_SHA;
1005 sess->auth.ut.sha_type = CCP_SHA3_TYPE_384;
1006 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
1007 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA384_DIGEST_SIZE;
1008 sess->auth.block_size = SHA3_384_BLOCK_SIZE;
1009 sess->auth.key_length = auth_xform->key.length;
1010 memset(sess->auth.key, 0, sess->auth.block_size);
1011 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
1012 rte_memcpy(sess->auth.key, auth_xform->key.data,
1013 auth_xform->key.length);
1014 if (generate_partial_hash(sess))
1015 return -1;
1016 break;
1017 case RTE_CRYPTO_AUTH_SHA512:
1018 sess->auth.algo = CCP_AUTH_ALGO_SHA512;
1019 sess->auth.engine = CCP_ENGINE_SHA;
1020 sess->auth.ut.sha_type = CCP_SHA_TYPE_512;
1021 sess->auth.ctx = (void *)ccp_sha512_init;
1022 sess->auth.ctx_len = CCP_SB_BYTES << 1;
1023 sess->auth.offset = (CCP_SB_BYTES << 1) - SHA512_DIGEST_SIZE;
1024 rte_memcpy(sha_ctx, sess->auth.ctx, SHA512_DIGEST_SIZE);
1025 break;
1026 case RTE_CRYPTO_AUTH_SHA512_HMAC:
1027 if (sess->auth_opt) {
1028 if (auth_xform->key.length > SHA512_BLOCK_SIZE)
1029 return -1;
1030 sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
1031 sess->auth.offset = ((CCP_SB_BYTES << 1) -
1032 SHA512_DIGEST_SIZE);
1033 sess->auth.block_size = SHA512_BLOCK_SIZE;
1034 sess->auth.key_length = auth_xform->key.length;
1035 memset(sess->auth.key, 0, sess->auth.block_size);
1036 rte_memcpy(sess->auth.key, auth_xform->key.data,
1037 auth_xform->key.length);
1038 } else {
1039 if (auth_xform->key.length > SHA512_BLOCK_SIZE)
1040 return -1;
1041 sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
1042 sess->auth.engine = CCP_ENGINE_SHA;
1043 sess->auth.ut.sha_type = CCP_SHA_TYPE_512;
1044 sess->auth.ctx_len = CCP_SB_BYTES << 1;
1045 sess->auth.offset = ((CCP_SB_BYTES << 1) -
1046 SHA512_DIGEST_SIZE);
1047 sess->auth.block_size = SHA512_BLOCK_SIZE;
1048 sess->auth.key_length = auth_xform->key.length;
1049 memset(sess->auth.key, 0, sess->auth.block_size);
1050 memset(sess->auth.pre_compute, 0,
1051 sess->auth.ctx_len << 1);
1052 rte_memcpy(sess->auth.key, auth_xform->key.data,
1053 auth_xform->key.length);
1054 if (generate_partial_hash(sess))
1055 return -1;
1056 }
1057 break;
1058 case RTE_CRYPTO_AUTH_SHA3_512:
1059 sess->auth.algo = CCP_AUTH_ALGO_SHA3_512;
1060 sess->auth.engine = CCP_ENGINE_SHA;
1061 sess->auth.ut.sha_type = CCP_SHA3_TYPE_512;
1062 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
1063 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA512_DIGEST_SIZE;
1064 break;
1065 case RTE_CRYPTO_AUTH_SHA3_512_HMAC:
1066 if (auth_xform->key.length > SHA3_512_BLOCK_SIZE)
1067 return -1;
1068 sess->auth.algo = CCP_AUTH_ALGO_SHA3_512_HMAC;
1069 sess->auth.engine = CCP_ENGINE_SHA;
1070 sess->auth.ut.sha_type = CCP_SHA3_TYPE_512;
1071 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
1072 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA512_DIGEST_SIZE;
1073 sess->auth.block_size = SHA3_512_BLOCK_SIZE;
1074 sess->auth.key_length = auth_xform->key.length;
1075 memset(sess->auth.key, 0, sess->auth.block_size);
1076 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
1077 rte_memcpy(sess->auth.key, auth_xform->key.data,
1078 auth_xform->key.length);
1079 if (generate_partial_hash(sess))
1080 return -1;
1081 break;
1082 case RTE_CRYPTO_AUTH_AES_CMAC:
1083 sess->auth.algo = CCP_AUTH_ALGO_AES_CMAC;
1084 sess->auth.engine = CCP_ENGINE_AES;
1085 sess->auth.um.aes_mode = CCP_AES_MODE_CMAC;
1086 sess->auth.key_length = auth_xform->key.length;
1087 /* padding and hash result */
1088 sess->auth.ctx_len = CCP_SB_BYTES << 1;
1089 sess->auth.offset = AES_BLOCK_SIZE;
1090 sess->auth.block_size = AES_BLOCK_SIZE;
1091 if (sess->auth.key_length == 16)
1092 sess->auth.ut.aes_type = CCP_AES_TYPE_128;
1093 else if (sess->auth.key_length == 24)
1094 sess->auth.ut.aes_type = CCP_AES_TYPE_192;
1095 else if (sess->auth.key_length == 32)
1096 sess->auth.ut.aes_type = CCP_AES_TYPE_256;
1097 else {
1098 CCP_LOG_ERR("Invalid CMAC key length");
1099 return -1;
1100 }
1101 rte_memcpy(sess->auth.key, auth_xform->key.data,
1102 sess->auth.key_length);
1103 for (i = 0; i < sess->auth.key_length; i++)
1104 sess->auth.key_ccp[sess->auth.key_length - i - 1] =
1105 sess->auth.key[i];
1106 if (generate_cmac_subkeys(sess))
1107 return -1;
1108 break;
1109 default:
1110 CCP_LOG_ERR("Unsupported hash algo");
1111 return -ENOTSUP;
1112 }
1113 return 0;
1114 }
1115
1116 static int
ccp_configure_session_aead(struct ccp_session * sess,const struct rte_crypto_sym_xform * xform)1117 ccp_configure_session_aead(struct ccp_session *sess,
1118 const struct rte_crypto_sym_xform *xform)
1119 {
1120 const struct rte_crypto_aead_xform *aead_xform = NULL;
1121 size_t i;
1122
1123 aead_xform = &xform->aead;
1124
1125 sess->cipher.key_length = aead_xform->key.length;
1126 rte_memcpy(sess->cipher.key, aead_xform->key.data,
1127 aead_xform->key.length);
1128
1129 if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
1130 sess->cipher.dir = CCP_CIPHER_DIR_ENCRYPT;
1131 sess->auth.op = CCP_AUTH_OP_GENERATE;
1132 } else {
1133 sess->cipher.dir = CCP_CIPHER_DIR_DECRYPT;
1134 sess->auth.op = CCP_AUTH_OP_VERIFY;
1135 }
1136 sess->aead_algo = aead_xform->algo;
1137 sess->auth.aad_length = aead_xform->aad_length;
1138 sess->auth.digest_length = aead_xform->digest_length;
1139
1140 /* set iv parameters */
1141 sess->iv.offset = aead_xform->iv.offset;
1142 sess->iv.length = aead_xform->iv.length;
1143
1144 switch (aead_xform->algo) {
1145 case RTE_CRYPTO_AEAD_AES_GCM:
1146 sess->cipher.algo = CCP_CIPHER_ALGO_AES_GCM;
1147 sess->cipher.um.aes_mode = CCP_AES_MODE_GCTR;
1148 sess->cipher.engine = CCP_ENGINE_AES;
1149 if (sess->cipher.key_length == 16)
1150 sess->cipher.ut.aes_type = CCP_AES_TYPE_128;
1151 else if (sess->cipher.key_length == 24)
1152 sess->cipher.ut.aes_type = CCP_AES_TYPE_192;
1153 else if (sess->cipher.key_length == 32)
1154 sess->cipher.ut.aes_type = CCP_AES_TYPE_256;
1155 else {
1156 CCP_LOG_ERR("Invalid aead key length");
1157 return -1;
1158 }
1159 for (i = 0; i < sess->cipher.key_length; i++)
1160 sess->cipher.key_ccp[sess->cipher.key_length - i - 1] =
1161 sess->cipher.key[i];
1162 sess->auth.algo = CCP_AUTH_ALGO_AES_GCM;
1163 sess->auth.engine = CCP_ENGINE_AES;
1164 sess->auth.um.aes_mode = CCP_AES_MODE_GHASH;
1165 sess->auth.ctx_len = CCP_SB_BYTES;
1166 sess->auth.offset = 0;
1167 sess->auth.block_size = AES_BLOCK_SIZE;
1168 sess->cmd_id = CCP_CMD_COMBINED;
1169 break;
1170 default:
1171 CCP_LOG_ERR("Unsupported aead algo");
1172 return -ENOTSUP;
1173 }
1174 if (iommu_mode == 2) {
1175 sess->cipher.nonce_phys = rte_mem_virt2iova(sess->cipher.nonce);
1176 sess->cipher.key_phys = rte_mem_virt2iova(sess->cipher.key_ccp);
1177 } else {
1178 sess->cipher.nonce_phys = rte_mem_virt2phy(sess->cipher.nonce);
1179 sess->cipher.key_phys = rte_mem_virt2phy(sess->cipher.key_ccp);
1180 }
1181 return 0;
1182 }
1183
1184 int
ccp_set_session_parameters(struct ccp_session * sess,const struct rte_crypto_sym_xform * xform,struct ccp_private * internals)1185 ccp_set_session_parameters(struct ccp_session *sess,
1186 const struct rte_crypto_sym_xform *xform,
1187 struct ccp_private *internals)
1188 {
1189 const struct rte_crypto_sym_xform *cipher_xform = NULL;
1190 const struct rte_crypto_sym_xform *auth_xform = NULL;
1191 const struct rte_crypto_sym_xform *aead_xform = NULL;
1192 int ret = 0;
1193
1194 sess->auth_opt = internals->auth_opt;
1195 sess->cmd_id = ccp_get_cmd_id(xform);
1196
1197 switch (sess->cmd_id) {
1198 case CCP_CMD_CIPHER:
1199 cipher_xform = xform;
1200 break;
1201 case CCP_CMD_AUTH:
1202 auth_xform = xform;
1203 break;
1204 case CCP_CMD_CIPHER_HASH:
1205 cipher_xform = xform;
1206 auth_xform = xform->next;
1207 break;
1208 case CCP_CMD_HASH_CIPHER:
1209 auth_xform = xform;
1210 cipher_xform = xform->next;
1211 break;
1212 case CCP_CMD_COMBINED:
1213 aead_xform = xform;
1214 break;
1215 default:
1216 CCP_LOG_ERR("Unsupported cmd_id");
1217 return -1;
1218 }
1219
1220 /* Default IV length = 0 */
1221 sess->iv.length = 0;
1222 if (cipher_xform) {
1223 ret = ccp_configure_session_cipher(sess, cipher_xform);
1224 if (ret != 0) {
1225 CCP_LOG_ERR("Invalid/unsupported cipher parameters");
1226 return ret;
1227 }
1228 }
1229 if (auth_xform) {
1230 ret = ccp_configure_session_auth(sess, auth_xform);
1231 if (ret != 0) {
1232 CCP_LOG_ERR("Invalid/unsupported auth parameters");
1233 return ret;
1234 }
1235 }
1236 if (aead_xform) {
1237 ret = ccp_configure_session_aead(sess, aead_xform);
1238 if (ret != 0) {
1239 CCP_LOG_ERR("Invalid/unsupported aead parameters");
1240 return ret;
1241 }
1242 }
1243 return ret;
1244 }
1245
1246 /* calculate CCP descriptors requirement */
1247 static inline int
ccp_cipher_slot(struct ccp_session * session)1248 ccp_cipher_slot(struct ccp_session *session)
1249 {
1250 int count = 0;
1251
1252 switch (session->cipher.algo) {
1253 case CCP_CIPHER_ALGO_AES_CBC:
1254 count = 2;
1255 /**< op + passthrough for iv */
1256 break;
1257 case CCP_CIPHER_ALGO_AES_ECB:
1258 count = 1;
1259 /**<only op*/
1260 break;
1261 case CCP_CIPHER_ALGO_AES_CTR:
1262 count = 2;
1263 /**< op + passthrough for iv */
1264 break;
1265 case CCP_CIPHER_ALGO_3DES_CBC:
1266 count = 2;
1267 /**< op + passthrough for iv */
1268 break;
1269 default:
1270 CCP_LOG_ERR("Unsupported cipher algo %d",
1271 session->cipher.algo);
1272 }
1273 return count;
1274 }
1275
1276 static inline int
ccp_auth_slot(struct ccp_session * session)1277 ccp_auth_slot(struct ccp_session *session)
1278 {
1279 int count = 0;
1280
1281 switch (session->auth.algo) {
1282 case CCP_AUTH_ALGO_SHA1:
1283 case CCP_AUTH_ALGO_SHA224:
1284 case CCP_AUTH_ALGO_SHA256:
1285 case CCP_AUTH_ALGO_SHA384:
1286 case CCP_AUTH_ALGO_SHA512:
1287 count = 3;
1288 /**< op + lsb passthrough cpy to/from*/
1289 break;
1290 case CCP_AUTH_ALGO_MD5_HMAC:
1291 break;
1292 case CCP_AUTH_ALGO_SHA1_HMAC:
1293 case CCP_AUTH_ALGO_SHA224_HMAC:
1294 case CCP_AUTH_ALGO_SHA256_HMAC:
1295 if (session->auth_opt == 0)
1296 count = 6;
1297 break;
1298 case CCP_AUTH_ALGO_SHA384_HMAC:
1299 case CCP_AUTH_ALGO_SHA512_HMAC:
1300 /**
1301 * 1. Load PHash1 = H(k ^ ipad); to LSB
1302 * 2. generate IHash = H(hash on message with PHash1
1303 * as init values);
1304 * 3. Retrieve IHash 2 slots for 384/512
1305 * 4. Load Phash2 = H(k ^ opad); to LSB
1306 * 5. generate FHash = H(hash on Ihash with Phash2
1307 * as init value);
1308 * 6. Retrieve HMAC output from LSB to host memory
1309 */
1310 if (session->auth_opt == 0)
1311 count = 7;
1312 break;
1313 case CCP_AUTH_ALGO_SHA3_224:
1314 case CCP_AUTH_ALGO_SHA3_256:
1315 case CCP_AUTH_ALGO_SHA3_384:
1316 case CCP_AUTH_ALGO_SHA3_512:
1317 count = 1;
1318 /**< only op ctx and dst in host memory*/
1319 break;
1320 case CCP_AUTH_ALGO_SHA3_224_HMAC:
1321 case CCP_AUTH_ALGO_SHA3_256_HMAC:
1322 count = 3;
1323 break;
1324 case CCP_AUTH_ALGO_SHA3_384_HMAC:
1325 case CCP_AUTH_ALGO_SHA3_512_HMAC:
1326 count = 4;
1327 /**
1328 * 1. Op to Perform Ihash
1329 * 2. Retrieve result from LSB to host memory
1330 * 3. Perform final hash
1331 */
1332 break;
1333 case CCP_AUTH_ALGO_AES_CMAC:
1334 count = 4;
1335 /**
1336 * op
1337 * extra descriptor in padding case
1338 * (k1/k2(255:128) with iv(127:0))
1339 * Retrieve result
1340 */
1341 break;
1342 default:
1343 CCP_LOG_ERR("Unsupported auth algo %d",
1344 session->auth.algo);
1345 }
1346
1347 return count;
1348 }
1349
1350 static int
ccp_aead_slot(struct ccp_session * session)1351 ccp_aead_slot(struct ccp_session *session)
1352 {
1353 int count = 0;
1354
1355 switch (session->aead_algo) {
1356 case RTE_CRYPTO_AEAD_AES_GCM:
1357 break;
1358 default:
1359 CCP_LOG_ERR("Unsupported aead algo %d",
1360 session->aead_algo);
1361 }
1362 switch (session->auth.algo) {
1363 case CCP_AUTH_ALGO_AES_GCM:
1364 count = 5;
1365 /**
1366 * 1. Passthru iv
1367 * 2. Hash AAD
1368 * 3. GCTR
1369 * 4. Reload passthru
1370 * 5. Hash Final tag
1371 */
1372 break;
1373 default:
1374 CCP_LOG_ERR("Unsupported combined auth ALGO %d",
1375 session->auth.algo);
1376 }
1377 return count;
1378 }
1379
1380 int
ccp_compute_slot_count(struct ccp_session * session)1381 ccp_compute_slot_count(struct ccp_session *session)
1382 {
1383 int count = 0;
1384
1385 switch (session->cmd_id) {
1386 case CCP_CMD_CIPHER:
1387 count = ccp_cipher_slot(session);
1388 break;
1389 case CCP_CMD_AUTH:
1390 count = ccp_auth_slot(session);
1391 break;
1392 case CCP_CMD_CIPHER_HASH:
1393 case CCP_CMD_HASH_CIPHER:
1394 count = ccp_cipher_slot(session);
1395 count += ccp_auth_slot(session);
1396 break;
1397 case CCP_CMD_COMBINED:
1398 count = ccp_aead_slot(session);
1399 break;
1400 default:
1401 CCP_LOG_ERR("Unsupported cmd_id");
1402
1403 }
1404
1405 return count;
1406 }
1407
1408 static uint8_t
algo_select(int sessalgo,const EVP_MD ** algo)1409 algo_select(int sessalgo,
1410 const EVP_MD **algo)
1411 {
1412 int res = 0;
1413
1414 switch (sessalgo) {
1415 case CCP_AUTH_ALGO_MD5_HMAC:
1416 *algo = EVP_md5();
1417 break;
1418 case CCP_AUTH_ALGO_SHA1_HMAC:
1419 *algo = EVP_sha1();
1420 break;
1421 case CCP_AUTH_ALGO_SHA224_HMAC:
1422 *algo = EVP_sha224();
1423 break;
1424 case CCP_AUTH_ALGO_SHA256_HMAC:
1425 *algo = EVP_sha256();
1426 break;
1427 case CCP_AUTH_ALGO_SHA384_HMAC:
1428 *algo = EVP_sha384();
1429 break;
1430 case CCP_AUTH_ALGO_SHA512_HMAC:
1431 *algo = EVP_sha512();
1432 break;
1433 default:
1434 res = -EINVAL;
1435 break;
1436 }
1437 return res;
1438 }
1439
1440 static int
process_cpu_auth_hmac(uint8_t * src,uint8_t * dst,__rte_unused uint8_t * iv,EVP_PKEY * pkey,int srclen,EVP_MD_CTX * ctx,const EVP_MD * algo,uint16_t d_len)1441 process_cpu_auth_hmac(uint8_t *src, uint8_t *dst,
1442 __rte_unused uint8_t *iv,
1443 EVP_PKEY *pkey,
1444 int srclen,
1445 EVP_MD_CTX *ctx,
1446 const EVP_MD *algo,
1447 uint16_t d_len)
1448 {
1449 size_t dstlen;
1450 unsigned char temp_dst[64];
1451
1452 if (EVP_DigestSignInit(ctx, NULL, algo, NULL, pkey) <= 0)
1453 goto process_auth_err;
1454
1455 if (EVP_DigestSignUpdate(ctx, (char *)src, srclen) <= 0)
1456 goto process_auth_err;
1457
1458 if (EVP_DigestSignFinal(ctx, temp_dst, &dstlen) <= 0)
1459 goto process_auth_err;
1460
1461 memcpy(dst, temp_dst, d_len);
1462 return 0;
1463 process_auth_err:
1464 CCP_LOG_ERR("Process cpu auth failed");
1465 return -EINVAL;
1466 }
1467
/*
 * CPU-offloaded HMAC path (auth_opt sessions): compute or verify the
 * digest with OpenSSL instead of the CCP engine.
 * Returns 0 on success (op->status carries the verify result),
 * negative on setup/processing failure.
 */
static int cpu_crypto_auth(struct ccp_qp *qp,
			   struct rte_crypto_op *op,
			   struct ccp_session *sess,
			   EVP_MD_CTX *ctx)
{
	uint8_t *src, *dst;
	int srclen, status;
	struct rte_mbuf *mbuf_src, *mbuf_dst;
	const EVP_MD *algo = NULL;
	EVP_PKEY *pkey;

	algo_select(sess->auth.algo, &algo);
	pkey = EVP_PKEY_new_mac_key(EVP_PKEY_HMAC, NULL, sess->auth.key,
				    sess->auth.key_length);
	if (pkey == NULL || algo == NULL) {
		/* EVP_PKEY_free(NULL) is a no-op */
		EVP_PKEY_free(pkey);
		op->status = RTE_CRYPTO_OP_STATUS_ERROR;
		return -EINVAL;
	}
	mbuf_src = op->sym->m_src;
	mbuf_dst = op->sym->m_dst ? op->sym->m_dst : op->sym->m_src;
	srclen = op->sym->auth.data.length;
	src = rte_pktmbuf_mtod_offset(mbuf_src, uint8_t *,
				      op->sym->auth.data.offset);

	if (sess->auth.op == CCP_AUTH_OP_VERIFY) {
		/* Compute into a scratch digest and compare below. */
		dst = qp->temp_digest;
	} else {
		dst = op->sym->auth.digest.data;
		if (dst == NULL) {
			/* NOTE(review): fallback places the digest at
			 * data.offset + digest_length — looks like it
			 * should be data.offset + data.length; confirm
			 * against callers before changing.
			 */
			dst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
						      op->sym->auth.data.offset +
						      sess->auth.digest_length);
		}
	}
	status = process_cpu_auth_hmac(src, dst, NULL,
				       pkey, srclen,
				       ctx,
				       algo,
				       sess->auth.digest_length);
	if (status) {
		op->status = RTE_CRYPTO_OP_STATUS_ERROR;
		/* fix: pkey was leaked on this error path */
		EVP_PKEY_free(pkey);
		return status;
	}

	if (sess->auth.op == CCP_AUTH_OP_VERIFY) {
		if (memcmp(dst, op->sym->auth.digest.data,
			   sess->auth.digest_length) != 0) {
			op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
		} else {
			op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
		}
	} else {
		op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
	}
	EVP_PKEY_free(pkey);
	return 0;
}
1521
/*
 * Program one PASSTHRU descriptor on @cmd_q.
 *
 * pst->dir nonzero copies from system memory into the LSB (storage
 * block); zero copies from the LSB back to system memory.  The queue
 * doorbell is NOT rung here — callers kick the queue after chaining
 * all descriptors for the operation.
 */
static void
ccp_perform_passthru(struct ccp_passthru *pst,
		     struct ccp_queue *cmd_q)
{
	struct ccp_desc *desc;
	union ccp_function function;

	/* next free descriptor slot in the command ring */
	desc = &cmd_q->qbase_desc[cmd_q->qidx];

	CCP_CMD_ENGINE(desc) = CCP_ENGINE_PASSTHRU;

	CCP_CMD_SOC(desc) = 0;
	CCP_CMD_IOC(desc) = 0;
	CCP_CMD_INIT(desc) = 0;
	CCP_CMD_EOM(desc) = 0;
	CCP_CMD_PROT(desc) = 0;

	/* byte-swap / bitwise modes come from the caller's request */
	function.raw = 0;
	CCP_PT_BYTESWAP(&function) = pst->byte_swap;
	CCP_PT_BITWISE(&function) = pst->bit_mod;
	CCP_CMD_FUNCTION(desc) = function.raw;

	CCP_CMD_LEN(desc) = pst->len;

	if (pst->dir) {
		/* system memory -> LSB */
		CCP_CMD_SRC_LO(desc) = (uint32_t)(pst->src_addr);
		CCP_CMD_SRC_HI(desc) = high32_value(pst->src_addr);
		CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

		CCP_CMD_DST_LO(desc) = (uint32_t)(pst->dest_addr);
		CCP_CMD_DST_HI(desc) = 0;
		CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SB;

		/* bitwise ops need the key LSB slot selected */
		if (pst->bit_mod != CCP_PASSTHRU_BITWISE_NOOP)
			CCP_CMD_LSB_ID(desc) = cmd_q->sb_key;
	} else {

		/* LSB -> system memory */
		CCP_CMD_SRC_LO(desc) = (uint32_t)(pst->src_addr);
		CCP_CMD_SRC_HI(desc) = 0;
		CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SB;

		CCP_CMD_DST_LO(desc) = (uint32_t)(pst->dest_addr);
		CCP_CMD_DST_HI(desc) = high32_value(pst->dest_addr);
		CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
	}

	/* advance ring index with wrap-around */
	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
}
1570
1571 static int
ccp_perform_hmac(struct rte_crypto_op * op,struct ccp_queue * cmd_q)1572 ccp_perform_hmac(struct rte_crypto_op *op,
1573 struct ccp_queue *cmd_q)
1574 {
1575
1576 struct ccp_session *session;
1577 union ccp_function function;
1578 struct ccp_desc *desc;
1579 uint32_t tail;
1580 phys_addr_t src_addr, dest_addr, dest_addr_t;
1581 struct ccp_passthru pst;
1582 uint64_t auth_msg_bits;
1583 void *append_ptr;
1584 uint8_t *addr;
1585
1586 session = (struct ccp_session *)get_sym_session_private_data(
1587 op->sym->session,
1588 ccp_cryptodev_driver_id);
1589 addr = session->auth.pre_compute;
1590
1591 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
1592 op->sym->auth.data.offset);
1593 append_ptr = (void *)rte_pktmbuf_append(op->sym->m_src,
1594 session->auth.ctx_len);
1595 if (iommu_mode == 2) {
1596 dest_addr = (phys_addr_t)rte_mem_virt2iova(append_ptr);
1597 pst.src_addr = (phys_addr_t)rte_mem_virt2iova((void *)addr);
1598 } else {
1599 dest_addr = (phys_addr_t)rte_mem_virt2phy(append_ptr);
1600 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)addr);
1601 }
1602 dest_addr_t = dest_addr;
1603
1604 /** Load PHash1 to LSB*/
1605 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1606 pst.len = session->auth.ctx_len;
1607 pst.dir = 1;
1608 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1609 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1610 ccp_perform_passthru(&pst, cmd_q);
1611
1612 /**sha engine command descriptor for IntermediateHash*/
1613
1614 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1615 memset(desc, 0, Q_DESC_SIZE);
1616
1617 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1618
1619 CCP_CMD_SOC(desc) = 0;
1620 CCP_CMD_IOC(desc) = 0;
1621 CCP_CMD_INIT(desc) = 1;
1622 CCP_CMD_EOM(desc) = 1;
1623 CCP_CMD_PROT(desc) = 0;
1624
1625 function.raw = 0;
1626 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1627 CCP_CMD_FUNCTION(desc) = function.raw;
1628
1629 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
1630 auth_msg_bits = (op->sym->auth.data.length +
1631 session->auth.block_size) * 8;
1632
1633 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1634 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1635 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1636
1637 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1638 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1639 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1640
1641 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1642
1643 rte_wmb();
1644
1645 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1646 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1647 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1648 cmd_q->qcontrol | CMD_Q_RUN);
1649
1650 /* Intermediate Hash value retrieve */
1651 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1652 (session->auth.ut.sha_type == CCP_SHA_TYPE_512)) {
1653
1654 pst.src_addr =
1655 (phys_addr_t)((cmd_q->sb_sha + 1) * CCP_SB_BYTES);
1656 pst.dest_addr = dest_addr_t;
1657 pst.len = CCP_SB_BYTES;
1658 pst.dir = 0;
1659 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1660 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1661 ccp_perform_passthru(&pst, cmd_q);
1662
1663 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1664 pst.dest_addr = dest_addr_t + CCP_SB_BYTES;
1665 pst.len = CCP_SB_BYTES;
1666 pst.dir = 0;
1667 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1668 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1669 ccp_perform_passthru(&pst, cmd_q);
1670
1671 } else {
1672 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1673 pst.dest_addr = dest_addr_t;
1674 pst.len = session->auth.ctx_len;
1675 pst.dir = 0;
1676 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1677 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1678 ccp_perform_passthru(&pst, cmd_q);
1679
1680 }
1681
1682 /** Load PHash2 to LSB*/
1683 addr += session->auth.ctx_len;
1684 if (iommu_mode == 2)
1685 pst.src_addr = (phys_addr_t)rte_mem_virt2iova((void *)addr);
1686 else
1687 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)addr);
1688 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1689 pst.len = session->auth.ctx_len;
1690 pst.dir = 1;
1691 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1692 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1693 ccp_perform_passthru(&pst, cmd_q);
1694
1695 /**sha engine command descriptor for FinalHash*/
1696 dest_addr_t += session->auth.offset;
1697
1698 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1699 memset(desc, 0, Q_DESC_SIZE);
1700
1701 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1702
1703 CCP_CMD_SOC(desc) = 0;
1704 CCP_CMD_IOC(desc) = 0;
1705 CCP_CMD_INIT(desc) = 1;
1706 CCP_CMD_EOM(desc) = 1;
1707 CCP_CMD_PROT(desc) = 0;
1708
1709 function.raw = 0;
1710 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1711 CCP_CMD_FUNCTION(desc) = function.raw;
1712
1713 CCP_CMD_LEN(desc) = (session->auth.ctx_len -
1714 session->auth.offset);
1715 auth_msg_bits = (session->auth.block_size +
1716 session->auth.ctx_len -
1717 session->auth.offset) * 8;
1718
1719 CCP_CMD_SRC_LO(desc) = (uint32_t)(dest_addr_t);
1720 CCP_CMD_SRC_HI(desc) = high32_value(dest_addr_t);
1721 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1722
1723 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1724 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1725 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1726
1727 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1728
1729 rte_wmb();
1730
1731 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1732 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1733 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1734 cmd_q->qcontrol | CMD_Q_RUN);
1735
1736 /* Retrieve hmac output */
1737 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1738 pst.dest_addr = dest_addr;
1739 pst.len = session->auth.ctx_len;
1740 pst.dir = 0;
1741 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1742 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1743 (session->auth.ut.sha_type == CCP_SHA_TYPE_512))
1744 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1745 else
1746 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1747 ccp_perform_passthru(&pst, cmd_q);
1748
1749 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
1750 return 0;
1751
1752 }
1753
1754 static int
ccp_perform_sha(struct rte_crypto_op * op,struct ccp_queue * cmd_q)1755 ccp_perform_sha(struct rte_crypto_op *op,
1756 struct ccp_queue *cmd_q)
1757 {
1758 struct ccp_session *session;
1759 union ccp_function function;
1760 struct ccp_desc *desc;
1761 uint32_t tail;
1762 phys_addr_t src_addr, dest_addr;
1763 struct ccp_passthru pst;
1764 void *append_ptr;
1765 uint64_t auth_msg_bits;
1766
1767 session = (struct ccp_session *)get_sym_session_private_data(
1768 op->sym->session,
1769 ccp_cryptodev_driver_id);
1770
1771 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
1772 op->sym->auth.data.offset);
1773 append_ptr = (void *)rte_pktmbuf_append(op->sym->m_src,
1774 session->auth.ctx_len);
1775 if (iommu_mode == 2) {
1776 dest_addr = (phys_addr_t)rte_mem_virt2iova(append_ptr);
1777 pst.src_addr = (phys_addr_t)sha_ctx;
1778 } else {
1779 dest_addr = (phys_addr_t)rte_mem_virt2phy(append_ptr);
1780 pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)
1781 session->auth.ctx);
1782 }
1783
1784 /** Passthru sha context*/
1785
1786 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1787 pst.len = session->auth.ctx_len;
1788 pst.dir = 1;
1789 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1790 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1791 ccp_perform_passthru(&pst, cmd_q);
1792
1793 /**prepare sha command descriptor*/
1794
1795 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1796 memset(desc, 0, Q_DESC_SIZE);
1797
1798 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1799
1800 CCP_CMD_SOC(desc) = 0;
1801 CCP_CMD_IOC(desc) = 0;
1802 CCP_CMD_INIT(desc) = 1;
1803 CCP_CMD_EOM(desc) = 1;
1804 CCP_CMD_PROT(desc) = 0;
1805
1806 function.raw = 0;
1807 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1808 CCP_CMD_FUNCTION(desc) = function.raw;
1809
1810 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
1811 auth_msg_bits = op->sym->auth.data.length * 8;
1812
1813 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1814 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1815 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1816
1817 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1818 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1819 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1820
1821 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1822
1823 rte_wmb();
1824
1825 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1826 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1827 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1828 cmd_q->qcontrol | CMD_Q_RUN);
1829
1830 /* Hash value retrieve */
1831 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1832 pst.dest_addr = dest_addr;
1833 pst.len = session->auth.ctx_len;
1834 pst.dir = 0;
1835 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1836 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1837 (session->auth.ut.sha_type == CCP_SHA_TYPE_512))
1838 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1839 else
1840 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1841 ccp_perform_passthru(&pst, cmd_q);
1842
1843 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
1844 return 0;
1845
1846 }
1847
/*
 * Build the descriptor chain for a SHA3-family HMAC:
 * desc1 hashes the message keyed with the first precomputed context
 * (inner hash), the intermediate result is pulled from the LSB into
 * mbuf scratch, then desc2 hashes that intermediate keyed with the
 * second precomputed context (outer hash) to produce the HMAC.
 * Returns 0 on success, -1 if the mbuf scratch append fails.
 */
static int
ccp_perform_sha3_hmac(struct rte_crypto_op *op,
		      struct ccp_queue *cmd_q)
{
	struct ccp_session *session;
	struct ccp_passthru pst;
	union ccp_function function;
	struct ccp_desc *desc;
	uint8_t *append_ptr;
	uint32_t tail;
	phys_addr_t src_addr, dest_addr, ctx_paddr, dest_addr_t;

	session = (struct ccp_session *)get_sym_session_private_data(
					op->sym->session,
					ccp_cryptodev_driver_id);

	src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
					      op->sym->auth.data.offset);
	/* scratch area at the tail of the mbuf for hash results */
	append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
						session->auth.ctx_len);
	if (!append_ptr) {
		CCP_LOG_ERR("CCP MBUF append failed\n");
		return -1;
	}
	if (iommu_mode == 2) {
		dest_addr = (phys_addr_t)rte_mem_virt2iova((void *)append_ptr);
		/* pre_compute holds the inner then outer keyed contexts */
		ctx_paddr = (phys_addr_t)rte_mem_virt2iova(
					session->auth.pre_compute);
	} else {
		dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);
		ctx_paddr = (phys_addr_t)rte_mem_virt2phy(
					session->auth.pre_compute);
	}
	/* second half of the scratch area holds the intermediate hash */
	dest_addr_t = dest_addr + (session->auth.ctx_len / 2);
	desc = &cmd_q->qbase_desc[cmd_q->qidx];
	memset(desc, 0, Q_DESC_SIZE);

	/*desc1 for SHA3-Ihash operation */
	CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
	CCP_CMD_INIT(desc) = 1;
	CCP_CMD_EOM(desc) = 1;

	function.raw = 0;
	CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
	CCP_CMD_FUNCTION(desc) = function.raw;
	CCP_CMD_LEN(desc) = op->sym->auth.data.length;

	CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
	CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	/* intermediate result lands in the LSB */
	CCP_CMD_DST_LO(desc) = (cmd_q->sb_sha * CCP_SB_BYTES);
	CCP_CMD_DST_HI(desc) = 0;
	CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SB;

	/* key field carries the precomputed inner context */
	CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
	CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
	CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;

	rte_wmb();
	/* ring the doorbell for desc1 */
	tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
		      cmd_q->qcontrol | CMD_Q_RUN);

	/* Intermediate Hash value retrieve */
	if ((session->auth.ut.sha_type == CCP_SHA3_TYPE_384) ||
	    (session->auth.ut.sha_type == CCP_SHA3_TYPE_512)) {
		/* 384/512 intermediates span two LSB slots */
		pst.src_addr =
			(phys_addr_t)((cmd_q->sb_sha + 1) * CCP_SB_BYTES);
		pst.dest_addr = dest_addr_t;
		pst.len = CCP_SB_BYTES;
		pst.dir = 0;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
		ccp_perform_passthru(&pst, cmd_q);

		pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
		pst.dest_addr = dest_addr_t + CCP_SB_BYTES;
		pst.len = CCP_SB_BYTES;
		pst.dir = 0;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
		ccp_perform_passthru(&pst, cmd_q);

	} else {
		pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
		pst.dest_addr = dest_addr_t;
		pst.len = CCP_SB_BYTES;
		pst.dir = 0;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
		ccp_perform_passthru(&pst, cmd_q);
	}

	/**sha engine command descriptor for FinalHash*/
	/* advance to the precomputed outer context */
	ctx_paddr += CCP_SHA3_CTX_SIZE;
	desc = &cmd_q->qbase_desc[cmd_q->qidx];
	memset(desc, 0, Q_DESC_SIZE);

	CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
	CCP_CMD_INIT(desc) = 1;
	CCP_CMD_EOM(desc) = 1;

	function.raw = 0;
	CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
	CCP_CMD_FUNCTION(desc) = function.raw;

	/* skip slot padding so only the digest bytes feed the outer hash */
	if (session->auth.ut.sha_type == CCP_SHA3_TYPE_224) {
		dest_addr_t += (CCP_SB_BYTES - SHA224_DIGEST_SIZE);
		CCP_CMD_LEN(desc) = SHA224_DIGEST_SIZE;
	} else if (session->auth.ut.sha_type == CCP_SHA3_TYPE_256) {
		CCP_CMD_LEN(desc) = SHA256_DIGEST_SIZE;
	} else if (session->auth.ut.sha_type == CCP_SHA3_TYPE_384) {
		dest_addr_t += (2 * CCP_SB_BYTES - SHA384_DIGEST_SIZE);
		CCP_CMD_LEN(desc) = SHA384_DIGEST_SIZE;
	} else {
		CCP_CMD_LEN(desc) = SHA512_DIGEST_SIZE;
	}

	CCP_CMD_SRC_LO(desc) = ((uint32_t)dest_addr_t);
	CCP_CMD_SRC_HI(desc) = high32_value(dest_addr_t);
	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	/* final HMAC goes straight to the mbuf scratch area */
	CCP_CMD_DST_LO(desc) = (uint32_t)dest_addr;
	CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
	CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
	CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
	CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;

	rte_wmb();
	/* ring the doorbell for desc2 */
	tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
		      cmd_q->qcontrol | CMD_Q_RUN);

	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
	return 0;
}
1994
1995 static int
ccp_perform_sha3(struct rte_crypto_op * op,struct ccp_queue * cmd_q)1996 ccp_perform_sha3(struct rte_crypto_op *op,
1997 struct ccp_queue *cmd_q)
1998 {
1999 struct ccp_session *session;
2000 union ccp_function function;
2001 struct ccp_desc *desc;
2002 uint8_t *ctx_addr = NULL, *append_ptr = NULL;
2003 uint32_t tail;
2004 phys_addr_t src_addr, dest_addr, ctx_paddr;
2005
2006 session = (struct ccp_session *)get_sym_session_private_data(
2007 op->sym->session,
2008 ccp_cryptodev_driver_id);
2009
2010 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
2011 op->sym->auth.data.offset);
2012 append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
2013 session->auth.ctx_len);
2014 if (!append_ptr) {
2015 CCP_LOG_ERR("CCP MBUF append failed\n");
2016 return -1;
2017 }
2018 if (iommu_mode == 2) {
2019 dest_addr = (phys_addr_t)rte_mem_virt2iova((void *)append_ptr);
2020 ctx_paddr = (phys_addr_t)rte_mem_virt2iova((void *)ctx_addr);
2021 } else {
2022 dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);
2023 ctx_paddr = (phys_addr_t)rte_mem_virt2phy((void *)ctx_addr);
2024 }
2025
2026 ctx_addr = session->auth.sha3_ctx;
2027
2028 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2029 memset(desc, 0, Q_DESC_SIZE);
2030
2031 /* prepare desc for SHA3 operation */
2032 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
2033 CCP_CMD_INIT(desc) = 1;
2034 CCP_CMD_EOM(desc) = 1;
2035
2036 function.raw = 0;
2037 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
2038 CCP_CMD_FUNCTION(desc) = function.raw;
2039
2040 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
2041
2042 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2043 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2044 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2045
2046 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
2047 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
2048 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2049
2050 CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
2051 CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
2052 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2053
2054 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2055
2056 rte_wmb();
2057
2058 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2059 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2060 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2061 cmd_q->qcontrol | CMD_Q_RUN);
2062
2063 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
2064 return 0;
2065 }
2066
2067 static int
ccp_perform_aes_cmac(struct rte_crypto_op * op,struct ccp_queue * cmd_q)2068 ccp_perform_aes_cmac(struct rte_crypto_op *op,
2069 struct ccp_queue *cmd_q)
2070 {
2071 struct ccp_session *session;
2072 union ccp_function function;
2073 struct ccp_passthru pst;
2074 struct ccp_desc *desc;
2075 uint32_t tail;
2076 uint8_t *src_tb, *append_ptr, *ctx_addr;
2077 phys_addr_t src_addr, dest_addr, key_addr;
2078 int length, non_align_len;
2079
2080 session = (struct ccp_session *)get_sym_session_private_data(
2081 op->sym->session,
2082 ccp_cryptodev_driver_id);
2083 key_addr = rte_mem_virt2phy(session->auth.key_ccp);
2084
2085 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
2086 op->sym->auth.data.offset);
2087 append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
2088 session->auth.ctx_len);
2089 dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);
2090
2091 function.raw = 0;
2092 CCP_AES_ENCRYPT(&function) = CCP_CIPHER_DIR_ENCRYPT;
2093 CCP_AES_MODE(&function) = session->auth.um.aes_mode;
2094 CCP_AES_TYPE(&function) = session->auth.ut.aes_type;
2095
2096 if (op->sym->auth.data.length % session->auth.block_size == 0) {
2097
2098 ctx_addr = session->auth.pre_compute;
2099 memset(ctx_addr, 0, AES_BLOCK_SIZE);
2100 if (iommu_mode == 2)
2101 pst.src_addr = (phys_addr_t)rte_mem_virt2iova(
2102 (void *)ctx_addr);
2103 else
2104 pst.src_addr = (phys_addr_t)rte_mem_virt2phy(
2105 (void *)ctx_addr);
2106
2107 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2108 pst.len = CCP_SB_BYTES;
2109 pst.dir = 1;
2110 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2111 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2112 ccp_perform_passthru(&pst, cmd_q);
2113
2114 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2115 memset(desc, 0, Q_DESC_SIZE);
2116
2117 /* prepare desc for aes-cmac command */
2118 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2119 CCP_CMD_EOM(desc) = 1;
2120 CCP_CMD_FUNCTION(desc) = function.raw;
2121
2122 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
2123 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2124 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2125 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2126
2127 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2128 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2129 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2130 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2131
2132 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2133
2134 rte_wmb();
2135
2136 tail =
2137 (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2138 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2139 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2140 cmd_q->qcontrol | CMD_Q_RUN);
2141 } else {
2142 ctx_addr = session->auth.pre_compute + CCP_SB_BYTES;
2143 memset(ctx_addr, 0, AES_BLOCK_SIZE);
2144 if (iommu_mode == 2)
2145 pst.src_addr = (phys_addr_t)rte_mem_virt2iova(
2146 (void *)ctx_addr);
2147 else
2148 pst.src_addr = (phys_addr_t)rte_mem_virt2phy(
2149 (void *)ctx_addr);
2150 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2151 pst.len = CCP_SB_BYTES;
2152 pst.dir = 1;
2153 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2154 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2155 ccp_perform_passthru(&pst, cmd_q);
2156
2157 length = (op->sym->auth.data.length / AES_BLOCK_SIZE);
2158 length *= AES_BLOCK_SIZE;
2159 non_align_len = op->sym->auth.data.length - length;
2160 /* prepare desc for aes-cmac command */
2161 /*Command 1*/
2162 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2163 memset(desc, 0, Q_DESC_SIZE);
2164
2165 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2166 CCP_CMD_INIT(desc) = 1;
2167 CCP_CMD_FUNCTION(desc) = function.raw;
2168
2169 CCP_CMD_LEN(desc) = length;
2170 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2171 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2172 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2173
2174 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2175 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2176 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2177 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2178
2179 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2180
2181 /*Command 2*/
2182 append_ptr = append_ptr + CCP_SB_BYTES;
2183 memset(append_ptr, 0, AES_BLOCK_SIZE);
2184 src_tb = rte_pktmbuf_mtod_offset(op->sym->m_src,
2185 uint8_t *,
2186 op->sym->auth.data.offset +
2187 length);
2188 rte_memcpy(append_ptr, src_tb, non_align_len);
2189 append_ptr[non_align_len] = CMAC_PAD_VALUE;
2190
2191 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2192 memset(desc, 0, Q_DESC_SIZE);
2193
2194 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2195 CCP_CMD_EOM(desc) = 1;
2196 CCP_CMD_FUNCTION(desc) = function.raw;
2197 CCP_CMD_LEN(desc) = AES_BLOCK_SIZE;
2198
2199 CCP_CMD_SRC_LO(desc) = ((uint32_t)(dest_addr + CCP_SB_BYTES));
2200 CCP_CMD_SRC_HI(desc) = high32_value(dest_addr + CCP_SB_BYTES);
2201 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2202
2203 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2204 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2205 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2206 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2207
2208 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2209
2210 rte_wmb();
2211 tail =
2212 (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2213 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2214 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2215 cmd_q->qcontrol | CMD_Q_RUN);
2216 }
2217 /* Retrieve result */
2218 pst.dest_addr = dest_addr;
2219 pst.src_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2220 pst.len = CCP_SB_BYTES;
2221 pst.dir = 0;
2222 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2223 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
2224 ccp_perform_passthru(&pst, cmd_q);
2225
2226 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
2227 return 0;
2228 }
2229
2230 static int
ccp_perform_aes(struct rte_crypto_op * op,struct ccp_queue * cmd_q,struct ccp_batch_info * b_info)2231 ccp_perform_aes(struct rte_crypto_op *op,
2232 struct ccp_queue *cmd_q,
2233 struct ccp_batch_info *b_info)
2234 {
2235 struct ccp_session *session;
2236 union ccp_function function;
2237 uint8_t *lsb_buf;
2238 struct ccp_passthru pst = {0};
2239 struct ccp_desc *desc;
2240 phys_addr_t src_addr, dest_addr, key_addr;
2241 uint8_t *iv;
2242
2243 session = (struct ccp_session *)get_sym_session_private_data(
2244 op->sym->session,
2245 ccp_cryptodev_driver_id);
2246 function.raw = 0;
2247
2248 iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
2249 if (session->cipher.um.aes_mode != CCP_AES_MODE_ECB) {
2250 if (session->cipher.um.aes_mode == CCP_AES_MODE_CTR) {
2251 rte_memcpy(session->cipher.nonce + AES_BLOCK_SIZE,
2252 iv, session->iv.length);
2253 pst.src_addr = (phys_addr_t)session->cipher.nonce_phys;
2254 CCP_AES_SIZE(&function) = 0x1F;
2255 } else {
2256 lsb_buf =
2257 &(b_info->lsb_buf[b_info->lsb_buf_idx*CCP_SB_BYTES]);
2258 rte_memcpy(lsb_buf +
2259 (CCP_SB_BYTES - session->iv.length),
2260 iv, session->iv.length);
2261 pst.src_addr = b_info->lsb_buf_phys +
2262 (b_info->lsb_buf_idx * CCP_SB_BYTES);
2263 b_info->lsb_buf_idx++;
2264 }
2265
2266 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2267 pst.len = CCP_SB_BYTES;
2268 pst.dir = 1;
2269 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2270 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
2271 ccp_perform_passthru(&pst, cmd_q);
2272 }
2273
2274 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2275
2276 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
2277 op->sym->cipher.data.offset);
2278 if (likely(op->sym->m_dst != NULL))
2279 dest_addr = rte_pktmbuf_iova_offset(op->sym->m_dst,
2280 op->sym->cipher.data.offset);
2281 else
2282 dest_addr = src_addr;
2283 key_addr = session->cipher.key_phys;
2284
2285 /* prepare desc for aes command */
2286 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2287 CCP_CMD_INIT(desc) = 1;
2288 CCP_CMD_EOM(desc) = 1;
2289
2290 CCP_AES_ENCRYPT(&function) = session->cipher.dir;
2291 CCP_AES_MODE(&function) = session->cipher.um.aes_mode;
2292 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2293 CCP_CMD_FUNCTION(desc) = function.raw;
2294
2295 CCP_CMD_LEN(desc) = op->sym->cipher.data.length;
2296
2297 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2298 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2299 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2300
2301 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
2302 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
2303 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2304
2305 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2306 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2307 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2308
2309 if (session->cipher.um.aes_mode != CCP_AES_MODE_ECB)
2310 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2311
2312 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2313 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
2314 return 0;
2315 }
2316
2317 static int
ccp_perform_3des(struct rte_crypto_op * op,struct ccp_queue * cmd_q,struct ccp_batch_info * b_info)2318 ccp_perform_3des(struct rte_crypto_op *op,
2319 struct ccp_queue *cmd_q,
2320 struct ccp_batch_info *b_info)
2321 {
2322 struct ccp_session *session;
2323 union ccp_function function;
2324 unsigned char *lsb_buf;
2325 struct ccp_passthru pst;
2326 struct ccp_desc *desc;
2327 uint32_t tail;
2328 uint8_t *iv;
2329 phys_addr_t src_addr, dest_addr, key_addr;
2330
2331 session = (struct ccp_session *)get_sym_session_private_data(
2332 op->sym->session,
2333 ccp_cryptodev_driver_id);
2334
2335 iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
2336 switch (session->cipher.um.des_mode) {
2337 case CCP_DES_MODE_CBC:
2338 lsb_buf = &(b_info->lsb_buf[b_info->lsb_buf_idx*CCP_SB_BYTES]);
2339 b_info->lsb_buf_idx++;
2340
2341 rte_memcpy(lsb_buf + (CCP_SB_BYTES - session->iv.length),
2342 iv, session->iv.length);
2343 if (iommu_mode == 2)
2344 pst.src_addr = (phys_addr_t)rte_mem_virt2iova(
2345 (void *) lsb_buf);
2346 else
2347 pst.src_addr = (phys_addr_t)rte_mem_virt2phy(
2348 (void *) lsb_buf);
2349 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2350 pst.len = CCP_SB_BYTES;
2351 pst.dir = 1;
2352 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2353 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
2354 ccp_perform_passthru(&pst, cmd_q);
2355 break;
2356 case CCP_DES_MODE_CFB:
2357 case CCP_DES_MODE_ECB:
2358 CCP_LOG_ERR("Unsupported DES cipher mode");
2359 return -ENOTSUP;
2360 }
2361
2362 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
2363 op->sym->cipher.data.offset);
2364 if (unlikely(op->sym->m_dst != NULL))
2365 dest_addr =
2366 rte_pktmbuf_iova_offset(op->sym->m_dst,
2367 op->sym->cipher.data.offset);
2368 else
2369 dest_addr = src_addr;
2370
2371 if (iommu_mode == 2)
2372 key_addr = rte_mem_virt2iova(session->cipher.key_ccp);
2373 else
2374 key_addr = rte_mem_virt2phy(session->cipher.key_ccp);
2375
2376 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2377
2378 memset(desc, 0, Q_DESC_SIZE);
2379
2380 /* prepare desc for des command */
2381 CCP_CMD_ENGINE(desc) = CCP_ENGINE_3DES;
2382
2383 CCP_CMD_SOC(desc) = 0;
2384 CCP_CMD_IOC(desc) = 0;
2385 CCP_CMD_INIT(desc) = 1;
2386 CCP_CMD_EOM(desc) = 1;
2387 CCP_CMD_PROT(desc) = 0;
2388
2389 function.raw = 0;
2390 CCP_DES_ENCRYPT(&function) = session->cipher.dir;
2391 CCP_DES_MODE(&function) = session->cipher.um.des_mode;
2392 CCP_DES_TYPE(&function) = session->cipher.ut.des_type;
2393 CCP_CMD_FUNCTION(desc) = function.raw;
2394
2395 CCP_CMD_LEN(desc) = op->sym->cipher.data.length;
2396
2397 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2398 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2399 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2400
2401 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
2402 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
2403 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2404
2405 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2406 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2407 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2408
2409 if (session->cipher.um.des_mode)
2410 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2411
2412 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2413
2414 rte_wmb();
2415
2416 /* Write the new tail address back to the queue register */
2417 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2418 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2419 /* Turn the queue back on using our cached control register */
2420 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2421 cmd_q->qcontrol | CMD_Q_RUN);
2422
2423 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
2424 return 0;
2425 }
2426
2427 static int
ccp_perform_aes_gcm(struct rte_crypto_op * op,struct ccp_queue * cmd_q)2428 ccp_perform_aes_gcm(struct rte_crypto_op *op, struct ccp_queue *cmd_q)
2429 {
2430 struct ccp_session *session;
2431 union ccp_function function;
2432 uint8_t *iv;
2433 struct ccp_passthru pst;
2434 struct ccp_desc *desc;
2435 uint32_t tail;
2436 uint64_t *temp;
2437 phys_addr_t src_addr, dest_addr, key_addr, aad_addr;
2438 phys_addr_t digest_dest_addr;
2439 int length, non_align_len;
2440
2441 session = (struct ccp_session *)get_sym_session_private_data(
2442 op->sym->session,
2443 ccp_cryptodev_driver_id);
2444 iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
2445 key_addr = session->cipher.key_phys;
2446
2447 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
2448 op->sym->aead.data.offset);
2449 if (unlikely(op->sym->m_dst != NULL))
2450 dest_addr = rte_pktmbuf_iova_offset(op->sym->m_dst,
2451 op->sym->aead.data.offset);
2452 else
2453 dest_addr = src_addr;
2454 rte_pktmbuf_append(op->sym->m_src, session->auth.ctx_len);
2455 digest_dest_addr = op->sym->aead.digest.phys_addr;
2456 temp = (uint64_t *)(op->sym->aead.digest.data + AES_BLOCK_SIZE);
2457 *temp++ = rte_bswap64(session->auth.aad_length << 3);
2458 *temp = rte_bswap64(op->sym->aead.data.length << 3);
2459
2460 non_align_len = op->sym->aead.data.length % AES_BLOCK_SIZE;
2461 length = CCP_ALIGN(op->sym->aead.data.length, AES_BLOCK_SIZE);
2462
2463 aad_addr = op->sym->aead.aad.phys_addr;
2464
2465 /* CMD1 IV Passthru */
2466 rte_memcpy(session->cipher.nonce + AES_BLOCK_SIZE, iv,
2467 session->iv.length);
2468 pst.src_addr = session->cipher.nonce_phys;
2469 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2470 pst.len = CCP_SB_BYTES;
2471 pst.dir = 1;
2472 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2473 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2474 ccp_perform_passthru(&pst, cmd_q);
2475
2476 /* CMD2 GHASH-AAD */
2477 function.raw = 0;
2478 CCP_AES_ENCRYPT(&function) = CCP_AES_MODE_GHASH_AAD;
2479 CCP_AES_MODE(&function) = CCP_AES_MODE_GHASH;
2480 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2481
2482 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2483 memset(desc, 0, Q_DESC_SIZE);
2484
2485 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2486 CCP_CMD_INIT(desc) = 1;
2487 CCP_CMD_FUNCTION(desc) = function.raw;
2488
2489 CCP_CMD_LEN(desc) = session->auth.aad_length;
2490
2491 CCP_CMD_SRC_LO(desc) = ((uint32_t)aad_addr);
2492 CCP_CMD_SRC_HI(desc) = high32_value(aad_addr);
2493 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2494
2495 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2496 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2497 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2498
2499 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2500
2501 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2502 rte_wmb();
2503
2504 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2505 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2506 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2507 cmd_q->qcontrol | CMD_Q_RUN);
2508
2509 /* CMD3 : GCTR Plain text */
2510 function.raw = 0;
2511 CCP_AES_ENCRYPT(&function) = session->cipher.dir;
2512 CCP_AES_MODE(&function) = CCP_AES_MODE_GCTR;
2513 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2514 if (non_align_len == 0)
2515 CCP_AES_SIZE(&function) = (AES_BLOCK_SIZE << 3) - 1;
2516 else
2517 CCP_AES_SIZE(&function) = (non_align_len << 3) - 1;
2518
2519
2520 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2521 memset(desc, 0, Q_DESC_SIZE);
2522
2523 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2524 CCP_CMD_EOM(desc) = 1;
2525 CCP_CMD_FUNCTION(desc) = function.raw;
2526
2527 CCP_CMD_LEN(desc) = length;
2528
2529 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2530 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2531 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2532
2533 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
2534 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
2535 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2536
2537 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2538 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2539 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2540
2541 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2542
2543 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2544 rte_wmb();
2545
2546 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2547 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2548 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2549 cmd_q->qcontrol | CMD_Q_RUN);
2550
2551 /* CMD4 : PT to copy IV */
2552 pst.src_addr = session->cipher.nonce_phys;
2553 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2554 pst.len = AES_BLOCK_SIZE;
2555 pst.dir = 1;
2556 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2557 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2558 ccp_perform_passthru(&pst, cmd_q);
2559
2560 /* CMD5 : GHASH-Final */
2561 function.raw = 0;
2562 CCP_AES_ENCRYPT(&function) = CCP_AES_MODE_GHASH_FINAL;
2563 CCP_AES_MODE(&function) = CCP_AES_MODE_GHASH;
2564 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2565
2566 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2567 memset(desc, 0, Q_DESC_SIZE);
2568
2569 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2570 CCP_CMD_FUNCTION(desc) = function.raw;
2571 /* Last block (AAD_len || PT_len)*/
2572 CCP_CMD_LEN(desc) = AES_BLOCK_SIZE;
2573
2574 CCP_CMD_SRC_LO(desc) = ((uint32_t)digest_dest_addr + AES_BLOCK_SIZE);
2575 CCP_CMD_SRC_HI(desc) = high32_value(digest_dest_addr + AES_BLOCK_SIZE);
2576 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2577
2578 CCP_CMD_DST_LO(desc) = ((uint32_t)digest_dest_addr);
2579 CCP_CMD_DST_HI(desc) = high32_value(digest_dest_addr);
2580 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2581
2582 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2583 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2584 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2585
2586 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2587
2588 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2589 rte_wmb();
2590
2591 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2592 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2593 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2594 cmd_q->qcontrol | CMD_Q_RUN);
2595
2596 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
2597 return 0;
2598 }
2599
2600 static inline int
ccp_crypto_cipher(struct rte_crypto_op * op,struct ccp_queue * cmd_q,struct ccp_batch_info * b_info)2601 ccp_crypto_cipher(struct rte_crypto_op *op,
2602 struct ccp_queue *cmd_q,
2603 struct ccp_batch_info *b_info)
2604 {
2605 int result = 0;
2606 struct ccp_session *session;
2607
2608 session = (struct ccp_session *)get_sym_session_private_data(
2609 op->sym->session,
2610 ccp_cryptodev_driver_id);
2611
2612 switch (session->cipher.algo) {
2613 case CCP_CIPHER_ALGO_AES_CBC:
2614 result = ccp_perform_aes(op, cmd_q, b_info);
2615 b_info->desccnt += 2;
2616 break;
2617 case CCP_CIPHER_ALGO_AES_CTR:
2618 result = ccp_perform_aes(op, cmd_q, b_info);
2619 b_info->desccnt += 2;
2620 break;
2621 case CCP_CIPHER_ALGO_AES_ECB:
2622 result = ccp_perform_aes(op, cmd_q, b_info);
2623 b_info->desccnt += 1;
2624 break;
2625 case CCP_CIPHER_ALGO_3DES_CBC:
2626 result = ccp_perform_3des(op, cmd_q, b_info);
2627 b_info->desccnt += 2;
2628 break;
2629 default:
2630 CCP_LOG_ERR("Unsupported cipher algo %d",
2631 session->cipher.algo);
2632 return -ENOTSUP;
2633 }
2634 return result;
2635 }
2636
/*
 * Dispatch the auth part of @op to the matching CCP perform routine and
 * account the descriptors it queued in @b_info->desccnt (the counts per
 * algorithm mirror what each perform routine emits). HMAC variants are
 * only offloaded when the session was not configured for host-side auth
 * (session->auth_opt == 0). Returns the routine's result or -ENOTSUP.
 */
static inline int
ccp_crypto_auth(struct rte_crypto_op *op,
		struct ccp_queue *cmd_q,
		struct ccp_batch_info *b_info)
{

	int result = 0;
	struct ccp_session *session;

	session = (struct ccp_session *)get_sym_session_private_data(
					 op->sym->session,
					ccp_cryptodev_driver_id);

	switch (session->auth.algo) {
	case CCP_AUTH_ALGO_SHA1:
	case CCP_AUTH_ALGO_SHA224:
	case CCP_AUTH_ALGO_SHA256:
	case CCP_AUTH_ALGO_SHA384:
	case CCP_AUTH_ALGO_SHA512:
		/* plain SHA: 3 descriptors per op */
		result = ccp_perform_sha(op, cmd_q);
		b_info->desccnt += 3;
		break;
	case CCP_AUTH_ALGO_MD5_HMAC:
		/* MD5-HMAC has no engine path; only valid with auth_opt
		 * (host-side) processing, which is handled by the caller.
		 */
		if (session->auth_opt == 0)
			result = -1;
		break;
	case CCP_AUTH_ALGO_SHA1_HMAC:
	case CCP_AUTH_ALGO_SHA224_HMAC:
	case CCP_AUTH_ALGO_SHA256_HMAC:
		if (session->auth_opt == 0) {
			result = ccp_perform_hmac(op, cmd_q);
			b_info->desccnt += 6;
		}
		break;
	case CCP_AUTH_ALGO_SHA384_HMAC:
	case CCP_AUTH_ALGO_SHA512_HMAC:
		/* wider digest needs one extra descriptor vs SHA1/224/256 */
		if (session->auth_opt == 0) {
			result = ccp_perform_hmac(op, cmd_q);
			b_info->desccnt += 7;
		}
		break;
	case CCP_AUTH_ALGO_SHA3_224:
	case CCP_AUTH_ALGO_SHA3_256:
	case CCP_AUTH_ALGO_SHA3_384:
	case CCP_AUTH_ALGO_SHA3_512:
		result = ccp_perform_sha3(op, cmd_q);
		b_info->desccnt += 1;
		break;
	case CCP_AUTH_ALGO_SHA3_224_HMAC:
	case CCP_AUTH_ALGO_SHA3_256_HMAC:
		result = ccp_perform_sha3_hmac(op, cmd_q);
		b_info->desccnt += 3;
		break;
	case CCP_AUTH_ALGO_SHA3_384_HMAC:
	case CCP_AUTH_ALGO_SHA3_512_HMAC:
		result = ccp_perform_sha3_hmac(op, cmd_q);
		b_info->desccnt += 4;
		break;
	case CCP_AUTH_ALGO_AES_CMAC:
		result = ccp_perform_aes_cmac(op, cmd_q);
		b_info->desccnt += 4;
		break;
	default:
		CCP_LOG_ERR("Unsupported auth algo %d",
			    session->auth.algo);
		return -ENOTSUP;
	}

	return result;
}
2707
2708 static inline int
ccp_crypto_aead(struct rte_crypto_op * op,struct ccp_queue * cmd_q,struct ccp_batch_info * b_info)2709 ccp_crypto_aead(struct rte_crypto_op *op,
2710 struct ccp_queue *cmd_q,
2711 struct ccp_batch_info *b_info)
2712 {
2713 int result = 0;
2714 struct ccp_session *session;
2715
2716 session = (struct ccp_session *)get_sym_session_private_data(
2717 op->sym->session,
2718 ccp_cryptodev_driver_id);
2719
2720 switch (session->auth.algo) {
2721 case CCP_AUTH_ALGO_AES_GCM:
2722 if (session->cipher.algo != CCP_CIPHER_ALGO_AES_GCM) {
2723 CCP_LOG_ERR("Incorrect chain order");
2724 return -1;
2725 }
2726 result = ccp_perform_aes_gcm(op, cmd_q);
2727 b_info->desccnt += 5;
2728 break;
2729 default:
2730 CCP_LOG_ERR("Unsupported aead algo %d",
2731 session->aead_algo);
2732 return -ENOTSUP;
2733 }
2734 return result;
2735 }
2736
2737 int
process_ops_to_enqueue(struct ccp_qp * qp,struct rte_crypto_op ** op,struct ccp_queue * cmd_q,uint16_t nb_ops,uint16_t total_nb_ops,int slots_req,uint16_t b_idx)2738 process_ops_to_enqueue(struct ccp_qp *qp,
2739 struct rte_crypto_op **op,
2740 struct ccp_queue *cmd_q,
2741 uint16_t nb_ops,
2742 uint16_t total_nb_ops,
2743 int slots_req,
2744 uint16_t b_idx)
2745 {
2746 int i, result = 0;
2747 struct ccp_batch_info *b_info;
2748 struct ccp_session *session;
2749 EVP_MD_CTX *auth_ctx = NULL;
2750
2751 if (rte_mempool_get(qp->batch_mp, (void **)&b_info)) {
2752 CCP_LOG_ERR("batch info allocation failed");
2753 return 0;
2754 }
2755
2756 auth_ctx = EVP_MD_CTX_create();
2757 if (unlikely(!auth_ctx)) {
2758 CCP_LOG_ERR("Unable to create auth ctx");
2759 return 0;
2760 }
2761 b_info->auth_ctr = 0;
2762
2763 /* populate batch info necessary for dequeue */
2764 b_info->op_idx = 0;
2765 b_info->b_idx = 0;
2766 b_info->lsb_buf_idx = 0;
2767 b_info->desccnt = 0;
2768 b_info->cmd_q = cmd_q;
2769 if (iommu_mode == 2)
2770 b_info->lsb_buf_phys =
2771 (phys_addr_t)rte_mem_virt2iova((void *)b_info->lsb_buf);
2772 else
2773 b_info->lsb_buf_phys =
2774 (phys_addr_t)rte_mem_virt2phy((void *)b_info->lsb_buf);
2775
2776 rte_atomic64_sub(&b_info->cmd_q->free_slots, slots_req);
2777
2778 b_info->head_offset = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx *
2779 Q_DESC_SIZE);
2780 for (i = b_idx; i < (nb_ops+b_idx); i++) {
2781 session = (struct ccp_session *)get_sym_session_private_data(
2782 op[i]->sym->session,
2783 ccp_cryptodev_driver_id);
2784 switch (session->cmd_id) {
2785 case CCP_CMD_CIPHER:
2786 result = ccp_crypto_cipher(op[i], cmd_q, b_info);
2787 break;
2788 case CCP_CMD_AUTH:
2789 if (session->auth_opt) {
2790 b_info->auth_ctr++;
2791 result = cpu_crypto_auth(qp, op[i],
2792 session, auth_ctx);
2793 } else
2794 result = ccp_crypto_auth(op[i], cmd_q, b_info);
2795 break;
2796 case CCP_CMD_CIPHER_HASH:
2797 result = ccp_crypto_cipher(op[i], cmd_q, b_info);
2798 if (result)
2799 break;
2800 result = ccp_crypto_auth(op[i], cmd_q, b_info);
2801 break;
2802 case CCP_CMD_HASH_CIPHER:
2803 if (session->auth_opt) {
2804 result = cpu_crypto_auth(qp, op[i],
2805 session, auth_ctx);
2806 if (op[i]->status !=
2807 RTE_CRYPTO_OP_STATUS_SUCCESS)
2808 CCP_LOG_ERR("RTE_CRYPTO_OP_STATUS_AUTH_FAILED");
2809 } else
2810 result = ccp_crypto_auth(op[i], cmd_q, b_info);
2811
2812 if (result)
2813 break;
2814 result = ccp_crypto_cipher(op[i], cmd_q, b_info);
2815 break;
2816 case CCP_CMD_COMBINED:
2817 result = ccp_crypto_aead(op[i], cmd_q, b_info);
2818 break;
2819 default:
2820 CCP_LOG_ERR("Unsupported cmd_id");
2821 result = -1;
2822 }
2823 if (unlikely(result < 0)) {
2824 rte_atomic64_add(&b_info->cmd_q->free_slots,
2825 (slots_req - b_info->desccnt));
2826 break;
2827 }
2828 b_info->op[i] = op[i];
2829 }
2830
2831 b_info->opcnt = i;
2832 b_info->b_idx = b_idx;
2833 b_info->total_nb_ops = total_nb_ops;
2834 b_info->tail_offset = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx *
2835 Q_DESC_SIZE);
2836
2837 rte_wmb();
2838 /* Write the new tail address back to the queue register */
2839 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE,
2840 b_info->tail_offset);
2841 /* Turn the queue back on using our cached control register */
2842 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2843 cmd_q->qcontrol | CMD_Q_RUN);
2844
2845 rte_ring_enqueue(qp->processed_pkts, (void *)b_info);
2846
2847 EVP_MD_CTX_destroy(auth_ctx);
2848 return i-b_idx;
2849 }
2850
/*
 * Post-process one completed auth/AEAD op at dequeue time: locate the
 * digest the engine wrote into the appended tail of m_src, byte-swap it
 * on the host when the engine emits it in reverse order, then either
 * verify it against the caller's digest (CCP_AUTH_OP_VERIFY) or copy it
 * out, and finally trim the appended scratch area off the mbuf.
 */
static inline void ccp_auth_dq_prepare(struct rte_crypto_op *op)
{
	struct ccp_session *session;
	uint8_t *digest_data, *addr;
	struct rte_mbuf *m_last;
	int offset, digest_offset;
	/* scratch for host-side byte reversal; 64 bytes covers the largest
	 * digest (SHA-512)
	 */
	uint8_t digest_le[64];

	session = (struct ccp_session *)get_sym_session_private_data(
					 op->sym->session,
					ccp_cryptodev_driver_id);

	if (session->cmd_id == CCP_CMD_COMBINED) {
		digest_data = op->sym->aead.digest.data;
		digest_offset = op->sym->aead.data.offset +
					op->sym->aead.data.length;
	} else {
		digest_data = op->sym->auth.digest.data;
		digest_offset = op->sym->auth.data.offset +
					op->sym->auth.data.length;
	}
	/* the engine output lives in the ctx_len bytes appended to the
	 * last segment at enqueue time
	 */
	m_last = rte_pktmbuf_lastseg(op->sym->m_src);
	addr = (uint8_t *)((char *)m_last->buf_addr + m_last->data_off +
			   m_last->data_len - session->auth.ctx_len);

	rte_mb();
	offset = session->auth.offset;

	if (session->auth.engine == CCP_ENGINE_SHA)
		if ((session->auth.ut.sha_type != CCP_SHA_TYPE_1) &&
		    (session->auth.ut.sha_type != CCP_SHA_TYPE_224) &&
		    (session->auth.ut.sha_type != CCP_SHA_TYPE_256)) {
			/* All other algorithms require byte
			 * swap done by host
			 */
			unsigned int i;

			/* walk backwards from the end of the context area */
			offset = session->auth.ctx_len -
				session->auth.offset - 1;
			for (i = 0; i < session->auth.digest_length; i++)
				digest_le[i] = addr[offset - i];
			offset = 0;
			addr = digest_le;
		}

	op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
	if (session->auth.op == CCP_AUTH_OP_VERIFY) {
		if (memcmp(addr + offset, digest_data,
			   session->auth.digest_length) != 0)
			op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;

	} else {
		/* NOTE(review): falls back to writing the digest into
		 * m_dst when no digest pointer was supplied — confirm
		 * m_dst is guaranteed non-NULL on this path.
		 */
		if (unlikely(digest_data == 0))
			digest_data = rte_pktmbuf_mtod_offset(
					op->sym->m_dst, uint8_t *,
					digest_offset);
		rte_memcpy(digest_data, addr + offset,
			   session->auth.digest_length);
	}
	/* Trim area used for digest from mbuf. */
	rte_pktmbuf_trim(op->sym->m_src,
			 session->auth.ctx_len);
}
2914
/*
 * Finalize up to @nb_ops completed ops from @b_info into @op_d: set the
 * op status and, for auth/AEAD chains, verify or copy out the digest
 * via ccp_auth_dq_prepare(). auth_opt sessions re-run the hash on the
 * host CPU instead. Returns the number of ops finalized and decrements
 * b_info->opcnt accordingly.
 */
static int
ccp_prepare_ops(struct ccp_qp *qp,
		struct rte_crypto_op **op_d,
		struct ccp_batch_info *b_info,
		uint16_t nb_ops)
{
	int i, min_ops;
	struct ccp_session *session;

	EVP_MD_CTX *auth_ctx = NULL;

	auth_ctx = EVP_MD_CTX_create();
	if (unlikely(!auth_ctx)) {
		CCP_LOG_ERR("Unable to create auth ctx");
		return 0;
	}
	min_ops = RTE_MIN(nb_ops, b_info->opcnt);

	/* NOTE(review): the loop starts at b_info->b_idx but is bounded by
	 * min_ops (not b_idx + min_ops), while op_idx tracks progress
	 * within the batch — assumes the caller's indexing convention
	 * keeps these consistent; confirm against process_ops_to_dequeue.
	 */
	for (i = b_info->b_idx; i < min_ops; i++) {
		op_d[i] = b_info->op[b_info->b_idx + b_info->op_idx++];
		session = (struct ccp_session *)get_sym_session_private_data(
						 op_d[i]->sym->session,
						ccp_cryptodev_driver_id);
		switch (session->cmd_id) {
		case CCP_CMD_CIPHER:
			op_d[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
			break;
		case CCP_CMD_AUTH:
			/* engine-offloaded auth needs digest post-processing;
			 * auth_opt ops were already hashed at enqueue
			 */
			if (session->auth_opt == 0)
				ccp_auth_dq_prepare(op_d[i]);
			break;
		case CCP_CMD_CIPHER_HASH:
			if (session->auth_opt)
				cpu_crypto_auth(qp, op_d[i],
						session, auth_ctx);
			else
				ccp_auth_dq_prepare(op_d[i]);
			break;
		case CCP_CMD_HASH_CIPHER:
			/* hash-then-cipher with auth_opt was verified at
			 * enqueue time; nothing left to do here
			 */
			if (session->auth_opt)
				op_d[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
			else
				ccp_auth_dq_prepare(op_d[i]);
			break;
		case CCP_CMD_COMBINED:
			ccp_auth_dq_prepare(op_d[i]);
			break;
		default:
			CCP_LOG_ERR("Unsupported cmd_id");
		}
	}

	EVP_MD_CTX_destroy(auth_ctx);
	b_info->opcnt -= min_ops;
	return min_ops;
}
2971
2972 int
process_ops_to_dequeue(struct ccp_qp * qp,struct rte_crypto_op ** op,uint16_t nb_ops,uint16_t * total_nb_ops)2973 process_ops_to_dequeue(struct ccp_qp *qp,
2974 struct rte_crypto_op **op,
2975 uint16_t nb_ops,
2976 uint16_t *total_nb_ops)
2977 {
2978 struct ccp_batch_info *b_info;
2979 uint32_t cur_head_offset;
2980
2981 if (qp->b_info != NULL) {
2982 b_info = qp->b_info;
2983 if (unlikely(b_info->op_idx > 0))
2984 goto success;
2985 } else if (rte_ring_dequeue(qp->processed_pkts,
2986 (void **)&b_info))
2987 return 0;
2988
2989 if (b_info->auth_ctr == b_info->opcnt)
2990 goto success;
2991 *total_nb_ops = b_info->total_nb_ops;
2992 cur_head_offset = CCP_READ_REG(b_info->cmd_q->reg_base,
2993 CMD_Q_HEAD_LO_BASE);
2994
2995 if (b_info->head_offset < b_info->tail_offset) {
2996 if ((cur_head_offset >= b_info->head_offset) &&
2997 (cur_head_offset < b_info->tail_offset)) {
2998 qp->b_info = b_info;
2999 return 0;
3000 }
3001 } else if (b_info->tail_offset != b_info->head_offset) {
3002 if ((cur_head_offset >= b_info->head_offset) ||
3003 (cur_head_offset < b_info->tail_offset)) {
3004 qp->b_info = b_info;
3005 return 0;
3006 }
3007 }
3008
3009
3010 success:
3011 *total_nb_ops = b_info->total_nb_ops;
3012 nb_ops = ccp_prepare_ops(qp, op, b_info, nb_ops);
3013 rte_atomic64_add(&b_info->cmd_q->free_slots, b_info->desccnt);
3014 b_info->desccnt = 0;
3015 if (b_info->opcnt > 0) {
3016 qp->b_info = b_info;
3017 } else {
3018 rte_mempool_put(qp->batch_mp, (void *)b_info);
3019 qp->b_info = NULL;
3020 }
3021
3022 return nb_ops;
3023 }
3024