1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3 * Unit tests and benchmarks for the CRC library functions
4 *
5 * Copyright 2024 Google LLC
6 *
7 * Author: Eric Biggers <[email protected]>
8 */
9 #include <kunit/test.h>
10 #include <linux/crc7.h>
11 #include <linux/crc16.h>
12 #include <linux/crc-t10dif.h>
13 #include <linux/crc32.h>
14 #include <linux/crc32c.h>
15 #include <linux/crc64.h>
16 #include <linux/prandom.h>
17 #include <linux/vmalloc.h>
18
/* Fixed seed so every run uses the same pseudorandom stream (reproducible). */
#define CRC_KUNIT_SEED 42
/* Maximum data length exercised by the correctness tests and benchmarks. */
#define CRC_KUNIT_MAX_LEN 16384
/* Number of randomized iterations per CRC variant in crc_main_test(). */
#define CRC_KUNIT_NUM_TEST_ITERS 1000

/* Shared PRNG state; seeded in crc_suite_init() and consumed by all tests. */
static struct rnd_state rng;
/* vmalloc'ed random data buffer, sized/placed so overreads hit a guard page. */
static u8 *test_buffer;
static size_t test_buflen;
26
/**
 * struct crc_variant - describes a CRC variant
 * @bits: Number of bits in the CRC, 1 <= @bits <= 64.
 * @le: true if it's a "little endian" CRC (reversed mapping between bits and
 *	polynomial coefficients in each byte), false if it's a "big endian" CRC
 *	(natural mapping between bits and polynomial coefficients in each byte)
 * @poly: The generator polynomial with the highest-order term omitted.
 *	  Bit-reversed if @le is true.
 * @func: The function to compute a CRC. The type signature uses u64 so that it
 *	  can fit any CRC up to CRC-64. The CRC is passed in, and is expected
 *	  to be returned in, the least significant bits of the u64. The
 *	  function is expected to *not* invert the CRC at the beginning and end.
 * @combine_func: Optional function to combine two CRCs: given the CRC of a
 *	buffer A and the CRC of a buffer B along with B's length, it returns
 *	the CRC of the concatenation A || B.  The CRC of B must have been
 *	computed with an initial CRC of 0.
 */
struct crc_variant {
	int bits;
	bool le;
	u64 poly;
	u64 (*func)(u64 crc, const u8 *p, size_t len);
	u64 (*combine_func)(u64 crc1, u64 crc2, size_t len2);
};
48
rand32(void)49 static u32 rand32(void)
50 {
51 return prandom_u32_state(&rng);
52 }
53
rand64(void)54 static u64 rand64(void)
55 {
56 u32 n = rand32();
57
58 return ((u64)n << 32) | rand32();
59 }
60
crc_mask(const struct crc_variant * v)61 static u64 crc_mask(const struct crc_variant *v)
62 {
63 return (u64)-1 >> (64 - v->bits);
64 }
65
/*
 * Reference implementation of any CRC variant: plain bit-at-a-time long
 * division.  Slow but straightforward; crc_main_test() compares the optimized
 * v->func against this.
 */
static u64 crc_ref(const struct crc_variant *v,
		   u64 crc, const u8 *p, size_t len)
{
	size_t i, j;

	for (i = 0; i < len; i++) {
		for (j = 0; j < 8; j++) {
			if (v->le) {
				/*
				 * LSB-first: mix message bit j into bit 0,
				 * then shift right, xoring in the
				 * bit-reversed polynomial if a 1 shifts out.
				 */
				crc ^= (p[i] >> j) & 1;
				crc = (crc >> 1) ^ ((crc & 1) ? v->poly : 0);
			} else {
				/*
				 * MSB-first: mix the message bit into the top
				 * CRC bit, then shift left, reducing by the
				 * polynomial when the top bit was set; mask
				 * keeps the value within @v->bits bits.
				 */
				crc ^= (u64)((p[i] >> (7 - j)) & 1) <<
				       (v->bits - 1);
				if (crc & (1ULL << (v->bits - 1)))
					crc = ((crc << 1) ^ v->poly) &
					      crc_mask(v);
				else
					crc <<= 1;
			}
		}
	}
	return crc;
}
90
/*
 * Suite-wide setup: allocate the shared test buffer, seed the PRNG, and fill
 * the buffer with random data.  Returns 0 or -ENOMEM.
 */
static int crc_suite_init(struct kunit_suite *suite)
{
	/*
	 * Allocate the test buffer using vmalloc() with a page-aligned length
	 * so that it is immediately followed by a guard page. This allows
	 * buffer overreads to be detected, even in assembly code.
	 */
	test_buflen = round_up(CRC_KUNIT_MAX_LEN, PAGE_SIZE);
	test_buffer = vmalloc(test_buflen);
	if (!test_buffer)
		return -ENOMEM;

	/* Fixed seed, so failures are reproducible across runs. */
	prandom_seed_state(&rng, CRC_KUNIT_SEED);
	prandom_bytes_state(&rng, test_buffer, test_buflen);
	return 0;
}
107
/* Suite-wide teardown: release the buffer allocated by crc_suite_init(). */
static void crc_suite_exit(struct kunit_suite *suite)
{
	vfree(test_buffer);
	test_buffer = NULL;	/* guard against stale use if re-entered */
}
113
114 /* Generate a random initial CRC. */
generate_random_initial_crc(const struct crc_variant * v)115 static u64 generate_random_initial_crc(const struct crc_variant *v)
116 {
117 switch (rand32() % 4) {
118 case 0:
119 return 0;
120 case 1:
121 return crc_mask(v); /* All 1 bits */
122 default:
123 return rand64() & crc_mask(v);
124 }
125 }
126
127 /* Generate a random length, preferring small lengths. */
generate_random_length(size_t max_length)128 static size_t generate_random_length(size_t max_length)
129 {
130 size_t len;
131
132 switch (rand32() % 3) {
133 case 0:
134 len = rand32() % 128;
135 break;
136 case 1:
137 len = rand32() % 3072;
138 break;
139 default:
140 len = rand32();
141 break;
142 }
143 return len % (max_length + 1);
144 }
145
/*
 * Test that v->func gives the same CRCs as a reference implementation.
 *
 * Each iteration randomizes the initial CRC value, the data length, and the
 * buffer offset (and hence alignment), and occasionally rewrites the data, to
 * exercise as many code paths of the optimized implementation as possible.
 */
static void crc_main_test(struct kunit *test, const struct crc_variant *v)
{
	size_t i;

	for (i = 0; i < CRC_KUNIT_NUM_TEST_ITERS; i++) {
		u64 init_crc, expected_crc, actual_crc;
		size_t len, offset;
		bool nosimd;

		init_crc = generate_random_initial_crc(v);
		len = generate_random_length(CRC_KUNIT_MAX_LEN);

		/* Generate a random offset. */
		if (rand32() % 2 == 0) {
			/* Use a random alignment mod 64 */
			offset = rand32() % 64;
			/* Clamp so offset+len stays within the first page span */
			offset = min(offset, CRC_KUNIT_MAX_LEN - len);
		} else {
			/* Go up to the guard page, to catch buffer overreads */
			offset = test_buflen - len;
		}

		if (rand32() % 8 == 0)
			/* Refresh the data occasionally. */
			prandom_bytes_state(&rng, &test_buffer[offset], len);

		/*
		 * 1-in-8 chance: run with IRQs disabled, which is expected to
		 * steer the implementation onto its non-SIMD fallback path.
		 * NOTE(review): this relies on the arch's SIMD-usability check
		 * failing in that context -- confirm per architecture.
		 */
		nosimd = rand32() % 8 == 0;

		/*
		 * Compute the CRC, and verify that it equals the CRC computed
		 * by a simple bit-at-a-time reference implementation.
		 */
		expected_crc = crc_ref(v, init_crc, &test_buffer[offset], len);
		if (nosimd)
			local_irq_disable();
		actual_crc = v->func(init_crc, &test_buffer[offset], len);
		if (nosimd)
			local_irq_enable();
		KUNIT_EXPECT_EQ_MSG(test, expected_crc, actual_crc,
				    "Wrong result with len=%zu offset=%zu nosimd=%d",
				    len, offset, nosimd);
	}
}
190
/*
 * Test that CRC(concat(A, B)) == combine_CRCs(CRC(A), CRC(B), len(B)).
 *
 * CRC(B) is computed with an initial CRC of 0, which is what the combine
 * functions expect for the second operand.
 */
static void crc_combine_test(struct kunit *test, const struct crc_variant *v)
{
	int i;

	for (i = 0; i < 100; i++) {
		u64 init_crc = generate_random_initial_crc(v);
		size_t len1 = generate_random_length(CRC_KUNIT_MAX_LEN);
		size_t len2 = generate_random_length(CRC_KUNIT_MAX_LEN - len1);
		u64 crc1, crc2, expected_crc, actual_crc;

		/* Fresh random data for the concatenated region A || B. */
		prandom_bytes_state(&rng, test_buffer, len1 + len2);
		crc1 = v->func(init_crc, test_buffer, len1);
		crc2 = v->func(0, &test_buffer[len1], len2);
		/* Reference result: a single pass over the whole region. */
		expected_crc = v->func(init_crc, test_buffer, len1 + len2);
		actual_crc = v->combine_func(crc1, crc2, len2);
		KUNIT_EXPECT_EQ_MSG(test, expected_crc, actual_crc,
				    "CRC combination gave wrong result with len1=%zu len2=%zu\n",
				    len1, len2);
	}
}
212
crc_test(struct kunit * test,const struct crc_variant * v)213 static void crc_test(struct kunit *test, const struct crc_variant *v)
214 {
215 crc_main_test(test, v);
216 if (v->combine_func)
217 crc_combine_test(test, v);
218 }
219
/*
 * Benchmark @crc_func over a range of lengths and log throughput in MB/s.
 *
 * Skipped unless CONFIG_CRC_BENCHMARK is enabled.  Marked __always_inline,
 * presumably so each caller's crc_func becomes a direct call rather than an
 * indirect one -- NOTE(review): assumed intent, confirm.
 */
static __always_inline void
crc_benchmark(struct kunit *test,
	      u64 (*crc_func)(u64 crc, const u8 *p, size_t len))
{
	static const size_t lens_to_test[] = {
		1, 16, 64, 127, 128, 200, 256, 511, 512, 1024, 3173, 4096, 16384,
	};
	size_t len, i, j, num_iters;
	/*
	 * The CRC value that this function computes in a series of calls to
	 * crc_func is never actually used, so use volatile to ensure that the
	 * computations are done as intended and don't all get optimized out.
	 */
	volatile u64 crc = 0;
	u64 t;

	if (!IS_ENABLED(CONFIG_CRC_BENCHMARK))
		kunit_skip(test, "not enabled");

	/* warm-up */
	for (i = 0; i < 10000000; i += CRC_KUNIT_MAX_LEN)
		crc = crc_func(crc, test_buffer, CRC_KUNIT_MAX_LEN);

	for (i = 0; i < ARRAY_SIZE(lens_to_test); i++) {
		len = lens_to_test[i];
		KUNIT_ASSERT_LE(test, len, CRC_KUNIT_MAX_LEN);
		/* Scale iterations so each length takes a similar time. */
		num_iters = 10000000 / (len + 128);
		preempt_disable();
		t = ktime_get_ns();
		for (j = 0; j < num_iters; j++)
			crc = crc_func(crc, test_buffer, len);
		t = ktime_get_ns() - t;
		preempt_enable();
		/* bytes * 1000 / ns == decimal MB/s */
		kunit_info(test, "len=%zu: %llu MB/s\n",
			   len, div64_u64((u64)len * num_iters * 1000, t));
	}
}
257
258 /* crc7_be */
259
crc7_be_wrapper(u64 crc,const u8 * p,size_t len)260 static u64 crc7_be_wrapper(u64 crc, const u8 *p, size_t len)
261 {
262 /*
263 * crc7_be() left-aligns the 7-bit CRC in a u8, whereas the test wants a
264 * right-aligned CRC (in a u64). Convert between the conventions.
265 */
266 return crc7_be(crc << 1, p, len) >> 1;
267 }
268
269 static const struct crc_variant crc_variant_crc7_be = {
270 .bits = 7,
271 .poly = 0x9,
272 .func = crc7_be_wrapper,
273 };
274
crc7_be_test(struct kunit * test)275 static void crc7_be_test(struct kunit *test)
276 {
277 crc_test(test, &crc_variant_crc7_be);
278 }
279
crc7_be_benchmark(struct kunit * test)280 static void crc7_be_benchmark(struct kunit *test)
281 {
282 crc_benchmark(test, crc7_be_wrapper);
283 }
284
285 /* crc16 */
286
crc16_wrapper(u64 crc,const u8 * p,size_t len)287 static u64 crc16_wrapper(u64 crc, const u8 *p, size_t len)
288 {
289 return crc16(crc, p, len);
290 }
291
292 static const struct crc_variant crc_variant_crc16 = {
293 .bits = 16,
294 .le = true,
295 .poly = 0xa001,
296 .func = crc16_wrapper,
297 };
298
crc16_test(struct kunit * test)299 static void crc16_test(struct kunit *test)
300 {
301 crc_test(test, &crc_variant_crc16);
302 }
303
crc16_benchmark(struct kunit * test)304 static void crc16_benchmark(struct kunit *test)
305 {
306 crc_benchmark(test, crc16_wrapper);
307 }
308
309 /* crc_t10dif */
310
crc_t10dif_wrapper(u64 crc,const u8 * p,size_t len)311 static u64 crc_t10dif_wrapper(u64 crc, const u8 *p, size_t len)
312 {
313 return crc_t10dif_update(crc, p, len);
314 }
315
316 static const struct crc_variant crc_variant_crc_t10dif = {
317 .bits = 16,
318 .le = false,
319 .poly = 0x8bb7,
320 .func = crc_t10dif_wrapper,
321 };
322
crc_t10dif_test(struct kunit * test)323 static void crc_t10dif_test(struct kunit *test)
324 {
325 crc_test(test, &crc_variant_crc_t10dif);
326 }
327
crc_t10dif_benchmark(struct kunit * test)328 static void crc_t10dif_benchmark(struct kunit *test)
329 {
330 crc_benchmark(test, crc_t10dif_wrapper);
331 }
332
333 /* crc32_le */
334
crc32_le_wrapper(u64 crc,const u8 * p,size_t len)335 static u64 crc32_le_wrapper(u64 crc, const u8 *p, size_t len)
336 {
337 return crc32_le(crc, p, len);
338 }
339
crc32_le_combine_wrapper(u64 crc1,u64 crc2,size_t len2)340 static u64 crc32_le_combine_wrapper(u64 crc1, u64 crc2, size_t len2)
341 {
342 return crc32_le_combine(crc1, crc2, len2);
343 }
344
345 static const struct crc_variant crc_variant_crc32_le = {
346 .bits = 32,
347 .le = true,
348 .poly = 0xedb88320,
349 .func = crc32_le_wrapper,
350 .combine_func = crc32_le_combine_wrapper,
351 };
352
crc32_le_test(struct kunit * test)353 static void crc32_le_test(struct kunit *test)
354 {
355 crc_test(test, &crc_variant_crc32_le);
356 }
357
crc32_le_benchmark(struct kunit * test)358 static void crc32_le_benchmark(struct kunit *test)
359 {
360 crc_benchmark(test, crc32_le_wrapper);
361 }
362
363 /* crc32_be */
364
crc32_be_wrapper(u64 crc,const u8 * p,size_t len)365 static u64 crc32_be_wrapper(u64 crc, const u8 *p, size_t len)
366 {
367 return crc32_be(crc, p, len);
368 }
369
370 static const struct crc_variant crc_variant_crc32_be = {
371 .bits = 32,
372 .le = false,
373 .poly = 0x04c11db7,
374 .func = crc32_be_wrapper,
375 };
376
crc32_be_test(struct kunit * test)377 static void crc32_be_test(struct kunit *test)
378 {
379 crc_test(test, &crc_variant_crc32_be);
380 }
381
crc32_be_benchmark(struct kunit * test)382 static void crc32_be_benchmark(struct kunit *test)
383 {
384 crc_benchmark(test, crc32_be_wrapper);
385 }
386
387 /* crc32c */
388
crc32c_wrapper(u64 crc,const u8 * p,size_t len)389 static u64 crc32c_wrapper(u64 crc, const u8 *p, size_t len)
390 {
391 return crc32c(crc, p, len);
392 }
393
crc32c_combine_wrapper(u64 crc1,u64 crc2,size_t len2)394 static u64 crc32c_combine_wrapper(u64 crc1, u64 crc2, size_t len2)
395 {
396 return crc32c_combine(crc1, crc2, len2);
397 }
398
399 static const struct crc_variant crc_variant_crc32c = {
400 .bits = 32,
401 .le = true,
402 .poly = 0x82f63b78,
403 .func = crc32c_wrapper,
404 .combine_func = crc32c_combine_wrapper,
405 };
406
crc32c_test(struct kunit * test)407 static void crc32c_test(struct kunit *test)
408 {
409 crc_test(test, &crc_variant_crc32c);
410 }
411
crc32c_benchmark(struct kunit * test)412 static void crc32c_benchmark(struct kunit *test)
413 {
414 crc_benchmark(test, crc32c_wrapper);
415 }
416
417 /* crc64_be */
418
crc64_be_wrapper(u64 crc,const u8 * p,size_t len)419 static u64 crc64_be_wrapper(u64 crc, const u8 *p, size_t len)
420 {
421 return crc64_be(crc, p, len);
422 }
423
424 static const struct crc_variant crc_variant_crc64_be = {
425 .bits = 64,
426 .le = false,
427 .poly = 0x42f0e1eba9ea3693,
428 .func = crc64_be_wrapper,
429 };
430
crc64_be_test(struct kunit * test)431 static void crc64_be_test(struct kunit *test)
432 {
433 crc_test(test, &crc_variant_crc64_be);
434 }
435
crc64_be_benchmark(struct kunit * test)436 static void crc64_be_benchmark(struct kunit *test)
437 {
438 crc_benchmark(test, crc64_be_wrapper);
439 }
440
441 /* crc64_nvme */
442
crc64_nvme_wrapper(u64 crc,const u8 * p,size_t len)443 static u64 crc64_nvme_wrapper(u64 crc, const u8 *p, size_t len)
444 {
445 /* The inversions that crc64_nvme() does have to be undone here. */
446 return ~crc64_nvme(~crc, p, len);
447 }
448
449 static const struct crc_variant crc_variant_crc64_nvme = {
450 .bits = 64,
451 .le = true,
452 .poly = 0x9a6c9329ac4bc9b5,
453 .func = crc64_nvme_wrapper,
454 };
455
crc64_nvme_test(struct kunit * test)456 static void crc64_nvme_test(struct kunit *test)
457 {
458 crc_test(test, &crc_variant_crc64_nvme);
459 }
460
crc64_nvme_benchmark(struct kunit * test)461 static void crc64_nvme_benchmark(struct kunit *test)
462 {
463 crc_benchmark(test, crc64_nvme_wrapper);
464 }
465
/*
 * All test cases, a correctness test plus a benchmark per CRC variant.
 * Note: the cases share the global PRNG stream, so their order affects the
 * exact random values each one sees -- keep the list order stable.
 */
static struct kunit_case crc_test_cases[] = {
	KUNIT_CASE(crc7_be_test),
	KUNIT_CASE(crc7_be_benchmark),
	KUNIT_CASE(crc16_test),
	KUNIT_CASE(crc16_benchmark),
	KUNIT_CASE(crc_t10dif_test),
	KUNIT_CASE(crc_t10dif_benchmark),
	KUNIT_CASE(crc32_le_test),
	KUNIT_CASE(crc32_le_benchmark),
	KUNIT_CASE(crc32_be_test),
	KUNIT_CASE(crc32_be_benchmark),
	KUNIT_CASE(crc32c_test),
	KUNIT_CASE(crc32c_benchmark),
	KUNIT_CASE(crc64_be_test),
	KUNIT_CASE(crc64_be_benchmark),
	KUNIT_CASE(crc64_nvme_test),
	KUNIT_CASE(crc64_nvme_benchmark),
	{},
};
485
/* Suite definition: registers the cases with shared setup/teardown hooks. */
static struct kunit_suite crc_test_suite = {
	.name = "crc",
	.test_cases = crc_test_cases,
	.suite_init = crc_suite_init,
	.suite_exit = crc_suite_exit,
};
kunit_test_suite(crc_test_suite);

MODULE_DESCRIPTION("Unit tests and benchmarks for the CRC library functions");
MODULE_LICENSE("GPL");
496