// SPDX-License-Identifier: GPL-2.0-or-later
#include "alloc_api.h"

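/* Selects the memblock_alloc*() variant exercised by the tests */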
static int alloc_test_flags = TEST_F_NONE;

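/* Return the name of the allocation variant selected by @flags */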
static inline const char * const get_memblock_alloc_name(int flags)
{
	if (flags & TEST_F_RAW)
		return "memblock_alloc_raw";
	return "memblock_alloc";
}

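/*
 * Dispatch to the allocation variant under test: memblock_alloc_raw() when
 * TEST_F_RAW is set (the memory is not zeroed), memblock_alloc() otherwise.
 */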
static inline void *run_memblock_alloc(phys_addr_t size, phys_addr_t align)
{
	if (alloc_test_flags & TEST_F_RAW)
		return memblock_alloc_raw(size, align);
	return memblock_alloc(size, align);
}

/*
 * A simple test that tries to allocate a small memory region.
 * Expect to allocate an aligned region near the end of the available memory.
 */
static int alloc_top_down_simple_check(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	void *allocated_ptr = NULL;
	phys_addr_t size = SZ_2;
	phys_addr_t expected_start;

	PREFIX_PUSH();
	setup_memblock();

	expected_start = memblock_end_of_DRAM() - SMP_CACHE_BYTES;

	allocated_ptr = run_memblock_alloc(size, SMP_CACHE_BYTES);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, size, alloc_test_flags);

	ASSERT_EQ(rgn->size, size);
	ASSERT_EQ(rgn->base, expected_start);

	ASSERT_EQ(memblock.reserved.cnt, 1);
	ASSERT_EQ(memblock.reserved.total_size, size);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory next to a reserved region that starts
 * at a misaligned address. Expect to create two separate entries, with the new
 * entry aligned to the provided alignment:
 *
 *              +
 * |            +--------+         +--------|
 * |            |  rgn2  |         |  rgn1  |
 * +------------+--------+---------+--------+
 *              ^
 *              |
 *              Aligned address boundary
 *
 * The allocation direction is top-down and region arrays are sorted from lower
 * to higher addresses, so the new region will be the first entry in the
 * memblock.reserved array. The previously reserved region does not get
 * modified. Region counter and total size get updated.
 */
static int alloc_top_down_disjoint_check(void)
{
	/* After allocation, this will point to the "old" region */
	struct memblock_region *rgn1 = &memblock.reserved.regions[1];
	struct memblock_region *rgn2 = &memblock.reserved.regions[0];
	struct region r1;
	void *allocated_ptr = NULL;
	phys_addr_t r2_size = SZ_16;
	/* Use custom alignment */
	phys_addr_t alignment = SMP_CACHE_BYTES * 2;
	phys_addr_t total_size;
	phys_addr_t expected_start;

	PREFIX_PUSH();
	setup_memblock();

	r1.base = memblock_end_of_DRAM() - SZ_2;
	r1.size = SZ_2;

	total_size = r1.size + r2_size;
	expected_start = memblock_end_of_DRAM() - alignment;

	memblock_reserve(r1.base, r1.size);

	allocated_ptr = run_memblock_alloc(r2_size, alignment);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, r2_size, alloc_test_flags);

	ASSERT_EQ(rgn1->size, r1.size);
	ASSERT_EQ(rgn1->base, r1.base);

	ASSERT_EQ(rgn2->size, r2_size);
	ASSERT_EQ(rgn2->base, expected_start);

	ASSERT_EQ(memblock.reserved.cnt, 2);
	ASSERT_EQ(memblock.reserved.total_size, total_size);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory when there is enough space at the end
 * of the previously reserved block (i.e. first fit):
 *
 *  |              +--------+--------------|
 *  |              |   r1   |      r2      |
 *  +--------------+--------+--------------+
 *
 * Expect a merge of both regions. Only the region size gets updated.
 */
static int alloc_top_down_before_check(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	void *allocated_ptr = NULL;
	/*
	 * The first region ends at the aligned address to test region merging
	 */
	phys_addr_t r1_size = SMP_CACHE_BYTES;
	phys_addr_t r2_size = SZ_512;
	phys_addr_t total_size = r1_size + r2_size;

	PREFIX_PUSH();
	setup_memblock();

	memblock_reserve(memblock_end_of_DRAM() - total_size, r1_size);

	allocated_ptr = run_memblock_alloc(r2_size, SMP_CACHE_BYTES);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, r2_size, alloc_test_flags);

	ASSERT_EQ(rgn->size, total_size);
	ASSERT_EQ(rgn->base, memblock_end_of_DRAM() - total_size);

	ASSERT_EQ(memblock.reserved.cnt, 1);
	ASSERT_EQ(memblock.reserved.total_size, total_size);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory when there is not enough space at the
 * end of the previously reserved block (i.e. second fit):
 *
 *  |            +-----------+------+     |
 *  |            |     r2    |  r1  |     |
 *  +------------+-----------+------+-----+
 *
 * Expect a merge of both regions. Both the base address and size of the region
 * get updated.
 */
static int alloc_top_down_after_check(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	struct region r1;
	void *allocated_ptr = NULL;
	phys_addr_t r2_size = SZ_512;
	phys_addr_t total_size;

	PREFIX_PUSH();
	setup_memblock();

	/*
	 * The first region starts at the aligned address to test region merging
	 */
	r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES;
	r1.size = SZ_8;

	total_size = r1.size + r2_size;

	memblock_reserve(r1.base, r1.size);

	allocated_ptr = run_memblock_alloc(r2_size, SMP_CACHE_BYTES);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, r2_size, alloc_test_flags);

	ASSERT_EQ(rgn->size, total_size);
	ASSERT_EQ(rgn->base, r1.base - r2_size);

	ASSERT_EQ(memblock.reserved.cnt, 1);
	ASSERT_EQ(memblock.reserved.total_size, total_size);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory when there are two reserved regions
 * with a gap too small to fit the new region:
 *
 *  |       +--------+----------+   +------|
 *  |       |   r3   |    r2    |   |  r1  |
 *  +-------+--------+----------+---+------+
 *
 * Expect to allocate a region before the one that starts at the lower address,
 * and merge them into one. The region counter and total size fields get
 * updated.
 */
static int alloc_top_down_second_fit_check(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	struct region r1, r2;
	void *allocated_ptr = NULL;
	phys_addr_t r3_size = SZ_1K;
	phys_addr_t total_size;

	PREFIX_PUSH();
	setup_memblock();

	r1.base = memblock_end_of_DRAM() - SZ_512;
	r1.size = SZ_512;

	r2.base = r1.base - SZ_512;
	r2.size = SZ_256;

	total_size = r1.size + r2.size + r3_size;

	memblock_reserve(r1.base, r1.size);
	memblock_reserve(r2.base, r2.size);

	allocated_ptr = run_memblock_alloc(r3_size, SMP_CACHE_BYTES);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, r3_size, alloc_test_flags);

	ASSERT_EQ(rgn->size, r2.size + r3_size);
	ASSERT_EQ(rgn->base, r2.base - r3_size);

	ASSERT_EQ(memblock.reserved.cnt, 2);
	ASSERT_EQ(memblock.reserved.total_size, total_size);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory when there are two reserved regions
 * with a gap big enough to accommodate the new region:
 *
 *  |     +--------+--------+--------+     |
 *  |     |   r2   |   r3   |   r1   |     |
 *  +-----+--------+--------+--------+-----+
 *
 * Expect to merge all of them, creating one big entry in the memblock.reserved
 * array. The region counter and total size fields get updated.
 */
static int alloc_in_between_generic_check(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	struct region r1, r2;
	void *allocated_ptr = NULL;
	phys_addr_t gap_size = SMP_CACHE_BYTES;
	phys_addr_t r3_size = SZ_64;
	/*
	 * Calculate the region sizes so there's just enough space for the
	 * new entry
	 */
	phys_addr_t rgn_size = (MEM_SIZE - (2 * gap_size + r3_size)) / 2;
	phys_addr_t total_size;

	PREFIX_PUSH();
	setup_memblock();

	r1.size = rgn_size;
	r1.base = memblock_end_of_DRAM() - (gap_size + rgn_size);

	r2.size = rgn_size;
	r2.base = memblock_start_of_DRAM() + gap_size;

	total_size = r1.size + r2.size + r3_size;

	memblock_reserve(r1.base, r1.size);
	memblock_reserve(r2.base, r2.size);

	allocated_ptr = run_memblock_alloc(r3_size, SMP_CACHE_BYTES);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, r3_size, alloc_test_flags);

	ASSERT_EQ(rgn->size, total_size);
	ASSERT_EQ(rgn->base, r1.base - r2.size - r3_size);

	ASSERT_EQ(memblock.reserved.cnt, 1);
	ASSERT_EQ(memblock.reserved.total_size, total_size);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory when the memory is filled with reserved
 * regions with memory gaps too small to fit the new region:
 *
 * +-------+
 * |  new  |
 * +--+----+
 *    |    +-----+    +-----+    +-----+    |
 *    |    | res |    | res |    | res |    |
 *    +----+-----+----+-----+----+-----+----+
 *
 * Expect no allocation to happen.
 */
static int alloc_small_gaps_generic_check(void)
{
	void *allocated_ptr = NULL;
	phys_addr_t region_size = SZ_1K;
	phys_addr_t gap_size = SZ_256;
	phys_addr_t region_end;

	PREFIX_PUSH();
	setup_memblock();

	region_end = memblock_start_of_DRAM();

	while (region_end < memblock_end_of_DRAM()) {
		memblock_reserve(region_end + gap_size, region_size);
		region_end += gap_size + region_size;
	}

	allocated_ptr = run_memblock_alloc(region_size, SMP_CACHE_BYTES);

	ASSERT_EQ(allocated_ptr, NULL);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory when all memory is reserved.
 * Expect no allocation to happen.
 */
static int alloc_all_reserved_generic_check(void)
{
	void *allocated_ptr = NULL;

	PREFIX_PUSH();
	setup_memblock();

	/* Simulate full memory */
	memblock_reserve(memblock_start_of_DRAM(), MEM_SIZE);

	allocated_ptr = run_memblock_alloc(SZ_256, SMP_CACHE_BYTES);

	ASSERT_EQ(allocated_ptr, NULL);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory when the memory is almost full,
 * with not enough space left for the new region:
 *
 *                                +-------+
 *                                |  new  |
 *                                +-------+
 *  |-----------------------------+   |
 *  |          reserved           |   |
 *  +-----------------------------+---+
 *
 * Expect no allocation to happen.
 */
static int alloc_no_space_generic_check(void)
{
	void *allocated_ptr = NULL;
	phys_addr_t available_size = SZ_256;
	phys_addr_t reserved_size = MEM_SIZE - available_size;

	PREFIX_PUSH();
	setup_memblock();

	/* Simulate almost-full memory */
	memblock_reserve(memblock_start_of_DRAM(), reserved_size);

	allocated_ptr = run_memblock_alloc(SZ_1K, SMP_CACHE_BYTES);

	ASSERT_EQ(allocated_ptr, NULL);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory when the memory is almost full,
 * but there is just enough space left:
 *
 *  |---------------------------+---------|
 *  |          reserved         |   new   |
 *  +---------------------------+---------+
 *
 * Expect to allocate memory and merge all the regions. The total size field
 * gets updated.
 */
static int alloc_limited_space_generic_check(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	void *allocated_ptr = NULL;
	phys_addr_t available_size = SZ_256;
	phys_addr_t reserved_size = MEM_SIZE - available_size;

	PREFIX_PUSH();
	setup_memblock();

	/* Simulate almost-full memory */
	memblock_reserve(memblock_start_of_DRAM(), reserved_size);

	allocated_ptr = run_memblock_alloc(available_size, SMP_CACHE_BYTES);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, available_size, alloc_test_flags);

	ASSERT_EQ(rgn->size, MEM_SIZE);
	ASSERT_EQ(rgn->base, memblock_start_of_DRAM());

	ASSERT_EQ(memblock.reserved.cnt, 1);
	ASSERT_EQ(memblock.reserved.total_size, MEM_SIZE);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory when there is no available memory
 * registered (i.e. memblock.memory has only a dummy entry).
 * Expect no allocation to happen.
 */
static int alloc_no_memory_generic_check(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	void *allocated_ptr = NULL;

	PREFIX_PUSH();
	reset_memblock_regions();

	allocated_ptr = run_memblock_alloc(SZ_1K, SMP_CACHE_BYTES);

	ASSERT_EQ(allocated_ptr, NULL);
	ASSERT_EQ(rgn->size, 0);
	ASSERT_EQ(rgn->base, 0);
	ASSERT_EQ(memblock.reserved.total_size, 0);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate a region that is larger than the total size of
 * available memory (memblock.memory):
 *
 *  +-----------------------------------+
 *  |                 new               |
 *  +-----------------------------------+
 *  |                                 |
 *  |                                 |
 *  +---------------------------------+
 *
 * Expect no allocation to happen.
 */
static int alloc_too_large_generic_check(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	void *allocated_ptr = NULL;

	PREFIX_PUSH();
	setup_memblock();

	allocated_ptr = run_memblock_alloc(MEM_SIZE + SZ_2, SMP_CACHE_BYTES);

	ASSERT_EQ(allocated_ptr, NULL);
	ASSERT_EQ(rgn->size, 0);
	ASSERT_EQ(rgn->base, 0);
	ASSERT_EQ(memblock.reserved.total_size, 0);

	test_pass_pop();

	return 0;
}

/*
 * A simple test that tries to allocate a small memory region.
 * Expect to allocate an aligned region at the beginning of the available
 * memory.
 */
static int alloc_bottom_up_simple_check(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	void *allocated_ptr = NULL;

	PREFIX_PUSH();
	setup_memblock();

	allocated_ptr = run_memblock_alloc(SZ_2, SMP_CACHE_BYTES);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, SZ_2, alloc_test_flags);

	ASSERT_EQ(rgn->size, SZ_2);
	ASSERT_EQ(rgn->base, memblock_start_of_DRAM());

	ASSERT_EQ(memblock.reserved.cnt, 1);
	ASSERT_EQ(memblock.reserved.total_size, SZ_2);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory next to a reserved region that starts
 * at a misaligned address. Expect to create two separate entries, with the new
 * entry aligned to the provided alignment:
 *
 *                      +
 *  |    +----------+   +----------+     |
 *  |    |   rgn1   |   |   rgn2   |     |
 *  +----+----------+---+----------+-----+
 *                      ^
 *                      |
 *                      Aligned address boundary
 *
 * The allocation direction is bottom-up, so the new region will be the second
 * entry in the memblock.reserved array. The previously reserved region does
 * not get modified. Region counter and total size get updated.
 */
static int alloc_bottom_up_disjoint_check(void)
{
	struct memblock_region *rgn1 = &memblock.reserved.regions[0];
	struct memblock_region *rgn2 = &memblock.reserved.regions[1];
	struct region r1;
	void *allocated_ptr = NULL;
	phys_addr_t r2_size = SZ_16;
	/* Use custom alignment */
	phys_addr_t alignment = SMP_CACHE_BYTES * 2;
	phys_addr_t total_size;
	phys_addr_t expected_start;

	PREFIX_PUSH();
	setup_memblock();

	r1.base = memblock_start_of_DRAM() + SZ_2;
	r1.size = SZ_2;

	total_size = r1.size + r2_size;
	expected_start = memblock_start_of_DRAM() + alignment;

	memblock_reserve(r1.base, r1.size);

	allocated_ptr = run_memblock_alloc(r2_size, alignment);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, r2_size, alloc_test_flags);

	ASSERT_EQ(rgn1->size, r1.size);
	ASSERT_EQ(rgn1->base, r1.base);

	ASSERT_EQ(rgn2->size, r2_size);
	ASSERT_EQ(rgn2->base, expected_start);

	ASSERT_EQ(memblock.reserved.cnt, 2);
	ASSERT_EQ(memblock.reserved.total_size, total_size);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory when there is enough space at
 * the beginning of the previously reserved block (i.e. first fit):
 *
 *  |------------------+--------+         |
 *  |        r1        |   r2   |         |
 *  +------------------+--------+---------+
 *
 * Expect a merge of both regions. Both the base address and size of the region
 * get updated.
 */
static int alloc_bottom_up_before_check(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	void *allocated_ptr = NULL;
	phys_addr_t r1_size = SZ_512;
	phys_addr_t r2_size = SZ_128;
	phys_addr_t total_size = r1_size + r2_size;

	PREFIX_PUSH();
	setup_memblock();

	memblock_reserve(memblock_start_of_DRAM() + r1_size, r2_size);

	allocated_ptr = run_memblock_alloc(r1_size, SMP_CACHE_BYTES);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, r1_size, alloc_test_flags);

	ASSERT_EQ(rgn->size, total_size);
	ASSERT_EQ(rgn->base, memblock_start_of_DRAM());

	ASSERT_EQ(memblock.reserved.cnt, 1);
	ASSERT_EQ(memblock.reserved.total_size, total_size);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory when there is not enough space at
 * the beginning of the previously reserved block (i.e. second fit):
 *
 *  |    +--------+--------------+         |
 *  |    |   r1   |      r2      |         |
 *  +----+--------+--------------+---------+
 *
 * Expect a merge of both regions. Only the region size gets updated.
 */
static int alloc_bottom_up_after_check(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	struct region r1;
	void *allocated_ptr = NULL;
	phys_addr_t r2_size = SZ_512;
	phys_addr_t total_size;

	PREFIX_PUSH();
	setup_memblock();

	/*
	 * The first region starts at the aligned address to test region merging
	 */
	r1.base = memblock_start_of_DRAM() + SMP_CACHE_BYTES;
	r1.size = SZ_64;

	total_size = r1.size + r2_size;

	memblock_reserve(r1.base, r1.size);

	allocated_ptr = run_memblock_alloc(r2_size, SMP_CACHE_BYTES);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, r2_size, alloc_test_flags);

	ASSERT_EQ(rgn->size, total_size);
	ASSERT_EQ(rgn->base, r1.base);

	ASSERT_EQ(memblock.reserved.cnt, 1);
	ASSERT_EQ(memblock.reserved.total_size, total_size);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory when there are two reserved regions,
 * the first one starting at the beginning of the available memory, with a gap
 * too small to fit the new region:
 *
 *  |------------+     +--------+--------+  |
 *  |     r1     |     |   r2   |   r3   |  |
 *  +------------+-----+--------+--------+--+
 *
 * Expect to allocate after the second region, which starts at the higher
 * address, and merge them into one. The region counter and total size fields
 * get updated.
 */
static int alloc_bottom_up_second_fit_check(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[1];
	struct region r1, r2;
	void *allocated_ptr = NULL;
	phys_addr_t r3_size = SZ_1K;
	phys_addr_t total_size;

	PREFIX_PUSH();
	setup_memblock();

	r1.base = memblock_start_of_DRAM();
	r1.size = SZ_512;

	r2.base = r1.base + r1.size + SZ_512;
	r2.size = SZ_256;

	total_size = r1.size + r2.size + r3_size;

	memblock_reserve(r1.base, r1.size);
	memblock_reserve(r2.base, r2.size);

	allocated_ptr = run_memblock_alloc(r3_size, SMP_CACHE_BYTES);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, r3_size, alloc_test_flags);

	ASSERT_EQ(rgn->size, r2.size + r3_size);
	ASSERT_EQ(rgn->base, r2.base);

	ASSERT_EQ(memblock.reserved.cnt, 2);
	ASSERT_EQ(memblock.reserved.total_size, total_size);

	test_pass_pop();

	return 0;
}

/*
 * Test case wrappers: each wrapper runs one scenario in both the top-down and
 * the bottom-up allocation direction.
 */
static int alloc_simple_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	memblock_set_bottom_up(false);
	alloc_top_down_simple_check();
	memblock_set_bottom_up(true);
	alloc_bottom_up_simple_check();

	return 0;
}

static int alloc_disjoint_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	memblock_set_bottom_up(false);
	alloc_top_down_disjoint_check();
	memblock_set_bottom_up(true);
	alloc_bottom_up_disjoint_check();

	return 0;
}

static int alloc_before_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	memblock_set_bottom_up(false);
	alloc_top_down_before_check();
	memblock_set_bottom_up(true);
	alloc_bottom_up_before_check();

	return 0;
}

static int alloc_after_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	memblock_set_bottom_up(false);
	alloc_top_down_after_check();
	memblock_set_bottom_up(true);
	alloc_bottom_up_after_check();

	return 0;
}

static int alloc_in_between_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	run_top_down(alloc_in_between_generic_check);
	run_bottom_up(alloc_in_between_generic_check);

	return 0;
}

static int alloc_second_fit_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	memblock_set_bottom_up(false);
	alloc_top_down_second_fit_check();
	memblock_set_bottom_up(true);
	alloc_bottom_up_second_fit_check();

	return 0;
}

static int alloc_small_gaps_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	run_top_down(alloc_small_gaps_generic_check);
	run_bottom_up(alloc_small_gaps_generic_check);

	return 0;
}

static int alloc_all_reserved_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	run_top_down(alloc_all_reserved_generic_check);
	run_bottom_up(alloc_all_reserved_generic_check);

	return 0;
}

static int alloc_no_space_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	run_top_down(alloc_no_space_generic_check);
	run_bottom_up(alloc_no_space_generic_check);

	return 0;
}

static int alloc_limited_space_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	run_top_down(alloc_limited_space_generic_check);
	run_bottom_up(alloc_limited_space_generic_check);

	return 0;
}

static int alloc_no_memory_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	run_top_down(alloc_no_memory_generic_check);
	run_bottom_up(alloc_no_memory_generic_check);

	return 0;
}

static int alloc_too_large_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	run_top_down(alloc_too_large_generic_check);
	run_bottom_up(alloc_too_large_generic_check);

	return 0;
}

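/*
 * Run the whole set of allocation tests once with the given flags, so that
 * run_memblock_alloc() picks the corresponding memblock_alloc*() variant.
 */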
static int memblock_alloc_checks_internal(int flags)
{
	const char *func = get_memblock_alloc_name(flags);

	alloc_test_flags = flags;
	prefix_reset();
	prefix_push(func);
	test_print("Running %s tests...\n", func);

	reset_memblock_attributes();
	dummy_physical_memory_init();

	alloc_simple_check();
	alloc_disjoint_check();
	alloc_before_check();
	alloc_after_check();
	alloc_second_fit_check();
	alloc_small_gaps_check();
	alloc_in_between_check();
	alloc_all_reserved_check();
	alloc_no_space_check();
	alloc_limited_space_check();
	alloc_no_memory_check();
	alloc_too_large_check();

	dummy_physical_memory_cleanup();

	prefix_pop();

	return 0;
}

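/*
 * Entry point: run the suite twice, once for memblock_alloc() and once for
 * memblock_alloc_raw().
 */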
int memblock_alloc_checks(void)
{
	memblock_alloc_checks_internal(TEST_F_NONE);
	memblock_alloc_checks_internal(TEST_F_RAW);

	return 0;
}