blob: b6259bef4426ec3e1bfc5c1e45d48bfda5174651 [file] [log] [blame]
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +01001// SPDX-License-Identifier: GPL-2.0+
2/*
3 * (C) Copyright 2018 Simon Goldschmidt
4 */
5
Sughosh Ganu291bf9c2024-08-26 17:29:18 +05306#include <alist.h>
Simon Glass75c4d412020-07-19 10:15:37 -06007#include <dm.h>
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +01008#include <lmb.h>
Simon Glass0f2af882020-05-10 11:40:05 -06009#include <log.h>
Simon Glass9bc15642020-02-03 07:36:16 -070010#include <malloc.h>
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +010011#include <dm/test.h>
Simon Glassb4c722a2023-10-01 19:15:21 -060012#include <test/lib.h>
Simon Glass75c4d412020-07-19 10:15:37 -060013#include <test/test.h>
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +010014#include <test/ut.h>
15
Sughosh Ganu291bf9c2024-08-26 17:29:18 +053016static inline bool lmb_is_nomap(struct lmb_region *m)
Heinrich Schuchardta88181e2021-11-14 08:41:07 +010017{
18 return m->flags & LMB_NOMAP;
19}
20
Sughosh Ganu291bf9c2024-08-26 17:29:18 +053021static int check_lmb(struct unit_test_state *uts, struct alist *mem_lst,
22 struct alist *used_lst, phys_addr_t ram_base,
23 phys_size_t ram_size, unsigned long num_reserved,
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +010024 phys_addr_t base1, phys_size_t size1,
25 phys_addr_t base2, phys_size_t size2,
26 phys_addr_t base3, phys_size_t size3)
27{
Sughosh Ganu291bf9c2024-08-26 17:29:18 +053028 struct lmb_region *mem, *used;
29
30 mem = mem_lst->data;
31 used = used_lst->data;
32
Simon Goldschmidtc722dac2019-02-01 21:23:59 +010033 if (ram_size) {
Sughosh Ganu291bf9c2024-08-26 17:29:18 +053034 ut_asserteq(mem_lst->count, 1);
35 ut_asserteq(mem[0].base, ram_base);
36 ut_asserteq(mem[0].size, ram_size);
Simon Goldschmidtc722dac2019-02-01 21:23:59 +010037 }
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +010038
Sughosh Ganu291bf9c2024-08-26 17:29:18 +053039 ut_asserteq(used_lst->count, num_reserved);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +010040 if (num_reserved > 0) {
Sughosh Ganu291bf9c2024-08-26 17:29:18 +053041 ut_asserteq(used[0].base, base1);
42 ut_asserteq(used[0].size, size1);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +010043 }
44 if (num_reserved > 1) {
Sughosh Ganu291bf9c2024-08-26 17:29:18 +053045 ut_asserteq(used[1].base, base2);
46 ut_asserteq(used[1].size, size2);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +010047 }
48 if (num_reserved > 2) {
Sughosh Ganu291bf9c2024-08-26 17:29:18 +053049 ut_asserteq(used[2].base, base3);
50 ut_asserteq(used[2].size, size3);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +010051 }
52 return 0;
53}
54
/*
 * Assert that the LMB memory/used lists match the expected state; expands to
 * a ut_assert() on check_lmb(), so a mismatch fails the enclosing test.
 * Relies on a variable named 'uts' being in scope at the call site.
 */
#define ASSERT_LMB(mem_lst, used_lst, ram_base, ram_size, num_reserved, base1, size1, \
		   base2, size2, base3, size3) \
		   ut_assert(!check_lmb(uts, mem_lst, used_lst, ram_base, ram_size, \
			     num_reserved, base1, size1, base2, size2, base3, \
			     size3))
60
Sughosh Ganu291bf9c2024-08-26 17:29:18 +053061static int setup_lmb_test(struct unit_test_state *uts, struct lmb *store,
62 struct alist **mem_lstp, struct alist **used_lstp)
63{
64 struct lmb *lmb;
65
66 ut_assertok(lmb_push(store));
67 lmb = lmb_get();
Ilias Apalodimas5421c332024-12-18 09:02:33 +020068 *mem_lstp = &lmb->available_mem;
Sughosh Ganu291bf9c2024-08-26 17:29:18 +053069 *used_lstp = &lmb->used_mem;
70
71 return 0;
72}
73
Sughosh Ganu9b0765a2025-06-17 16:13:40 +053074static int lmb_reserve(phys_addr_t addr, phys_size_t size, u32 flags)
75{
76 int err;
77
78 err = lmb_alloc_mem(LMB_MEM_ALLOC_ADDR, 0, &addr, size, flags);
79 if (err)
80 return err;
81
82 return 0;
83}
84
Sughosh Ganu7bdfe122025-06-17 16:13:41 +053085static phys_addr_t lmb_alloc(phys_size_t size, ulong align)
86{
87 int err;
88 phys_addr_t addr;
89
90 err = lmb_alloc_mem(LMB_MEM_ALLOC_ANY, align, &addr, size, LMB_NONE);
91 if (err)
92 return 0;
93
94 return addr;
95}
96
97static phys_addr_t lmb_alloc_base(phys_size_t size, ulong align,
98 phys_addr_t max_addr, u32 flags)
99{
100 int err;
101 phys_addr_t addr;
102
103 addr = max_addr;
104 err = lmb_alloc_mem(LMB_MEM_ALLOC_MAX, align, &addr, size, flags);
105 if (err)
106 return 0;
107
108 return addr;
109}
110
Sughosh Ganu9b0765a2025-06-17 16:13:40 +0530111#define lmb_alloc_addr(addr, size, flags) lmb_reserve(addr, size, flags)
112
/**
 * test_multi_alloc() - core add/reserve/alloc/free scenario
 * @uts: unit test state
 * @ram: base address of the main simulated RAM bank
 * @ram_size: size of the main RAM bank
 * @ram0: base of an optional second RAM bank (only used if @ram0_size != 0)
 * @ram0_size: size of the second bank, or 0 for a single-bank run
 * @alloc_64k_addr: address at which a 64 KiB region is reserved up front
 *
 * Adds the RAM bank(s), reserves 64 KiB, then allocates/frees small blocks
 * and verifies region coalescing and address reuse after every single step
 * via ASSERT_LMB().  Return: 0 on success, non-zero on assertion failure.
 */
static int test_multi_alloc(struct unit_test_state *uts, const phys_addr_t ram,
			    const phys_size_t ram_size, const phys_addr_t ram0,
			    const phys_size_t ram0_size,
			    const phys_addr_t alloc_64k_addr)
{
	const phys_addr_t ram_end = ram + ram_size;
	const phys_addr_t alloc_64k_end = alloc_64k_addr + 0x10000;

	long ret;
	struct alist *mem_lst, *used_lst;
	struct lmb_region *mem, *used;
	phys_addr_t a, a2, b, b2, c, d;
	struct lmb store;

	/* check for overflow */
	ut_assert(ram_end == 0 || ram_end > ram);
	ut_assert(alloc_64k_end > alloc_64k_addr);
	/* check input addresses + size */
	ut_assert(alloc_64k_addr >= ram + 8);
	ut_assert(alloc_64k_end <= ram_end - 8);

	ut_assertok(setup_lmb_test(uts, &store, &mem_lst, &used_lst));
	/*
	 * NOTE(review): mem/used cache the alist backing store pointers and
	 * are dereferenced after the lmb_add() calls below — presumably no
	 * realloc moves the data in between; verify against alist behavior.
	 */
	mem = mem_lst->data;
	used = used_lst->data;

	if (ram0_size) {
		ret = lmb_add(ram0, ram0_size);
		ut_asserteq(ret, 0);
	}

	ret = lmb_add(ram, ram_size);
	ut_asserteq(ret, 0);

	/* with two banks, both must be listed in ascending address order */
	if (ram0_size) {
		ut_asserteq(mem_lst->count, 2);
		ut_asserteq(mem[0].base, ram0);
		ut_asserteq(mem[0].size, ram0_size);
		ut_asserteq(mem[1].base, ram);
		ut_asserteq(mem[1].size, ram_size);
	} else {
		ut_asserteq(mem_lst->count, 1);
		ut_asserteq(mem[0].base, ram);
		ut_asserteq(mem[0].size, ram_size);
	}

	/* reserve 64KiB somewhere */
	ret = lmb_reserve(alloc_64k_addr, 0x10000, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, 0, 0, 1, alloc_64k_addr, 0x10000,
		   0, 0, 0, 0);

	/* allocate somewhere, should be at the end of RAM */
	a = lmb_alloc(4, 1);
	ut_asserteq(a, ram_end - 4);
	ASSERT_LMB(mem_lst, used_lst, 0, 0, 2, alloc_64k_addr, 0x10000,
		   ram_end - 4, 4, 0, 0);
	/* alloc below end of reserved region -> below reserved region */
	b = lmb_alloc_base(4, 1, alloc_64k_end, LMB_NONE);
	ut_asserteq(b, alloc_64k_addr - 4);
	/* the new block coalesces with the 64 KiB reservation */
	ASSERT_LMB(mem_lst, used_lst, 0, 0, 2,
		   alloc_64k_addr - 4, 0x10000 + 4, ram_end - 4, 4, 0, 0);

	/* 2nd time */
	c = lmb_alloc(4, 1);
	ut_asserteq(c, ram_end - 8);
	ASSERT_LMB(mem_lst, used_lst, 0, 0, 2,
		   alloc_64k_addr - 4, 0x10000 + 4, ram_end - 8, 8, 0, 0);
	d = lmb_alloc_base(4, 1, alloc_64k_end, LMB_NONE);
	ut_asserteq(d, alloc_64k_addr - 8);
	ASSERT_LMB(mem_lst, used_lst, 0, 0, 2,
		   alloc_64k_addr - 8, 0x10000 + 8, ram_end - 8, 8, 0, 0);

	ret = lmb_free(a, 4, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, 0, 0, 2,
		   alloc_64k_addr - 8, 0x10000 + 8, ram_end - 8, 4, 0, 0);
	/* allocate again to ensure we get the same address */
	a2 = lmb_alloc(4, 1);
	ut_asserteq(a, a2);
	ASSERT_LMB(mem_lst, used_lst, 0, 0, 2,
		   alloc_64k_addr - 8, 0x10000 + 8, ram_end - 8, 8, 0, 0);
	ret = lmb_free(a2, 4, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, 0, 0, 2,
		   alloc_64k_addr - 8, 0x10000 + 8, ram_end - 8, 4, 0, 0);

	/* freeing b splits the first region into three */
	ret = lmb_free(b, 4, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, 0, 0, 3,
		   alloc_64k_addr - 8, 4, alloc_64k_addr, 0x10000,
		   ram_end - 8, 4);
	/* allocate again to ensure we get the same address */
	b2 = lmb_alloc_base(4, 1, alloc_64k_end, LMB_NONE);
	ut_asserteq(b, b2);
	ASSERT_LMB(mem_lst, used_lst, 0, 0, 2,
		   alloc_64k_addr - 8, 0x10000 + 8, ram_end - 8, 4, 0, 0);
	ret = lmb_free(b2, 4, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, 0, 0, 3,
		   alloc_64k_addr - 8, 4, alloc_64k_addr, 0x10000,
		   ram_end - 8, 4);

	ret = lmb_free(c, 4, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, 0, 0, 2,
		   alloc_64k_addr - 8, 4, alloc_64k_addr, 0x10000, 0, 0);
	ret = lmb_free(d, 4, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, 0, 0, 1, alloc_64k_addr, 0x10000,
		   0, 0, 0, 0);

	/* memory bank layout must be unchanged by all of the above */
	if (ram0_size) {
		ut_asserteq(mem_lst->count, 2);
		ut_asserteq(mem[0].base, ram0);
		ut_asserteq(mem[0].size, ram0_size);
		ut_asserteq(mem[1].base, ram);
		ut_asserteq(mem[1].size, ram_size);
	} else {
		ut_asserteq(mem_lst->count, 1);
		ut_asserteq(mem[0].base, ram);
		ut_asserteq(mem[0].size, ram_size);
	}

	lmb_pop(&store);

	return 0;
}
240
241static int test_multi_alloc_512mb(struct unit_test_state *uts,
242 const phys_addr_t ram)
243{
Simon Goldschmidtc722dac2019-02-01 21:23:59 +0100244 return test_multi_alloc(uts, ram, 0x20000000, 0, 0, ram + 0x10000000);
245}
246
247static int test_multi_alloc_512mb_x2(struct unit_test_state *uts,
248 const phys_addr_t ram,
249 const phys_addr_t ram0)
250{
251 return test_multi_alloc(uts, ram, 0x20000000, ram0, 0x20000000,
252 ram + 0x10000000);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100253}
254
255/* Create a memory region with one reserved region and allocate */
256static int lib_test_lmb_simple(struct unit_test_state *uts)
257{
Simon Goldschmidt6402d9b2019-01-14 22:38:15 +0100258 int ret;
259
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100260 /* simulate 512 MiB RAM beginning at 1GiB */
Simon Goldschmidt6402d9b2019-01-14 22:38:15 +0100261 ret = test_multi_alloc_512mb(uts, 0x40000000);
262 if (ret)
263 return ret;
264
265 /* simulate 512 MiB RAM beginning at 1.5GiB */
266 return test_multi_alloc_512mb(uts, 0xE0000000);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100267}
Simon Glassb4c722a2023-10-01 19:15:21 -0600268LIB_TEST(lib_test_lmb_simple, 0);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100269
Simon Goldschmidtc722dac2019-02-01 21:23:59 +0100270/* Create two memory regions with one reserved region and allocate */
271static int lib_test_lmb_simple_x2(struct unit_test_state *uts)
272{
273 int ret;
274
275 /* simulate 512 MiB RAM beginning at 2GiB and 1 GiB */
276 ret = test_multi_alloc_512mb_x2(uts, 0x80000000, 0x40000000);
277 if (ret)
278 return ret;
279
280 /* simulate 512 MiB RAM beginning at 3.5GiB and 1 GiB */
281 return test_multi_alloc_512mb_x2(uts, 0xE0000000, 0x40000000);
282}
Simon Glassb4c722a2023-10-01 19:15:21 -0600283LIB_TEST(lib_test_lmb_simple_x2, 0);
Simon Goldschmidtc722dac2019-02-01 21:23:59 +0100284
/* Simulate 512 MiB RAM, allocate some blocks that fit/don't fit */
/**
 * test_bigblock() - big-block allocation around a mid-RAM reservation
 * @uts: unit test state
 * @ram: base address of the simulated 512 MiB RAM bank
 *
 * Reserves 64 KiB in the middle of RAM, then checks that one 256 MiB block
 * fits below it, a second one fails, and a RAM-sized block always fails.
 * Return: 0 on success, non-zero on assertion failure.
 */
static int test_bigblock(struct unit_test_state *uts, const phys_addr_t ram)
{
	const phys_size_t ram_size = 0x20000000;
	const phys_size_t big_block_size = 0x10000000;
	const phys_addr_t ram_end = ram + ram_size;
	const phys_addr_t alloc_64k_addr = ram + 0x10000000;
	struct alist *mem_lst, *used_lst;
	long ret;
	phys_addr_t a, b;
	struct lmb store;

	/* check for overflow */
	ut_assert(ram_end == 0 || ram_end > ram);

	ut_assertok(setup_lmb_test(uts, &store, &mem_lst, &used_lst));

	ret = lmb_add(ram, ram_size);
	ut_asserteq(ret, 0);

	/* reserve 64KiB in the middle of RAM */
	ret = lmb_reserve(alloc_64k_addr, 0x10000, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, alloc_64k_addr, 0x10000,
		   0, 0, 0, 0);

	/* allocate a big block, should be below reserved */
	a = lmb_alloc(big_block_size, 1);
	ut_asserteq(a, ram);
	/* the big block coalesces with the 64 KiB reservation above it */
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, a,
		   big_block_size + 0x10000, 0, 0, 0, 0);
	/* allocate 2nd big block */
	/* This should fail, printing an error */
	b = lmb_alloc(big_block_size, 1);
	ut_asserteq(b, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, a,
		   big_block_size + 0x10000, 0, 0, 0, 0);

	ret = lmb_free(a, big_block_size, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, alloc_64k_addr, 0x10000,
		   0, 0, 0, 0);

	/* allocate too big block */
	/* This should fail, printing an error */
	a = lmb_alloc(ram_size, 1);
	ut_asserteq(a, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, alloc_64k_addr, 0x10000,
		   0, 0, 0, 0);

	lmb_pop(&store);

	return 0;
}
339
340static int lib_test_lmb_big(struct unit_test_state *uts)
341{
Simon Goldschmidt6402d9b2019-01-14 22:38:15 +0100342 int ret;
343
344 /* simulate 512 MiB RAM beginning at 1GiB */
345 ret = test_bigblock(uts, 0x40000000);
346 if (ret)
347 return ret;
348
349 /* simulate 512 MiB RAM beginning at 1.5GiB */
350 return test_bigblock(uts, 0xE0000000);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100351}
Simon Glassb4c722a2023-10-01 19:15:21 -0600352LIB_TEST(lib_test_lmb_big, 0);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100353
/* Simulate 512 MiB RAM, allocate a block without previous reservation */
/**
 * test_noreserved() - allocation/free round-trips into empty RAM
 * @uts: unit test state
 * @ram: base address of the simulated 512 MiB RAM bank
 * @alloc_size: size of each allocation (may be unaligned)
 * @align: alignment for every allocation (must be a power of two, as it is
 *         used in the align-up mask below)
 *
 * Allocates two blocks at the top of RAM, checks whether they coalesce
 * (they only do when @alloc_size is already aligned), frees them, and
 * verifies that lmb_alloc_base() reproduces the same address.
 * Return: 0 on success, non-zero on assertion failure.
 */
static int test_noreserved(struct unit_test_state *uts, const phys_addr_t ram,
			   const phys_addr_t alloc_size, const ulong align)
{
	const phys_size_t ram_size = 0x20000000;
	const phys_addr_t ram_end = ram + ram_size;
	long ret;
	phys_addr_t a, b;
	struct lmb store;
	struct alist *mem_lst, *used_lst;
	/* round alloc_size up to the requested alignment */
	const phys_addr_t alloc_size_aligned = (alloc_size + align - 1) &
		~(align - 1);

	/* check for overflow */
	ut_assert(ram_end == 0 || ram_end > ram);

	ut_assertok(setup_lmb_test(uts, &store, &mem_lst, &used_lst));

	ret = lmb_add(ram, ram_size);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 0, 0, 0, 0, 0, 0, 0);

	/* allocate a block */
	a = lmb_alloc(alloc_size, align);
	ut_assert(a != 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1,
		   ram + ram_size - alloc_size_aligned, alloc_size, 0, 0, 0, 0);

	/* allocate another block */
	b = lmb_alloc(alloc_size, align);
	ut_assert(b != 0);
	/* aligned sizes leave no gap, so the two blocks merge into one */
	if (alloc_size == alloc_size_aligned) {
		ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, ram + ram_size -
			   (alloc_size_aligned * 2), alloc_size * 2, 0, 0, 0,
			   0);
	} else {
		ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, ram + ram_size -
			   (alloc_size_aligned * 2), alloc_size, ram + ram_size
			   - alloc_size_aligned, alloc_size, 0, 0);
	}
	/* and free them */
	ret = lmb_free(b, alloc_size, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1,
		   ram + ram_size - alloc_size_aligned,
		   alloc_size, 0, 0, 0, 0);
	ret = lmb_free(a, alloc_size, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 0, 0, 0, 0, 0, 0, 0);

	/* allocate a block with base */
	b = lmb_alloc_base(alloc_size, align, ram_end, LMB_NONE);
	ut_assert(a == b);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1,
		   ram + ram_size - alloc_size_aligned,
		   alloc_size, 0, 0, 0, 0);
	/* and free it */
	ret = lmb_free(b, alloc_size, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 0, 0, 0, 0, 0, 0, 0);

	lmb_pop(&store);

	return 0;
}
419
420static int lib_test_lmb_noreserved(struct unit_test_state *uts)
421{
Simon Goldschmidt6402d9b2019-01-14 22:38:15 +0100422 int ret;
423
424 /* simulate 512 MiB RAM beginning at 1GiB */
Simon Goldschmidtcb57d132019-01-14 22:38:16 +0100425 ret = test_noreserved(uts, 0x40000000, 4, 1);
Simon Goldschmidt6402d9b2019-01-14 22:38:15 +0100426 if (ret)
427 return ret;
428
429 /* simulate 512 MiB RAM beginning at 1.5GiB */
Simon Goldschmidtcb57d132019-01-14 22:38:16 +0100430 return test_noreserved(uts, 0xE0000000, 4, 1);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100431}
Simon Glassb4c722a2023-10-01 19:15:21 -0600432LIB_TEST(lib_test_lmb_noreserved, 0);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100433
Simon Goldschmidtcb57d132019-01-14 22:38:16 +0100434static int lib_test_lmb_unaligned_size(struct unit_test_state *uts)
435{
436 int ret;
437
438 /* simulate 512 MiB RAM beginning at 1GiB */
439 ret = test_noreserved(uts, 0x40000000, 5, 8);
440 if (ret)
441 return ret;
442
443 /* simulate 512 MiB RAM beginning at 1.5GiB */
444 return test_noreserved(uts, 0xE0000000, 5, 8);
445}
Simon Glassb4c722a2023-10-01 19:15:21 -0600446LIB_TEST(lib_test_lmb_unaligned_size, 0);
Simon Goldschmidtcb57d132019-01-14 22:38:16 +0100447
/*
 * Simulate a RAM that starts at 0 and allocate down to address 0, which must
 * fail as '0' means failure for the lmb_alloc functions.
 */
static int lib_test_lmb_at_0(struct unit_test_state *uts)
{
	const phys_addr_t ram = 0;
	const phys_size_t ram_size = 0x20000000;
	struct lmb store;
	struct alist *mem_lst, *used_lst;
	long ret;
	phys_addr_t a, b;

	ut_assertok(setup_lmb_test(uts, &store, &mem_lst, &used_lst));

	ret = lmb_add(ram, ram_size);
	ut_asserteq(ret, 0);

	/* allocate nearly everything, leaving only the 4 bytes at address 0 */
	a = lmb_alloc(ram_size - 4, 1);
	ut_asserteq(a, ram + 4);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, a, ram_size - 4,
		   0, 0, 0, 0);
	/* allocate the rest */
	/* This should fail as the allocated address would be 0 */
	b = lmb_alloc(4, 1);
	ut_asserteq(b, 0);
	/* check that this was an error by checking lmb */
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, a, ram_size - 4,
		   0, 0, 0, 0);
	/* check that this was an error by freeing b */
	ret = lmb_free(b, 4, LMB_NONE);
	ut_asserteq(ret, -1);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, a, ram_size - 4,
		   0, 0, 0, 0);

	/* freeing the valid allocation must empty the used list again */
	ret = lmb_free(a, ram_size - 4, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 0, 0, 0, 0, 0, 0, 0);

	lmb_pop(&store);

	return 0;
}
LIB_TEST(lib_test_lmb_at_0, 0);
Simon Goldschmidtcb57d132019-01-14 22:38:16 +0100493
/* Check that calling lmb_reserve with overlapping regions fails. */
/*
 * Same-flag overlaps are merged; overlaps where the flags differ (or where
 * one side is LMB_NOOVERWRITE) must be rejected with -EEXIST.
 */
static int lib_test_lmb_overlapping_reserve(struct unit_test_state *uts)
{
	const phys_addr_t ram = 0x40000000;
	const phys_size_t ram_size = 0x20000000;
	struct lmb store;
	struct alist *mem_lst, *used_lst;
	long ret;

	ut_assertok(setup_lmb_test(uts, &store, &mem_lst, &used_lst));

	ret = lmb_add(ram, ram_size);
	ut_asserteq(ret, 0);

	ret = lmb_reserve(0x40010000, 0x10000, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, 0x40010000, 0x10000,
		   0, 0, 0, 0);

	/* reserve an overlapping region; same flags, so it merges */
	ret = lmb_reserve(0x40011000, 0x10000, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, 0x40010000, 0x11000,
		   0, 0, 0, 0);
	/* reserve a disjoint 2nd region */
	ret = lmb_reserve(0x40030000, 0x10000, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, 0x40010000, 0x11000,
		   0x40030000, 0x10000, 0, 0);
	/* reserve a 3rd region; this should coalesce all regions into one */
	ret = lmb_reserve(0x40020000, 0x10000, LMB_NONE);
	ut_assert(ret >= 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, 0x40010000, 0x30000,
		   0, 0, 0, 0);

	/* allocate 2nd region, which should be added as first region */
	ret = lmb_reserve(0x40000000, 0x8000, LMB_NONE);
	ut_assert(ret >= 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, 0x40000000, 0x8000,
		   0x40010000, 0x30000, 0, 0);

	/* allocate 3rd region, coalesce with first and overlap with second */
	ret = lmb_reserve(0x40008000, 0x10000, LMB_NONE);
	ut_assert(ret >= 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, 0x40000000, 0x40000,
		   0, 0, 0, 0);

	/* try to allocate overlapping region with a different flag, should fail */
	ret = lmb_reserve(0x40008000, 0x1000, LMB_NOOVERWRITE);
	ut_asserteq(ret, -EEXIST);

	/* allocate another region at 0x40050000 with a different flag */
	ret = lmb_reserve(0x40050000, 0x10000, LMB_NOOVERWRITE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, 0x40000000, 0x40000,
		   0x40050000, 0x10000, 0, 0);

	/*
	 * try to reserve a region adjacent to region 1 overlapping the 2nd region,
	 * should fail
	 */
	ret = lmb_reserve(0x40040000, 0x20000, LMB_NONE);
	ut_asserteq(ret, -EEXIST);

	/*
	 * try to reserve a region between the two regions, but without an overlap,
	 * should succeed. this added region coalesces with the region 1
	 */
	ret = lmb_reserve(0x40040000, 0x10000, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, 0x40000000, 0x50000,
		   0x40050000, 0x10000, 0, 0);

	/*
	 * try to reserve a region which overlaps with both the regions,
	 * should fail as the flags do not match
	 */
	ret = lmb_reserve(0x40020000, 0x80000, LMB_NONE);
	ut_asserteq(ret, -EEXIST);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, 0x40000000, 0x50000,
		   0x40050000, 0x10000, 0, 0);

	lmb_pop(&store);

	return 0;
}
LIB_TEST(lib_test_lmb_overlapping_reserve, 0);
Simon Goldschmidt7a6ee462019-01-14 22:38:18 +0100581
/*
 * Simulate 512 MiB RAM, reserve 3 blocks, allocate addresses in between.
 * Expect addresses outside the memory range to fail.
 *
 * Also exercises lmb_alloc_addr() flag handling (LMB_NONE vs
 * LMB_NOOVERWRITE) and coalescing of adjacent reserved regions.
 */
static int test_alloc_addr(struct unit_test_state *uts, const phys_addr_t ram)
{
	struct lmb store;
	struct alist *mem_lst, *used_lst;
	const phys_size_t ram_size = 0x20000000;
	const phys_addr_t ram_end = ram + ram_size;
	/* three reservation points, evenly spaced at 128 MiB intervals */
	const phys_size_t alloc_addr_a = ram + 0x8000000;
	const phys_size_t alloc_addr_b = ram + 0x8000000 * 2;
	const phys_size_t alloc_addr_c = ram + 0x8000000 * 3;
	long ret;
	phys_addr_t a, b, c, d, e;

	/* check for overflow (ram_end == 0 means RAM runs to the top of the address space) */
	ut_assert(ram_end == 0 || ram_end > ram);

	ut_assertok(setup_lmb_test(uts, &store, &mem_lst, &used_lst));

	ret = lmb_add(ram, ram_size);
	ut_asserteq(ret, 0);

	/*
	 * Try to allocate a page twice:
	 * a region reserved with LMB_NONE may be requested again with
	 * LMB_NONE (returns 0), but requesting it with LMB_NOOVERWRITE
	 * fails with -EEXIST.
	 */
	b = lmb_alloc_addr(alloc_addr_a, 0x1000, LMB_NONE);
	ut_asserteq(b, 0);
	b = lmb_alloc_addr(alloc_addr_a, 0x1000, LMB_NOOVERWRITE);
	ut_asserteq(b, -EEXIST);
	b = lmb_alloc_addr(alloc_addr_a, 0x1000, LMB_NONE);
	ut_asserteq(b, 0);
	b = lmb_alloc_addr(alloc_addr_a, 0x1000, LMB_NONE);
	ut_asserteq(b, 0);
	b = lmb_alloc_addr(alloc_addr_a, 0x2000, LMB_NONE);
	ut_asserteq(b, 0);
	ret = lmb_free(alloc_addr_a, 0x2000, LMB_NONE);
	ut_asserteq(ret, 0);
	/* once reserved with LMB_NOOVERWRITE, any repeat request fails */
	b = lmb_alloc_addr(alloc_addr_a, 0x1000, LMB_NOOVERWRITE);
	ut_asserteq(b, 0);
	b = lmb_alloc_addr(alloc_addr_a, 0x1000, LMB_NONE);
	ut_asserteq(b, -EEXIST);
	b = lmb_alloc_addr(alloc_addr_a, 0x1000, LMB_NOOVERWRITE);
	ut_asserteq(b, -EEXIST);
	ret = lmb_free(alloc_addr_a, 0x1000, LMB_NONE);
	ut_asserteq(ret, 0);

	/*
	 * Add two regions with different flags, region1 and region2 with
	 * a gap between them.
	 * Try adding another region, adjacent to region 1 and overlapping
	 * region 2. Should fail.
	 */
	a = lmb_alloc_addr(alloc_addr_a, 0x1000, LMB_NONE);
	ut_asserteq(a, 0);

	b = lmb_alloc_addr(alloc_addr_a + 0x4000, 0x1000, LMB_NOOVERWRITE);
	ut_asserteq(b, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, alloc_addr_a, 0x1000,
		   alloc_addr_a + 0x4000, 0x1000, 0, 0);

	c = lmb_alloc_addr(alloc_addr_a + 0x1000, 0x5000, LMB_NONE);
	ut_asserteq(c, -EEXIST);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, alloc_addr_a, 0x1000,
		   alloc_addr_a + 0x4000, 0x1000, 0, 0);

	ret = lmb_free(alloc_addr_a, 0x1000, LMB_NONE);
	ut_asserteq(ret, 0);
	ret = lmb_free(alloc_addr_a + 0x4000, 0x1000, LMB_NOOVERWRITE);
	ut_asserteq(ret, 0);

	/*
	 * Add two regions with same flags(LMB_NONE), region1 and region2
	 * with a gap between them.
	 * Try adding another region, adjacent to region 1 and overlapping
	 * region 2. Should succeed. All regions should coalesce into a
	 * single region.
	 */
	a = lmb_alloc_addr(alloc_addr_a, 0x1000, LMB_NONE);
	ut_asserteq(a, 0);

	b = lmb_alloc_addr(alloc_addr_a + 0x4000, 0x1000, LMB_NONE);
	ut_asserteq(b, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, alloc_addr_a, 0x1000,
		   alloc_addr_a + 0x4000, 0x1000, 0, 0);

	c = lmb_alloc_addr(alloc_addr_a + 0x1000, 0x5000, LMB_NONE);
	ut_asserteq(c, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, alloc_addr_a, 0x6000,
		   0, 0, 0, 0);

	ret = lmb_free(alloc_addr_a, 0x6000, LMB_NONE);
	ut_asserteq(ret, 0);

	/*
	 * Add two regions with same flags(LMB_NOOVERWRITE), region1 and
	 * region2 with a gap between them.
	 * Try adding another region, adjacent to region 1 and overlapping
	 * region 2. Should fail.
	 */
	a = lmb_alloc_addr(alloc_addr_a, 0x1000, LMB_NOOVERWRITE);
	ut_asserteq(a, 0);

	b = lmb_alloc_addr(alloc_addr_a + 0x4000, 0x1000, LMB_NOOVERWRITE);
	ut_asserteq(b, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, alloc_addr_a, 0x1000,
		   alloc_addr_a + 0x4000, 0x1000, 0, 0);

	c = lmb_alloc_addr(alloc_addr_a + 0x1000, 0x5000, LMB_NOOVERWRITE);
	ut_asserteq(c, -EEXIST);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, alloc_addr_a, 0x1000,
		   alloc_addr_a + 0x4000, 0x1000, 0, 0);

	ret = lmb_free(alloc_addr_a, 0x1000, LMB_NOOVERWRITE);
	ut_asserteq(ret, 0);
	ret = lmb_free(alloc_addr_a + 0x4000, 0x1000, LMB_NOOVERWRITE);
	ut_asserteq(ret, 0);

	/* reserve 3 blocks */
	ret = lmb_reserve(alloc_addr_a, 0x10000, LMB_NONE);
	ut_asserteq(ret, 0);
	ret = lmb_reserve(alloc_addr_b, 0x10000, LMB_NONE);
	ut_asserteq(ret, 0);
	ret = lmb_reserve(alloc_addr_c, 0x10000, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 3, alloc_addr_a, 0x10000,
		   alloc_addr_b, 0x10000, alloc_addr_c, 0x10000);

	/* allocate blocks filling every gap; each merges with its neighbours */
	a = lmb_alloc_addr(ram, alloc_addr_a - ram, LMB_NONE);
	ut_asserteq(a, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 3, ram, 0x8010000,
		   alloc_addr_b, 0x10000, alloc_addr_c, 0x10000);
	b = lmb_alloc_addr(alloc_addr_a + 0x10000,
			   alloc_addr_b - alloc_addr_a - 0x10000, LMB_NONE);
	ut_asserteq(b, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, ram, 0x10010000,
		   alloc_addr_c, 0x10000, 0, 0);
	c = lmb_alloc_addr(alloc_addr_b + 0x10000,
			   alloc_addr_c - alloc_addr_b - 0x10000, LMB_NONE);
	ut_asserteq(c, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, ram, 0x18010000,
		   0, 0, 0, 0);
	d = lmb_alloc_addr(alloc_addr_c + 0x10000,
			   ram_end - alloc_addr_c - 0x10000, LMB_NONE);
	ut_asserteq(d, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, ram, ram_size,
		   0, 0, 0, 0);

	/* allocating anything else should fail */
	e = lmb_alloc(1, 1);
	ut_asserteq(e, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, ram, ram_size,
		   0, 0, 0, 0);

	/* free the allocation from d */
	ret = lmb_free(alloc_addr_c + 0x10000, ram_end - alloc_addr_c - 0x10000,
		       LMB_NONE);
	ut_asserteq(ret, 0);

	/* allocate at 3 points in free range */

	/* top of RAM */
	d = lmb_alloc_addr(ram_end - 4, 4, LMB_NONE);
	ut_asserteq(d, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, ram, 0x18010000,
		   ram_end - 4, 4, 0, 0);
	ret = lmb_free(ram_end - 4, 4, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, ram, 0x18010000,
		   0, 0, 0, 0);

	/* in the middle of the free range */
	d = lmb_alloc_addr(ram_end - 128, 4, LMB_NONE);
	ut_asserteq(d, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, ram, 0x18010000,
		   ram_end - 128, 4, 0, 0);
	ret = lmb_free(ram_end - 128, 4, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, ram, 0x18010000,
		   0, 0, 0, 0);

	/* directly adjacent to the reserved area (merges with it) */
	d = lmb_alloc_addr(alloc_addr_c + 0x10000, 4, LMB_NONE);
	ut_asserteq(d, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, ram, 0x18010004,
		   0, 0, 0, 0);
	ret = lmb_free(alloc_addr_c + 0x10000, 4, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, ram, 0x18010000,
		   0, 0, 0, 0);

	/* free the lowest region (from a), then allocate at the bottom of RAM */
	ret = lmb_free(ram, alloc_addr_a - ram, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, ram + 0x8000000,
		   0x10010000, 0, 0, 0, 0);

	d = lmb_alloc_addr(ram, 4, LMB_NONE);
	ut_asserteq(d, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, ram, 4,
		   ram + 0x8000000, 0x10010000, 0, 0);

	/* check that allocating outside memory fails */
	if (ram_end != 0) {
		ret = lmb_alloc_addr(ram_end, 1, LMB_NONE);
		ut_asserteq(ret, -EINVAL);
	}
	if (ram != 0) {
		ret = lmb_alloc_addr(ram - 1, 1, LMB_NONE);
		ut_asserteq(ret, -EINVAL);
	}

	lmb_pop(&store);

	return 0;
}
793
794static int lib_test_lmb_alloc_addr(struct unit_test_state *uts)
795{
796 int ret;
797
798 /* simulate 512 MiB RAM beginning at 1GiB */
799 ret = test_alloc_addr(uts, 0x40000000);
800 if (ret)
801 return ret;
802
803 /* simulate 512 MiB RAM beginning at 1.5GiB */
804 return test_alloc_addr(uts, 0xE0000000);
805}
Simon Glassb4c722a2023-10-01 19:15:21 -0600806LIB_TEST(lib_test_lmb_alloc_addr, 0);
Simon Goldschmidt7a6ee462019-01-14 22:38:18 +0100807
808/* Simulate 512 MiB RAM, reserve 3 blocks, check addresses in between */
809static int test_get_unreserved_size(struct unit_test_state *uts,
810 const phys_addr_t ram)
811{
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530812 struct lmb store;
813 struct alist *mem_lst, *used_lst;
Simon Goldschmidt7a6ee462019-01-14 22:38:18 +0100814 const phys_size_t ram_size = 0x20000000;
815 const phys_addr_t ram_end = ram + ram_size;
816 const phys_size_t alloc_addr_a = ram + 0x8000000;
817 const phys_size_t alloc_addr_b = ram + 0x8000000 * 2;
818 const phys_size_t alloc_addr_c = ram + 0x8000000 * 3;
Simon Goldschmidt7a6ee462019-01-14 22:38:18 +0100819 long ret;
820 phys_size_t s;
821
822 /* check for overflow */
823 ut_assert(ram_end == 0 || ram_end > ram);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530824 ut_assertok(setup_lmb_test(uts, &store, &mem_lst, &used_lst));
Simon Goldschmidt7a6ee462019-01-14 22:38:18 +0100825
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530826 ret = lmb_add(ram, ram_size);
Simon Goldschmidt7a6ee462019-01-14 22:38:18 +0100827 ut_asserteq(ret, 0);
828
829 /* reserve 3 blocks */
Ilias Apalodimasf72c55e2024-12-18 09:02:32 +0200830 ret = lmb_reserve(alloc_addr_a, 0x10000, LMB_NONE);
Simon Goldschmidt7a6ee462019-01-14 22:38:18 +0100831 ut_asserteq(ret, 0);
Ilias Apalodimasf72c55e2024-12-18 09:02:32 +0200832 ret = lmb_reserve(alloc_addr_b, 0x10000, LMB_NONE);
Simon Goldschmidt7a6ee462019-01-14 22:38:18 +0100833 ut_asserteq(ret, 0);
Ilias Apalodimasf72c55e2024-12-18 09:02:32 +0200834 ret = lmb_reserve(alloc_addr_c, 0x10000, LMB_NONE);
Simon Goldschmidt7a6ee462019-01-14 22:38:18 +0100835 ut_asserteq(ret, 0);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530836 ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 3, alloc_addr_a, 0x10000,
Simon Goldschmidt7a6ee462019-01-14 22:38:18 +0100837 alloc_addr_b, 0x10000, alloc_addr_c, 0x10000);
838
839 /* check addresses in between blocks */
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530840 s = lmb_get_free_size(ram);
Simon Goldschmidt7a6ee462019-01-14 22:38:18 +0100841 ut_asserteq(s, alloc_addr_a - ram);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530842 s = lmb_get_free_size(ram + 0x10000);
Simon Goldschmidt7a6ee462019-01-14 22:38:18 +0100843 ut_asserteq(s, alloc_addr_a - ram - 0x10000);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530844 s = lmb_get_free_size(alloc_addr_a - 4);
Simon Goldschmidt7a6ee462019-01-14 22:38:18 +0100845 ut_asserteq(s, 4);
846
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530847 s = lmb_get_free_size(alloc_addr_a + 0x10000);
Simon Goldschmidt7a6ee462019-01-14 22:38:18 +0100848 ut_asserteq(s, alloc_addr_b - alloc_addr_a - 0x10000);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530849 s = lmb_get_free_size(alloc_addr_a + 0x20000);
Simon Goldschmidt7a6ee462019-01-14 22:38:18 +0100850 ut_asserteq(s, alloc_addr_b - alloc_addr_a - 0x20000);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530851 s = lmb_get_free_size(alloc_addr_b - 4);
Simon Goldschmidt7a6ee462019-01-14 22:38:18 +0100852 ut_asserteq(s, 4);
853
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530854 s = lmb_get_free_size(alloc_addr_c + 0x10000);
Simon Goldschmidt7a6ee462019-01-14 22:38:18 +0100855 ut_asserteq(s, ram_end - alloc_addr_c - 0x10000);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530856 s = lmb_get_free_size(alloc_addr_c + 0x20000);
Simon Goldschmidt7a6ee462019-01-14 22:38:18 +0100857 ut_asserteq(s, ram_end - alloc_addr_c - 0x20000);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530858 s = lmb_get_free_size(ram_end - 4);
Simon Goldschmidt7a6ee462019-01-14 22:38:18 +0100859 ut_asserteq(s, 4);
860
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530861 lmb_pop(&store);
862
Simon Goldschmidt7a6ee462019-01-14 22:38:18 +0100863 return 0;
864}
865
Simon Goldschmidt7510a562019-01-21 20:29:55 +0100866static int lib_test_lmb_get_free_size(struct unit_test_state *uts)
Simon Goldschmidt7a6ee462019-01-14 22:38:18 +0100867{
868 int ret;
869
870 /* simulate 512 MiB RAM beginning at 1GiB */
871 ret = test_get_unreserved_size(uts, 0x40000000);
872 if (ret)
873 return ret;
874
875 /* simulate 512 MiB RAM beginning at 1.5GiB */
876 return test_get_unreserved_size(uts, 0xE0000000);
877}
Simon Glassb4c722a2023-10-01 19:15:21 -0600878LIB_TEST(lib_test_lmb_get_free_size, 0);
Patrick Delaunay1fe3adc2021-03-10 10:16:30 +0100879
Patrick Delaunaya1860722021-05-07 14:50:32 +0200880static int lib_test_lmb_flags(struct unit_test_state *uts)
881{
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530882 struct lmb store;
883 struct lmb_region *mem, *used;
884 struct alist *mem_lst, *used_lst;
Patrick Delaunaya1860722021-05-07 14:50:32 +0200885 const phys_addr_t ram = 0x40000000;
886 const phys_size_t ram_size = 0x20000000;
Patrick Delaunaya1860722021-05-07 14:50:32 +0200887 long ret;
888
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530889 ut_assertok(setup_lmb_test(uts, &store, &mem_lst, &used_lst));
890 mem = mem_lst->data;
891 used = used_lst->data;
Patrick Delaunaya1860722021-05-07 14:50:32 +0200892
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530893 ret = lmb_add(ram, ram_size);
Patrick Delaunaya1860722021-05-07 14:50:32 +0200894 ut_asserteq(ret, 0);
895
896 /* reserve, same flag */
Ilias Apalodimasf72c55e2024-12-18 09:02:32 +0200897 ret = lmb_reserve(0x40010000, 0x10000, LMB_NOMAP);
Patrick Delaunaya1860722021-05-07 14:50:32 +0200898 ut_asserteq(ret, 0);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530899 ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, 0x40010000, 0x10000,
Patrick Delaunaya1860722021-05-07 14:50:32 +0200900 0, 0, 0, 0);
901
902 /* reserve again, same flag */
Ilias Apalodimasf72c55e2024-12-18 09:02:32 +0200903 ret = lmb_reserve(0x40010000, 0x10000, LMB_NOMAP);
Sam Protsenkob4f81102024-12-10 20:17:01 -0600904 ut_asserteq(ret, -EEXIST);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530905 ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, 0x40010000, 0x10000,
Patrick Delaunaya1860722021-05-07 14:50:32 +0200906 0, 0, 0, 0);
907
908 /* reserve again, new flag */
Ilias Apalodimasf72c55e2024-12-18 09:02:32 +0200909 ret = lmb_reserve(0x40010000, 0x10000, LMB_NONE);
Sughosh Ganu9fa71c92025-03-03 19:02:26 +0530910 ut_asserteq(ret, -EEXIST);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530911 ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, 0x40010000, 0x10000,
Patrick Delaunaya1860722021-05-07 14:50:32 +0200912 0, 0, 0, 0);
913
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530914 ut_asserteq(lmb_is_nomap(&used[0]), 1);
Patrick Delaunaya1860722021-05-07 14:50:32 +0200915
916 /* merge after */
Ilias Apalodimasf72c55e2024-12-18 09:02:32 +0200917 ret = lmb_reserve(0x40020000, 0x10000, LMB_NOMAP);
Ilias Apalodimas98491252024-10-23 18:22:00 +0300918 ut_asserteq(ret, 0);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530919 ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, 0x40010000, 0x20000,
Patrick Delaunaya1860722021-05-07 14:50:32 +0200920 0, 0, 0, 0);
921
922 /* merge before */
Ilias Apalodimasf72c55e2024-12-18 09:02:32 +0200923 ret = lmb_reserve(0x40000000, 0x10000, LMB_NOMAP);
Ilias Apalodimas98491252024-10-23 18:22:00 +0300924 ut_asserteq(ret, 0);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530925 ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, 0x40000000, 0x30000,
Patrick Delaunaya1860722021-05-07 14:50:32 +0200926 0, 0, 0, 0);
927
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530928 ut_asserteq(lmb_is_nomap(&used[0]), 1);
Patrick Delaunaya1860722021-05-07 14:50:32 +0200929
Ilias Apalodimasf72c55e2024-12-18 09:02:32 +0200930 ret = lmb_reserve(0x40030000, 0x10000, LMB_NONE);
Patrick Delaunaya1860722021-05-07 14:50:32 +0200931 ut_asserteq(ret, 0);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530932 ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, 0x40000000, 0x30000,
Patrick Delaunaya1860722021-05-07 14:50:32 +0200933 0x40030000, 0x10000, 0, 0);
934
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530935 ut_asserteq(lmb_is_nomap(&used[0]), 1);
936 ut_asserteq(lmb_is_nomap(&used[1]), 0);
Patrick Delaunaya1860722021-05-07 14:50:32 +0200937
938 /* test that old API use LMB_NONE */
Ilias Apalodimasf72c55e2024-12-18 09:02:32 +0200939 ret = lmb_reserve(0x40040000, 0x10000, LMB_NONE);
Ilias Apalodimas98491252024-10-23 18:22:00 +0300940 ut_asserteq(ret, 0);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530941 ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, 0x40000000, 0x30000,
Patrick Delaunaya1860722021-05-07 14:50:32 +0200942 0x40030000, 0x20000, 0, 0);
943
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530944 ut_asserteq(lmb_is_nomap(&used[0]), 1);
945 ut_asserteq(lmb_is_nomap(&used[1]), 0);
Patrick Delaunaya1860722021-05-07 14:50:32 +0200946
Ilias Apalodimasf72c55e2024-12-18 09:02:32 +0200947 ret = lmb_reserve(0x40070000, 0x10000, LMB_NOMAP);
Patrick Delaunaya1860722021-05-07 14:50:32 +0200948 ut_asserteq(ret, 0);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530949 ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 3, 0x40000000, 0x30000,
Patrick Delaunaya1860722021-05-07 14:50:32 +0200950 0x40030000, 0x20000, 0x40070000, 0x10000);
951
Ilias Apalodimasf72c55e2024-12-18 09:02:32 +0200952 ret = lmb_reserve(0x40050000, 0x10000, LMB_NOMAP);
Patrick Delaunaya1860722021-05-07 14:50:32 +0200953 ut_asserteq(ret, 0);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530954 ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 4, 0x40000000, 0x30000,
Patrick Delaunaya1860722021-05-07 14:50:32 +0200955 0x40030000, 0x20000, 0x40050000, 0x10000);
956
957 /* merge with 2 adjacent regions */
Ilias Apalodimasf72c55e2024-12-18 09:02:32 +0200958 ret = lmb_reserve(0x40060000, 0x10000, LMB_NOMAP);
Ilias Apalodimas98491252024-10-23 18:22:00 +0300959 ut_asserteq(ret, 0);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530960 ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 3, 0x40000000, 0x30000,
Patrick Delaunaya1860722021-05-07 14:50:32 +0200961 0x40030000, 0x20000, 0x40050000, 0x30000);
962
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530963 ut_asserteq(lmb_is_nomap(&used[0]), 1);
964 ut_asserteq(lmb_is_nomap(&used[1]), 0);
965 ut_asserteq(lmb_is_nomap(&used[2]), 1);
966
967 lmb_pop(&store);
Patrick Delaunaya1860722021-05-07 14:50:32 +0200968
969 return 0;
970}
Simon Glassb4c722a2023-10-01 19:15:21 -0600971LIB_TEST(lib_test_lmb_flags, 0);