blob: 751909fc2cfede441a7cae2689851b948a8851a3 [file] [log] [blame]
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +01001// SPDX-License-Identifier: GPL-2.0+
2/*
3 * (C) Copyright 2018 Simon Goldschmidt
4 */
5
Sughosh Ganu291bf9c2024-08-26 17:29:18 +05306#include <alist.h>
Simon Glass75c4d412020-07-19 10:15:37 -06007#include <dm.h>
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +01008#include <lmb.h>
Simon Glass0f2af882020-05-10 11:40:05 -06009#include <log.h>
Simon Glass9bc15642020-02-03 07:36:16 -070010#include <malloc.h>
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +010011#include <dm/test.h>
Simon Glassb4c722a2023-10-01 19:15:21 -060012#include <test/lib.h>
Simon Glass75c4d412020-07-19 10:15:37 -060013#include <test/test.h>
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +010014#include <test/ut.h>
15
Sughosh Ganu291bf9c2024-08-26 17:29:18 +053016static inline bool lmb_is_nomap(struct lmb_region *m)
Heinrich Schuchardta88181e2021-11-14 08:41:07 +010017{
18 return m->flags & LMB_NOMAP;
19}
20
Sughosh Ganu291bf9c2024-08-26 17:29:18 +053021static int check_lmb(struct unit_test_state *uts, struct alist *mem_lst,
22 struct alist *used_lst, phys_addr_t ram_base,
23 phys_size_t ram_size, unsigned long num_reserved,
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +010024 phys_addr_t base1, phys_size_t size1,
25 phys_addr_t base2, phys_size_t size2,
26 phys_addr_t base3, phys_size_t size3)
27{
Sughosh Ganu291bf9c2024-08-26 17:29:18 +053028 struct lmb_region *mem, *used;
29
30 mem = mem_lst->data;
31 used = used_lst->data;
32
Simon Goldschmidtc722dac2019-02-01 21:23:59 +010033 if (ram_size) {
Sughosh Ganu291bf9c2024-08-26 17:29:18 +053034 ut_asserteq(mem_lst->count, 1);
35 ut_asserteq(mem[0].base, ram_base);
36 ut_asserteq(mem[0].size, ram_size);
Simon Goldschmidtc722dac2019-02-01 21:23:59 +010037 }
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +010038
Sughosh Ganu291bf9c2024-08-26 17:29:18 +053039 ut_asserteq(used_lst->count, num_reserved);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +010040 if (num_reserved > 0) {
Sughosh Ganu291bf9c2024-08-26 17:29:18 +053041 ut_asserteq(used[0].base, base1);
42 ut_asserteq(used[0].size, size1);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +010043 }
44 if (num_reserved > 1) {
Sughosh Ganu291bf9c2024-08-26 17:29:18 +053045 ut_asserteq(used[1].base, base2);
46 ut_asserteq(used[1].size, size2);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +010047 }
48 if (num_reserved > 2) {
Sughosh Ganu291bf9c2024-08-26 17:29:18 +053049 ut_asserteq(used[2].base, base3);
50 ut_asserteq(used[2].size, size3);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +010051 }
52 return 0;
53}
54
/*
 * Assert the full lmb state in one statement; fails the test on the first
 * mismatch. NOTE: expands to a ut_assert() using a variable named 'uts',
 * which must therefore be in scope at every call site.
 */
#define ASSERT_LMB(mem_lst, used_lst, ram_base, ram_size, num_reserved, base1, size1, \
		   base2, size2, base3, size3) \
		   ut_assert(!check_lmb(uts, mem_lst, used_lst, ram_base, ram_size, \
			     num_reserved, base1, size1, base2, size2, base3, \
			     size3))
60
Sughosh Ganu291bf9c2024-08-26 17:29:18 +053061static int setup_lmb_test(struct unit_test_state *uts, struct lmb *store,
62 struct alist **mem_lstp, struct alist **used_lstp)
63{
64 struct lmb *lmb;
65
66 ut_assertok(lmb_push(store));
67 lmb = lmb_get();
Ilias Apalodimas5421c332024-12-18 09:02:33 +020068 *mem_lstp = &lmb->available_mem;
Sughosh Ganu291bf9c2024-08-26 17:29:18 +053069 *used_lstp = &lmb->used_mem;
70
71 return 0;
72}
73
Sughosh Ganu9b0765a2025-06-17 16:13:40 +053074static int lmb_reserve(phys_addr_t addr, phys_size_t size, u32 flags)
75{
76 int err;
77
78 err = lmb_alloc_mem(LMB_MEM_ALLOC_ADDR, 0, &addr, size, flags);
79 if (err)
80 return err;
81
82 return 0;
83}
84
/*
 * Allocating at a fixed address is the same operation as reserving it;
 * this alias presumably keeps the tests readable in terms of the API they
 * exercise — confirm against the lmb allocation interface.
 */
#define lmb_alloc_addr(addr, size, flags) lmb_reserve(addr, size, flags)
86
Simon Goldschmidtc722dac2019-02-01 21:23:59 +010087static int test_multi_alloc(struct unit_test_state *uts, const phys_addr_t ram,
88 const phys_size_t ram_size, const phys_addr_t ram0,
89 const phys_size_t ram0_size,
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +010090 const phys_addr_t alloc_64k_addr)
91{
92 const phys_addr_t ram_end = ram + ram_size;
93 const phys_addr_t alloc_64k_end = alloc_64k_addr + 0x10000;
94
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +010095 long ret;
Sughosh Ganu291bf9c2024-08-26 17:29:18 +053096 struct alist *mem_lst, *used_lst;
97 struct lmb_region *mem, *used;
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +010098 phys_addr_t a, a2, b, b2, c, d;
Sughosh Ganu291bf9c2024-08-26 17:29:18 +053099 struct lmb store;
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100100
101 /* check for overflow */
102 ut_assert(ram_end == 0 || ram_end > ram);
103 ut_assert(alloc_64k_end > alloc_64k_addr);
104 /* check input addresses + size */
105 ut_assert(alloc_64k_addr >= ram + 8);
106 ut_assert(alloc_64k_end <= ram_end - 8);
107
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530108 ut_assertok(setup_lmb_test(uts, &store, &mem_lst, &used_lst));
109 mem = mem_lst->data;
110 used = used_lst->data;
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100111
Simon Goldschmidtc722dac2019-02-01 21:23:59 +0100112 if (ram0_size) {
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530113 ret = lmb_add(ram0, ram0_size);
Simon Goldschmidtc722dac2019-02-01 21:23:59 +0100114 ut_asserteq(ret, 0);
115 }
116
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530117 ret = lmb_add(ram, ram_size);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100118 ut_asserteq(ret, 0);
119
Simon Goldschmidtc722dac2019-02-01 21:23:59 +0100120 if (ram0_size) {
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530121 ut_asserteq(mem_lst->count, 2);
122 ut_asserteq(mem[0].base, ram0);
123 ut_asserteq(mem[0].size, ram0_size);
124 ut_asserteq(mem[1].base, ram);
125 ut_asserteq(mem[1].size, ram_size);
Simon Goldschmidtc722dac2019-02-01 21:23:59 +0100126 } else {
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530127 ut_asserteq(mem_lst->count, 1);
128 ut_asserteq(mem[0].base, ram);
129 ut_asserteq(mem[0].size, ram_size);
Simon Goldschmidtc722dac2019-02-01 21:23:59 +0100130 }
131
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100132 /* reserve 64KiB somewhere */
Ilias Apalodimasf72c55e2024-12-18 09:02:32 +0200133 ret = lmb_reserve(alloc_64k_addr, 0x10000, LMB_NONE);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100134 ut_asserteq(ret, 0);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530135 ASSERT_LMB(mem_lst, used_lst, 0, 0, 1, alloc_64k_addr, 0x10000,
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100136 0, 0, 0, 0);
137
138 /* allocate somewhere, should be at the end of RAM */
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530139 a = lmb_alloc(4, 1);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100140 ut_asserteq(a, ram_end - 4);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530141 ASSERT_LMB(mem_lst, used_lst, 0, 0, 2, alloc_64k_addr, 0x10000,
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100142 ram_end - 4, 4, 0, 0);
143 /* alloc below end of reserved region -> below reserved region */
Ilias Apalodimasd1e9a262024-12-18 09:02:36 +0200144 b = lmb_alloc_base(4, 1, alloc_64k_end, LMB_NONE);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100145 ut_asserteq(b, alloc_64k_addr - 4);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530146 ASSERT_LMB(mem_lst, used_lst, 0, 0, 2,
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100147 alloc_64k_addr - 4, 0x10000 + 4, ram_end - 4, 4, 0, 0);
148
149 /* 2nd time */
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530150 c = lmb_alloc(4, 1);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100151 ut_asserteq(c, ram_end - 8);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530152 ASSERT_LMB(mem_lst, used_lst, 0, 0, 2,
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100153 alloc_64k_addr - 4, 0x10000 + 4, ram_end - 8, 8, 0, 0);
Ilias Apalodimasd1e9a262024-12-18 09:02:36 +0200154 d = lmb_alloc_base(4, 1, alloc_64k_end, LMB_NONE);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100155 ut_asserteq(d, alloc_64k_addr - 8);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530156 ASSERT_LMB(mem_lst, used_lst, 0, 0, 2,
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100157 alloc_64k_addr - 8, 0x10000 + 8, ram_end - 8, 8, 0, 0);
158
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530159 ret = lmb_free(a, 4);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100160 ut_asserteq(ret, 0);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530161 ASSERT_LMB(mem_lst, used_lst, 0, 0, 2,
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100162 alloc_64k_addr - 8, 0x10000 + 8, ram_end - 8, 4, 0, 0);
163 /* allocate again to ensure we get the same address */
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530164 a2 = lmb_alloc(4, 1);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100165 ut_asserteq(a, a2);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530166 ASSERT_LMB(mem_lst, used_lst, 0, 0, 2,
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100167 alloc_64k_addr - 8, 0x10000 + 8, ram_end - 8, 8, 0, 0);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530168 ret = lmb_free(a2, 4);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100169 ut_asserteq(ret, 0);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530170 ASSERT_LMB(mem_lst, used_lst, 0, 0, 2,
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100171 alloc_64k_addr - 8, 0x10000 + 8, ram_end - 8, 4, 0, 0);
172
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530173 ret = lmb_free(b, 4);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100174 ut_asserteq(ret, 0);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530175 ASSERT_LMB(mem_lst, used_lst, 0, 0, 3,
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100176 alloc_64k_addr - 8, 4, alloc_64k_addr, 0x10000,
177 ram_end - 8, 4);
178 /* allocate again to ensure we get the same address */
Ilias Apalodimasd1e9a262024-12-18 09:02:36 +0200179 b2 = lmb_alloc_base(4, 1, alloc_64k_end, LMB_NONE);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100180 ut_asserteq(b, b2);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530181 ASSERT_LMB(mem_lst, used_lst, 0, 0, 2,
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100182 alloc_64k_addr - 8, 0x10000 + 8, ram_end - 8, 4, 0, 0);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530183 ret = lmb_free(b2, 4);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100184 ut_asserteq(ret, 0);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530185 ASSERT_LMB(mem_lst, used_lst, 0, 0, 3,
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100186 alloc_64k_addr - 8, 4, alloc_64k_addr, 0x10000,
187 ram_end - 8, 4);
188
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530189 ret = lmb_free(c, 4);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100190 ut_asserteq(ret, 0);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530191 ASSERT_LMB(mem_lst, used_lst, 0, 0, 2,
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100192 alloc_64k_addr - 8, 4, alloc_64k_addr, 0x10000, 0, 0);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530193 ret = lmb_free(d, 4);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100194 ut_asserteq(ret, 0);
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530195 ASSERT_LMB(mem_lst, used_lst, 0, 0, 1, alloc_64k_addr, 0x10000,
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100196 0, 0, 0, 0);
197
Simon Goldschmidtc722dac2019-02-01 21:23:59 +0100198 if (ram0_size) {
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530199 ut_asserteq(mem_lst->count, 2);
200 ut_asserteq(mem[0].base, ram0);
201 ut_asserteq(mem[0].size, ram0_size);
202 ut_asserteq(mem[1].base, ram);
203 ut_asserteq(mem[1].size, ram_size);
Simon Goldschmidtc722dac2019-02-01 21:23:59 +0100204 } else {
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530205 ut_asserteq(mem_lst->count, 1);
206 ut_asserteq(mem[0].base, ram);
207 ut_asserteq(mem[0].size, ram_size);
Simon Goldschmidtc722dac2019-02-01 21:23:59 +0100208 }
209
Sughosh Ganu291bf9c2024-08-26 17:29:18 +0530210 lmb_pop(&store);
211
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100212 return 0;
213}
214
215static int test_multi_alloc_512mb(struct unit_test_state *uts,
216 const phys_addr_t ram)
217{
Simon Goldschmidtc722dac2019-02-01 21:23:59 +0100218 return test_multi_alloc(uts, ram, 0x20000000, 0, 0, ram + 0x10000000);
219}
220
221static int test_multi_alloc_512mb_x2(struct unit_test_state *uts,
222 const phys_addr_t ram,
223 const phys_addr_t ram0)
224{
225 return test_multi_alloc(uts, ram, 0x20000000, ram0, 0x20000000,
226 ram + 0x10000000);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100227}
228
229/* Create a memory region with one reserved region and allocate */
230static int lib_test_lmb_simple(struct unit_test_state *uts)
231{
Simon Goldschmidt6402d9b2019-01-14 22:38:15 +0100232 int ret;
233
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100234 /* simulate 512 MiB RAM beginning at 1GiB */
Simon Goldschmidt6402d9b2019-01-14 22:38:15 +0100235 ret = test_multi_alloc_512mb(uts, 0x40000000);
236 if (ret)
237 return ret;
238
239 /* simulate 512 MiB RAM beginning at 1.5GiB */
240 return test_multi_alloc_512mb(uts, 0xE0000000);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100241}
Simon Glassb4c722a2023-10-01 19:15:21 -0600242LIB_TEST(lib_test_lmb_simple, 0);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100243
Simon Goldschmidtc722dac2019-02-01 21:23:59 +0100244/* Create two memory regions with one reserved region and allocate */
245static int lib_test_lmb_simple_x2(struct unit_test_state *uts)
246{
247 int ret;
248
249 /* simulate 512 MiB RAM beginning at 2GiB and 1 GiB */
250 ret = test_multi_alloc_512mb_x2(uts, 0x80000000, 0x40000000);
251 if (ret)
252 return ret;
253
254 /* simulate 512 MiB RAM beginning at 3.5GiB and 1 GiB */
255 return test_multi_alloc_512mb_x2(uts, 0xE0000000, 0x40000000);
256}
Simon Glassb4c722a2023-10-01 19:15:21 -0600257LIB_TEST(lib_test_lmb_simple_x2, 0);
Simon Goldschmidtc722dac2019-02-01 21:23:59 +0100258
/*
 * Simulate 512 MiB RAM, allocate some blocks that fit/don't fit
 *
 * With a 64 KiB reservation in the middle of a 512 MiB bank at @ram, a
 * 256 MiB allocation must land directly below the reservation (and merge
 * with it), a second 256 MiB allocation must fail, and an allocation of
 * the whole bank size must fail as well.
 */
static int test_bigblock(struct unit_test_state *uts, const phys_addr_t ram)
{
	const phys_size_t ram_size = 0x20000000;
	const phys_size_t big_block_size = 0x10000000;
	const phys_addr_t ram_end = ram + ram_size;
	const phys_addr_t alloc_64k_addr = ram + 0x10000000;
	struct alist *mem_lst, *used_lst;
	long ret;
	phys_addr_t a, b;
	struct lmb store;

	/* check for overflow (ram_end == 0 means the bank ends at 2^64) */
	ut_assert(ram_end == 0 || ram_end > ram);

	ut_assertok(setup_lmb_test(uts, &store, &mem_lst, &used_lst));

	ret = lmb_add(ram, ram_size);
	ut_asserteq(ret, 0);

	/* reserve 64KiB in the middle of RAM */
	ret = lmb_reserve(alloc_64k_addr, 0x10000, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, alloc_64k_addr, 0x10000,
		   0, 0, 0, 0);

	/* allocate a big block, should be below reserved */
	a = lmb_alloc(big_block_size, 1);
	ut_asserteq(a, ram);
	/* the big block is adjacent to the reservation, so they coalesce */
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, a,
		   big_block_size + 0x10000, 0, 0, 0, 0);
	/* allocate 2nd big block */
	/* This should fail, printing an error */
	b = lmb_alloc(big_block_size, 1);
	ut_asserteq(b, 0);
	/* a failed allocation must leave the state unchanged */
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, a,
		   big_block_size + 0x10000, 0, 0, 0, 0);

	ret = lmb_free(a, big_block_size);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, alloc_64k_addr, 0x10000,
		   0, 0, 0, 0);

	/* allocate too big block */
	/* This should fail, printing an error */
	a = lmb_alloc(ram_size, 1);
	ut_asserteq(a, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, alloc_64k_addr, 0x10000,
		   0, 0, 0, 0);

	lmb_pop(&store);

	return 0;
}
313
314static int lib_test_lmb_big(struct unit_test_state *uts)
315{
Simon Goldschmidt6402d9b2019-01-14 22:38:15 +0100316 int ret;
317
318 /* simulate 512 MiB RAM beginning at 1GiB */
319 ret = test_bigblock(uts, 0x40000000);
320 if (ret)
321 return ret;
322
323 /* simulate 512 MiB RAM beginning at 1.5GiB */
324 return test_bigblock(uts, 0xE0000000);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100325}
Simon Glassb4c722a2023-10-01 19:15:21 -0600326LIB_TEST(lib_test_lmb_big, 0);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100327
/*
 * Simulate 512 MiB RAM, allocate a block without previous reservation
 *
 * Allocates two blocks of @alloc_size bytes with the given @align from the
 * top of a 512 MiB bank at @ram, checks how they coalesce (they only merge
 * when @alloc_size is already a multiple of @align, i.e. there is no
 * alignment gap between them), frees them, and finally repeats the
 * allocation via lmb_alloc_base() to verify the same address comes back.
 */
static int test_noreserved(struct unit_test_state *uts, const phys_addr_t ram,
			   const phys_addr_t alloc_size, const ulong align)
{
	const phys_size_t ram_size = 0x20000000;
	const phys_addr_t ram_end = ram + ram_size;
	long ret;
	phys_addr_t a, b;
	struct lmb store;
	struct alist *mem_lst, *used_lst;
	/* round alloc_size up to the alignment (align must be a power of 2) */
	const phys_addr_t alloc_size_aligned = (alloc_size + align - 1) &
		~(align - 1);

	/* check for overflow (ram_end == 0 means the bank ends at 2^64) */
	ut_assert(ram_end == 0 || ram_end > ram);

	ut_assertok(setup_lmb_test(uts, &store, &mem_lst, &used_lst));

	ret = lmb_add(ram, ram_size);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 0, 0, 0, 0, 0, 0, 0);

	/* allocate a block */
	a = lmb_alloc(alloc_size, align);
	ut_assert(a != 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1,
		   ram + ram_size - alloc_size_aligned, alloc_size, 0, 0, 0, 0);

	/* allocate another block */
	b = lmb_alloc(alloc_size, align);
	ut_assert(b != 0);
	if (alloc_size == alloc_size_aligned) {
		/* no alignment gap: both blocks merge into one region */
		ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, ram + ram_size -
			   (alloc_size_aligned * 2), alloc_size * 2, 0, 0, 0,
			   0);
	} else {
		/* an alignment gap keeps the two regions separate */
		ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, ram + ram_size -
			   (alloc_size_aligned * 2), alloc_size, ram + ram_size
			   - alloc_size_aligned, alloc_size, 0, 0);
	}
	/* and free them */
	ret = lmb_free(b, alloc_size);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1,
		   ram + ram_size - alloc_size_aligned,
		   alloc_size, 0, 0, 0, 0);
	ret = lmb_free(a, alloc_size);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 0, 0, 0, 0, 0, 0, 0);

	/* allocate a block with base; must return the same address as 'a' */
	b = lmb_alloc_base(alloc_size, align, ram_end, LMB_NONE);
	ut_assert(a == b);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1,
		   ram + ram_size - alloc_size_aligned,
		   alloc_size, 0, 0, 0, 0);
	/* and free it */
	ret = lmb_free(b, alloc_size);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 0, 0, 0, 0, 0, 0, 0);

	lmb_pop(&store);

	return 0;
}
393
394static int lib_test_lmb_noreserved(struct unit_test_state *uts)
395{
Simon Goldschmidt6402d9b2019-01-14 22:38:15 +0100396 int ret;
397
398 /* simulate 512 MiB RAM beginning at 1GiB */
Simon Goldschmidtcb57d132019-01-14 22:38:16 +0100399 ret = test_noreserved(uts, 0x40000000, 4, 1);
Simon Goldschmidt6402d9b2019-01-14 22:38:15 +0100400 if (ret)
401 return ret;
402
403 /* simulate 512 MiB RAM beginning at 1.5GiB */
Simon Goldschmidtcb57d132019-01-14 22:38:16 +0100404 return test_noreserved(uts, 0xE0000000, 4, 1);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100405}
Simon Glassb4c722a2023-10-01 19:15:21 -0600406LIB_TEST(lib_test_lmb_noreserved, 0);
Simon Goldschmidt9f3b6272019-01-14 22:38:14 +0100407
Simon Goldschmidtcb57d132019-01-14 22:38:16 +0100408static int lib_test_lmb_unaligned_size(struct unit_test_state *uts)
409{
410 int ret;
411
412 /* simulate 512 MiB RAM beginning at 1GiB */
413 ret = test_noreserved(uts, 0x40000000, 5, 8);
414 if (ret)
415 return ret;
416
417 /* simulate 512 MiB RAM beginning at 1.5GiB */
418 return test_noreserved(uts, 0xE0000000, 5, 8);
419}
Simon Glassb4c722a2023-10-01 19:15:21 -0600420LIB_TEST(lib_test_lmb_unaligned_size, 0);
Simon Goldschmidtcb57d132019-01-14 22:38:16 +0100421
/*
 * Simulate a RAM that starts at 0 and allocate down to address 0, which must
 * fail as '0' means failure for the lmb_alloc functions.
 */
static int lib_test_lmb_at_0(struct unit_test_state *uts)
{
	const phys_addr_t ram = 0;
	const phys_size_t ram_size = 0x20000000;
	struct lmb store;
	struct alist *mem_lst, *used_lst;
	long ret;
	phys_addr_t a, b;

	ut_assertok(setup_lmb_test(uts, &store, &mem_lst, &used_lst));

	ret = lmb_add(ram, ram_size);
	ut_asserteq(ret, 0);

	/* allocate nearly everything: only [0, 4) remains free */
	a = lmb_alloc(ram_size - 4, 1);
	ut_asserteq(a, ram + 4);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, a, ram_size - 4,
		   0, 0, 0, 0);
	/* allocate the rest */
	/* This should fail as the allocated address would be 0 */
	b = lmb_alloc(4, 1);
	ut_asserteq(b, 0);
	/* check that this was an error by checking lmb */
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, a, ram_size - 4,
		   0, 0, 0, 0);
	/* check that this was an error by freeing b: free must reject it */
	ret = lmb_free(b, 4);
	ut_asserteq(ret, -1);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, a, ram_size - 4,
		   0, 0, 0, 0);

	ret = lmb_free(a, ram_size - 4);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 0, 0, 0, 0, 0, 0, 0);

	lmb_pop(&store);

	return 0;
}
LIB_TEST(lib_test_lmb_at_0, 0);
Simon Goldschmidtcb57d132019-01-14 22:38:16 +0100467
/*
 * Check that calling lmb_reserve with overlapping regions fails.
 *
 * Overlapping/adjacent reservations with the same flags (LMB_NONE) must
 * coalesce; any overlap between regions whose flags differ must be
 * rejected with -EEXIST, leaving the existing state untouched.
 */
static int lib_test_lmb_overlapping_reserve(struct unit_test_state *uts)
{
	const phys_addr_t ram = 0x40000000;
	const phys_size_t ram_size = 0x20000000;
	struct lmb store;
	struct alist *mem_lst, *used_lst;
	long ret;

	ut_assertok(setup_lmb_test(uts, &store, &mem_lst, &used_lst));

	ret = lmb_add(ram, ram_size);
	ut_asserteq(ret, 0);

	ret = lmb_reserve(0x40010000, 0x10000, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, 0x40010000, 0x10000,
		   0, 0, 0, 0);

	/* reserve an overlapping region; same flags, so it merges */
	ret = lmb_reserve(0x40011000, 0x10000, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, 0x40010000, 0x11000,
		   0, 0, 0, 0);
	/* reserve a disjoint 2nd region */
	ret = lmb_reserve(0x40030000, 0x10000, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, 0x40010000, 0x11000,
		   0x40030000, 0x10000, 0, 0);
	/* reserve a 3rd region; this should coalesce all regions into one */
	ret = lmb_reserve(0x40020000, 0x10000, LMB_NONE);
	ut_assert(ret >= 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, 0x40010000, 0x30000,
		   0, 0, 0, 0);

	/* reserve a 2nd region, which should be added as first region */
	ret = lmb_reserve(0x40000000, 0x8000, LMB_NONE);
	ut_assert(ret >= 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, 0x40000000, 0x8000,
		   0x40010000, 0x30000, 0, 0);

	/* reserve a 3rd region, coalesce with first and overlap with second */
	ret = lmb_reserve(0x40008000, 0x10000, LMB_NONE);
	ut_assert(ret >= 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, 0x40000000, 0x40000,
		   0, 0, 0, 0);

	/* try to reserve an overlapping region with a different flag: fails */
	ret = lmb_reserve(0x40008000, 0x1000, LMB_NOOVERWRITE);
	ut_asserteq(ret, -EEXIST);

	/* reserve another region at 0x40050000 with a different flag */
	ret = lmb_reserve(0x40050000, 0x10000, LMB_NOOVERWRITE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, 0x40000000, 0x40000,
		   0x40050000, 0x10000, 0, 0);

	/*
	 * try to reserve a region adjacent to region 1 and overlapping the
	 * 2nd region; must fail because the flags differ
	 */
	ret = lmb_reserve(0x40040000, 0x20000, LMB_NONE);
	ut_asserteq(ret, -EEXIST);

	/*
	 * try to reserve a region between the two regions, but without an
	 * overlap; should succeed, and the new region coalesces with region 1
	 */
	ret = lmb_reserve(0x40040000, 0x10000, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, 0x40000000, 0x50000,
		   0x40050000, 0x10000, 0, 0);

	/*
	 * try to reserve a region which overlaps with both regions; must
	 * fail as the flags do not match, leaving the state unchanged
	 */
	ret = lmb_reserve(0x40020000, 0x80000, LMB_NONE);
	ut_asserteq(ret, -EEXIST);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, 0x40000000, 0x50000,
		   0x40050000, 0x10000, 0, 0);

	lmb_pop(&store);

	return 0;
}
LIB_TEST(lib_test_lmb_overlapping_reserve, 0);
Simon Goldschmidt7a6ee462019-01-14 22:38:18 +0100555
556/*
557 * Simulate 512 MiB RAM, reserve 3 blocks, allocate addresses in between.
558 * Expect addresses outside the memory range to fail.
559 */
static int test_alloc_addr(struct unit_test_state *uts, const phys_addr_t ram)
{
	struct lmb store;
	struct alist *mem_lst, *used_lst;
	const phys_size_t ram_size = 0x20000000;
	const phys_addr_t ram_end = ram + ram_size;
	/* three reservation points at 128 MiB intervals inside the bank */
	const phys_size_t alloc_addr_a = ram + 0x8000000;
	const phys_size_t alloc_addr_b = ram + 0x8000000 * 2;
	const phys_size_t alloc_addr_c = ram + 0x8000000 * 3;
	long ret;
	phys_addr_t a, b, c, d, e;

	/* check for overflow */
	ut_assert(ram_end == 0 || ram_end > ram);

	ut_assertok(setup_lmb_test(uts, &store, &mem_lst, &used_lst));

	ret = lmb_add(ram, ram_size);
	ut_asserteq(ret, 0);

	/*
	 * Try to allocate a page twice. Re-allocating an LMB_NONE region
	 * with LMB_NONE succeeds (overwrite allowed), while requesting
	 * LMB_NOOVERWRITE over an existing region fails with -EEXIST.
	 */
	b = lmb_alloc_addr(alloc_addr_a, 0x1000, LMB_NONE);
	ut_asserteq(b, 0);
	b = lmb_alloc_addr(alloc_addr_a, 0x1000, LMB_NOOVERWRITE);
	ut_asserteq(b, -EEXIST);
	b = lmb_alloc_addr(alloc_addr_a, 0x1000, LMB_NONE);
	ut_asserteq(b, 0);
	b = lmb_alloc_addr(alloc_addr_a, 0x2000, LMB_NONE);
	ut_asserteq(b, 0);
	ret = lmb_free(alloc_addr_a, 0x2000);
	ut_asserteq(ret, 0);
	/* once reserved LMB_NOOVERWRITE, neither flag may re-reserve it */
	b = lmb_alloc_addr(alloc_addr_a, 0x1000, LMB_NOOVERWRITE);
	ut_asserteq(b, 0);
	b = lmb_alloc_addr(alloc_addr_a, 0x1000, LMB_NONE);
	ut_asserteq(b, -EEXIST);
	b = lmb_alloc_addr(alloc_addr_a, 0x1000, LMB_NOOVERWRITE);
	ut_asserteq(b, -EEXIST);
	ret = lmb_free(alloc_addr_a, 0x1000);
	ut_asserteq(ret, 0);

	/*
	 * Add two regions with different flags, region1 and region2 with
	 * a gap between them.
	 * Try adding another region, adjacent to region 1 and overlapping
	 * region 2. Should fail.
	 */
	a = lmb_alloc_addr(alloc_addr_a, 0x1000, LMB_NONE);
	ut_asserteq(a, 0);

	b = lmb_alloc_addr(alloc_addr_a + 0x4000, 0x1000, LMB_NOOVERWRITE);
	ut_asserteq(b, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, alloc_addr_a, 0x1000,
		   alloc_addr_a + 0x4000, 0x1000, 0, 0);

	c = lmb_alloc_addr(alloc_addr_a + 0x1000, 0x5000, LMB_NONE);
	ut_asserteq(c, -EEXIST);
	/* the failed request must leave the map unchanged */
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, alloc_addr_a, 0x1000,
		   alloc_addr_a + 0x4000, 0x1000, 0, 0);

	ret = lmb_free(alloc_addr_a, 0x1000);
	ut_asserteq(ret, 0);
	ret = lmb_free(alloc_addr_a + 0x4000, 0x1000);
	ut_asserteq(ret, 0);

	/*
	 * Add two regions with same flags(LMB_NONE), region1 and region2
	 * with a gap between them.
	 * Try adding another region, adjacent to region 1 and overlapping
	 * region 2. Should succeed. All regions should coalesce into a
	 * single region.
	 */
	a = lmb_alloc_addr(alloc_addr_a, 0x1000, LMB_NONE);
	ut_asserteq(a, 0);

	b = lmb_alloc_addr(alloc_addr_a + 0x4000, 0x1000, LMB_NONE);
	ut_asserteq(b, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, alloc_addr_a, 0x1000,
		   alloc_addr_a + 0x4000, 0x1000, 0, 0);

	c = lmb_alloc_addr(alloc_addr_a + 0x1000, 0x5000, LMB_NONE);
	ut_asserteq(c, 0);
	/* 0x1000 + 0x5000 bridges the gap: one 0x6000-byte region remains */
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, alloc_addr_a, 0x6000,
		   0, 0, 0, 0);

	ret = lmb_free(alloc_addr_a, 0x6000);
	ut_asserteq(ret, 0);

	/*
	 * Add two regions with same flags(LMB_NOOVERWRITE), region1 and
	 * region2 with a gap between them.
	 * Try adding another region, adjacent to region 1 and overlapping
	 * region 2. Should fail.
	 */
	a = lmb_alloc_addr(alloc_addr_a, 0x1000, LMB_NOOVERWRITE);
	ut_asserteq(a, 0);

	b = lmb_alloc_addr(alloc_addr_a + 0x4000, 0x1000, LMB_NOOVERWRITE);
	ut_asserteq(b, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, alloc_addr_a, 0x1000,
		   alloc_addr_a + 0x4000, 0x1000, 0, 0);

	c = lmb_alloc_addr(alloc_addr_a + 0x1000, 0x5000, LMB_NOOVERWRITE);
	ut_asserteq(c, -EEXIST);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, alloc_addr_a, 0x1000,
		   alloc_addr_a + 0x4000, 0x1000, 0, 0);

	ret = lmb_free(alloc_addr_a, 0x1000);
	ut_asserteq(ret, 0);
	ret = lmb_free(alloc_addr_a + 0x4000, 0x1000);
	ut_asserteq(ret, 0);

	/* reserve 3 blocks */
	ret = lmb_reserve(alloc_addr_a, 0x10000, LMB_NONE);
	ut_asserteq(ret, 0);
	ret = lmb_reserve(alloc_addr_b, 0x10000, LMB_NONE);
	ut_asserteq(ret, 0);
	ret = lmb_reserve(alloc_addr_c, 0x10000, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 3, alloc_addr_a, 0x10000,
		   alloc_addr_b, 0x10000, alloc_addr_c, 0x10000);

	/* allocate blocks filling the gaps; each merges with a neighbour */
	a = lmb_alloc_addr(ram, alloc_addr_a - ram, LMB_NONE);
	ut_asserteq(a, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 3, ram, 0x8010000,
		   alloc_addr_b, 0x10000, alloc_addr_c, 0x10000);
	b = lmb_alloc_addr(alloc_addr_a + 0x10000,
			   alloc_addr_b - alloc_addr_a - 0x10000, LMB_NONE);
	ut_asserteq(b, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, ram, 0x10010000,
		   alloc_addr_c, 0x10000, 0, 0);
	c = lmb_alloc_addr(alloc_addr_b + 0x10000,
			   alloc_addr_c - alloc_addr_b - 0x10000, LMB_NONE);
	ut_asserteq(c, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, ram, 0x18010000,
		   0, 0, 0, 0);
	d = lmb_alloc_addr(alloc_addr_c + 0x10000,
			   ram_end - alloc_addr_c - 0x10000, LMB_NONE);
	ut_asserteq(d, 0);
	/* the whole bank is now one reserved region */
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, ram, ram_size,
		   0, 0, 0, 0);

	/* allocating anything else should fail */
	e = lmb_alloc(1, 1);
	ut_asserteq(e, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, ram, ram_size,
		   0, 0, 0, 0);

	/* free the allocation from d */
	ret = lmb_free(alloc_addr_c + 0x10000, ram_end - alloc_addr_c - 0x10000);
	ut_asserteq(ret, 0);

	/* allocate at 3 points in free range */

	d = lmb_alloc_addr(ram_end - 4, 4, LMB_NONE);
	ut_asserteq(d, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, ram, 0x18010000,
		   ram_end - 4, 4, 0, 0);
	ret = lmb_free(ram_end - 4, 4);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, ram, 0x18010000,
		   0, 0, 0, 0);

	d = lmb_alloc_addr(ram_end - 128, 4, LMB_NONE);
	ut_asserteq(d, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, ram, 0x18010000,
		   ram_end - 128, 4, 0, 0);
	ret = lmb_free(ram_end - 128, 4);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, ram, 0x18010000,
		   0, 0, 0, 0);

	/* adjacent to the reserved region: merges instead of adding one */
	d = lmb_alloc_addr(alloc_addr_c + 0x10000, 4, LMB_NONE);
	ut_asserteq(d, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, ram, 0x18010004,
		   0, 0, 0, 0);
	ret = lmb_free(alloc_addr_c + 0x10000, 4);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, ram, 0x18010000,
		   0, 0, 0, 0);

	/* free the bottom part (allocated via a), then allocate at the RAM base */
	ret = lmb_free(ram, alloc_addr_a - ram);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, ram + 0x8000000,
		   0x10010000, 0, 0, 0, 0);

	d = lmb_alloc_addr(ram, 4, LMB_NONE);
	ut_asserteq(d, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, ram, 4,
		   ram + 0x8000000, 0x10010000, 0, 0);

	/* check that allocating outside memory fails */
	if (ram_end != 0) {
		ret = lmb_alloc_addr(ram_end, 1, LMB_NONE);
		ut_asserteq(ret, -EINVAL);
	}
	if (ram != 0) {
		ret = lmb_alloc_addr(ram - 1, 1, LMB_NONE);
		ut_asserteq(ret, -EINVAL);
	}

	lmb_pop(&store);

	return 0;
}
766
767static int lib_test_lmb_alloc_addr(struct unit_test_state *uts)
768{
769 int ret;
770
771 /* simulate 512 MiB RAM beginning at 1GiB */
772 ret = test_alloc_addr(uts, 0x40000000);
773 if (ret)
774 return ret;
775
776 /* simulate 512 MiB RAM beginning at 1.5GiB */
777 return test_alloc_addr(uts, 0xE0000000);
778}
Simon Glassb4c722a2023-10-01 19:15:21 -0600779LIB_TEST(lib_test_lmb_alloc_addr, 0);
Simon Goldschmidt7a6ee462019-01-14 22:38:18 +0100780
781/* Simulate 512 MiB RAM, reserve 3 blocks, check addresses in between */
static int test_get_unreserved_size(struct unit_test_state *uts,
				    const phys_addr_t ram)
{
	struct lmb store;
	struct alist *mem_lst, *used_lst;
	const phys_size_t ram_size = 0x20000000;
	const phys_addr_t ram_end = ram + ram_size;
	/* three reservation points at 128 MiB intervals inside the bank */
	const phys_size_t alloc_addr_a = ram + 0x8000000;
	const phys_size_t alloc_addr_b = ram + 0x8000000 * 2;
	const phys_size_t alloc_addr_c = ram + 0x8000000 * 3;
	long ret;
	phys_size_t s;

	/* check for overflow */
	ut_assert(ram_end == 0 || ram_end > ram);
	ut_assertok(setup_lmb_test(uts, &store, &mem_lst, &used_lst));

	ret = lmb_add(ram, ram_size);
	ut_asserteq(ret, 0);

	/* reserve 3 blocks */
	ret = lmb_reserve(alloc_addr_a, 0x10000, LMB_NONE);
	ut_asserteq(ret, 0);
	ret = lmb_reserve(alloc_addr_b, 0x10000, LMB_NONE);
	ut_asserteq(ret, 0);
	ret = lmb_reserve(alloc_addr_c, 0x10000, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 3, alloc_addr_a, 0x10000,
		   alloc_addr_b, 0x10000, alloc_addr_c, 0x10000);

	/*
	 * Check addresses in between blocks: lmb_get_free_size() must
	 * report the distance from the queried address to the start of
	 * the next reserved region (or the end of RAM).
	 */
	s = lmb_get_free_size(ram);
	ut_asserteq(s, alloc_addr_a - ram);
	s = lmb_get_free_size(ram + 0x10000);
	ut_asserteq(s, alloc_addr_a - ram - 0x10000);
	s = lmb_get_free_size(alloc_addr_a - 4);
	ut_asserteq(s, 4);

	/* gap between the first and second reserved blocks */
	s = lmb_get_free_size(alloc_addr_a + 0x10000);
	ut_asserteq(s, alloc_addr_b - alloc_addr_a - 0x10000);
	s = lmb_get_free_size(alloc_addr_a + 0x20000);
	ut_asserteq(s, alloc_addr_b - alloc_addr_a - 0x20000);
	s = lmb_get_free_size(alloc_addr_b - 4);
	ut_asserteq(s, 4);

	/* tail of RAM after the last reserved block */
	s = lmb_get_free_size(alloc_addr_c + 0x10000);
	ut_asserteq(s, ram_end - alloc_addr_c - 0x10000);
	s = lmb_get_free_size(alloc_addr_c + 0x20000);
	ut_asserteq(s, ram_end - alloc_addr_c - 0x20000);
	s = lmb_get_free_size(ram_end - 4);
	ut_asserteq(s, 4);

	lmb_pop(&store);

	return 0;
}
838
Simon Goldschmidt7510a562019-01-21 20:29:55 +0100839static int lib_test_lmb_get_free_size(struct unit_test_state *uts)
Simon Goldschmidt7a6ee462019-01-14 22:38:18 +0100840{
841 int ret;
842
843 /* simulate 512 MiB RAM beginning at 1GiB */
844 ret = test_get_unreserved_size(uts, 0x40000000);
845 if (ret)
846 return ret;
847
848 /* simulate 512 MiB RAM beginning at 1.5GiB */
849 return test_get_unreserved_size(uts, 0xE0000000);
850}
Simon Glassb4c722a2023-10-01 19:15:21 -0600851LIB_TEST(lib_test_lmb_get_free_size, 0);
Patrick Delaunay1fe3adc2021-03-10 10:16:30 +0100852
/* Check LMB_NOMAP/LMB_NONE flag handling: re-reservation and region merging */
static int lib_test_lmb_flags(struct unit_test_state *uts)
{
	struct lmb store;
	struct lmb_region *mem, *used;
	struct alist *mem_lst, *used_lst;
	const phys_addr_t ram = 0x40000000;
	const phys_size_t ram_size = 0x20000000;
	long ret;

	ut_assertok(setup_lmb_test(uts, &store, &mem_lst, &used_lst));
	/*
	 * NOTE(review): these cache the alist backing arrays; assumes the
	 * lists are not reallocated while the test runs — confirm.
	 */
	mem = mem_lst->data;
	used = used_lst->data;

	ret = lmb_add(ram, ram_size);
	ut_asserteq(ret, 0);

	/* reserve, same flag */
	ret = lmb_reserve(0x40010000, 0x10000, LMB_NOMAP);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, 0x40010000, 0x10000,
		   0, 0, 0, 0);

	/* reserve again, same flag: rejected, map unchanged */
	ret = lmb_reserve(0x40010000, 0x10000, LMB_NOMAP);
	ut_asserteq(ret, -EEXIST);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, 0x40010000, 0x10000,
		   0, 0, 0, 0);

	/* reserve again, new flag: also rejected, flags cannot be changed */
	ret = lmb_reserve(0x40010000, 0x10000, LMB_NONE);
	ut_asserteq(ret, -EEXIST);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, 0x40010000, 0x10000,
		   0, 0, 0, 0);

	ut_asserteq(lmb_is_nomap(&used[0]), 1);

	/* merge after */
	ret = lmb_reserve(0x40020000, 0x10000, LMB_NOMAP);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, 0x40010000, 0x20000,
		   0, 0, 0, 0);

	/* merge before */
	ret = lmb_reserve(0x40000000, 0x10000, LMB_NOMAP);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 1, 0x40000000, 0x30000,
		   0, 0, 0, 0);

	ut_asserteq(lmb_is_nomap(&used[0]), 1);

	/* different flag adjacent to a LMB_NOMAP region: no merge */
	ret = lmb_reserve(0x40030000, 0x10000, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, 0x40000000, 0x30000,
		   0x40030000, 0x10000, 0, 0);

	ut_asserteq(lmb_is_nomap(&used[0]), 1);
	ut_asserteq(lmb_is_nomap(&used[1]), 0);

	/* check that the old API reserves with LMB_NONE (merges with above) */
	ret = lmb_reserve(0x40040000, 0x10000, LMB_NONE);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 2, 0x40000000, 0x30000,
		   0x40030000, 0x20000, 0, 0);

	ut_asserteq(lmb_is_nomap(&used[0]), 1);
	ut_asserteq(lmb_is_nomap(&used[1]), 0);

	/* two disjoint LMB_NOMAP regions with a one-block gap between them */
	ret = lmb_reserve(0x40070000, 0x10000, LMB_NOMAP);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 3, 0x40000000, 0x30000,
		   0x40030000, 0x20000, 0x40070000, 0x10000);

	ret = lmb_reserve(0x40050000, 0x10000, LMB_NOMAP);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 4, 0x40000000, 0x30000,
		   0x40030000, 0x20000, 0x40050000, 0x10000);

	/* merge with 2 adjacent regions */
	ret = lmb_reserve(0x40060000, 0x10000, LMB_NOMAP);
	ut_asserteq(ret, 0);
	ASSERT_LMB(mem_lst, used_lst, ram, ram_size, 3, 0x40000000, 0x30000,
		   0x40030000, 0x20000, 0x40050000, 0x30000);

	ut_asserteq(lmb_is_nomap(&used[0]), 1);
	ut_asserteq(lmb_is_nomap(&used[1]), 0);
	ut_asserteq(lmb_is_nomap(&used[2]), 1);

	lmb_pop(&store);

	return 0;
}
LIB_TEST(lib_test_lmb_flags, 0);