/*
 * Copyright (c) 2023 Carlo Caione <[email protected]>
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#include <zephyr/kernel.h>
#include <zephyr/ztest.h>
#include <zephyr/mem_mgmt/mem_attr_heap.h>
#include <zephyr/dt-bindings/memory-attr/memory-attr-sw.h>

/* Base addresses of the memory regions defined in the test devicetree. */
#define ADDR_MEM_CACHE		DT_REG_ADDR(DT_NODELABEL(mem_cache))
#define ADDR_MEM_CACHE_SW	DT_REG_ADDR(DT_NODELABEL(mem_cache_sw))
#define ADDR_MEM_NON_CACHE_SW	DT_REG_ADDR(DT_NODELABEL(mem_noncache_sw))
#define ADDR_MEM_DMA_SW		DT_REG_ADDR(DT_NODELABEL(mem_dma_sw))
#define ADDR_MEM_CACHE_BIG_SW	DT_REG_ADDR(DT_NODELABEL(mem_cache_sw_big))
#define ADDR_MEM_CACHE_DMA_SW	DT_REG_ADDR(DT_NODELABEL(mem_cache_cache_dma_multi))

ZTEST(mem_attr_heap, test_mem_attr_heap)
{
	const struct mem_attr_region_t *region;
	void *block, *old_block;
	int ret;

	/*
	 * Init the pool.
	 */
	ret = mem_attr_heap_pool_init();
	zassert_equal(0, ret, "Failed initialization");

	/*
	 * Any subsequent initialization should fail.
	 */
	ret = mem_attr_heap_pool_init();
	zassert_equal(-EALREADY, ret, "Second initialization should fail");

	/*
	 * Allocate 0x100 bytes of cacheable memory.
	 */
	block = mem_attr_heap_alloc(DT_MEM_SW_ALLOC_CACHE, 0x100);
	zassert_not_null(block, "Failed to allocate memory");

	/*
	 * Check that the memory was allocated from the correct region.
	 */
	region = mem_attr_heap_get_region(block);
	zassert_equal(region->dt_addr, ADDR_MEM_CACHE_SW,
		      "Memory allocated from the wrong region");

	/*
	 * Allocate 0x100 bytes of non-cacheable memory.
	 */
	block = mem_attr_heap_alloc(DT_MEM_SW_ALLOC_NON_CACHE, 0x100);
	zassert_not_null(block, "Failed to allocate memory");

	/*
	 * Check that the memory was allocated from the correct region.
	 */
	region = mem_attr_heap_get_region(block);
	zassert_equal(region->dt_addr, ADDR_MEM_NON_CACHE_SW,
		      "Memory allocated from the wrong region");

	/*
	 * Allocate 0x100 bytes of DMA memory.
	 */
	block = mem_attr_heap_alloc(DT_MEM_SW_ALLOC_DMA, 0x100);
	zassert_not_null(block, "Failed to allocate memory");

	/*
	 * Check that the memory was allocated from the correct region.
	 */
	region = mem_attr_heap_get_region(block);
	zassert_equal(region->dt_addr, ADDR_MEM_DMA_SW,
		      "Memory allocated from the wrong region");

	/*
	 * Allocate 0x100 bytes of cacheable and DMA memory.
	 */
	block = mem_attr_heap_alloc(DT_MEM_SW_ALLOC_CACHE | DT_MEM_SW_ALLOC_DMA, 0x100);
	zassert_not_null(block, "Failed to allocate memory");

	/*
	 * Check that the memory was allocated from the correct region
	 * (CACHE + DMA, not just CACHE or just DMA).
	 */
	region = mem_attr_heap_get_region(block);
	zassert_equal(region->dt_addr, ADDR_MEM_CACHE_DMA_SW,
		      "Memory allocated from the wrong region");

	/*
	 * Try to allocate memory with a non-existent attribute.
	 */
	block = mem_attr_heap_alloc(DT_MEM_SW(DT_MEM_SW_ATTR_UNKNOWN), 0x100);
	zassert_is_null(block, "Memory allocated with non-existent attribute");

	/*
	 * Allocate a buffer too big to fit in the first cacheable memory
	 * region. It should be allocated from the second, bigger, cacheable
	 * region instead.
	 */
	block = mem_attr_heap_alloc(DT_MEM_SW_ALLOC_CACHE, 0x1500);
	zassert_not_null(block, "Failed to allocate memory");

	/*
	 * Check that the memory was allocated from the correct (bigger)
	 * cacheable region.
	 */
	region = mem_attr_heap_get_region(block);
	zassert_equal(region->dt_addr, ADDR_MEM_CACHE_BIG_SW,
		      "Memory allocated from the wrong region");

	/*
	 * Try to allocate a buffer too big for any region.
	 */
	block = mem_attr_heap_alloc(DT_MEM_SW_ALLOC_CACHE, 0x4000);
	zassert_is_null(block, "Oversized buffer was unexpectedly allocated");

	/*
	 * Check if the memory is correctly released and can be reused.
	 */
	block = mem_attr_heap_alloc(DT_MEM_SW_ALLOC_CACHE, 0x100);
	old_block = block;
	mem_attr_heap_free(block);
	block = mem_attr_heap_alloc(DT_MEM_SW_ALLOC_CACHE, 0x100);
	zassert_equal_ptr(old_block, block, "Memory not correctly released");

	/*
	 * Check that the memory is correctly aligned when explicitly
	 * requested. At this point the plain allocation is not expected to
	 * land on a 32-byte boundary, while the aligned allocation must.
	 */
	block = mem_attr_heap_alloc(DT_MEM_SW_ALLOC_NON_CACHE, 0x100);
	zassert_true(((uintptr_t) block % 32 != 0), "Memory unexpectedly aligned");
	mem_attr_heap_free(block);
	block = mem_attr_heap_aligned_alloc(DT_MEM_SW_ALLOC_NON_CACHE, 0x100, 32);
	zassert_true(((uintptr_t) block % 32 == 0), "Memory not aligned to 32 bytes");

	/*
	 * Try with a different alignment.
	 */
	block = mem_attr_heap_aligned_alloc(DT_MEM_SW_ALLOC_NON_CACHE, 0x100, 64);
	zassert_true(((uintptr_t) block % 64 == 0), "Memory not aligned to 64 bytes");
}

ZTEST_SUITE(mem_attr_heap, NULL, NULL, NULL, NULL, NULL);
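
For context, below is a minimal sketch of how application code might use the same heap API outside of ztest. It assumes the devicetree tags at least one region with DT_MEM_SW_ALLOC_DMA (as the test's overlay, not shown in this diff, does for the node behind ADDR_MEM_DMA_SW); the function name and log message are illustrative, not part of this patch. Attributes can also be OR-ed, as the CACHE | DMA case in the test shows.

#include <errno.h>

#include <zephyr/kernel.h>
#include <zephyr/mem_mgmt/mem_attr_heap.h>
#include <zephyr/dt-bindings/memory-attr/memory-attr-sw.h>

/* Illustrative application-side usage of the API exercised above: initialize
 * the pool once, allocate a DMA-capable buffer, inspect the region it came
 * from, and release it when done.
 */
int dma_buffer_example(void)
{
	const struct mem_attr_region_t *region;
	void *buf;
	int ret;

	/* The pool must be initialized before any allocation; tolerate a
	 * second call having already done it.
	 */
	ret = mem_attr_heap_pool_init();
	if (ret != 0 && ret != -EALREADY) {
		return ret;
	}

	/* Request 256 bytes from a region tagged with the DMA software attribute. */
	buf = mem_attr_heap_alloc(DT_MEM_SW_ALLOC_DMA, 256);
	if (buf == NULL) {
		return -ENOMEM;
	}

	/* The backing region can be queried to find out where the block lives. */
	region = mem_attr_heap_get_region(buf);
	printk("DMA buffer allocated from region at 0x%lx\n",
	       (unsigned long)region->dt_addr);

	mem_attr_heap_free(buf);

	return 0;
}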