soc: nordic: common: dmm: fix region alignment getter

Getting the required alignment size for a memory region node
and for a device node needs to be handled by separate macros.
Otherwise an alignment of a single byte is reported for any region.
Add a test that checks for this particular issue.

Signed-off-by: Nikodem Kastelik <nikodem.kastelik@nordicsemi.no>
Nikodem Kastelik 2024-07-08 12:13:13 +02:00 committed by Anas Nashif
parent a575d9bc31
commit c0d508a142
3 changed files with 21 additions and 10 deletions
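
For context, the failure mode being fixed: the old DMM_ALIGN_SIZE() dereferenced the memory-regions phandle of whatever node it was given, so when a memory region node was passed directly (as the region table below does) the phandle lookup found nothing, the cacheable check evaluated to 0, and the reported alignment collapsed to sizeof(uint8_t). A minimal sketch of the intended split after the fix; the node labels are hypothetical and assume a cacheable region referenced by a device through its memory-regions property:

/* Sketch only: cacheable_ram is a hypothetical region node carrying a
 * cacheable zephyr,memory-attr; my_dev is a hypothetical device whose
 * memory-regions phandle points at it. Macros come from the dmm.h
 * changes shown below.
 */
#define MY_REGION DT_NODELABEL(cacheable_ram)
#define MY_DEV    DT_NODELABEL(my_dev)

/* Region node: reads zephyr,memory-attr on the node itself, so a
 * cacheable region now yields CONFIG_DCACHE_LINE_SIZE instead of 1. */
static uint8_t region_buf[64] __aligned(DMM_REG_ALIGN_SIZE(MY_REGION));

/* Device node: dereferences the memory-regions phandle first, then
 * applies the same region-level check. */
static uint8_t device_buf[64] __aligned(DMM_ALIGN_SIZE(MY_DEV));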

File 1 of 3

@@ -23,7 +23,7 @@
 {.dt_addr = DT_REG_ADDR(node_id), \
  .dt_size = DT_REG_SIZE(node_id), \
  .dt_attr = DT_PROP(node_id, zephyr_memory_attr), \
- .dt_align = DMM_ALIGN_SIZE(node_id), \
+ .dt_align = DMM_REG_ALIGN_SIZE(node_id), \
  .dt_allc = &_BUILD_LINKER_END_VAR(node_id)},

 /* Generate declarations of linker variables used to determine size of preallocated variables

File 2 of 3

@@ -23,19 +23,24 @@ extern "C" {

 /** @cond INTERNAL_HIDDEN */

-/* Determine if memory region for the peripheral is cacheable. */
+/* Determine if memory region is cacheable. */
 #define DMM_IS_REG_CACHEABLE(node_id) \
 	COND_CODE_1(CONFIG_DCACHE, \
-		(COND_CODE_1(DT_NODE_HAS_PROP(DT_PHANDLE(node_id, memory_regions), zephyr_memory_attr), \
-			(DT_PROP(DT_PHANDLE(node_id, memory_regions), zephyr_memory_attr) & DT_MEM_CACHEABLE), \
+		(COND_CODE_1(DT_NODE_HAS_PROP(node_id, zephyr_memory_attr), \
+			((DT_PROP(node_id, zephyr_memory_attr) & DT_MEM_CACHEABLE)), \
 			(0))), (0))

-/* Determine required alignment of the static buffers in memory regions. Cache line alignment is
- * required if region is cacheable and data cache is enabled.
+/* Determine required alignment of the data buffers in specified memory region.
+ * Cache line alignment is required if region is cacheable and data cache is enabled.
  */
-#define DMM_ALIGN_SIZE(node_id) \
+#define DMM_REG_ALIGN_SIZE(node_id) \
 	(DMM_IS_REG_CACHEABLE(node_id) ? CONFIG_DCACHE_LINE_SIZE : sizeof(uint8_t))

+/* Determine required alignment of the data buffers in memory region
+ * associated with specified device node.
+ */
+#define DMM_ALIGN_SIZE(node_id) DMM_REG_ALIGN_SIZE(DT_PHANDLE(node_id, memory_regions))
+
 /**
  * @brief Get reference to memory region associated with the specified device node
  *
@@ -46,6 +51,7 @@ extern "C" {
 #define DMM_DEV_TO_REG(node_id) \
 	COND_CODE_1(DT_NODE_HAS_PROP(node_id, memory_regions), \
 		((void *)DT_REG_ADDR(DT_PHANDLE(node_id, memory_regions))), (NULL))
+
 /**
  * @brief Preallocate buffer in memory region associated with the specified device node
  *
@@ -55,7 +61,7 @@ extern "C" {
 	COND_CODE_1(DT_NODE_HAS_PROP(node_id, memory_regions), \
 		(__attribute__((__section__(LINKER_DT_NODE_REGION_NAME( \
 			DT_PHANDLE(node_id, memory_regions))))) \
 		__aligned(DMM_ALIGN_SIZE(node_id))), \
 		())

 #ifdef CONFIG_HAS_NORDIC_DMM
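
As a usage note, a driver-side sketch of how the reworked macros are meant to be consumed; uart_dev is a hypothetical node label standing in for any device whose memory-regions phandle names a DMM region:

/* Hypothetical device node; its memory-regions phandle selects the
 * region the driver must place its DMA buffers in. */
#define UART_NODE DT_NODELABEL(uart_dev)

/* Buffer carrying the alignment the region actually requires:
 * CONFIG_DCACHE_LINE_SIZE for a cacheable region, 1 otherwise. */
static uint8_t uart_tx_buf[64] __aligned(DMM_ALIGN_SIZE(UART_NODE));

/* Region reference resolved at compile time; NULL when the device
 * has no memory-regions property at all. */
static void *uart_mem_reg = DMM_DEV_TO_REG(UART_NODE);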

File 3 of 3

@@ -23,6 +23,11 @@
 	COND_CODE_1(DT_NODE_HAS_PROP(node_id, memory_regions), \
 		(DT_REG_SIZE(DT_PHANDLE(node_id, memory_regions))), (0))
+
+#if CONFIG_DCACHE
+BUILD_ASSERT(DMM_ALIGN_SIZE(DUT_CACHE) == CONFIG_DCACHE_LINE_SIZE);
+BUILD_ASSERT(DMM_ALIGN_SIZE(DUT_NOCACHE) == 1);
+#endif

 struct dmm_test_region {
 	void *mem_reg;
 	uintptr_t start;
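
The added BUILD_ASSERTs target exactly the reported failure mode: DUT_CACHE and DUT_NOCACHE are presumably device handles backed by a cacheable and a non-cacheable memory region respectively, so the first must now resolve to CONFIG_DCACHE_LINE_SIZE while the second stays at 1. The checks are guarded by CONFIG_DCACHE because without a data cache both expressions legitimately collapse to 1-byte alignment.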