diff --git a/components/app_trace/heap_trace_tohost.c b/components/app_trace/heap_trace_tohost.c
index fb19aea6f1..7a52294f5c 100644
--- a/components/app_trace/heap_trace_tohost.c
+++ b/components/app_trace/heap_trace_tohost.c
@@ -8,7 +8,7 @@
 #define HEAP_TRACE_SRCFILE /* don't warn on inclusion here */
 #include "esp_heap_trace.h"
 #undef HEAP_TRACE_SRCFILE
-
+#include "esp_heap_caps.h"
 #if CONFIG_APPTRACE_SV_ENABLE
 #include "esp_app_trace.h"
 #include "esp_sysview_trace.h"
@@ -85,7 +85,7 @@ void heap_trace_dump_caps(__attribute__((unused)) const uint32_t caps)
 }
 
 /* Add a new allocation to the heap trace records */
-static IRAM_ATTR void record_allocation(const heap_trace_record_t *record)
+static HEAP_IRAM_ATTR void record_allocation(const heap_trace_record_t *record)
 {
     if (!s_tracing) {
         return;
@@ -100,7 +100,7 @@ static IRAM_ATTR void record_allocation(const heap_trace_record_t *record)
    For HEAP_TRACE_ALL, this means filling in the freed_by pointer.
    For HEAP_TRACE_LEAKS, this means removing the record from the log.
 */
-static IRAM_ATTR void record_free(void *p, void **callers)
+static HEAP_IRAM_ATTR void record_free(void *p, void **callers)
 {
     if (!s_tracing) {
         return;
diff --git a/components/driver/Kconfig b/components/driver/Kconfig
index a8ff75a3ae..ce13140885 100644
--- a/components/driver/Kconfig
+++ b/components/driver/Kconfig
@@ -85,10 +85,15 @@ menu "Driver Configurations"
     config SPI_MASTER_ISR_IN_IRAM
         bool "Place SPI master ISR function into IRAM"
         default y
+        depends on !HEAP_PLACE_FUNCTION_INTO_FLASH
         select PERIPH_CTRL_FUNC_IN_IRAM
         help
            Place the SPI master ISR in to IRAM to avoid possible cache miss.
+           Enabling this option is only possible when HEAP_PLACE_FUNCTION_INTO_FLASH is
+           disabled, since the SPI master driver may allocate transaction buffers in DMA-capable
+           memory through the heap component API, which therefore has to be placed in IRAM.
+
            Also you can forbid the ISR being disabled during flash writing access, by add
            ESP_INTR_FLAG_IRAM when initializing the driver.
diff --git a/components/heap/Kconfig b/components/heap/Kconfig
index 170e720668..8e65b6cca7 100644
--- a/components/heap/Kconfig
+++ b/components/heap/Kconfig
@@ -113,4 +113,13 @@ menu "Heap memory debugging"
             features will be added and bugs will be fixed in the IDF source
             but cannot be synced to ROM.
 
+    config HEAP_PLACE_FUNCTION_INTO_FLASH
+        bool "Force the entire heap component to be placed in flash memory"
+        depends on !HEAP_TLSF_USE_ROM_IMPL
+        default n
+        help
+            Enable this flag to save RAM by placing the entire heap component in flash memory.
+
+            Note that it is only safe to enable this configuration if no functions from esp_heap_caps.h
+            or esp_heap_trace.h are called from an ISR.
 endmenu
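For illustration only (not part of the patch): the restriction spelled out in the two Kconfig help texts means that, once CONFIG_HEAP_PLACE_FUNCTION_INTO_FLASH is enabled, heap_caps_* functions may execute from flash and must not be called from an ISR that can run while the flash cache is disabled. A common workaround, sketched below with hypothetical names (s_free_queue, my_isr_handler, free_task), is to defer the heap call to a task through a FreeRTOS queue.

    /* Sketch only, not part of the patch: defer heap work from an ISR to a task,
     * since heap functions may live in flash when HEAP_PLACE_FUNCTION_INTO_FLASH=y. */
    #include "freertos/FreeRTOS.h"
    #include "freertos/queue.h"
    #include "esp_attr.h"
    #include "esp_heap_caps.h"

    /* Hypothetical queue of pointers to free, created elsewhere with
     * xQueueCreate(8, sizeof(void *)). */
    static QueueHandle_t s_free_queue;

    /* ISR: do NOT call heap_caps_free() here; hand the pointer to a task instead. */
    static void IRAM_ATTR my_isr_handler(void *arg)
    {
        void *stale_buf = arg;
        BaseType_t higher_prio_woken = pdFALSE;
        xQueueSendFromISR(s_free_queue, &stale_buf, &higher_prio_woken);
        if (higher_prio_woken) {
            portYIELD_FROM_ISR();
        }
    }

    /* Task context: heap functions are safe to call here regardless of placement. */
    static void free_task(void *arg)
    {
        void *p;
        for (;;) {
            if (xQueueReceive(s_free_queue, &p, portMAX_DELAY) == pdTRUE) {
                heap_caps_free(p);
            }
        }
    }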
diff --git a/components/heap/heap_caps.c b/components/heap/heap_caps.c
index 70848f7b0a..9df5fb212e 100644
--- a/components/heap/heap_caps.c
+++ b/components/heap/heap_caps.c
@@ -43,14 +43,14 @@ possible. This should optimize the amount of RAM accessible to the code without
 static esp_alloc_failed_hook_t alloc_failed_callback;
 
 #ifdef CONFIG_HEAP_ABORT_WHEN_ALLOCATION_FAILS
-IRAM_ATTR static void hex_to_str(char buf[8], uint32_t n)
+HEAP_IRAM_ATTR static void hex_to_str(char buf[8], uint32_t n)
 {
     for (int i = 0; i < 8; i++) {
         uint8_t b4 = (n >> (28 - i * 4)) & 0b1111;
         buf[i] = b4 <= 9 ? '0' + b4 : 'a' + b4 - 10;
     }
 }
-IRAM_ATTR static void fmt_abort_str(char dest[48], size_t size, uint32_t caps)
+HEAP_IRAM_ATTR static void fmt_abort_str(char dest[48], size_t size, uint32_t caps)
 {
     char sSize[8];
     char sCaps[8];
@@ -67,7 +67,7 @@ IRAM_ATTR static void fmt_abort_str(char dest[48], size_t size, uint32_t caps)
    IRAM in such a way that it can be later freed. It assumes both the address as well as the length to be word-aligned.
    It returns a region that's 1 word smaller than the region given because it stores the original Dram address there.
 */
-IRAM_ATTR static void *dram_alloc_to_iram_addr(void *addr, size_t len)
+HEAP_IRAM_ATTR static void *dram_alloc_to_iram_addr(void *addr, size_t len)
 {
     uintptr_t dstart = (uintptr_t)addr; //First word
     uintptr_t dend __attribute__((unused)) = dstart + len - 4; //Last word
@@ -84,7 +84,7 @@ IRAM_ATTR static void *dram_alloc_to_iram_addr(void *addr, size_t len)
     return iptr + 1;
 }
 
-IRAM_ATTR NOINLINE_ATTR static void heap_caps_alloc_failed(size_t requested_size, uint32_t caps, const char *function_name)
+HEAP_IRAM_ATTR NOINLINE_ATTR static void heap_caps_alloc_failed(size_t requested_size, uint32_t caps, const char *function_name)
 {
     if (alloc_failed_callback) {
         alloc_failed_callback(requested_size, caps, function_name);
@@ -118,7 +118,7 @@ bool heap_caps_match(const heap_t *heap, uint32_t caps)
     This function should not be called directly as it does not
     check for failure / call heap_caps_alloc_failed()
 */
-IRAM_ATTR static void *heap_caps_malloc_base( size_t size, uint32_t caps)
+HEAP_IRAM_ATTR static void *heap_caps_malloc_base( size_t size, uint32_t caps)
 {
     void *ret = NULL;
 
@@ -192,7 +192,7 @@ IRAM_ATTR static void *heap_caps_malloc_base( size_t size, uint32_t caps)
 /*
 Routine to allocate a bit of memory with certain capabilities.
 caps is a bitfield of MALLOC_CAP_* bits.
 */
-IRAM_ATTR void *heap_caps_malloc( size_t size, uint32_t caps){
+HEAP_IRAM_ATTR void *heap_caps_malloc( size_t size, uint32_t caps){
 
     void* ptr = heap_caps_malloc_base(size, caps);
 
@@ -217,7 +217,7 @@ void heap_caps_malloc_extmem_enable(size_t limit)
 /*
  Default memory allocation implementation. Should return standard 8-bit memory. malloc() essentially resolves to this function.
 */
-IRAM_ATTR void *heap_caps_malloc_default( size_t size )
+HEAP_IRAM_ATTR void *heap_caps_malloc_default( size_t size )
 {
     if (malloc_alwaysinternal_limit==MALLOC_DISABLE_EXTERNAL_ALLOCS) {
         return heap_caps_malloc( size, MALLOC_CAP_DEFAULT | MALLOC_CAP_INTERNAL);
@@ -250,7 +250,7 @@ IRAM_ATTR void *heap_caps_malloc_default( size_t size )
 Same for realloc()
 Note: keep the logic in here the same as in heap_caps_malloc_default (or merge the two as soon as this gets more complex...)
 */
-IRAM_ATTR void *heap_caps_realloc_default( void *ptr, size_t size )
+HEAP_IRAM_ATTR void *heap_caps_realloc_default( void *ptr, size_t size )
 {
     if (malloc_alwaysinternal_limit==MALLOC_DISABLE_EXTERNAL_ALLOCS) {
         return heap_caps_realloc( ptr, size, MALLOC_CAP_DEFAULT | MALLOC_CAP_INTERNAL );
@@ -282,7 +282,7 @@ IRAM_ATTR void *heap_caps_realloc_default( void *ptr, size_t size )
 /*
 Memory allocation as preference in decreasing order.
 */
-IRAM_ATTR void *heap_caps_malloc_prefer( size_t size, size_t num, ... )
+HEAP_IRAM_ATTR void *heap_caps_malloc_prefer( size_t size, size_t num, ... )
 {
     va_list argp;
     va_start( argp, num );
@@ -306,7 +306,7 @@ IRAM_ATTR void *heap_caps_malloc_prefer( size_t size, size_t num, ... )
 /*
 Memory reallocation as preference in decreasing order.
 */
-IRAM_ATTR void *heap_caps_realloc_prefer( void *ptr, size_t size, size_t num, ... )
+HEAP_IRAM_ATTR void *heap_caps_realloc_prefer( void *ptr, size_t size, size_t num, ... )
 {
     va_list argp;
     va_start( argp, num );
@@ -330,7 +330,7 @@ IRAM_ATTR void *heap_caps_realloc_prefer( void *ptr, size_t size, size_t num, ..
 /*
 Memory callocation as preference in decreasing order.
 */
-IRAM_ATTR void *heap_caps_calloc_prefer( size_t n, size_t size, size_t num, ... )
+HEAP_IRAM_ATTR void *heap_caps_calloc_prefer( size_t n, size_t size, size_t num, ... )
 {
     va_list argp;
     va_start( argp, num );
@@ -357,7 +357,7 @@ IRAM_ATTR void *heap_caps_calloc_prefer( size_t n, size_t size, size_t num, ...
 (This confirms if ptr is inside the heap's region, doesn't confirm if 'ptr'
  is an allocated block or is some other random address inside the heap.)
 */
-IRAM_ATTR static heap_t *find_containing_heap(void *ptr )
+HEAP_IRAM_ATTR static heap_t *find_containing_heap(void *ptr )
 {
     intptr_t p = (intptr_t)ptr;
     heap_t *heap;
@@ -369,7 +369,7 @@ IRAM_ATTR static heap_t *find_containing_heap(void *ptr )
     return NULL;
 }
 
-IRAM_ATTR void heap_caps_free( void *ptr)
+HEAP_IRAM_ATTR void heap_caps_free( void *ptr)
 {
     if (ptr == NULL) {
         return;
@@ -394,7 +394,7 @@ IRAM_ATTR void heap_caps_free( void *ptr)
     This function should not be called directly as it does not
     check for failure / call heap_caps_alloc_failed()
 */
-IRAM_ATTR static void *heap_caps_realloc_base( void *ptr, size_t size, uint32_t caps)
+HEAP_IRAM_ATTR static void *heap_caps_realloc_base( void *ptr, size_t size, uint32_t caps)
 {
     bool ptr_in_diram_case = false;
     heap_t *heap = NULL;
@@ -469,7 +469,7 @@ IRAM_ATTR static void *heap_caps_realloc_base( void *ptr, size_t size, uint32_t
     return NULL;
 }
 
-IRAM_ATTR void *heap_caps_realloc( void *ptr, size_t size, uint32_t caps)
+HEAP_IRAM_ATTR void *heap_caps_realloc( void *ptr, size_t size, uint32_t caps)
 {
     ptr = heap_caps_realloc_base(ptr, size, caps);
 
@@ -485,7 +485,7 @@ IRAM_ATTR void *heap_caps_realloc( void *ptr, size_t size, uint32_t caps)
     This function should not be called directly as it does not
     check for failure / call heap_caps_alloc_failed()
 */
-IRAM_ATTR static void *heap_caps_calloc_base( size_t n, size_t size, uint32_t caps)
+HEAP_IRAM_ATTR static void *heap_caps_calloc_base( size_t n, size_t size, uint32_t caps)
 {
     void *result;
     size_t size_bytes;
@@ -501,7 +501,7 @@ IRAM_ATTR static void *heap_caps_calloc_base( size_t n, size_t size, uint32_t ca
     return result;
 }
 
-IRAM_ATTR void *heap_caps_calloc( size_t n, size_t size, uint32_t caps)
+HEAP_IRAM_ATTR void *heap_caps_calloc( size_t n, size_t size, uint32_t caps)
 {
     void* ptr = heap_caps_calloc_base(n, size, caps);
 
@@ -655,7 +655,7 @@ size_t heap_caps_get_allocated_size( void *ptr )
     return size;
 }
 
-IRAM_ATTR void *heap_caps_aligned_alloc(size_t alignment, size_t size, uint32_t caps)
+HEAP_IRAM_ATTR void *heap_caps_aligned_alloc(size_t alignment, size_t size, uint32_t caps)
 {
     void *ret = NULL;
 
@@ -708,7 +708,7 @@ IRAM_ATTR void *heap_caps_aligned_alloc(size_t alignment, size_t size, uint32_t
     return NULL;
 }
 
-IRAM_ATTR void heap_caps_aligned_free(void *ptr)
+HEAP_IRAM_ATTR void heap_caps_aligned_free(void *ptr)
 {
     heap_caps_free(ptr);
 }
diff --git a/components/heap/heap_trace_standalone.c b/components/heap/heap_trace_standalone.c
index 7c03a9f93e..d45ff65a34 100644
--- a/components/heap/heap_trace_standalone.c
+++ b/components/heap/heap_trace_standalone.c
@@ -10,14 +10,13 @@
 #define HEAP_TRACE_SRCFILE /* don't warn on inclusion here */
 #include "esp_heap_trace.h"
 #undef HEAP_TRACE_SRCFILE
-
+#include "esp_heap_caps.h"
 #include "esp_attr.h"
 #include "freertos/FreeRTOS.h"
 #include "freertos/task.h"
 #include "esp_memory_utils.h"
 #include "sys/queue.h"
-
 #define STACK_DEPTH CONFIG_HEAP_TRACING_STACK_DEPTH
 
 #if CONFIG_HEAP_TRACING_STANDALONE
@@ -89,7 +88,7 @@ static heap_trace_hash_list_t hash_map[(size_t)CONFIG_HEAP_TRACE_HASH_MAP_SIZE];
 static size_t total_hashmap_hits;
 static size_t total_hashmap_miss;
 
-static size_t hash_idx(void* p)
+static HEAP_IRAM_ATTR size_t hash_idx(void* p)
 {
     static const uint32_t fnv_prime = 16777619UL; // expression 2^24 + 2^8 + 0x93 (32 bits size)
     // since all the addresses are 4 bytes aligned, computing address * fnv_prime always gives
@@ -100,19 +99,19 @@ static size_t hash_idx(void* p)
             ((uint32_t)p >> 7)) * fnv_prime) % (uint32_t)CONFIG_HEAP_TRACE_HASH_MAP_SIZE;
 }
 
-static void map_add(heap_trace_record_t *r_add)
+static HEAP_IRAM_ATTR void map_add(heap_trace_record_t *r_add)
 {
     size_t idx = hash_idx(r_add->address);
     TAILQ_INSERT_TAIL(&hash_map[idx], r_add, tailq_hashmap);
 }
 
-static void map_remove(heap_trace_record_t *r_remove)
+static HEAP_IRAM_ATTR void map_remove(heap_trace_record_t *r_remove)
 {
     size_t idx = hash_idx(r_remove->address);
     TAILQ_REMOVE(&hash_map[idx], r_remove, tailq_hashmap);
 }
 
-static heap_trace_record_t* map_find(void *p)
+static HEAP_IRAM_ATTR heap_trace_record_t* map_find(void *p)
 {
     size_t idx = hash_idx(p);
     heap_trace_record_t *r_cur = NULL;
@@ -385,7 +384,7 @@ static void heap_trace_dump_base(bool internal_ram, bool psram)
 }
 
 /* Add a new allocation to the heap trace records */
-static IRAM_ATTR void record_allocation(const heap_trace_record_t *r_allocation)
+static HEAP_IRAM_ATTR void record_allocation(const heap_trace_record_t *r_allocation)
 {
     if (!tracing || r_allocation->address == NULL) {
         return;
@@ -420,7 +419,7 @@ static IRAM_ATTR void record_allocation(const heap_trace_record_t *r_allocation)
    callers is an array of STACK_DEPTH function pointer from the call stack
    leading to the call of record_free.
 */
-static IRAM_ATTR void record_free(void *p, void **callers)
+static HEAP_IRAM_ATTR void record_free(void *p, void **callers)
 {
     if (!tracing || p == NULL) {
         return;
@@ -473,7 +472,7 @@ static void list_setup(void)
 
 /* 1. removes record r_remove from records.list,
    2. places it into records.unused */
-static IRAM_ATTR void list_remove(heap_trace_record_t *r_remove)
+static HEAP_IRAM_ATTR void list_remove(heap_trace_record_t* r_remove)
 {
     assert(records.count > 0);
 
@@ -497,7 +496,7 @@ static IRAM_ATTR void list_remove(heap_trace_record_t *r_remove)
 
 // pop record from unused list
-static IRAM_ATTR heap_trace_record_t* list_pop_unused(void)
+static HEAP_IRAM_ATTR heap_trace_record_t* list_pop_unused(void)
 {
     // no records left?
     if (records.count >= records.capacity) {
@@ -517,7 +516,7 @@
 
 // deep copy a record.
 // Note: only copies the *allocation data*, not the next & prev ptrs
-static IRAM_ATTR void record_deep_copy(heap_trace_record_t *r_dest, const heap_trace_record_t *r_src)
+static HEAP_IRAM_ATTR void record_deep_copy(heap_trace_record_t *r_dest, const heap_trace_record_t *r_src)
 {
     r_dest->ccount = r_src->ccount;
     r_dest->address = r_src->address;
@@ -528,7 +527,7 @@
 
 // Append a record to records.list
 // Note: This deep copies r_append
-static IRAM_ATTR heap_trace_record_t* list_add(const heap_trace_record_t *r_append)
+static HEAP_IRAM_ATTR heap_trace_record_t* list_add(const heap_trace_record_t *r_append)
 {
     if (records.count < records.capacity) {
 
@@ -566,7 +565,7 @@ static IRAM_ATTR heap_trace_record_t* list_add(const heap_trace_record_t *r_appe
 }
 
 // search records.list backwards for the allocation record matching this address
-static IRAM_ATTR heap_trace_record_t* list_find_address_reverse(void *p)
+static HEAP_IRAM_ATTR heap_trace_record_t* list_find_address_reverse(void* p)
 {
     heap_trace_record_t *r_found = NULL;
 
diff --git a/components/heap/include/esp_heap_caps.h b/components/heap/include/esp_heap_caps.h
index f3d1026c8b..f5b33a7c82 100644
--- a/components/heap/include/esp_heap_caps.h
+++ b/components/heap/include/esp_heap_caps.h
@@ -17,6 +17,12 @@ extern "C" {
 #endif
 
+#if CONFIG_HEAP_PLACE_FUNCTION_INTO_FLASH
+#define HEAP_IRAM_ATTR
+#else
+#define HEAP_IRAM_ATTR IRAM_ATTR
+#endif
+
 /**
  * @brief Flags to indicate the capabilities of the various memory systems
  */
@@ -63,7 +69,7 @@ esp_err_t heap_caps_register_failed_alloc_callback(esp_alloc_failed_hook_t callb
  * @note this hook is called on the same thread as the allocation, which may be within a low level operation.
  * You should refrain from doing heavy work, logging, flash writes, or any locking.
  */
-__attribute__((weak)) IRAM_ATTR void esp_heap_trace_alloc_hook(void* ptr, size_t size, uint32_t caps);
+__attribute__((weak)) HEAP_IRAM_ATTR void esp_heap_trace_alloc_hook(void* ptr, size_t size, uint32_t caps);
 
 /**
  * @brief callback called after every free
@@ -71,7 +77,7 @@ __attribute__((weak)) IRAM_ATTR void esp_heap_trace_alloc_hook(void* ptr, size_t
  * @note this hook is called on the same thread as the allocation, which may be within a low level operation.
  * You should refrain from doing heavy work, logging, flash writes, or any locking.
  */
-__attribute__((weak)) IRAM_ATTR void esp_heap_trace_free_hook(void* ptr);
+__attribute__((weak)) HEAP_IRAM_ATTR void esp_heap_trace_free_hook(void* ptr);
 #endif
 
 /**
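For reference (not part of the patch): with the definition above, HEAP_IRAM_ATTR expands to IRAM_ATTR by default and to nothing when CONFIG_HEAP_PLACE_FUNCTION_INTO_FLASH is enabled, so anything tagged with it follows the placement of the heap component. A hedged sketch of an application-side override of the weak allocation hook declared above, assuming CONFIG_HEAP_USE_HOOKS is enabled; the empty body is purely illustrative.

    /* Sketch only, not part of the patch: overriding the weak allocation hook.
     * HEAP_IRAM_ATTR keeps its placement consistent with the heap component:
     * IRAM by default, flash when CONFIG_HEAP_PLACE_FUNCTION_INTO_FLASH is set. */
    #include <stddef.h>
    #include <stdint.h>
    #include "esp_heap_caps.h"

    HEAP_IRAM_ATTR void esp_heap_trace_alloc_hook(void *ptr, size_t size, uint32_t caps)
    {
        /* Called on every allocation: keep it short, no logging or locking. */
        (void)ptr;
        (void)size;
        (void)caps;
    }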
diff --git a/components/heap/include/heap_trace.inc b/components/heap/include/heap_trace.inc
index d04b566f4b..fd0d56520f 100644
--- a/components/heap/include/heap_trace.inc
+++ b/components/heap/include/heap_trace.inc
@@ -57,7 +57,7 @@ inline static uint32_t get_ccount(void)
    Calls to __builtin_return_address are "unrolled" via TEST_STACK macro as gcc requires the
    argument to be a compile-time constant.
 */
-static IRAM_ATTR __attribute__((noinline)) void get_call_stack(void **callers)
+static HEAP_IRAM_ATTR __attribute__((noinline)) void get_call_stack(void **callers)
 {
     bzero(callers, sizeof(void *) * STACK_DEPTH);
     TEST_STACK(0);
@@ -86,7 +86,7 @@ void *__real_heap_caps_malloc_default( size_t size );
 void *__real_heap_caps_realloc_default( void *ptr, size_t size );
 
 /* trace any 'malloc' event */
-static IRAM_ATTR __attribute__((noinline)) void *trace_malloc(size_t size, uint32_t caps, trace_malloc_mode_t mode)
+static HEAP_IRAM_ATTR __attribute__((noinline)) void *trace_malloc(size_t size, uint32_t caps, trace_malloc_mode_t mode)
 {
     uint32_t ccount = get_ccount();
     void *p;
@@ -110,7 +110,7 @@ static IRAM_ATTR __attribute__((noinline)) void *trace_malloc(size_t size, uint3
 void __real_heap_caps_free(void *p);
 
 /* trace any 'free' event */
-static IRAM_ATTR __attribute__((noinline)) void trace_free(void *p)
+static HEAP_IRAM_ATTR __attribute__((noinline)) void trace_free(void *p)
 {
     void *callers[STACK_DEPTH];
     get_call_stack(callers);
@@ -122,7 +122,7 @@ static IRAM_ATTR __attribute__((noinline)) void trace_free(void *p)
 void * __real_heap_caps_realloc(void *p, size_t size, uint32_t caps);
 
 /* trace any 'realloc' event */
-static IRAM_ATTR __attribute__((noinline)) void *trace_realloc(void *p, size_t size, uint32_t caps, trace_malloc_mode_t mode)
+static HEAP_IRAM_ATTR __attribute__((noinline)) void *trace_realloc(void *p, size_t size, uint32_t caps, trace_malloc_mode_t mode)
 {
     void *callers[STACK_DEPTH];
     uint32_t ccount = get_ccount();
@@ -154,22 +154,22 @@ static IRAM_ATTR __attribute__((noinline)) void *trace_realloc(void *p, size_t s
    as they no longer go via the libc functions in ROM. But more or less the same in the end. */
 
-IRAM_ATTR void *__wrap_malloc(size_t size)
+HEAP_IRAM_ATTR void *__wrap_malloc(size_t size)
 {
     return trace_malloc(size, 0, TRACE_MALLOC_DEFAULT);
 }
 
-IRAM_ATTR void __wrap_free(void *p)
+HEAP_IRAM_ATTR void __wrap_free(void *p)
 {
     trace_free(p);
 }
 
-IRAM_ATTR void *__wrap_realloc(void *p, size_t size)
+HEAP_IRAM_ATTR void *__wrap_realloc(void *p, size_t size)
 {
     return trace_realloc(p, size, 0, TRACE_MALLOC_DEFAULT);
 }
 
-IRAM_ATTR void *__wrap_calloc(size_t nmemb, size_t size)
+HEAP_IRAM_ATTR void *__wrap_calloc(size_t nmemb, size_t size)
 {
     size = size * nmemb;
     void *result = trace_malloc(size, 0, TRACE_MALLOC_DEFAULT);
@@ -179,24 +179,24 @@ IRAM_ATTR void *__wrap_calloc(size_t nmemb, size_t size)
     return result;
 }
 
-IRAM_ATTR void *__wrap_heap_caps_malloc(size_t size, uint32_t caps)
+HEAP_IRAM_ATTR void *__wrap_heap_caps_malloc(size_t size, uint32_t caps)
 {
     return trace_malloc(size, caps, TRACE_MALLOC_CAPS);
 }
 
 void __wrap_heap_caps_free(void *p) __attribute__((alias("__wrap_free")));
 
-IRAM_ATTR void *__wrap_heap_caps_realloc(void *p, size_t size, uint32_t caps)
+HEAP_IRAM_ATTR void *__wrap_heap_caps_realloc(void *p, size_t size, uint32_t caps)
 {
     return trace_realloc(p, size, caps, TRACE_MALLOC_CAPS);
 }
 
-IRAM_ATTR void *__wrap_heap_caps_malloc_default( size_t size )
+HEAP_IRAM_ATTR void *__wrap_heap_caps_malloc_default( size_t size )
 {
     return trace_malloc(size, 0, TRACE_MALLOC_DEFAULT);
 }
 
-IRAM_ATTR void *__wrap_heap_caps_realloc_default( void *ptr, size_t size )
+HEAP_IRAM_ATTR void *__wrap_heap_caps_realloc_default( void *ptr, size_t size )
 {
     return trace_realloc(ptr, size, 0, TRACE_MALLOC_DEFAULT);
 }
diff --git a/components/heap/linker.lf b/components/heap/linker.lf
index 9f04259780..7d30fc20c7 100644
--- a/components/heap/linker.lf
+++ b/components/heap/linker.lf
@@ -1,54 +1,55 @@
 [mapping:heap]
 archive: libheap.a
 entries:
-    if HEAP_TLSF_USE_ROM_IMPL = n:
-        tlsf:tlsf_block_size (noflash)
-        tlsf:tlsf_size (noflash)
-        tlsf:tlsf_align_size (noflash)
-        tlsf:tlsf_block_size_min (noflash)
-        tlsf:tlsf_block_size_max (noflash)
-        tlsf:tlsf_alloc_overhead (noflash)
-        tlsf:tlsf_get_pool (noflash)
-        tlsf:tlsf_malloc (noflash)
-        tlsf:tlsf_memalign_offs (noflash)
-        tlsf:tlsf_memalign (noflash)
-        tlsf:tlsf_free (noflash)
-        tlsf:tlsf_realloc (noflash)
+    if HEAP_PLACE_FUNCTION_INTO_FLASH = n:
+        if HEAP_TLSF_USE_ROM_IMPL = n:
+            tlsf:tlsf_block_size (noflash)
+            tlsf:tlsf_size (noflash)
+            tlsf:tlsf_align_size (noflash)
+            tlsf:tlsf_block_size_min (noflash)
+            tlsf:tlsf_block_size_max (noflash)
+            tlsf:tlsf_alloc_overhead (noflash)
+            tlsf:tlsf_get_pool (noflash)
+            tlsf:tlsf_malloc (noflash)
+            tlsf:tlsf_memalign_offs (noflash)
+            tlsf:tlsf_memalign (noflash)
+            tlsf:tlsf_free (noflash)
+            tlsf:tlsf_realloc (noflash)
 
-        multi_heap:multi_heap_get_block_address_impl (noflash)
-        multi_heap:multi_heap_get_allocated_size_impl (noflash)
-        multi_heap:multi_heap_set_lock (noflash)
-        multi_heap:multi_heap_get_first_block (noflash)
-        multi_heap:multi_heap_get_next_block (noflash)
-        multi_heap:multi_heap_is_free (noflash)
-        multi_heap:multi_heap_malloc_impl (noflash)
-        multi_heap:multi_heap_free_impl (noflash)
-        multi_heap:multi_heap_realloc_impl (noflash)
-        multi_heap:multi_heap_aligned_alloc_impl_offs (noflash)
-        multi_heap:multi_heap_aligned_alloc_impl (noflash)
-        multi_heap:multi_heap_internal_lock (noflash)
-        multi_heap:multi_heap_internal_unlock (noflash)
-        multi_heap:assert_valid_block (noflash)
+            multi_heap:multi_heap_get_block_address_impl (noflash)
+            multi_heap:multi_heap_get_allocated_size_impl (noflash)
+            multi_heap:multi_heap_set_lock (noflash)
+            multi_heap:multi_heap_get_first_block (noflash)
+            multi_heap:multi_heap_get_next_block (noflash)
+            multi_heap:multi_heap_is_free (noflash)
+            multi_heap:multi_heap_malloc_impl (noflash)
+            multi_heap:multi_heap_free_impl (noflash)
+            multi_heap:multi_heap_realloc_impl (noflash)
+            multi_heap:multi_heap_aligned_alloc_impl_offs (noflash)
+            multi_heap:multi_heap_aligned_alloc_impl (noflash)
+            multi_heap:multi_heap_internal_lock (noflash)
+            multi_heap:multi_heap_internal_unlock (noflash)
+            multi_heap:assert_valid_block (noflash)
 
-    if HEAP_TLSF_USE_ROM_IMPL = y:
-        multi_heap:_multi_heap_lock (noflash)
-        multi_heap:_multi_heap_unlock (noflash)
-        multi_heap:multi_heap_in_rom_init (noflash)
+        if HEAP_TLSF_USE_ROM_IMPL = y:
+            multi_heap:_multi_heap_lock (noflash)
+            multi_heap:_multi_heap_unlock (noflash)
+            multi_heap:multi_heap_in_rom_init (noflash)
 
-    if HEAP_POISONING_DISABLED = n:
-        multi_heap_poisoning:poison_allocated_region (noflash)
-        multi_heap_poisoning:verify_allocated_region (noflash)
-        multi_heap_poisoning:multi_heap_aligned_alloc (noflash)
-        multi_heap_poisoning:multi_heap_malloc (noflash)
-        multi_heap_poisoning:multi_heap_free (noflash)
-        multi_heap_poisoning:multi_heap_aligned_free (noflash)
-        multi_heap_poisoning:multi_heap_realloc (noflash)
-        multi_heap_poisoning:multi_heap_get_block_address (noflash)
-        multi_heap_poisoning:multi_heap_get_block_owner (noflash)
-        multi_heap_poisoning:multi_heap_get_allocated_size (noflash)
-        multi_heap_poisoning:multi_heap_internal_check_block_poisoning (noflash)
-        multi_heap_poisoning:multi_heap_internal_poison_fill_region (noflash)
+        if HEAP_POISONING_DISABLED = n:
+            multi_heap_poisoning:poison_allocated_region (noflash)
+            multi_heap_poisoning:verify_allocated_region (noflash)
+            multi_heap_poisoning:multi_heap_aligned_alloc (noflash)
+            multi_heap_poisoning:multi_heap_malloc (noflash)
+            multi_heap_poisoning:multi_heap_free (noflash)
+            multi_heap_poisoning:multi_heap_aligned_free (noflash)
+            multi_heap_poisoning:multi_heap_realloc (noflash)
+            multi_heap_poisoning:multi_heap_get_block_address (noflash)
+            multi_heap_poisoning:multi_heap_get_block_owner (noflash)
+            multi_heap_poisoning:multi_heap_get_allocated_size (noflash)
+            multi_heap_poisoning:multi_heap_internal_check_block_poisoning (noflash)
+            multi_heap_poisoning:multi_heap_internal_poison_fill_region (noflash)
 
-        if HEAP_POISONING_COMPREHENSIVE = y:
-            multi_heap_poisoning:verify_fill_pattern (noflash)
-            multi_heap_poisoning:block_absorb_post_hook (noflash)
+            if HEAP_POISONING_COMPREHENSIVE = y:
+                multi_heap_poisoning:verify_fill_pattern (noflash)
+                multi_heap_poisoning:block_absorb_post_hook (noflash)
diff --git a/components/heap/test_apps/heap_tests/main/test_malloc_caps.c b/components/heap/test_apps/heap_tests/main/test_malloc_caps.c
index 44370beefc..255a4fa5b6 100644
--- a/components/heap/test_apps/heap_tests/main/test_malloc_caps.c
+++ b/components/heap/test_apps/heap_tests/main/test_malloc_caps.c
@@ -174,7 +174,7 @@ TEST_CASE("heap_caps metadata test", "[heap]")
 /* Small function runs from IRAM to check that malloc/free/realloc
    all work OK when cache is disabled...
 */
-#ifndef CONFIG_ESP_SYSTEM_MEMPROT_FEATURE
+#if !CONFIG_ESP_SYSTEM_MEMPROT_FEATURE && !CONFIG_HEAP_PLACE_FUNCTION_INTO_FLASH
 static IRAM_ATTR __attribute__((noinline)) bool iram_malloc_test(void)
 {
     spi_flash_guard_get()->start(); // Disables flash cache
diff --git a/components/heap/test_apps/heap_tests/pytest_heap.py b/components/heap/test_apps/heap_tests/pytest_heap.py
index 56a495021e..ac9edac0a1 100644
--- a/components/heap/test_apps/heap_tests/pytest_heap.py
+++ b/components/heap/test_apps/heap_tests/pytest_heap.py
@@ -20,6 +20,19 @@ def test_heap_poisoning(dut: Dut) -> None:
     dut.run_all_single_board_cases()
 
 
+@pytest.mark.generic
+@pytest.mark.esp32
+@pytest.mark.esp32c6
+@pytest.mark.parametrize(
+    'config',
+    [
+        'in_flash'
+    ]
+)
+def test_heap_in_flash(dut: Dut) -> None:
+    dut.run_all_single_board_cases()
+
+
 @pytest.mark.generic
 @pytest.mark.esp32
 @pytest.mark.esp32s2
diff --git a/components/heap/test_apps/heap_tests/sdkconfig.ci.in_flash b/components/heap/test_apps/heap_tests/sdkconfig.ci.in_flash
new file mode 100644
index 0000000000..5e673806a5
--- /dev/null
+++ b/components/heap/test_apps/heap_tests/sdkconfig.ci.in_flash
@@ -0,0 +1,2 @@
+CONFIG_HEAP_TLSF_USE_ROM_IMPL=n
+CONFIG_HEAP_PLACE_FUNCTION_INTO_FLASH=y
diff --git a/docs/en/api-guides/performance/ram-usage.rst b/docs/en/api-guides/performance/ram-usage.rst
index ff08a79075..7e6aa502ce 100644
--- a/docs/en/api-guides/performance/ram-usage.rst
+++ b/docs/en/api-guides/performance/ram-usage.rst
@@ -141,6 +141,7 @@ The following options will reduce IRAM usage of some ESP-IDF features:
 - Disabling :ref:`CONFIG_SPI_MASTER_ISR_IN_IRAM` prevents spi_master interrupts from being serviced while writing to flash, and may otherwise reduce spi_master performance, but will save some IRAM.
 - Setting :ref:`CONFIG_HAL_DEFAULT_ASSERTION_LEVEL` to disable assertion for HAL component will save some IRAM especially for HAL code who calls `HAL_ASSERT` a lot and resides in IRAM.
 - Refer to sdkconfig menu ``Auto-detect flash chips`` and you can disable flash drivers which you don't need to save some IRAM.
+- Enabling :ref:`CONFIG_HEAP_PLACE_FUNCTION_INTO_FLASH` saves some IRAM by placing the heap component in flash. Provided that :ref:`CONFIG_SPI_MASTER_ISR_IN_IRAM` is not enabled and no heap function is (incorrectly) called from an ISR, this option is safe to enable in all configurations.
 
 .. only:: esp32c3
diff --git a/docs/en/api-guides/performance/size.rst b/docs/en/api-guides/performance/size.rst
index 96daff94f0..d4fb6936bc 100644
--- a/docs/en/api-guides/performance/size.rst
+++ b/docs/en/api-guides/performance/size.rst
@@ -452,13 +452,11 @@ VFS
     :CONFIG_ESP_ROM_HAS_HAL_SYSTIMER: * Enabling :ref:`CONFIG_HAL_SYSTIMER_USE_ROM_IMPL` can reduce the IRAM usage and binary size by linking in the systimer HAL driver of ROM implementation.
     :CONFIG_ESP_ROM_HAS_HAL_WDT: * Enabling :ref:`CONFIG_HAL_WDT_USE_ROM_IMPL` can reduce the IRAM usage and binary size by linking in the watchdog HAL driver of ROM implementation.
 
-.. only:: CONFIG_ESP_ROM_HAS_HEAP_TLSF
-
 Heap
 @@@@
 
 .. list::
 
-
+    * Enabling :ref:`CONFIG_HEAP_PLACE_FUNCTION_INTO_FLASH` can reduce the IRAM usage by placing the heap component entirely in flash memory.
     :CONFIG_ESP_ROM_HAS_HEAP_TLSF: * Enabling :ref:`CONFIG_HEAP_TLSF_USE_ROM_IMPL` can reduce the IRAM usage and binary size by linking in the TLSF library of ROM implementation.
 
 Bootloader Size
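As a quick sanity check of the behaviour described in the documentation changes above (not part of the patch): the sketch below reports at run time whether heap_caps_malloc() was linked into IRAM or into flash. esp_ptr_in_iram() comes from esp_memory_utils.h, which this patch already uses elsewhere; report_heap_placement is a hypothetical helper name.

    /* Sketch only, not part of the patch: report where heap_caps_malloc ended up.
     * With CONFIG_HEAP_PLACE_FUNCTION_INTO_FLASH=y it is expected to be in flash. */
    #include <stdio.h>
    #include "esp_heap_caps.h"
    #include "esp_memory_utils.h"

    void report_heap_placement(void)
    {
        const void *fn = (const void *)&heap_caps_malloc;
        printf("heap_caps_malloc is placed in %s\n",
               esp_ptr_in_iram(fn) ? "IRAM" : "flash");
    }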