/*
 * SPDX-FileCopyrightText: 2023-2024 Espressif Systems (Shanghai) CO LTD
 *
 * SPDX-License-Identifier: Apache-2.0
 */
#include <sys/param.h>
#include <assert.h>
#include <inttypes.h>
#include <stdarg.h>
#include <string.h>
#include "sdkconfig.h"
#include "esp_check.h"
#include "esp_log.h"
#include "esp_heap_caps.h"
#include "esp_rom_caps.h"
#include "soc/soc_caps.h"
#include "hal/mmu_hal.h"
#include "hal/cache_hal.h"
#include "hal/cache_ll.h"
#include "esp_cache.h"
#include "esp_private/esp_cache_private.h"
#include "esp_private/critical_section.h"
static const char *TAG = "cache";

//Round `num` up to the nearest multiple of `align` (`align` must be a power of two)
#define ALIGN_UP_BY(num, align) (((num) + ((align) - 1)) & ~((align) - 1))

//Spinlock serialising all cache HAL writeback/invalidate calls in this file
DEFINE_CRIT_SECTION_LOCK_STATIC(s_spinlock);

/**
 * Synchronise a memory region between cache and memory.
 *
 * Direction is selected by exactly one of ESP_CACHE_MSYNC_FLAG_DIR_C2M
 * (cache-to-memory, i.e. writeback — the default) or
 * ESP_CACHE_MSYNC_FLAG_DIR_M2C (memory-to-cache, i.e. invalidate); likewise
 * exactly one of the DATA / INST type flags may be set.
 *
 * @param addr  start of the region (must be cache-line aligned unless
 *              ESP_CACHE_MSYNC_FLAG_UNALIGNED is passed, C2M only)
 * @param size  region size in bytes (same alignment rule as addr)
 * @param flags ESP_CACHE_MSYNC_FLAG_* bitmask
 *
 * @return ESP_OK on success, ESP_ERR_INVALID_ARG on bad pointer/size/flags
 */
esp_err_t esp_cache_msync(void *addr, size_t size, int flags)
{
    ESP_RETURN_ON_FALSE_ISR(addr, ESP_ERR_INVALID_ARG, TAG, "null pointer");

    //Reject regions whose end address would wrap around the 32-bit address space
    uint32_t addr_end = 0;
    bool ovf = __builtin_add_overflow((uint32_t)addr, size, &addr_end);
    ESP_EARLY_LOGV(TAG, "addr_end: 0x%" PRIx32, addr_end);
    ESP_RETURN_ON_FALSE_ISR(!ovf, ESP_ERR_INVALID_ARG, TAG, "wrong size, total size overflow");

    //Exactly one direction and one type may be selected
    bool both_dir = (flags & ESP_CACHE_MSYNC_FLAG_DIR_C2M) && (flags & ESP_CACHE_MSYNC_FLAG_DIR_M2C);
    bool both_type = (flags & ESP_CACHE_MSYNC_FLAG_TYPE_DATA) && (flags & ESP_CACHE_MSYNC_FLAG_TYPE_INST);
    ESP_RETURN_ON_FALSE_ISR(!both_dir && !both_type, ESP_ERR_INVALID_ARG, TAG, "both C2M and M2C directions, or both data and instruction type are selected, you should only select one direction or one type");

    //Map the virtual address range onto a cache level/instance; fails for
    //addresses not covered by any cache
    uint32_t vaddr = (uint32_t)addr;
    bool valid = false;
    uint32_t cache_level = 0;
    uint32_t cache_id = 0;
    valid = cache_hal_vaddr_to_cache_level_id(vaddr, size, &cache_level, &cache_id);
    ESP_RETURN_ON_FALSE_ISR(valid, ESP_ERR_INVALID_ARG, TAG, "invalid addr or null pointer");

    //Default type is data cache unless the instruction type flag is given
    cache_type_t cache_type = CACHE_TYPE_DATA;
    if (flags & ESP_CACHE_MSYNC_FLAG_TYPE_INST) {
        cache_type = CACHE_TYPE_INSTRUCTION;
    }
    uint32_t cache_line_size = cache_hal_get_cache_line_size(cache_level, cache_type);

    //Unless the caller explicitly opts out, both the start address and the
    //size must be multiples of the cache line size (partial-line syncs can
    //corrupt neighbouring data)
    if ((flags & ESP_CACHE_MSYNC_FLAG_UNALIGNED) == 0) {
        bool aligned_addr = (((uint32_t)addr % cache_line_size) == 0) && ((size % cache_line_size) == 0);
        ESP_RETURN_ON_FALSE_ISR(aligned_addr, ESP_ERR_INVALID_ARG, TAG, "start address: 0x%" PRIx32 ", or the size: 0x%" PRIx32 " is(are) not aligned with cache line size (0x%" PRIx32 ")B", (uint32_t)addr, (uint32_t)size, cache_line_size);
    }

    if (flags & ESP_CACHE_MSYNC_FLAG_DIR_M2C) {
        ESP_EARLY_LOGV(TAG, "M2C DIR");
        //Invalidate must never be unaligned: it would discard dirty data in
        //the partially-covered lines
        if (flags & ESP_CACHE_MSYNC_FLAG_UNALIGNED) {
            ESP_RETURN_ON_FALSE_ISR(false, ESP_ERR_INVALID_ARG, TAG, "M2C direction doesn't allow ESP_CACHE_MSYNC_FLAG_UNALIGNED");
        }

        esp_os_enter_critical_safe(&s_spinlock);
        //Add preload feature / flag here, IDF-7800
        valid = cache_hal_invalidate_addr(vaddr, size);
        esp_os_exit_critical_safe(&s_spinlock);
        //Range was validated above, so the HAL call is expected to succeed
        assert(valid);
    } else {
        ESP_EARLY_LOGV(TAG, "C2M DIR");
        //Instruction cache has no dirty lines, so writeback is meaningless
        if (flags & ESP_CACHE_MSYNC_FLAG_TYPE_INST) {
            ESP_RETURN_ON_FALSE_ISR(false, ESP_ERR_INVALID_ARG, TAG, "C2M direction doesn't support instruction type");
        }

#if SOC_CACHE_WRITEBACK_SUPPORTED
        //Write dirty lines back to memory
        esp_os_enter_critical_safe(&s_spinlock);
        valid = cache_hal_writeback_addr(vaddr, size);
        esp_os_exit_critical_safe(&s_spinlock);
        assert(valid);

        //Optionally drop the lines after writeback (writeback + invalidate)
        if (flags & ESP_CACHE_MSYNC_FLAG_INVALIDATE) {
            esp_os_enter_critical_safe(&s_spinlock);
            valid &= cache_hal_invalidate_addr(vaddr, size);
            esp_os_exit_critical_safe(&s_spinlock);
        }
        assert(valid);
#endif
        //On targets without writeback support (write-through caches), C2M is a no-op
    }

    return ESP_OK;
}
2023-09-08 03:20:22 -04:00
2024-04-23 00:59:39 -04:00
//The esp_cache_aligned_malloc function is marked deprecated but also called by other
//(also deprecated) functions in this file. In order to work around that generating warnings, it's
//split into a non-deprecated internal function and the stubbed external deprecated function.
static esp_err_t esp_cache_aligned_malloc_internal ( size_t size , uint32_t heap_caps , void * * out_ptr , size_t * actual_size )
2023-09-08 03:20:22 -04:00
{
ESP_RETURN_ON_FALSE_ISR ( out_ptr , ESP_ERR_INVALID_ARG , TAG , " null pointer " ) ;
2024-04-07 22:59:13 -04:00
uint32_t valid_caps = MALLOC_CAP_SPIRAM | MALLOC_CAP_INTERNAL | MALLOC_CAP_DMA ;
ESP_RETURN_ON_FALSE_ISR ( ( heap_caps & valid_caps ) > 0 , ESP_ERR_INVALID_ARG , TAG , " not supported cap matches " ) ;
2023-09-08 03:20:22 -04:00
uint32_t cache_level = CACHE_LL_LEVEL_INT_MEM ;
uint32_t data_cache_line_size = 0 ;
void * ptr = NULL ;
2024-04-07 22:59:13 -04:00
if ( heap_caps & MALLOC_CAP_SPIRAM ) {
2023-09-08 03:20:22 -04:00
cache_level = CACHE_LL_LEVEL_EXT_MEM ;
}
2023-09-15 08:11:52 -04:00
data_cache_line_size = cache_hal_get_cache_line_size ( cache_level , CACHE_TYPE_DATA ) ;
2023-10-08 04:26:37 -04:00
if ( data_cache_line_size = = 0 ) {
//default alignment
2023-09-08 03:20:22 -04:00
data_cache_line_size = 4 ;
}
size = ALIGN_UP_BY ( size , data_cache_line_size ) ;
2024-04-07 22:59:13 -04:00
ptr = heap_caps_aligned_alloc ( data_cache_line_size , size , ( uint32_t ) heap_caps ) ;
2024-04-07 21:36:01 -04:00
if ( ! ptr ) {
return ESP_ERR_NO_MEM ;
}
2023-09-08 03:20:22 -04:00
* out_ptr = ptr ;
if ( actual_size ) {
* actual_size = size ;
}
return ESP_OK ;
}
2024-04-23 00:59:39 -04:00
//this is the deprecated stub for the above function
esp_err_t esp_cache_aligned_malloc ( size_t size , uint32_t heap_caps , void * * out_ptr , size_t * actual_size )
{
return esp_cache_aligned_malloc_internal ( size , heap_caps , out_ptr , actual_size ) ;
}
2024-03-22 00:24:03 -04:00
esp_err_t esp_cache_aligned_malloc_prefer ( size_t size , void * * out_ptr , size_t * actual_size , size_t flag_nums , . . . )
{
ESP_RETURN_ON_FALSE_ISR ( out_ptr , ESP_ERR_INVALID_ARG , TAG , " null pointer " ) ;
esp_err_t ret = ESP_FAIL ;
va_list argp ;
uint32_t flags = 0 ;
va_start ( argp , flag_nums ) ;
* out_ptr = NULL ;
while ( flag_nums - - ) {
2024-06-06 04:17:27 -04:00
flags = va_arg ( argp , int ) ;
2024-04-23 00:59:39 -04:00
ret = esp_cache_aligned_malloc_internal ( size , flags , out_ptr , actual_size ) ;
2024-03-22 00:24:03 -04:00
if ( ret = = ESP_OK ) {
break ;
}
}
va_end ( argp ) ;
return ret ;
}
2024-04-07 22:59:13 -04:00
esp_err_t esp_cache_aligned_calloc ( size_t n , size_t size , uint32_t heap_caps , void * * out_ptr , size_t * actual_size )
2023-09-08 03:20:22 -04:00
{
ESP_RETURN_ON_FALSE_ISR ( out_ptr , ESP_ERR_INVALID_ARG , TAG , " null pointer " ) ;
esp_err_t ret = ESP_FAIL ;
size_t size_bytes = 0 ;
bool ovf = false ;
ovf = __builtin_mul_overflow ( n , size , & size_bytes ) ;
ESP_RETURN_ON_FALSE_ISR ( ! ovf , ESP_ERR_INVALID_ARG , TAG , " wrong size, total size overflow " ) ;
void * ptr = NULL ;
2024-04-23 00:59:39 -04:00
ret = esp_cache_aligned_malloc_internal ( size_bytes , heap_caps , & ptr , actual_size ) ;
2023-09-08 03:20:22 -04:00
if ( ret = = ESP_OK ) {
memset ( ptr , 0 , size_bytes ) ;
* out_ptr = ptr ;
}
return ret ;
}
2023-10-08 04:26:37 -04:00
2024-03-22 00:24:03 -04:00
esp_err_t esp_cache_aligned_calloc_prefer ( size_t n , size_t size , void * * out_ptr , size_t * actual_size , size_t flag_nums , . . . )
{
ESP_RETURN_ON_FALSE_ISR ( out_ptr , ESP_ERR_INVALID_ARG , TAG , " null pointer " ) ;
esp_err_t ret = ESP_FAIL ;
size_t size_bytes = 0 ;
bool ovf = false ;
* out_ptr = NULL ;
ovf = __builtin_mul_overflow ( n , size , & size_bytes ) ;
ESP_RETURN_ON_FALSE_ISR ( ! ovf , ESP_ERR_INVALID_ARG , TAG , " wrong size, total size overflow " ) ;
void * ptr = NULL ;
va_list argp ;
va_start ( argp , flag_nums ) ;
int arg ;
for ( int i = 0 ; i < flag_nums ; i + + ) {
arg = va_arg ( argp , int ) ;
2024-04-23 00:59:39 -04:00
ret = esp_cache_aligned_malloc_internal ( size_bytes , arg , & ptr , actual_size ) ;
2024-03-22 00:24:03 -04:00
if ( ret = = ESP_OK ) {
memset ( ptr , 0 , size_bytes ) ;
* out_ptr = ptr ;
break ;
}
}
va_end ( argp ) ;
return ret ;
}
2024-04-07 22:59:13 -04:00
esp_err_t esp_cache_get_alignment ( uint32_t heap_caps , size_t * out_alignment )
2023-10-08 04:26:37 -04:00
{
ESP_RETURN_ON_FALSE ( out_alignment , ESP_ERR_INVALID_ARG , TAG , " null pointer " ) ;
uint32_t cache_level = CACHE_LL_LEVEL_INT_MEM ;
uint32_t data_cache_line_size = 0 ;
2024-04-07 22:59:13 -04:00
if ( heap_caps & MALLOC_CAP_SPIRAM ) {
2023-10-08 04:26:37 -04:00
cache_level = CACHE_LL_LEVEL_EXT_MEM ;
}
data_cache_line_size = cache_hal_get_cache_line_size ( cache_level , CACHE_TYPE_DATA ) ;
* out_alignment = data_cache_line_size ;
return ESP_OK ;
}