mirror of https://github.com/espressif/esp-idf.git (synced 2024-10-05 20:47:46 -04:00)

feat(cache): support cache driver on esp32p4

commit ea38a2e9a4
parent 83aedb202f
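The change that drives every hunk below is an API change in the cache HAL: cache_hal_disable/enable/suspend/resume/freeze/unfreeze and cache_hal_get_cache_line_size now take an explicit cache level in addition to the cache type, and callers that deal with external memory pass CACHE_LL_LEVEL_EXT_MEM. A minimal caller-side sketch of the new convention, assuming only the hal/cache_hal.h and hal/cache_ll.h headers the hunks include (the wrapper function itself is illustrative, not part of the commit):

    #include "hal/cache_hal.h"   // cache_hal_disable() / cache_hal_enable()
    #include "hal/cache_ll.h"    // CACHE_LL_LEVEL_EXT_MEM

    // Run one operation with the external-memory cache turned off, using the
    // new (type, level) signatures introduced by this commit.
    static void run_with_ext_mem_cache_disabled(void (*op)(void))
    {
        cache_hal_disable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);  // was: cache_hal_disable(CACHE_TYPE_ALL)
        op();
        cache_hal_enable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);   // was: cache_hal_enable(CACHE_TYPE_ALL)
    }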
@@ -12,10 +12,6 @@
#include "soc/soc_caps.h"
#include "hal/efuse_ll.h"
#include "hal/efuse_hal.h"
#if CONFIG_IDF_TARGET_ESP32P4
//TODO: IDF-7516
#include "esp32p4/rom/cache.h"
#endif

#if CONFIG_IDF_TARGET_ESP32
# include "soc/spi_struct.h"
@@ -128,6 +124,7 @@ esp_err_t bootloader_flash_erase_range(uint32_t start_addr, uint32_t size)
#include "hal/mmu_hal.h"
#include "hal/mmu_ll.h"
#include "hal/cache_hal.h"
#include "hal/cache_ll.h"

#if CONFIG_IDF_TARGET_ESP32S3
#include "esp32s3/rom/opi_flash.h"
@@ -205,7 +202,7 @@ const void *bootloader_mmap(uint32_t src_paddr, uint32_t size)
Cache_Read_Disable(0);
Cache_Flush(0);
#else
cache_hal_disable(CACHE_TYPE_ALL);
cache_hal_disable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
#endif

//---------------Do mapping------------------------
@@ -238,15 +235,10 @@ const void *bootloader_mmap(uint32_t src_paddr, uint32_t size)
#if CONFIG_IDF_TARGET_ESP32
Cache_Read_Enable(0);
#else
#if CONFIG_IDF_TARGET_ESP32P4
/**
* TODO: IDF-7516
* we need to invalidate l1 dcache to make each mmap clean
* to that vaddr
*/
Cache_Invalidate_Addr(CACHE_MAP_L1_DCACHE, MMU_BLOCK0_VADDR, actual_mapped_len);
#if SOC_CACHE_INTERNAL_MEM_VIA_L1CACHE
cache_ll_invalidate_addr(CACHE_LL_LEVEL_ALL, CACHE_TYPE_ALL, CACHE_LL_ID_ALL, MMU_BLOCK0_VADDR, actual_mapped_len);
#endif
cache_hal_enable(CACHE_TYPE_ALL);
cache_hal_enable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
#endif

mapped = true;
@@ -263,7 +255,7 @@ void bootloader_munmap(const void *mapping)
Cache_Flush(0);
mmu_init(0);
#else
cache_hal_disable(CACHE_TYPE_ALL);
cache_hal_disable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
mmu_hal_unmap_all();
#endif
mapped = false;
@@ -291,7 +283,7 @@ static esp_err_t bootloader_flash_read_no_decrypt(size_t src_addr, void *dest, s
Cache_Read_Disable(0);
Cache_Flush(0);
#else
cache_hal_disable(CACHE_TYPE_ALL);
cache_hal_disable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
#endif

esp_rom_spiflash_result_t r = esp_rom_spiflash_read(src_addr, dest, size);
@@ -299,7 +291,7 @@ static esp_err_t bootloader_flash_read_no_decrypt(size_t src_addr, void *dest, s
#if CONFIG_IDF_TARGET_ESP32
Cache_Read_Enable(0);
#else
cache_hal_enable(CACHE_TYPE_ALL);
cache_hal_enable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
#endif

return spi_to_esp_err(r);
@@ -322,7 +314,7 @@ static esp_err_t bootloader_flash_read_allow_decrypt(size_t src_addr, void *dest
Cache_Read_Disable(0);
Cache_Flush(0);
#else
cache_hal_disable(CACHE_TYPE_ALL);
cache_hal_disable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
#endif

//---------------Do mapping------------------------
@@ -341,11 +333,10 @@ static esp_err_t bootloader_flash_read_allow_decrypt(size_t src_addr, void *dest
#if CONFIG_IDF_TARGET_ESP32
Cache_Read_Enable(0);
#else
#if CONFIG_IDF_TARGET_ESP32P4
//TODO: IDF-7516
Cache_Invalidate_Addr(CACHE_MAP_L1_DCACHE, FLASH_READ_VADDR, actual_mapped_len);
#if SOC_CACHE_INTERNAL_MEM_VIA_L1CACHE
cache_ll_invalidate_addr(CACHE_LL_LEVEL_ALL, CACHE_TYPE_ALL, CACHE_LL_ID_ALL, MMU_BLOCK0_VADDR, actual_mapped_len);
#endif
cache_hal_enable(CACHE_TYPE_ALL);
cache_hal_enable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
#endif
}
map_ptr = (uint32_t *)(FLASH_READ_VADDR + (word_src - map_at));
@@ -468,9 +459,9 @@ void bootloader_flash_32bits_address_map_enable(esp_rom_spiflash_read_mode_t fla
assert(false);
break;
}
cache_hal_disable(CACHE_TYPE_ALL);
cache_hal_disable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
esp_rom_opiflash_cache_mode_config(flash_mode, &cache_rd);
cache_hal_enable(CACHE_TYPE_ALL);
cache_hal_enable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
}
#endif
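The bootloader hunks above all follow the same shape: turn the external-memory cache off, let the ROM SPI flash driver do its work, then turn the cache back on. A condensed sketch of that pattern with the new two-argument calls, assuming the ROM cache declarations for ESP32 and the HAL headers already included in this file (the helper name is illustrative):

    #include "sdkconfig.h"
    #include "esp_rom_spiflash.h"
    #include "hal/cache_hal.h"
    #include "hal/cache_ll.h"

    static esp_rom_spiflash_result_t rom_read_with_cache_off(size_t src_addr, void *dest, size_t size)
    {
    #if CONFIG_IDF_TARGET_ESP32
        Cache_Read_Disable(0);          // ESP32 keeps its ROM cache calls
        Cache_Flush(0);
    #else
        cache_hal_disable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
    #endif

        // the ROM driver accesses flash directly while the cache is off
        esp_rom_spiflash_result_t r = esp_rom_spiflash_read(src_addr, dest, size);

    #if CONFIG_IDF_TARGET_ESP32
        Cache_Read_Enable(0);
    #else
        cache_hal_enable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
    #endif
        return r;
    }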
@@ -1,5 +1,5 @@
/*
* SPDX-FileCopyrightText: 2020-2022 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2020-2023 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
@@ -23,8 +23,9 @@
#include "bootloader_flash_priv.h"
#include "bootloader_init.h"
#include "hal/mmu_hal.h"
#include "hal/cache_hal.h"
#include "hal/mmu_ll.h"
#include "hal/cache_hal.h"
#include "hal/cache_ll.h"

#define FLASH_IO_MATRIX_DUMMY_40M 0
#define FLASH_IO_MATRIX_DUMMY_80M 0
@@ -126,10 +127,10 @@ static void update_flash_config(const esp_image_header_t *bootloader_hdr)
default:
size = 2;
}
cache_hal_disable(CACHE_TYPE_ALL);
cache_hal_disable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
// Set flash chip size
esp_rom_spiflash_config_param(rom_spiflash_legacy_data->chip.device_id, size * 0x100000, 0x10000, 0x1000, 0x100, 0xffff); // TODO: set mode
cache_hal_enable(CACHE_TYPE_ALL);
cache_hal_enable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
}

static void print_flash_info(const esp_image_header_t *bootloader_hdr)

@@ -1,5 +1,5 @@
/*
* SPDX-FileCopyrightText: 2020-2022 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2020-2023 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
@@ -24,8 +24,9 @@
#include "bootloader_flash_priv.h"
#include "bootloader_init.h"
#include "hal/mmu_hal.h"
#include "hal/cache_hal.h"
#include "hal/mmu_ll.h"
#include "hal/cache_hal.h"
#include "hal/cache_ll.h"

#define FLASH_IO_MATRIX_DUMMY_40M 0
#define FLASH_IO_MATRIX_DUMMY_80M 0
@@ -137,10 +138,10 @@ static void update_flash_config(const esp_image_header_t *bootloader_hdr)
default:
size = 2;
}
cache_hal_disable(CACHE_TYPE_ALL);
cache_hal_disable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
// Set flash chip size
esp_rom_spiflash_config_param(rom_spiflash_legacy_data->chip.device_id, size * 0x100000, 0x10000, 0x1000, 0x100, 0xffff); // TODO: set mode
cache_hal_enable(CACHE_TYPE_ALL);
cache_hal_enable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
}

static void print_flash_info(const esp_image_header_t *bootloader_hdr)

@@ -24,8 +24,9 @@
#include "bootloader_flash_priv.h"
#include "bootloader_init.h"
#include "hal/mmu_hal.h"
#include "hal/cache_hal.h"
#include "hal/mmu_ll.h"
#include "hal/cache_hal.h"
#include "hal/cache_ll.h"

void bootloader_flash_update_id()
{
@@ -102,10 +103,10 @@ static void update_flash_config(const esp_image_header_t *bootloader_hdr)
default:
size = 2;
}
cache_hal_disable(CACHE_TYPE_ALL);
cache_hal_disable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
// Set flash chip size
esp_rom_spiflash_config_param(rom_spiflash_legacy_data->chip.device_id, size * 0x100000, 0x10000, 0x1000, 0x100, 0xffff); // TODO: set mode
cache_hal_enable(CACHE_TYPE_ALL);
cache_hal_enable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
}

static void print_flash_info(const esp_image_header_t *bootloader_hdr)

@@ -24,8 +24,9 @@
#include "bootloader_flash_priv.h"
#include "bootloader_init.h"
#include "hal/mmu_hal.h"
#include "hal/cache_hal.h"
#include "hal/mmu_ll.h"
#include "hal/cache_hal.h"
#include "hal/cache_ll.h"
#include "soc/pcr_reg.h"

void bootloader_flash_update_id()
@@ -109,10 +110,10 @@ static void update_flash_config(const esp_image_header_t *bootloader_hdr)
default:
size = 2;
}
cache_hal_disable(CACHE_TYPE_ALL);
cache_hal_disable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
// Set flash chip size
esp_rom_spiflash_config_param(rom_spiflash_legacy_data->chip.device_id, size * 0x100000, 0x10000, 0x1000, 0x100, 0xffff); // TODO: set mode
cache_hal_enable(CACHE_TYPE_ALL);
cache_hal_enable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
}

static void print_flash_info(const esp_image_header_t *bootloader_hdr)

@@ -18,8 +18,9 @@
#include "bootloader_flash_priv.h"
#include "bootloader_init.h"
#include "hal/mmu_hal.h"
#include "hal/cache_hal.h"
#include "hal/mmu_ll.h"
#include "hal/cache_hal.h"
#include "hal/cache_ll.h"

void bootloader_flash_update_id()
{
@@ -96,10 +97,10 @@ static void update_flash_config(const esp_image_header_t *bootloader_hdr)
default:
size = 2;
}
cache_hal_disable(CACHE_TYPE_ALL);
cache_hal_disable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
// Set flash chip size
esp_rom_spiflash_config_param(rom_spiflash_legacy_data->chip.device_id, size * 0x100000, 0x10000, 0x1000, 0x100, 0xffff); // TODO: set mode
cache_hal_enable(CACHE_TYPE_ALL);
cache_hal_enable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
}

static void print_flash_info(const esp_image_header_t *bootloader_hdr)

@@ -1,5 +1,5 @@
/*
* SPDX-FileCopyrightText: 2019-2022 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2019-2023 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
@@ -23,7 +23,9 @@
#include "bootloader_common.h"
#include "bootloader_init.h"
#include "hal/mmu_hal.h"
#include "hal/mmu_ll.h"
#include "hal/cache_hal.h"
#include "hal/cache_ll.h"

#define FLASH_IO_MATRIX_DUMMY_40M 0
#define FLASH_IO_MATRIX_DUMMY_80M 0
@@ -150,12 +152,12 @@ static void update_flash_config(const esp_image_header_t *bootloader_hdr)
default:
size = 2;
}
cache_hal_disable(CACHE_TYPE_ALL);
cache_hal_disable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
// Set flash chip size
esp_rom_spiflash_config_param(g_rom_flashchip.device_id, size * 0x100000, 0x10000, 0x1000, 0x100, 0xffff);
// TODO: set mode
// TODO: set frequency
cache_hal_enable(CACHE_TYPE_ALL);
cache_hal_enable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
}

static void print_flash_info(const esp_image_header_t *bootloader_hdr)

@@ -24,7 +24,9 @@
#include "bootloader_flash.h"
#include "bootloader_init.h"
#include "hal/mmu_hal.h"
#include "hal/mmu_ll.h"
#include "hal/cache_hal.h"
#include "hal/cache_ll.h"

#define FLASH_IO_MATRIX_DUMMY_40M 0
#define FLASH_IO_MATRIX_DUMMY_80M 0
@@ -157,12 +159,12 @@ static void update_flash_config(const esp_image_header_t *bootloader_hdr)
size = 2;
}

cache_hal_disable(CACHE_TYPE_ALL);
cache_hal_disable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
// Set flash chip size
esp_rom_spiflash_config_param(g_rom_flashchip.device_id, size * 0x100000, 0x10000, 0x1000, 0x100, 0xffff);
// TODO: set mode
// TODO: set frequency
cache_hal_enable(CACHE_TYPE_ALL);
cache_hal_enable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
}

static void print_flash_info(const esp_image_header_t *bootloader_hdr)

@@ -836,7 +836,7 @@ static void set_cache_and_start_app(
Cache_Read_Disable(0);
Cache_Flush(0);
#else
cache_hal_disable(CACHE_TYPE_ALL);
cache_hal_disable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
#endif
//reset MMU table first
mmu_hal_unmap_all();
@@ -896,7 +896,7 @@ static void set_cache_and_start_app(
// Application will need to do Cache_Flush(1) and Cache_Read_Enable(1)
Cache_Read_Enable(0);
#else
cache_hal_enable(CACHE_TYPE_ALL);
cache_hal_enable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
#endif

ESP_LOGD(TAG, "start: 0x%08"PRIx32, entry_addr);

@@ -21,6 +21,7 @@
#include "esp_rom_sys.h"
#include "bootloader_memory_utils.h"
#include "soc/soc_caps.h"
#include "hal/cache_ll.h"
#if CONFIG_IDF_TARGET_ESP32
#include "esp32/rom/secure_boot.h"
#elif CONFIG_IDF_TARGET_ESP32S2
@@ -41,7 +42,6 @@
#elif CONFIG_IDF_TARGET_ESP32P4
#include "esp32p4/rom/rtc.h"
#include "esp32p4/rom/secure_boot.h"
#include "esp32p4/rom/cache.h"
#endif

#define ALIGN_UP(num, align) (((num) + ((align) - 1)) & ~((align) - 1))
@@ -236,9 +236,8 @@ static esp_err_t image_load(esp_image_load_mode_t mode, const esp_partition_pos_
}
}
}
#if CONFIG_IDF_TARGET_ESP32P4
//TODO: IDF-7516
Cache_WriteBack_All(CACHE_MAP_L1_DCACHE);
#if SOC_CACHE_INTERNAL_MEM_VIA_L1CACHE
cache_ll_writeback_all(CACHE_LL_LEVEL_INT_MEM, CACHE_TYPE_DATA, CACHE_LL_ID_ALL);
#endif
}
#endif // BOOTLOADER_BUILD
@@ -675,10 +674,9 @@ static esp_err_t process_segment_data(intptr_t load_addr, uint32_t data_addr, ui
MIN(SHA_CHUNK, data_len - i));
}
}
#if CONFIG_IDF_TARGET_ESP32P4
//TODO: IDF-7516
#if SOC_CACHE_INTERNAL_MEM_VIA_L1CACHE
if (do_load && esp_ptr_in_diram_iram((uint32_t *)load_addr)) {
Cache_WriteBack_All(CACHE_MAP_L1_DCACHE);
cache_ll_writeback_all(CACHE_LL_LEVEL_INT_MEM, CACHE_TYPE_DATA, CACHE_LL_ID_ALL);
}
#endif
@@ -45,6 +45,7 @@
#include "esp_private/periph_ctrl.h"
#include "gdma_priv.h"
#include "hal/cache_hal.h"
#include "hal/cache_ll.h"

static const char *TAG = "gdma";

@@ -357,7 +358,7 @@ esp_err_t gdma_set_transfer_ability(gdma_channel_handle_t dma_chan, const gdma_t
ESP_RETURN_ON_FALSE((sram_alignment & (sram_alignment - 1)) == 0, ESP_ERR_INVALID_ARG,
TAG, "invalid sram alignment: %zu", sram_alignment);

uint32_t data_cache_line_size = cache_hal_get_cache_line_size(CACHE_TYPE_DATA);
uint32_t data_cache_line_size = cache_hal_get_cache_line_size(CACHE_TYPE_DATA, CACHE_LL_LEVEL_EXT_MEM);
if (psram_alignment == 0) {
// fall back to use the same size of the psram data cache line size
psram_alignment = data_cache_line_size;
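The GDMA hunk swaps the one-argument line-size query for the per-level form. A small sketch, under the same assumption as the hunk, of deriving a DMA alignment from the external-memory data cache line size (the helper is illustrative):

    #include <stddef.h>
    #include <stdint.h>
    #include "hal/cache_hal.h"
    #include "hal/cache_ll.h"

    // Fall back to the external-memory data cache line size when the caller
    // did not request a specific PSRAM alignment.
    static size_t pick_psram_alignment(size_t requested)
    {
        uint32_t line = cache_hal_get_cache_line_size(CACHE_TYPE_DATA, CACHE_LL_LEVEL_EXT_MEM);
        return (requested == 0) ? (size_t)line : requested;
    }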
@@ -15,6 +15,7 @@
#include "soc/soc.h"
#include "hal/spi_flash_hal.h"
#include "hal/cache_hal.h"
#include "hal/cache_ll.h"
#include "esp_private/mspi_timing_tuning.h"
#include "mspi_timing_config.h"
#include "mspi_timing_by_mspi_delay.h"
@@ -473,7 +474,7 @@ void mspi_timing_change_speed_mode_cache_safe(bool switch_down)
* for preventing concurrent from MSPI to external memory
*/
#if SOC_CACHE_FREEZE_SUPPORTED
cache_hal_freeze(CACHE_TYPE_ALL);
cache_hal_freeze(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
#endif //#if SOC_CACHE_FREEZE_SUPPORTED

if (switch_down) {
@@ -485,7 +486,7 @@ void mspi_timing_change_speed_mode_cache_safe(bool switch_down)
}

#if SOC_CACHE_FREEZE_SUPPORTED
cache_hal_unfreeze(CACHE_TYPE_ALL);
cache_hal_unfreeze(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
#endif //#if SOC_CACHE_FREEZE_SUPPORTED
}
@@ -43,6 +43,7 @@
#include "regi2c_ctrl.h" //For `REGI2C_ANA_CALI_PD_WORKAROUND`, temp

#include "hal/cache_hal.h"
#include "hal/cache_ll.h"
#include "hal/wdt_hal.h"
#include "hal/uart_hal.h"
#if SOC_TOUCH_SENSOR_SUPPORTED
@@ -403,7 +404,7 @@ static int s_cache_suspend_cnt = 0;
static void IRAM_ATTR suspend_cache(void) {
s_cache_suspend_cnt++;
if (s_cache_suspend_cnt == 1) {
cache_hal_suspend(CACHE_TYPE_ALL);
cache_hal_suspend(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
}
}

@@ -412,7 +413,7 @@ static void IRAM_ATTR resume_cache(void) {
s_cache_suspend_cnt--;
assert(s_cache_suspend_cnt >= 0 && DRAM_STR("cache resume doesn't match suspend ops"));
if (s_cache_suspend_cnt == 0) {
cache_hal_resume(CACHE_TYPE_ALL);
cache_hal_resume(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
}
}
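The sleep hunks keep the existing reference-counted suspend/resume structure and only change the HAL calls. A minimal sketch of that structure with the new signatures (the counter name matches the hunk; everything else is illustrative):

    #include <assert.h>
    #include "esp_attr.h"
    #include "hal/cache_hal.h"
    #include "hal/cache_ll.h"

    static int s_cache_suspend_cnt = 0;

    static void IRAM_ATTR suspend_cache(void)
    {
        s_cache_suspend_cnt++;
        if (s_cache_suspend_cnt == 1) {
            // only the first suspender actually stops the external-memory cache
            cache_hal_suspend(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
        }
    }

    static void IRAM_ATTR resume_cache(void)
    {
        s_cache_suspend_cnt--;
        assert(s_cache_suspend_cnt >= 0);
        if (s_cache_suspend_cnt == 0) {
            // the last resumer restarts it
            cache_hal_resume(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
        }
    }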
@@ -11,5 +11,5 @@ endif()
# In order for the cases defined by `TEST_CASE` to be linked into the final elf,
# the component can be registered as WHOLE_ARCHIVE
idf_component_register(SRCS ${srcs}
PRIV_REQUIRES unity
PRIV_REQUIRES unity esp_mm
WHOLE_ARCHIVE)
@@ -16,7 +16,7 @@
#include "soc/soc_caps.h"
#include "hal/gdma_ll.h"
#include "hal/cache_ll.h"
#include "rom/cache.h"
#include "esp_cache.h"

TEST_CASE("GDMA channel allocation", "[GDMA]")
{
@@ -187,7 +187,7 @@ static void test_gdma_m2m_mode(gdma_channel_handle_t tx_chan, gdma_channel_handl
uint8_t *dst_data = dst_buf + 64;

// prepare the source data
for (int i = 0; i < 100; i++) {
for (int i = 0; i < 128; i++) {
src_data[i] = i;
}

@@ -198,41 +198,41 @@ static void test_gdma_m2m_mode(gdma_channel_handle_t tx_chan, gdma_channel_handl
dma_descriptor_align8_t *rx_descs_noncache = (dma_descriptor_align8_t *)(CACHE_LL_L2MEM_NON_CACHE_ADDR(rx_descs));

tx_descs_noncache[0].buffer = src_data;
tx_descs_noncache[0].dw0.size = 50;
tx_descs_noncache[0].dw0.length = 50;
tx_descs_noncache[0].dw0.size = 64;
tx_descs_noncache[0].dw0.length = 64;
tx_descs_noncache[0].dw0.owner = DMA_DESCRIPTOR_BUFFER_OWNER_DMA;
tx_descs_noncache[0].dw0.suc_eof = 0;
tx_descs_noncache[0].next = &tx_descs[1]; // Note, the DMA doesn't recognize a non-cacheable address, here must be the cached address

tx_descs_noncache[1].buffer = src_data + 50;
tx_descs_noncache[1].dw0.size = 50;
tx_descs_noncache[1].dw0.length = 50;
tx_descs_noncache[1].buffer = src_data + 64;
tx_descs_noncache[1].dw0.size = 64;
tx_descs_noncache[1].dw0.length = 64;
tx_descs_noncache[1].dw0.owner = DMA_DESCRIPTOR_BUFFER_OWNER_DMA;
tx_descs_noncache[1].dw0.suc_eof = 1;
tx_descs_noncache[1].next = NULL;

rx_descs_noncache->buffer = dst_data;
rx_descs_noncache->dw0.size = 100;
rx_descs_noncache->dw0.size = 128;
rx_descs_noncache->dw0.owner = DMA_DESCRIPTOR_BUFFER_OWNER_DMA;
rx_descs_noncache->dw0.suc_eof = 1;
rx_descs_noncache->next = NULL;
#else
tx_descs->buffer = src_data;
tx_descs->dw0.size = 100;
tx_descs->dw0.length = 100;
tx_descs->dw0.size = 128;
tx_descs->dw0.length = 128;
tx_descs->dw0.owner = DMA_DESCRIPTOR_BUFFER_OWNER_DMA;
tx_descs->dw0.suc_eof = 1;
tx_descs->next = NULL;

rx_descs->buffer = dst_data;
rx_descs->dw0.size = 100;
rx_descs->dw0.size = 128;
rx_descs->dw0.owner = DMA_DESCRIPTOR_BUFFER_OWNER_DMA;
rx_descs->next = NULL;
#endif

#if CONFIG_IDF_TARGET_ESP32P4
// do write-back for the source data because it's in the cache
Cache_WriteBack_Addr(CACHE_MAP_L1_DCACHE, (uint32_t)src_data, 100);
TEST_ESP_OK(esp_cache_msync((void *)src_data, 128, ESP_CACHE_MSYNC_FLAG_DIR_C2M));
#endif

TEST_ESP_OK(gdma_start(rx_chan, (intptr_t)rx_descs));
@@ -242,14 +242,14 @@ static void test_gdma_m2m_mode(gdma_channel_handle_t tx_chan, gdma_channel_handl

#if CONFIG_IDF_TARGET_ESP32P4
// the destination data are not reflected to the cache, so do an invalidate to ask the cache load new data
Cache_Invalidate_Addr(CACHE_MAP_L1_DCACHE, (uint32_t)dst_data, 100);
TEST_ESP_OK(esp_cache_msync((void *)dst_data, 128, ESP_CACHE_MSYNC_FLAG_DIR_M2C));
#endif

// check the DMA descriptor write-back feature
TEST_ASSERT_EQUAL(DMA_DESCRIPTOR_BUFFER_OWNER_CPU, tx_descs[0].dw0.owner);
TEST_ASSERT_EQUAL(DMA_DESCRIPTOR_BUFFER_OWNER_CPU, rx_descs[0].dw0.owner);

for (int i = 0; i < 100; i++) {
for (int i = 0; i < 128; i++) {
TEST_ASSERT_EQUAL(i, dst_data[i]);
}
free((void *)src_buf);
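The test now sizes its buffers to 128 bytes, a multiple of the 64-byte L1 data cache line, and replaces the ROM Cache_WriteBack_Addr/Cache_Invalidate_Addr calls with esp_cache_msync(). A hedged sketch of the same write-back/invalidate choreography around a memory-to-memory DMA transfer (buffer names and the transfer step are illustrative):

    #include <stdint.h>
    #include <string.h>
    #include "esp_err.h"
    #include "esp_cache.h"

    #define XFER_LEN 128  // assumed to be a multiple of the data cache line size

    void dma_m2m_cache_sync_example(uint8_t *src, uint8_t *dst)
    {
        memset(src, 0xAA, XFER_LEN);

        // C2M: write dirty source lines back to memory before the DMA reads it
        ESP_ERROR_CHECK(esp_cache_msync(src, XFER_LEN, ESP_CACHE_MSYNC_FLAG_DIR_C2M));

        // ... start the GDMA transfer here and wait for it to complete ...

        // M2C: invalidate the destination lines so the CPU sees what the DMA wrote
        ESP_ERROR_CHECK(esp_cache_msync(dst, XFER_LEN, ESP_CACHE_MSYNC_FLAG_DIR_M2C));
    }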
@@ -30,6 +30,7 @@
#include "hal/dma_types.h"
#include "hal/gpio_hal.h"
#include "hal/cache_hal.h"
#include "hal/cache_ll.h"
#include "esp_private/gdma.h"
#include "driver/gpio.h"
#include "esp_private/periph_ctrl.h"
@@ -491,7 +492,7 @@ static esp_err_t panel_io_i80_tx_color(esp_lcd_panel_io_t *io, int lcd_cmd, cons
trans_desc->user_ctx = i80_device->user_ctx;

if (esp_ptr_external_ram(color)) {
uint32_t dcache_line_size = cache_hal_get_cache_line_size(CACHE_TYPE_DATA);
uint32_t dcache_line_size = cache_hal_get_cache_line_size(CACHE_TYPE_DATA, CACHE_LL_LEVEL_EXT_MEM);
// flush frame buffer from cache to the physical PSRAM
// note the esp_cache_msync function will check the alignment of the address and size, make sure they're aligned to current cache line size
esp_cache_msync((void *)ALIGN_DOWN((intptr_t)color, dcache_line_size), ALIGN_UP(color_size, dcache_line_size), 0);
@@ -23,38 +23,52 @@ DEFINE_CRIT_SECTION_LOCK_STATIC(s_spinlock);
esp_err_t esp_cache_msync(void *addr, size_t size, int flags)
{
ESP_RETURN_ON_FALSE_ISR(addr, ESP_ERR_INVALID_ARG, TAG, "null pointer");
ESP_RETURN_ON_FALSE_ISR(mmu_hal_check_valid_ext_vaddr_region(0, (uint32_t)addr, size, MMU_VADDR_DATA), ESP_ERR_INVALID_ARG, TAG, "invalid address");

uint32_t addr_end = 0;
bool ovf = __builtin_add_overflow((uint32_t)addr, size, &addr_end);
ESP_EARLY_LOGV(TAG, "addr_end: 0x%x\n", addr_end);
ESP_RETURN_ON_FALSE_ISR(!ovf, ESP_ERR_INVALID_ARG, TAG, "wrong size, total size overflow");

bool both_dir = (flags & ESP_CACHE_MSYNC_FLAG_DIR_C2M) && (flags & ESP_CACHE_MSYNC_FLAG_DIR_M2C);
ESP_RETURN_ON_FALSE_ISR(!both_dir, ESP_ERR_INVALID_ARG, TAG, "both C2M and M2C directions are selected, you should only select one");
uint32_t data_cache_line_size = cache_hal_get_cache_line_size(CACHE_TYPE_DATA);
if ((flags & ESP_CACHE_MSYNC_FLAG_UNALIGNED) == 0) {
bool aligned_addr = (((uint32_t)addr % data_cache_line_size) == 0) && ((size % data_cache_line_size) == 0);
ESP_RETURN_ON_FALSE_ISR(aligned_addr, ESP_ERR_INVALID_ARG, TAG, "start address, end address or the size is(are) not aligned with the data cache line size (%d)B", data_cache_line_size);
}

uint32_t vaddr = (uint32_t)addr;
bool valid = false;
uint32_t cache_level = 0;
uint32_t cache_id = 0;
valid = cache_hal_vaddr_to_cache_level_id(vaddr, size, &cache_level, &cache_id);
ESP_RETURN_ON_FALSE_ISR(valid, ESP_ERR_INVALID_ARG, TAG, "invalid addr or null pointer");

uint32_t data_cache_line_size = cache_hal_get_cache_line_size(CACHE_TYPE_DATA, cache_level);
if ((flags & ESP_CACHE_MSYNC_FLAG_UNALIGNED) == 0) {
bool aligned_addr = (((uint32_t)addr % data_cache_line_size) == 0) && ((size % data_cache_line_size) == 0);
ESP_RETURN_ON_FALSE_ISR(aligned_addr, ESP_ERR_INVALID_ARG, TAG, "start address: 0x%x, or the size: 0x%x is(are) not aligned with the data cache line size (0x%x)B", (uint32_t)addr, size, data_cache_line_size);
}

if (flags & ESP_CACHE_MSYNC_FLAG_DIR_M2C) {
ESP_EARLY_LOGD(TAG, "M2C DIR");
ESP_EARLY_LOGV(TAG, "M2C DIR");

esp_os_enter_critical_safe(&s_spinlock);
//Add preload feature / flag here, IDF-7800
cache_hal_invalidate_addr(vaddr, size);
valid = cache_hal_invalidate_addr(vaddr, size);
esp_os_exit_critical_safe(&s_spinlock);

assert(valid);
} else {
ESP_EARLY_LOGD(TAG, "C2M DIR");
ESP_EARLY_LOGV(TAG, "C2M DIR");

#if SOC_CACHE_WRITEBACK_SUPPORTED

esp_os_enter_critical_safe(&s_spinlock);
cache_hal_writeback_addr(vaddr, size);
valid = cache_hal_writeback_addr(vaddr, size);
esp_os_exit_critical_safe(&s_spinlock);
assert(valid);

if (flags & ESP_CACHE_MSYNC_FLAG_INVALIDATE) {
esp_os_enter_critical_safe(&s_spinlock);
cache_hal_invalidate_addr(vaddr, size);
valid &= cache_hal_invalidate_addr(vaddr, size);
esp_os_exit_critical_safe(&s_spinlock);
}
assert(valid);
#endif
}
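After this change esp_cache_msync() refuses address ranges that the HAL cannot map to a cache level, and a C2M sync can be combined with an invalidate. A short caller-side sketch (the buffer is assumed to be cache-line aligned; the helper is illustrative):

    #include <stddef.h>
    #include "esp_err.h"
    #include "esp_cache.h"

    // Write a buffer back to external memory and drop it from the cache in one call.
    // ESP_ERR_INVALID_ARG means the range is not cacheable or not line-aligned.
    esp_err_t flush_and_drop(void *buf, size_t len)
    {
        return esp_cache_msync(buf, len,
                               ESP_CACHE_MSYNC_FLAG_DIR_C2M | ESP_CACHE_MSYNC_FLAG_INVALIDATE);
    }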
@@ -118,6 +118,16 @@ static bool s_is_overlapped(uint32_t block_start, uint32_t block_end, uint32_t n

#if CONFIG_APP_BUILD_USE_FLASH_SECTIONS

static cache_bus_mask_t s_get_bus_mask(uint32_t vaddr_start, uint32_t len)
{
#if CACHE_LL_EXT_MEM_VIA_L2CACHE
return cache_ll_l2_get_bus(0, vaddr_start, len);
#else
return cache_ll_l1_get_bus(0, vaddr_start, len);
#endif
}

static void s_reserve_irom_region(mem_region_t *hw_mem_regions, int region_nums)
{
/**
@@ -133,7 +143,7 @@ static void s_reserve_irom_region(mem_region_t *hw_mem_regions, int region_nums)

irom_len_to_reserve += (uint32_t)&_instruction_reserved_start - ALIGN_DOWN_BY((uint32_t)&_instruction_reserved_start, CONFIG_MMU_PAGE_SIZE);
irom_len_to_reserve = ALIGN_UP_BY(irom_len_to_reserve, CONFIG_MMU_PAGE_SIZE);
cache_bus_mask_t bus_mask = cache_ll_l1_get_bus(0, (uint32_t)&_instruction_reserved_start, irom_len_to_reserve);
cache_bus_mask_t bus_mask = s_get_bus_mask((uint32_t)&_instruction_reserved_start, irom_len_to_reserve);

for (int i = 0; i < SOC_MMU_LINEAR_ADDRESS_REGION_NUM; i++) {
if (bus_mask & hw_mem_regions[i].bus_id) {
@@ -161,7 +171,7 @@ static void s_reserve_drom_region(mem_region_t *hw_mem_regions, int region_nums)

drom_len_to_reserve += (uint32_t)&_rodata_reserved_start - ALIGN_DOWN_BY((uint32_t)&_rodata_reserved_start, CONFIG_MMU_PAGE_SIZE);
drom_len_to_reserve = ALIGN_UP_BY(drom_len_to_reserve, CONFIG_MMU_PAGE_SIZE);
cache_bus_mask_t bus_mask = cache_ll_l1_get_bus(0, (uint32_t)&_rodata_reserved_start, drom_len_to_reserve);
cache_bus_mask_t bus_mask = s_get_bus_mask((uint32_t)&_rodata_reserved_start, drom_len_to_reserve);

for (int i = 0; i < SOC_MMU_LINEAR_ADDRESS_REGION_NUM; i++) {
if (bus_mask & hw_mem_regions[i].bus_id) {
@@ -23,6 +23,7 @@
#include "esp_partition.h"
#include "esp_flash.h"
#include "test_mm_utils.h"
#include "soc/ext_mem_defs.h"

const static char *TAG = "CACHE_TEST";

@@ -32,9 +33,11 @@ const static char *TAG = "CACHE_TEST";

#define TEST_OFFSET 0x100000
#if CONFIG_IDF_TARGET_ESP32S2
#define TEST_SYNC_START (0x3F500000 + TEST_OFFSET)
#define TEST_SYNC_START (SOC_DPORT_CACHE_ADDRESS_LOW + TEST_OFFSET)
#elif CONFIG_IDF_TARGET_ESP32S3
#define TEST_SYNC_START (0x3C000000 + TEST_OFFSET)
#define TEST_SYNC_START (SOC_DRAM0_CACHE_ADDRESS_LOW + TEST_OFFSET)
#elif CONFIG_IDF_TARGET_ESP32P4
#define TEST_SYNC_START (SOC_DRAM_PSRAM_ADDRESS_LOW + TEST_OFFSET)
#endif
#define TEST_SYNC_SIZE 0x8000
@@ -341,7 +341,7 @@ void IRAM_ATTR do_multicore_settings(void)
cache_bus_mask_t cache_bus_mask_core0 = cache_ll_l1_get_enabled_bus(0);
#ifndef CONFIG_IDF_TARGET_ESP32
// 1. disable the cache before changing its settings.
cache_hal_disable(CACHE_TYPE_ALL);
cache_hal_disable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
#endif
for (unsigned core = 1; core < SOC_CPU_CORES_NUM; core++) {
// 2. change cache settings. All cores must have the same settings.
@@ -349,7 +349,7 @@ void IRAM_ATTR do_multicore_settings(void)
}
#ifndef CONFIG_IDF_TARGET_ESP32
// 3. enable the cache after changing its settings.
cache_hal_enable(CACHE_TYPE_ALL);
cache_hal_enable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
#endif
}
#endif //#if !CONFIG_IDF_TARGET_ESP32P4
@@ -496,7 +496,7 @@ void IRAM_ATTR call_start_cpu0(void)
#endif // CONFIG_IDF_TARGET_ESP32S3

#if CONFIG_IDF_TARGET_ESP32P4
//TODO: IDF-7516, add cache init API
//TODO: IDF-5670, add cache init API
extern void esp_config_l2_cache_mode(void);
esp_config_l2_cache_mode();
#endif
@@ -1,43 +1,47 @@
menu "Cache config"

choice ESP32P4_L2_CACHE_SIZE
choice CACHE_L2_CACHE_SIZE
prompt "L2 cache size"
default ESP32P4_L2_CACHE_128KB
default CACHE_L2_CACHE_128KB
help
L2 cache size to be set on application startup.

config ESP32P4_L2_CACHE_128KB
config CACHE_L2_CACHE_128KB
bool "128KB"
config ESP32P4_L2_CACHE_256KB
config CACHE_L2_CACHE_256KB
bool "256KB"
config ESP32P4_L2_CACHE_512KB
config CACHE_L2_CACHE_512KB
bool "512KB"
endchoice

config ESP32P4_L2_CACHE_SIZE
config CACHE_L2_CACHE_SIZE
hex
default 0x20000 if ESP32P4_L2_CACHE_128KB
default 0x40000 if ESP32P4_L2_CACHE_256KB
default 0x80000 if ESP32P4_L2_CACHE_512KB
default 0x20000 if CACHE_L2_CACHE_128KB
default 0x40000 if CACHE_L2_CACHE_256KB
default 0x80000 if CACHE_L2_CACHE_512KB

choice ESP32P4_L2_CACHE_LINE_SIZE
choice CACHE_L2_CACHE_LINE_SIZE
prompt "L2 cache line size"
default ESP32P4_L2_CACHE_LINE_64B if ESP32P4_L2_CACHE_128KB
default ESP32P4_L2_CACHE_LINE_64B if ESP32P4_L2_CACHE_256KB
default ESP32P4_L2_CACHE_LINE_128B if ESP32P4_L2_CACHE_512KB
default CACHE_L2_CACHE_LINE_64B if CACHE_L2_CACHE_128KB
default CACHE_L2_CACHE_LINE_64B if CACHE_L2_CACHE_256KB
default CACHE_L2_CACHE_LINE_128B if CACHE_L2_CACHE_512KB
help
L2 cache line size to be set on application startup.

config ESP32P4_L2_CACHE_LINE_64B
config CACHE_L2_CACHE_LINE_64B
bool "64 Bytes"
depends on ESP32P4_L2_CACHE_128KB || ESP32P4_L2_CACHE_256KB
depends on CACHE_L2_CACHE_128KB || CACHE_L2_CACHE_256KB
config ESP32P4_L2_CACHE_LINE_128B
config CACHE_L2_CACHE_LINE_128B
bool "128 Bytes"
endchoice

config ESP32P4_L2_CACHE_LINE_SIZE
config CACHE_L2_CACHE_LINE_SIZE
int
default 64 if ESP32P4_L2_CACHE_LINE_64B
default 128 if ESP32P4_L2_CACHE_LINE_128B
default 64 if CACHE_L2_CACHE_LINE_64B
default 128 if CACHE_L2_CACHE_LINE_128B

config CACHE_L1_CACHE_LINE_SIZE
int
default 64

endmenu # Cache config
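Because the Kconfig symbols drop the ESP32P4_ prefix, the generated sdkconfig.h macros become CONFIG_CACHE_L2_CACHE_SIZE and CONFIG_CACHE_L2_CACHE_LINE_SIZE, plus the new CONFIG_CACHE_L1_CACHE_LINE_SIZE. A hedged sketch of reading them from C (the function is illustrative):

    #include <stdio.h>
    #include "sdkconfig.h"

    void print_cache_config(void)
    {
    #if CONFIG_IDF_TARGET_ESP32P4
        // renamed in this commit from CONFIG_ESP32P4_L2_CACHE_* to CONFIG_CACHE_L2_CACHE_*
        printf("L2 cache: %d bytes, line size %d bytes\n",
               CONFIG_CACHE_L2_CACHE_SIZE, CONFIG_CACHE_L2_CACHE_LINE_SIZE);
        printf("L1 cache line size: %d bytes\n", CONFIG_CACHE_L1_CACHE_LINE_SIZE);
    #endif
    }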
@@ -5,6 +5,7 @@
*/
#include <sys/param.h>
#include <stdint.h>
#include <stdbool.h>
#include "sdkconfig.h"
#include "esp_err.h"
#include "esp_attr.h"
@@ -24,47 +25,45 @@
* Now this file doesn't compile on ESP32
*----------------------------------------------------------------------------*/

/**
* To know if autoload is enabled or not.
*
* We should have a unified flag for this aim, then we don't need to call following 2 functions
* to know the flag.
*
* Suggest ROM keeping this flag value to BIT(2). Then we can replace following lines to:
* #define DATA_AUTOLOAD_FLAG BIT(2)
* #define INST_AUTOLOAD_FLAG BIT(2)
*/
#if CONFIG_IDF_TARGET_ESP32P4 //TODO: IDF-7516
#define DATA_AUTOLOAD_ENABLE Cache_Disable_L2_Cache()
#define INST_AUTOLOAD_ENABLE Cache_Disable_L2_Cache()
#else
#define DATA_AUTOLOAD_ENABLE cache_ll_is_cache_autoload_enabled(CACHE_TYPE_DATA)
#define INST_AUTOLOAD_ENABLE cache_ll_is_cache_autoload_enabled(CACHE_TYPE_INSTRUCTION)
#endif

/**
* Necessary hal contexts, could be maintained by upper layer in the future
*/
typedef struct {
bool data_autoload_en;
bool inst_autoload_en;
bool i_autoload_en;
bool d_autoload_en;
#if CACHE_LL_ENABLE_DISABLE_STATE_SW
// There's no register indicating if cache is enabled on these chips, use sw flag to save this state.
volatile bool cache_enabled;
bool i_cache_enabled;
bool d_cache_enabled;
#endif
} cache_hal_state_t;

typedef struct {
cache_hal_state_t l1;
cache_hal_state_t l2;
} cache_hal_context_t;

static cache_hal_context_t ctx;

void s_cache_hal_init_ctx(void)
{
ctx.l1.d_autoload_en = cache_ll_is_cache_autoload_enabled(1, CACHE_TYPE_DATA, CACHE_LL_ID_ALL);
ctx.l1.i_autoload_en = cache_ll_is_cache_autoload_enabled(1, CACHE_TYPE_INSTRUCTION, CACHE_LL_ID_ALL);
ctx.l2.d_autoload_en = cache_ll_is_cache_autoload_enabled(2, CACHE_TYPE_DATA, CACHE_LL_ID_ALL);
ctx.l2.i_autoload_en = cache_ll_is_cache_autoload_enabled(2, CACHE_TYPE_INSTRUCTION, CACHE_LL_ID_ALL);
}

void cache_hal_init(void)
{
ctx.data_autoload_en = DATA_AUTOLOAD_ENABLE;
ctx.inst_autoload_en = INST_AUTOLOAD_ENABLE;
#if SOC_CACHE_L2_SUPPORTED
Cache_Enable_L2_Cache(ctx.inst_autoload_en);
#else
cache_ll_enable_cache(CACHE_TYPE_ALL, ctx.inst_autoload_en, ctx.data_autoload_en);
#endif //SOC_CACHE_L2_SUPPORTED
s_cache_hal_init_ctx();

if (CACHE_LL_LEVEL_EXT_MEM == 1) {
cache_ll_enable_cache(1, CACHE_TYPE_ALL, CACHE_LL_ID_ALL, ctx.l1.i_autoload_en, ctx.l1.d_autoload_en);
} else if (CACHE_LL_LEVEL_EXT_MEM == 2) {
cache_ll_enable_cache(2, CACHE_TYPE_ALL, CACHE_LL_ID_ALL, ctx.l2.i_autoload_en, ctx.l2.d_autoload_en);
}

cache_ll_l1_enable_bus(0, CACHE_LL_DEFAULT_DBUS_MASK);
cache_ll_l1_enable_bus(0, CACHE_LL_DEFAULT_IBUS_MASK);
@@ -74,125 +73,222 @@ void cache_hal_init(void)
#endif

#if CACHE_LL_ENABLE_DISABLE_STATE_SW
ctx.cache_enabled = 1;
ctx.l1.i_cache_enabled = 1;
ctx.l1.d_cache_enabled = 1;
ctx.l2.i_cache_enabled = 1;
ctx.l2.d_cache_enabled = 1;
#endif
}

void cache_hal_disable(cache_type_t type)
#if CACHE_LL_ENABLE_DISABLE_STATE_SW
void s_update_cache_state(uint32_t cache_level, cache_type_t type, bool en)
{
#if SOC_CACHE_L2_SUPPORTED
Cache_Disable_L2_Cache();
#else
cache_ll_disable_cache(type);
#endif //SOC_CACHE_L2_SUPPORTED
HAL_ASSERT(cache_level && (cache_level <= CACHE_LL_LEVEL_NUMS));

switch (cache_level) {
case 1:
if (type == CACHE_TYPE_INSTRUCTION) {
ctx.l1.i_cache_enabled = en;
break;
} else if (type == CACHE_TYPE_DATA) {
ctx.l1.d_cache_enabled = en;
break;
} else if (type == CACHE_TYPE_ALL) {
ctx.l1.i_cache_enabled = en;
ctx.l1.d_cache_enabled = en;
break;
} else {
HAL_ASSERT(false);
break;
}
case 2:
if (type == CACHE_TYPE_INSTRUCTION) {
ctx.l2.i_cache_enabled = en;
break;
} else if (type == CACHE_TYPE_DATA) {
ctx.l2.d_cache_enabled = en;
break;
} else if (type == CACHE_TYPE_ALL) {
ctx.l2.i_cache_enabled = en;
ctx.l2.d_cache_enabled = en;
break;
} else {
HAL_ASSERT(false);
break;
}
default:
HAL_ASSERT(false);
break;
}
}

bool s_get_cache_state(uint32_t cache_level, cache_type_t type)
{
HAL_ASSERT(cache_level && (cache_level <= CACHE_LL_LEVEL_NUMS));
bool enabled = false;

switch (cache_level) {
case 1:
if (type == CACHE_TYPE_INSTRUCTION) {
enabled = ctx.l1.i_cache_enabled;
break;
} else if (type == CACHE_TYPE_DATA) {
enabled = ctx.l1.d_cache_enabled;
break;
} else if (type == CACHE_TYPE_ALL) {
enabled = ctx.l1.i_cache_enabled;
enabled &= ctx.l1.d_cache_enabled;
break;
} else {
HAL_ASSERT(false);
break;
}
case 2:
if (type == CACHE_TYPE_INSTRUCTION) {
enabled = ctx.l2.i_cache_enabled;
break;
} else if (type == CACHE_TYPE_DATA) {
enabled = ctx.l2.d_cache_enabled;
break;
} else if (type == CACHE_TYPE_ALL) {
enabled = ctx.l2.i_cache_enabled;
enabled &= ctx.l2.d_cache_enabled;
break;
} else {
HAL_ASSERT(false);
break;
}
default:
HAL_ASSERT(false);
break;
}

return enabled;
}
#endif //#if CACHE_LL_ENABLE_DISABLE_STATE_SW

void cache_hal_disable(cache_type_t type, uint32_t cache_level)
{
HAL_ASSERT(cache_level && (cache_level <= CACHE_LL_LEVEL_NUMS));

cache_ll_disable_cache(cache_level, type, CACHE_LL_ID_ALL);

#if CACHE_LL_ENABLE_DISABLE_STATE_SW
ctx.cache_enabled = 0;
s_update_cache_state(cache_level, type, false);
#endif
}

void cache_hal_enable(cache_type_t type)
void cache_hal_enable(cache_type_t type, uint32_t cache_level)
{
#if SOC_CACHE_L2_SUPPORTED
Cache_Enable_L2_Cache(ctx.inst_autoload_en);
#else
cache_ll_enable_cache(type, ctx.inst_autoload_en, ctx.data_autoload_en);
#endif //SOC_CACHE_L2_SUPPORTED
HAL_ASSERT(cache_level && (cache_level <= CACHE_LL_LEVEL_NUMS));

if (cache_level == 1) {
cache_ll_enable_cache(1, type, CACHE_LL_ID_ALL, ctx.l1.i_autoload_en, ctx.l1.d_autoload_en);
} else if (cache_level == 2) {
cache_ll_enable_cache(2, type, CACHE_LL_ID_ALL, ctx.l2.i_autoload_en, ctx.l2.d_autoload_en);
}

#if CACHE_LL_ENABLE_DISABLE_STATE_SW
ctx.cache_enabled = 1;
s_update_cache_state(cache_level, type, true);
#endif
}

void cache_hal_suspend(cache_type_t type)
void cache_hal_suspend(cache_type_t type, uint32_t cache_level)
{
#if SOC_CACHE_L2_SUPPORTED
Cache_Suspend_L2_Cache();
#else
cache_ll_suspend_cache(type);
#endif //SOC_CACHE_L2_SUPPORTED
HAL_ASSERT(cache_level && (cache_level <= CACHE_LL_LEVEL_NUMS));

cache_ll_suspend_cache(cache_level, type, CACHE_LL_ID_ALL);

#if CACHE_LL_ENABLE_DISABLE_STATE_SW
ctx.cache_enabled = 0;
s_update_cache_state(cache_level, type, false);
#endif
}

void cache_hal_resume(cache_type_t type)
void cache_hal_resume(cache_type_t type, uint32_t cache_level)
{
#if SOC_CACHE_L2_SUPPORTED
Cache_Resume_L2_Cache(ctx.inst_autoload_en);
#else
cache_ll_resume_cache(type, ctx.inst_autoload_en, ctx.data_autoload_en);
#endif
HAL_ASSERT(cache_level && (cache_level <= CACHE_LL_LEVEL_NUMS));

if (cache_level == 1) {
cache_ll_resume_cache(1, type, CACHE_LL_ID_ALL, ctx.l1.i_autoload_en, ctx.l1.d_autoload_en);
} else if (cache_level == 2) {
cache_ll_resume_cache(2, type, CACHE_LL_ID_ALL, ctx.l2.i_autoload_en, ctx.l2.d_autoload_en);
}

#if CACHE_LL_ENABLE_DISABLE_STATE_SW
ctx.cache_enabled = 1;
s_update_cache_state(cache_level, type, true);
#endif
}

bool cache_hal_is_cache_enabled(cache_type_t type)
bool cache_hal_is_cache_enabled(cache_type_t type, uint32_t cache_level)
{
bool enabled;
bool enabled = false;
#if CACHE_LL_ENABLE_DISABLE_STATE_SW
enabled = ctx.cache_enabled;
enabled = s_get_cache_state(cache_level, type);
#else
enabled = cache_ll_is_cache_enabled(type);
#endif //CACHE_LL_ENABLE_DISABLE_STATE_SW
return enabled;
}

void cache_hal_invalidate_addr(uint32_t vaddr, uint32_t size)
bool cache_hal_vaddr_to_cache_level_id(uint32_t vaddr_start, uint32_t len, uint32_t *out_level, uint32_t *out_id)
{
//Now only esp32 has 2 MMUs, this file doesn't build on esp32
HAL_ASSERT(mmu_hal_check_valid_ext_vaddr_region(0, vaddr, size, MMU_VADDR_DATA | MMU_VADDR_INSTRUCTION));
#if CONFIG_IDF_TARGET_ESP32P4
Cache_Invalidate_Addr(CACHE_MAP_L1_DCACHE | CACHE_MAP_L2_CACHE, vaddr, size);
#else
cache_ll_invalidate_addr(vaddr, size);
#endif
if (!out_level || !out_id) {
return false;
}
return cache_ll_vaddr_to_cache_level_id(vaddr_start, len, out_level, out_id);
}

bool cache_hal_invalidate_addr(uint32_t vaddr, uint32_t size)
{
bool valid = false;
uint32_t cache_level = 0;
uint32_t cache_id = 0;

valid = cache_hal_vaddr_to_cache_level_id(vaddr, size, &cache_level, &cache_id);
if (valid) {
cache_ll_invalidate_addr(cache_level, CACHE_TYPE_ALL, cache_id, vaddr, size);
}

return valid;
}

#if SOC_CACHE_WRITEBACK_SUPPORTED
void cache_hal_writeback_addr(uint32_t vaddr, uint32_t size)
bool cache_hal_writeback_addr(uint32_t vaddr, uint32_t size)
{
HAL_ASSERT(mmu_hal_check_valid_ext_vaddr_region(0, vaddr, size, MMU_VADDR_DATA));
#if CONFIG_IDF_TARGET_ESP32P4
Cache_WriteBack_Addr(CACHE_MAP_L1_DCACHE, vaddr, size);
Cache_WriteBack_Addr(CACHE_MAP_L2_CACHE, vaddr, size);
#else
cache_ll_writeback_addr(vaddr, size);
#endif
bool valid = false;
uint32_t cache_level = 0;
uint32_t cache_id = 0;

valid = cache_hal_vaddr_to_cache_level_id(vaddr, size, &cache_level, &cache_id);
if (valid) {
cache_ll_writeback_addr(cache_level, CACHE_TYPE_DATA, cache_id, vaddr, size);
}

return valid;
}
#endif //#if SOC_CACHE_WRITEBACK_SUPPORTED

#if SOC_CACHE_FREEZE_SUPPORTED
void cache_hal_freeze(cache_type_t type)
void cache_hal_freeze(cache_type_t type, uint32_t cache_level)
{
#if SOC_CACHE_L2_SUPPORTED
Cache_Freeze_L2_Cache_Enable(CACHE_FREEZE_ACK_BUSY);
#else
cache_ll_freeze_cache(type);
#endif //SOC_CACHE_L2_SUPPORTED
HAL_ASSERT(cache_level && (cache_level <= CACHE_LL_LEVEL_NUMS));

cache_ll_freeze_cache(cache_level, type, CACHE_LL_ID_ALL);
}

void cache_hal_unfreeze(cache_type_t type)
void cache_hal_unfreeze(cache_type_t type, uint32_t cache_level)
{
#if SOC_CACHE_L2_SUPPORTED
Cache_Freeze_L2_Cache_Disable();
#else
cache_ll_unfreeze_cache(type);
#endif //SOC_CACHE_L2_SUPPORTED
HAL_ASSERT(cache_level && (cache_level <= CACHE_LL_LEVEL_NUMS));

cache_ll_unfreeze_cache(cache_level, type, CACHE_LL_ID_ALL);
}
#endif //#if SOC_CACHE_FREEZE_SUPPORTED

uint32_t cache_hal_get_cache_line_size(cache_type_t type)
uint32_t cache_hal_get_cache_line_size(cache_type_t type, uint32_t cache_level)
{
uint32_t line_size = 0;
#if SOC_CACHE_L2_SUPPORTED
line_size = Cache_Get_L2_Cache_Line_Size();
#else
line_size = cache_ll_get_line_size(type);
#endif //SOC_CACHE_L2_SUPPORTED
HAL_ASSERT(cache_level && (cache_level <= CACHE_LL_LEVEL_NUMS));

uint32_t line_size = cache_ll_get_line_size(cache_level, type, CACHE_LL_ID_ALL);

return line_size;
}
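The reworked HAL first resolves an address range to a (cache level, cache id) pair and only then touches the cache, and the sync operations report whether the range was valid instead of asserting on it up front. A hedged usage sketch built on the functions defined above (the helper itself is illustrative):

    #include <stdbool.h>
    #include <stdint.h>
    #include "soc/soc_caps.h"
    #include "hal/cache_hal.h"
    #include "hal/cache_ll.h"

    // Write a range back through whichever cache level covers it; return false
    // when the range is not behind any cache at all.
    bool writeback_if_cached(uint32_t vaddr, uint32_t size)
    {
        uint32_t level = 0;
        uint32_t id = 0;
        if (!cache_hal_vaddr_to_cache_level_id(vaddr, size, &level, &id)) {
            return false;   // not a cacheable address range
        }
    #if SOC_CACHE_WRITEBACK_SUPPORTED
        return cache_hal_writeback_addr(vaddr, size);
    #else
        return true;        // nothing to write back on write-through caches
    #endif
    }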
@@ -14,7 +14,7 @@ static uint32_t s_cache_status[2];
* There's a bug that Cache_Read_Disable requires a call to Cache_Flush
* before Cache_Read_Enable, even if cached data was not modified.
*/
void cache_hal_suspend(cache_type_t type)
void cache_hal_suspend(cache_type_t type, uint32_t cache_level)
{
s_cache_status[0] = cache_ll_l1_get_enabled_bus(0);
cache_ll_l1_disable_cache(0);
@@ -25,7 +25,7 @@ void cache_hal_suspend(cache_type_t type)
}

void cache_hal_resume(cache_type_t type)
void cache_hal_resume(cache_type_t type, uint32_t cache_level)
{
cache_ll_l1_enable_cache(0);
cache_ll_l1_enable_bus(0, s_cache_status[0]);
@@ -36,7 +36,7 @@ void cache_hal_resume(cache_type_t type)
}

bool cache_hal_is_cache_enabled(cache_type_t type)
bool cache_hal_is_cache_enabled(cache_type_t type, uint32_t cache_level)
{
bool result = cache_ll_l1_is_cache_enabled(0, CACHE_TYPE_ALL);
#if !CONFIG_FREERTOS_UNICORE
@@ -44,3 +44,23 @@ bool cache_hal_is_cache_enabled(cache_type_t type)
#endif
return result;
}

bool cache_hal_vaddr_to_cache_level_id(uint32_t vaddr_start, uint32_t len, uint32_t *out_level, uint32_t *out_id)
{
if (!out_level || !out_id) {
return false;
}
return cache_ll_vaddr_to_cache_level_id(vaddr_start, len, out_level, out_id);
}

uint32_t cache_hal_get_cache_line_size(cache_type_t type, uint32_t cache_level)
{
HAL_ASSERT(cache_level && (cache_level <= CACHE_LL_LEVEL_NUMS));
return 4;
}

bool cache_hal_invalidate_addr(uint32_t vaddr, uint32_t size)
{
//esp32 doesn't support invalidate certain addr
abort();
}
@@ -19,6 +19,12 @@
extern "C" {
#endif

#define CACHE_LL_ID_ALL 2 //All of the caches in a type and level, make this value greater than any ID
#define CACHE_LL_LEVEL_INT_MEM 0 //Cache level for accessing internal mem
#define CACHE_LL_LEVEL_EXT_MEM 1 //Cache level for accessing external mem
#define CACHE_LL_LEVEL_ALL 2 //All of the cache levels, make this value greater than any level
#define CACHE_LL_LEVEL_NUMS 1 //Number of cache levels

/**
* @brief enable a cache unit
*
@@ -27,7 +33,7 @@ extern "C" {
__attribute__((always_inline))
static inline void cache_ll_l1_enable_cache(uint32_t cache_id)
{
HAL_ASSERT(cache_id == 0 || cache_id == 1);
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);

if (cache_id == 0) {
DPORT_REG_SET_BIT(DPORT_PRO_CACHE_CTRL_REG, DPORT_PRO_CACHE_ENABLE);
@@ -67,7 +73,7 @@ static inline void cache_ll_l1_disable_cache(uint32_t cache_id)
__attribute__((always_inline))
static inline bool cache_ll_l1_is_cache_enabled(uint32_t cache_id, cache_type_t type)
{
HAL_ASSERT(cache_id == 0 || cache_id == 1);
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
(void) type; //On 32 it shares between I and D cache

bool enabled;
@@ -94,7 +100,7 @@ __attribute__((always_inline))
#endif
static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t vaddr_start, uint32_t len)
{
HAL_ASSERT(cache_id == 0 || cache_id == 1);
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
cache_bus_mask_t mask = (cache_bus_mask_t)0;

uint32_t vaddr_end = vaddr_start + len - 1;
@@ -135,7 +141,7 @@ __attribute__((always_inline))
static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t mask)
{
(void) mask;
HAL_ASSERT(cache_id == 0 || cache_id == 1);
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);

uint32_t bus_mask = 0;
if (cache_id == 0) {
@@ -170,7 +176,7 @@ __attribute__((always_inline))
static inline cache_bus_mask_t cache_ll_l1_get_enabled_bus(uint32_t cache_id)
{
cache_bus_mask_t mask = (cache_bus_mask_t)0;
HAL_ASSERT(cache_id == 0 || cache_id == 1);
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
if (cache_id == 0) {
uint32_t bus_mask= DPORT_REG_READ(DPORT_PRO_CACHE_CTRL1_REG);
mask = (cache_bus_mask_t)(mask | ((!(bus_mask & DPORT_PRO_CACHE_MASK_IRAM0)) ? CACHE_BUS_IBUS0 : 0));
@@ -202,7 +208,7 @@ __attribute__((always_inline))
static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t mask)
{
(void) mask;
HAL_ASSERT(cache_id == 0 || cache_id == 1);
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);

uint32_t bus_mask = 0;
if (cache_id == 0) {
@@ -226,6 +232,33 @@ static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t m
}
}

/**
* @brief Get Cache level and the ID of the vaddr
*
* @param vaddr_start virtual address start
* @param len vaddr length
* @param out_level cache level
* @param out_id cache id
*
* @return true for valid
*/
__attribute__((always_inline))
static inline bool cache_ll_vaddr_to_cache_level_id(uint32_t vaddr_start, uint32_t len, uint32_t *out_level, uint32_t *out_id)
{
bool valid = false;
uint32_t vaddr_end = vaddr_start + len - 1;

valid |= ((vaddr_start >= SOC_DROM0_CACHE_ADDRESS_LOW) && (vaddr_end < SOC_DROM0_CACHE_ADDRESS_HIGH)) || ((vaddr_start >= SOC_DRAM1_CACHE_ADDRESS_LOW) && (vaddr_end < SOC_DRAM1_CACHE_ADDRESS_HIGH));
valid |= ((vaddr_start >= SOC_IRAM0_CACHE_ADDRESS_LOW) && (vaddr_end < SOC_IRAM0_CACHE_ADDRESS_HIGH));

if (valid) {
*out_level = 1;
*out_id = 0;
}

return valid;
}

#ifdef __cplusplus
}
#endif
@ -36,72 +36,83 @@ extern "C" {
|
||||
#define CACHE_LL_L1_ILG_EVENT_PRELOAD_OP_FAULT (1<<1)
|
||||
#define CACHE_LL_L1_ILG_EVENT_SYNC_OP_FAULT (1<<0)
|
||||
|
||||
#define CACHE_LL_ID_ALL 1 //All of the caches in a type and level, make this value greater than any ID
|
||||
#define CACHE_LL_LEVEL_INT_MEM 0 //Cache level for accessing internal mem
|
||||
#define CACHE_LL_LEVEL_EXT_MEM 1 //Cache level for accessing external mem
|
||||
#define CACHE_LL_LEVEL_ALL 2 //All of the cache levels, make this value greater than any level
|
||||
#define CACHE_LL_LEVEL_NUMS 1 //Number of cache levels
|
||||
//On ESP32C2, the auto preload flag is always 0
|
||||
#define CACHE_LL_L1_ICACHE_AUTOLOAD 0
|
||||
|
||||
/**
|
||||
* @brief Check if Cache auto preload is enabled or not. On ESP32C2, instructions and data share Cache
|
||||
* @brief Check if Cache auto preload is enabled or not.
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*
|
||||
* @return false (On ESP32C2, it's always false)
|
||||
* @return true: enabled; false: disabled
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline bool cache_ll_is_cache_autoload_enabled(cache_type_t type)
|
||||
static inline bool cache_ll_is_cache_autoload_enabled(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
bool enabled = false;
|
||||
return enabled;
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Disable Cache. On ESP32C2, instructions and data share Cache
|
||||
* @brief Disable Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_disable_cache(cache_type_t type)
|
||||
static inline void cache_ll_disable_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
(void) type;
|
||||
Cache_Disable_ICache();
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Enable Cache. On ESP32C2, instructions and data share Cache
|
||||
* @brief Enable Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
*
|
||||
* @param data_autoload_en Dcache auto preload enabled
|
||||
*
|
||||
* @param inst_autoload_en Icache auto preload enabled
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
* @param data_autoload_en data autoload enabled or not
|
||||
* @param inst_autoload_en inst autoload enabled or not
|
||||
*/
|
||||
__attribute__((always_inline))
static inline void cache_ll_enable_cache(cache_type_t type, bool inst_autoload_en, bool data_autoload_en)
static inline void cache_ll_enable_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id, bool inst_autoload_en, bool data_autoload_en)
{
    Cache_Enable_ICache(CACHE_LL_L1_ICACHE_AUTOLOAD);
}

/**
 * @brief Suspend Cache. On ESP32C2, instructions and data share Cache
 * @brief Suspend Cache
 *
 * @param type see `cache_type_t`
 * @param cache_level level of the cache
 * @param type see `cache_type_t`
 * @param cache_id id of the cache in this type and level
 */
__attribute__((always_inline))
static inline void cache_ll_suspend_cache(cache_type_t type)
static inline void cache_ll_suspend_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
{
    Cache_Suspend_ICache();
}

/**
 * @brief Resume Cache. On ESP32C2, instructions and data share Cache
 * @brief Resume Cache
 *
 * @param type see `cache_type_t`
 *
 * @param data_autoload_en Dcache auto preload enabled
 *
 * @param inst_autoload_en Icache auto preload enabled
 * @param cache_level level of the cache
 * @param type see `cache_type_t`
 * @param cache_id id of the cache in this type and level
 * @param data_autoload_en data autoload enabled or not
 * @param inst_autoload_en inst autoload enabled or not
 */
__attribute__((always_inline))
static inline void cache_ll_resume_cache(cache_type_t type, bool inst_autoload_en, bool data_autoload_en)
static inline void cache_ll_resume_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id, bool inst_autoload_en, bool data_autoload_en)
{
    Cache_Resume_ICache(CACHE_LL_L1_ICACHE_AUTOLOAD);
}
@ -124,13 +135,16 @@ static inline bool cache_ll_is_cache_enabled(cache_type_t type)
/**
 * @brief Invalidate cache supported addr
 *
 * Invalidate a Cache item
 * Invalidate a cache item
 *
 * @param vaddr Start address of the region to be invalidated
 * @param size Size of the region to be invalidated
 * @param cache_level level of the cache
 * @param type see `cache_type_t`
 * @param cache_id id of the cache in this type and level
 * @param vaddr start address of the region to be invalidated
 * @param size size of the region to be invalidated
 */
__attribute__((always_inline))
static inline void cache_ll_invalidate_addr(uint32_t vaddr, uint32_t size)
static inline void cache_ll_invalidate_addr(uint32_t cache_level, cache_type_t type, uint32_t cache_id, uint32_t vaddr, uint32_t size)
{
    Cache_Invalidate_Addr(vaddr, size);
}
@ -138,12 +152,14 @@ static inline void cache_ll_invalidate_addr(uint32_t vaddr, uint32_t size)
/**
 * @brief Get Cache line size, in bytes
 *
 * @param type see `cache_type_t`
 * @param cache_level level of the cache
 * @param type see `cache_type_t`
 * @param cache_id id of the cache in this type and level
 *
 * @return Cache line size, in bytes
 */
__attribute__((always_inline))
static inline uint32_t cache_ll_get_line_size(cache_type_t type)
static inline uint32_t cache_ll_get_line_size(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
{
    uint32_t size = 0;
    size = Cache_Get_ICache_Line_Size();
@ -165,7 +181,7 @@ __attribute__((always_inline))
#endif
static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t vaddr_start, uint32_t len)
{
    HAL_ASSERT(cache_id == 0);
    HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
    cache_bus_mask_t mask = (cache_bus_mask_t)0;

    uint32_t vaddr_end = vaddr_start + len - 1;
@ -191,7 +207,7 @@ __attribute__((always_inline))
#endif
static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t mask)
{
    HAL_ASSERT(cache_id == 0);
    HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
    //On esp32c2, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
    HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2| CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);

@ -213,7 +229,7 @@ static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t ma
__attribute__((always_inline))
static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t mask)
{
    HAL_ASSERT(cache_id == 0);
    HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
    //On esp32c2, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
    HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2| CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);

@ -226,6 +242,33 @@ static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t m
    REG_SET_BIT(EXTMEM_ICACHE_CTRL1_REG, dbus_mask);
}

/**
 * @brief Get Cache level and the ID of the vaddr
 *
 * @param vaddr_start virtual address start
 * @param len vaddr length
 * @param out_level cache level
 * @param out_id cache id
 *
 * @return true for valid
 */
__attribute__((always_inline))
static inline bool cache_ll_vaddr_to_cache_level_id(uint32_t vaddr_start, uint32_t len, uint32_t *out_level, uint32_t *out_id)
{
    bool valid = false;
    uint32_t vaddr_end = vaddr_start + len - 1;

    valid |= (SOC_ADDRESS_IN_IRAM0_CACHE(vaddr_start) && SOC_ADDRESS_IN_IRAM0_CACHE(vaddr_end));
    valid |= (SOC_ADDRESS_IN_DRAM0_CACHE(vaddr_start) && SOC_ADDRESS_IN_DRAM0_CACHE(vaddr_end));

    if (valid) {
        *out_level = 1;
        *out_id = 0;
    }

    return valid;
}

/*------------------------------------------------------------------------------
 * Interrupt
 *----------------------------------------------------------------------------*/
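The new cache_ll_vaddr_to_cache_level_id() helper above pairs naturally with the widened LL signatures: a caller resolves the cache level and ID for an address range first, then forwards them. A minimal sketch of that pattern, assuming the ESP-IDF build environment; invalidate_region is a hypothetical wrapper, not part of this change:

#include <stdint.h>
#include <stdbool.h>
#include "hal/cache_ll.h"   // assumed include path for the LL layer shown above

// Hypothetical helper: invalidate a region only if the LL layer can map it to
// a concrete cache level and ID, as resolved by cache_ll_vaddr_to_cache_level_id().
static bool invalidate_region(uint32_t vaddr, uint32_t size)
{
    uint32_t level = 0;
    uint32_t id = 0;
    if (!cache_ll_vaddr_to_cache_level_id(vaddr, size, &level, &id)) {
        return false;   // address range is not covered by any cache
    }
    cache_ll_invalidate_addr(level, CACHE_TYPE_ALL, id, vaddr, size);
    return true;
}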
@ -36,18 +36,26 @@ extern "C" {
#define CACHE_LL_L1_ILG_EVENT_PRELOAD_OP_FAULT (1<<1)
#define CACHE_LL_L1_ILG_EVENT_SYNC_OP_FAULT (1<<0)

#define CACHE_LL_ID_ALL 1 //All of the caches in a type and level, make this value greater than any ID
#define CACHE_LL_LEVEL_INT_MEM 0 //Cache level for accessing internal mem
#define CACHE_LL_LEVEL_EXT_MEM 1 //Cache level for accessing external mem
#define CACHE_LL_LEVEL_ALL 2 //All of the cache levels, make this value greater than any level
#define CACHE_LL_LEVEL_NUMS 1 //Number of cache levels
#define CACHE_LL_L1_ICACHE_AUTOLOAD (1<<2)

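As the macro comments note, CACHE_LL_ID_ALL and CACHE_LL_LEVEL_ALL are deliberately larger than any concrete ID or level, so they act as wildcards. A minimal sketch of how a wrapper might interpret the ID wildcard, assuming the ESP-IDF build environment; suspend_icache_by_id is hypothetical:

#include <stdint.h>
#include "hal/cache_ll.h"   // assumed include for the LL API in this diff

// Hypothetical wrapper: CACHE_LL_ID_ALL means "every cache of this type and
// level", while values below it select a single cache.
static inline void suspend_icache_by_id(uint32_t cache_level, uint32_t cache_id)
{
    if (cache_id == CACHE_LL_ID_ALL) {
        for (uint32_t id = 0; id < CACHE_LL_ID_ALL; id++) {
            cache_ll_suspend_cache(cache_level, CACHE_TYPE_ALL, id);
        }
    } else {
        cache_ll_suspend_cache(cache_level, CACHE_TYPE_ALL, cache_id);
    }
}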
/**
|
||||
* @brief Check if Cache auto preload is enabled or not. On ESP32C3, instructions and data share Cache
|
||||
* @brief Check if Cache auto preload is enabled or not.
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*
|
||||
* @return true: enabled; false: disabled
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline bool cache_ll_is_cache_autoload_enabled(cache_type_t type)
|
||||
static inline bool cache_ll_is_cache_autoload_enabled(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
bool enabled = false;
|
||||
if (REG_GET_BIT(EXTMEM_ICACHE_AUTOLOAD_CTRL_REG, EXTMEM_ICACHE_AUTOLOAD_ENA)) {
|
||||
enabled = true;
|
||||
@ -56,54 +64,58 @@ static inline bool cache_ll_is_cache_autoload_enabled(cache_type_t type)
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Disable Cache. On ESP32C3, instructions and data share Cache
|
||||
* @brief Disable Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_disable_cache(cache_type_t type)
|
||||
static inline void cache_ll_disable_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
(void) type;
|
||||
Cache_Disable_ICache();
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Enable Cache. On ESP32C3, instructions and data share Cache
|
||||
* @brief Enable Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
*
|
||||
* @param data_autoload_en Dcache auto preload enabled
|
||||
*
|
||||
* @param inst_autoload_en Icache auto preload enabled
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
* @param data_autoload_en data autoload enabled or not
|
||||
* @param inst_autoload_en inst autoload enabled or not
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_enable_cache(cache_type_t type, bool inst_autoload_en, bool data_autoload_en)
|
||||
static inline void cache_ll_enable_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id, bool inst_autoload_en, bool data_autoload_en)
|
||||
{
|
||||
Cache_Enable_ICache(inst_autoload_en ? CACHE_LL_L1_ICACHE_AUTOLOAD : 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Suspend Cache. On ESP32C3, instructions and data share Cache
|
||||
* @brief Suspend Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_suspend_cache(cache_type_t type)
|
||||
static inline void cache_ll_suspend_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
Cache_Suspend_ICache();
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Resume Cache. On ESP32C3, instructions and data share Cache
|
||||
* @brief Resume Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
*
|
||||
* @param data_autoload_en Dcache auto preload enabled
|
||||
*
|
||||
* @param inst_autoload_en Icache auto preload enabled
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
* @param data_autoload_en data autoload enabled or not
|
||||
* @param inst_autoload_en inst autoload enabled or not
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_resume_cache(cache_type_t type, bool inst_autoload_en, bool data_autoload_en)
|
||||
static inline void cache_ll_resume_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id, bool inst_autoload_en, bool data_autoload_en)
|
||||
{
|
||||
Cache_Resume_ICache(inst_autoload_en ? CACHE_LL_L1_ICACHE_AUTOLOAD : 0);
|
||||
}
|
||||
@ -126,13 +138,16 @@ static inline bool cache_ll_is_cache_enabled(cache_type_t type)
|
||||
/**
|
||||
* @brief Invalidate cache supported addr
|
||||
*
|
||||
* Invalidate a Cache
|
||||
* Invalidate a cache item
|
||||
*
|
||||
* @param vaddr Start address of the region to be invalidated
|
||||
* @param size Size of the region to be invalidated
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
* @param vaddr start address of the region to be invalidated
|
||||
* @param size size of the region to be invalidated
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_invalidate_addr(uint32_t vaddr, uint32_t size)
|
||||
static inline void cache_ll_invalidate_addr(uint32_t cache_level, cache_type_t type, uint32_t cache_id, uint32_t vaddr, uint32_t size)
|
||||
{
|
||||
Cache_Invalidate_Addr(vaddr, size);
|
||||
}
|
||||
@ -140,12 +155,14 @@ static inline void cache_ll_invalidate_addr(uint32_t vaddr, uint32_t size)
|
||||
/**
|
||||
* @brief Get Cache line size, in bytes
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*
|
||||
* @return Cache line size, in bytes
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline uint32_t cache_ll_get_line_size(cache_type_t type)
|
||||
static inline uint32_t cache_ll_get_line_size(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
uint32_t size = 0;
|
||||
size = Cache_Get_ICache_Line_Size();
|
||||
@ -167,7 +184,7 @@ __attribute__((always_inline))
|
||||
#endif
|
||||
static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t vaddr_start, uint32_t len)
|
||||
{
|
||||
HAL_ASSERT(cache_id == 0);
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
cache_bus_mask_t mask = (cache_bus_mask_t)0;
|
||||
|
||||
uint32_t vaddr_end = vaddr_start + len - 1;
|
||||
@ -193,7 +210,7 @@ __attribute__((always_inline))
|
||||
#endif
|
||||
static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
{
|
||||
HAL_ASSERT(cache_id == 0);
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
//On esp32c3, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2| CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
|
||||
@ -215,7 +232,7 @@ static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t ma
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
{
|
||||
HAL_ASSERT(cache_id == 0);
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
//On esp32c3, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2| CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
|
||||
@ -228,6 +245,33 @@ static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t m
|
||||
REG_SET_BIT(EXTMEM_ICACHE_CTRL1_REG, dbus_mask);
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Get Cache level and the ID of the vaddr
|
||||
*
|
||||
* @param vaddr_start virtual address start
|
||||
* @param len vaddr length
|
||||
* @param out_level cache level
|
||||
* @param out_id cache id
|
||||
*
|
||||
* @return true for valid
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline bool cache_ll_vaddr_to_cache_level_id(uint32_t vaddr_start, uint32_t len, uint32_t *out_level, uint32_t *out_id)
|
||||
{
|
||||
bool valid = false;
|
||||
uint32_t vaddr_end = vaddr_start + len - 1;
|
||||
|
||||
valid |= (SOC_ADDRESS_IN_IRAM0_CACHE(vaddr_start) && SOC_ADDRESS_IN_IRAM0_CACHE(vaddr_end));
|
||||
valid |= (SOC_ADDRESS_IN_DRAM0_CACHE(vaddr_start) && SOC_ADDRESS_IN_DRAM0_CACHE(vaddr_end));
|
||||
|
||||
if (valid) {
|
||||
*out_level = 1;
|
||||
*out_id = 0;
|
||||
}
|
||||
|
||||
return valid;
|
||||
}
|
||||
|
||||
/*------------------------------------------------------------------------------
|
||||
* Interrupt
|
||||
*----------------------------------------------------------------------------*/
|
||||
|
@ -26,18 +26,26 @@ extern "C" {
|
||||
#define CACHE_LL_L1_ACCESS_EVENT_MASK (1<<4)
|
||||
#define CACHE_LL_L1_ACCESS_EVENT_CACHE_FAIL (1<<4)
|
||||
|
||||
#define CACHE_LL_ID_ALL 1 //All of the caches in a type and level, make this value greater than any ID
|
||||
#define CACHE_LL_LEVEL_INT_MEM 0 //Cache level for accessing internal mem
|
||||
#define CACHE_LL_LEVEL_EXT_MEM 1 //Cache level for accessing external mem
|
||||
#define CACHE_LL_LEVEL_ALL 2 //All of the cache levels, make this value greater than any level
|
||||
#define CACHE_LL_LEVEL_NUMS 1 //Number of cache levels
|
||||
#define CACHE_LL_L1_ICACHE_AUTOLOAD (1<<0)
|
||||
|
||||
/**
|
||||
* @brief Check if Cache auto preload is enabled or not. On ESP32C6, instructions and data share Cache
|
||||
* @brief Check if Cache auto preload is enabled or not.
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*
|
||||
* @return true: enabled; false: disabled
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline bool cache_ll_is_cache_autoload_enabled(cache_type_t type)
|
||||
static inline bool cache_ll_is_cache_autoload_enabled(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
bool enabled = false;
|
||||
if (REG_GET_BIT(EXTMEM_L1_CACHE_AUTOLOAD_CTRL_REG, EXTMEM_L1_CACHE_AUTOLOAD_ENA)) {
|
||||
enabled = true;
|
||||
@ -46,54 +54,58 @@ static inline bool cache_ll_is_cache_autoload_enabled(cache_type_t type)
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Disable Cache. On ESP32C6, instructions and data share Cache
|
||||
* @brief Disable Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_disable_cache(cache_type_t type)
|
||||
static inline void cache_ll_disable_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
(void) type;
|
||||
Cache_Disable_ICache();
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Enable Cache. On ESP32C6, instructions and data share Cache
|
||||
* @brief Enable Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
*
|
||||
* @param data_autoload_en Dcache auto preload enabled
|
||||
*
|
||||
* @param inst_autoload_en Icache auto preload enabled
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
* @param data_autoload_en data autoload enabled or not
|
||||
* @param inst_autoload_en inst autoload enabled or not
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_enable_cache(cache_type_t type, bool inst_autoload_en, bool data_autoload_en)
|
||||
static inline void cache_ll_enable_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id, bool inst_autoload_en, bool data_autoload_en)
|
||||
{
|
||||
Cache_Enable_ICache(inst_autoload_en ? CACHE_LL_L1_ICACHE_AUTOLOAD : 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Suspend Cache. On ESP32C6, instructions and data share Cache
|
||||
* @brief Suspend Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_suspend_cache(cache_type_t type)
|
||||
static inline void cache_ll_suspend_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
Cache_Suspend_ICache();
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Resume Cache. On ESP326, instructions and data share Cache
|
||||
* @brief Resume Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
*
|
||||
* @param data_autoload_en Dcache auto preload enabled
|
||||
*
|
||||
* @param inst_autoload_en Icache auto preload enabled
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
* @param data_autoload_en data autoload enabled or not
|
||||
* @param inst_autoload_en inst autoload enabled or not
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_resume_cache(cache_type_t type, bool inst_autoload_en, bool data_autoload_en)
|
||||
static inline void cache_ll_resume_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id, bool inst_autoload_en, bool data_autoload_en)
|
||||
{
|
||||
Cache_Resume_ICache(inst_autoload_en ? CACHE_LL_L1_ICACHE_AUTOLOAD : 0);
|
||||
}
|
||||
@ -101,48 +113,57 @@ static inline void cache_ll_resume_cache(cache_type_t type, bool inst_autoload_e
|
||||
/**
|
||||
* @brief Invalidate cache supported addr
|
||||
*
|
||||
* Invalidate a Cache item
|
||||
* Invalidate a cache item
|
||||
*
|
||||
* @param vaddr Start address of the region to be invalidated
|
||||
* @param size Size of the region to be invalidated
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
* @param vaddr start address of the region to be invalidated
|
||||
* @param size size of the region to be invalidated
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_invalidate_addr(uint32_t vaddr, uint32_t size)
|
||||
static inline void cache_ll_invalidate_addr(uint32_t cache_level, cache_type_t type, uint32_t cache_id, uint32_t vaddr, uint32_t size)
|
||||
{
|
||||
Cache_Invalidate_Addr(vaddr, size);
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Freeze Cache. On ESP32C6, instructions and data share Cache
|
||||
* @brief Freeze Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_freeze_cache(cache_type_t type)
|
||||
static inline void cache_ll_freeze_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
Cache_Freeze_ICache_Enable(CACHE_FREEZE_ACK_BUSY);
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Unfreeze Cache. On ESP32C6, instructions and data share Cache
|
||||
* @brief Unfreeze Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_unfreeze_cache(cache_type_t type)
|
||||
static inline void cache_ll_unfreeze_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
Cache_Freeze_ICache_Disable();
|
||||
}
|
||||
|
||||
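Freeze and unfreeze now also take the level/type/ID triplet. A minimal sketch of the intended call pattern, assuming the ESP-IDF build environment; run_with_cache_frozen is hypothetical, and the level constant comes from the macros defined earlier in this header:

#include <stdint.h>
#include "hal/cache_ll.h"   // assumed include for the LL API in this diff

// Hypothetical critical section: freeze the shared cache while code that must
// not trigger cache fills runs, then unfreeze it with the same level/type/ID.
static void run_with_cache_frozen(void (*fn)(void))
{
    cache_ll_freeze_cache(CACHE_LL_LEVEL_EXT_MEM, CACHE_TYPE_ALL, 0);
    fn();   // e.g. code that must not fetch from external memory meanwhile
    cache_ll_unfreeze_cache(CACHE_LL_LEVEL_EXT_MEM, CACHE_TYPE_ALL, 0);
}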
/**
|
||||
* @brief Get cache line size, in bytes
|
||||
* @brief Get Cache line size, in bytes
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*
|
||||
* @return cache line size, in bytes
|
||||
* @return Cache line size, in bytes
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline uint32_t cache_ll_get_line_size(cache_type_t type)
|
||||
static inline uint32_t cache_ll_get_line_size(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
uint32_t size = 0;
|
||||
size = Cache_Get_ICache_Line_Size();
|
||||
@ -164,7 +185,7 @@ __attribute__((always_inline))
|
||||
#endif
|
||||
static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t vaddr_start, uint32_t len)
|
||||
{
|
||||
HAL_ASSERT(cache_id == 0);
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
cache_bus_mask_t mask = (cache_bus_mask_t)0;
|
||||
|
||||
uint32_t vaddr_end = vaddr_start + len - 1;
|
||||
@ -189,7 +210,7 @@ __attribute__((always_inline))
|
||||
#endif
|
||||
static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
{
|
||||
HAL_ASSERT(cache_id == 0);
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
//On esp32c6, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
|
||||
@ -211,7 +232,7 @@ static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t ma
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
{
|
||||
HAL_ASSERT(cache_id == 0);
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
//On esp32c6, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
|
||||
@ -224,6 +245,33 @@ static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t m
|
||||
REG_SET_BIT(EXTMEM_L1_CACHE_CTRL_REG, dbus_mask);
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Get Cache level and the ID of the vaddr
|
||||
*
|
||||
* @param vaddr_start virtual address start
|
||||
* @param len vaddr length
|
||||
* @param out_level cache level
|
||||
* @param out_id cache id
|
||||
*
|
||||
* @return true for valid
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline bool cache_ll_vaddr_to_cache_level_id(uint32_t vaddr_start, uint32_t len, uint32_t *out_level, uint32_t *out_id)
|
||||
{
|
||||
bool valid = false;
|
||||
uint32_t vaddr_end = vaddr_start + len - 1;
|
||||
|
||||
valid |= (SOC_ADDRESS_IN_IRAM0_CACHE(vaddr_start) && SOC_ADDRESS_IN_IRAM0_CACHE(vaddr_end));
|
||||
valid |= (SOC_ADDRESS_IN_DRAM0_CACHE(vaddr_start) && SOC_ADDRESS_IN_DRAM0_CACHE(vaddr_end));
|
||||
|
||||
if (valid) {
|
||||
*out_level = 1;
|
||||
*out_id = 0;
|
||||
}
|
||||
|
||||
return valid;
|
||||
}
|
||||
|
||||
/*------------------------------------------------------------------------------
|
||||
* Interrupt
|
||||
*----------------------------------------------------------------------------*/
|
||||
|
@ -26,18 +26,26 @@ extern "C" {
|
||||
#define CACHE_LL_L1_ACCESS_EVENT_MASK (1<<4)
|
||||
#define CACHE_LL_L1_ACCESS_EVENT_CACHE_FAIL (1<<4)
|
||||
|
||||
#define CACHE_LL_ID_ALL 1 //All of the caches in a type and level, make this value greater than any ID
|
||||
#define CACHE_LL_LEVEL_INT_MEM 0 //Cache level for accessing internal mem
|
||||
#define CACHE_LL_LEVEL_EXT_MEM 1 //Cache level for accessing external mem
|
||||
#define CACHE_LL_LEVEL_ALL 2 //All of the cache levels, make this value greater than any level
|
||||
#define CACHE_LL_LEVEL_NUMS 1 //Number of cache levels
|
||||
#define CACHE_LL_L1_ICACHE_AUTOLOAD (1<<0)
|
||||
|
||||
/**
|
||||
* @brief Check if Cache auto preload is enabled or not. On ESP32h2, instructions and data share Cache
|
||||
* @brief Check if Cache auto preload is enabled or not.
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*
|
||||
* @return true: enabled; false: disabled
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline bool cache_ll_is_cache_autoload_enabled(cache_type_t type)
|
||||
static inline bool cache_ll_is_cache_autoload_enabled(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
bool enabled = false;
|
||||
if (REG_GET_BIT(CACHE_L1_CACHE_AUTOLOAD_CTRL_REG, CACHE_L1_CACHE_AUTOLOAD_ENA)) {
|
||||
enabled = true;
|
||||
@ -46,54 +54,58 @@ static inline bool cache_ll_is_cache_autoload_enabled(cache_type_t type)
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Disable Cache. On ESP32H2, instructions and data share Cache
|
||||
* @brief Disable Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_disable_cache(cache_type_t type)
|
||||
static inline void cache_ll_disable_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
(void) type;
|
||||
Cache_Disable_ICache();
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Enable Cache. On ESP32H2, instructions and data share Cache
|
||||
* @brief Enable Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
*
|
||||
* @param data_autoload_en Dcache auto preload enabled
|
||||
*
|
||||
* @param inst_autoload_en Icache auto preload enabled
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
* @param data_autoload_en data autoload enabled or not
|
||||
* @param inst_autoload_en inst autoload enabled or not
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_enable_cache(cache_type_t type, bool inst_autoload_en, bool data_autoload_en)
|
||||
static inline void cache_ll_enable_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id, bool inst_autoload_en, bool data_autoload_en)
|
||||
{
|
||||
Cache_Enable_ICache(inst_autoload_en ? CACHE_LL_L1_ICACHE_AUTOLOAD : 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Suspend Cache. On ESP32H2, instructions and data share Cache
|
||||
* @brief Suspend Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_suspend_cache(cache_type_t type)
|
||||
static inline void cache_ll_suspend_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
Cache_Suspend_ICache();
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Resume Cache. On ESP326, instructions and data share Cache
|
||||
* @brief Resume Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
*
|
||||
* @param data_autoload_en Dcache auto preload enabled
|
||||
*
|
||||
* @param inst_autoload_en Icache auto preload enabled
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
* @param data_autoload_en data autoload enabled or not
|
||||
* @param inst_autoload_en inst autoload enabled or not
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_resume_cache(cache_type_t type, bool inst_autoload_en, bool data_autoload_en)
|
||||
static inline void cache_ll_resume_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id, bool inst_autoload_en, bool data_autoload_en)
|
||||
{
|
||||
Cache_Resume_ICache(inst_autoload_en ? CACHE_LL_L1_ICACHE_AUTOLOAD : 0);
|
||||
}
|
||||
@ -101,48 +113,57 @@ static inline void cache_ll_resume_cache(cache_type_t type, bool inst_autoload_e
|
||||
/**
|
||||
* @brief Invalidate cache supported addr
|
||||
*
|
||||
* Invalidate a Cache item
|
||||
* Invalidate a cache item
|
||||
*
|
||||
* @param vaddr Start address of the region to be invalidated
|
||||
* @param size Size of the region to be invalidated
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
* @param vaddr start address of the region to be invalidated
|
||||
* @param size size of the region to be invalidated
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_invalidate_addr(uint32_t vaddr, uint32_t size)
|
||||
static inline void cache_ll_invalidate_addr(uint32_t cache_level, cache_type_t type, uint32_t cache_id, uint32_t vaddr, uint32_t size)
|
||||
{
|
||||
Cache_Invalidate_Addr(vaddr, size);
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Freeze Cache. On ESP32H2, instructions and data share Cache
|
||||
* @brief Freeze Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_freeze_cache(cache_type_t type)
|
||||
static inline void cache_ll_freeze_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
Cache_Freeze_ICache_Enable(CACHE_FREEZE_ACK_BUSY);
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Unfreeze Cache. On ESP32H2, instructions and data share Cache
|
||||
* @brief Unfreeze Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_unfreeze_cache(cache_type_t type)
|
||||
static inline void cache_ll_unfreeze_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
Cache_Freeze_ICache_Disable();
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Get cache line size, in bytes
|
||||
* @brief Get Cache line size, in bytes
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*
|
||||
* @return cache line size, in bytes
|
||||
* @return Cache line size, in bytes
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline uint32_t cache_ll_get_line_size(cache_type_t type)
|
||||
static inline uint32_t cache_ll_get_line_size(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
uint32_t size = 0;
|
||||
size = Cache_Get_ICache_Line_Size();
|
||||
@ -164,7 +185,7 @@ __attribute__((always_inline))
|
||||
#endif
|
||||
static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t vaddr_start, uint32_t len)
|
||||
{
|
||||
HAL_ASSERT(cache_id == 0);
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
cache_bus_mask_t mask = (cache_bus_mask_t)0;
|
||||
|
||||
uint32_t vaddr_end = vaddr_start + len - 1;
|
||||
@ -189,7 +210,7 @@ __attribute__((always_inline))
|
||||
#endif
|
||||
static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
{
|
||||
HAL_ASSERT(cache_id == 0);
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
//On esp32h2, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
|
||||
@ -211,7 +232,7 @@ static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t ma
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
{
|
||||
HAL_ASSERT(cache_id == 0);
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
//On esp32h2, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
|
||||
@ -224,6 +245,33 @@ static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t m
|
||||
REG_SET_BIT(CACHE_L1_CACHE_CTRL_REG, dbus_mask);
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Get Cache level and the ID of the vaddr
|
||||
*
|
||||
* @param vaddr_start virtual address start
|
||||
* @param len vaddr length
|
||||
* @param out_level cache level
|
||||
* @param out_id cache id
|
||||
*
|
||||
* @return true for valid
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline bool cache_ll_vaddr_to_cache_level_id(uint32_t vaddr_start, uint32_t len, uint32_t *out_level, uint32_t *out_id)
|
||||
{
|
||||
bool valid = false;
|
||||
uint32_t vaddr_end = vaddr_start + len - 1;
|
||||
|
||||
valid |= (SOC_ADDRESS_IN_IRAM0_CACHE(vaddr_start) && SOC_ADDRESS_IN_IRAM0_CACHE(vaddr_end));
|
||||
valid |= (SOC_ADDRESS_IN_DRAM0_CACHE(vaddr_start) && SOC_ADDRESS_IN_DRAM0_CACHE(vaddr_end));
|
||||
|
||||
if (valid) {
|
||||
*out_level = 1;
|
||||
*out_id = 0;
|
||||
}
|
||||
|
||||
return valid;
|
||||
}
|
||||
|
||||
/*------------------------------------------------------------------------------
|
||||
* Interrupt
|
||||
*----------------------------------------------------------------------------*/
|
||||
|
File diff suppressed because it is too large
@ -23,6 +23,11 @@ extern "C" {
|
||||
#define CACHE_LL_DEFAULT_IBUS_MASK CACHE_BUS_IBUS0
|
||||
#define CACHE_LL_DEFAULT_DBUS_MASK CACHE_BUS_IBUS2
|
||||
|
||||
#define CACHE_LL_ID_ALL 1 //All of the caches in a type and level, make this value greater than any ID
|
||||
#define CACHE_LL_LEVEL_INT_MEM 0 //Cache level for accessing internal mem
|
||||
#define CACHE_LL_LEVEL_EXT_MEM 1 //Cache level for accessing external mem
|
||||
#define CACHE_LL_LEVEL_ALL 2 //All of the cache levels, make this value greater than any level
|
||||
#define CACHE_LL_LEVEL_NUMS 1 //Number of cache levels
|
||||
#define CACHE_LL_L1_ICACHE_AUTOLOAD (1<<0)
|
||||
#define CACHE_LL_L1_DCACHE_AUTOLOAD (1<<0)
|
||||
|
||||
@ -57,15 +62,18 @@ static inline bool cache_ll_l1_is_dcache_autoload_enabled(void)
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Check if ICache or DCache auto preload is enabled or not
|
||||
* @brief Check if Cache auto preload is enabled or not.
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*
|
||||
* @return true: enabled; false: disabled
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline bool cache_ll_is_cache_autoload_enabled(cache_type_t type)
|
||||
static inline bool cache_ll_is_cache_autoload_enabled(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
bool enabled = false;
|
||||
switch (type)
|
||||
{
|
||||
@ -101,12 +109,14 @@ static inline void cache_ll_l1_disable_dcache(void)
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Disable ICache or DCache or both
|
||||
* @brief Disable Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_disable_cache(cache_type_t type)
|
||||
static inline void cache_ll_disable_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
switch (type)
|
||||
{
|
||||
@ -146,16 +156,16 @@ static inline void cache_ll_l1_enable_dcache(bool data_autoload_en)
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Enable ICache or DCache or both
|
||||
* @brief Enable Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
*
|
||||
* @param data_autoload_en Dcache auto preload enabled
|
||||
*
|
||||
* @param inst_autoload_en Icache auto preload enabled
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
* @param data_autoload_en data autoload enabled or not
|
||||
* @param inst_autoload_en inst autoload enabled or not
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_enable_cache(cache_type_t type, bool inst_autoload_en, bool data_autoload_en)
|
||||
static inline void cache_ll_enable_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id, bool inst_autoload_en, bool data_autoload_en)
|
||||
{
|
||||
switch (type)
|
||||
{
|
||||
@ -191,12 +201,14 @@ static inline void cache_ll_l1_suspend_dcache(void)
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Suspend ICache or DCache or both
|
||||
* @brief Suspend Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_suspend_cache(cache_type_t type)
|
||||
static inline void cache_ll_suspend_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
switch (type)
|
||||
{
|
||||
@ -236,16 +248,16 @@ static inline void cache_ll_l1_resume_dcache(bool data_autoload_en)
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Resume ICache or DCache or both
|
||||
* @brief Resume Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
*
|
||||
* @param data_autoload_en Dcache auto preload enabled
|
||||
*
|
||||
* @param inst_autoload_en Icache auto preload enabled
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
* @param data_autoload_en data autoload enabled or not
|
||||
* @param inst_autoload_en inst autoload enabled or not
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_resume_cache(cache_type_t type, bool inst_autoload_en, bool data_autoload_en)
|
||||
static inline void cache_ll_resume_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id, bool inst_autoload_en, bool data_autoload_en)
|
||||
{
|
||||
switch (type)
|
||||
{
|
||||
@ -271,7 +283,7 @@ static inline void cache_ll_resume_cache(cache_type_t type, bool inst_autoload_e
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline bool cache_ll_l1_is_icache_enabled(uint32_t cache_id){
|
||||
HAL_ASSERT(cache_id == 0);
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
|
||||
bool enabled;
|
||||
enabled = REG_GET_BIT(EXTMEM_PRO_ICACHE_CTRL_REG, EXTMEM_PRO_ICACHE_ENABLE);
|
||||
@ -288,7 +300,7 @@ static inline bool cache_ll_l1_is_icache_enabled(uint32_t cache_id){
|
||||
__attribute__((always_inline))
|
||||
static inline bool cache_ll_l1_is_dcache_enabled(uint32_t cache_id)
|
||||
{
|
||||
HAL_ASSERT(cache_id == 0);
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
|
||||
bool enabled;
|
||||
enabled = REG_GET_BIT(EXTMEM_PRO_DCACHE_CTRL_REG, EXTMEM_PRO_DCACHE_ENABLE);
|
||||
@ -324,13 +336,16 @@ static inline bool cache_ll_is_cache_enabled(cache_type_t type)
|
||||
/**
|
||||
* @brief Invalidate cache supported addr
|
||||
*
|
||||
* Invalidate a Cache item for either ICache or DCache.
|
||||
* Invalidate a cache item
|
||||
*
|
||||
* @param vaddr Start address of the region to be invalidated
|
||||
* @param size Size of the region to be invalidated
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
* @param vaddr start address of the region to be invalidated
|
||||
* @param size size of the region to be invalidated
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_invalidate_addr(uint32_t vaddr, uint32_t size)
|
||||
static inline void cache_ll_invalidate_addr(uint32_t cache_level, cache_type_t type, uint32_t cache_id, uint32_t vaddr, uint32_t size)
|
||||
{
|
||||
Cache_Invalidate_Addr(vaddr, size);
|
||||
}
|
||||
@ -338,13 +353,16 @@ static inline void cache_ll_invalidate_addr(uint32_t vaddr, uint32_t size)
|
||||
/**
|
||||
* @brief Writeback cache supported addr
|
||||
*
|
||||
* Writeback the DCache item to external memory
|
||||
* Writeback a cache item
|
||||
*
|
||||
* @param vaddr Start address of the region to writeback
|
||||
* @param size Size of the region to writeback
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
* @param vaddr start address of the region to be written back
|
||||
* @param size size of the region to be written back
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_writeback_addr(uint32_t vaddr, uint32_t size)
|
||||
static inline void cache_ll_writeback_addr(uint32_t cache_level, cache_type_t type, uint32_t cache_id, uint32_t vaddr, uint32_t size)
|
||||
{
|
||||
Cache_WriteBack_Addr(vaddr, size);
|
||||
}
|
||||
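On targets with a write-back DCache (ESP32-S2 here), writeback and invalidate are typically paired around DMA-style transfers. A minimal sketch with the new parameter order, assuming the ESP-IDF build environment; sync_buffers_for_dma is hypothetical and CACHE_TYPE_DATA is taken from cache_type_t:

#include <stdint.h>
#include "hal/cache_ll.h"   // assumed include for the LL API in this diff

// Hypothetical DMA-style sequence: push dirty lines of a TX buffer out to
// external memory before a peripheral reads it, and drop stale lines of an
// RX buffer before the CPU reads what the peripheral wrote.
static void sync_buffers_for_dma(uint32_t tx_vaddr, uint32_t tx_size,
                                 uint32_t rx_vaddr, uint32_t rx_size)
{
    cache_ll_writeback_addr(CACHE_LL_LEVEL_EXT_MEM, CACHE_TYPE_DATA, 0, tx_vaddr, tx_size);
    cache_ll_invalidate_addr(CACHE_LL_LEVEL_EXT_MEM, CACHE_TYPE_DATA, 0, rx_vaddr, rx_size);
}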
@ -376,14 +394,16 @@ static inline uint32_t cache_ll_l1_dcache_get_line_size(void)
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Get ICache or DCache line size, in bytes
|
||||
* @brief Get Cache line size, in bytes
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*
|
||||
* @return ICache/DCache line size, in bytes
|
||||
* @return Cache line size, in bytes
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline uint32_t cache_ll_get_line_size(cache_type_t type)
|
||||
static inline uint32_t cache_ll_get_line_size(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
uint32_t size = 0;
|
||||
switch (type)
|
||||
@ -504,6 +524,32 @@ static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t m
|
||||
REG_SET_BIT(EXTMEM_PRO_DCACHE_CTRL1_REG, dbus_mask);
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Get Cache level and the ID of the vaddr
|
||||
*
|
||||
* @param vaddr_start virtual address start
|
||||
* @param len vaddr length
|
||||
* @param out_level cache level
|
||||
* @param out_id cache id
|
||||
*
|
||||
* @return true for valid
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline bool cache_ll_vaddr_to_cache_level_id(uint32_t vaddr_start, uint32_t len, uint32_t *out_level, uint32_t *out_id)
|
||||
{
|
||||
bool valid = false;
|
||||
uint32_t vaddr_end = vaddr_start + len - 1;
|
||||
|
||||
valid |= ((vaddr_start >= SOC_DROM0_ADDRESS_LOW) && (vaddr_end < SOC_DROM0_ADDRESS_HIGH)) || ((vaddr_start >= SOC_DPORT_CACHE_ADDRESS_LOW) && (vaddr_end < SOC_DRAM0_CACHE_ADDRESS_HIGH));
|
||||
valid |= ((vaddr_start >= SOC_IRAM0_CACHE_ADDRESS_LOW) && (vaddr_end < SOC_IRAM1_ADDRESS_HIGH));
|
||||
|
||||
if (valid) {
|
||||
*out_level = 1;
|
||||
*out_id = 0;
|
||||
}
|
||||
|
||||
return valid;
|
||||
}
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
|
@ -38,6 +38,11 @@ extern "C" {
|
||||
#define CACHE_LL_L1_ILG_EVENT_ICACHE_PRELOAD_OP_FAULT (1<<1)
|
||||
#define CACHE_LL_L1_ILG_EVENT_ICACHE_SYNC_OP_FAULT (1<<0)
|
||||
|
||||
#define CACHE_LL_ID_ALL 2 //All of the caches in a type and level, make this value greater than any id
|
||||
#define CACHE_LL_LEVEL_INT_MEM 0 //Cache level for accessing internal mem
|
||||
#define CACHE_LL_LEVEL_EXT_MEM 1 //Cache level for accessing external mem
|
||||
#define CACHE_LL_LEVEL_ALL 2 //All of the cache levels, make this value greater than any level
|
||||
#define CACHE_LL_LEVEL_NUMS 1 //Number of cache levels
|
||||
#define CACHE_LL_L1_ICACHE_AUTOLOAD (1<<2)
|
||||
#define CACHE_LL_L1_DCACHE_AUTOLOAD (1<<2)
|
||||
|
||||
@ -72,15 +77,18 @@ static inline bool cache_ll_l1_is_dcache_autoload_enabled(void)
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Check if ICache or DCache auto preload is enabled or not
|
||||
* @brief Check if Cache auto preload is enabled or not.
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*
|
||||
* @return true: enabled; false: disabled
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline bool cache_ll_is_cache_autoload_enabled(cache_type_t type)
|
||||
static inline bool cache_ll_is_cache_autoload_enabled(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
bool enabled = false;
|
||||
switch (type)
|
||||
{
|
||||
@ -116,12 +124,14 @@ static inline void cache_ll_l1_disable_dcache(void)
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Disable ICache or DCache or both
|
||||
* @brief Disable Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_disable_cache(cache_type_t type)
|
||||
static inline void cache_ll_disable_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
switch (type)
|
||||
{
|
||||
@ -161,16 +171,16 @@ static inline void cache_ll_l1_enable_dcache(bool data_autoload_en)
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Enable ICache or DCache or both
|
||||
* @brief Enable Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
*
|
||||
* @param data_autoload_en Dcache auto preload enabled
|
||||
*
|
||||
* @param inst_autoload_en Icache auto preload enabled
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
* @param data_autoload_en data autoload enabled or not
|
||||
* @param inst_autoload_en inst autoload enabled or not
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_enable_cache(cache_type_t type, bool inst_autoload_en, bool data_autoload_en)
|
||||
static inline void cache_ll_enable_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id, bool inst_autoload_en, bool data_autoload_en)
|
||||
{
|
||||
switch (type)
|
||||
{
|
||||
@ -206,12 +216,14 @@ static inline void cache_ll_l1_suspend_dcache(void)
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Suspend ICache or DCache or both
|
||||
* @brief Suspend Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_suspend_cache(cache_type_t type)
|
||||
static inline void cache_ll_suspend_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
switch (type)
|
||||
{
|
||||
@ -251,16 +263,16 @@ static inline void cache_ll_l1_resume_dcache(bool data_autoload_en)
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Resume ICache or DCache or both
|
||||
* @brief Resume Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
*
|
||||
* @param data_autoload_en Dcache auto preload enabled
|
||||
*
|
||||
* @param inst_autoload_en Icache auto preload enabled
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
* @param data_autoload_en data autoload enabled or not
|
||||
* @param inst_autoload_en inst autoload enabled or not
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_resume_cache(cache_type_t type, bool inst_autoload_en, bool data_autoload_en)
|
||||
static inline void cache_ll_resume_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id, bool inst_autoload_en, bool data_autoload_en)
|
||||
{
|
||||
switch (type)
|
||||
{
|
||||
@ -287,7 +299,7 @@ static inline void cache_ll_resume_cache(cache_type_t type, bool inst_autoload_e
|
||||
__attribute__((always_inline))
|
||||
static inline bool cache_ll_l1_is_icache_enabled(uint32_t cache_id)
|
||||
{
|
||||
HAL_ASSERT(cache_id == 0 || cache_id == 1);
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
return REG_GET_BIT(EXTMEM_ICACHE_CTRL_REG, EXTMEM_ICACHE_ENABLE);
|
||||
}
|
||||
|
||||
@ -301,7 +313,7 @@ static inline bool cache_ll_l1_is_icache_enabled(uint32_t cache_id)
|
||||
__attribute__((always_inline))
|
||||
static inline bool cache_ll_l1_is_dcache_enabled(uint32_t cache_id)
|
||||
{
|
||||
HAL_ASSERT(cache_id == 0 || cache_id == 1);
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
return REG_GET_BIT(EXTMEM_DCACHE_CTRL_REG, EXTMEM_DCACHE_ENABLE);
|
||||
}
|
||||
|
||||
@ -334,13 +346,16 @@ static inline bool cache_ll_is_cache_enabled(cache_type_t type)
|
||||
/**
|
||||
* @brief Invalidate cache supported addr
|
||||
*
|
||||
* Invalidate a Cache item for either ICache or DCache.
|
||||
* Invalidate a cache item
|
||||
*
|
||||
* @param vaddr Start address of the region to be invalidated
|
||||
* @param size Size of the region to be invalidated
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
* @param vaddr start address of the region to be invalidated
|
||||
* @param size size of the region to be invalidated
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_invalidate_addr(uint32_t vaddr, uint32_t size)
|
||||
static inline void cache_ll_invalidate_addr(uint32_t cache_level, cache_type_t type, uint32_t cache_id, uint32_t vaddr, uint32_t size)
|
||||
{
|
||||
Cache_Invalidate_Addr(vaddr, size);
|
||||
}
|
||||
@ -348,13 +363,16 @@ static inline void cache_ll_invalidate_addr(uint32_t vaddr, uint32_t size)
|
||||
/**
|
||||
* @brief Writeback cache supported addr
|
||||
*
|
||||
* Writeback the DCache item to external memory
|
||||
* Writeback a cache item
|
||||
*
|
||||
* @param vaddr Start address of the region to writeback
|
||||
* @param size Size of the region to writeback
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
* @param vaddr start address of the region to be written back
|
||||
* @param size size of the region to be written back
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_writeback_addr(uint32_t vaddr, uint32_t size)
|
||||
static inline void cache_ll_writeback_addr(uint32_t cache_level, cache_type_t type, uint32_t cache_id, uint32_t vaddr, uint32_t size)
|
||||
{
|
||||
Cache_WriteBack_Addr(vaddr, size);
|
||||
}
|
||||
@ -378,12 +396,14 @@ static inline void cache_ll_l1_freeze_dcache(void)
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Freeze ICache or DCache or both
|
||||
* @brief Freeze Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_freeze_cache(cache_type_t type)
|
||||
static inline void cache_ll_freeze_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
switch (type)
|
||||
{
|
||||
@ -419,12 +439,14 @@ static inline void cache_ll_l1_unfreeze_dcache(void)
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Unfreeze ICache or DCache or both
|
||||
* @brief Unfreeze Cache
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_unfreeze_cache(cache_type_t type)
|
||||
static inline void cache_ll_unfreeze_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
switch (type)
|
||||
{
|
||||
@ -468,14 +490,16 @@ static inline uint32_t cache_ll_l1_dcache_get_line_size(void)
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Get ICache or DCache line size, in bytes
|
||||
* @brief Get Cache line size, in bytes
|
||||
*
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*
|
||||
* @return ICache/DCache line size, in bytes
|
||||
* @return Cache line size, in bytes
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline uint32_t cache_ll_get_line_size(cache_type_t type)
|
||||
static inline uint32_t cache_ll_get_line_size(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
uint32_t size = 0;
|
||||
switch (type)
|
||||
@ -508,7 +532,7 @@ __attribute__((always_inline))
|
||||
#endif
|
||||
static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t vaddr_start, uint32_t len)
|
||||
{
|
||||
HAL_ASSERT(cache_id == 0 || cache_id == 1);
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
|
||||
cache_bus_mask_t mask = (cache_bus_mask_t)0;
|
||||
uint32_t vaddr_end = vaddr_start + len - 1;
|
||||
@ -534,7 +558,7 @@ __attribute__((always_inline))
|
||||
#endif
|
||||
static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
{
|
||||
HAL_ASSERT(cache_id == 0 || cache_id == 1);
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
//On esp32s3, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2| CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
|
||||
@ -566,7 +590,7 @@ __attribute__((always_inline))
|
||||
static inline cache_bus_mask_t cache_ll_l1_get_enabled_bus(uint32_t cache_id)
|
||||
{
|
||||
cache_bus_mask_t mask = (cache_bus_mask_t)0;
|
||||
HAL_ASSERT(cache_id == 0 || cache_id == 1);
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
//On esp32s3, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
|
||||
uint32_t ibus_mask = REG_READ(EXTMEM_ICACHE_CTRL1_REG);
|
||||
@ -595,7 +619,7 @@ static inline cache_bus_mask_t cache_ll_l1_get_enabled_bus(uint32_t cache_id)
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
{
|
||||
HAL_ASSERT(cache_id == 0 || cache_id == 1);
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
//On esp32s3, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2| CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
|
||||
@ -616,6 +640,33 @@ static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t m
|
||||
REG_SET_BIT(EXTMEM_DCACHE_CTRL1_REG, dbus_mask);
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Get Cache level and the ID of the vaddr
|
||||
*
|
||||
* @param vaddr_start virtual address start
|
||||
* @param len vaddr length
|
||||
* @param out_level cache level
|
||||
* @param out_id cache id
|
||||
*
|
||||
* @return true for valid
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline bool cache_ll_vaddr_to_cache_level_id(uint32_t vaddr_start, uint32_t len, uint32_t *out_level, uint32_t *out_id)
|
||||
{
|
||||
bool valid = false;
|
||||
uint32_t vaddr_end = vaddr_start + len - 1;
|
||||
|
||||
valid |= (SOC_ADDRESS_IN_IRAM0_CACHE(vaddr_start) && SOC_ADDRESS_IN_IRAM0_CACHE(vaddr_end));
|
||||
valid |= (SOC_ADDRESS_IN_DRAM0_CACHE(vaddr_start) && SOC_ADDRESS_IN_DRAM0_CACHE(vaddr_end));
|
||||
|
||||
if (valid) {
|
||||
*out_level = 1;
|
||||
*out_id = 0;
|
||||
}
|
||||
|
||||
return valid;
|
||||
}
|
||||
|
||||
/*------------------------------------------------------------------------------
|
||||
* Interrupt
|
||||
*----------------------------------------------------------------------------*/
|
||||
|
@ -1,6 +1,6 @@

/*
* SPDX-FileCopyrightText: 2021-2022 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2021-2023 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
@ -21,105 +21,128 @@ extern "C" {
void cache_hal_init(void);

/**
* @brief Disable cache
* @brief Disable Cache
*
* Disable the ICache or DCache or both, all the items in the corresponding Cache(s) will be invalidated.
* Next request to these items will trigger a transaction to the external memory (flash / psram)
* Disable the ICache or DCache or both, of a certain level or all levels.
* All the items in the corresponding Cache(s) will be invalidated.
* Next request to these items will trigger a transaction to the physical memory
*
* @note If the autoload feature is enabled, this API will not return until the ICache autoload is disabled.
*
* @param type see `cache_type_t`
* @param type see `cache_type_t`
* @param cache_level Level of the Cache(s)
*/
void cache_hal_disable(cache_type_t type);
void cache_hal_disable(cache_type_t type, uint32_t cache_level);

/**
* @brief Enable cache
* @brief Enable Cache
*
* Enable the ICache or DCache or both.
* Enable the ICache or DCache or both, of a certain level or all levels.
*
* @param type see `cache_type_t`
* @param type see `cache_type_t`
* @param cache_level Level of the Cache(s)
*/
void cache_hal_enable(cache_type_t type);
void cache_hal_enable(cache_type_t type, uint32_t cache_level);
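With the extra cache_level argument, callers state explicitly which level(s) in front of external memory they are turning off. A minimal usage sketch, not part of the diff, assuming the declarations above (hal/cache_hal.h) and the CACHE_LL_LEVEL_EXT_MEM macro used elsewhere in this commit (hal/cache_ll.h); example_run_with_cache_disabled is a hypothetical helper:

    #include "hal/cache_hal.h"
    #include "hal/cache_ll.h"   // assumed source of CACHE_LL_LEVEL_EXT_MEM

    // Hypothetical helper: run a raw flash/PSRAM operation with all cache
    // levels towards external memory disabled, then turn them back on.
    static void example_run_with_cache_disabled(void (*raw_op)(void))
    {
        cache_hal_disable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM); // also invalidates cached items
        raw_op();                                                  // must execute from internal RAM
        cache_hal_enable(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
    }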
/**
* @brief Suspend cache
* @brief Suspend Cache
*
* Suspend the ICache or DCache or both, suspends the CPU access to cache for a while, without invalidation.
* Suspend the ICache or DCache or both, of a certain level or all levels.
* This API suspends the CPU access to cache for a while, without invalidation.
*
* @param type see `cache_type_t`
*
* @return Current status of corresponding Cache(s)
* @param type see `cache_type_t`
* @param cache_level Level of the Cache(s)
*/
void cache_hal_suspend(cache_type_t type);
void cache_hal_suspend(cache_type_t type, uint32_t cache_level);

/**
* @brief Resume cache
* @brief Resume Cache
*
* Resume the ICache or DCache or both.
* Resume the ICache or DCache or both, of a certain level or all levels.
*
* @param type see `cache_type_t`
* @param type see `cache_type_t`
* @param cache_level Level of the Cache(s)
*/
void cache_hal_resume(cache_type_t type);
void cache_hal_resume(cache_type_t type, uint32_t cache_level);
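Suspend/resume is the lighter pair: cached contents stay valid, only CPU access through the cache is paused. A sketch of the usual wrap-around pattern, under the same header assumptions as above; example_with_cache_suspended is hypothetical:

    // Pause cache traffic around a short operation that must not go through
    // the cache (e.g. while the flash chip is busy), without losing contents.
    static void example_with_cache_suspended(void (*op_in_internal_ram)(void))
    {
        cache_hal_suspend(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
        op_in_internal_ram();
        cache_hal_resume(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
    }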
/**
* @brief Check if corresponding cache is enabled or not
*
* @param type see `cache_type_t`
* @param type see `cache_type_t`
* @param cache_level Level of the Cache(s)
*
* @return true: enabled; false: disabled
*/
bool cache_hal_is_cache_enabled(cache_type_t type);
bool cache_hal_is_cache_enabled(cache_type_t type, uint32_t cache_level);
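A one-line check before touching flash-resident code or data, sketched under the same assumptions; the helper name is hypothetical:

    // True only when all cache levels towards external memory are enabled,
    // i.e. code/rodata living in flash or PSRAM is reachable.
    static inline bool example_flash_contents_reachable(void)
    {
        return cache_hal_is_cache_enabled(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
    }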
/**
* @brief Invalidate cache supported addr
* @brief Invalidate Cache supported addr
*
* Invalidate a Cache item for either ICache or DCache.
*
* @param vaddr Start address of the region to be invalidated
* @param size Size of the region to be invalidated
*
* @return True for valid address. No operation if invalid
*/
void cache_hal_invalidate_addr(uint32_t vaddr, uint32_t size);
bool cache_hal_invalidate_addr(uint32_t vaddr, uint32_t size);
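The return type change from void to bool lets callers see when an address range is not covered by any cache (in which case nothing is done). A sketch; example_invalidate_buffer is hypothetical:

    // Invalidate the cache lines covering a buffer; returns false if the
    // address range is not cache-supported (no operation performed).
    static bool example_invalidate_buffer(const void *buf, uint32_t len)
    {
        return cache_hal_invalidate_addr((uint32_t)buf, len);
    }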
#if SOC_CACHE_WRITEBACK_SUPPORTED
/**
* @brief Writeback cache supported addr
* @brief Writeback Cache supported addr
*
* Writeback the DCache item to external memory
*
* @param vaddr Start address of the region to writeback
* @param size Size of the region to writeback
*
* @return True for valid address. No operation if invalid
*/
void cache_hal_writeback_addr(uint32_t vaddr, uint32_t size);
bool cache_hal_writeback_addr(uint32_t vaddr, uint32_t size);
#endif //#if SOC_CACHE_WRITEBACK_SUPPORTED
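On write-back capable targets, a typical (illustrative) use is flushing CPU-written data out to external memory before another master reads it; the helper below is hypothetical:

    #if SOC_CACHE_WRITEBACK_SUPPORTED
    // Push dirty DCache lines of a buffer back to external memory, e.g.
    // before handing the buffer to a DMA peripheral.
    static bool example_flush_buffer_for_peripheral(const void *buf, uint32_t len)
    {
        return cache_hal_writeback_addr((uint32_t)buf, len);
    }
    #endif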
#if SOC_CACHE_FREEZE_SUPPORTED
/**
* @brief Freeze cache
* @brief Freeze Cache
*
* Freeze cache, CPU access to cache will be suspended, until the cache is unfrozen.
*
* @param type see `cache_type_t`
* @param type see `cache_type_t`
* @param cache_level Level of the Cache(s)
*/
void cache_hal_freeze(cache_type_t type);
void cache_hal_freeze(cache_type_t type, uint32_t cache_level);

/**
* @brief Unfreeze cache
*
* Unfreeze cache, CPU access to cache will be restored
*
* @param type see `cache_type_t`
* @param type see `cache_type_t`
* @param cache_level Level of the Cache(s)
*/
void cache_hal_unfreeze(cache_type_t type);
void cache_hal_unfreeze(cache_type_t type, uint32_t cache_level);
#endif //#if SOC_CACHE_FREEZE_SUPPORTED
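Freeze goes further than suspend in that pending cache requests are held off until unfreeze is called. A sketch gated on the same capability macro; passing CACHE_LL_LEVEL_EXT_MEM as the level is an assumption here, and example_freeze_window is hypothetical:

    #if SOC_CACHE_FREEZE_SUPPORTED
    static void example_freeze_window(void (*op_in_internal_ram)(void))
    {
        cache_hal_freeze(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);   // hold off CPU cache accesses
        op_in_internal_ram();                                       // must not touch flash/PSRAM
        cache_hal_unfreeze(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM); // restore access
    }
    #endif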
/**
* @brief Get cache line size, in bytes
*
* @param type see `cache_type_t`
*
* @param type see `cache_type_t`
* @param cache_level Level of the Cache(s)
*
* @return cache line size, in bytes
*/
uint32_t cache_hal_get_cache_line_size(cache_type_t type);
uint32_t cache_hal_get_cache_line_size(cache_type_t type, uint32_t cache_level);
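Since invalidate/writeback operate on whole lines, callers typically round sizes up to the line size. A sketch; CACHE_TYPE_DATA as the cache type and 1 as the level value are assumptions, and the helper is hypothetical:

    // Round a transfer length up to a whole number of L1 DCache lines.
    static uint32_t example_align_up_to_cache_line(uint32_t len)
    {
        uint32_t line = cache_hal_get_cache_line_size(CACHE_TYPE_DATA, 1);
        return (len + line - 1) & ~(line - 1);   // line size is a power of two
    }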
/**
* @brief Get Cache level and the ID of the vaddr
*
* @param vaddr_start virtual address start
* @param len vaddr length
* @param out_level cache level
* @param out_id cache id
*
* @return true for valid, false for invalid addr or null pointer
*/
bool cache_hal_vaddr_to_cache_level_id(uint32_t vaddr_start, uint32_t len, uint32_t *out_level, uint32_t *out_id);

#ifdef __cplusplus
}
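The lookup lets generic code first check which cache (level and ID) actually fronts an address range before operating on it. A sketch; example_sync_region is hypothetical:

    static void example_sync_region(uint32_t vaddr, uint32_t len)
    {
        uint32_t level = 0;
        uint32_t id = 0;
        if (cache_hal_vaddr_to_cache_level_id(vaddr, len, &level, &id)) {
            // Range is cacheable: level/id identify the cache in front of it
            cache_hal_invalidate_addr(vaddr, len);
        }
    }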
@ -20,11 +20,6 @@ void mmu_hal_init(void)
ROM_Boot_Cache_Init();
#endif

//TODO: IDF-7516
#if CONFIG_IDF_TARGET_ESP32P4
Cache_Invalidate_All(CACHE_MAP_L2_CACHE);
#endif

mmu_ll_set_page_size(0, CONFIG_MMU_PAGE_SIZE);
mmu_hal_unmap_all();
}
@ -195,11 +195,15 @@ config SOC_SHARED_IDCACHE_SUPPORTED
bool
default y

config SOC_CACHE_WRITEBACK_SUPPORTED
bool
default y

config SOC_CACHE_FREEZE_SUPPORTED
bool
default y

config SOC_CACHE_L2_SUPPORTED
config SOC_CACHE_INTERNAL_MEM_VIA_L1CACHE
bool
default y
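This Kconfig cap mirrors the SOC_CACHE_INTERNAL_MEM_VIA_L1CACHE define added to soc_caps.h further down, so code can be gated on targets where even internal memory is reached through the L1 cache. Illustrative guard only; example_prepare_internal_buf is hypothetical:

    static void example_prepare_internal_buf(void *internal_buf, uint32_t len)
    {
    #if SOC_CACHE_INTERNAL_MEM_VIA_L1CACHE
        // Internal RAM is cached too on these targets, so invalidate the
        // range before another master writes into it.
        cache_hal_invalidate_addr((uint32_t)internal_buf, len);
    #else
        (void)internal_buf;
        (void)len;
    #endif
    }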
@ -19,6 +19,11 @@ extern "C" {
#define SOC_MMU_PAGE_SIZE 0x10000
#endif

#define SOC_IRAM0_ADDRESS_LOW 0x4ff00000
#define SOC_IRAM0_ADDRESS_HIGH 0x4ffc0000

#define SOC_DRAM0_ADDRESS_LOW SOC_IRAM0_ADDRESS_LOW
#define SOC_DRAM0_ADDRESS_HIGH SOC_IRAM0_ADDRESS_HIGH

#define SOC_IRAM0_CACHE_ADDRESS_LOW 0x40000000
#define SOC_IRAM0_CACHE_ADDRESS_HIGH 0x50000000
@ -130,8 +130,9 @@

/*-------------------------- CACHE CAPS --------------------------------------*/
#define SOC_SHARED_IDCACHE_SUPPORTED 1 //Shared Cache for both instructions and data
#define SOC_CACHE_WRITEBACK_SUPPORTED 1
#define SOC_CACHE_FREEZE_SUPPORTED 1
#define SOC_CACHE_L2_SUPPORTED 1
#define SOC_CACHE_INTERNAL_MEM_VIA_L1CACHE 1

/*-------------------------- CPU CAPS ----------------------------------------*/
#define SOC_CPU_CORES_NUM (2U)
@ -360,17 +360,17 @@ void IRAM_ATTR spi_flash_enable_cache(uint32_t cpuid)

void IRAM_ATTR spi_flash_disable_cache(uint32_t cpuid, uint32_t *saved_state)
{
cache_hal_suspend(CACHE_TYPE_ALL);
cache_hal_suspend(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
}

void IRAM_ATTR spi_flash_restore_cache(uint32_t cpuid, uint32_t saved_state)
{
cache_hal_resume(CACHE_TYPE_ALL);
cache_hal_resume(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
}

bool IRAM_ATTR spi_flash_cache_enabled(void)
{
return cache_hal_is_cache_enabled(CACHE_TYPE_ALL);
return cache_hal_is_cache_enabled(CACHE_TYPE_ALL, CACHE_LL_LEVEL_EXT_MEM);
}

#if CONFIG_IDF_TARGET_ESP32S2
@ -918,15 +918,15 @@ void esp_config_l2_cache_mode(void)
{
cache_size_t cache_size;
cache_line_size_t cache_line_size;
#if CONFIG_ESP32P4_L2_CACHE_128KB
#if CONFIG_CACHE_L2_CACHE_128KB
cache_size = CACHE_SIZE_128K;
#elif CONFIG_ESP32P4_L2_CACHE_256KB
#elif CONFIG_CACHE_L2_CACHE_256KB
cache_size = CACHE_SIZE_256K;
#else
cache_size = CACHE_SIZE_512K;
#endif

#if CONFIG_ESP32P4_L2_CACHE_LINE_64B
#if CONFIG_CACHE_L2_CACHE_LINE_64B
cache_line_size = CACHE_LINE_SIZE_64B;
#else
cache_line_size = CACHE_LINE_SIZE_128B;
@ -30,9 +30,6 @@

#if CONFIG_IDF_TARGET_ESP32
#include "esp_private/esp_cache_esp32_private.h"
#elif CONFIG_IDF_TARGET_ESP32P4
//TODO: IDF-7516
#include "esp32p4/rom/cache.h"
#endif

#include "esp_private/cache_utils.h"
@ -377,12 +374,7 @@ IRAM_ATTR bool spi_flash_check_and_flush_cache(size_t start_addr, size_t length)
return true;
#else // CONFIG_IDF_TARGET_ESP32
if (vaddr != NULL) {
#if CONFIG_IDF_TARGET_ESP32P4
//TODO: IDF-7516
Cache_Invalidate_Addr(CACHE_MAP_L1_DCACHE | CACHE_MAP_L2_CACHE, (uint32_t)vaddr, SPI_FLASH_MMU_PAGE_SIZE);
#else
cache_hal_invalidate_addr((uint32_t)vaddr, SPI_FLASH_MMU_PAGE_SIZE);
#endif
ret = true;
}
#endif // CONFIG_IDF_TARGET_ESP32