Merge branch 'bugfix/fix_rtc_us_to_cycle_div_zero_in_deepsleep_v5.2' into 'release/v5.2'

fix(esp_hw_support/sleep): fix rtc_time_us_to_slowclk div zero in deepsleep process (backport v5.2)

See merge request espressif/esp-idf!27763
Jiang Jiang Jian 2023-12-11 10:57:19 +08:00
commit a2b96227ac
9 changed files with 63 additions and 31 deletions
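Why the fix is needed: rtc_time_us_to_slowclk() converts a sleep duration in microseconds into slow-clock cycles by dividing by the calibration period, so a zero period (for example, when no slow-clock calibration has run before entering deep sleep) triggers a divide-by-zero. Below is a minimal, hedged sketch of the guarded conversion; the function body is an assumption inferred from the overflow comment in the hunks that follow (time shifted by RTC_CLK_CAL_FRACT, then divided by period), and only the assert(period) line reflects what this commit actually adds:

#include <assert.h>
#include <stdint.h>

#define RTC_CLK_CAL_FRACT  19   /* fractional bits of the calibration value (value assumed here) */

uint64_t rtc_time_us_to_slowclk(uint64_t time_in_us, uint32_t period)
{
    assert(period);   /* added by this commit: a zero calibration period would divide by zero */
    /* Overflow will happen in this function if time_in_us >= 2^45, which is about 400 days. */
    return (time_in_us << RTC_CLK_CAL_FRACT) / period;   /* sketch of the existing conversion */
}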

View File

@@ -112,6 +112,7 @@ static uint32_t rtc_clk_cal_internal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cyc
uint32_t rtc_clk_cal_ratio(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
assert(slowclk_cycles);
uint64_t xtal_cycles = rtc_clk_cal_internal(cal_clk, slowclk_cycles);
uint64_t ratio_64 = ((xtal_cycles << RTC_CLK_CAL_FRACT)) / slowclk_cycles;
uint32_t ratio = (uint32_t)(ratio_64 & UINT32_MAX);
@@ -127,6 +128,7 @@ static inline bool rtc_clk_cal_32k_valid(rtc_xtal_freq_t xtal_freq, uint32_t slo
uint32_t rtc_clk_cal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
assert(slowclk_cycles);
rtc_xtal_freq_t xtal_freq = rtc_clk_xtal_freq_get();
uint64_t xtal_cycles = rtc_clk_cal_internal(cal_clk, slowclk_cycles);
@@ -142,6 +144,7 @@ uint32_t rtc_clk_cal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
uint64_t rtc_time_us_to_slowclk(uint64_t time_in_us, uint32_t period)
{
assert(period);
/* Overflow will happen in this function if time_in_us >= 2^45, which is about 400 days.
* TODO: fix overflow.
*/

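A quick sanity check of the "about 400 days" figure quoted in the overflow comment above: with RTC_CLK_CAL_FRACT at 19 fractional bits (the value assumed in this standalone snippet), time_in_us << 19 exceeds the uint64_t range once time_in_us reaches 2^(64 - 19) = 2^45 microseconds, which works out to roughly 407 days:

#include <stdint.h>
#include <stdio.h>

#define RTC_CLK_CAL_FRACT 19   /* assumed fractional-bit count of the calibration value */

int main(void)
{
    /* time_in_us << RTC_CLK_CAL_FRACT overflows a uint64_t at 2^(64 - 19) = 2^45 microseconds */
    uint64_t limit_us = 1ULL << (64 - RTC_CLK_CAL_FRACT);
    printf("overflow limit: %llu us = %.1f days\n",
           (unsigned long long)limit_us,
           (double)limit_us / 1e6 / 86400.0);   /* prints about 407.2 days */
    return 0;
}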
View File

@@ -126,6 +126,7 @@ uint32_t rtc_clk_cal_internal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
uint32_t rtc_clk_cal_ratio(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
assert(slowclk_cycles);
uint64_t xtal_cycles = rtc_clk_cal_internal(cal_clk, slowclk_cycles);
uint64_t ratio_64 = ((xtal_cycles << RTC_CLK_CAL_FRACT)) / slowclk_cycles;
uint32_t ratio = (uint32_t)(ratio_64 & UINT32_MAX);
@@ -141,6 +142,7 @@ static inline bool rtc_clk_cal_32k_valid(rtc_xtal_freq_t xtal_freq, uint32_t slo
uint32_t rtc_clk_cal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
assert(slowclk_cycles);
rtc_xtal_freq_t xtal_freq = rtc_clk_xtal_freq_get();
uint64_t xtal_cycles = rtc_clk_cal_internal(cal_clk, slowclk_cycles);
@@ -156,6 +158,7 @@ uint32_t rtc_clk_cal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
uint64_t rtc_time_us_to_slowclk(uint64_t time_in_us, uint32_t period)
{
assert(period);
/* Overflow will happen in this function if time_in_us >= 2^45, which is about 400 days.
* TODO: fix overflow.
*/

View File

@@ -129,6 +129,7 @@ uint32_t rtc_clk_cal_internal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
uint32_t rtc_clk_cal_ratio(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
assert(slowclk_cycles);
uint64_t xtal_cycles = rtc_clk_cal_internal(cal_clk, slowclk_cycles);
uint64_t ratio_64 = ((xtal_cycles << RTC_CLK_CAL_FRACT)) / slowclk_cycles;
uint32_t ratio = (uint32_t)(ratio_64 & UINT32_MAX);
@@ -144,6 +145,7 @@ static bool rtc_clk_cal_32k_valid(rtc_xtal_freq_t xtal_freq, uint32_t slowclk_cy
uint32_t rtc_clk_cal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
assert(slowclk_cycles);
rtc_xtal_freq_t xtal_freq = rtc_clk_xtal_freq_get();
uint64_t xtal_cycles = rtc_clk_cal_internal(cal_clk, slowclk_cycles);
@@ -159,6 +161,7 @@ uint32_t rtc_clk_cal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
uint64_t rtc_time_us_to_slowclk(uint64_t time_in_us, uint32_t period)
{
assert(period);
/* Overflow will happen in this function if time_in_us >= 2^45, which is about 400 days.
* TODO: fix overflow.
*/

View File

@@ -210,6 +210,7 @@ static bool rtc_clk_cal_32k_valid(rtc_xtal_freq_t xtal_freq, uint32_t slowclk_cy
uint32_t rtc_clk_cal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
assert(slowclk_cycles);
rtc_xtal_freq_t xtal_freq = rtc_clk_xtal_freq_get();
/*The Fosc CLK of calibration circuit is divided by 32 for ECO1.
@@ -235,6 +236,7 @@ uint32_t rtc_clk_cal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
uint64_t rtc_time_us_to_slowclk(uint64_t time_in_us, uint32_t period)
{
assert(period);
/* Overflow will happen in this function if time_in_us >= 2^45, which is about 400 days.
* TODO: fix overflow.
*/

View File

@@ -209,6 +209,7 @@ static bool rtc_clk_cal_32k_valid(rtc_xtal_freq_t xtal_freq, uint32_t slowclk_cy
uint32_t rtc_clk_cal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
assert(slowclk_cycles);
rtc_xtal_freq_t xtal_freq = rtc_clk_xtal_freq_get();
/*The Fosc CLK of calibration circuit is divided by 32 for ECO2.
@@ -234,6 +235,7 @@ uint32_t rtc_clk_cal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
uint64_t rtc_time_us_to_slowclk(uint64_t time_in_us, uint32_t period)
{
assert(period);
/* Overflow will happen in this function if time_in_us >= 2^45, which is about 400 days.
* TODO: fix overflow.
*/

View File

@@ -181,6 +181,7 @@ static bool rtc_clk_cal_32k_valid(rtc_xtal_freq_t xtal_freq, uint32_t slowclk_cy
uint32_t rtc_clk_cal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
assert(slowclk_cycles);
rtc_xtal_freq_t xtal_freq = rtc_clk_xtal_freq_get();
uint64_t xtal_cycles = rtc_clk_cal_internal(cal_clk, slowclk_cycles);
@@ -196,6 +197,7 @@ uint32_t rtc_clk_cal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
uint64_t rtc_time_us_to_slowclk(uint64_t time_in_us, uint32_t period)
{
assert(period);
/* Overflow will happen in this function if time_in_us >= 2^45, which is about 400 days.
* TODO: fix overflow.
*/

View File

@@ -123,6 +123,7 @@ static uint32_t rtc_clk_cal_internal_cycling(rtc_cal_sel_t cal_clk, uint32_t slo
*/
static uint32_t rtc_clk_xtal_to_slowclk(uint64_t xtal_cycles, uint32_t slowclk_cycles)
{
assert(slowclk_cycles);
rtc_xtal_freq_t xtal_freq = rtc_clk_xtal_freq_get();
uint64_t divider = ((uint64_t)xtal_freq) * slowclk_cycles;
uint64_t period_64 = ((xtal_cycles << RTC_CLK_CAL_FRACT) + divider / 2 - 1) / divider;
@@ -191,6 +192,7 @@ uint32_t rtc_clk_cal_internal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles, ui
uint32_t rtc_clk_cal_ratio(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
assert(slowclk_cycles);
uint64_t xtal_cycles = rtc_clk_cal_internal(cal_clk, slowclk_cycles, RTC_TIME_CAL_ONEOFF_MODE);
uint64_t ratio_64 = ((xtal_cycles << RTC_CLK_CAL_FRACT)) / slowclk_cycles;
uint32_t ratio = (uint32_t)(ratio_64 & UINT32_MAX);
@@ -224,6 +226,7 @@ uint32_t rtc_clk_cal_cycling(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
uint64_t rtc_time_us_to_slowclk(uint64_t time_in_us, uint32_t period)
{
assert(period);
/* Overflow will happen in this function if time_in_us >= 2^45, which is about 400 days.
* TODO: fix overflow.
*/

View File

@@ -128,6 +128,7 @@ uint32_t rtc_clk_cal_internal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
uint32_t rtc_clk_cal_ratio(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
assert(slowclk_cycles);
uint64_t xtal_cycles = rtc_clk_cal_internal(cal_clk, slowclk_cycles);
uint64_t ratio_64 = ((xtal_cycles << RTC_CLK_CAL_FRACT)) / slowclk_cycles;
uint32_t ratio = (uint32_t)(ratio_64 & UINT32_MAX);
@@ -143,6 +144,7 @@ static inline bool rtc_clk_cal_32k_valid(rtc_xtal_freq_t xtal_freq, uint32_t slo
uint32_t rtc_clk_cal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
assert(slowclk_cycles);
rtc_xtal_freq_t xtal_freq = rtc_clk_xtal_freq_get();
uint64_t xtal_cycles = rtc_clk_cal_internal(cal_clk, slowclk_cycles);
@@ -158,6 +160,7 @@ uint32_t rtc_clk_cal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
uint64_t rtc_time_us_to_slowclk(uint64_t time_in_us, uint32_t period)
{
assert(period);
/* Overflow will happen in this function if time_in_us >= 2^45, which is about 400 days.
* TODO: fix overflow.
*/

View File

@@ -592,6 +592,44 @@ FORCE_INLINE_ATTR void misc_modules_wake_prepare(void)
#endif
}
static IRAM_ATTR void sleep_low_power_clock_calibration(bool is_dslp)
{
// Calibrate rtc slow clock
#ifdef CONFIG_ESP_SYSTEM_RTC_EXT_XTAL
if (rtc_clk_slow_src_get() == SOC_RTC_SLOW_CLK_SRC_XTAL32K) {
uint64_t time_per_us = 1000000ULL;
s_config.rtc_clk_cal_period = (time_per_us << RTC_CLK_CAL_FRACT) / rtc_clk_slow_freq_get_hz();
} else {
// If the external 32 kHz XTAL does not exist, use the internal 150 kHz RC oscillator
// as the RTC slow clock source.
s_config.rtc_clk_cal_period = rtc_clk_cal(RTC_CAL_RTC_MUX, RTC_CLK_SRC_CAL_CYCLES);
esp_clk_slowclk_cal_set(s_config.rtc_clk_cal_period);
}
#elif CONFIG_RTC_CLK_SRC_INT_RC && CONFIG_IDF_TARGET_ESP32S2
s_config.rtc_clk_cal_period = rtc_clk_cal_cycling(RTC_CAL_RTC_MUX, RTC_CLK_SRC_CAL_CYCLES);
esp_clk_slowclk_cal_set(s_config.rtc_clk_cal_period);
#else
#if CONFIG_PM_ENABLE
if ((s_lightsleep_cnt % CONFIG_PM_LIGHTSLEEP_RTC_OSC_CAL_INTERVAL == 0) || is_dslp)
#endif
{
s_config.rtc_clk_cal_period = rtc_clk_cal(RTC_CAL_RTC_MUX, RTC_CLK_SRC_CAL_CYCLES);
esp_clk_slowclk_cal_set(s_config.rtc_clk_cal_period);
}
#endif
// Calibrate the RTC fast clock; only needed in the sleep process on PMU-supported chips.
#if SOC_PMU_SUPPORTED
#if CONFIG_PM_ENABLE
if ((s_lightsleep_cnt % CONFIG_PM_LIGHTSLEEP_RTC_OSC_CAL_INTERVAL == 0) || is_dslp)
#endif
{
s_config.fast_clk_cal_period = rtc_clk_cal(RTC_CAL_RC_FAST, FAST_CLK_SRC_CAL_CYCLES);
}
#endif
}
inline static uint32_t call_rtc_sleep_start(uint32_t reject_triggers, uint32_t lslp_mem_inf_fpu, bool dslp);
static esp_err_t IRAM_ATTR esp_sleep_start(uint32_t pd_flags, esp_sleep_mode_t mode, bool allow_sleep_rejection)
@@ -907,7 +945,8 @@ static esp_err_t IRAM_ATTR deep_sleep_start(bool allow_sleep_rejection)
// Decide which power domains can be powered down
uint32_t pd_flags = get_power_down_flags();
s_config.rtc_clk_cal_period = esp_clk_slowclk_cal_get();
// Re-calibrate the RTC clock
sleep_low_power_clock_calibration(true);
// Correct the sleep time
s_config.sleep_time_adjustment = DEEP_SLEEP_TIME_OVERHEAD_US;
@@ -1093,29 +1132,8 @@ esp_err_t esp_light_sleep_start(void)
pd_flags &= ~RTC_SLEEP_PD_RTC_PERIPH;
#endif
// Re-calibrate the RTC Timer clock
#ifdef CONFIG_ESP_SYSTEM_RTC_EXT_XTAL
if (rtc_clk_slow_src_get() == SOC_RTC_SLOW_CLK_SRC_XTAL32K) {
uint64_t time_per_us = 1000000ULL;
s_config.rtc_clk_cal_period = (time_per_us << RTC_CLK_CAL_FRACT) / rtc_clk_slow_freq_get_hz();
} else {
// If the external 32 kHz XTAL does not exist, use the internal 150 kHz RC oscillator
// as the RTC slow clock source.
s_config.rtc_clk_cal_period = rtc_clk_cal(RTC_CAL_RTC_MUX, RTC_CLK_SRC_CAL_CYCLES);
esp_clk_slowclk_cal_set(s_config.rtc_clk_cal_period);
}
#elif CONFIG_RTC_CLK_SRC_INT_RC && CONFIG_IDF_TARGET_ESP32S2
s_config.rtc_clk_cal_period = rtc_clk_cal_cycling(RTC_CAL_RTC_MUX, RTC_CLK_SRC_CAL_CYCLES);
esp_clk_slowclk_cal_set(s_config.rtc_clk_cal_period);
#else
#if CONFIG_PM_ENABLE
if (s_lightsleep_cnt % CONFIG_PM_LIGHTSLEEP_RTC_OSC_CAL_INTERVAL == 0)
#endif
{
s_config.rtc_clk_cal_period = rtc_clk_cal(RTC_CAL_RTC_MUX, RTC_CLK_SRC_CAL_CYCLES);
esp_clk_slowclk_cal_set(s_config.rtc_clk_cal_period);
}
#endif
// Re-calibrate the RTC clock
sleep_low_power_clock_calibration(false);
/*
* Adjustment time consists of parts below:
@@ -1124,14 +1142,7 @@ esp_err_t esp_light_sleep_start(void)
* 3. Code execution time when clock is not stable;
* 4. Code execution time which can be measured;
*/
#if SOC_PMU_SUPPORTED
#if CONFIG_PM_ENABLE
if (s_lightsleep_cnt % CONFIG_PM_LIGHTSLEEP_RTC_OSC_CAL_INTERVAL == 0)
#endif
{
s_config.fast_clk_cal_period = rtc_clk_cal(RTC_CAL_RC_FAST, FAST_CLK_SRC_CAL_CYCLES);
}
#endif
int sleep_time_sw_adjustment = LIGHT_SLEEP_TIME_OVERHEAD_US + sleep_time_overhead_in + s_config.sleep_time_overhead_out;
int sleep_time_hw_adjustment = pmu_sleep_calculate_hw_wait_time(pd_flags, s_config.rtc_clk_cal_period, s_config.fast_clk_cal_period);
s_config.sleep_time_adjustment = sleep_time_sw_adjustment + sleep_time_hw_adjustment;