coredump: custom core dump stack is now supported on Xtensa targets
Commit eb66430793 (parent 490216a2ac)
@@ -23,23 +23,39 @@
 extern "C" {
 #endif
 
+/**
+ * @brief Define the type that will be used to describe the current context when
+ * doing a backup of the current stack. This same structure is used to restore the stack.
+ */
+typedef struct {
+    uint32_t sp;
+} core_dump_stack_context_t;
+
 /**
  * @brief Set the stack pointer to the address passed as a parameter.
  * @note This function must be inlined.
  *
  * @param new_sp New stack pointer to set in sp register.
  *
- * @return Former stack pointer address (sp register value).
+ * @param old_ctx CPU context, related to SP, to fill. It will be given back when restoring SP.
  */
-FORCE_INLINE_ATTR void* esp_core_dump_replace_sp(void* new_sp)
+FORCE_INLINE_ATTR void esp_core_dump_replace_sp(void* new_sp, core_dump_stack_context_t* old_ctx)
 {
-    void* current_sp = NULL;
     asm volatile ("mv %0, sp \n\t\
                    mv sp, %1 \n\t\
                    "
-                   : "=&r"(current_sp)
+                   : "=&r"(old_ctx->sp)
                    : "r"(new_sp));
-    return current_sp;
 }
 
+
+/**
+ * @brief Restore the stack pointer saved by `esp_core_dump_replace_sp()`.
+ *
+ * @param old_ctx CPU context, related to SP, to restore.
+ */
+FORCE_INLINE_ATTR void esp_core_dump_restore_sp(core_dump_stack_context_t* old_ctx)
+{
+    asm volatile ("mv sp, %0 \n\t" :: "r"(old_ctx->sp));
+}
+
 #ifdef __cplusplus
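Read together, the RISC-V hunk above changes the shape of the port API: `esp_core_dump_replace_sp()` no longer returns the former stack pointer but records it in a caller-provided `core_dump_stack_context_t`, and a separate `esp_core_dump_restore_sp()` undoes the switch. The sketch below is a minimal caller-side illustration of that pairing, not code from the commit; the wrapper name, the scratch buffer and its size are hypothetical, and it assumes the port header shown above is included.

    #include <stdint.h>
    /* Assumes the espcoredump port header declaring core_dump_stack_context_t,
     * esp_core_dump_replace_sp() and esp_core_dump_restore_sp() is already included. */

    static uint8_t s_example_stack[1024] __attribute__((aligned(16)));  /* hypothetical scratch stack */
    static core_dump_stack_context_t s_example_ctx;   /* kept static, like s_stack_context later in
                                                         this diff, so it is not addressed relative
                                                         to the swapped sp */

    static void example_run_on_scratch_stack(void)
    {
        /* Old API: void *old_sp = esp_core_dump_replace_sp(top_of_stack);
         * New API: the former sp (plus a0/PS/window state on Xtensa) is captured in the context. */
        esp_core_dump_replace_sp(&s_example_stack[sizeof(s_example_stack)], &s_example_ctx);

        /* ... code that must run on the dedicated stack; locals of this frame should not be
         * touched here, since sp no longer points at them ... */

        /* Old API: esp_core_dump_replace_sp(old_sp);
         * New API: a single restore entry point rewinds everything that was saved. */
        esp_core_dump_restore_sp(&s_example_ctx);
    }

Both helpers are FORCE_INLINE_ATTR, so the swap and the restore happen directly in the caller's own frame rather than behind a function call.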
@@ -33,23 +33,75 @@
 extern "C" {
 #endif
 
+/**
+ * @brief Define the type that will be used to describe the current context when
+ * doing a backup of the current stack. This same structure is used to restore the stack.
+ */
+typedef struct {
+    uint32_t sp;
+    uint32_t a0;
+    uint32_t ps;
+    uint32_t windowbase;
+    uint32_t windowstart;
+} core_dump_stack_context_t;
+
 /**
  * @brief Set the stack pointer to the address passed as a parameter.
  * @note This function must be inlined.
  *
  * @param new_sp New stack pointer to set in sp register.
  *
- * @return Former stack pointer address (sp register value).
+ * @param old_ctx CPU context, related to SP, to fill. It will be given back when restoring SP.
  */
-FORCE_INLINE_ATTR void* esp_core_dump_replace_sp(void* new_sp)
+FORCE_INLINE_ATTR void esp_core_dump_replace_sp(void* new_sp, core_dump_stack_context_t* old_ctx)
 {
-    void* current_sp = NULL;
-    asm volatile ("mov %0, sp \n\t\
-                   "
-                   : "=&r"(current_sp)
-                   :);
+    /* We have to spill all the windows to the stack first as the new stack pointer
+     * represents a clean new environment. */
+    xthal_window_spill();
+
+    /* Backup the special registers PS, WindowBase and WindowStart. We will need to restore them later. */
+    asm volatile ("mov %0, sp \n" \
+                  "mov %1, a0 \n" \
+                  "rsr.ps %2 \n"\
+                  "rsr.windowbase %3 \n"\
+                  "rsr.windowstart %4 \n"\
+                  : "=r"(old_ctx->sp),
+                    "=r"(old_ctx->a0),
+                    "=r"(old_ctx->ps),
+                    "=r"(old_ctx->windowbase),
+                    "=r"(old_ctx->windowstart) :);
+
+    /* Set the new stack */
     SET_STACK(new_sp);
-    return current_sp;
 }
 
+
+/**
+ * @brief Restore the stack pointer and CPU context saved by `esp_core_dump_replace_sp()`.
+ *
+ * @param old_ctx CPU context, related to SP, to restore.
+ */
+FORCE_INLINE_ATTR void esp_core_dump_restore_sp(core_dump_stack_context_t* old_ctx)
+{
+    /* Start by disabling the WindowOverflowEnable bit in PS to make sure we won't get a Window
+     * Overflow exception when restoring the WindowBase and WindowStart registers. */
+    const uint32_t ps_woe = old_ctx->ps & ~(PS_WOE_MASK);
+    asm volatile ( \
+        "wsr.ps %0 \n"\
+        "rsync \n"\
+        "wsr.windowbase %1 \n"\
+        "rsync \n"\
+        "wsr.windowstart %2 \n"\
+        "rsync \n"\
+        "mov sp, %3 \n" \
+        "mov a0, %4 \n" \
+        "wsr.ps %5 \n"\
+        "rsync \n"\
+        :: "r"(ps_woe),
+           "r"(old_ctx->windowbase),
+           "r"(old_ctx->windowstart),
+           "r"(old_ctx->sp),
+           "r"(old_ctx->a0),
+           "r"(old_ctx->ps));
+}
+
 #ifdef __cplusplus
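The Xtensa restore path first writes a PS value with WindowOverflowEnable cleared because, as the comment in the hunk says, rewriting WindowBase/WindowStart while WOE is set could raise a Window Overflow exception mid-restore; only afterwards is the original PS written back. The tiny host-side sketch below only models that bit manipulation; the PS field position is an assumption for illustration (the real mask comes from the Xtensa headers), not a value taken from this commit.

    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    #define PS_WOE_MASK 0x00040000u   /* assumed: WindowOverflowEnable, bit 18 of PS */

    int main(void)
    {
        uint32_t saved_ps = 0x0004001Fu;                /* example saved PS with WOE set */
        uint32_t ps_woe   = saved_ps & ~(PS_WOE_MASK);  /* first value written by wsr.ps above */

        /* With WOE cleared, wsr.windowbase / wsr.windowstart cannot trigger a window-overflow
         * exception; the original saved_ps (WOE set again) is written back last. */
        printf("saved PS            = 0x%08" PRIx32 "\n", saved_ps);
        printf("PS with WOE cleared = 0x%08" PRIx32 "\n", ps_woe);
        return 0;
    }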
@@ -57,7 +57,7 @@ extern int _coredump_rtc_fast_end;
 
 static uint8_t s_coredump_stack[ESP_COREDUMP_STACK_SIZE];
 static uint8_t* s_core_dump_sp = NULL;
-static uint8_t* s_core_dump_backup = NULL;
+static core_dump_stack_context_t s_stack_context;
 
 /**
  * @brief Function setting up the core dump stack.
@@ -77,9 +77,9 @@ FORCE_INLINE_ATTR void esp_core_dump_setup_stack(void)
     /* Replace the stack pointer depending on the architecture, but save the
      * current stack pointer, in order to be able to restore it later.
      * This function must be inlined. */
-    s_core_dump_backup = esp_core_dump_replace_sp(s_core_dump_sp);
+    esp_core_dump_replace_sp(s_core_dump_sp, &s_stack_context);
     ESP_COREDUMP_LOGI("Backing up stack @ %p and use core dump stack @ %p",
-                      s_core_dump_backup, esp_cpu_get_sp());
+                      s_stack_context.sp, esp_cpu_get_sp());
 }
 
 /**
@@ -111,8 +111,8 @@ FORCE_INLINE_ATTR void esp_core_dump_report_stack_usage(void)
                       s_core_dump_sp - s_coredump_stack - bytes_free, bytes_free);
 
     /* Restore the stack pointer. */
-    ESP_COREDUMP_LOGI("Restoring stack @ %p", s_core_dump_backup);
-    esp_core_dump_replace_sp(s_core_dump_backup);
+    ESP_COREDUMP_LOGI("Restoring stack @ %p", s_stack_context.sp);
+    esp_core_dump_restore_sp(&s_stack_context);
 }
 
 #else
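The stack-usage report above logs how many bytes of the dedicated stack were consumed versus left free (`s_core_dump_sp - s_coredump_stack - bytes_free`). As a rough illustration of where a `bytes_free` figure like that can come from when the buffer is pre-filled with a known pattern (the exact mechanism is not shown in this diff), here is a hedged sketch; the fill byte and helper name are assumptions.

    #include <stddef.h>
    #include <stdint.h>

    #define EXAMPLE_STACK_FILL_BYTE 0xa5u   /* assumed canary pattern, not taken from the diff */

    /* Hypothetical helper: count untouched bytes at the low end of a downward-growing stack. */
    static size_t example_stack_bytes_free(const uint8_t *stack, size_t size)
    {
        size_t free_bytes = 0;
        while (free_bytes < size && stack[free_bytes] == EXAMPLE_STACK_FILL_BYTE) {
            free_bytes++;
        }
        return free_bytes;
    }

    /* Usage matching the log above: bytes used = s_core_dump_sp - s_coredump_stack - bytes_free. */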
@@ -84,11 +84,11 @@
 do { \
     uint32_t sp = (uint32_t)new_sp - SAVE_AREA_OFFSET; \
     *(uint32_t*)(sp - BASE_AREA_SP_OFFSET) = (uint32_t)new_sp; \
+    const uint32_t mask = ~(PS_WOE_MASK | PS_OWB_MASK | PS_CALLINC_MASK); \
     uint32_t tmp1 = 0, tmp2 = 0; \
     asm volatile ( \
         "rsr.ps %1 \n"\
-        "movi %2, ~" XTSTR( PS_WOE_MASK | PS_OWB_MASK | PS_CALLINC_MASK ) " \n"\
-        "and %1, %1, %2 \n"\
+        "and %1, %1, %3 \n"\
         "wsr.ps %1 \n"\
         "rsync \n"\
         " \n"\
@@ -99,6 +99,7 @@
         "wsr.windowstart %1 \n"\
         "rsync \n"\
         " \n"\
+        "movi a0, 0\n" \
         "mov sp, %0 \n"\
         "rsr.ps %1 \n"\
         " \n"\
@@ -107,6 +108,6 @@
         "or %1, %1, %2 \n"\
         "wsr.ps %1 \n"\
         "rsync \n"\
-        : "+r"(sp), "+r"(tmp1), "+r"(tmp2)); \
+        : "+r"(sp), "+r"(tmp1), "+r"(tmp2) : "r"(mask)); \
 } while (0);
 #endif // __ASSEMBLER__
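Two small things change in the SET_STACK macro itself: the PS mask is now folded into a C constant and handed to the asm as the new %3 input operand, instead of being rebuilt inside the asm with `movi` via XTSTR; and `movi a0, 0` zeroes the return-address register so that call traces started from the fresh stack terminate cleanly (a0 holds the caller's return address in the Xtensa windowed ABI; that rationale is inferred, not stated in the commit). The snippet below only shows that the hoisted constant is the same value the old `movi` encoded; the mask values are placeholders so the sketch builds on a host, not values taken from the Xtensa headers.

    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Assumed PS field masks, for illustration only; SET_STACK uses the real Xtensa headers. */
    #define PS_OWB_MASK      0x00000F00u
    #define PS_CALLINC_MASK  0x00030000u
    #define PS_WOE_MASK      0x00040000u

    int main(void)
    {
        /* New macro: computed once in C and passed to the asm via : "r"(mask) as operand %3 ... */
        const uint32_t mask = ~(PS_WOE_MASK | PS_OWB_MASK | PS_CALLINC_MASK);

        /* ... replacing the old in-asm "movi %2, ~" XTSTR( PS_WOE_MASK | PS_OWB_MASK | PS_CALLINC_MASK ),
         * which needed the tmp2 scratch register just to hold the constant. */
        printf("mask = 0x%08" PRIx32 "\n", mask);
        return 0;
    }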