#include "hal/spi_slave_hal.h"
#include "hal/spi_ll.h"
#include "soc/ext_mem_defs.h"
#include "soc/soc_caps.h"

//This GDMA related part will be introduced by GDMA dedicated APIs in the future. Here we temporarily use macros.
#if SOC_GDMA_SUPPORTED

#if (SOC_GDMA_TRIG_PERIPH_SPI2_BUS == SOC_GDMA_BUS_AHB) && (SOC_AHB_GDMA_VERSION == 1)
#include "soc/gdma_struct.h"
#include "hal/gdma_ll.h"

//Map the generic spi_dma_ll_* names onto the AHB GDMA low-level driver.
//The (dev) argument is ignored: the GDMA instance is a global.
//NOTE: no trailing ';' inside the single-statement macros, so that
//`if (...) spi_dma_ll_tx_reset(...); else ...` stays valid.
#define spi_dma_ll_rx_reset(dev, chan)                  gdma_ll_rx_reset_channel(&GDMA, chan)
#define spi_dma_ll_tx_reset(dev, chan)                  gdma_ll_tx_reset_channel(&GDMA, chan)
#define spi_dma_ll_rx_start(dev, chan, addr) do {\
            gdma_ll_rx_set_desc_addr(&GDMA, chan, (uint32_t)addr);\
            gdma_ll_rx_start(&GDMA, chan);\
        } while (0)
#define spi_dma_ll_tx_start(dev, chan, addr) do {\
            gdma_ll_tx_set_desc_addr(&GDMA, chan, (uint32_t)addr);\
            gdma_ll_tx_start(&GDMA, chan);\
        } while (0)

#elif (SOC_GDMA_TRIG_PERIPH_SPI2_BUS == SOC_GDMA_BUS_AXI) //TODO: IDF-6152, refactor spi hal layer
#include "hal/axi_dma_ll.h"

//Same mapping, but onto the AXI DMA low-level driver.
#define spi_dma_ll_rx_reset(dev, chan)                  axi_dma_ll_rx_reset_channel(&AXI_DMA, chan)
#define spi_dma_ll_tx_reset(dev, chan)                  axi_dma_ll_tx_reset_channel(&AXI_DMA, chan)
#define spi_dma_ll_rx_start(dev, chan, addr) do {\
            axi_dma_ll_rx_set_desc_addr(&AXI_DMA, chan, (uint32_t)addr);\
            axi_dma_ll_rx_start(&AXI_DMA, chan);\
        } while (0)
#define spi_dma_ll_tx_start(dev, chan, addr) do {\
            axi_dma_ll_tx_set_desc_addr(&AXI_DMA, chan, (uint32_t)addr);\
            axi_dma_ll_tx_start(&AXI_DMA, chan);\
        } while (0)
#endif

#endif  //SOC_GDMA_SUPPORTED
|
2019-04-18 10:13:05 -04:00
|
|
|
|
|
|
|
/**
 * Check whether the transaction on the slave peripheral has finished.
 *
 * @param hal  Context of the HAL layer.
 * @return true if the trans-done status bit is set, false otherwise.
 */
bool spi_slave_hal_usr_is_done(spi_slave_hal_context_t* hal)
{
    const bool done = spi_ll_usr_is_done(hal->hw);
    return done;
}
|
|
|
|
|
|
|
|
void spi_slave_hal_user_start(const spi_slave_hal_context_t *hal)
|
|
|
|
{
|
|
|
|
spi_ll_clear_int_stat(hal->hw); //clear int bit
|
2023-07-04 21:46:21 -04:00
|
|
|
spi_ll_user_start(hal->hw);
|
2019-04-18 10:13:05 -04:00
|
|
|
}
|
|
|
|
|
2023-09-01 05:51:54 -04:00
|
|
|
#if SOC_NON_CACHEABLE_OFFSET
//Targets with a non-cacheable alias region: translate a pointer between the
//CPU (cached) view and the DMA (non-cached) view of the same physical memory
//by applying the fixed SOC_NON_CACHEABLE_OFFSET. typeof() preserves the
//pointer type across the integer round-trip (GCC extension).
#define ADDR_DMA_2_CPU(addr) ((typeof(addr))((uint32_t)(addr) + SOC_NON_CACHEABLE_OFFSET))
#define ADDR_CPU_2_DMA(addr) ((typeof(addr))((uint32_t)(addr) - SOC_NON_CACHEABLE_OFFSET))
#else
//No separate non-cacheable alias on this target: both views are the same address.
#define ADDR_DMA_2_CPU(addr) (addr)
#define ADDR_CPU_2_DMA(addr) (addr)
#endif
|
|
|
|
|
|
|
|
/**
 * Fill a chain of DMA descriptors covering `len` bytes of `data`.
 *
 * Each descriptor covers at most DMA_DESCRIPTOR_BUFFER_MAX_SIZE_4B_ALIGNED
 * bytes; descriptors are linked via their `next` field (written as a DMA-view
 * address) and the final one is marked EOF with a NULL `next`.
 *
 * @param dmadesc  Descriptor array, given in the DMA (non-cached) view;
 *                 translated to the CPU view before being written.
 * @param data     Buffer the descriptors point at. Caller ensures it is
 *                 DMA-capable; ownership is not transferred.
 * @param len      Number of bytes to cover. NOTE(review): a caller passing
 *                 len == 0 would underflow dmadesc[n - 1] below — callers are
 *                 presumed to guard against that; confirm at call sites.
 * @param is_rx    true for a receive link (size rounded up to a 32-bit
 *                 boundary, length left for hardware to fill in), false for a
 *                 transmit link (size == length == chunk size).
 */
static void s_spi_slave_hal_dma_desc_setup_link(spi_dma_desc_t *dmadesc, const void *data, int len, bool is_rx)
{
    //Write descriptors through the CPU-visible alias of the array.
    dmadesc = ADDR_DMA_2_CPU(dmadesc);
    int n = 0;
    while (len) {
        //Clamp each chunk to the per-descriptor maximum.
        int dmachunklen = len;
        if (dmachunklen > DMA_DESCRIPTOR_BUFFER_MAX_SIZE_4B_ALIGNED) {
            dmachunklen = DMA_DESCRIPTOR_BUFFER_MAX_SIZE_4B_ALIGNED;
        }
        if (is_rx) {
            //Receive needs DMA length rounded to next 32-bit boundary
            dmadesc[n].dw0.size = (dmachunklen + 3) & (~3);
        } else {
            dmadesc[n].dw0.size = dmachunklen;
            dmadesc[n].dw0.length = dmachunklen;
        }
        dmadesc[n].buffer = (uint8_t *)data;
        dmadesc[n].dw0.suc_eof = 0;
        //Hand the descriptor to the DMA engine; hardware clears this on completion.
        dmadesc[n].dw0.owner = DMA_DESCRIPTOR_BUFFER_OWNER_DMA;
        //Link to the next descriptor using its DMA-view address.
        dmadesc[n].next = ADDR_CPU_2_DMA(&dmadesc[n + 1]);
        len -= dmachunklen;
        //NOTE: arithmetic on const void* is a GNU extension (treated as 1-byte units).
        data += dmachunklen;
        n++;
    }
    dmadesc[n - 1].dw0.suc_eof = 1; //Mark last DMA desc as end of stream.
    dmadesc[n - 1].next = NULL;     //Terminate the link so the engine stops here.
}
|
|
|
|
|
2019-04-18 10:13:05 -04:00
|
|
|
/**
 * Prepare the slave peripheral (and, if used, its DMA channels) for one
 * transaction of hal->bitlen bits.
 *
 * DMA path: builds descriptor links for rx/tx, then — in this required
 * order — resets the DMA link, resets the DMA FIFO, resets the SPI slave
 * logic, clears the FIFO-threshold status, enables DMA, and starts the
 * channel on the descriptor chain.
 *
 * Non-DMA path: copies tx data into the peripheral's internal buffer and
 * resets the CPU-side tx FIFO.
 *
 * @param hal  Context of the HAL layer.
 */
void spi_slave_hal_prepare_data(const spi_slave_hal_context_t *hal)
{
    if (hal->use_dma) {

        //Fill DMA descriptors
        if (hal->rx_buffer) {
            //Round bit length up to whole bytes for the descriptor link.
            s_spi_slave_hal_dma_desc_setup_link(hal->dmadesc_rx, hal->rx_buffer, ((hal->bitlen + 7) / 8), true);

            //reset dma inlink, this should be reset before spi related reset
            spi_dma_ll_rx_reset(hal->dma_in, hal->rx_dma_chan);
            spi_ll_dma_rx_fifo_reset(hal->dma_in);
            spi_ll_slave_reset(hal->hw);
            spi_ll_infifo_full_clr(hal->hw);

            spi_ll_dma_rx_enable(hal->hw, 1);
            spi_dma_ll_rx_start(hal->dma_in, hal->rx_dma_chan, (lldesc_t *)hal->dmadesc_rx);
        }
        if (hal->tx_buffer) {
            s_spi_slave_hal_dma_desc_setup_link(hal->dmadesc_tx, hal->tx_buffer, (hal->bitlen + 7) / 8, false);

            //reset dma outlink, this should be reset before spi related reset
            spi_dma_ll_tx_reset(hal->dma_out, hal->tx_dma_chan);
            spi_ll_dma_tx_fifo_reset(hal->dma_out);
            spi_ll_slave_reset(hal->hw);
            spi_ll_outfifo_empty_clr(hal->hw);

            spi_ll_dma_tx_enable(hal->hw, 1);
            spi_dma_ll_tx_start(hal->dma_out, hal->tx_dma_chan, (lldesc_t *)hal->dmadesc_tx);
        }

    } else {
        //No DMA. Turn off SPI and copy data to transmit buffers.
        if (hal->tx_buffer) {
            spi_ll_slave_reset(hal->hw);
            spi_ll_write_buffer(hal->hw, hal->tx_buffer, hal->bitlen);
        }

        spi_ll_cpu_tx_fifo_reset(hal->hw);
    }

    //Program the transaction length for both directions.
    spi_ll_slave_set_rx_bitlen(hal->hw, hal->bitlen);
    spi_ll_slave_set_tx_bitlen(hal->hw, hal->bitlen);

#ifdef CONFIG_IDF_TARGET_ESP32
    //SPI Slave mode on ESP32 requires MOSI/MISO enable
    spi_ll_enable_mosi(hal->hw, (hal->rx_buffer == NULL) ? 0 : 1);
    spi_ll_enable_miso(hal->hw, (hal->tx_buffer == NULL) ? 0 : 1);
#endif
}
|
|
|
|
|
|
|
|
/**
 * Latch the result of the finished transaction into the HAL context.
 *
 * Reads the received bit count from hardware and, in the non-DMA case,
 * copies the received data out of the peripheral's internal buffer.
 *
 * @param hal  Context of the HAL layer; hal->rcv_bitlen is updated.
 */
void spi_slave_hal_store_result(spi_slave_hal_context_t *hal)
{
    //Hardware quirk: when exactly cur_trans->length bits were transferred,
    //slv_rdata_bit reads back as length-1; otherwise it reads the length
    //actually sent. Compensate for the off-by-one here.
    hal->rcv_bitlen = spi_ll_slave_get_rcv_bitlen(hal->hw);
    if (hal->rcv_bitlen == hal->bitlen - 1) {
        hal->rcv_bitlen += 1;
    }

    const bool need_copy = !hal->use_dma && (hal->rx_buffer != NULL);
    if (need_copy) {
        //Copy result out of the peripheral's internal receive buffer.
        spi_ll_read_buffer(hal->hw, hal->rx_buffer, hal->bitlen);
    }
}
|
|
|
|
|
|
|
|
/**
 * Get the number of bits received in the last transaction.
 *
 * Valid only after spi_slave_hal_store_result() has been called.
 *
 * @param hal  Context of the HAL layer.
 * @return Received length in bits.
 */
uint32_t spi_slave_hal_get_rcv_bitlen(spi_slave_hal_context_t *hal)
{
    const uint32_t received_bits = hal->rcv_bitlen;
    return received_bits;
}
|
|
|
|
|
2022-09-15 06:26:12 -04:00
|
|
|
#if CONFIG_IDF_TARGET_ESP32
|
|
|
|
//This workaround is only for esp32
|
2019-04-18 10:13:05 -04:00
|
|
|
bool spi_slave_hal_dma_need_reset(const spi_slave_hal_context_t *hal)
|
|
|
|
{
|
|
|
|
bool ret;
|
|
|
|
ret = false;
|
|
|
|
if (hal->use_dma && hal->rx_buffer) {
|
|
|
|
int i;
|
|
|
|
//In case CS goes high too soon, the transfer is aborted while the DMA channel still thinks it's going. This
|
|
|
|
//leads to issues later on, so in that case we need to reset the channel. The state can be detected because
|
|
|
|
//the DMA system doesn't give back the offending descriptor; the owner is still set to DMA.
|
2023-09-01 05:51:54 -04:00
|
|
|
for (i = 0; hal->dmadesc_rx[i].dw0.suc_eof == 0 && hal->dmadesc_rx[i].dw0.owner == 0; i++) {}
|
|
|
|
if (hal->dmadesc_rx[i].dw0.owner) {
|
2019-04-18 10:13:05 -04:00
|
|
|
ret = true;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return ret;
|
2020-09-08 05:05:49 -04:00
|
|
|
}
|
2022-09-15 06:26:12 -04:00
|
|
|
#endif //#if CONFIG_IDF_TARGET_ESP32
|