spi_master: new segmented-configure-transfer mode

This commit is contained in:
Armando 2022-06-24 19:01:51 +08:00 committed by wanlei
parent a7355d3aba
commit b303e4b7a6
20 changed files with 2319 additions and 60 deletions


@ -11,6 +11,7 @@
#include "hal/spi_types.h"
//for spi_bus_initialization functions; kept for backward compatibility
#include "driver/spi_common.h"
#include "soc/soc_caps.h"
/**
* @brief Commonly used SPI frequencies (in Hz)
@ -157,6 +158,47 @@ typedef struct {
uint8_t dummy_bits; ///< The dummy length in this transaction, in bits.
} spi_transaction_ext_t ;
#if SOC_SPI_SCT_SUPPORTED
/**
* @Backgrounds: `SCT Mode`
* Segmented-Configure-Transfer Mode
*
* In this mode, you can pre-configure multiple SPI transactions.
* - The whole set of pre-configured transactions is called one `Segmented-Configure-Transaction`, or one `SCT`.
* - Each of the transactions in one `SCT` is called one `Segment`.
*
* Each segment can have its own SPI phase configuration.
*/
/**
* SPI SCT Mode transaction flags
*/
#define SPI_SEG_TRANS_PREP_LEN_UPDATED (1<<0) ///< Use `spi_seg_transaction_t::cs_ena_pretrans` in this segment.
#define SPI_SEG_TRANS_CMD_LEN_UPDATED (1<<1) ///< Use `spi_seg_transaction_t::command_bits` in this segment.
#define SPI_SEG_TRANS_ADDR_LEN_UPDATED (1<<2) ///< Use `spi_seg_transaction_t::address_bits` in this segment.
#define SPI_SEG_TRANS_DUMMY_LEN_UPDATED (1<<3) ///< Use `spi_seg_transaction_t::dummy_bits` in this segment.
#define SPI_SEG_TRANS_DONE_LEN_UPDATED (1<<4) ///< Use `spi_seg_transaction_t::cs_ena_posttrans` in this segment.
/**
* This struct is for SPI SCT (Segmented-Configure-Transfer) Mode.
*
* By default, the length of each SPI phase does not change per segment; every segment uses the phase lengths you set in `spi_bus_add_device()`.
* However, you can force a segment to use custom phase lengths by setting the corresponding `SPI_SEG_TRANS_XXX` flags.
*/
typedef struct {
struct spi_transaction_t base; ///< Transaction data, so that a pointer to spi_transaction_t can be converted into spi_seg_transaction_t
uint8_t cs_ena_pretrans; ///< Amount of SPI bit-cycles the cs should be activated before the transmission
uint8_t cs_ena_posttrans; ///< Amount of SPI bit-cycles the cs should stay active after the transmission
uint8_t command_bits; ///< The command length in this transaction, in bits.
uint8_t address_bits; ///< The address length in this transaction, in bits.
uint8_t dummy_bits; ///< The dummy length in this transaction, in bits.
uint32_t seg_trans_flags; ///< SCT specific flags. See `SPI_SEG_TRANS_XXX` macros.
uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX]; ///< Necessary buffer required by HW, don't touch this.
} spi_seg_transaction_t;
#endif //#if SOC_SPI_SCT_SUPPORTED
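For illustration (not part of this commit), a minimal sketch of how a caller might fill one segment and override only its command length via the flags above; `tx_buf` is a hypothetical DMA-capable buffer, and all other phase lengths fall back to those given to `spi_bus_add_device()`:

    spi_seg_transaction_t seg = {
        .base = {
            .cmd = 0x9F,                                      //command value for this segment
            .length = 4 * 8,                                  //send 4 bytes
            .tx_buffer = tx_buf,                              //hypothetical DMA-capable buffer
        },
        .command_bits = 16,                                   //custom command length for this segment only
        .seg_trans_flags = SPI_SEG_TRANS_CMD_LEN_UPDATED,     //use the custom command length above
    };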
typedef struct spi_device_t *spi_device_handle_t; ///< Handle for a device on a SPI bus
/**
* @brief Allocate a device on a SPI bus
@ -258,6 +300,68 @@ esp_err_t spi_device_get_trans_result(spi_device_handle_t handle, spi_transactio
*/
esp_err_t spi_device_transmit(spi_device_handle_t handle, spi_transaction_t *trans_desc);
#if SOC_SPI_SCT_SUPPORTED
/**
* @brief Enable/Disable Segmented-Configure-Transfer (SCT) mode
*
* Search for `@Backgrounds: SCT Mode` in this header file for an introduction to SCT mode.
*
* @note This API is not thread-safe. After enabling it, the current SPI host is switched into SCT mode.
* Never call this API from multiple threads concurrently, or while an SPI transaction is ongoing (on this SPI host).
*
* @param handle Device handle obtained using spi_host_add_dev
* @param enable True: to enable SCT mode; False: to disable SCT mode
*
* @return
* - ESP_OK: On success
* - ESP_ERR_INVALID_ARG: Invalid arguments
* - ESP_ERR_INVALID_STATE: Invalid states, e.g.: an SPI polling transaction is ongoing, SPI internal Queue isn't empty, etc.
*/
esp_err_t spi_bus_segment_trans_mode_enable(spi_device_handle_t handle, bool enable);
/**
* @brief Queue an SPI Segmented-Configure-Transaction (SCT) list for interrupt transaction execution.
*
* Search for `@Backgrounds: SCT Mode` in this header file for an introduction to SCT mode.
*
* @note After calling this API, call `spi_device_get_segment_trans_result` to get the transaction results.
*
* @param handle Device handle obtained using spi_host_add_dev
* @param seg_trans_desc Pointer to the transaction segments list head (a one-segment-list is also acceptable)
* @param seg_num Number of segments in the list
* @param ticks_to_wait Ticks to wait until there's room in the queue; use portMAX_DELAY to never time out.
*
* @return
* - ESP_OK: On success
* - ESP_ERR_INVALID_ARG: Invalid arguments
* - ESP_ERR_INVALID_STATE: Invalid states, e.g.: an SPI polling transaction is ongoing, SCT mode isn't enabled, DMA descriptors not enough, etc.
* - ESP_ERR_TIMEOUT: Timeout, this SCT transaction isn't queued successfully
*/
esp_err_t spi_device_queue_segment_trans(spi_device_handle_t handle, spi_seg_transaction_t *seg_trans_desc, uint32_t seg_num, TickType_t ticks_to_wait);
/**
* @brief Get the result of an SPI Segmented-Configure-Transaction (SCT).
*
* Search for `@Backgrounds: SCT Mode` in this header file for an introduction to SCT mode.
*
* @note Only after this API returns (with `ESP_OK`) can you recycle the memory used for this SCT list (pointed to by `seg_trans_desc`).
* You must keep the SCT list memory valid until this API returns, otherwise the SCT transaction may fail.
*
* @param handle Device handle obtained using spi_host_add_dev
* @param[out] seg_trans_desc Pointer to the completed SCT list head (this memory can then be recycled).
* @param ticks_to_wait Ticks to wait until there's a returned item; use portMAX_DELAY to never time out.
*
* @return
* - ESP_OK: On success
* - ESP_ERR_INVALID_ARG: Invalid arguments
* - ESP_ERR_INVALID_STATE: Invalid states, e.g.: SCT mode isn't enabled, etc.
* - ESP_ERR_TIMEOUT: Timeout, didn't get a completed SCT transaction
*/
esp_err_t spi_device_get_segment_trans_result(spi_device_handle_t handle, spi_seg_transaction_t **seg_trans_desc, TickType_t ticks_to_wait);
#endif //#if SOC_SPI_SCT_SUPPORTED
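As a usage sketch (illustrative only, error handling elided): `dev` is assumed to be a handle returned by `spi_bus_add_device()`, and `segs` an array of `SEG_NUM` descriptors prepared as shown earlier; the SCT list memory must stay valid until the result is returned.

    spi_bus_segment_trans_mode_enable(dev, true);
    spi_device_queue_segment_trans(dev, segs, SEG_NUM, portMAX_DELAY);

    spi_seg_transaction_t *ret_segs = NULL;
    spi_device_get_segment_trans_result(dev, &ret_segs, portMAX_DELAY);  //after this returns, `segs` can be recycled
    spi_bus_segment_trans_mode_enable(dev, false);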
/**
* @brief Immediately start a polling transaction.
*


@ -140,23 +140,40 @@ typedef struct {
const uint32_t *buffer_to_send; //equals tx_data if SPI_TRANS_USE_TXDATA is applied; otherwise, if the original buffer wasn't in DMA-capable memory, this gets the address of a temporary buffer that is;
//otherwise it is set to the original buffer, or NULL if no buffer is assigned.
uint32_t *buffer_to_rcv; // similar to buffer_to_send
uint32_t dummy; //The queue is created at init time. To reuse it for the SCT-mode private descriptor as a queue item (when in SCT mode), a dummy member is added here so this struct keeps the same size as `spi_sct_desc_priv_t`.
} spi_trans_priv_t;
#if SOC_SPI_SCT_SUPPORTED
//Type of DMA descriptors used in SPI SCT mode
typedef struct {
lldesc_t *tx_seg_head;
lldesc_t *rx_seg_head;
spi_seg_transaction_t *sct_trans_desc_head;
uint16_t tx_used_desc_num;
uint16_t rx_used_desc_num;
} spi_sct_desc_priv_t;
#endif
typedef struct {
int id;
spi_device_t* device[DEV_NUM_MAX];
intr_handle_t intr;
spi_hal_context_t hal;
spi_trans_priv_t cur_trans_buf;
#if SOC_SPI_SCT_SUPPORTED
spi_sct_desc_priv_t cur_sct_trans;
#endif
int cur_cs; //current device doing transaction
const spi_bus_attr_t* bus_attr;
const spi_dma_ctx_t *dma_ctx;
bool sct_mode_enabled;
/**
the bus is permanently controlled by a device until `spi_bus_release_bus` is called. Otherwise
the acquisition of the SPI bus is released when `spi_device_polling_end` is called.
*/
spi_device_t* device_acquiring_lock;
portMUX_TYPE spinlock;
//debug information
bool polling; //a polling transaction is in progress; avoid queueing new transactions into the ISR
@ -237,6 +254,7 @@ static esp_err_t spi_master_init_driver(spi_host_device_t host_id)
.cur_cs = DEV_NUM_MAX,
.polling = false,
.device_acquiring_lock = NULL,
.spinlock = (portMUX_TYPE)portMUX_INITIALIZER_UNLOCKED,
.bus_attr = bus_attr,
.dma_ctx = dma_ctx,
};
@ -678,57 +696,61 @@ static void SPI_MASTER_ISR_ATTR s_spi_prepare_data(spi_device_t *dev, const spi_
spi_hal_enable_data_line(hal->hw, (!hal_dev->half_duplex && hal_trans->rcv_buffer) || hal_trans->send_buffer, !!hal_trans->rcv_buffer);
}
static void SPI_MASTER_ISR_ATTR spi_format_hal_trans_struct(spi_device_t *dev, spi_trans_priv_t *trans_buf, spi_hal_trans_config_t *hal_trans)
{
spi_host_t *host = dev->host;
spi_transaction_t *trans = trans_buf->trans;
hal_trans->tx_bitlen = trans->length;
hal_trans->rx_bitlen = trans->rxlength;
hal_trans->rcv_buffer = (uint8_t*)host->cur_trans_buf.buffer_to_rcv;
hal_trans->send_buffer = (uint8_t*)host->cur_trans_buf.buffer_to_send;
hal_trans->cmd = trans->cmd;
hal_trans->addr = trans->addr;
if (trans->flags & SPI_TRANS_VARIABLE_CMD) {
hal_trans->cmd_bits = ((spi_transaction_ext_t *)trans)->command_bits;
} else {
hal_trans->cmd_bits = dev->cfg.command_bits;
}
if (trans->flags & SPI_TRANS_VARIABLE_ADDR) {
hal_trans->addr_bits = ((spi_transaction_ext_t *)trans)->address_bits;
} else {
hal_trans->addr_bits = dev->cfg.address_bits;
}
if (trans->flags & SPI_TRANS_VARIABLE_DUMMY) {
hal_trans->dummy_bits = ((spi_transaction_ext_t *)trans)->dummy_bits;
} else {
hal_trans->dummy_bits = dev->cfg.dummy_bits;
}
hal_trans->cs_keep_active = (trans->flags & SPI_TRANS_CS_KEEP_ACTIVE) ? 1 : 0;
//Set up OIO/QIO/DIO if needed
hal_trans->line_mode.data_lines = (trans->flags & SPI_TRANS_MODE_DIO) ? 2 : (trans->flags & SPI_TRANS_MODE_QIO) ? 4 : 1;
#if SOC_SPI_SUPPORT_OCT
if (trans->flags & SPI_TRANS_MODE_OCT) {
hal_trans->line_mode.data_lines = 8;
}
#endif
hal_trans->line_mode.addr_lines = (trans->flags & SPI_TRANS_MULTILINE_ADDR) ? hal_trans->line_mode.data_lines : 1;
hal_trans->line_mode.cmd_lines = (trans->flags & SPI_TRANS_MULTILINE_CMD) ? hal_trans->line_mode.data_lines : 1;
}
// The function is called to send a new transaction, in ISR or in the task.
// Setup the transaction-specified registers and linked-list used by the DMA (or FIFO if DMA is not used)
static void SPI_MASTER_ISR_ATTR spi_new_trans(spi_device_t *dev, spi_trans_priv_t *trans_buf)
{
spi_transaction_t *trans = trans_buf->trans;
spi_host_t *host = dev->host;
spi_hal_context_t *hal = &(host->hal);
spi_hal_context_t *hal = &(dev->host->hal);
spi_hal_dev_config_t *hal_dev = &(dev->hal_dev);
host->cur_cs = dev->id;
dev->host->cur_cs = dev->id;
//Reconfigure according to device settings; the function only takes effect when the dev_id has changed.
spi_setup_device(dev);
//set the transaction specific configuration each time before a transaction setup
spi_hal_trans_config_t hal_trans = {};
hal_trans.tx_bitlen = trans->length;
hal_trans.rx_bitlen = trans->rxlength;
hal_trans.rcv_buffer = (uint8_t*)trans_buf->buffer_to_rcv;
hal_trans.send_buffer = (uint8_t*)trans_buf->buffer_to_send;
hal_trans.cmd = trans->cmd;
hal_trans.addr = trans->addr;
hal_trans.cs_keep_active = (trans->flags & SPI_TRANS_CS_KEEP_ACTIVE) ? 1 : 0;
//Set up OIO/QIO/DIO if needed
hal_trans.line_mode.data_lines = (trans->flags & SPI_TRANS_MODE_DIO) ? 2 :
(trans->flags & SPI_TRANS_MODE_QIO) ? 4 : 1;
#if SOC_SPI_SUPPORT_OCT
if (trans->flags & SPI_TRANS_MODE_OCT) {
hal_trans.line_mode.data_lines = 8;
}
#endif
hal_trans.line_mode.addr_lines = (trans->flags & SPI_TRANS_MULTILINE_ADDR) ? hal_trans.line_mode.data_lines : 1;
hal_trans.line_mode.cmd_lines = (trans->flags & SPI_TRANS_MULTILINE_CMD) ? hal_trans.line_mode.data_lines : 1;
if (trans->flags & SPI_TRANS_VARIABLE_CMD) {
hal_trans.cmd_bits = ((spi_transaction_ext_t *)trans)->command_bits;
} else {
hal_trans.cmd_bits = dev->cfg.command_bits;
}
if (trans->flags & SPI_TRANS_VARIABLE_ADDR) {
hal_trans.addr_bits = ((spi_transaction_ext_t *)trans)->address_bits;
} else {
hal_trans.addr_bits = dev->cfg.address_bits;
}
if (trans->flags & SPI_TRANS_VARIABLE_DUMMY) {
hal_trans.dummy_bits = ((spi_transaction_ext_t *)trans)->dummy_bits;
} else {
hal_trans.dummy_bits = dev->cfg.dummy_bits;
}
spi_format_hal_trans_struct(dev, trans_buf, &hal_trans);
spi_hal_setup_trans(hal, hal_dev, &hal_trans);
s_spi_prepare_data(dev, &hal_trans);
@ -758,6 +780,41 @@ static void SPI_MASTER_ISR_ATTR spi_post_trans(spi_host_t *host)
host->cur_cs = DEV_NUM_MAX;
}
#if SOC_SPI_SCT_SUPPORTED
static void SPI_MASTER_ISR_ATTR spi_new_sct_trans(spi_device_t *dev, spi_sct_desc_priv_t *cur_sct_trans)
{
dev->host->cur_cs = dev->id;
//Reconfigure according to device settings; the function only takes effect when the dev_id has changed.
spi_setup_device(dev);
spi_hal_sct_load_dma_link(&dev->host->hal, cur_sct_trans->rx_seg_head, cur_sct_trans->tx_seg_head);
if (dev->cfg.pre_cb) {
dev->cfg.pre_cb((spi_transaction_t *)cur_sct_trans->sct_trans_desc_head);
}
//Kick off transfer
spi_hal_user_start(&dev->host->hal);
}
static void SPI_MASTER_ISR_ATTR spi_post_sct_trans(spi_host_t *host)
{
if (host->cur_sct_trans.rx_seg_head == NULL) {
assert(host->cur_sct_trans.rx_used_desc_num == 0);
}
portENTER_CRITICAL_ISR(&host->spinlock);
spi_hal_sct_tx_dma_desc_recycle(&host->hal, host->cur_sct_trans.tx_used_desc_num);
spi_hal_sct_rx_dma_desc_recycle(&host->hal, host->cur_sct_trans.rx_used_desc_num);
portEXIT_CRITICAL_ISR(&host->spinlock);
if (host->device[host->cur_cs]->cfg.post_cb) {
host->device[host->cur_cs]->cfg.post_cb((spi_transaction_t *)host->cur_sct_trans.sct_trans_desc_head);
}
host->cur_cs = DEV_NUM_MAX;
}
#endif //#if SOC_SPI_SCT_SUPPORTED
// This is run in interrupt context.
static void SPI_MASTER_ISR_ATTR spi_intr(void *arg)
{
@ -769,7 +826,11 @@ static void SPI_MASTER_ISR_ATTR spi_intr(void *arg)
const spi_dma_ctx_t *dma_ctx = host->dma_ctx;
#endif
#if SOC_SPI_SCT_SUPPORTED
assert(spi_hal_usr_is_done(&host->hal) || spi_ll_get_intr(host->hal.hw, SPI_LL_INTR_SEG_DONE));
#else
assert(spi_hal_usr_is_done(&host->hal));
#endif
/*
* Help to skip the handling of in-flight transaction, and disable of the interrupt.
@ -804,15 +865,21 @@ static void SPI_MASTER_ISR_ATTR spi_intr(void *arg)
#endif
}
//cur_cs is changed to DEV_NUM_MAX here
spi_post_trans(host);
if (!(host->device[cs]->cfg.flags & SPI_DEVICE_NO_RETURN_RESULT)) {
//Return transaction descriptor.
xQueueSendFromISR(host->device[cs]->ret_queue, &host->cur_trans_buf, &do_yield);
#if SOC_SPI_SCT_SUPPORTED
if (host->sct_mode_enabled) {
//cur_cs is changed to DEV_NUM_MAX here
spi_post_sct_trans(host);
xQueueSendFromISR(host->device[cs]->ret_queue, &host->cur_sct_trans, &do_yield);
} else
#endif //#if SOC_SPI_SCT_SUPPORTED
{
//cur_cs is changed to DEV_NUM_MAX here
spi_post_trans(host);
if (!(host->device[cs]->cfg.flags & SPI_DEVICE_NO_RETURN_RESULT)) {
//Return transaction descriptor.
xQueueSendFromISR(host->device[cs]->ret_queue, &host->cur_trans_buf, &do_yield);
}
}
// spi_bus_lock_bg_pause(bus_attr->lock);
#ifdef CONFIG_PM_ENABLE
//Release APB frequency lock
esp_pm_lock_release(bus_attr->pm_lock);
@ -849,7 +916,14 @@ static void SPI_MASTER_ISR_ATTR spi_intr(void *arg)
bool dev_has_req = spi_bus_lock_bg_check_dev_req(desired_dev);
if (dev_has_req) {
device_to_send = host->device[spi_bus_lock_get_dev_id(desired_dev)];
trans_found = xQueueReceiveFromISR(device_to_send->trans_queue, &host->cur_trans_buf, &do_yield);
#if SOC_SPI_SCT_SUPPORTED
if (host->sct_mode_enabled) {
trans_found = xQueueReceiveFromISR(device_to_send->trans_queue, &host->cur_sct_trans, &do_yield);
} else
#endif //#if SOC_SPI_SCT_SUPPORTED
{
trans_found = xQueueReceiveFromISR(device_to_send->trans_queue, &host->cur_trans_buf, &do_yield);
}
if (!trans_found) {
spi_bus_lock_bg_clear_req(desired_dev);
}
@ -857,16 +931,24 @@ static void SPI_MASTER_ISR_ATTR spi_intr(void *arg)
}
if (trans_found) {
spi_trans_priv_t *const cur_trans_buf = &host->cur_trans_buf;
#if SOC_SPI_SCT_SUPPORTED
if (host->sct_mode_enabled) {
spi_new_sct_trans(device_to_send, &host->cur_sct_trans);
} else
#endif //#if SOC_SPI_SCT_SUPPORTED
{
spi_trans_priv_t *const cur_trans_buf = &host->cur_trans_buf;
#if CONFIG_IDF_TARGET_ESP32
if (bus_attr->dma_enabled && (cur_trans_buf->buffer_to_rcv || cur_trans_buf->buffer_to_send)) {
//mark channel as active, so that the DMA will not be reset by the slave
//This workaround is only for esp32, where tx_dma_chan and rx_dma_chan are always same
spicommon_dmaworkaround_transfer_active(dma_ctx->tx_dma_chan.chan_id);
}
if (bus_attr->dma_enabled && (cur_trans_buf->buffer_to_rcv || cur_trans_buf->buffer_to_send)) {
//mark channel as active, so that the DMA will not be reset by the slave
//This workaround is only for esp32, where tx_dma_chan and rx_dma_chan are always same
spicommon_dmaworkaround_transfer_active(dma_ctx->tx_dma_chan.chan_id);
}
#endif //#if CONFIG_IDF_TARGET_ESP32
spi_new_trans(device_to_send, cur_trans_buf);
spi_new_trans(device_to_send, cur_trans_buf);
}
}
// Exit of the ISR, handle interrupt re-enable (if sending transaction), retry (if there's coming BG),
// or resume acquiring device task (if quit due to bus acquiring).
} while (!spi_bus_lock_bg_exit(lock, trans_found, &do_yield));
@ -1309,3 +1391,233 @@ esp_err_t spi_bus_get_max_transaction_len(spi_host_device_t host_id, size_t *max
return ESP_OK;
}
#if SOC_SPI_SCT_SUPPORTED
/**
* This function will turn this host into SCT (segmented-configure-transfer) mode.
*
* No concurrency guarantee: if a transaction is ongoing, calling this will lead to a corrupted transaction.
*/
esp_err_t spi_bus_segment_trans_mode_enable(spi_device_handle_t handle, bool enable)
{
SPI_CHECK(handle, "Invalid arguments.", ESP_ERR_INVALID_ARG);
SPI_CHECK(SOC_SPI_SCT_SUPPORTED_PERIPH(handle->host->id), "Invalid arguments", ESP_ERR_INVALID_ARG);
SPI_CHECK(!spi_bus_device_is_polling(handle), "Cannot queue new transaction while previous polling transaction is not terminated.", ESP_ERR_INVALID_STATE);
SPI_CHECK(uxQueueMessagesWaiting(handle->trans_queue) == 0, "Cannot enable SCT mode when internal Queue still has items", ESP_ERR_INVALID_STATE);
esp_err_t ret = ESP_OK;
if (enable) {
/**
* This `fake_trans` transaction descriptor is only used to initialise the SPI registers
* This transaction won't be triggered.
*/
spi_transaction_t fake_trans = {
.flags = SPI_TRANS_USE_RXDATA | SPI_TRANS_USE_TXDATA,
.length = 8,
.tx_data = {0xff},
};
spi_host_t *host = handle->host;
spi_trans_priv_t trans_buf;
spi_hal_context_t *hal = &handle->host->hal;
spi_hal_dev_config_t *hal_dev = &handle->hal_dev;
//`fake_trans` is internal, so there is no need to call `uninstall_priv_desc`
ret = setup_priv_desc(&fake_trans, &trans_buf, (host->bus_attr->dma_enabled));
if (ret != ESP_OK) {
return ret;
}
//init SPI registers
spi_hal_setup_device(hal, hal_dev);
spi_hal_trans_config_t hal_trans = {};
spi_format_hal_trans_struct(handle, &trans_buf, &hal_trans);
spi_hal_setup_trans(hal, hal_dev, &hal_trans);
spi_hal_sct_init(&handle->host->hal);
} else {
spi_hal_sct_deinit(&handle->host->hal);
}
handle->host->sct_mode_enabled = enable;
return ESP_OK;
}
static void SPI_MASTER_ATTR s_sct_init_conf_buffer(spi_hal_context_t *hal, spi_seg_transaction_t *seg_trans_desc, uint32_t seg_num)
{
for (int i = 0; i < seg_num; i++) {
spi_hal_sct_init_conf_buffer(hal, seg_trans_desc[i].conf_buffer);
}
}
static void SPI_MASTER_ATTR s_sct_format_conf_buffer(spi_device_handle_t handle, spi_seg_transaction_t *seg_trans_desc, bool seg_end)
{
spi_hal_context_t *hal = &handle->host->hal;
spi_hal_dev_config_t *hal_dev = &handle->hal_dev;
spi_hal_seg_config_t seg_config = {};
//prep
if (seg_trans_desc->seg_trans_flags & SPI_SEG_TRANS_PREP_LEN_UPDATED) {
seg_config.cs_setup = seg_trans_desc->cs_ena_pretrans;
} else {
seg_config.cs_setup = handle->cfg.cs_ena_pretrans;
}
//cmd
seg_config.cmd = seg_trans_desc->base.cmd;
if (seg_trans_desc->seg_trans_flags & SPI_SEG_TRANS_CMD_LEN_UPDATED) {
seg_config.cmd_bits = seg_trans_desc->command_bits;
} else {
seg_config.cmd_bits = handle->cfg.command_bits;
}
//addr
seg_config.addr = seg_trans_desc->base.addr;
if (seg_trans_desc->seg_trans_flags & SPI_SEG_TRANS_ADDR_LEN_UPDATED) {
seg_config.addr_bits = seg_trans_desc->address_bits;
} else {
seg_config.addr_bits = handle->cfg.address_bits;
}
//dummy
if (seg_trans_desc->seg_trans_flags & SPI_SEG_TRANS_DUMMY_LEN_UPDATED) {
seg_config.dummy_bits = seg_trans_desc->dummy_bits;
} else {
seg_config.dummy_bits = handle->cfg.dummy_bits;
}
//dout
seg_config.tx_bitlen = seg_trans_desc->base.length;
//din
seg_config.rx_bitlen = seg_trans_desc->base.rxlength;
//done
if (seg_trans_desc->seg_trans_flags & SPI_SEG_TRANS_DONE_LEN_UPDATED) {
seg_config.cs_hold = seg_trans_desc->cs_ena_posttrans;
} else {
seg_config.cs_hold = handle->cfg.cs_ena_posttrans;
}
//conf
if (seg_end) {
seg_config.seg_end = true;
}
spi_hal_sct_format_conf_buffer(hal, &seg_config, hal_dev, seg_trans_desc->conf_buffer);
}
esp_err_t SPI_MASTER_ATTR spi_device_queue_segment_trans(spi_device_handle_t handle, spi_seg_transaction_t *seg_trans_desc, uint32_t seg_num, TickType_t ticks_to_wait)
{
SPI_CHECK(handle, "Invalid arguments.", ESP_ERR_INVALID_ARG);
SPI_CHECK(SOC_SPI_SCT_SUPPORTED_PERIPH(handle->host->id), "Invalid arguments", ESP_ERR_INVALID_ARG);
SPI_CHECK(handle->host->sct_mode_enabled == 1, "SCT mode isn't enabled", ESP_ERR_INVALID_STATE);
esp_err_t ret = ESP_OK;
for (int i = 0; i < seg_num; i++) {
ret = check_trans_valid(handle, (spi_transaction_t *)&seg_trans_desc[i]);
if (ret != ESP_OK) {
return ret;
}
}
SPI_CHECK(!spi_bus_device_is_polling(handle), "Cannot queue new transaction while previous polling transaction is not terminated.", ESP_ERR_INVALID_STATE);
spi_hal_context_t *hal = &handle->host->hal;
s_sct_init_conf_buffer(hal, seg_trans_desc, seg_num);
spi_hal_dma_desc_status_t dma_desc_status = SPI_HAL_DMA_DESC_NULL;
lldesc_t *tx_seg_head = NULL;
uint32_t tx_used_dma_desc_num = 0;
uint32_t tx_buf_len = 0;
lldesc_t *rx_seg_head = NULL;
uint32_t rx_used_dma_desc_num = 0;
uint32_t rx_buf_len = 0;
/*--------------Get segment head--------------*/
s_sct_format_conf_buffer(handle, &seg_trans_desc[0], (seg_num == 1));
//TX
tx_buf_len = (seg_trans_desc[0].base.length + 8 - 1) / 8;
portENTER_CRITICAL(&handle->host->spinlock);
dma_desc_status = spi_hal_sct_new_tx_dma_desc_head(hal, seg_trans_desc[0].conf_buffer, seg_trans_desc[0].base.tx_buffer, tx_buf_len, &tx_seg_head, &tx_used_dma_desc_num);
portEXIT_CRITICAL(&handle->host->spinlock);
SPI_CHECK(dma_desc_status == SPI_HAL_DMA_DESC_LINKED, "No available DMA descriptors, increase the `max_transfer_sz`, or wait until queued transactions are done", ESP_ERR_INVALID_STATE);
//RX
//In full-duplex mode this is modified to be the same length as the TX length; otherwise it is `rxlength`
rx_buf_len = (seg_trans_desc[0].base.rxlength + 8 - 1) / 8;
if (seg_trans_desc[0].base.rx_buffer) {
portENTER_CRITICAL(&handle->host->spinlock);
dma_desc_status = spi_hal_sct_new_rx_dma_desc_head(hal, seg_trans_desc[0].base.rx_buffer, rx_buf_len, &rx_seg_head, &rx_used_dma_desc_num);
portEXIT_CRITICAL(&handle->host->spinlock);
SPI_CHECK(dma_desc_status == SPI_HAL_DMA_DESC_LINKED, "No available DMA descriptors, increase the `max_transfer_sz`, or wait until queued transactions are done", ESP_ERR_INVALID_STATE);
}
/*--------------Prepare other segments--------------*/
for (int i = 1; i < seg_num; i++) {
s_sct_format_conf_buffer(handle, &seg_trans_desc[i], (i == (seg_num - 1)));
//TX
tx_buf_len = (seg_trans_desc[i].base.length + 8 - 1) / 8;
portENTER_CRITICAL(&handle->host->spinlock);
dma_desc_status = spi_hal_sct_link_tx_seg_dma_desc(hal, seg_trans_desc[i].conf_buffer, seg_trans_desc[i].base.tx_buffer, tx_buf_len, &tx_used_dma_desc_num);
portEXIT_CRITICAL(&handle->host->spinlock);
SPI_CHECK(dma_desc_status == SPI_HAL_DMA_DESC_LINKED, "No available DMA descriptors, increase the `max_transfer_sz`, or wait until queued transactions are done", ESP_ERR_INVALID_STATE);
//RX
if (seg_trans_desc[i].base.rx_buffer) {
//In full-duplex mode this is modified to be the same length as the TX length; otherwise it is `rxlength`
rx_buf_len = (seg_trans_desc[i].base.rxlength + 8 - 1) / 8;
portENTER_CRITICAL(&handle->host->spinlock);
dma_desc_status = spi_hal_sct_link_rx_seg_dma_desc(hal, seg_trans_desc[i].base.rx_buffer, rx_buf_len, &rx_used_dma_desc_num);
portEXIT_CRITICAL(&handle->host->spinlock);
}
}
#ifdef CONFIG_PM_ENABLE
esp_pm_lock_acquire(handle->host->bus_attr->pm_lock);
#endif
spi_sct_desc_priv_t sct_desc = {
.tx_seg_head = tx_seg_head,
.rx_seg_head = rx_seg_head,
.sct_trans_desc_head = seg_trans_desc,
.tx_used_desc_num = tx_used_dma_desc_num,
.rx_used_desc_num = rx_used_dma_desc_num,
};
BaseType_t r = xQueueSend(handle->trans_queue, (void *)&sct_desc, ticks_to_wait);
if (!r) {
#ifdef CONFIG_PM_ENABLE
//Release APB frequency lock
esp_pm_lock_release(handle->host->bus_attr->pm_lock);
#endif
return ESP_ERR_TIMEOUT;
}
// The ISR will be invoked at the correct time by the lock, via `spi_bus_intr_enable`.
ret = spi_bus_lock_bg_request(handle->dev_lock);
if (ret != ESP_OK) {
return ret;
}
return ESP_OK;
}
esp_err_t SPI_MASTER_ATTR spi_device_get_segment_trans_result(spi_device_handle_t handle, spi_seg_transaction_t **seg_trans_desc, TickType_t ticks_to_wait)
{
SPI_CHECK(handle, "Invalid arguments.", ESP_ERR_INVALID_ARG);
SPI_CHECK(SOC_SPI_SCT_SUPPORTED_PERIPH(handle->host->id), "Invalid arguments", ESP_ERR_INVALID_ARG);
SPI_CHECK(handle->host->sct_mode_enabled == 1, "SCT mode isn't enabled", ESP_ERR_INVALID_STATE);
spi_sct_desc_priv_t sct_desc = {};
BaseType_t r = xQueueReceive(handle->ret_queue, (void *)&sct_desc, ticks_to_wait);
if (!r) {
return ESP_ERR_TIMEOUT;
}
*seg_trans_desc = sct_desc.sct_trans_desc_head;
return ESP_OK;
}
#endif //#if SOC_SPI_SCT_SUPPORTED


@ -1260,6 +1260,282 @@ static inline int spi_ll_get_slave_hd_dummy_bits(spi_line_mode_t line_mode)
return 8;
}
/*------------------------------------------------------------------------------
* Segmented-Configure-Transfer
*----------------------------------------------------------------------------*/
#define SPI_LL_CONF_BUF_SET_BIT(_w, _m) ({ \
(_w) |= (_m); \
})
#define SPI_LL_CONF_BUF_CLR_BIT(_w, _m) ({ \
(_w) &= ~(_m); \
})
#define SPI_LL_CONF_BUF_SET_FIELD(_w, _f, val) ({ \
((_w) = (((_w) & ~((_f##_V) << (_f##_S))) | (((val) & (_f##_V))<<(_f##_S)))); \
})
#define SPI_LL_CONF_BUF_GET_FIELD(_w, _f) ({ \
(((_w) >> (_f##_S)) & (_f##_V)); \
})
//This offset is 1, for bitmap
#define SPI_LL_CONF_BUFFER_OFFSET (1)
//bitmap must be the first
#define SPI_LL_CONF_BITMAP_POS (0)
#define SPI_LL_ADDR_REG_POS (0)
#define SPI_LL_CTRL_REG_POS (1)
#define SPI_LL_CLOCK_REG_POS (2)
#define SPI_LL_USER_REG_POS (3)
#define SPI_LL_USER1_REG_POS (4)
#define SPI_LL_USER2_REG_POS (5)
#define SPI_LL_MS_DLEN_REG_POS (6)
#define SPI_LL_MISC_REG_POS (7)
#define SPI_LL_DIN_MODE_REG_POS (8)
#define SPI_LL_DIN_NUM_REG_POS (9)
#define SPI_LL_DOUT_MODE_REG_POS (10)
#define SPI_LL_DMA_CONF_REG_POS (11)
#define SPI_LL_DMA_INT_ENA_REG_POS (12)
#define SPI_LL_DMA_INT_CLR_REG_POS (13)
#define SPI_LL_SCT_MAGIC_NUMBER (0x2)
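To make the word layout above concrete, a minimal sketch (a hypothetical helper, not part of this commit) of patching a single field in an already-initialised conf buffer using the macros defined above; note the `+ SPI_LL_CONF_BUFFER_OFFSET`, because word 0 holds the bitmap:

    //Hypothetical helper, for illustration only: update the dummy cycle count in a prepared conf buffer.
    static inline void example_patch_dummy_cycles(uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX], int dummy_n)
    {
        SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER1_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_DUMMY_CYCLELEN, dummy_n - 1);
    }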
/**
* Update the conf buffer for conf phase
*
* @param hw Beginning address of the peripheral registers.
* @param conf_buffer Conf buffer to be updated.
* @param is_end Whether this segment is the last one in the SCT
*/
static inline void spi_ll_format_conf_phase_conf_buffer(spi_dev_t *hw, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX], bool is_end)
{
//user reg: usr_conf_nxt
if (is_end) {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_CONF_NXT_M);
} else {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_CONF_NXT_M);
}
}
/**
* Update the conf buffer for prep phase
*
* @param hw Beginning address of the peripheral registers.
* @param setup CS setup time
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_prep_phase_conf_buffer(spi_dev_t *hw, uint8_t setup, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: cs_setup
if(setup) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_SETUP_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_SETUP_M);
}
//user1 reg: cs_setup_time
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER1_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_SETUP_TIME, setup - 1);
}
/**
* Update the conf buffer for cmd phase
*
* @param hw Beginning address of the peripheral registers.
* @param cmd Command value
* @param cmdlen Length of the cmd phase
* @param lsbfirst Whether LSB first
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_cmd_phase_conf_buffer(spi_dev_t *hw, uint16_t cmd, int cmdlen, bool lsbfirst, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: usr_command
if (cmdlen) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_COMMAND_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_COMMAND_M);
}
//user2 reg: usr_command_bitlen
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER2_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_COMMAND_BITLEN, cmdlen - 1);
//user2 reg: usr_command_value
if (lsbfirst) {
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER2_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_COMMAND_VALUE, cmd);
} else {
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER2_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_COMMAND_VALUE, HAL_SPI_SWAP_DATA_TX(cmd, cmdlen));
}
}
/**
* Update the conf buffer for addr phase
*
* @param hw Beginning address of the peripheral registers.
* @param addr Address to set
* @param addrlen Length of the address phase
* @param lsbfirst whether the LSB first feature is enabled.
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_addr_phase_conf_buffer(spi_dev_t *hw, uint64_t addr, int addrlen, bool lsbfirst, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: usr_addr
if (addrlen) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_ADDR_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_ADDR_M);
}
//user1 reg: usr_addr_bitlen
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER1_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_ADDR_BITLEN, addrlen - 1);
//addr reg: addr
if (lsbfirst) {
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_ADDR_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_ADDR_VALUE, HAL_SWAP32(addr));
} else {
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_ADDR_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_ADDR_VALUE, (addr << (32 - addrlen)));
}
}
/**
* Update the conf buffer for dummy phase
*
* @param hw Beginning address of the peripheral registers.
* @param dummy_n Dummy cycles used. 0 to disable the dummy phase.
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_dummy_phase_conf_buffer(spi_dev_t *hw, int dummy_n, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: usr_dummy
if (dummy_n) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_DUMMY_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_DUMMY_M);
}
//user1 reg: usr_dummy_cyclelen
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER1_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_DUMMY_CYCLELEN, dummy_n - 1);
}
/**
* Update the conf buffer for dout phase
*
* @param hw Beginning address of the peripheral registers.
* @param bitlen output length, in bits.
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_dout_phase_conf_buffer(spi_dev_t *hw, int bitlen, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
if (bitlen) {
//user reg: usr_mosi
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_MOSI_M);
//dma_conf reg: dma_tx_ena
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_DMA_CONF_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_DMA_TX_ENA_M);
//ms_dlen reg: ms_data_bitlen
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_MS_DLEN_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_MS_DATA_BITLEN, bitlen - 1);
} else {
//user reg: usr_mosi
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_MOSI_M);
//dma_conf reg: dma_tx_ena
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_DMA_CONF_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_DMA_TX_ENA_M);
}
}
/**
* Update the conf buffer for din phase
*
* @param hw Beginning address of the peripheral registers.
* @param bitlen input length, in bits.
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_din_phase_conf_buffer(spi_dev_t *hw, int bitlen, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
if (bitlen) {
//user reg: usr_miso
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_MISO_M);
//dma_conf reg: dma_rx_ena
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_DMA_CONF_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_DMA_RX_ENA_M);
//ms_dlen reg: ms_data_bitlen
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_MS_DLEN_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_MS_DATA_BITLEN, bitlen - 1);
} else {
//user reg: usr_miso
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_MISO_M);
//dma_conf reg: dma_rx_ena
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_DMA_CONF_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_DMA_RX_ENA_M);
}
}
/**
* Update the conf buffer for done phase
*
* @param hw Beginning address of the peripheral registers.
* @param hold CS hold time
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_done_phase_conf_buffer(spi_dev_t *hw, int hold, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: cs_hold
if(hold) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_HOLD_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_HOLD_M);
}
//user1 reg: cs_hold_time
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER1_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_HOLD_TIME, hold);
}
/**
* Initialize the conf buffer:
*
* - init bitmap
* - save all register values into the rest of the conf buffer words
*
* @param hw Beginning address of the peripheral registers.
* @param conf_buffer Conf buffer to be updated.
*/
__attribute__((always_inline))
static inline void spi_ll_init_conf_buffer(spi_dev_t *hw, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
conf_buffer[SPI_LL_CONF_BITMAP_POS] = 0x7FFF | (SPI_LL_SCT_MAGIC_NUMBER << 28);
conf_buffer[SPI_LL_ADDR_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->addr;
conf_buffer[SPI_LL_CTRL_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->ctrl.val;
conf_buffer[SPI_LL_CLOCK_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->clock.val;
conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->user.val;
conf_buffer[SPI_LL_USER1_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->user1.val;
conf_buffer[SPI_LL_USER2_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->user2.val;
conf_buffer[SPI_LL_MS_DLEN_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->ms_dlen.val;
conf_buffer[SPI_LL_MISC_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->misc.val;
conf_buffer[SPI_LL_DIN_MODE_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->din_mode.val;
conf_buffer[SPI_LL_DIN_NUM_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->din_num.val;
conf_buffer[SPI_LL_DOUT_MODE_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->dout_mode.val;
conf_buffer[SPI_LL_DMA_CONF_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->dma_conf.val;
conf_buffer[SPI_LL_DMA_INT_ENA_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->dma_int_ena.val;
conf_buffer[SPI_LL_DMA_INT_CLR_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->dma_int_clr.val;
}
/**
* Enable/Disable the conf phase
*
* @param hw Beginning address of the peripheral registers.
* @param enable True: enable; False: disable
*/
static inline void spi_ll_conf_state_enable(spi_dev_t *hw, bool enable)
{
hw->slave.usr_conf = enable;
}
/**
* Set Segmented-Configure-Transfer required magic value
*
* @param hw Beginning address of the peripheral registers.
* @param magic_value magic value
*/
static inline void spi_ll_set_magic_number(spi_dev_t *hw, uint8_t magic_value)
{
hw->slave.dma_seg_magic_value = magic_value;
}
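For context, a minimal sketch (an assumption, since the HAL layer is not shown in this hunk) of how these two low-level helpers might be combined when the host enters SCT mode:

    //Hypothetical HAL-side usage, for illustration only.
    static void example_enter_sct_mode(spi_dev_t *hw)
    {
        spi_ll_conf_state_enable(hw, true);                    //enable the conf phase
        spi_ll_set_magic_number(hw, SPI_LL_SCT_MAGIC_NUMBER);  //magic value required for segmented transfers
    }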
#undef SPI_LL_RST_MASK
#undef SPI_LL_UNUSED_INT_MASK


@ -1175,6 +1175,279 @@ static inline uint32_t spi_ll_slave_hd_get_last_addr(spi_dev_t *hw)
return hw->slave1.last_addr;
}
/*------------------------------------------------------------------------------
* Segmented-Configure-Transfer
*----------------------------------------------------------------------------*/
#define SPI_LL_CONF_BUF_SET_BIT(_w, _m) ({ \
(_w) |= (_m); \
})
#define SPI_LL_CONF_BUF_CLR_BIT(_w, _m) ({ \
(_w) &= ~(_m); \
})
#define SPI_LL_CONF_BUF_SET_FIELD(_w, _f, val) ({ \
((_w) = (((_w) & ~((_f##_V) << (_f##_S))) | (((val) & (_f##_V))<<(_f##_S)))); \
})
#define SPI_LL_CONF_BUF_GET_FIELD(_w, _f) ({ \
(((_w) >> (_f##_S)) & (_f##_V)); \
})
//This offset is 1, for bitmap
#define SPI_LL_CONF_BUFFER_OFFSET (1)
//bitmap must be the first
#define SPI_LL_CONF_BITMAP_POS (0)
#define SPI_LL_ADDR_REG_POS (0)
#define SPI_LL_CTRL_REG_POS (1)
#define SPI_LL_CLOCK_REG_POS (2)
#define SPI_LL_USER_REG_POS (3)
#define SPI_LL_USER1_REG_POS (4)
#define SPI_LL_USER2_REG_POS (5)
#define SPI_LL_MS_DLEN_REG_POS (6)
#define SPI_LL_MISC_REG_POS (7)
#define SPI_LL_DIN_MODE_REG_POS (8)
#define SPI_LL_DIN_NUM_REG_POS (9)
#define SPI_LL_DOUT_MODE_REG_POS (10)
#define SPI_LL_DMA_CONF_REG_POS (11)
#define SPI_LL_DMA_INT_ENA_REG_POS (12)
#define SPI_LL_DMA_INT_CLR_REG_POS (13)
#define SPI_LL_SCT_MAGIC_NUMBER (0x2)
/**
* Update the conf buffer for conf phase
*
* @param hw Beginning address of the peripheral registers.
* @param conf_buffer Conf buffer to be updated.
* @param is_end Whether this segment is the last one in the SCT
*/
static inline void spi_ll_format_conf_phase_conf_buffer(spi_dev_t *hw, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX], bool is_end)
{
//user reg: usr_conf_nxt
if (is_end) {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_CONF_NXT_M);
} else {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_CONF_NXT_M);
}
}
/**
* Update the conf buffer for prep phase
*
* @param hw Beginning address of the peripheral registers.
* @param setup CS setup time
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_prep_phase_conf_buffer(spi_dev_t *hw, uint8_t setup, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: cs_setup
if(setup) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_SETUP_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_SETUP_M);
}
//user1 reg: cs_setup_time
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER1_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_SETUP_TIME, setup - 1);
}
/**
* Update the conf buffer for cmd phase
*
* @param hw Beginning address of the peripheral registers.
* @param cmd Command value
* @param cmdlen Length of the cmd phase
* @param lsbfirst Whether LSB first
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_cmd_phase_conf_buffer(spi_dev_t *hw, uint16_t cmd, int cmdlen, bool lsbfirst, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: usr_command
if (cmdlen) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_COMMAND_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_COMMAND_M);
}
//user2 reg: usr_command_bitlen
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER2_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_COMMAND_BITLEN, cmdlen - 1);
//user2 reg: usr_command_value
if (lsbfirst) {
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER2_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_COMMAND_VALUE, cmd);
} else {
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER2_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_COMMAND_VALUE, HAL_SPI_SWAP_DATA_TX(cmd, cmdlen));
}
}
/**
* Update the conf buffer for addr phase
*
* @param hw Beginning address of the peripheral registers.
* @param addr Address to set
* @param addrlen Length of the address phase
* @param lsbfirst whether the LSB first feature is enabled.
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_addr_phase_conf_buffer(spi_dev_t *hw, uint64_t addr, int addrlen, bool lsbfirst, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: usr_addr
if (addrlen) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_ADDR_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_ADDR_M);
}
//user1 reg: usr_addr_bitlen
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER1_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_ADDR_BITLEN, addrlen - 1);
//addr reg: addr
if (lsbfirst) {
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_ADDR_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_ADDR_VALUE, HAL_SWAP32(addr));
} else {
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_ADDR_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_ADDR_VALUE, (addr << (32 - addrlen)));
}
}
/**
* Update the conf buffer for dummy phase
*
* @param hw Beginning address of the peripheral registers.
* @param dummy_n Dummy cycles used. 0 to disable the dummy phase.
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_dummy_phase_conf_buffer(spi_dev_t *hw, int dummy_n, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: usr_dummy
if (dummy_n) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_DUMMY_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_DUMMY_M);
}
//user1 reg: usr_dummy_cyclelen
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER1_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_DUMMY_CYCLELEN, dummy_n - 1);
}
/**
* Update the conf buffer for dout phase
*
* @param hw Beginning address of the peripheral registers.
* @param bitlen output length, in bits.
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_dout_phase_conf_buffer(spi_dev_t *hw, int bitlen, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
if (bitlen) {
//user reg: usr_mosi
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_MOSI_M);
//dma_conf reg: dma_tx_ena
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_DMA_CONF_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_DMA_TX_ENA_M);
//ms_dlen reg: ms_data_bitlen
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_MS_DLEN_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_MS_DATA_BITLEN, bitlen - 1);
} else {
//user reg: usr_mosi
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_MOSI_M);
//dma_conf reg: dma_tx_ena
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_DMA_CONF_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_DMA_TX_ENA_M);
}
}
/**
* Update the conf buffer for din phase
*
* @param hw Beginning address of the peripheral registers.
* @param bitlen input length, in bits.
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_din_phase_conf_buffer(spi_dev_t *hw, int bitlen, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
if (bitlen) {
//user reg: usr_miso
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_MISO_M);
//dma_conf reg: dma_rx_ena
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_DMA_CONF_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_DMA_RX_ENA_M);
//ms_dlen reg: ms_data_bitlen
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_MS_DLEN_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_MS_DATA_BITLEN, bitlen - 1);
} else {
//user reg: usr_miso
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_MISO_M);
//dma_conf reg: dma_rx_ena
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_DMA_CONF_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_DMA_RX_ENA_M);
}
}
/**
* Update the conf buffer for done phase
*
* @param hw Beginning address of the peripheral registers.
* @param hold CS hold time
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_done_phase_conf_buffer(spi_dev_t *hw, int hold, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: cs_hold
if(hold) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_HOLD_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_HOLD_M);
}
//user1 reg: cs_hold_time
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER1_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_HOLD_TIME, hold);
}
/**
* Initialize the conf buffer:
*
* - init bitmap
* - save all register values into the rest of the conf buffer words
*
* @param hw Beginning address of the peripheral registers.
* @param conf_buffer Conf buffer to be updated.
*/
__attribute__((always_inline))
static inline void spi_ll_init_conf_buffer(spi_dev_t *hw, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
conf_buffer[SPI_LL_CONF_BITMAP_POS] = 0x7FFF | (SPI_LL_SCT_MAGIC_NUMBER << 28);
conf_buffer[SPI_LL_ADDR_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->addr;
conf_buffer[SPI_LL_CTRL_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->ctrl.val;
conf_buffer[SPI_LL_CLOCK_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->clock.val;
conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->user.val;
conf_buffer[SPI_LL_USER1_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->user1.val;
conf_buffer[SPI_LL_USER2_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->user2.val;
conf_buffer[SPI_LL_MS_DLEN_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->ms_dlen.val;
conf_buffer[SPI_LL_MISC_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->misc.val;
conf_buffer[SPI_LL_DIN_MODE_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->din_mode.val;
conf_buffer[SPI_LL_DIN_NUM_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->din_num.val;
conf_buffer[SPI_LL_DOUT_MODE_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->dout_mode.val;
conf_buffer[SPI_LL_DMA_CONF_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->dma_conf.val;
conf_buffer[SPI_LL_DMA_INT_ENA_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->dma_int_ena.val;
conf_buffer[SPI_LL_DMA_INT_CLR_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->dma_int_clr.val;
}
/**
* Enable/Disable the conf phase
*
* @param hw Beginning address of the peripheral registers.
* @param enable True: enable; False: disable
*/
static inline void spi_ll_conf_state_enable(spi_dev_t *hw, bool enable)
{
hw->slave.usr_conf = enable;
}
/**
* Set Segmented-Configure-Transfer required magic value
*
* @param hw Beginning address of the peripheral registers.
* @param magic_value magic value
*/
static inline void spi_ll_set_magic_number(spi_dev_t *hw, uint8_t magic_value)
{
hw->slave.dma_seg_magic_value = magic_value;
}
#undef SPI_LL_RST_MASK
#undef SPI_LL_UNUSED_INT_MASK


@ -1166,6 +1166,281 @@ static inline uint32_t spi_ll_slave_hd_get_last_addr(spi_dev_t *hw)
return hw->slave1.slv_last_addr;
}
/*------------------------------------------------------------------------------
* Segmented-Configure-Transfer
*----------------------------------------------------------------------------*/
#define SPI_LL_CONF_BUF_SET_BIT(_w, _m) ({ \
(_w) |= (_m); \
})
#define SPI_LL_CONF_BUF_CLR_BIT(_w, _m) ({ \
(_w) &= ~(_m); \
})
#define SPI_LL_CONF_BUF_SET_FIELD(_w, _f, val) ({ \
((_w) = (((_w) & ~((_f##_V) << (_f##_S))) | (((val) & (_f##_V))<<(_f##_S)))); \
})
#define SPI_LL_CONF_BUF_GET_FIELD(_w, _f) ({ \
(((_w) >> (_f##_S)) & (_f##_V)); \
})
//This offset is 1, for bitmap
#define SPI_LL_CONF_BUFFER_OFFSET (1)
//bitmap must be the first
#define SPI_LL_CONF_BITMAP_POS (0)
#define SPI_LL_ADDR_REG_POS (0)
#define SPI_LL_CTRL_REG_POS (1)
#define SPI_LL_CLOCK_REG_POS (2)
#define SPI_LL_USER_REG_POS (3)
#define SPI_LL_USER1_REG_POS (4)
#define SPI_LL_USER2_REG_POS (5)
#define SPI_LL_MS_DLEN_REG_POS (6)
#define SPI_LL_MISC_REG_POS (7)
#define SPI_LL_DIN_MODE_REG_POS (8)
#define SPI_LL_DIN_NUM_REG_POS (9)
#define SPI_LL_DOUT_MODE_REG_POS (10)
#define SPI_LL_DMA_CONF_REG_POS (11)
#define SPI_LL_DMA_INT_ENA_REG_POS (12)
#define SPI_LL_DMA_INT_CLR_REG_POS (13)
#define SPI_LL_SCT_MAGIC_NUMBER (0x2)
/**
* Update the conf buffer for conf phase
*
* @param hw Beginning address of the peripheral registers.
* @param conf_buffer Conf buffer to be updated.
* @param is_end Whether this segment is the last one in the SCT
*/
static inline void spi_ll_format_conf_phase_conf_buffer(spi_dev_t *hw, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX], bool is_end)
{
//user reg: usr_conf_nxt
if (is_end) {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_CONF_NXT_M);
} else {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_CONF_NXT_M);
}
}
/**
* Update the conf buffer for prep phase
*
* @param hw Beginning address of the peripheral registers.
* @param setup CS setup time
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_prep_phase_conf_buffer(spi_dev_t *hw, uint8_t setup, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: cs_setup
if(setup) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_SETUP_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_SETUP_M);
}
//user1 reg: cs_setup_time
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER1_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_SETUP_TIME, setup - 1);
}
/**
* Update the conf buffer for cmd phase
*
* @param hw Beginning address of the peripheral registers.
* @param cmd Command value
* @param cmdlen Length of the cmd phase
* @param lsbfirst Whether LSB first
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_cmd_phase_conf_buffer(spi_dev_t *hw, uint16_t cmd, int cmdlen, bool lsbfirst, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: usr_command
if (cmdlen) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_COMMAND_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_COMMAND_M);
}
//user2 reg: usr_command_bitlen
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER2_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_COMMAND_BITLEN, cmdlen - 1);
//user2 reg: usr_command_value
if (lsbfirst) {
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER2_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_COMMAND_VALUE, cmd);
} else {
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER2_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_COMMAND_VALUE, HAL_SPI_SWAP_DATA_TX(cmd, cmdlen));
}
}
/**
* Update the conf buffer for addr phase
*
* @param hw Beginning address of the peripheral registers.
* @param addr Address to set
* @param addrlen Length of the address phase
* @param lsbfirst whether the LSB first feature is enabled.
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_addr_phase_conf_buffer(spi_dev_t *hw, uint64_t addr, int addrlen, bool lsbfirst, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: usr_addr
if (addrlen) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_ADDR_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_ADDR_M);
}
//user1 reg: usr_addr_bitlen
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER1_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_ADDR_BITLEN, addrlen - 1);
//addr reg: addr
if (lsbfirst) {
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_ADDR_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_ADDR_VALUE, HAL_SWAP32(addr));
} else {
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_ADDR_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_ADDR_VALUE, (addr << (32 - addrlen)));
}
}
/**
* Update the conf buffer for dummy phase
*
* @param hw Beginning address of the peripheral registers.
* @param dummy_n Dummy cycles used. 0 to disable the dummy phase.
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_dummy_phase_conf_buffer(spi_dev_t *hw, int dummy_n, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: usr_dummy
if (dummy_n) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_DUMMY_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_DUMMY_M);
}
//user1 reg: usr_dummy_cyclelen
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER1_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_DUMMY_CYCLELEN, dummy_n - 1);
}
/**
* Update the conf buffer for dout phase
*
* @param hw Beginning address of the peripheral registers.
* @param bitlen output length, in bits.
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_dout_phase_conf_buffer(spi_dev_t *hw, int bitlen, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
if (bitlen) {
//user reg: usr_mosi
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_MOSI_M);
//dma_conf reg: dma_tx_ena
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_DMA_CONF_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_DMA_TX_ENA_M);
//ms_dlen reg: ms_data_bitlen
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_MS_DLEN_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_MS_DATA_BITLEN, bitlen - 1);
} else {
//user reg: usr_mosi
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_MOSI_M);
//dma_conf reg: dma_tx_ena
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_DMA_CONF_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_DMA_TX_ENA_M);
}
}
/**
* Update the conf buffer for din phase
*
* @param hw Beginning address of the peripheral registers.
* @param bitlen input length, in bits.
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_din_phase_conf_buffer(spi_dev_t *hw, int bitlen, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
if (bitlen) {
//user reg: usr_miso
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_MISO_M);
//dma_conf reg: dma_rx_ena
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_DMA_CONF_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_DMA_RX_ENA_M);
//ms_dlen reg: ms_data_bitlen
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_MS_DLEN_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_MS_DATA_BITLEN, bitlen - 1);
} else {
//user reg: usr_miso
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_MISO_M);
//dma_conf reg: dma_rx_ena
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_DMA_CONF_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_DMA_RX_ENA_M);
}
}
/**
* Update the conf buffer for done phase
*
* @param hw Beginning address of the peripheral registers.
* @param hold CS hold time
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_done_phase_conf_buffer(spi_dev_t *hw, int hold, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: cs_hold
if(hold) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_HOLD_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_HOLD_M);
}
//user1 reg: cs_hold_time
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER1_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_HOLD_TIME, hold);
}
/**
* Initialize the conf buffer:
*
* - init bitmap
* - save all register values into the rest of the conf buffer words
*
* @param hw Beginning address of the peripheral registers.
* @param conf_buffer Conf buffer to be updated.
*/
__attribute__((always_inline))
static inline void spi_ll_init_conf_buffer(spi_dev_t *hw, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
conf_buffer[SPI_LL_CONF_BITMAP_POS] = 0x7FFF | (SPI_LL_SCT_MAGIC_NUMBER << 28);
conf_buffer[SPI_LL_ADDR_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->addr;
conf_buffer[SPI_LL_CTRL_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->ctrl.val;
conf_buffer[SPI_LL_CLOCK_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->clock.val;
conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->user.val;
conf_buffer[SPI_LL_USER1_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->user1.val;
conf_buffer[SPI_LL_USER2_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->user2.val;
conf_buffer[SPI_LL_MS_DLEN_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->ms_dlen.val;
conf_buffer[SPI_LL_MISC_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->misc.val;
conf_buffer[SPI_LL_DIN_MODE_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->din_mode.val;
conf_buffer[SPI_LL_DIN_NUM_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->din_num.val;
conf_buffer[SPI_LL_DOUT_MODE_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->dout_mode.val;
conf_buffer[SPI_LL_DMA_CONF_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->dma_conf.val;
conf_buffer[SPI_LL_DMA_INT_ENA_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->dma_int_ena.val;
conf_buffer[SPI_LL_DMA_INT_CLR_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->dma_int_clr.val;
}
/**
* Enable/Disable the conf phase
*
* @param hw Beginning address of the peripheral registers.
* @param enable True: enable; False: disable
*/
static inline void spi_ll_conf_state_enable(spi_dev_t *hw, bool enable)
{
hw->slave.usr_conf = enable;
}
/**
* Set the magic value required by Segmented-Configure-Transfer mode
*
* @param hw Beginning address of the peripheral registers.
* @param magic_value Magic value to set
*/
static inline void spi_ll_set_magic_number(spi_dev_t *hw, uint8_t magic_value)
{
hw->slave.dma_seg_magic_value = magic_value;
}
#undef SPI_LL_RST_MASK
#undef SPI_LL_UNUSED_INT_MASK

View File

@@ -18,10 +18,13 @@
#include <string.h>
#include "esp_types.h"
#include "esp_attr.h"
#include "esp_bit_defs.h"
#include "soc/spi_periph.h"
#include "soc/spi_struct.h"
#include "soc/spi_reg.h"
#include "soc/dport_reg.h"
#include "soc/lldesc.h"
#include "soc/soc_caps.h"
#include "hal/assert.h"
#include "hal/misc.h"
#include "hal/spi_types.h"
@@ -308,7 +311,7 @@ static inline void spi_ll_user_start(spi_dev_t *hw)
*/
static inline uint32_t spi_ll_get_running_cmd(spi_dev_t *hw)
{
return hw->cmd.val;
return hw->cmd.usr;
}
/**
@@ -1017,7 +1020,6 @@ static inline void spi_ll_set_command(spi_dev_t *hw, uint16_t cmd, int cmdlen, b
* more straightly.
*/
HAL_FORCE_MODIFY_U32_REG_FIELD(hw->user2, usr_command_value, HAL_SPI_SWAP_DATA_TX(cmd, cmdlen));
}
}
@@ -1463,6 +1465,297 @@ static inline bool spi_ll_tx_get_empty_err(spi_dev_t *hw)
return hw->dma_int_raw.outfifo_empty_err;
}
/*------------------------------------------------------------------------------
* Segmented-Configure-Transfer
*----------------------------------------------------------------------------*/
#define SPI_LL_CONF_BUF_SET_BIT(_w, _m) ({ \
(_w) |= (_m); \
})
#define SPI_LL_CONF_BUF_CLR_BIT(_w, _m) ({ \
(_w) &= ~(_m); \
})
#define SPI_LL_CONF_BUF_SET_FIELD(_w, _f, val) ({ \
((_w) = (((_w) & ~((_f##_V) << (_f##_S))) | (((val) & (_f##_V))<<(_f##_S)))); \
})
#define SPI_LL_CONF_BUF_GET_FIELD(_w, _f) ({ \
(((_w) >> (_f##_S)) & (_f##_V)); \
})
//This offset is 1, to skip the leading bitmap word
#define SPI_LL_CONF_BUFFER_OFFSET (1)
//The bitmap must be the first word of the conf buffer
#define SPI_LL_CONF_BITMAP_POS (0)
#define SPI_LL_CMD_REG_POS (0)
#define SPI_LL_ADDR_REG_POS (1)
#define SPI_LL_CTRL_REG_POS (2)
#define SPI_LL_CTRL1_REG_POS (3)
#define SPI_LL_CTRL2_REG_POS (4)
#define SPI_LL_CLOCK_REG_POS (5)
#define SPI_LL_USER_REG_POS (6)
#define SPI_LL_USER1_REG_POS (7)
#define SPI_LL_USER2_REG_POS (8)
#define SPI_LL_MOSI_DLEN_REG_POS (9)
#define SPI_LL_MISO_DLEN_REG_POS (10)
#define SPI_LL_MISC_REG_POS (11)
#define SPI_LL_SLAVE_REG_POS (12)
#define SPI_LL_FSM_REG_POS (13)
#define SPI_LL_HOLD_REG_POS (14)
#define SPI_LL_DMA_INT_ENA_REG_POS (15)
#define SPI_LL_DMA_INT_RAW_REG_POS (16)
#define SPI_LL_DMA_INT_CLR_REG_POS (17)
#define SPI_LL_DIN_MODE_REG_POS (18)
#define SPI_LL_DIN_NUM_REG_POS (19)
#define SPI_LL_DOUT_MODE_REG_POS (20)
#define SPI_LL_DOUT_NUM_REG_POS (21)
#define SPI_LL_LCD_CTRL_REG_POS (22)
#define SPI_LL_LCD_CTRL1_REG_POS (23)
#define SPI_LL_LCD_CTRL2_REG_POS (24)
#define SPI_LL_LCD_D_MODE_REG_POS (25)
#define SPI_LL_LCD_D_NUM_REG_POS (26)
#define SPI_LL_SCT_MAGIC_NUMBER (0x2)
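As an aside, `SPI_LL_CONF_BUF_GET_FIELD` is the read-back counterpart of the setters above and is not exercised by the functions in this file. The following minimal, hypothetical sketch only illustrates how the macros compose with the `*_REG_POS` defines and `SPI_LL_CONF_BUFFER_OFFSET`; the helper itself is not part of the driver:

//Hypothetical helper, for illustration only: read the command bit-length back
//out of the `user2` register snapshot inside a formatted conf buffer.
//The stored value is `cmdlen - 1`, mirroring spi_ll_format_cmd_phase_conf_buffer() below.
static inline int example_get_cmd_bitlen(const uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
    return SPI_LL_CONF_BUF_GET_FIELD(conf_buffer[SPI_LL_USER2_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_COMMAND_BITLEN) + 1;
}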
/**
* Update the conf buffer for conf phase
*
* @param hw Beginning address of the peripheral registers.
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_conf_phase_conf_buffer(spi_dev_t *hw, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX], bool is_end)
{
//user reg: usr_conf_nxt
if (is_end) {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_CONF_NXT_M);
} else {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_CONF_NXT_M);
}
}
/**
* Update the conf buffer for prep phase
*
* @param hw Beginning address of the peripheral registers.
* @param setup CS setup time
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_prep_phase_conf_buffer(spi_dev_t *hw, uint8_t setup, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: cs_setup
if(setup) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_SETUP_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_SETUP_M);
}
//ctrl2 reg: cs_setup_time
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_CTRL2_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_SETUP_TIME, setup - 1);
}
/**
* Update the conf buffer for cmd phase
*
* @param hw Beginning address of the peripheral registers.
* @param cmd Command value
* @param cmdlen Length of the cmd phase
* @param lsbfirst Whether LSB first
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_cmd_phase_conf_buffer(spi_dev_t *hw, uint16_t cmd, int cmdlen, bool lsbfirst, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: usr_command
if (cmdlen) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_COMMAND_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_COMMAND_M);
}
//user2 reg: usr_command_bitlen
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER2_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_COMMAND_BITLEN, cmdlen - 1);
//user2 reg: usr_command_value
if (lsbfirst) {
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER2_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_COMMAND_VALUE, cmd);
} else {
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER2_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_COMMAND_VALUE, HAL_SPI_SWAP_DATA_TX(cmd, cmdlen));
}
}
/**
* Update the conf buffer for addr phase
*
* @param hw Beginning address of the peripheral registers.
* @param addr Address to set
* @param addrlen Length of the address phase
* @param lsbfirst whether the LSB first feature is enabled.
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_addr_phase_conf_buffer(spi_dev_t *hw, uint64_t addr, int addrlen, bool lsbfirst, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: usr_addr
if (addrlen) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_ADDR_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_ADDR_M);
}
//user1 reg: usr_addr_bitlen
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER1_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_ADDR_BITLEN, addrlen - 1);
//addr reg: addr
if (lsbfirst) {
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_ADDR_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_ADDR_VALUE, HAL_SWAP32(addr));
} else {
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_ADDR_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_ADDR_VALUE, (addr << (32 - addrlen)));
}
}
/**
* Update the conf buffer for dummy phase
*
* @param hw Beginning address of the peripheral registers.
* @param dummy_n Dummy cycles used. 0 to disable the dummy phase.
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_dummy_phase_conf_buffer(spi_dev_t *hw, int dummy_n, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: usr_dummy
if (dummy_n) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_DUMMY_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_DUMMY_M);
}
//user1 reg: usr_dummy_cyclelen
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER1_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_DUMMY_CYCLELEN, dummy_n - 1);
}
/**
* Update the conf buffer for dout phase
*
* @param hw Beginning address of the peripheral registers.
* @param bitlen output length, in bits.
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_dout_phase_conf_buffer(spi_dev_t *hw, int bitlen, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: usr_mosi
if (bitlen) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_MOSI_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_MOSI_M);
}
//mosi_dlen reg: usr_mosi_bit_len
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_MOSI_DLEN_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_MOSI_DBITLEN, bitlen - 1);
}
/**
* Update the conf buffer for din phase
*
* @param hw Beginning address of the peripheral registers.
* @param bitlen input length, in bits.
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_din_phase_conf_buffer(spi_dev_t *hw, int bitlen, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: usr_miso
if (bitlen) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_MISO_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_MISO_M);
}
//miso_dlen reg: usr_miso_bit_len
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_MISO_DLEN_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_MISO_DBITLEN, bitlen - 1);
}
/**
* Update the conf buffer for done phase
*
* @param hw Beginning address of the peripheral registers.
* @param hold CS hold time
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_done_phase_conf_buffer(spi_dev_t *hw, int hold, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: cs_hold
if(hold) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_HOLD_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_HOLD_M);
}
//ctrl2 reg: cs_hold_time
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_CTRL2_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_HOLD_TIME, hold);
}
/**
* Initialize the conf buffer:
*
* - init bitmap
* - save all register values into the rest of the conf buffer words
*
* @param hw Beginning address of the peripheral registers.
* @param conf_buffer Conf buffer to be updated.
*/
__attribute__((always_inline))
static inline void spi_ll_init_conf_buffer(spi_dev_t *hw, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
conf_buffer[SPI_LL_CONF_BITMAP_POS] = 0x7FFFFFF | (SPI_LL_SCT_MAGIC_NUMBER << 28);
conf_buffer[SPI_LL_CMD_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->cmd.val;
conf_buffer[SPI_LL_ADDR_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->addr;
conf_buffer[SPI_LL_CTRL_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->ctrl.val;
conf_buffer[SPI_LL_CTRL1_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->ctrl1.val;
conf_buffer[SPI_LL_CTRL2_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->ctrl2.val;
conf_buffer[SPI_LL_CLOCK_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->clock.val;
conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->user.val;
conf_buffer[SPI_LL_USER1_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->user1.val;
conf_buffer[SPI_LL_USER2_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->user2.val;
conf_buffer[SPI_LL_MOSI_DLEN_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->mosi_dlen.val;
conf_buffer[SPI_LL_MISO_DLEN_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->miso_dlen.val;
conf_buffer[SPI_LL_MISC_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->misc.val;
conf_buffer[SPI_LL_SLAVE_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->slave.val;
conf_buffer[SPI_LL_FSM_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->fsm.val;
conf_buffer[SPI_LL_HOLD_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->hold.val;
conf_buffer[SPI_LL_DMA_INT_ENA_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->dma_int_ena.val;
conf_buffer[SPI_LL_DMA_INT_RAW_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->dma_int_raw.val;
conf_buffer[SPI_LL_DMA_INT_CLR_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->dma_int_clr.val;
conf_buffer[SPI_LL_DIN_MODE_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->din_mode.val;
conf_buffer[SPI_LL_DIN_NUM_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->din_num.val;
conf_buffer[SPI_LL_DOUT_MODE_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->dout_mode.val;
conf_buffer[SPI_LL_DOUT_NUM_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->dout_num.val;
conf_buffer[SPI_LL_LCD_CTRL_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->lcd_ctrl.val;
conf_buffer[SPI_LL_LCD_CTRL1_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->lcd_ctrl1.val;
conf_buffer[SPI_LL_LCD_CTRL2_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->lcd_ctrl2.val;
conf_buffer[SPI_LL_LCD_D_MODE_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->lcd_d_mode.val;
conf_buffer[SPI_LL_LCD_D_NUM_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->lcd_d_num.val;
}
/**
* Enable/Disable the conf phase
*
* @param hw Beginning address of the peripheral registers.
* @param enable True: enable; False: disable
*/
static inline void spi_ll_conf_state_enable(spi_dev_t *hw, bool enable)
{
hw->slv_rd_byte.usr_conf = enable;
}
/**
* Set the magic value required by Segmented-Configure-Transfer mode
*
* @param hw Beginning address of the peripheral registers.
* @param magic_value Magic value to set
*/
static inline void spi_ll_set_magic_number(spi_dev_t *hw, uint8_t magic_value)
{
hw->slv_rd_byte.dma_seg_magic_value = magic_value;
}
#undef SPI_LL_RST_MASK
#undef SPI_LL_UNUSED_INT_MASK

View File

@@ -1195,6 +1195,280 @@ static inline uint32_t spi_ll_slave_hd_get_last_addr(spi_dev_t *hw)
return hw->slave1.last_addr;
}
/*------------------------------------------------------------------------------
* Segmented-Configure-Transfer
*----------------------------------------------------------------------------*/
#define SPI_LL_CONF_BUF_SET_BIT(_w, _m) ({ \
(_w) |= (_m); \
})
#define SPI_LL_CONF_BUF_CLR_BIT(_w, _m) ({ \
(_w) &= ~(_m); \
})
#define SPI_LL_CONF_BUF_SET_FIELD(_w, _f, val) ({ \
((_w) = (((_w) & ~((_f##_V) << (_f##_S))) | (((val) & (_f##_V))<<(_f##_S)))); \
})
#define SPI_LL_CONF_BUF_GET_FIELD(_w, _f) ({ \
(((_w) >> (_f##_S)) & (_f##_V)); \
})
//This offset is 1, to skip the leading bitmap word
#define SPI_LL_CONF_BUFFER_OFFSET (1)
//The bitmap must be the first word of the conf buffer
#define SPI_LL_CONF_BITMAP_POS (0)
#define SPI_LL_ADDR_REG_POS (0)
#define SPI_LL_CTRL_REG_POS (1)
#define SPI_LL_CLOCK_REG_POS (2)
#define SPI_LL_USER_REG_POS (3)
#define SPI_LL_USER1_REG_POS (4)
#define SPI_LL_USER2_REG_POS (5)
#define SPI_LL_MS_DLEN_REG_POS (6)
#define SPI_LL_MISC_REG_POS (7)
#define SPI_LL_DIN_MODE_REG_POS (8)
#define SPI_LL_DIN_NUM_REG_POS (9)
#define SPI_LL_DOUT_MODE_REG_POS (10)
#define SPI_LL_DMA_CONF_REG_POS (11)
#define SPI_LL_DMA_INT_ENA_REG_POS (12)
#define SPI_LL_DMA_INT_CLR_REG_POS (13)
#define SPI_LL_SCT_MAGIC_NUMBER (0x2)
/**
* Update the conf buffer for conf phase
*
* @param hw Beginning address of the peripheral registers.
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_conf_phase_conf_buffer(spi_dev_t *hw, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX], bool is_end)
{
//user reg: usr_conf_nxt
if (is_end) {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_CONF_NXT_M);
} else {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_CONF_NXT_M);
}
}
/**
* Update the conf buffer for prep phase
*
* @param hw Beginning address of the peripheral registers.
* @param setup CS setup time
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_prep_phase_conf_buffer(spi_dev_t *hw, uint8_t setup, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: cs_setup
if(setup) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_SETUP_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_SETUP_M);
}
//user1 reg: cs_setup_time
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER1_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_SETUP_TIME, setup - 1);
}
/**
* Update the conf buffer for cmd phase
*
* @param hw Beginning address of the peripheral registers.
* @param cmd Command value
* @param cmdlen Length of the cmd phase
* @param lsbfirst Whether LSB first
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_cmd_phase_conf_buffer(spi_dev_t *hw, uint16_t cmd, int cmdlen, bool lsbfirst, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: usr_command
if (cmdlen) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_COMMAND_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_COMMAND_M);
}
//user2 reg: usr_command_bitlen
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER2_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_COMMAND_BITLEN, cmdlen - 1);
//user2 reg: usr_command_value
if (lsbfirst) {
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER2_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_COMMAND_VALUE, cmd);
} else {
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER2_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_COMMAND_VALUE, HAL_SPI_SWAP_DATA_TX(cmd, cmdlen));
}
}
/**
* Update the conf buffer for addr phase
*
* @param hw Beginning address of the peripheral registers.
* @param addr Address to set
* @param addrlen Length of the address phase
* @param lsbfirst whether the LSB first feature is enabled.
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_addr_phase_conf_buffer(spi_dev_t *hw, uint64_t addr, int addrlen, bool lsbfirst, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: usr_addr
if (addrlen) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_ADDR_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_ADDR_M);
}
//user1 reg: usr_addr_bitlen
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER1_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_ADDR_BITLEN, addrlen - 1);
//addr reg: addr
if (lsbfirst) {
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_ADDR_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_ADDR_VALUE, HAL_SWAP32(addr));
} else {
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_ADDR_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_ADDR_VALUE, (addr << (32 - addrlen)));
}
}
/**
* Update the conf buffer for dummy phase
*
* @param hw Beginning address of the peripheral registers.
* @param dummy_n Dummy cycles used. 0 to disable the dummy phase.
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_dummy_phase_conf_buffer(spi_dev_t *hw, int dummy_n, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: usr_dummy
if (dummy_n) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_DUMMY_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_DUMMY_M);
}
//user1 reg: usr_dummy_cyclelen
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER1_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_DUMMY_CYCLELEN, dummy_n - 1);
}
/**
* Update the conf buffer for dout phase
*
* @param hw Beginning address of the peripheral registers.
* @param bitlen output length, in bits.
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_dout_phase_conf_buffer(spi_dev_t *hw, int bitlen, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
if (bitlen) {
//user reg: usr_mosi
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_MOSI_M);
//dma_conf reg: dma_tx_ena
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_DMA_CONF_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_DMA_TX_ENA_M);
//ms_dlen reg: ms_data_bitlen
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_MS_DLEN_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_MS_DATA_BITLEN, bitlen - 1);
} else {
//user reg: usr_mosi
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_MOSI_M);
//dma_conf reg: dma_tx_ena
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_DMA_CONF_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_DMA_TX_ENA_M);
}
}
/**
* Update the conf buffer for din phase
*
* @param hw Beginning address of the peripheral registers.
* @param bitlen input length, in bits.
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_din_phase_conf_buffer(spi_dev_t *hw, int bitlen, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
if (bitlen) {
//user reg: usr_miso
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_MISO_M);
//dma_conf reg: dma_rx_ena
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_DMA_CONF_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_DMA_RX_ENA_M);
//ms_dlen reg: ms_data_bitlen
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_MS_DLEN_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_MS_DATA_BITLEN, bitlen - 1);
} else {
//user reg: usr_miso
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_USR_MISO_M);
//dma_conf reg: dma_rx_ena
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_DMA_CONF_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_DMA_RX_ENA_M);
}
}
/**
* Update the conf buffer for done phase
*
* @param hw Beginning address of the peripheral registers.
* @param hold CS hold time
* @param conf_buffer Conf buffer to be updated.
*/
static inline void spi_ll_format_done_phase_conf_buffer(spi_dev_t *hw, int hold, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
//user reg: cs_hold
if(hold) {
SPI_LL_CONF_BUF_SET_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_HOLD_M);
} else {
SPI_LL_CONF_BUF_CLR_BIT(conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_HOLD_M);
}
//user1 reg: cs_hold_time
SPI_LL_CONF_BUF_SET_FIELD(conf_buffer[SPI_LL_USER1_REG_POS + SPI_LL_CONF_BUFFER_OFFSET], SPI_CS_HOLD_TIME, hold);
}
/**
* Initialize the conf buffer:
*
* - init bitmap
* - save all register values into the rest of the conf buffer words
*
* @param hw Beginning address of the peripheral registers.
* @param conf_buffer Conf buffer to be updated.
*/
__attribute__((always_inline))
static inline void spi_ll_init_conf_buffer(spi_dev_t *hw, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
conf_buffer[SPI_LL_CONF_BITMAP_POS] = 0x7FFF | (SPI_LL_SCT_MAGIC_NUMBER << 28);
conf_buffer[SPI_LL_ADDR_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->addr;
conf_buffer[SPI_LL_CTRL_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->ctrl.val;
conf_buffer[SPI_LL_CLOCK_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->clock.val;
conf_buffer[SPI_LL_USER_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->user.val;
conf_buffer[SPI_LL_USER1_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->user1.val;
conf_buffer[SPI_LL_USER2_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->user2.val;
conf_buffer[SPI_LL_MS_DLEN_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->ms_dlen.val;
conf_buffer[SPI_LL_MISC_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->misc.val;
conf_buffer[SPI_LL_DIN_MODE_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->din_mode.val;
conf_buffer[SPI_LL_DIN_NUM_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->din_num.val;
conf_buffer[SPI_LL_DOUT_MODE_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->dout_mode.val;
conf_buffer[SPI_LL_DMA_CONF_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->dma_conf.val;
conf_buffer[SPI_LL_DMA_INT_ENA_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->dma_int_ena.val;
conf_buffer[SPI_LL_DMA_INT_CLR_REG_POS + SPI_LL_CONF_BUFFER_OFFSET] = hw->dma_int_clr.val;
}
/**
* Enable/Disable the conf phase
*
* @param hw Beginning address of the peripheral registers.
* @param enable True: enable; False: disable
*/
static inline void spi_ll_conf_state_enable(spi_dev_t *hw, bool enable)
{
hw->slave.usr_conf = enable;
}
/**
* Set the magic value required by Segmented-Configure-Transfer mode
*
* @param hw Beginning address of the peripheral registers.
* @param magic_value Magic value to set
*/
static inline void spi_ll_set_magic_number(spi_dev_t *hw, uint8_t magic_value)
{
hw->slave.dma_seg_magic_value = magic_value;
}
#undef SPI_LL_RST_MASK
#undef SPI_LL_UNUSED_INT_MASK

View File

@@ -48,6 +48,15 @@ typedef dma_descriptor_align4_t spi_dma_desc_t;
typedef dma_descriptor_align8_t spi_dma_desc_t;
#endif
/**
* @brief Enum for DMA descriptor status
*/
typedef enum spi_hal_dma_desc_status_t {
SPI_HAL_DMA_DESC_NULL = 0, ///< Null descriptor
SPI_HAL_DMA_DESC_RUN_OUT = 1, ///< Not enough DMA descriptors for the data
SPI_HAL_DMA_DESC_LINKED = 2, ///< DMA descriptors linked successfully
} spi_hal_dma_desc_status_t;
/**
* Input parameters to the ``spi_hal_cal_clock_conf`` to calculate the timing configuration
*/
@@ -103,6 +112,17 @@ typedef struct {
/* Configured by driver at initialization, don't touch */
spi_dev_t *hw; ///< Beginning address of the peripheral registers.
bool dma_enabled; ///< Whether the DMA is enabled, do not update after initialization
#if SOC_SPI_SCT_SUPPORTED
/* Segmented-Configure-Transfer required, configured by driver, don't touch */
uint32_t tx_free_desc_num; ///< Number of available TX DMA descriptors for SCT mode
uint32_t rx_free_desc_num; ///< Number of available RX DMA descriptors for SCT mode
lldesc_t *cur_tx_seg_link; ///< Current TX DMA descriptor used for sct mode.
lldesc_t *cur_rx_seg_link; ///< Current RX DMA descriptor used for sct mode.
lldesc_t *tx_seg_link_tail; ///< Tail of the TX DMA descriptor link
lldesc_t *rx_seg_link_tail; ///< Tail of the RX DMA descriptor link
#endif //#if SOC_SPI_SCT_SUPPORTED
/* Internal parameters, don't touch */
spi_hal_trans_config_t trans_config; ///< Transaction configuration
} spi_hal_context_t;
@@ -133,6 +153,32 @@ typedef struct {
};//boolean configurations
} spi_hal_dev_config_t;
#if SOC_SPI_SCT_SUPPORTED
/**
* Configurations required by SCT mode, per segment
*/
typedef struct {
/* CONF State */
bool seg_end; ///< True: this is the last segment of the transaction; False: more segments follow
/* PREP State */
int cs_setup; ///< Setup time of CS active edge before the first SPI clock
/* CMD State */
uint16_t cmd; ///< Command value to be sent
int cmd_bits; ///< Length (in bits) of the command phase
/* ADDR State */
uint64_t addr; ///< Address value to be sent
int addr_bits; ///< Length (in bits) of the address phase
/* DUMMY State */
int dummy_bits; ///< Base length (in bits) of the dummy phase.
/* DOUT State */
int tx_bitlen; ///< TX length, in bits
/* DIN State */
int rx_bitlen; ///< RX length, in bits
/* DONE State */
int cs_hold; ///< Hold time of CS inactive edge after the last SPI clock
} spi_hal_seg_config_t;
#endif //#if SOC_SPI_SCT_SUPPORTED
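For illustration, one segment could be described as follows; the values are arbitrary and only show how the fields map onto the SPI phases (they are not taken from the driver):

//Hypothetical segment: 8-bit command 0x9F, 24-bit address, 8 dummy bits,
//then a 32-byte read. This is the last segment of its transaction.
spi_hal_seg_config_t seg = {
    .seg_end = true,
    .cs_setup = 1,
    .cmd = 0x9F,
    .cmd_bits = 8,
    .addr = 0x000000,
    .addr_bits = 24,
    .dummy_bits = 8,
    .tx_bitlen = 0,         //no MOSI data in this segment
    .rx_bitlen = 32 * 8,    //read 32 bytes
    .cs_hold = 1,
};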
/**
* Init the peripheral and the context.
*
@@ -266,6 +312,125 @@ void spi_hal_cal_timing(int source_freq_hz, int eff_clk, bool gpio_is_used, int
*/
int spi_hal_get_freq_limit(bool gpio_is_used, int input_delay_ns);
#if SOC_SPI_SCT_SUPPORTED
/*----------------------------------------------------------
* Segmented-Configure-Transfer (SCT) Mode
* ---------------------------------------------------------*/
/**
* Initialise the registers and HAL state required by SCT mode
*
* @param hal Context of the HAL layer.
*/
void spi_hal_sct_init(spi_hal_context_t *hal);
/**
* Initialise the conf buffer, giving it its initial value
*
* @param hal Context of the HAL layer.
* @param conf_buffer Conf buffer to be initialised
*/
void spi_hal_sct_init_conf_buffer(spi_hal_context_t *hal, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX]);
/**
* Format the conf buffer
*
* Update the conf buffer according to the given `spi_hal_seg_config_t`.
*
* @param hal Context of the HAL layer.
* @param config Conf buffer configuration, per segment. See `spi_hal_seg_config_t` for what can be configured.
* @param dev Device configuration
* @param conf_buffer Conf buffer to be formatted
*/
void spi_hal_sct_format_conf_buffer(spi_hal_context_t *hal, const spi_hal_seg_config_t *config, const spi_hal_dev_config_t *dev, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX]);
/**
* Format TX DMA descriptor(s) for an SCT head
*
* @param hal Context of the HAL layer.
* @param conf_buffer Conf buffer
* @param send_buffer TX buffer
* @param buf_len_bytes TX buffer length, in bytes
* @param[out] trans_head Head of the SCT DMA descriptor chain
* @param[out] used_desc_num Number of descriptors used by this call
*
* @return
* - SPI_HAL_DMA_DESC_LINKED: Successfully formatted these DMA descriptors and linked them together
* - SPI_HAL_DMA_DESC_RUN_OUT: Ran out of DMA descriptors; allocate more, or wait until enough descriptors are recycled (by `spi_hal_sct_tx_dma_desc_recycle`)
*/
spi_hal_dma_desc_status_t spi_hal_sct_new_tx_dma_desc_head(spi_hal_context_t *hal, const uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX], const void *send_buffer, uint32_t buf_len_bytes, lldesc_t **trans_head, uint32_t *used_desc_num);
/**
* Format TX DMA descriptor(s) for a segment, and link it to its previous segment
*
* @param hal Context of the HAL layer.
* @param conf_buffer Conf buffer
* @param send_buffer TX buffer
* @param buf_len_bytes TX buffer length, in bytes
* @param[out] used_desc_num Number of descriptors used by this call
*
* @return
* - SPI_HAL_DMA_DESC_LINKED: Successfully formatted these DMA descriptors and linked them together
* - SPI_HAL_DMA_DESC_RUN_OUT: Ran out of DMA descriptors; allocate more, or wait until enough descriptors are recycled (by `spi_hal_sct_tx_dma_desc_recycle`)
*/
spi_hal_dma_desc_status_t spi_hal_sct_link_tx_seg_dma_desc(spi_hal_context_t *hal, const uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX], const void *send_buffer, uint32_t buf_len_bytes, uint32_t *used_desc_num);
/**
* Recycle used tx dma descriptors (back to available state, NOT a memory free)
*
* @param hal Context of the HAL layer.
* @param recycle_num Number of the to-be-recycled descriptors
*/
void spi_hal_sct_tx_dma_desc_recycle(spi_hal_context_t *hal, uint32_t recycle_num);
/**
* Format RX DMA descriptor(s) for an SCT head
*
* @param hal Context of the HAL layer.
* @param recv_buffer RX buffer
* @param buf_len_bytes RX buffer length, in bytes
* @param[out] trans_head Head of the SCT DMA descriptor chain
* @param[out] used_desc_num Number of descriptors used by this call
*
* @return
* - SPI_HAL_DMA_DESC_LINKED: Successfully formatted these DMA descriptors and linked them together
* - SPI_HAL_DMA_DESC_RUN_OUT: Ran out of DMA descriptors; allocate more, or wait until enough descriptors are recycled (by `spi_hal_sct_rx_dma_desc_recycle`)
*/
spi_hal_dma_desc_status_t spi_hal_sct_new_rx_dma_desc_head(spi_hal_context_t *hal, const void *recv_buffer, uint32_t buf_len_bytes, lldesc_t **trans_head, uint32_t *used_desc_num);
/**
* Format RX DMA descriptor(s) for a segment, and link it to its previous segment
*
* @param hal Context of the HAL layer.
* @param recv_buffer RX buffer
* @param buf_len_bytes RX buffer length, in bytes
* @param[out] used_desc_num Number of descriptors used by this call
*
* @return
* - SPI_HAL_DMA_DESC_LINKED: Successfully formatted these DMA descriptors and linked them together
* - SPI_HAL_DMA_DESC_RUN_OUT: Ran out of DMA descriptors; allocate more, or wait until enough descriptors are recycled (by `spi_hal_sct_rx_dma_desc_recycle`)
*/
spi_hal_dma_desc_status_t spi_hal_sct_link_rx_seg_dma_desc(spi_hal_context_t *hal, const void *recv_buffer, uint32_t buf_len_bytes, uint32_t *used_desc_num);
/**
* Recycle used rx dma descriptors (back to available state, NOT a memory free)
*
* @param hal Context of the HAL layer.
* @param recycle_num Number of the to-be-recycled descriptors
*/
void spi_hal_sct_rx_dma_desc_recycle(spi_hal_context_t *hal, uint32_t recycle_num);
/**
* Load the DMA descriptors onto the DMA
* Does nothing for the TX or RX DMA when the corresponding `tx_seg_head` or `rx_seg_head` is NULL
*
* @param hal Context of the HAL layer.
* @param rx_seg_head Head of the SCT RX dma descriptors
* @param tx_seg_head Head of the SCT TX dma descriptors
*/
void spi_hal_sct_load_dma_link(spi_hal_context_t *hal, lldesc_t *rx_seg_head, lldesc_t *tx_seg_head);
/**
* Deinit the SCT mode related registers and HAL state
*
* @param hal Context of the HAL layer.
*/
void spi_hal_sct_deinit(spi_hal_context_t *hal);
#endif //#if SOC_SPI_SCT_SUPPORTED
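Taken together, the declarations above imply a call sequence like the one sketched below for queuing a two-segment TX-only transaction. This is only an illustration under the assumption that `hal` and `dev` were already set up by the regular master HAL flow; error handling, the spinlock requirement noted in the implementation, and descriptor recycling are omitted, and for brevity the conf buffers are local arrays here, whereas a real caller must keep each segment's conf buffer valid in DMA-capable memory until the transfer completes.

//Sketch only: queue two TX segments and hand the chain to the DMA.
//`hal`, `dev`, the two segment configs and the two data buffers are assumed to exist already.
static void example_queue_two_tx_segments(spi_hal_context_t *hal, const spi_hal_dev_config_t *dev,
                                          const spi_hal_seg_config_t *seg0, const void *buf0, uint32_t len0,
                                          const spi_hal_seg_config_t *seg1, const void *buf1, uint32_t len1)
{
    uint32_t conf_buf0[SOC_SPI_SCT_BUFFER_NUM_MAX];
    uint32_t conf_buf1[SOC_SPI_SCT_BUFFER_NUM_MAX];
    lldesc_t *tx_head = NULL;
    uint32_t used_desc = 0;

    spi_hal_sct_init(hal);

    //Segment 0 becomes the head of the TX descriptor chain
    spi_hal_sct_init_conf_buffer(hal, conf_buf0);
    spi_hal_sct_format_conf_buffer(hal, seg0, dev, conf_buf0);
    if (spi_hal_sct_new_tx_dma_desc_head(hal, conf_buf0, buf0, len0, &tx_head, &used_desc) != SPI_HAL_DMA_DESC_LINKED) {
        return;     //SPI_HAL_DMA_DESC_RUN_OUT: recycle descriptors or wait before retrying
    }

    //Segment 1 is linked behind the previous segment
    spi_hal_sct_init_conf_buffer(hal, conf_buf1);
    spi_hal_sct_format_conf_buffer(hal, seg1, dev, conf_buf1);
    if (spi_hal_sct_link_tx_seg_dma_desc(hal, conf_buf1, buf1, len1, &used_desc) != SPI_HAL_DMA_DESC_LINKED) {
        return;
    }

    //No RX in this example, so the RX head is NULL; the caller then kicks off the SPI transaction as usual
    spi_hal_sct_load_dma_link(hal, NULL, tx_head);
}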
#endif //#if SOC_GPSPI_SUPPORTED
#ifdef __cplusplus

View File

@@ -52,6 +52,34 @@ void spi_hal_deinit(spi_hal_context_t *hal)
}
}
#if SOC_SPI_SCT_SUPPORTED
static void s_sct_reset_dma_link(spi_hal_context_t *hal)
{
hal->tx_free_desc_num = hal->dmadesc_n;
hal->rx_free_desc_num = hal->dmadesc_n;
hal->cur_tx_seg_link = hal->dmadesc_tx;
hal->cur_rx_seg_link = hal->dmadesc_rx;
hal->tx_seg_link_tail = NULL;
hal->rx_seg_link_tail = NULL;
}
void spi_hal_sct_init(spi_hal_context_t *hal)
{
s_sct_reset_dma_link(hal);
spi_ll_conf_state_enable(hal->hw, true);
spi_ll_set_magic_number(hal->hw, SPI_LL_SCT_MAGIC_NUMBER);
spi_ll_enable_intr(hal->hw, SPI_LL_INTR_SEG_DONE);
spi_ll_set_intr(hal->hw, SPI_LL_INTR_SEG_DONE);
}
void spi_hal_sct_deinit(spi_hal_context_t *hal)
{
spi_ll_conf_state_enable(hal->hw, false);
spi_ll_disable_intr(hal->hw, SPI_LL_INTR_SEG_DONE);
spi_ll_clear_intr(hal->hw, SPI_LL_INTR_SEG_DONE);
}
#endif //#if SOC_SPI_SCT_SUPPORTED
esp_err_t spi_hal_cal_clock_conf(const spi_hal_timing_param_t *timing_param, spi_hal_timing_conf_t *timing_conf)
{
spi_hal_timing_conf_t temp_conf = {};

View File

@@ -162,3 +162,177 @@ void spi_hal_fetch_result(const spi_hal_context_t *hal)
spi_ll_read_buffer(hal->hw, trans->rcv_buffer, trans->rx_bitlen);
}
}
#if SOC_SPI_SCT_SUPPORTED
/*------------------------------------------------------------------------------
* Segmented-Configure-Transfer
*----------------------------------------------------------------------------*/
void spi_hal_sct_init_conf_buffer(spi_hal_context_t *hal, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
spi_ll_init_conf_buffer(hal->hw, conf_buffer);
}
void spi_hal_sct_format_conf_buffer(spi_hal_context_t *hal, const spi_hal_seg_config_t *config, const spi_hal_dev_config_t *dev, uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX])
{
spi_ll_format_prep_phase_conf_buffer(hal->hw, config->cs_setup, conf_buffer);
spi_ll_format_cmd_phase_conf_buffer(hal->hw, config->cmd, config->cmd_bits, dev->tx_lsbfirst, conf_buffer);
spi_ll_format_addr_phase_conf_buffer(hal->hw, config->addr, config->addr_bits, dev->rx_lsbfirst, conf_buffer);
spi_ll_format_dummy_phase_conf_buffer(hal->hw, config->dummy_bits, conf_buffer);
spi_ll_format_dout_phase_conf_buffer(hal->hw, config->tx_bitlen, conf_buffer);
spi_ll_format_din_phase_conf_buffer(hal->hw, config->rx_bitlen, conf_buffer);
spi_ll_format_done_phase_conf_buffer(hal->hw, config->cs_hold, conf_buffer);
spi_ll_format_conf_phase_conf_buffer(hal->hw, conf_buffer, config->seg_end);
}
void spi_hal_sct_load_dma_link(spi_hal_context_t *hal, lldesc_t *rx_seg_head, lldesc_t *tx_seg_head)
{
spi_ll_clear_intr(hal->hw, SPI_LL_INTR_SEG_DONE);
HAL_ASSERT(hal->dma_enabled);
if (rx_seg_head) {
spi_dma_ll_rx_reset(hal->dma_in, hal->rx_dma_chan);
spi_ll_dma_rx_fifo_reset(hal->hw);
spi_ll_infifo_full_clr(hal->hw);
spi_ll_dma_rx_enable(hal->hw, 1);
spi_dma_ll_rx_start(hal->dma_in, hal->rx_dma_chan, rx_seg_head);
}
if (tx_seg_head) {
spi_dma_ll_tx_reset(hal->dma_out, hal->tx_dma_chan);
spi_ll_dma_tx_fifo_reset(hal->hw);
spi_ll_outfifo_empty_clr(hal->hw);
spi_ll_dma_tx_enable(hal->hw, 1);
spi_dma_ll_tx_start(hal->dma_out, hal->tx_dma_chan, tx_seg_head);
}
}
/*-----------------------------------------------------------
* The HAL functions below should be called under the same spinlock
*-----------------------------------------------------------*/
/*-------------------------
* TX
*------------------------*/
void spi_hal_sct_tx_dma_desc_recycle(spi_hal_context_t *hal, uint32_t recycle_num)
{
hal->tx_free_desc_num += recycle_num;
}
static void s_sct_prepare_tx_seg(spi_hal_context_t *hal, const uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX], const void *send_buffer, uint32_t buf_len_bytes, lldesc_t **trans_head)
{
HAL_ASSERT(hal->tx_free_desc_num >= 1 + lldesc_get_required_num(buf_len_bytes));
*trans_head = hal->cur_tx_seg_link;
lldesc_setup_link(hal->cur_tx_seg_link, conf_buffer, SOC_SPI_SCT_BUFFER_NUM_MAX * 4, false);
lldesc_t *conf_buffer_link = hal->cur_tx_seg_link;
hal->tx_free_desc_num -= 1;
hal->tx_seg_link_tail = hal->cur_tx_seg_link;
hal->cur_tx_seg_link++;
if (hal->cur_tx_seg_link == hal->dmadesc_tx + hal->dmadesc_n) {
//Enough free descriptors have been checked beforehand, so simply wrap back to the pool head
hal->cur_tx_seg_link = hal->dmadesc_tx;
}
if(send_buffer && buf_len_bytes) {
lldesc_setup_link(hal->cur_tx_seg_link, send_buffer, buf_len_bytes, false);
STAILQ_NEXT(conf_buffer_link, qe) = hal->cur_tx_seg_link;
for (int i = 0; i < lldesc_get_required_num(buf_len_bytes); i++) {
hal->tx_seg_link_tail = hal->cur_tx_seg_link;
hal->cur_tx_seg_link++;
if (hal->cur_tx_seg_link == hal->dmadesc_tx + hal->dmadesc_n) {
//Enough free descriptors have been checked beforehand, so simply wrap back to the pool head
hal->cur_tx_seg_link = hal->dmadesc_tx;
}
}
hal->tx_free_desc_num -= lldesc_get_required_num(buf_len_bytes);
}
}
spi_hal_dma_desc_status_t spi_hal_sct_new_tx_dma_desc_head(spi_hal_context_t *hal, const uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX], const void *send_buffer, uint32_t buf_len_bytes, lldesc_t **trans_head, uint32_t *used_desc_num)
{
//1 desc for the conf_buffer, the rest for data.
if (hal->tx_free_desc_num < 1 + lldesc_get_required_num(buf_len_bytes)) {
return SPI_HAL_DMA_DESC_RUN_OUT;
}
s_sct_prepare_tx_seg(hal, conf_buffer, send_buffer, buf_len_bytes, trans_head);
*used_desc_num = 1 + lldesc_get_required_num(buf_len_bytes);
return SPI_HAL_DMA_DESC_LINKED;
}
spi_hal_dma_desc_status_t spi_hal_sct_link_tx_seg_dma_desc(spi_hal_context_t *hal, const uint32_t conf_buffer[SOC_SPI_SCT_BUFFER_NUM_MAX], const void *send_buffer, uint32_t buf_len_bytes, uint32_t *used_desc_num)
{
//1 desc for the conf_buffer, the rest for data.
if (hal->tx_free_desc_num < 1 + lldesc_get_required_num(buf_len_bytes)) {
return SPI_HAL_DMA_DESC_RUN_OUT;
}
if (hal->tx_seg_link_tail) {
//Connect the last segment to the current one, as we're sure the following `s_sct_prepare_tx_seg` won't fail.
STAILQ_NEXT(hal->tx_seg_link_tail, qe) = hal->cur_tx_seg_link;
}
lldesc_t *internal_head = NULL;
s_sct_prepare_tx_seg(hal, conf_buffer, send_buffer, buf_len_bytes, &internal_head);
*used_desc_num = 1 + lldesc_get_required_num(buf_len_bytes);
return SPI_HAL_DMA_DESC_LINKED;
}
/*-------------------------
* RX
*------------------------*/
void spi_hal_sct_rx_dma_desc_recycle(spi_hal_context_t *hal, uint32_t recycle_num)
{
hal->rx_free_desc_num += recycle_num;
}
static void s_sct_prepare_rx_seg(spi_hal_context_t *hal, const void *recv_buffer, uint32_t buf_len_bytes, lldesc_t **trans_head)
{
HAL_ASSERT(hal->rx_free_desc_num >= lldesc_get_required_num(buf_len_bytes));
*trans_head = hal->cur_rx_seg_link;
lldesc_setup_link(hal->cur_rx_seg_link, recv_buffer, buf_len_bytes, true);
for (int i = 0; i< lldesc_get_required_num(buf_len_bytes); i++) {
hal->rx_seg_link_tail = hal->cur_rx_seg_link;
hal->cur_rx_seg_link++;
if (hal->cur_rx_seg_link == hal->dmadesc_rx + hal->dmadesc_n) {
//Enough free descriptors have been checked beforehand, so simply wrap back to the pool head
hal->cur_rx_seg_link = hal->dmadesc_rx;
}
}
hal->rx_free_desc_num -= lldesc_get_required_num(buf_len_bytes);
}
spi_hal_dma_desc_status_t spi_hal_sct_new_rx_dma_desc_head(spi_hal_context_t *hal, const void *recv_buffer, uint32_t buf_len_bytes, lldesc_t **trans_head, uint32_t *used_desc_num)
{
if (hal->rx_free_desc_num < lldesc_get_required_num(buf_len_bytes)) {
return SPI_HAL_DMA_DESC_RUN_OUT;
}
s_sct_prepare_rx_seg(hal, recv_buffer, buf_len_bytes, trans_head);
*used_desc_num = lldesc_get_required_num(buf_len_bytes);
return SPI_HAL_DMA_DESC_LINKED;
}
spi_hal_dma_desc_status_t spi_hal_sct_link_rx_seg_dma_desc(spi_hal_context_t *hal, const void *recv_buffer, uint32_t buf_len_bytes, uint32_t *used_desc_num)
{
if (hal->rx_free_desc_num < lldesc_get_required_num(buf_len_bytes)) {
return SPI_HAL_DMA_DESC_RUN_OUT;
}
if (hal->rx_seg_link_tail) {
//Connect the last segment to the current one, as we're sure the following `s_sct_prepare_rx_seg` won't fail.
STAILQ_NEXT(hal->rx_seg_link_tail, qe) = hal->cur_rx_seg_link;
}
lldesc_t *internal_head = NULL;
s_sct_prepare_rx_seg(hal, recv_buffer, buf_len_bytes, &internal_head);
*used_desc_num = lldesc_get_required_num(buf_len_bytes);
return SPI_HAL_DMA_DESC_LINKED;
}
#endif //#if SOC_SPI_SCT_SUPPORTED
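The recycle helpers above only move descriptors back to the free pool; they do not track ownership. A caller is therefore expected to remember how many descriptors each queued transaction consumed (the `used_desc_num` outputs) and return them once that transaction completes. A hypothetical bookkeeping pattern, with invented names, might look like this:

//Hypothetical bookkeeping: store the per-transaction descriptor usage reported by the
//`spi_hal_sct_new/link_*_dma_desc` calls, then give the descriptors back on completion.
typedef struct {
    uint32_t tx_used_desc_num;
    uint32_t rx_used_desc_num;
} example_sct_trans_bookkeeping_t;

static void example_on_sct_trans_done(spi_hal_context_t *hal, const example_sct_trans_bookkeeping_t *bk)
{
    spi_hal_sct_tx_dma_desc_recycle(hal, bk->tx_used_desc_num);
    spi_hal_sct_rx_dma_desc_recycle(hal, bk->rx_used_desc_num);
}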

View File

@@ -471,6 +471,18 @@ config SOC_SPI_PERIPH_SUPPORT_CONTROL_DUMMY_OUT
bool
default y
config SOC_SPI_SCT_SUPPORTED
bool
default y
config SOC_SPI_SCT_REG_NUM
int
default 14
config SOC_SPI_SCT_BUFFER_NUM_MAX
bool
default y
config SOC_MEMSPI_IS_INDEPENDENT
bool
default y

View File

@@ -227,6 +227,11 @@
// Peripheral supports output given level during its "dummy phase"
#define SOC_SPI_PERIPH_SUPPORT_CONTROL_DUMMY_OUT 1
#define SOC_SPI_SCT_SUPPORTED 1
#define SOC_SPI_SCT_SUPPORTED_PERIPH(PERIPH_NUM) ((PERIPH_NUM==1) ? 1 : 0) //Support Segmented-Configure-Transfer
#define SOC_SPI_SCT_REG_NUM 14
#define SOC_SPI_SCT_BUFFER_NUM_MAX (1 + SOC_SPI_SCT_REG_NUM) //1-word-bitmap + 14-word-regs
#define SOC_MEMSPI_IS_INDEPENDENT 1
#define SOC_SPI_MAX_PRE_DIVIDER 16

View File

@@ -699,6 +699,18 @@ config SOC_SPI_PERIPH_SUPPORT_CONTROL_DUMMY_OUT
bool
default y
config SOC_SPI_SCT_SUPPORTED
bool
default y
config SOC_SPI_SCT_REG_NUM
int
default 14
config SOC_SPI_SCT_BUFFER_NUM_MAX
bool
default y
config SOC_MEMSPI_IS_INDEPENDENT
bool
default y

View File

@@ -311,6 +311,11 @@
// Peripheral supports output given level during its "dummy phase"
#define SOC_SPI_PERIPH_SUPPORT_CONTROL_DUMMY_OUT 1
#define SOC_SPI_SCT_SUPPORTED 1
#define SOC_SPI_SCT_SUPPORTED_PERIPH(PERIPH_NUM) ((PERIPH_NUM==1) ? 1 : 0) //Support Segmented-Configure-Transfer
#define SOC_SPI_SCT_REG_NUM 14
#define SOC_SPI_SCT_BUFFER_NUM_MAX (1 + SOC_SPI_SCT_REG_NUM) //1-word-bitmap + 14-word-regs
#define SOC_MEMSPI_IS_INDEPENDENT 1
#define SOC_SPI_MAX_PRE_DIVIDER 16

View File

@@ -959,6 +959,18 @@ config SOC_SPI_SUPPORT_CLK_RC_FAST
bool
default y
config SOC_SPI_SCT_SUPPORTED
bool
default y
config SOC_SPI_SCT_REG_NUM
int
default 14
config SOC_SPI_SCT_BUFFER_NUM_MAX
bool
default y
config SOC_MEMSPI_IS_INDEPENDENT
bool
default y

View File

@@ -386,6 +386,11 @@
// host_id = 0 -> SPI0/SPI1, host_id = 1 -> SPI2,
#define SOC_SPI_PERIPH_SUPPORT_MULTILINE_MODE(host_id) ({(void)host_id; 1;})
#define SOC_SPI_SCT_SUPPORTED 1
#define SOC_SPI_SCT_SUPPORTED_PERIPH(PERIPH_NUM) ((PERIPH_NUM==1) ? 1 : 0) //Support Segmented-Configure-Transfer
#define SOC_SPI_SCT_REG_NUM 14
#define SOC_SPI_SCT_BUFFER_NUM_MAX (1 + SOC_SPI_SCT_REG_NUM) //1-word-bitmap + 14-word-regs
#define SOC_MEMSPI_IS_INDEPENDENT 1
#define SOC_SPI_MAX_PRE_DIVIDER 16

View File

@@ -695,11 +695,23 @@ config SOC_SPI_PERIPH_SUPPORT_CONTROL_DUMMY_OUT
bool
default y
config SOC_MEMSPI_IS_INDEPENDENT
config SOC_SPI_SUPPORT_OCT
bool
default y
config SOC_SPI_SUPPORT_OCT
config SOC_SPI_SCT_SUPPORTED
bool
default y
config SOC_SPI_SCT_REG_NUM
int
default 27
config SOC_SPI_SCT_BUFFER_NUM_MAX
bool
default y
config SOC_MEMSPI_IS_INDEPENDENT
bool
default y

View File

@@ -301,9 +301,14 @@
// Only SPI1 supports this feature
#define SOC_SPI_PERIPH_SUPPORT_CONTROL_DUMMY_OUT 1
#define SOC_MEMSPI_IS_INDEPENDENT 1
#define SOC_SPI_SUPPORT_OCT 1
#define SOC_SPI_SUPPORT_OCT 1
#define SOC_SPI_SCT_SUPPORTED 1
#define SOC_SPI_SCT_SUPPORTED_PERIPH(PERIPH_NUM) (((PERIPH_NUM==1) || (PERIPH_NUM==2)) ? 1 : 0) //Support Segmented-Configure-Transfer
#define SOC_SPI_SCT_REG_NUM 27
#define SOC_SPI_SCT_BUFFER_NUM_MAX (1 + SOC_SPI_SCT_REG_NUM) //1-word-bitmap + 27-word-regs
#define SOC_MEMSPI_IS_INDEPENDENT 1
#define SOC_MEMSPI_SRC_FREQ_80M_SUPPORTED 1
#define SOC_MEMSPI_SRC_FREQ_40M_SUPPORTED 1
#define SOC_MEMSPI_SRC_FREQ_26M_SUPPORTED 1

View File

@@ -839,6 +839,18 @@ config SOC_SPI_SUPPORT_OCT
bool
default y
config SOC_SPI_SCT_SUPPORTED
bool
default y
config SOC_SPI_SCT_REG_NUM
int
default 14
config SOC_SPI_SCT_BUFFER_NUM_MAX
bool
default y
config SOC_MEMSPI_SRC_FREQ_120M
bool
default y

View File

@@ -332,6 +332,11 @@
#define SOC_SPI_MAX_PRE_DIVIDER 16
#define SOC_SPI_SUPPORT_OCT 1
#define SOC_SPI_SCT_SUPPORTED 1
#define SOC_SPI_SCT_SUPPORTED_PERIPH(PERIPH_NUM) ((PERIPH_NUM==1) ? 1 : 0) //Support Segmented-Configure-Transfer
#define SOC_SPI_SCT_REG_NUM 14
#define SOC_SPI_SCT_BUFFER_NUM_MAX (1 + SOC_SPI_SCT_REG_NUM) //1-word-bitmap + 14-word-regs
#define SOC_MEMSPI_SRC_FREQ_120M 1
#define SOC_MEMSPI_SRC_FREQ_80M_SUPPORTED 1
#define SOC_MEMSPI_SRC_FREQ_40M_SUPPORTED 1