/*
 * SPDX-FileCopyrightText: 2015-2022 Espressif Systems (Shanghai) CO LTD
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#include <string.h>
#include <stdbool.h>
#include <math.h>
#include <esp_types.h>
#include "freertos/FreeRTOS.h"
#include "freertos/queue.h"
#include "freertos/semphr.h"

#include "sdkconfig.h"
#if CONFIG_I2S_ENABLE_DEBUG_LOG
// The local log level must be defined before including esp_log.h
// Set the maximum log level for this source file
#define LOG_LOCAL_LEVEL ESP_LOG_DEBUG
#endif

#include "soc/lldesc.h"
#include "driver/gpio.h"
#include "hal/gpio_hal.h"
#include "driver/i2s_types_legacy.h"
#include "hal/i2s_hal.h"

#if SOC_I2S_SUPPORTS_DAC
#include "driver/dac.h"
#include "esp_private/adc_share_hw_ctrl.h"
#include "adc1_private.h"
#include "driver/adc_i2s_legacy.h"
#include "driver/adc_types_legacy.h"
#endif // SOC_I2S_SUPPORTS_DAC

#if SOC_GDMA_SUPPORTED
#include "esp_private/gdma.h"
#endif

#include "clk_ctrl_os.h"
#include "esp_intr_alloc.h"
#include "esp_err.h"
#include "esp_check.h"
#include "esp_attr.h"
#include "esp_log.h"
#include "esp_pm.h"
#include "esp_efuse.h"
#include "esp_rom_gpio.h"
#include "esp_private/periph_ctrl.h"

static const char *TAG = "i2s(legacy)";

#define I2S_ENTER_CRITICAL_ISR(i2s_num)     portENTER_CRITICAL_ISR(&i2s_spinlock[i2s_num])
#define I2S_EXIT_CRITICAL_ISR(i2s_num)      portEXIT_CRITICAL_ISR(&i2s_spinlock[i2s_num])
#define I2S_ENTER_CRITICAL(i2s_num)         portENTER_CRITICAL(&i2s_spinlock[i2s_num])
#define I2S_EXIT_CRITICAL(i2s_num)          portEXIT_CRITICAL(&i2s_spinlock[i2s_num])

#define I2S_DMA_BUFFER_MAX_SIZE             4092

#if SOC_I2S_SUPPORTS_ADC_DAC
#define I2S_COMM_MODE_ADC_DAC               -1
#endif

/**
 * @brief General clock configuration information
 * @note It is a general purpose struct, not supposed to be used directly by user
 */
typedef struct {
    uint32_t            sample_rate_hz;     /*!< I2S sample rate */
    i2s_clock_src_t     clk_src;            /*!< Choose clock source */
    i2s_mclk_multiple_t mclk_multiple;      /*!< The multiple of mclk to the sample rate */
#if SOC_I2S_SUPPORTS_PDM_TX
    uint32_t            up_sample_fp;       /*!< Up-sampling param fp */
    uint32_t            up_sample_fs;       /*!< Up-sampling param fs */
#endif
#if SOC_I2S_SUPPORTS_PDM_RX
    i2s_pdm_dsr_t       dn_sample_mode;     /*!< Down-sampling rate mode */
#endif
} i2s_clk_config_t;

/**
 * @brief DMA buffer object
 *
 */
typedef struct {
    char              **buf;
    int                 buf_size;
    volatile int        rw_pos;
    volatile void      *curr_ptr;
    SemaphoreHandle_t   mux;
    QueueHandle_t       queue;
    lldesc_t          **desc;
} i2s_dma_t;
/**
 * @brief I2S object instance
 *
 */
typedef struct {
    i2s_port_t          i2s_num;            /*!< I2S port number */
    int                 queue_size;         /*!< I2S event queue size */
    QueueHandle_t       i2s_queue;          /*!< I2S queue handler */
    uint32_t            last_buf_size;      /*!< DMA last buffer size */
    i2s_dma_t          *tx;                 /*!< DMA Tx buffer */
    i2s_dma_t          *rx;                 /*!< DMA Rx buffer */
#if SOC_GDMA_SUPPORTED
    gdma_channel_handle_t rx_dma_chan;      /*!< I2S rx gDMA channel handle */
    gdma_channel_handle_t tx_dma_chan;      /*!< I2S tx gDMA channel handle */
#else
    intr_handle_t       i2s_isr_handle;     /*!< I2S interrupt handle */
#endif
    uint32_t            dma_desc_num;       /*!< DMA descriptor number */
    uint32_t            dma_frame_num;      /*!< Frame number per DMA buffer */
    bool                tx_desc_auto_clear; /*!< I2S auto clear tx descriptor on underflow */
    bool                use_apll;           /*!< I2S use APLL clock */
    int                 fixed_mclk;         /*!< I2S fixed MCLK clock */
    i2s_mclk_multiple_t mclk_multiple;      /*!< The multiple of I2S master clock (MCLK) to sample rate */

#ifdef CONFIG_PM_ENABLE
    esp_pm_lock_handle_t pm_lock;
#endif
    i2s_hal_context_t   hal;                /*!< I2S hal context */

    /* New config */
    i2s_dir_t           dir;                /*!< I2S direction (TX and/or RX) */
    i2s_role_t          role;               /*!< I2S role (master/slave) */
    i2s_comm_mode_t     mode;               /*!< I2S communication mode */
    i2s_hal_slot_config_t slot_cfg;         /*!< I2S slot configuration */
    i2s_clk_config_t    clk_cfg;            /*!< I2S clock configuration */
    uint32_t            active_slot;        /*!< Active slot number */
    uint32_t            total_slot;         /*!< Total slot number */
} i2s_obj_t;

// Record the component name that is using the I2S peripheral
static const char *comp_using_i2s[SOC_I2S_NUM] = {[0 ... SOC_I2S_NUM - 1] = NULL};
// Global I2S object pointer
static i2s_obj_t *p_i2s[SOC_I2S_NUM] = {
    [0 ... SOC_I2S_NUM - 1] = NULL,
};
// Global spin lock for all i2s controllers
static portMUX_TYPE i2s_spinlock[SOC_I2S_NUM] = {
    [0 ... SOC_I2S_NUM - 1] = (portMUX_TYPE)portMUX_INITIALIZER_UNLOCKED,
};

__attribute__((weak)) esp_err_t i2s_platform_acquire_occupation(int id, const char *comp_name);
__attribute__((weak)) esp_err_t i2s_platform_release_occupation(int id);

/*-------------------------------------------------------------
                   I2S DMA operation
  -------------------------------------------------------------*/
#if SOC_GDMA_SUPPORTED
static bool IRAM_ATTR i2s_dma_rx_callback(gdma_channel_handle_t dma_chan, gdma_event_data_t *event_data, void *user_data)
{
    i2s_obj_t *p_i2s = (i2s_obj_t *)user_data;
    portBASE_TYPE need_awoke = 0;
    portBASE_TYPE tmp = 0;
    int dummy;
    i2s_event_t i2s_event;
    uint32_t finish_desc;
    if (p_i2s->rx) {
        finish_desc = event_data->rx_eof_desc_addr;
        i2s_event.size = ((lldesc_t *)finish_desc)->size;
        if (xQueueIsQueueFullFromISR(p_i2s->rx->queue)) {
            xQueueReceiveFromISR(p_i2s->rx->queue, &dummy, &tmp);
            need_awoke |= tmp;
            if (p_i2s->i2s_queue) {
                i2s_event.type = I2S_EVENT_RX_Q_OVF;
                xQueueSendFromISR(p_i2s->i2s_queue, (void *)&i2s_event, &tmp);
                need_awoke |= tmp;
            }
        }
        xQueueSendFromISR(p_i2s->rx->queue, &(((lldesc_t *)finish_desc)->buf), &tmp);
        need_awoke |= tmp;
        if (p_i2s->i2s_queue) {
            i2s_event.type = I2S_EVENT_RX_DONE;
            xQueueSendFromISR(p_i2s->i2s_queue, (void *)&i2s_event, &tmp);
            need_awoke |= tmp;
        }
    }
    return need_awoke;
}

static bool IRAM_ATTR i2s_dma_tx_callback(gdma_channel_handle_t dma_chan, gdma_event_data_t *event_data, void *user_data)
{
    i2s_obj_t *p_i2s = (i2s_obj_t *)user_data;
    portBASE_TYPE need_awoke = 0;
    portBASE_TYPE tmp = 0;
    int dummy;
    i2s_event_t i2s_event;
    uint32_t finish_desc;
    if (p_i2s->tx) {
        finish_desc = event_data->tx_eof_desc_addr;
        i2s_event.size = ((lldesc_t *)finish_desc)->size;
        if (xQueueIsQueueFullFromISR(p_i2s->tx->queue)) {
            xQueueReceiveFromISR(p_i2s->tx->queue, &dummy, &tmp);
            need_awoke |= tmp;
            if (p_i2s->i2s_queue) {
                i2s_event.type = I2S_EVENT_TX_Q_OVF;
                i2s_event.size = p_i2s->tx->buf_size;
                xQueueSendFromISR(p_i2s->i2s_queue, (void *)&i2s_event, &tmp);
                need_awoke |= tmp;
            }
        }
        if (p_i2s->tx_desc_auto_clear) {
            memset((void *)(((lldesc_t *)finish_desc)->buf), 0, p_i2s->tx->buf_size);
        }
        xQueueSendFromISR(p_i2s->tx->queue, &(((lldesc_t *)finish_desc)->buf), &tmp);
        need_awoke |= tmp;
        if (p_i2s->i2s_queue) {
            i2s_event.type = I2S_EVENT_TX_DONE;
            xQueueSendFromISR(p_i2s->i2s_queue, (void *)&i2s_event, &tmp);
            need_awoke |= tmp;
        }
    }
    return need_awoke;
}
#else
static void IRAM_ATTR i2s_intr_handler_default(void *arg)
{
    i2s_obj_t *p_i2s = (i2s_obj_t *)arg;
    uint32_t status = i2s_hal_get_intr_status(&(p_i2s->hal));
    if (status == 0) {
        // Avoid spurious interrupt
        return;
    }

    i2s_event_t i2s_event;
    int dummy;
    portBASE_TYPE need_awoke = 0;
    portBASE_TYPE tmp = 0;
    uint32_t finish_desc = 0;
    if ((status & I2S_LL_EVENT_TX_DSCR_ERR) || (status & I2S_LL_EVENT_RX_DSCR_ERR)) {
        ESP_EARLY_LOGE(TAG, "dma error, interrupt status: 0x%08x", status);
        if (p_i2s->i2s_queue) {
            i2s_event.type = I2S_EVENT_DMA_ERROR;
            if (xQueueIsQueueFullFromISR(p_i2s->i2s_queue)) {
                xQueueReceiveFromISR(p_i2s->i2s_queue, &dummy, &tmp);
                need_awoke |= tmp;
            }
            xQueueSendFromISR(p_i2s->i2s_queue, (void *)&i2s_event, &tmp);
            need_awoke |= tmp;
        }
    }

    if ((status & I2S_LL_EVENT_TX_EOF) && p_i2s->tx) {
        i2s_hal_get_out_eof_des_addr(&(p_i2s->hal), &finish_desc);
        i2s_event.size = ((lldesc_t *)finish_desc)->size;
        // All buffers are empty. This means we have an underflow on our hands.
        if (xQueueIsQueueFullFromISR(p_i2s->tx->queue)) {
            xQueueReceiveFromISR(p_i2s->tx->queue, &dummy, &tmp);
            need_awoke |= tmp;
            if (p_i2s->i2s_queue) {
                i2s_event.type = I2S_EVENT_TX_Q_OVF;
                xQueueSendFromISR(p_i2s->i2s_queue, (void *)&i2s_event, &tmp);
                need_awoke |= tmp;
            }
        }
        // See if tx descriptor needs to be auto cleared:
        // This will avoid any kind of noise that may get introduced due to transmission
        // of previous data from tx descriptor on I2S line.
        if (p_i2s->tx_desc_auto_clear == true) {
            memset((void *)(((lldesc_t *)finish_desc)->buf), 0, p_i2s->tx->buf_size);
        }
        xQueueSendFromISR(p_i2s->tx->queue, &(((lldesc_t *)finish_desc)->buf), &tmp);
        need_awoke |= tmp;
        if (p_i2s->i2s_queue) {
            i2s_event.type = I2S_EVENT_TX_DONE;
            xQueueSendFromISR(p_i2s->i2s_queue, (void *)&i2s_event, &tmp);
            need_awoke |= tmp;
        }
    }

    if ((status & I2S_LL_EVENT_RX_EOF) && p_i2s->rx) {
        // All buffers are full. This means we have an overflow.
        i2s_hal_get_in_eof_des_addr(&(p_i2s->hal), &finish_desc);
        i2s_event.size = ((lldesc_t *)finish_desc)->size;
        if (xQueueIsQueueFullFromISR(p_i2s->rx->queue)) {
            xQueueReceiveFromISR(p_i2s->rx->queue, &dummy, &tmp);
            need_awoke |= tmp;
            if (p_i2s->i2s_queue) {
                i2s_event.type = I2S_EVENT_RX_Q_OVF;
                xQueueSendFromISR(p_i2s->i2s_queue, (void *)&i2s_event, &tmp);
                need_awoke |= tmp;
            }
        }
        xQueueSendFromISR(p_i2s->rx->queue, &(((lldesc_t *)finish_desc)->buf), &tmp);
        need_awoke |= tmp;
        if (p_i2s->i2s_queue) {
            i2s_event.type = I2S_EVENT_RX_DONE;
            xQueueSendFromISR(p_i2s->i2s_queue, (void *)&i2s_event, &tmp);
            need_awoke |= tmp;
        }
    }
    i2s_hal_clear_intr_status(&(p_i2s->hal), status);

    if (need_awoke == pdTRUE) {
        portYIELD_FROM_ISR();
    }
}
#endif

static esp_err_t i2s_dma_intr_init(i2s_port_t i2s_num, int intr_flag)
{
#if SOC_GDMA_SUPPORTED
    /* Set GDMA trigger module */
    gdma_trigger_t trig = {.periph = GDMA_TRIG_PERIPH_I2S};

    switch (i2s_num) {
#if SOC_I2S_NUM > 1
    case I2S_NUM_1:
        trig.instance_id = SOC_GDMA_TRIG_PERIPH_I2S1;
        break;
#endif
    default:
        trig.instance_id = SOC_GDMA_TRIG_PERIPH_I2S0;
        break;
    }

    /* Set GDMA config */
    gdma_channel_alloc_config_t dma_cfg = {};
    if (p_i2s[i2s_num]->dir & I2S_DIR_TX) {
        dma_cfg.direction = GDMA_CHANNEL_DIRECTION_TX;
        /* Register a new GDMA tx channel */
        ESP_RETURN_ON_ERROR(gdma_new_channel(&dma_cfg, &p_i2s[i2s_num]->tx_dma_chan), TAG, "Register tx dma channel error");
        ESP_RETURN_ON_ERROR(gdma_connect(p_i2s[i2s_num]->tx_dma_chan, trig), TAG, "Connect tx dma channel error");
        gdma_tx_event_callbacks_t cb = {.on_trans_eof = i2s_dma_tx_callback};
        /* Set callback function for GDMA, the interrupt is triggered by GDMA, then the GDMA ISR will call the callback function */
        gdma_register_tx_event_callbacks(p_i2s[i2s_num]->tx_dma_chan, &cb, p_i2s[i2s_num]);
    }
    if (p_i2s[i2s_num]->dir & I2S_DIR_RX) {
        dma_cfg.direction = GDMA_CHANNEL_DIRECTION_RX;
        /* Register a new GDMA rx channel */
        ESP_RETURN_ON_ERROR(gdma_new_channel(&dma_cfg, &p_i2s[i2s_num]->rx_dma_chan), TAG, "Register rx dma channel error");
        ESP_RETURN_ON_ERROR(gdma_connect(p_i2s[i2s_num]->rx_dma_chan, trig), TAG, "Connect rx dma channel error");
        gdma_rx_event_callbacks_t cb = {.on_recv_eof = i2s_dma_rx_callback};
        /* Set callback function for GDMA, the interrupt is triggered by GDMA, then the GDMA ISR will call the callback function */
        gdma_register_rx_event_callbacks(p_i2s[i2s_num]->rx_dma_chan, &cb, p_i2s[i2s_num]);
    }
#else
    /* Initialize I2S module interrupt */
    ESP_RETURN_ON_ERROR(esp_intr_alloc(i2s_periph_signal[i2s_num].irq, intr_flag, i2s_intr_handler_default, p_i2s[i2s_num], &p_i2s[i2s_num]->i2s_isr_handle), TAG, "Register I2S Interrupt error");
#endif // SOC_GDMA_SUPPORTED
    return ESP_OK;
}
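
/**
 * @brief I2S tx reset
 *
 * @param i2s_num I2S device number
 */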
static void i2s_tx_reset(i2s_port_t i2s_num)
{
    p_i2s[i2s_num]->tx->curr_ptr = NULL;
    p_i2s[i2s_num]->tx->rw_pos = 0;
    i2s_hal_tx_reset(&(p_i2s[i2s_num]->hal));
#if SOC_GDMA_SUPPORTED
    gdma_reset(p_i2s[i2s_num]->tx_dma_chan);
#else
    i2s_hal_tx_reset_dma(&(p_i2s[i2s_num]->hal));
#endif
    i2s_hal_tx_reset_fifo(&(p_i2s[i2s_num]->hal));
}

/**
 * @brief I2S rx reset
 *
 * @param i2s_num I2S device number
 */
static void i2s_rx_reset(i2s_port_t i2s_num)
{
    p_i2s[i2s_num]->rx->curr_ptr = NULL;
    p_i2s[i2s_num]->rx->rw_pos = 0;
    i2s_hal_rx_reset(&(p_i2s[i2s_num]->hal));
#if SOC_GDMA_SUPPORTED
    gdma_reset(p_i2s[i2s_num]->rx_dma_chan);
#else
    i2s_hal_rx_reset_dma(&(p_i2s[i2s_num]->hal));
#endif
    i2s_hal_rx_reset_fifo(&(p_i2s[i2s_num]->hal));
}
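
/**
 * @brief I2S tx start
 *
 * @param i2s_num I2S device number
 */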
static void i2s_tx_start(i2s_port_t i2s_num)
{
#if SOC_GDMA_SUPPORTED
    gdma_start(p_i2s[i2s_num]->tx_dma_chan, (uint32_t)p_i2s[i2s_num]->tx->desc[0]);
#else
    i2s_hal_tx_enable_dma(&(p_i2s[i2s_num]->hal));
    i2s_hal_tx_enable_intr(&(p_i2s[i2s_num]->hal));
    i2s_hal_tx_start_link(&(p_i2s[i2s_num]->hal), (uint32_t)p_i2s[i2s_num]->tx->desc[0]);
#endif
    i2s_hal_tx_start(&(p_i2s[i2s_num]->hal));
}
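
/**
 * @brief I2S rx start
 *
 * @param i2s_num I2S device number
 */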
static void i2s_rx_start(i2s_port_t i2s_num)
{
#if SOC_GDMA_SUPPORTED
    gdma_start(p_i2s[i2s_num]->rx_dma_chan, (uint32_t)p_i2s[i2s_num]->rx->desc[0]);
#else
    i2s_hal_rx_enable_dma(&(p_i2s[i2s_num]->hal));
    i2s_hal_rx_enable_intr(&(p_i2s[i2s_num]->hal));
    i2s_hal_rx_start_link(&(p_i2s[i2s_num]->hal), (uint32_t)p_i2s[i2s_num]->rx->desc[0]);
#endif
    i2s_hal_rx_start(&(p_i2s[i2s_num]->hal));
}
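
/**
 * @brief I2S tx stop
 *
 * @param i2s_num I2S device number
 */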
static void i2s_tx_stop(i2s_port_t i2s_num)
{
    i2s_hal_tx_stop(&(p_i2s[i2s_num]->hal));
#if SOC_GDMA_SUPPORTED
    gdma_stop(p_i2s[i2s_num]->tx_dma_chan);
#else
    i2s_hal_tx_stop_link(&(p_i2s[i2s_num]->hal));
    i2s_hal_tx_disable_intr(&(p_i2s[i2s_num]->hal));
    i2s_hal_tx_disable_dma(&(p_i2s[i2s_num]->hal));
#endif
}
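
/**
 * @brief I2S rx stop
 *
 * @param i2s_num I2S device number
 */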
static void i2s_rx_stop(i2s_port_t i2s_num)
{
    i2s_hal_rx_stop(&(p_i2s[i2s_num]->hal));
#if SOC_GDMA_SUPPORTED
    gdma_stop(p_i2s[i2s_num]->rx_dma_chan);
#else
    i2s_hal_rx_stop_link(&(p_i2s[i2s_num]->hal));
    i2s_hal_rx_disable_intr(&(p_i2s[i2s_num]->hal));
    i2s_hal_rx_disable_dma(&(p_i2s[i2s_num]->hal));
#endif
}

esp_err_t i2s_start(i2s_port_t i2s_num)
{
    ESP_RETURN_ON_FALSE((i2s_num < SOC_I2S_NUM), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
    // Start DMA link
    I2S_ENTER_CRITICAL(i2s_num);
    if (p_i2s[i2s_num]->dir & I2S_DIR_TX) {
        i2s_tx_reset(i2s_num);
        i2s_tx_start(i2s_num);
    }
    if (p_i2s[i2s_num]->dir & I2S_DIR_RX) {
        i2s_rx_reset(i2s_num);
        i2s_rx_start(i2s_num);
    }
#if !SOC_GDMA_SUPPORTED
    esp_intr_enable(p_i2s[i2s_num]->i2s_isr_handle);
#endif
    I2S_EXIT_CRITICAL(i2s_num);
    return ESP_OK;
}

esp_err_t i2s_stop(i2s_port_t i2s_num)
{
    ESP_RETURN_ON_FALSE((i2s_num < SOC_I2S_NUM), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
    I2S_ENTER_CRITICAL(i2s_num);
#if !SOC_GDMA_SUPPORTED
    esp_intr_disable(p_i2s[i2s_num]->i2s_isr_handle);
#endif
    if (p_i2s[i2s_num]->dir & I2S_DIR_TX) {
        i2s_tx_stop(i2s_num);
    }
    if (p_i2s[i2s_num]->dir & I2S_DIR_RX) {
        i2s_rx_stop(i2s_num);
    }
#if !SOC_GDMA_SUPPORTED
    i2s_hal_clear_intr_status(&(p_i2s[i2s_num]->hal), I2S_INTR_MAX);
#endif
    I2S_EXIT_CRITICAL(i2s_num);
    return ESP_OK;
}

/*-------------------------------------------------------------
                   I2S buffer operation
  -------------------------------------------------------------*/
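/**
 * @brief Calculate the size in bytes of one DMA buffer
 * @note  The frame number is clamped so that a single buffer never exceeds
 *        I2S_DMA_BUFFER_MAX_SIZE (4092 bytes). For example (assuming 16-bit
 *        data width and 2 active slots), one frame takes 4 bytes, so at most
 *        1023 frames fit into one DMA buffer.
 *
 * @param i2s_num I2S device number
 * @return DMA buffer size in bytes
 */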
static inline uint32_t i2s_get_buf_size(i2s_port_t i2s_num)
{
    i2s_hal_slot_config_t *slot_cfg = &p_i2s[i2s_num]->slot_cfg;
    /* Calculate bytes per sample, align to 16 bit */
    uint32_t bytes_per_sample = ((slot_cfg->data_bit_width + 15) / 16) * 2;
    /* The DMA buffer limitation is 4092 bytes */
    uint32_t bytes_per_frame = bytes_per_sample * p_i2s[i2s_num]->active_slot;
    p_i2s[i2s_num]->dma_frame_num = (p_i2s[i2s_num]->dma_frame_num * bytes_per_frame > I2S_DMA_BUFFER_MAX_SIZE) ?
                                    I2S_DMA_BUFFER_MAX_SIZE / bytes_per_frame : p_i2s[i2s_num]->dma_frame_num;
    return p_i2s[i2s_num]->dma_frame_num * bytes_per_frame;
}

static esp_err_t i2s_delete_dma_buffer(i2s_port_t i2s_num, i2s_dma_t *dma_obj)
{
    ESP_RETURN_ON_FALSE(dma_obj, ESP_ERR_INVALID_ARG, TAG, "I2S DMA object can't be NULL");
    uint32_t buf_cnt = p_i2s[i2s_num]->dma_desc_num;
    /* Loop to destroy every descriptor and buffer */
    for (int cnt = 0; cnt < buf_cnt; cnt++) {
        if (dma_obj->desc && dma_obj->desc[cnt]) {
            free(dma_obj->desc[cnt]);
            dma_obj->desc[cnt] = NULL;
        }
        if (dma_obj->buf && dma_obj->buf[cnt]) {
            free(dma_obj->buf[cnt]);
            dma_obj->buf[cnt] = NULL;
        }
    }
    return ESP_OK;
}

static esp_err_t i2s_alloc_dma_buffer(i2s_port_t i2s_num, i2s_dma_t *dma_obj)
{
    esp_err_t ret = ESP_OK;
    ESP_GOTO_ON_FALSE(dma_obj, ESP_ERR_INVALID_ARG, err, TAG, "I2S DMA object can't be NULL");

    uint32_t buf_cnt = p_i2s[i2s_num]->dma_desc_num;
    for (int cnt = 0; cnt < buf_cnt; cnt++) {
        /* Allocate DMA buffer */
        dma_obj->buf[cnt] = (char *)heap_caps_calloc(dma_obj->buf_size, sizeof(char), MALLOC_CAP_DMA);
        ESP_GOTO_ON_FALSE(dma_obj->buf[cnt], ESP_ERR_NO_MEM, err, TAG, "Error malloc dma buffer");
        /* Initialize DMA buffer to 0 */
        memset(dma_obj->buf[cnt], 0, dma_obj->buf_size);
        /* Allocate DMA descriptor */
        dma_obj->desc[cnt] = (lldesc_t *)heap_caps_calloc(1, sizeof(lldesc_t), MALLOC_CAP_DMA);
        ESP_GOTO_ON_FALSE(dma_obj->desc[cnt], ESP_ERR_NO_MEM, err, TAG, "Error malloc dma description entry");
    }
    /* DMA descriptors must be initialized after all of them have been created, otherwise they can't be linked together as a chain */
    for (int cnt = 0; cnt < buf_cnt; cnt++) {
        /* Initialize DMA descriptor */
        dma_obj->desc[cnt]->owner = 1;
        dma_obj->desc[cnt]->eof = 1;
        dma_obj->desc[cnt]->sosf = 0;
        dma_obj->desc[cnt]->length = dma_obj->buf_size;
        dma_obj->desc[cnt]->size = dma_obj->buf_size;
        dma_obj->desc[cnt]->buf = (uint8_t *)dma_obj->buf[cnt];
        dma_obj->desc[cnt]->offset = 0;
        /* Link to the next descriptor */
        dma_obj->desc[cnt]->empty = (uint32_t)((cnt < (buf_cnt - 1)) ? (dma_obj->desc[cnt + 1]) : dma_obj->desc[0]);
    }
    if (p_i2s[i2s_num]->dir & I2S_DIR_RX) {
        i2s_ll_rx_set_eof_num(p_i2s[i2s_num]->hal.dev, dma_obj->buf_size);
    }
    ESP_LOGD(TAG, "DMA Malloc info, datalen=blocksize=%d, dma_desc_num=%" PRIu32, dma_obj->buf_size, buf_cnt);
    return ESP_OK;
err:
    /* Delete DMA buffer if failed to allocate memory */
    i2s_delete_dma_buffer(i2s_num, dma_obj);
    return ret;
}

static esp_err_t i2s_realloc_dma_buffer(i2s_port_t i2s_num, i2s_dma_t *dma_obj)
{
    ESP_RETURN_ON_FALSE(dma_obj, ESP_ERR_INVALID_ARG, TAG, "I2S DMA object can't be NULL");

    /* Destroy old dma descriptor and buffer */
    i2s_delete_dma_buffer(i2s_num, dma_obj);
    /* Alloc new dma descriptor and buffer */
    ESP_RETURN_ON_ERROR(i2s_alloc_dma_buffer(i2s_num, dma_obj), TAG, "Failed to allocate dma buffer");

    return ESP_OK;
}

static esp_err_t i2s_destroy_dma_object(i2s_port_t i2s_num, i2s_dma_t **dma)
{
    /* Check if the DMA object really needs to be destroyed */
    ESP_RETURN_ON_FALSE(p_i2s[i2s_num], ESP_ERR_INVALID_ARG, TAG, "I2S not initialized yet");
    if (!(*dma)) {
        return ESP_OK;
    }
    /* Destroy every descriptor and buffer */
    i2s_delete_dma_buffer(i2s_num, (*dma));
    /* Destroy descriptor pointer */
    if ((*dma)->desc) {
        free((*dma)->desc);
        (*dma)->desc = NULL;
    }
    /* Destroy buffer pointer */
    if ((*dma)->buf) {
        free((*dma)->buf);
        (*dma)->buf = NULL;
    }
    /* Delete DMA mux */
    vSemaphoreDelete((*dma)->mux);
    /* Delete DMA queue */
    vQueueDelete((*dma)->queue);
    /* Free DMA structure */
    free(*dma);
    *dma = NULL;
    ESP_LOGD(TAG, "DMA queue destroyed");
    return ESP_OK;
}

static esp_err_t i2s_create_dma_object(i2s_port_t i2s_num, i2s_dma_t **dma)
{
    ESP_RETURN_ON_FALSE(dma, ESP_ERR_INVALID_ARG, TAG, "DMA object secondary pointer is NULL");
    ESP_RETURN_ON_FALSE((*dma == NULL), ESP_ERR_INVALID_ARG, TAG, "DMA object has been created");
    uint32_t buf_cnt = p_i2s[i2s_num]->dma_desc_num;
    /* Allocate new DMA structure */
    *dma = (i2s_dma_t *)calloc(1, sizeof(i2s_dma_t));
    ESP_RETURN_ON_FALSE(*dma, ESP_ERR_NO_MEM, TAG, "DMA object allocate failed");
    /* Allocate DMA buffer pointer */
    (*dma)->buf = (char **)heap_caps_calloc(buf_cnt, sizeof(char *), MALLOC_CAP_DMA);
    if (!(*dma)->buf) {
        goto err;
    }
    /* Allocate secondary pointer of DMA descriptor chain */
    (*dma)->desc = (lldesc_t **)heap_caps_calloc(buf_cnt, sizeof(lldesc_t *), MALLOC_CAP_DMA);
    if (!(*dma)->desc) {
        goto err;
    }
    /* Create queue and mutex */
    (*dma)->queue = xQueueCreate(buf_cnt - 1, sizeof(char *));
    if (!(*dma)->queue) {
        goto err;
    }
    (*dma)->mux = xSemaphoreCreateMutex();
    if (!(*dma)->mux) {
        goto err;
    }

    return ESP_OK;
err:
    ESP_LOGE(TAG, "I2S DMA object create failed, preparing to uninstall");
    /* Destroy DMA queue if failed to allocate memory */
    i2s_destroy_dma_object(i2s_num, dma);
    return ESP_ERR_NO_MEM;
}

/*-------------------------------------------------------------
                   I2S clock operation
  -------------------------------------------------------------*/
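/**
 * @brief Configure the I2S source clock
 *
 * @param i2s_num  I2S device number
 * @param use_apll Whether to use APLL as the source clock
 * @param mclk     Expected mclk frequency in Hz
 * @return Source clock frequency in Hz, or 0 if setting the APLL frequency failed
 */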
static uint32_t i2s_config_source_clock(i2s_port_t i2s_num, bool use_apll, uint32_t mclk)
{
#if SOC_I2S_SUPPORTS_APLL
    if (use_apll) {
        /* Calculate the expected APLL */
        int div = (int)((SOC_APLL_MIN_HZ / mclk) + 1);
        /* apll_freq = mclk * div
         * when div = 1, hardware will still divide 2
         * when div = 0, the final mclk will be unpredictable
         * So the div here should be at least 2 */
        div = div < 2 ? 2 : div;
        uint32_t expt_freq = mclk * div;
        /* Set APLL coefficients to the given frequency */
        uint32_t real_freq = 0;
        esp_err_t ret = periph_rtc_apll_freq_set(expt_freq, &real_freq);
        if (ret == ESP_ERR_INVALID_ARG) {
            ESP_LOGE(TAG, "set APLL coefficients failed");
            return 0;
        }
        if (ret == ESP_ERR_INVALID_STATE) {
            ESP_LOGW(TAG, "APLL is occupied already, it is working at %" PRIu32 " Hz", real_freq);
        }
        ESP_LOGD(TAG, "APLL expected frequency is %" PRIu32 " Hz, real frequency is %" PRIu32 " Hz", expt_freq, real_freq);
        /* In APLL mode, the APLL output is used as the source clock directly, so return the real APLL frequency here */
        return real_freq;
    }
    return I2S_LL_BASE_CLK;
#else
    if (use_apll) {
        ESP_LOGW(TAG, "APLL not supported on current chip, use I2S_CLK_SRC_DEFAULT as default clock source");
    }
    return I2S_LL_BASE_CLK;
#endif
}

#if SOC_I2S_SUPPORTS_ADC || SOC_I2S_SUPPORTS_DAC
static esp_err_t i2s_calculate_adc_dac_clock(int i2s_num, i2s_hal_clock_info_t *clk_info)
{
    /* For ADC/DAC mode, the built-in ADC/DAC is driven by 'mclk' instead of 'bclk'
     * 'bclk' should be fixed to the double of sample rate
     * 'bclk_div' is the real coefficient that affects the slot bit */
    i2s_clk_config_t *clk_cfg = &p_i2s[i2s_num]->clk_cfg;
    i2s_hal_slot_config_t *slot_cfg = &p_i2s[i2s_num]->slot_cfg;
    uint32_t slot_bits = slot_cfg->slot_bit_width;
    /* Set I2S bit clock */
    clk_info->bclk = clk_cfg->sample_rate_hz * I2S_LL_AD_BCK_FACTOR;
    /* Set I2S bit clock default division */
    clk_info->bclk_div = slot_bits;
    /* If fixed_mclk and use_apll are set, use fixed_mclk as mclk frequency, otherwise calculate by mclk = bclk * bclk_div */
    clk_info->mclk = (p_i2s[i2s_num]->use_apll && p_i2s[i2s_num]->fixed_mclk) ?
                     p_i2s[i2s_num]->fixed_mclk : clk_info->bclk * clk_info->bclk_div;
    /* Calculate bclk_div = mclk / bclk */
    clk_info->bclk_div = clk_info->mclk / clk_info->bclk;
    /* Get I2S system clock by config source clock */
    clk_info->sclk = i2s_config_source_clock(i2s_num, p_i2s[i2s_num]->use_apll, clk_info->mclk);
    /* Get I2S master clock rough division, later will calculate the fine division parameters in HAL */
    clk_info->mclk_div = clk_info->sclk / clk_info->mclk;
    /* Check if the configuration is correct */
    ESP_RETURN_ON_FALSE(clk_info->sclk / (float)clk_info->mclk > 1.99, ESP_ERR_INVALID_ARG, TAG, "sample rate is too large, the mclk division is below minimum value 2");
    ESP_RETURN_ON_FALSE(clk_info->mclk_div < 256, ESP_ERR_INVALID_ARG, TAG, "sample rate is too small, the mclk division exceed the maximum value 255");
    return ESP_OK;
}
#endif // SOC_I2S_SUPPORTS_ADC || SOC_I2S_SUPPORTS_DAC

#if SOC_I2S_SUPPORTS_PDM_TX
static esp_err_t i2s_calculate_pdm_tx_clock(int i2s_num, i2s_hal_clock_info_t *clk_info)
{
    i2s_clk_config_t *clk_cfg = &p_i2s[i2s_num]->clk_cfg;

    int fp = clk_cfg->up_sample_fp;
    int fs = clk_cfg->up_sample_fs;
    /* Set I2S bit clock */
    clk_info->bclk = clk_cfg->sample_rate_hz * I2S_LL_PDM_BCK_FACTOR * fp / fs;
    /* Set I2S bit clock default division */
    clk_info->bclk_div = 8;
    /* If fixed_mclk and use_apll are set, use fixed_mclk as mclk frequency, otherwise calculate by mclk = bclk * bclk_div */
    clk_info->mclk = (p_i2s[i2s_num]->use_apll && p_i2s[i2s_num]->fixed_mclk) ?
                     p_i2s[i2s_num]->fixed_mclk : clk_info->bclk * clk_info->bclk_div;
    /* Calculate bclk_div = mclk / bclk */
    clk_info->bclk_div = clk_info->mclk / clk_info->bclk;
    /* Get I2S system clock by config source clock */
    clk_info->sclk = i2s_config_source_clock(i2s_num, p_i2s[i2s_num]->use_apll, clk_info->mclk);
    /* Get I2S master clock rough division, later will calculate the fine division parameters in HAL */
    clk_info->mclk_div = clk_info->sclk / clk_info->mclk;
    /* Check if the configuration is correct */
    ESP_RETURN_ON_FALSE(clk_info->sclk / (float)clk_info->mclk > 1.99, ESP_ERR_INVALID_ARG, TAG, "sample rate is too large, the mclk division is below minimum value 2");
    ESP_RETURN_ON_FALSE(clk_info->mclk_div < 256, ESP_ERR_INVALID_ARG, TAG, "sample rate is too small, the mclk division exceed the maximum value 255");
    return ESP_OK;
}
#endif // SOC_I2S_SUPPORTS_PDM_TX

#if SOC_I2S_SUPPORTS_PDM_RX
static esp_err_t i2s_calculate_pdm_rx_clock(int i2s_num, i2s_hal_clock_info_t *clk_info)
{
    i2s_clk_config_t *clk_cfg = &p_i2s[i2s_num]->clk_cfg;
    i2s_pdm_dsr_t dsr = clk_cfg->dn_sample_mode;
    /* Set I2S bit clock */
    clk_info->bclk = clk_cfg->sample_rate_hz * I2S_LL_PDM_BCK_FACTOR * (dsr == I2S_PDM_DSR_16S ? 2 : 1);
    /* Set I2S bit clock default division */
    clk_info->bclk_div = 8;
    /* If fixed_mclk and use_apll are set, use fixed_mclk as mclk frequency, otherwise calculate by mclk = bclk * bclk_div */
    clk_info->mclk = (p_i2s[i2s_num]->use_apll && p_i2s[i2s_num]->fixed_mclk) ?
                     p_i2s[i2s_num]->fixed_mclk : clk_info->bclk * clk_info->bclk_div;
    /* Calculate bclk_div = mclk / bclk */
    clk_info->bclk_div = clk_info->mclk / clk_info->bclk;
    /* Get I2S system clock by config source clock */
    clk_info->sclk = i2s_config_source_clock(i2s_num, p_i2s[i2s_num]->use_apll, clk_info->mclk);
    /* Get I2S master clock rough division, later will calculate the fine division parameters in HAL */
    clk_info->mclk_div = clk_info->sclk / clk_info->mclk;
    /* Check if the configuration is correct */
    ESP_RETURN_ON_FALSE(clk_info->sclk / (float)clk_info->mclk > 1.99, ESP_ERR_INVALID_ARG, TAG, "sample rate is too large, the mclk division is below minimum value 2");
    ESP_RETURN_ON_FALSE(clk_info->mclk_div < 256, ESP_ERR_INVALID_ARG, TAG, "sample rate is too small, the mclk division exceed the maximum value 255");
    return ESP_OK;
}
#endif // SOC_I2S_SUPPORTS_PDM_RX

static esp_err_t i2s_calculate_common_clock(int i2s_num, i2s_hal_clock_info_t *clk_info)
{
    i2s_clk_config_t *clk_cfg = &p_i2s[i2s_num]->clk_cfg;
    i2s_hal_slot_config_t *slot_cfg = &p_i2s[i2s_num]->slot_cfg;
    uint32_t rate = clk_cfg->sample_rate_hz;
    uint32_t slot_num = p_i2s[i2s_num]->total_slot < 2 ? 2 : p_i2s[i2s_num]->total_slot;
    uint32_t slot_bits = slot_cfg->slot_bit_width;
    /* Calculate multiple */
    if (p_i2s[i2s_num]->role == I2S_ROLE_MASTER) {
        clk_info->bclk = rate * slot_num * slot_bits;
        clk_info->mclk = rate * clk_cfg->mclk_multiple;
        clk_info->bclk_div = clk_info->mclk / clk_info->bclk;
    } else {
        /* For slave mode, mclk >= bclk * 8, so fix bclk_div to 8 first */
        clk_info->bclk_div = 8;
        clk_info->bclk = rate * slot_num * slot_bits;
        clk_info->mclk = clk_info->bclk * clk_info->bclk_div;
    }
    /* Get I2S system clock by config source clock */
    clk_info->sclk = i2s_config_source_clock(i2s_num, p_i2s[i2s_num]->use_apll, clk_info->mclk);
    /* Get I2S master clock rough division, later will calculate the fine division parameters in HAL */
    clk_info->mclk_div = clk_info->sclk / clk_info->mclk;
    /* Check if the configuration is correct */
    ESP_RETURN_ON_FALSE(clk_info->mclk <= clk_info->sclk, ESP_ERR_INVALID_ARG, TAG, "sample rate is too large");
    return ESP_OK;
}
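
/* A worked example of the common-mode calculation above (assuming master role,
 * a 48 kHz sample rate, 16-bit slots, 2 slots and mclk_multiple = 256):
 *   bclk     = 48000 * 2 * 16 = 1.536 MHz
 *   mclk     = 48000 * 256    = 12.288 MHz
 *   bclk_div = mclk / bclk    = 8
 */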

static esp_err_t i2s_calculate_clock(i2s_port_t i2s_num, i2s_hal_clock_info_t *clk_info)
{
    /* Calculate clock for ADC/DAC mode */
#if SOC_I2S_SUPPORTS_ADC_DAC
    if ((int)p_i2s[i2s_num]->mode == I2S_COMM_MODE_ADC_DAC) {
        ESP_RETURN_ON_ERROR(i2s_calculate_adc_dac_clock(i2s_num, clk_info), TAG, "ADC/DAC clock calculate failed");
        return ESP_OK;
    }
#endif // SOC_I2S_SUPPORTS_ADC_DAC
    /* Calculate clock for PDM mode */
#if SOC_I2S_SUPPORTS_PDM
    if (p_i2s[i2s_num]->mode == I2S_COMM_MODE_PDM) {
#if SOC_I2S_SUPPORTS_PDM_TX
        if (p_i2s[i2s_num]->dir & I2S_DIR_TX) {
            ESP_RETURN_ON_ERROR(i2s_calculate_pdm_tx_clock(i2s_num, clk_info), TAG, "PDM TX clock calculate failed");
        }
#endif // SOC_I2S_SUPPORTS_PDM_TX
#if SOC_I2S_SUPPORTS_PDM_RX
        if (p_i2s[i2s_num]->dir & I2S_DIR_RX) {
            ESP_RETURN_ON_ERROR(i2s_calculate_pdm_rx_clock(i2s_num, clk_info), TAG, "PDM RX clock calculate failed");
        }
#endif // SOC_I2S_SUPPORTS_PDM_RX
        return ESP_OK;
    }
#endif // SOC_I2S_SUPPORTS_PDM
    /* Calculate clock for common mode */
    ESP_RETURN_ON_ERROR(i2s_calculate_common_clock(i2s_num, clk_info), TAG, "Common clock calculate failed");
    ESP_LOGD(TAG, "[sclk] %" PRIu32 " [mclk] %" PRIu32 " [mclk_div] %d [bclk] %" PRIu32 " [bclk_div] %d",
             clk_info->sclk, clk_info->mclk, clk_info->mclk_div, clk_info->bclk, clk_info->bclk_div);
    return ESP_OK;
}

/*-------------------------------------------------------------
                   I2S configuration
  -------------------------------------------------------------*/
#if SOC_I2S_SUPPORTS_ADC_DAC
static void i2s_dac_set_slot_legacy(void)
{
    i2s_dev_t *dev = p_i2s[0]->hal.dev;
    i2s_hal_slot_config_t *slot_cfg = &p_i2s[0]->slot_cfg;

    i2s_ll_tx_reset(dev);
    i2s_ll_tx_set_slave_mod(dev, false);
    i2s_ll_tx_set_sample_bit(dev, slot_cfg->slot_bit_width, slot_cfg->data_bit_width);
    i2s_ll_tx_enable_mono_mode(dev, slot_cfg->slot_mode == I2S_SLOT_MODE_MONO);
    i2s_ll_tx_enable_msb_shift(dev, false);
    i2s_ll_tx_set_ws_width(dev, slot_cfg->slot_bit_width);
    i2s_ll_tx_enable_msb_right(dev, false);
    i2s_ll_tx_enable_right_first(dev, true);
    /* Should always enable fifo */
    i2s_ll_tx_force_enable_fifo_mod(dev, true);
}

esp_err_t i2s_set_dac_mode(i2s_dac_mode_t dac_mode)
{
    ESP_RETURN_ON_FALSE((dac_mode < I2S_DAC_CHANNEL_MAX), ESP_ERR_INVALID_ARG, TAG, "i2s dac mode error");
    if (dac_mode == I2S_DAC_CHANNEL_DISABLE) {
        dac_output_disable(DAC_CHANNEL_1);
        dac_output_disable(DAC_CHANNEL_2);
        dac_i2s_disable();
    } else {
        dac_i2s_enable();
    }

    if (dac_mode & I2S_DAC_CHANNEL_RIGHT_EN) {
        // DAC1, right channel
        dac_output_enable(DAC_CHANNEL_1);
    }
    if (dac_mode & I2S_DAC_CHANNEL_LEFT_EN) {
        // DAC2, left channel
        dac_output_enable(DAC_CHANNEL_2);
    }
    return ESP_OK;
}
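
/* Usage sketch for i2s_set_dac_mode() (assuming the I2S0 driver is installed in built-in DAC mode):
 *     i2s_set_dac_mode(I2S_DAC_CHANNEL_BOTH_EN);   // enable output on both DAC1 and DAC2
 *     ...
 *     i2s_set_dac_mode(I2S_DAC_CHANNEL_DISABLE);   // release both DAC channels
 */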

static void i2s_adc_set_slot_legacy(void)
{
    i2s_dev_t *dev = p_i2s[0]->hal.dev;
    i2s_hal_slot_config_t *slot_cfg = &p_i2s[0]->slot_cfg;
    // When ADC/DAC are installed as duplex mode, ADC will share the WS and BCLK clock by working in slave mode
    i2s_ll_rx_set_slave_mod(dev, false);
    i2s_ll_rx_set_sample_bit(dev, slot_cfg->slot_bit_width, slot_cfg->data_bit_width);
    i2s_ll_rx_enable_mono_mode(dev, true); // ADC should use mono mode to meet the sample rate
    i2s_ll_rx_enable_msb_shift(dev, false);
    i2s_ll_rx_set_ws_width(dev, slot_cfg->slot_bit_width);
    i2s_ll_rx_enable_msb_right(dev, false);
    i2s_ll_rx_enable_right_first(dev, false);
    /* Should always enable fifo */
    i2s_ll_rx_force_enable_fifo_mod(dev, true);
}

static int _i2s_adc_unit = -1;
static int _i2s_adc_channel = -1;

static esp_err_t _i2s_adc_mode_recover(void)
{
    ESP_RETURN_ON_FALSE(((_i2s_adc_unit != -1) && (_i2s_adc_channel != -1)), ESP_ERR_INVALID_ARG, TAG, "i2s ADC recover error, not initialized...");
    return adc_i2s_mode_init(_i2s_adc_unit, _i2s_adc_channel);
}

esp_err_t i2s_set_adc_mode(adc_unit_t adc_unit, adc1_channel_t adc_channel)
{
    ESP_RETURN_ON_FALSE((adc_unit < ADC_UNIT_2), ESP_ERR_INVALID_ARG, TAG, "i2s ADC unit error, only support ADC1 for now");
    // For now, we only support SAR ADC1.
    _i2s_adc_unit = adc_unit;
    _i2s_adc_channel = adc_channel;
    return adc_i2s_mode_init(adc_unit, adc_channel);
}
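
/* Usage sketch for i2s_set_adc_mode() (assuming ADC1 channel 0 is the capture input
 * and the I2S0 driver is installed in built-in ADC mode):
 *     i2s_set_adc_mode(ADC_UNIT_1, ADC1_CHANNEL_0);
 *     i2s_adc_enable(I2S_NUM_0);
 *     ... read the samples ...
 *     i2s_adc_disable(I2S_NUM_0);
 */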

esp_err_t i2s_adc_enable(i2s_port_t i2s_num)
{
    ESP_RETURN_ON_FALSE((i2s_num < SOC_I2S_NUM), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
    ESP_RETURN_ON_FALSE((p_i2s[i2s_num] != NULL), ESP_ERR_INVALID_STATE, TAG, "Not initialized yet");
    ESP_RETURN_ON_FALSE(((int)p_i2s[i2s_num]->mode == I2S_COMM_MODE_ADC_DAC) && (p_i2s[i2s_num]->dir & I2S_DIR_RX),
                        ESP_ERR_INVALID_STATE, TAG, "i2s built-in adc not enabled");

    adc1_dma_mode_acquire();
    _i2s_adc_mode_recover();
    i2s_rx_reset(i2s_num);
    return i2s_start(i2s_num);
}

esp_err_t i2s_adc_disable(i2s_port_t i2s_num)
{
    ESP_RETURN_ON_FALSE((i2s_num < SOC_I2S_NUM), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
    ESP_RETURN_ON_FALSE((p_i2s[i2s_num] != NULL), ESP_ERR_INVALID_STATE, TAG, "Not initialized yet");
    ESP_RETURN_ON_FALSE(((int)p_i2s[i2s_num]->mode == I2S_COMM_MODE_ADC_DAC) && (p_i2s[i2s_num]->dir & I2S_DIR_RX),
                        ESP_ERR_INVALID_STATE, TAG, "i2s built-in adc not enabled");

    i2s_hal_rx_stop(&(p_i2s[i2s_num]->hal));
    adc1_lock_release();
    return ESP_OK;
}
#endif

static esp_err_t i2s_check_cfg_validity(i2s_port_t i2s_num, const i2s_config_t *cfg)
{
    /* Step 1: Check the validity of input parameters */
    /* Check the validity of i2s device number */
    ESP_RETURN_ON_FALSE((i2s_num < SOC_I2S_NUM), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
    ESP_RETURN_ON_FALSE(p_i2s[i2s_num] == NULL, ESP_ERR_INVALID_STATE, TAG, "this i2s port is in use");
    ESP_RETURN_ON_FALSE(cfg, ESP_ERR_INVALID_ARG, TAG, "I2S configuration must not be NULL");
    /* Check the size of DMA buffer */
    ESP_RETURN_ON_FALSE((cfg->dma_desc_num >= 2 && cfg->dma_desc_num <= 128), ESP_ERR_INVALID_ARG, TAG, "I2S buffer count less than 128 and more than 2");
    ESP_RETURN_ON_FALSE((cfg->dma_frame_num >= 8 && cfg->dma_frame_num <= 1024), ESP_ERR_INVALID_ARG, TAG, "I2S buffer length at most 1024 and more than 8");

#if SOC_I2S_SUPPORTS_PDM_TX || SOC_I2S_SUPPORTS_PDM_RX
    /* Check PDM mode */
    if (cfg->mode & I2S_MODE_PDM) {
        ESP_RETURN_ON_FALSE(i2s_num == I2S_NUM_0, ESP_ERR_INVALID_ARG, TAG, "I2S PDM mode only support on I2S0");
#if !SOC_I2S_SUPPORTS_PDM_TX
        ESP_RETURN_ON_FALSE(!(cfg->mode & I2S_MODE_TX), ESP_ERR_INVALID_ARG, TAG, "PDM does not support TX on this chip");
#endif // SOC_I2S_SUPPORTS_PDM_TX
#if !SOC_I2S_SUPPORTS_PDM_RX
        ESP_RETURN_ON_FALSE(!(cfg->mode & I2S_MODE_RX), ESP_ERR_INVALID_ARG, TAG, "PDM does not support RX on this chip");
#endif // SOC_I2S_SUPPORTS_PDM_RX
    }
#else
    ESP_RETURN_ON_FALSE(!(cfg->mode & I2S_MODE_PDM), ESP_ERR_INVALID_ARG, TAG, "I2S PDM mode not supported on current chip");
#endif // SOC_I2S_SUPPORTS_PDM_TX || SOC_I2S_SUPPORTS_PDM_RX

#if SOC_I2S_SUPPORTS_ADC || SOC_I2S_SUPPORTS_DAC
    /* Check built-in ADC/DAC mode */
    if (cfg->mode & (I2S_MODE_ADC_BUILT_IN | I2S_MODE_DAC_BUILT_IN)) {
        ESP_RETURN_ON_FALSE(i2s_num == I2S_NUM_0, ESP_ERR_INVALID_ARG, TAG, "I2S built-in ADC/DAC only support on I2S0");
    }
#else
    /* Check the transmit/receive mode */
    ESP_RETURN_ON_FALSE((cfg->mode & I2S_MODE_TX) || (cfg->mode & I2S_MODE_RX), ESP_ERR_INVALID_ARG, TAG, "I2S no TX/RX mode selected");
    /* Check communication format */
    ESP_RETURN_ON_FALSE(cfg->communication_format && (cfg->communication_format < I2S_COMM_FORMAT_STAND_MAX), ESP_ERR_INVALID_ARG, TAG, "invalid communication formats");
#endif // SOC_I2S_SUPPORTS_ADC || SOC_I2S_SUPPORTS_DAC

    return ESP_OK;
}

static void i2s_set_slot_legacy(i2s_port_t i2s_num)
{
    bool is_tx_slave = p_i2s[i2s_num]->role == I2S_ROLE_SLAVE;
    bool is_rx_slave = is_tx_slave;
    if (p_i2s[i2s_num]->dir == (I2S_DIR_TX | I2S_DIR_RX)) {
        i2s_ll_share_bck_ws(p_i2s[i2s_num]->hal.dev, true);
        /* Since bck and ws are shared, only tx or rx can be master
           Force to set rx as slave to avoid conflict of clock signal */
        is_rx_slave = true;
    } else {
        i2s_ll_share_bck_ws(p_i2s[i2s_num]->hal.dev, false);
    }

    if (p_i2s[i2s_num]->mode == I2S_COMM_MODE_STD) {
        if (p_i2s[i2s_num]->dir & I2S_DIR_TX) {
            i2s_hal_std_set_tx_slot(&(p_i2s[i2s_num]->hal), is_tx_slave, (i2s_hal_slot_config_t *)(&p_i2s[i2s_num]->slot_cfg));
        }
        if (p_i2s[i2s_num]->dir & I2S_DIR_RX) {
            i2s_hal_std_set_rx_slot(&(p_i2s[i2s_num]->hal), is_rx_slave, (i2s_hal_slot_config_t *)(&p_i2s[i2s_num]->slot_cfg));
        }
    }
#if SOC_I2S_SUPPORTS_PDM
    else if (p_i2s[i2s_num]->mode == I2S_COMM_MODE_PDM) {
#if SOC_I2S_SUPPORTS_PDM_TX
        if (p_i2s[i2s_num]->dir & I2S_DIR_TX) {
            i2s_hal_pdm_set_tx_slot(&(p_i2s[i2s_num]->hal), is_tx_slave, (i2s_hal_slot_config_t *)(&p_i2s[i2s_num]->slot_cfg));
        }
#endif
#if SOC_I2S_SUPPORTS_PDM_RX
        if (p_i2s[i2s_num]->dir & I2S_DIR_RX) {
            i2s_hal_pdm_set_rx_slot(&(p_i2s[i2s_num]->hal), is_rx_slave, (i2s_hal_slot_config_t *)(&p_i2s[i2s_num]->slot_cfg));
        }
#endif
    }
#endif
#if SOC_I2S_SUPPORTS_TDM
    else if (p_i2s[i2s_num]->mode == I2S_COMM_MODE_TDM) {
        if (p_i2s[i2s_num]->dir & I2S_DIR_TX) {
            i2s_hal_tdm_set_tx_slot(&(p_i2s[i2s_num]->hal), is_tx_slave, (i2s_hal_slot_config_t *)(&p_i2s[i2s_num]->slot_cfg));
        }
        if (p_i2s[i2s_num]->dir & I2S_DIR_RX) {
            i2s_hal_tdm_set_rx_slot(&(p_i2s[i2s_num]->hal), is_rx_slave, (i2s_hal_slot_config_t *)(&p_i2s[i2s_num]->slot_cfg));
        }
    }
#endif
#if SOC_I2S_SUPPORTS_ADC_DAC
    else if ((int)p_i2s[i2s_num]->mode == I2S_COMM_MODE_ADC_DAC) {
        if (p_i2s[i2s_num]->dir & I2S_DIR_TX) {
            i2s_dac_set_slot_legacy();
        }
        if (p_i2s[i2s_num]->dir & I2S_DIR_RX) {
            i2s_adc_set_slot_legacy();
        }
    }
#endif
}

static void i2s_set_clock_legacy(i2s_port_t i2s_num)
{
    i2s_clk_config_t *clk_cfg = &p_i2s[i2s_num]->clk_cfg;
    i2s_hal_clock_info_t clk_info;
    i2s_calculate_clock(i2s_num, &clk_info);
    if (p_i2s[i2s_num]->dir & I2S_DIR_TX) {
        i2s_hal_set_tx_clock(&(p_i2s[i2s_num]->hal), &clk_info, clk_cfg->clk_src);
    }
    if (p_i2s[i2s_num]->dir & I2S_DIR_RX) {
        i2s_hal_set_rx_clock(&(p_i2s[i2s_num]->hal), &clk_info, clk_cfg->clk_src);
    }
}

float i2s_get_clk(i2s_port_t i2s_num)
{
    ESP_RETURN_ON_FALSE((i2s_num < SOC_I2S_NUM), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
    i2s_clk_config_t *clk_cfg = &p_i2s[i2s_num]->clk_cfg;
    return (float)clk_cfg->sample_rate_hz;
}

esp_err_t i2s_set_clk(i2s_port_t i2s_num, uint32_t rate, uint32_t bits_cfg, i2s_channel_t ch)
{
    ESP_RETURN_ON_FALSE((i2s_num < SOC_I2S_NUM), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
    ESP_RETURN_ON_FALSE(p_i2s[i2s_num], ESP_ERR_INVALID_ARG, TAG, "I2S%d has not installed yet", i2s_num);

    /* Acquire the locks before stopping I2S, otherwise a concurrent read/write could get stuck
     * waiting on the message queue that is only refilled from the interrupt */
    if (p_i2s[i2s_num]->dir & I2S_MODE_TX) {
        xSemaphoreTake(p_i2s[i2s_num]->tx->mux, portMAX_DELAY);
    }
    if (p_i2s[i2s_num]->dir & I2S_MODE_RX) {
        xSemaphoreTake(p_i2s[i2s_num]->rx->mux, portMAX_DELAY);
    }
    /* Stop I2S */
    i2s_stop(i2s_num);

    i2s_clk_config_t *clk_cfg = &p_i2s[i2s_num]->clk_cfg;
    i2s_hal_slot_config_t *slot_cfg = &p_i2s[i2s_num]->slot_cfg;

    clk_cfg->sample_rate_hz = rate;
    slot_cfg->data_bit_width = bits_cfg & 0xFFFF;
    ESP_RETURN_ON_FALSE((slot_cfg->data_bit_width % 8 == 0), ESP_ERR_INVALID_ARG, TAG, "Invalid bits per sample");
    slot_cfg->slot_bit_width = (bits_cfg >> 16) > slot_cfg->data_bit_width ?
                               (bits_cfg >> 16) : slot_cfg->data_bit_width;
    ESP_RETURN_ON_FALSE((slot_cfg->slot_bit_width % 8 == 0), ESP_ERR_INVALID_ARG, TAG, "Invalid bits per channel");
    ESP_RETURN_ON_FALSE(((int)slot_cfg->slot_bit_width <= (int)I2S_BITS_PER_SAMPLE_32BIT), ESP_ERR_INVALID_ARG, TAG, "Invalid bits per sample");
    slot_cfg->slot_mode = ((ch & 0xFFFF) == I2S_CHANNEL_MONO) ? I2S_SLOT_MODE_MONO : I2S_SLOT_MODE_STEREO;
    if (p_i2s[i2s_num]->mode == I2S_COMM_MODE_STD) {
        if (slot_cfg->slot_mode == I2S_SLOT_MODE_MONO) {
            if (slot_cfg->std.slot_mask == I2S_STD_SLOT_BOTH) {
                slot_cfg->std.slot_mask = I2S_STD_SLOT_LEFT;
#if SOC_I2S_HW_VERSION_1
                // Enable right first to get correct data sequence
                slot_cfg->std.ws_pol = !slot_cfg->std.ws_pol;
#endif
            }
        } else {
            slot_cfg->std.slot_mask = I2S_STD_SLOT_BOTH;
        }
    }

#if SOC_I2S_SUPPORTS_TDM
    if (p_i2s[i2s_num]->mode == I2S_COMM_MODE_TDM) {
        uint32_t slot_mask = ch >> 16;
        if (slot_mask == 0) {
            slot_mask = (slot_cfg->slot_mode == I2S_SLOT_MODE_MONO) ? 1 : 2;
        }
        ESP_RETURN_ON_FALSE(p_i2s[i2s_num]->total_slot >= (32 - __builtin_clz(slot_mask)), ESP_ERR_INVALID_ARG, TAG,
                            "The max channel number can't be greater than CH%"PRIu32, p_i2s[i2s_num]->total_slot);
        p_i2s[i2s_num]->active_slot = __builtin_popcount(slot_mask);
    } else
#endif
    {
        p_i2s[i2s_num]->active_slot = (slot_cfg->slot_mode == I2S_SLOT_MODE_MONO) ? 1 : 2;
    }

    i2s_set_slot_legacy(i2s_num);
    i2s_set_clock_legacy(i2s_num);

    uint32_t buf_size = i2s_get_buf_size(i2s_num);
    bool need_realloc = buf_size != p_i2s[i2s_num]->last_buf_size;
    if (need_realloc) {
        esp_err_t ret = ESP_OK;
        if (p_i2s[i2s_num]->dir & I2S_DIR_TX) {
            p_i2s[i2s_num]->tx->buf_size = buf_size;
            ret = i2s_realloc_dma_buffer(i2s_num, p_i2s[i2s_num]->tx);
            xQueueReset(p_i2s[i2s_num]->tx->queue);
            ESP_RETURN_ON_ERROR(ret, TAG, "I2S%d tx DMA buffer malloc failed", i2s_num);
        }
        if (p_i2s[i2s_num]->dir & I2S_DIR_RX) {
            p_i2s[i2s_num]->rx->buf_size = buf_size;
            ret = i2s_realloc_dma_buffer(i2s_num, p_i2s[i2s_num]->rx);
            xQueueReset(p_i2s[i2s_num]->rx->queue);
            ESP_RETURN_ON_ERROR(ret, TAG, "I2S%d rx DMA buffer malloc failed", i2s_num);
        }
    }
    /* Update last buffer size */
    p_i2s[i2s_num]->last_buf_size = buf_size;

    /* I2S start */
    i2s_start(i2s_num);

    if (p_i2s[i2s_num]->dir & I2S_MODE_TX) {
        xSemaphoreGive(p_i2s[i2s_num]->tx->mux);
    }
    if (p_i2s[i2s_num]->dir & I2S_MODE_RX) {
        xSemaphoreGive(p_i2s[i2s_num]->rx->mux);
    }
    return ESP_OK;
}
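
/*
 * A minimal usage sketch (not part of the driver): reconfigure an already-installed
 * port to 44.1 kHz, 16-bit stereo. The port number and the assumption that the port
 * was installed in standard mode beforehand are illustrative only.
 *
 *     ESP_ERROR_CHECK(i2s_set_clk(I2S_NUM_0, 44100, I2S_BITS_PER_SAMPLE_16BIT, I2S_CHANNEL_STEREO));
 */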

esp_err_t i2s_set_sample_rates(i2s_port_t i2s_num, uint32_t rate)
{
    ESP_RETURN_ON_FALSE((i2s_num < SOC_I2S_NUM), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
    i2s_hal_slot_config_t *slot_cfg = &p_i2s[i2s_num]->slot_cfg;
    uint32_t mask = 0;
#if SOC_I2S_SUPPORTS_TDM
    if (p_i2s[i2s_num]->mode == I2S_COMM_MODE_TDM) {
        mask = slot_cfg->tdm.slot_mask;
    }
#endif
    return i2s_set_clk(i2s_num, rate, slot_cfg->data_bit_width, slot_cfg->slot_mode | (mask << 16));
}
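
/*
 * A minimal usage sketch, assuming an installed port: change only the sample rate
 * while keeping the currently configured bit width and slot mode.
 *
 *     i2s_set_sample_rates(I2S_NUM_0, 48000);
 */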

#if SOC_I2S_SUPPORTS_PCM
esp_err_t i2s_pcm_config(i2s_port_t i2s_num, const i2s_pcm_cfg_t *pcm_cfg)
{
    ESP_RETURN_ON_FALSE(p_i2s[i2s_num], ESP_FAIL, TAG, "i2s has not installed yet");
    if (p_i2s[i2s_num]->dir & I2S_MODE_TX) {
        xSemaphoreTake(p_i2s[i2s_num]->tx->mux, portMAX_DELAY);
    }
    if (p_i2s[i2s_num]->dir & I2S_MODE_RX) {
        xSemaphoreTake(p_i2s[i2s_num]->rx->mux, portMAX_DELAY);
    }
    i2s_stop(i2s_num);
    I2S_ENTER_CRITICAL(i2s_num);
    if (p_i2s[i2s_num]->dir & I2S_DIR_TX) {
        i2s_ll_tx_set_pcm_type(p_i2s[i2s_num]->hal.dev, pcm_cfg->pcm_type);
    }
    if (p_i2s[i2s_num]->dir & I2S_DIR_RX) {
        i2s_ll_rx_set_pcm_type(p_i2s[i2s_num]->hal.dev, pcm_cfg->pcm_type);
    }
    I2S_EXIT_CRITICAL(i2s_num);
    i2s_start(i2s_num);
    if (p_i2s[i2s_num]->dir & I2S_MODE_TX) {
        xSemaphoreGive(p_i2s[i2s_num]->tx->mux);
    }
    if (p_i2s[i2s_num]->dir & I2S_MODE_RX) {
        xSemaphoreGive(p_i2s[i2s_num]->rx->mux);
    }
    return ESP_OK;
}
# endif
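
/*
 * A minimal usage sketch, assuming a standard-mode port on a chip with PCM support;
 * the A-law compression selector shown here is illustrative only.
 *
 *     i2s_pcm_cfg_t pcm_cfg = {
 *         .pcm_type = I2S_PCM_A_COMPRESS,
 *     };
 *     i2s_pcm_config(I2S_NUM_0, &pcm_cfg);
 */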

#if SOC_I2S_SUPPORTS_PDM_RX
esp_err_t i2s_set_pdm_rx_down_sample(i2s_port_t i2s_num, i2s_pdm_dsr_t downsample)
{
    ESP_RETURN_ON_FALSE(p_i2s[i2s_num], ESP_FAIL, TAG, "i2s has not installed yet");
    ESP_RETURN_ON_FALSE((p_i2s[i2s_num]->mode == I2S_COMM_MODE_PDM), ESP_ERR_INVALID_ARG, TAG, "i2s mode is not PDM mode");
    xSemaphoreTake(p_i2s[i2s_num]->rx->mux, portMAX_DELAY);
    i2s_stop(i2s_num);
    p_i2s[i2s_num]->clk_cfg.dn_sample_mode = downsample;
    i2s_ll_rx_set_pdm_dsr(p_i2s[i2s_num]->hal.dev, downsample);
    i2s_start(i2s_num);
    xSemaphoreGive(p_i2s[i2s_num]->rx->mux);
    return i2s_set_clk(i2s_num, p_i2s[i2s_num]->clk_cfg.sample_rate_hz, p_i2s[i2s_num]->slot_cfg.data_bit_width, p_i2s[i2s_num]->slot_cfg.slot_mode);
}
# endif
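
/*
 * A minimal usage sketch, assuming a PDM RX port is installed; the down-sampling
 * selector value is illustrative.
 *
 *     i2s_set_pdm_rx_down_sample(I2S_NUM_0, I2S_PDM_DSR_8S);
 */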

#if SOC_I2S_SUPPORTS_PDM_TX
esp_err_t i2s_set_pdm_tx_up_sample(i2s_port_t i2s_num, const i2s_pdm_tx_upsample_cfg_t *upsample_cfg)
{
    ESP_RETURN_ON_FALSE(p_i2s[i2s_num], ESP_FAIL, TAG, "i2s has not installed yet");
    ESP_RETURN_ON_FALSE((p_i2s[i2s_num]->mode == I2S_COMM_MODE_PDM) && (p_i2s[i2s_num]->dir & I2S_DIR_TX),
                        ESP_ERR_INVALID_ARG, TAG, "i2s mode is not PDM mode");
    xSemaphoreTake(p_i2s[i2s_num]->tx->mux, portMAX_DELAY);
    i2s_stop(i2s_num);
    p_i2s[i2s_num]->clk_cfg.up_sample_fp = upsample_cfg->fp;
    p_i2s[i2s_num]->clk_cfg.up_sample_fs = upsample_cfg->fs;
    i2s_ll_tx_set_pdm_fpfs(p_i2s[i2s_num]->hal.dev, upsample_cfg->fp, upsample_cfg->fs);
    i2s_start(i2s_num);
    xSemaphoreGive(p_i2s[i2s_num]->tx->mux);
    return i2s_set_clk(i2s_num, p_i2s[i2s_num]->clk_cfg.sample_rate_hz, p_i2s[i2s_num]->slot_cfg.data_bit_width, p_i2s[i2s_num]->slot_cfg.slot_mode);
}
# endif
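
/*
 * A minimal usage sketch, assuming a PDM TX port is installed; the fp/fs values
 * follow the fixed-fp convention used elsewhere in this file (fp = 960,
 * fs = sample_rate / 100) and are illustrative.
 *
 *     i2s_pdm_tx_upsample_cfg_t upsample_cfg = {
 *         .fp = 960,
 *         .fs = 441,   // e.g. for a 44.1 kHz sample rate
 *     };
 *     i2s_set_pdm_tx_up_sample(I2S_NUM_0, &upsample_cfg);
 */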

static esp_err_t i2s_dma_object_init(i2s_port_t i2s_num)
{
    uint32_t buf_size = i2s_get_buf_size(i2s_num);
    p_i2s[i2s_num]->last_buf_size = buf_size;
    /* Create DMA object */
    if (p_i2s[i2s_num]->dir & I2S_DIR_TX) {
        ESP_RETURN_ON_ERROR(i2s_create_dma_object(i2s_num, &p_i2s[i2s_num]->tx), TAG, "I2S TX DMA object create failed");
        p_i2s[i2s_num]->tx->buf_size = buf_size;
    }
    if (p_i2s[i2s_num]->dir & I2S_DIR_RX) {
        ESP_RETURN_ON_ERROR(i2s_create_dma_object(i2s_num, &p_i2s[i2s_num]->rx), TAG, "I2S RX DMA object create failed");
        p_i2s[i2s_num]->rx->buf_size = buf_size;
    }
    return ESP_OK;
}

static void i2s_mode_identify(i2s_port_t i2s_num, const i2s_config_t *i2s_config)
{
    p_i2s[i2s_num]->mode = I2S_COMM_MODE_STD;

    if (i2s_config->mode & I2S_MODE_MASTER) {
        p_i2s[i2s_num]->role = I2S_ROLE_MASTER;
    } else if (i2s_config->mode & I2S_MODE_SLAVE) {
        p_i2s[i2s_num]->role = I2S_ROLE_SLAVE;
    }
    if (i2s_config->mode & I2S_MODE_TX) {
        p_i2s[i2s_num]->dir |= I2S_DIR_TX;
    }
    if (i2s_config->mode & I2S_MODE_RX) {
        p_i2s[i2s_num]->dir |= I2S_DIR_RX;
    }
#if SOC_I2S_SUPPORTS_PDM
    if (i2s_config->mode & I2S_MODE_PDM) {
        p_i2s[i2s_num]->mode = I2S_COMM_MODE_PDM;
    }
#endif // SOC_I2S_SUPPORTS_PDM
#if SOC_I2S_SUPPORTS_TDM
    if (i2s_config->channel_format == I2S_CHANNEL_FMT_MULTIPLE) {
        p_i2s[i2s_num]->mode = I2S_COMM_MODE_TDM;
    }
#endif // SOC_I2S_SUPPORTS_TDM

#if SOC_I2S_SUPPORTS_ADC_DAC
    if ((i2s_config->mode & I2S_MODE_DAC_BUILT_IN) ||
        (i2s_config->mode & I2S_MODE_ADC_BUILT_IN)) {
        p_i2s[i2s_num]->mode = (i2s_comm_mode_t)I2S_COMM_MODE_ADC_DAC;
    }
#endif // SOC_I2S_SUPPORTS_ADC_DAC
}

static esp_err_t i2s_config_transfer(i2s_port_t i2s_num, const i2s_config_t *i2s_config)
{
#define SLOT_CFG(m)     p_i2s[i2s_num]->slot_cfg.m
#define CLK_CFG()       p_i2s[i2s_num]->clk_cfg
    /* Convert legacy configuration into general part of slot and clock configuration */
    p_i2s[i2s_num]->slot_cfg.data_bit_width = i2s_config->bits_per_sample;
    p_i2s[i2s_num]->slot_cfg.slot_bit_width = (int)i2s_config->bits_per_chan < (int)i2s_config->bits_per_sample ?
                                              i2s_config->bits_per_sample : i2s_config->bits_per_chan;

    p_i2s[i2s_num]->slot_cfg.slot_mode = i2s_config->channel_format < I2S_CHANNEL_FMT_ONLY_RIGHT ?
                                         I2S_SLOT_MODE_STEREO : I2S_SLOT_MODE_MONO;
    CLK_CFG().sample_rate_hz = i2s_config->sample_rate;
    CLK_CFG().mclk_multiple = i2s_config->mclk_multiple == 0 ? I2S_MCLK_MULTIPLE_256 : i2s_config->mclk_multiple;
    CLK_CFG().clk_src = I2S_CLK_SRC_DEFAULT;
    p_i2s[i2s_num]->fixed_mclk = i2s_config->fixed_mclk;
    p_i2s[i2s_num]->use_apll = false;
#if SOC_I2S_SUPPORTS_APLL
    CLK_CFG().clk_src = i2s_config->use_apll ? I2S_CLK_SRC_APLL : I2S_CLK_SRC_DEFAULT;
    p_i2s[i2s_num]->use_apll = i2s_config->use_apll;
#endif // SOC_I2S_SUPPORTS_APLL

    /* Convert legacy configuration into particular part of slot and clock configuration */
    if (p_i2s[i2s_num]->mode == I2S_COMM_MODE_STD) {
        /* Generate STD slot configuration */
        SLOT_CFG(std).ws_width = i2s_config->bits_per_sample;
        SLOT_CFG(std).ws_pol = false;
        if (i2s_config->channel_format == I2S_CHANNEL_FMT_RIGHT_LEFT) {
            SLOT_CFG(std).slot_mask = I2S_STD_SLOT_BOTH;
        } else if (i2s_config->channel_format == I2S_CHANNEL_FMT_ALL_LEFT ||
                   i2s_config->channel_format == I2S_CHANNEL_FMT_ONLY_LEFT) {
            SLOT_CFG(std).slot_mask = I2S_STD_SLOT_LEFT;
        } else {
            SLOT_CFG(std).slot_mask = I2S_STD_SLOT_RIGHT;
        }
        if (i2s_config->communication_format == I2S_COMM_FORMAT_STAND_I2S) {
            SLOT_CFG(std).bit_shift = true;
        }
        if (i2s_config->communication_format & I2S_COMM_FORMAT_STAND_PCM_SHORT) {
            SLOT_CFG(std).bit_shift = true;
            SLOT_CFG(std).ws_width = 1;
            SLOT_CFG(std).ws_pol = true;
        }
#if SOC_I2S_HW_VERSION_1
        SLOT_CFG(std).msb_right = true;
#elif SOC_I2S_HW_VERSION_2
        SLOT_CFG(std).left_align = i2s_config->left_align;
        SLOT_CFG(std).big_endian = i2s_config->big_edin;
        SLOT_CFG(std).bit_order_lsb = i2s_config->bit_order_msb; // The old name is incorrect
#endif // SOC_I2S_HW_VERSION_1
        p_i2s[i2s_num]->active_slot = (int)p_i2s[i2s_num]->slot_cfg.slot_mode == I2S_SLOT_MODE_MONO ? 1 : 2;
        p_i2s[i2s_num]->total_slot = 2;
        goto finish;
    }
#if SOC_I2S_SUPPORTS_PDM_TX
    if (p_i2s[i2s_num]->mode == I2S_COMM_MODE_PDM) {
        /* Generate PDM TX slot configuration */
        SLOT_CFG(pdm_tx).sd_prescale = 0;
        SLOT_CFG(pdm_tx).sd_scale = I2S_PDM_SIG_SCALING_MUL_1;
        SLOT_CFG(pdm_tx).hp_scale = I2S_PDM_SIG_SCALING_MUL_1;
        SLOT_CFG(pdm_tx).lp_scale = I2S_PDM_SIG_SCALING_MUL_1;
        SLOT_CFG(pdm_tx).sinc_scale = I2S_PDM_SIG_SCALING_MUL_1;
#if SOC_I2S_HW_VERSION_2
        SLOT_CFG(pdm_tx).line_mode = I2S_PDM_TX_ONE_LINE_CODEC;
        SLOT_CFG(pdm_tx).hp_en = true;
        SLOT_CFG(pdm_tx).hp_cut_off_freq_hz = 49;
        SLOT_CFG(pdm_tx).sd_dither = 0;
        SLOT_CFG(pdm_tx).sd_dither2 = 1;
#endif // SOC_I2S_HW_VERSION_2
        /* Generate PDM TX clock configuration */
        CLK_CFG().up_sample_fp = 960;
        CLK_CFG().up_sample_fs = i2s_config->sample_rate / 100;
        p_i2s[i2s_num]->active_slot = (int)p_i2s[i2s_num]->slot_cfg.slot_mode == I2S_SLOT_MODE_MONO ? 1 : 2;
        p_i2s[i2s_num]->total_slot = 2;
        goto finish;
    }
#endif // SOC_I2S_SUPPORTS_PDM_TX

#if SOC_I2S_SUPPORTS_PDM_RX
    if (p_i2s[i2s_num]->mode == I2S_COMM_MODE_PDM) {
        /* Generate PDM RX clock configuration */
        CLK_CFG().dn_sample_mode = I2S_PDM_DSR_8S;
        p_i2s[i2s_num]->active_slot = (int)p_i2s[i2s_num]->slot_cfg.slot_mode == I2S_SLOT_MODE_MONO ? 1 : 2;
        p_i2s[i2s_num]->total_slot = 2;
        goto finish;
    }
#endif // SOC_I2S_SUPPORTS_PDM_RX

#if SOC_I2S_SUPPORTS_TDM
    if (p_i2s[i2s_num]->mode == I2S_COMM_MODE_TDM) {
        /* Generate TDM slot configuration */
        SLOT_CFG(tdm).slot_mask = i2s_config->chan_mask >> 16;
        SLOT_CFG(tdm).ws_width = 0; // I2S_TDM_AUTO_WS_WIDTH
        p_i2s[i2s_num]->slot_cfg.slot_mode = I2S_SLOT_MODE_STEREO;
        SLOT_CFG(tdm).ws_pol = false;
        if (i2s_config->communication_format == I2S_COMM_FORMAT_STAND_I2S) {
            SLOT_CFG(tdm).bit_shift = true;
        } else if (i2s_config->communication_format == I2S_COMM_FORMAT_STAND_PCM_SHORT) {
            SLOT_CFG(tdm).bit_shift = true;
            SLOT_CFG(tdm).ws_width = 1;
            SLOT_CFG(tdm).ws_pol = true;
        } else if (i2s_config->communication_format == I2S_COMM_FORMAT_STAND_PCM_LONG) {
            SLOT_CFG(tdm).bit_shift = true;
            SLOT_CFG(tdm).ws_width = p_i2s[i2s_num]->slot_cfg.slot_bit_width;
            SLOT_CFG(tdm).ws_pol = true;
        }
        SLOT_CFG(tdm).left_align = i2s_config->left_align;
        SLOT_CFG(tdm).big_endian = i2s_config->big_edin;
        SLOT_CFG(tdm).bit_order_lsb = i2s_config->bit_order_msb; // The old name is incorrect
        SLOT_CFG(tdm).skip_mask = i2s_config->skip_msk;
        /* Generate TDM clock configuration */
        p_i2s[i2s_num]->active_slot = __builtin_popcount(SLOT_CFG(tdm).slot_mask);
        uint32_t mx_slot = 32 - __builtin_clz(SLOT_CFG(tdm).slot_mask);
        mx_slot = mx_slot < 2 ? 2 : mx_slot;
        p_i2s[i2s_num]->total_slot = mx_slot < i2s_config->total_chan ? mx_slot : i2s_config->total_chan;
        goto finish;
    }
#endif // SOC_I2S_SUPPORTS_TDM
#if SOC_I2S_SUPPORTS_ADC_DAC
    if ((int)p_i2s[i2s_num]->mode == I2S_COMM_MODE_ADC_DAC) {
        p_i2s[i2s_num]->slot_cfg.slot_mode = (p_i2s[i2s_num]->dir & I2S_DIR_TX) ?
                                             I2S_SLOT_MODE_STEREO : I2S_SLOT_MODE_MONO;
        p_i2s[i2s_num]->active_slot = (p_i2s[i2s_num]->dir & I2S_DIR_TX) ? 2 : 1;
        p_i2s[i2s_num]->total_slot = 2;
    }
#endif // SOC_I2S_SUPPORTS_ADC_DAC

#undef SLOT_CFG
#undef CLK_CFG
finish:
    return ESP_OK;
}

static esp_err_t i2s_init_legacy(i2s_port_t i2s_num, int intr_alloc_flag)
{
    /* Create power management lock */
#ifdef CONFIG_PM_ENABLE
    esp_pm_lock_type_t pm_lock = ESP_PM_APB_FREQ_MAX;
#if SOC_I2S_SUPPORTS_APLL
    if (p_i2s[i2s_num]->use_apll) {
        pm_lock = ESP_PM_NO_LIGHT_SLEEP;
    }
#endif // SOC_I2S_SUPPORTS_APLL
    ESP_RETURN_ON_ERROR(esp_pm_lock_create(pm_lock, 0, "i2s_driver", &p_i2s[i2s_num]->pm_lock), TAG, "I2S pm lock error");
#endif //CONFIG_PM_ENABLE

#if SOC_I2S_SUPPORTS_APLL
    if (p_i2s[i2s_num]->use_apll) {
        periph_rtc_apll_acquire();
    }
#endif

    /* Enable communication mode */
    if (p_i2s[i2s_num]->mode == I2S_COMM_MODE_STD) {
        if (p_i2s[i2s_num]->dir & I2S_DIR_TX) {
            i2s_hal_std_enable_tx_channel(&(p_i2s[i2s_num]->hal));
        }
        if (p_i2s[i2s_num]->dir & I2S_DIR_RX) {
            i2s_hal_std_enable_rx_channel(&(p_i2s[i2s_num]->hal));
        }
    }
#if SOC_I2S_SUPPORTS_PDM
    else if (p_i2s[i2s_num]->mode == I2S_COMM_MODE_PDM) {
#if SOC_I2S_SUPPORTS_PDM_TX
        if (p_i2s[i2s_num]->dir & I2S_DIR_TX) {
            i2s_hal_pdm_enable_tx_channel(&(p_i2s[i2s_num]->hal));
        }
#endif
#if SOC_I2S_SUPPORTS_PDM_RX
        if (p_i2s[i2s_num]->dir & I2S_DIR_RX) {
            i2s_hal_pdm_enable_rx_channel(&(p_i2s[i2s_num]->hal));
        }
#endif
    }
#endif
#if SOC_I2S_SUPPORTS_TDM
    else if (p_i2s[i2s_num]->mode == I2S_COMM_MODE_TDM) {
        if (p_i2s[i2s_num]->dir & I2S_DIR_TX) {
            i2s_hal_tdm_enable_tx_channel(&(p_i2s[i2s_num]->hal));
        }
        if (p_i2s[i2s_num]->dir & I2S_DIR_RX) {
            i2s_hal_tdm_enable_rx_channel(&(p_i2s[i2s_num]->hal));
        }
    }
#endif
#if SOC_I2S_SUPPORTS_ADC_DAC
    if ((int)p_i2s[i2s_num]->mode == I2S_COMM_MODE_ADC_DAC) {
        if (p_i2s[i2s_num]->dir & I2S_DIR_RX) {
            adc_power_acquire();
            adc_set_i2s_data_source(ADC_I2S_DATA_SRC_ADC);
            i2s_ll_enable_builtin_adc(p_i2s[i2s_num]->hal.dev, true);
        }
        if (p_i2s[i2s_num]->dir & I2S_DIR_TX) {
            i2s_ll_enable_builtin_dac(p_i2s[i2s_num]->hal.dev, true);
        }
    } else {
        adc_set_i2s_data_source(ADC_I2S_DATA_SRC_IO_SIG);
        i2s_ll_enable_builtin_adc(p_i2s[i2s_num]->hal.dev, false);
        i2s_ll_enable_builtin_dac(p_i2s[i2s_num]->hal.dev, false);
    }
#endif

    i2s_set_slot_legacy(i2s_num);
    i2s_set_clock_legacy(i2s_num);
    ESP_RETURN_ON_ERROR(i2s_dma_intr_init(i2s_num, intr_alloc_flag), TAG, "I2S interrupt initialize failed");
    /* Initialize I2S DMA object */
    ESP_RETURN_ON_ERROR(i2s_dma_object_init(i2s_num), TAG, "I2S dma object create failed");
    if (p_i2s[i2s_num]->dir & I2S_DIR_TX) {
        ESP_RETURN_ON_ERROR(i2s_realloc_dma_buffer(i2s_num, p_i2s[i2s_num]->tx), TAG, "Allocate I2S dma tx buffer failed");
    }
    if (p_i2s[i2s_num]->dir & I2S_DIR_RX) {
        ESP_RETURN_ON_ERROR(i2s_realloc_dma_buffer(i2s_num, p_i2s[i2s_num]->rx), TAG, "Allocate I2S dma rx buffer failed");
    }

#if SOC_I2S_HW_VERSION_2
    /* Enable tx/rx submodule clock */
    if (p_i2s[i2s_num]->dir & I2S_DIR_TX) {
        i2s_ll_tx_enable_clock(p_i2s[i2s_num]->hal.dev);
    }
    if (p_i2s[i2s_num]->dir & I2S_DIR_RX) {
        i2s_ll_rx_enable_clock(p_i2s[i2s_num]->hal.dev);
    }
#endif
    return ESP_OK;
}

esp_err_t i2s_driver_uninstall(i2s_port_t i2s_num)
{
    ESP_RETURN_ON_FALSE(i2s_num < SOC_I2S_NUM, ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
    ESP_RETURN_ON_FALSE(p_i2s[i2s_num], ESP_ERR_INVALID_STATE, TAG, "I2S port %d has not installed", i2s_num);
    i2s_obj_t *obj = p_i2s[i2s_num];
    i2s_stop(i2s_num);
#if SOC_I2S_SUPPORTS_ADC_DAC
    if ((int)(obj->mode) == I2S_COMM_MODE_ADC_DAC) {
        if (obj->dir & I2S_DIR_TX) {
            // Deinit DAC
            i2s_set_dac_mode(I2S_DAC_CHANNEL_DISABLE);
        }
        if (obj->dir & I2S_DIR_RX) {
            // Deinit ADC
            adc_set_i2s_data_source(ADC_I2S_DATA_SRC_IO_SIG);
            adc_power_release();
        }
    }
#endif
#if SOC_GDMA_SUPPORTED
    if (obj->tx_dma_chan) {
        gdma_disconnect(obj->tx_dma_chan);
        gdma_del_channel(obj->tx_dma_chan);
    }
    if (obj->rx_dma_chan) {
        gdma_disconnect(obj->rx_dma_chan);
        gdma_del_channel(obj->rx_dma_chan);
    }
#else
    if (obj->i2s_isr_handle) {
        esp_intr_free(obj->i2s_isr_handle);
    }
#endif
    /* Destroy the DMA objects if they exist */
    i2s_destroy_dma_object(i2s_num, &obj->tx);
    i2s_destroy_dma_object(i2s_num, &obj->rx);

    if (obj->i2s_queue) {
        vQueueDelete(obj->i2s_queue);
        obj->i2s_queue = NULL;
    }
#if SOC_I2S_SUPPORTS_APLL
    if (obj->use_apll) {
        // switch back to PLL clock source
        if (obj->dir & I2S_DIR_TX) {
            i2s_ll_tx_clk_set_src(obj->hal.dev, I2S_CLK_SRC_DEFAULT);
        }
        if (obj->dir & I2S_DIR_RX) {
            i2s_ll_rx_clk_set_src(obj->hal.dev, I2S_CLK_SRC_DEFAULT);
        }
        periph_rtc_apll_release();
    }
#endif
#ifdef CONFIG_PM_ENABLE
    if (obj->pm_lock) {
        esp_pm_lock_delete(obj->pm_lock);
        obj->pm_lock = NULL;
    }
#endif
#if SOC_I2S_HW_VERSION_2
    if (obj->dir & I2S_DIR_TX) {
        i2s_ll_tx_disable_clock(obj->hal.dev);
    }
    if (obj->dir & I2S_DIR_RX) {
        i2s_ll_rx_disable_clock(obj->hal.dev);
    }
#endif
    /* Disable module clock */
    i2s_platform_release_occupation(i2s_num);
    free(obj);
    p_i2s[i2s_num] = NULL;
    return ESP_OK;
}
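
/*
 * A minimal usage sketch: tear the driver down once the port is no longer needed,
 * which releases the DMA buffers, the event queue and the module clock. The port
 * number is illustrative.
 *
 *     i2s_driver_uninstall(I2S_NUM_0);
 */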

esp_err_t i2s_driver_install(i2s_port_t i2s_num, const i2s_config_t *i2s_config, int queue_size, void *i2s_queue)
{
#if CONFIG_I2S_ENABLE_DEBUG_LOG
    esp_log_level_set(TAG, ESP_LOG_DEBUG);
#endif
    esp_err_t ret = ESP_OK;
    /* Step 1: Check the validity of input parameters */
    ESP_RETURN_ON_ERROR(i2s_check_cfg_validity(i2s_num, i2s_config), TAG, "I2S configuration is invalid");
    /* Step 2: Allocate driver object and register to platform */
    i2s_obj_t *i2s_obj = calloc(1, sizeof(i2s_obj_t));
    ESP_RETURN_ON_FALSE(i2s_obj, ESP_ERR_NO_MEM, TAG, "no mem for I2S driver");
    if (i2s_platform_acquire_occupation(i2s_num, "i2s_legacy") != ESP_OK) {
        free(i2s_obj);
        ESP_LOGE(TAG, "register I2S object to platform failed");
        return ESP_ERR_INVALID_STATE;
    }
    p_i2s[i2s_num] = i2s_obj;
    i2s_hal_init(&i2s_obj->hal, i2s_num);
    /* Step 3: Store and assign configurations */
    i2s_mode_identify(i2s_num, i2s_config);
    ESP_GOTO_ON_ERROR(i2s_config_transfer(i2s_num, i2s_config), err, TAG, "I2S install failed");
    i2s_obj->dma_desc_num = i2s_config->dma_desc_num;
    i2s_obj->dma_frame_num = i2s_config->dma_frame_num;
    i2s_obj->tx_desc_auto_clear = i2s_config->tx_desc_auto_clear;
    /* Step 4: Apply configurations and init hardware */
    ESP_GOTO_ON_ERROR(i2s_init_legacy(i2s_num, i2s_config->intr_alloc_flags), err, TAG, "I2S init failed");
    /* Step 5: Initialise the I2S event queue if the user needs it */
    if (i2s_queue) {
        i2s_obj->i2s_queue = xQueueCreate(queue_size, sizeof(i2s_event_t));
        ESP_GOTO_ON_FALSE(i2s_obj->i2s_queue, ESP_ERR_NO_MEM, err, TAG, "I2S queue create failed");
        *((QueueHandle_t *)i2s_queue) = i2s_obj->i2s_queue;
        ESP_LOGD(TAG, "queue free spaces: %d", uxQueueSpacesAvailable(i2s_obj->i2s_queue));
    } else {
        i2s_obj->i2s_queue = NULL;
    }
    /* Step 6: Start I2S for backward compatibility */
    ESP_GOTO_ON_ERROR(i2s_start(i2s_num), err, TAG, "I2S start failed");
    return ESP_OK;
err:
    /* I2S install failed, prepare to uninstall */
    i2s_driver_uninstall(i2s_num);
    return ret;
}
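
/*
 * A minimal usage sketch of the install flow above, assuming a standard-mode master
 * TX port; every field value here is illustrative, not a recommendation.
 *
 *     i2s_config_t i2s_config = {
 *         .mode = I2S_MODE_MASTER | I2S_MODE_TX,
 *         .sample_rate = 44100,
 *         .bits_per_sample = I2S_BITS_PER_SAMPLE_16BIT,
 *         .channel_format = I2S_CHANNEL_FMT_RIGHT_LEFT,
 *         .communication_format = I2S_COMM_FORMAT_STAND_I2S,
 *         .intr_alloc_flags = 0,
 *         .dma_desc_num = 6,
 *         .dma_frame_num = 240,
 *         .use_apll = false,
 *         .tx_desc_auto_clear = true,
 *     };
 *     QueueHandle_t evt_queue = NULL;
 *     ESP_ERROR_CHECK(i2s_driver_install(I2S_NUM_0, &i2s_config, 4, &evt_queue));
 */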

esp_err_t i2s_write(i2s_port_t i2s_num, const void *src, size_t size, size_t *bytes_written, TickType_t ticks_to_wait)
{
    char *data_ptr;
    char *src_byte;
    size_t bytes_can_write;
    *bytes_written = 0;
    ESP_RETURN_ON_FALSE((i2s_num < SOC_I2S_NUM), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
    ESP_RETURN_ON_FALSE((p_i2s[i2s_num]->tx), ESP_ERR_INVALID_ARG, TAG, "TX mode is not enabled");
    xSemaphoreTake(p_i2s[i2s_num]->tx->mux, portMAX_DELAY);
#ifdef CONFIG_PM_ENABLE
    esp_pm_lock_acquire(p_i2s[i2s_num]->pm_lock);
#endif
    src_byte = (char *)src;
    while (size > 0) {
        if (p_i2s[i2s_num]->tx->rw_pos == p_i2s[i2s_num]->tx->buf_size || p_i2s[i2s_num]->tx->curr_ptr == NULL) {
            if (xQueueReceive(p_i2s[i2s_num]->tx->queue, &p_i2s[i2s_num]->tx->curr_ptr, ticks_to_wait) == pdFALSE) {
                break;
            }
            p_i2s[i2s_num]->tx->rw_pos = 0;
        }
        ESP_LOGD(TAG, "size: %d, rw_pos: %d, buf_size: %d, curr_ptr: %d", size, p_i2s[i2s_num]->tx->rw_pos, p_i2s[i2s_num]->tx->buf_size, (int)p_i2s[i2s_num]->tx->curr_ptr);
        data_ptr = (char *)p_i2s[i2s_num]->tx->curr_ptr;
        data_ptr += p_i2s[i2s_num]->tx->rw_pos;
        bytes_can_write = p_i2s[i2s_num]->tx->buf_size - p_i2s[i2s_num]->tx->rw_pos;
        if (bytes_can_write > size) {
            bytes_can_write = size;
        }
        memcpy(data_ptr, src_byte, bytes_can_write);
        size -= bytes_can_write;
        src_byte += bytes_can_write;
        p_i2s[i2s_num]->tx->rw_pos += bytes_can_write;
        (*bytes_written) += bytes_can_write;
    }
#ifdef CONFIG_PM_ENABLE
    esp_pm_lock_release(p_i2s[i2s_num]->pm_lock);
#endif
    xSemaphoreGive(p_i2s[i2s_num]->tx->mux);
    return ESP_OK;
}
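
/*
 * A minimal usage sketch: push a buffer of samples and block until it has been
 * copied into the DMA buffers. The buffer contents and port are illustrative.
 *
 *     int16_t samples[256] = { 0 };
 *     size_t bytes_written = 0;
 *     i2s_write(I2S_NUM_0, samples, sizeof(samples), &bytes_written, portMAX_DELAY);
 */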

esp_err_t i2s_write_expand(i2s_port_t i2s_num, const void *src, size_t size, size_t src_bits, size_t aim_bits, size_t *bytes_written, TickType_t ticks_to_wait)
{
    char *data_ptr;
    int bytes_can_write;
    int tail;
    int src_bytes;
    int aim_bytes;
    int zero_bytes;
    *bytes_written = 0;
    ESP_RETURN_ON_FALSE((i2s_num < SOC_I2S_NUM), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
    ESP_RETURN_ON_FALSE((size > 0), ESP_ERR_INVALID_ARG, TAG, "size must be greater than zero");
    ESP_RETURN_ON_FALSE((aim_bits >= src_bits), ESP_ERR_INVALID_ARG, TAG, "aim_bits mustn't be less than src_bits");
    ESP_RETURN_ON_FALSE((p_i2s[i2s_num]->tx), ESP_ERR_INVALID_ARG, TAG, "TX mode is not enabled");
    if (src_bits < I2S_BITS_PER_SAMPLE_8BIT || aim_bits < I2S_BITS_PER_SAMPLE_8BIT) {
        ESP_LOGE(TAG, "bits mustn't be less than 8, src_bits %d aim_bits %d", src_bits, aim_bits);
        return ESP_ERR_INVALID_ARG;
    }
    if (src_bits > I2S_BITS_PER_SAMPLE_32BIT || aim_bits > I2S_BITS_PER_SAMPLE_32BIT) {
        ESP_LOGE(TAG, "bits mustn't be greater than 32, src_bits %d aim_bits %d", src_bits, aim_bits);
        return ESP_ERR_INVALID_ARG;
    }
    if ((src_bits == I2S_BITS_PER_SAMPLE_16BIT || src_bits == I2S_BITS_PER_SAMPLE_32BIT) && (size % 2 != 0)) {
        ESP_LOGE(TAG, "size must be an even number while src_bits is even, src_bits %d size %d", src_bits, size);
        return ESP_ERR_INVALID_ARG;
    }
    if (src_bits == I2S_BITS_PER_SAMPLE_24BIT && (size % 3 != 0)) {
        ESP_LOGE(TAG, "size must be a multiple of 3 while src_bits is 24, size %d", size);
        return ESP_ERR_INVALID_ARG;
    }
    src_bytes = src_bits / 8;
    aim_bytes = aim_bits / 8;
    zero_bytes = aim_bytes - src_bytes;
    xSemaphoreTake(p_i2s[i2s_num]->tx->mux, portMAX_DELAY);
    size = size * aim_bytes / src_bytes;
    ESP_LOGD(TAG, "aim_bytes %d src_bytes %d size %d", aim_bytes, src_bytes, size);
    while (size > 0) {
        if (p_i2s[i2s_num]->tx->rw_pos == p_i2s[i2s_num]->tx->buf_size || p_i2s[i2s_num]->tx->curr_ptr == NULL) {
            if (xQueueReceive(p_i2s[i2s_num]->tx->queue, &p_i2s[i2s_num]->tx->curr_ptr, ticks_to_wait) == pdFALSE) {
                break;
            }
            p_i2s[i2s_num]->tx->rw_pos = 0;
        }
        data_ptr = (char *)p_i2s[i2s_num]->tx->curr_ptr;
        data_ptr += p_i2s[i2s_num]->tx->rw_pos;
        bytes_can_write = p_i2s[i2s_num]->tx->buf_size - p_i2s[i2s_num]->tx->rw_pos;
        if (bytes_can_write > (int)size) {
            bytes_can_write = size;
        }
        tail = bytes_can_write % aim_bytes;
        bytes_can_write = bytes_can_write - tail;
        memset(data_ptr, 0, bytes_can_write);
        for (int j = 0; j < bytes_can_write; j += (aim_bytes - zero_bytes)) {
            j += zero_bytes;
            memcpy(&data_ptr[j], (const char *)(src + *bytes_written), aim_bytes - zero_bytes);
            (*bytes_written) += (aim_bytes - zero_bytes);
        }
        size -= bytes_can_write;
        p_i2s[i2s_num]->tx->rw_pos += bytes_can_write;
    }
    xSemaphoreGive(p_i2s[i2s_num]->tx->mux);
    return ESP_OK;
}
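
/*
 * A minimal usage sketch: expand 16-bit source samples into 32-bit slots on the fly
 * (the padding bytes are zero-filled), assuming the port was configured for 32-bit
 * samples; the buffer name and port are illustrative.
 *
 *     size_t bytes_written = 0;
 *     i2s_write_expand(I2S_NUM_0, samples, sizeof(samples), 16, 32, &bytes_written, portMAX_DELAY);
 */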

esp_err_t i2s_read(i2s_port_t i2s_num, void *dest, size_t size, size_t *bytes_read, TickType_t ticks_to_wait)
{
    char *data_ptr;
    char *dest_byte;
    int bytes_can_read;
    *bytes_read = 0;
    dest_byte = (char *)dest;
    ESP_RETURN_ON_FALSE((i2s_num < SOC_I2S_NUM), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
    ESP_RETURN_ON_FALSE((p_i2s[i2s_num]->rx), ESP_ERR_INVALID_ARG, TAG, "RX mode is not enabled");
    xSemaphoreTake(p_i2s[i2s_num]->rx->mux, portMAX_DELAY);
#ifdef CONFIG_PM_ENABLE
    esp_pm_lock_acquire(p_i2s[i2s_num]->pm_lock);
#endif
    while (size > 0) {
        if (p_i2s[i2s_num]->rx->rw_pos == p_i2s[i2s_num]->rx->buf_size || p_i2s[i2s_num]->rx->curr_ptr == NULL) {
            if (xQueueReceive(p_i2s[i2s_num]->rx->queue, &p_i2s[i2s_num]->rx->curr_ptr, ticks_to_wait) == pdFALSE) {
                break;
            }
            p_i2s[i2s_num]->rx->rw_pos = 0;
        }
        data_ptr = (char *)p_i2s[i2s_num]->rx->curr_ptr;
        data_ptr += p_i2s[i2s_num]->rx->rw_pos;
        bytes_can_read = p_i2s[i2s_num]->rx->buf_size - p_i2s[i2s_num]->rx->rw_pos;
        if (bytes_can_read > (int)size) {
            bytes_can_read = size;
        }
        memcpy(dest_byte, data_ptr, bytes_can_read);
        size -= bytes_can_read;
        dest_byte += bytes_can_read;
        p_i2s[i2s_num]->rx->rw_pos += bytes_can_read;
        (*bytes_read) += bytes_can_read;
    }
#ifdef CONFIG_PM_ENABLE
    esp_pm_lock_release(p_i2s[i2s_num]->pm_lock);
#endif
    xSemaphoreGive(p_i2s[i2s_num]->rx->mux);
    return ESP_OK;
}
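
/*
 * A minimal usage sketch: block until the requested number of bytes has been
 * received or the timeout expires. The buffer size and port are illustrative.
 *
 *     int16_t rx_buf[256];
 *     size_t bytes_read = 0;
 *     i2s_read(I2S_NUM_0, rx_buf, sizeof(rx_buf), &bytes_read, portMAX_DELAY);
 */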

/*-------------------------------------------------------------
                    I2S GPIO operation
  -------------------------------------------------------------*/
static void gpio_matrix_out_check_and_set(gpio_num_t gpio, uint32_t signal_idx, bool out_inv, bool oen_inv)
{
    // If the pin is -1, there is nothing to configure
    if (gpio != -1) {
        gpio_hal_iomux_func_sel(GPIO_PIN_MUX_REG[gpio], PIN_FUNC_GPIO);
        gpio_set_direction(gpio, GPIO_MODE_OUTPUT);
        esp_rom_gpio_connect_out_signal(gpio, signal_idx, out_inv, oen_inv);
    }
}

static void gpio_matrix_in_check_and_set(gpio_num_t gpio, uint32_t signal_idx, bool inv)
{
    if (gpio != -1) {
        gpio_hal_iomux_func_sel(GPIO_PIN_MUX_REG[gpio], PIN_FUNC_GPIO);
        /* Set direction, because for some GPIOs the input function is not enabled by default */
        gpio_set_direction(gpio, GPIO_MODE_INPUT);
        esp_rom_gpio_connect_in_signal(gpio, signal_idx, inv);
    }
}

static esp_err_t i2s_check_set_mclk(i2s_port_t i2s_num, gpio_num_t gpio_num)
{
    if (gpio_num == -1) {
        return ESP_OK;
    }
#if CONFIG_IDF_TARGET_ESP32
    ESP_RETURN_ON_FALSE((gpio_num == GPIO_NUM_0 || gpio_num == GPIO_NUM_1 || gpio_num == GPIO_NUM_3),
                        ESP_ERR_INVALID_ARG, TAG,
                        "ESP32 only supports GPIO0/GPIO1/GPIO3 as the mclk signal, error GPIO number:%d", gpio_num);
    bool is_i2s0 = i2s_num == I2S_NUM_0;
    if (gpio_num == GPIO_NUM_0) {
        PIN_FUNC_SELECT(PERIPHS_IO_MUX_GPIO0_U, FUNC_GPIO0_CLK_OUT1);
        WRITE_PERI_REG(PIN_CTRL, is_i2s0 ? 0xFFF0 : 0xFFFF);
    } else if (gpio_num == GPIO_NUM_1) {
        PIN_FUNC_SELECT(PERIPHS_IO_MUX_U0TXD_U, FUNC_U0TXD_CLK_OUT3);
        WRITE_PERI_REG(PIN_CTRL, is_i2s0 ? 0xF0F0 : 0xF0FF);
    } else {
        PIN_FUNC_SELECT(PERIPHS_IO_MUX_U0RXD_U, FUNC_U0RXD_CLK_OUT2);
        WRITE_PERI_REG(PIN_CTRL, is_i2s0 ? 0xFF00 : 0xFF0F);
    }
#else
    ESP_RETURN_ON_FALSE(GPIO_IS_VALID_GPIO(gpio_num), ESP_ERR_INVALID_ARG, TAG, "mck_io_num invalid");
    gpio_matrix_out_check_and_set(gpio_num, i2s_periph_signal[i2s_num].mck_out_sig, 0, 0);
#endif
    ESP_LOGD(TAG, "I2S%d, MCLK output by GPIO%d", i2s_num, gpio_num);
    return ESP_OK;
}

esp_err_t i2s_zero_dma_buffer(i2s_port_t i2s_num)
{
    ESP_RETURN_ON_FALSE((i2s_num < SOC_I2S_NUM), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
    uint32_t buf_cnt = p_i2s[i2s_num]->dma_desc_num;
    /* Clear I2S RX DMA buffer */
    if (p_i2s[i2s_num]->rx && p_i2s[i2s_num]->rx->buf != NULL && p_i2s[i2s_num]->rx->buf_size != 0) {
        for (int i = 0; i < buf_cnt; i++) {
            memset(p_i2s[i2s_num]->rx->buf[i], 0, p_i2s[i2s_num]->rx->buf_size);
        }
    }
    /* Clear I2S TX DMA buffer */
    if (p_i2s[i2s_num]->tx && p_i2s[i2s_num]->tx->buf != NULL && p_i2s[i2s_num]->tx->buf_size != 0) {
        /* Finish writing out the remaining TX data first */
        int bytes_left = (p_i2s[i2s_num]->tx->buf_size - p_i2s[i2s_num]->tx->rw_pos) % 4;
        if (bytes_left) {
            size_t zero_bytes = 0;
            size_t bytes_written;
            i2s_write(i2s_num, (void *)&zero_bytes, bytes_left, &bytes_written, portMAX_DELAY);
        }
        for (int i = 0; i < buf_cnt; i++) {
            memset(p_i2s[i2s_num]->tx->buf[i], 0, p_i2s[i2s_num]->tx->buf_size);
        }
    }
    return ESP_OK;
}
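
/*
 * A minimal usage sketch: silence the output (and clear any pending RX data)
 * without stopping the port, e.g. between two playback sessions.
 *
 *     i2s_zero_dma_buffer(I2S_NUM_0);
 */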

esp_err_t i2s_set_pin(i2s_port_t i2s_num, const i2s_pin_config_t *pin)
{
    ESP_RETURN_ON_FALSE((i2s_num < SOC_I2S_NUM), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
    if (pin == NULL) {
#if SOC_I2S_SUPPORTS_DAC
        return i2s_set_dac_mode(I2S_DAC_CHANNEL_BOTH_EN);
#else
        return ESP_ERR_INVALID_ARG;
#endif
    }
    /* Check validity of selected pins */
    ESP_RETURN_ON_FALSE((pin->bck_io_num == -1 || GPIO_IS_VALID_GPIO(pin->bck_io_num)),
                        ESP_ERR_INVALID_ARG, TAG, "bck_io_num invalid");
    ESP_RETURN_ON_FALSE((pin->ws_io_num == -1 || GPIO_IS_VALID_GPIO(pin->ws_io_num)),
                        ESP_ERR_INVALID_ARG, TAG, "ws_io_num invalid");
    ESP_RETURN_ON_FALSE((pin->data_out_num == -1 || GPIO_IS_VALID_GPIO(pin->data_out_num)),
                        ESP_ERR_INVALID_ARG, TAG, "data_out_num invalid");
    ESP_RETURN_ON_FALSE((pin->data_in_num == -1 || GPIO_IS_VALID_GPIO(pin->data_in_num)),
                        ESP_ERR_INVALID_ARG, TAG, "data_in_num invalid");
    if (p_i2s[i2s_num]->role == I2S_ROLE_SLAVE) {
        /* For "tx + rx + slave" or "rx + slave" mode, we should select RX signal index for ws and bck */
        if (p_i2s[i2s_num]->dir & I2S_DIR_RX) {
            gpio_matrix_in_check_and_set(pin->ws_io_num, i2s_periph_signal[i2s_num].s_rx_ws_sig, 0);
            gpio_matrix_in_check_and_set(pin->bck_io_num, i2s_periph_signal[i2s_num].s_rx_bck_sig, 0);
        /* For "tx + slave" mode, we should select TX signal index for ws and bck */
        } else {
            gpio_matrix_in_check_and_set(pin->ws_io_num, i2s_periph_signal[i2s_num].s_tx_ws_sig, 0);
            gpio_matrix_in_check_and_set(pin->bck_io_num, i2s_periph_signal[i2s_num].s_tx_bck_sig, 0);
        }
    } else {
        /* mclk is only available in master mode */
        ESP_RETURN_ON_ERROR(i2s_check_set_mclk(i2s_num, pin->mck_io_num), TAG, "mclk config failed");
        /* For "tx + rx + master" or "tx + master" mode, we should select TX signal index for ws and bck */
        if (p_i2s[i2s_num]->dir & I2S_DIR_TX) {
            gpio_matrix_out_check_and_set(pin->ws_io_num, i2s_periph_signal[i2s_num].m_tx_ws_sig, 0, 0);
            gpio_matrix_out_check_and_set(pin->bck_io_num, i2s_periph_signal[i2s_num].m_tx_bck_sig, 0, 0);
        /* For "rx + master" mode, we should select RX signal index for ws and bck */
        } else {
            gpio_matrix_out_check_and_set(pin->ws_io_num, i2s_periph_signal[i2s_num].m_rx_ws_sig, 0, 0);
            gpio_matrix_out_check_and_set(pin->bck_io_num, i2s_periph_signal[i2s_num].m_rx_bck_sig, 0, 0);
        }
    }
    /* Set data input/output GPIO */
    gpio_matrix_out_check_and_set(pin->data_out_num, i2s_periph_signal[i2s_num].data_out_sig, 0, 0);
    gpio_matrix_in_check_and_set(pin->data_in_num, i2s_periph_signal[i2s_num].data_in_sig, 0);
    return ESP_OK;
}
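
/*
 * A minimal usage sketch, assuming a master TX port; the GPIO numbers are
 * illustrative and must match the actual board wiring.
 *
 *     i2s_pin_config_t pin_config = {
 *         .mck_io_num = I2S_PIN_NO_CHANGE,
 *         .bck_io_num = 26,
 *         .ws_io_num = 25,
 *         .data_out_num = 22,
 *         .data_in_num = I2S_PIN_NO_CHANGE,
 *     };
 *     i2s_set_pin(I2S_NUM_0, &pin_config);
 */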

esp_err_t i2s_platform_acquire_occupation(int id, const char *comp_name)
{
    esp_err_t ret = ESP_ERR_NOT_FOUND;
    ESP_RETURN_ON_FALSE(id < SOC_I2S_NUM, ESP_ERR_INVALID_ARG, TAG, "invalid i2s port id");
    portENTER_CRITICAL(&i2s_spinlock[id]);
    if (!comp_using_i2s[id]) {
        ret = ESP_OK;
        comp_using_i2s[id] = comp_name;
        periph_module_enable(i2s_periph_signal[id].module);
        i2s_ll_enable_clock(I2S_LL_GET_HW(id));
    }
    portEXIT_CRITICAL(&i2s_spinlock[id]);
    return ret;
}

esp_err_t i2s_platform_release_occupation(int id)
{
    esp_err_t ret = ESP_ERR_INVALID_STATE;
    ESP_RETURN_ON_FALSE(id < SOC_I2S_NUM, ESP_ERR_INVALID_ARG, TAG, "invalid i2s port id");
    portENTER_CRITICAL(&i2s_spinlock[id]);
    if (comp_using_i2s[id]) {
        ret = ESP_OK;
        comp_using_i2s[id] = NULL;
        /* Disable module clock */
        periph_module_disable(i2s_periph_signal[id].module);
        i2s_ll_disable_clock(I2S_LL_GET_HW(id));
    }
    portEXIT_CRITICAL(&i2s_spinlock[id]);
    return ret;
}
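
/*
 * A minimal usage sketch for another component that needs to claim the I2S
 * peripheral directly (for example, a driver built on top of the raw hardware);
 * the component name string is illustrative.
 *
 *     if (i2s_platform_acquire_occupation(I2S_NUM_0, "my_component") == ESP_OK) {
 *         // ... use the peripheral ...
 *         i2s_platform_release_occupation(I2S_NUM_0);
 *     }
 */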

/**
 * @brief This function will be called during start up, to check that the new i2s driver is not running along with the legacy i2s driver
 */
static __attribute__((constructor)) void check_i2s_driver_conflict(void)
{
    extern __attribute__((weak)) esp_err_t i2s_del_channel(void *handle);
    /* If the new I2S driver is linked, the weak function will point to the actual function in the new driver, otherwise it is NULL */
    if ((void *)i2s_del_channel != NULL) {
        ESP_EARLY_LOGE(TAG, "CONFLICT! The new i2s driver can't work along with the legacy i2s driver");
        abort();
    }
    ESP_EARLY_LOGW(TAG, "legacy i2s driver is deprecated, please migrate to use driver/i2s_std.h, driver/i2s_pdm.h or driver/i2s_tdm.h");
}