i2s.c

  1. /*
  2. * SPDX-FileCopyrightText: 2015-2021 Espressif Systems (Shanghai) CO LTD
  3. *
  4. * SPDX-License-Identifier: Apache-2.0
  5. */
  6. #include <string.h>
  7. #include <stdbool.h>
  8. #include <math.h>
  9. #include <esp_types.h>
  10. #include "freertos/FreeRTOS.h"
  11. #include "freertos/queue.h"
  12. #include "freertos/semphr.h"
  13. #include "soc/lldesc.h"
  14. #include "driver/gpio.h"
  15. #include "driver/i2s.h"
  16. #include "hal/gpio_hal.h"
  17. #include "hal/i2s_hal.h"
  18. #if SOC_I2S_SUPPORTS_ADC_DAC
  19. #include "driver/dac.h"
  20. #include "adc1_private.h"
  21. #endif
  22. #if SOC_GDMA_SUPPORTED
  23. #include "esp_private/gdma.h"
  24. #endif
  25. #include "soc/rtc.h"
  26. #include "esp_intr_alloc.h"
  27. #include "esp_err.h"
  28. #include "esp_check.h"
  29. #include "esp_attr.h"
  30. #include "esp_log.h"
  31. #include "esp_pm.h"
  32. #include "esp_efuse.h"
  33. #include "esp_rom_gpio.h"
  34. #include "sdkconfig.h"
  35. static const char *TAG = "I2S";
  36. #define I2S_ENTER_CRITICAL_ISR(i2s_num) portENTER_CRITICAL_ISR(&i2s_spinlock[i2s_num])
  37. #define I2S_EXIT_CRITICAL_ISR(i2s_num) portEXIT_CRITICAL_ISR(&i2s_spinlock[i2s_num])
  38. #define I2S_ENTER_CRITICAL(i2s_num) portENTER_CRITICAL(&i2s_spinlock[i2s_num])
  39. #define I2S_EXIT_CRITICAL(i2s_num) portEXIT_CRITICAL(&i2s_spinlock[i2s_num])
  40. #define I2S_FULL_DUPLEX_SLAVE_MODE_MASK (I2S_MODE_TX | I2S_MODE_RX | I2S_MODE_SLAVE)
  41. #define I2S_FULL_DUPLEX_MASTER_MODE_MASK (I2S_MODE_TX | I2S_MODE_RX | I2S_MODE_MASTER)
  42. #if !SOC_GDMA_SUPPORTED
  43. #define I2S_INTR_IN_SUC_EOF BIT(9)
  44. #define I2S_INTR_OUT_EOF BIT(12)
  45. #define I2S_INTR_IN_DSCR_ERR BIT(13)
  46. #define I2S_INTR_OUT_DSCR_ERR BIT(14)
  47. #define I2S_INTR_MAX (~0)
  48. #endif
  49. /**
  50. * @brief DMA buffer object
  51. *
  52. */
  53. typedef struct {
  54. char **buf;
  55. int buf_size;
  56. int rw_pos;
  57. void *curr_ptr;
  58. SemaphoreHandle_t mux;
  59. xQueueHandle queue;
  60. lldesc_t **desc;
  61. } i2s_dma_t;
  62. /**
  63. * @brief I2S object instance
  64. *
  65. */
  66. typedef struct {
  67. i2s_port_t i2s_num; /*!< I2S port number*/
  68. int queue_size; /*!< I2S event queue size*/
  69. QueueHandle_t i2s_queue; /*!< I2S queue handler*/
  70. int dma_buf_count; /*!< DMA buffer count, number of buffer*/
  71. int dma_buf_len; /*!< DMA buffer length, length of each buffer*/
  72. i2s_dma_t *tx; /*!< DMA Tx buffer*/
  73. i2s_dma_t *rx; /*!< DMA Rx buffer*/
  74. #if SOC_GDMA_SUPPORTED
  75. gdma_channel_handle_t rx_dma_chan; /*!< I2S rx gDMA channel handle*/
  76. gdma_channel_handle_t tx_dma_chan; /*!< I2S tx gDMA channel handle*/
  77. #else
  78. i2s_isr_handle_t i2s_isr_handle; /*!< I2S Interrupt handle*/
  79. #endif
  80. int channel_num; /*!< Number of channels*/
  81. int bytes_per_sample; /*!< Bytes per sample*/
  82. int bits_per_sample; /*!< Bits per sample*/
  83. i2s_comm_format_t communication_format; /*!<communication standard format*/
  84. i2s_mode_t mode; /*!< I2S Working mode*/
  85. uint32_t sample_rate; /*!< I2S sample rate */
  86. bool tx_desc_auto_clear; /*!< I2S auto clear tx descriptor on underflow */
  87. bool use_apll; /*!< I2S use APLL clock */
int fixed_mclk; /*!< I2S fixed MCLK clock */
  89. i2s_mclk_multiple_t mclk_multiple; /*!< The multiple of I2S master clock(MCLK) to sample rate */
  90. #ifdef CONFIG_PM_ENABLE
  91. esp_pm_lock_handle_t pm_lock;
  92. #endif
  93. i2s_hal_context_t hal; /*!< I2S hal context*/
  94. i2s_hal_config_t hal_cfg; /*!< I2S hal configurations*/
  95. } i2s_obj_t;
  96. static i2s_obj_t *p_i2s[I2S_NUM_MAX] = {0};
  97. static portMUX_TYPE i2s_spinlock[I2S_NUM_MAX];
  98. #if SOC_I2S_SUPPORTS_ADC_DAC
  99. static int _i2s_adc_unit = -1;
  100. static int _i2s_adc_channel = -1;
  101. #endif
  102. static i2s_dma_t *i2s_create_dma_queue(i2s_port_t i2s_num, int dma_buf_count, int dma_buf_len);
  103. static esp_err_t i2s_destroy_dma_queue(i2s_port_t i2s_num, i2s_dma_t *dma);
  104. /**************************************************************
  105. * I2S GPIO operation *
  106. * - gpio_matrix_out_check_and_set *
  107. * - gpio_matrix_in_check_and_set *
  108. * - i2s_check_set_mclk *
  109. * - i2s_set_pin *
  110. **************************************************************/
  111. static void gpio_matrix_out_check_and_set(int gpio, uint32_t signal_idx, bool out_inv, bool oen_inv)
  112. {
// If the pin is -1, no configuration is needed
  114. if (gpio != -1) {
  115. gpio_hal_iomux_func_sel(GPIO_PIN_MUX_REG[gpio], PIN_FUNC_GPIO);
  116. gpio_set_direction(gpio, GPIO_MODE_OUTPUT);
  117. esp_rom_gpio_connect_out_signal(gpio, signal_idx, out_inv, oen_inv);
  118. }
  119. }
  120. static void gpio_matrix_in_check_and_set(int gpio, uint32_t signal_idx, bool inv)
  121. {
  122. if (gpio != -1) {
  123. gpio_hal_iomux_func_sel(GPIO_PIN_MUX_REG[gpio], PIN_FUNC_GPIO);
// Set the direction explicitly: for some GPIOs, the input function is not enabled by default.
  125. gpio_set_direction(gpio, GPIO_MODE_INPUT);
  126. esp_rom_gpio_connect_in_signal(gpio, signal_idx, inv);
  127. }
  128. }
  129. static esp_err_t i2s_check_set_mclk(i2s_port_t i2s_num, gpio_num_t gpio_num)
  130. {
  131. if (gpio_num == -1) {
  132. return ESP_OK;
  133. }
  134. #if CONFIG_IDF_TARGET_ESP32
ESP_RETURN_ON_FALSE((gpio_num == GPIO_NUM_0 || gpio_num == GPIO_NUM_1 || gpio_num == GPIO_NUM_3),
                    ESP_ERR_INVALID_ARG, TAG,
                    "ESP32 only supports GPIO0/GPIO1/GPIO3 as the MCLK output, invalid GPIO number: %d", gpio_num);
  138. bool is_i2s0 = i2s_num == I2S_NUM_0;
  139. if (gpio_num == GPIO_NUM_0) {
  140. PIN_FUNC_SELECT(PERIPHS_IO_MUX_GPIO0_U, FUNC_GPIO0_CLK_OUT1);
  141. WRITE_PERI_REG(PIN_CTRL, is_i2s0 ? 0xFFF0 : 0xFFFF);
  142. } else if (gpio_num == GPIO_NUM_1) {
  143. PIN_FUNC_SELECT(PERIPHS_IO_MUX_U0TXD_U, FUNC_U0TXD_CLK_OUT3);
  144. WRITE_PERI_REG(PIN_CTRL, is_i2s0 ? 0xF0F0 : 0xF0FF);
  145. } else {
  146. PIN_FUNC_SELECT(PERIPHS_IO_MUX_U0RXD_U, FUNC_U0RXD_CLK_OUT2);
  147. WRITE_PERI_REG(PIN_CTRL, is_i2s0 ? 0xFF00 : 0xFF0F);
  148. }
  149. #else
  150. ESP_RETURN_ON_FALSE(GPIO_IS_VALID_GPIO(gpio_num), ESP_ERR_INVALID_ARG, TAG, "mck_io_num invalid");
  151. gpio_matrix_out_check_and_set(gpio_num, i2s_periph_signal[i2s_num].mck_out_sig, 0, 0);
  152. #endif
  153. ESP_LOGI(TAG, "I2S%d, MCLK output by GPIO%d", i2s_num, gpio_num);
  154. return ESP_OK;
  155. }
  156. esp_err_t i2s_set_pin(i2s_port_t i2s_num, const i2s_pin_config_t *pin)
  157. {
  158. ESP_RETURN_ON_FALSE((i2s_num < I2S_NUM_MAX), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
  159. if (pin == NULL) {
  160. #if SOC_I2S_SUPPORTS_ADC_DAC
  161. return i2s_set_dac_mode(I2S_DAC_CHANNEL_BOTH_EN);
  162. #else
  163. return ESP_ERR_INVALID_ARG;
  164. #endif
  165. }
  166. ESP_RETURN_ON_FALSE((pin->bck_io_num == -1 || GPIO_IS_VALID_GPIO(pin->bck_io_num)),
  167. ESP_ERR_INVALID_ARG, TAG, "bck_io_num invalid");
  168. ESP_RETURN_ON_FALSE((pin->ws_io_num == -1 || GPIO_IS_VALID_GPIO(pin->ws_io_num)),
  169. ESP_ERR_INVALID_ARG, TAG, "ws_io_num invalid");
  170. ESP_RETURN_ON_FALSE((pin->data_out_num == -1 || GPIO_IS_VALID_GPIO(pin->data_out_num)),
  171. ESP_ERR_INVALID_ARG, TAG, "data_out_num invalid");
  172. ESP_RETURN_ON_FALSE((pin->data_in_num == -1 || GPIO_IS_VALID_GPIO(pin->data_in_num)),
  173. ESP_ERR_INVALID_ARG, TAG, "data_in_num invalid");
  174. if (p_i2s[i2s_num]->mode & I2S_MODE_SLAVE) {
  175. if (p_i2s[i2s_num]->mode & I2S_MODE_TX) {
  176. gpio_matrix_in_check_and_set(pin->ws_io_num, i2s_periph_signal[i2s_num].tx_ws_sig, 0);
  177. gpio_matrix_in_check_and_set(pin->bck_io_num, i2s_periph_signal[i2s_num].tx_bck_sig, 0);
  178. } else {
  179. gpio_matrix_in_check_and_set(pin->ws_io_num, i2s_periph_signal[i2s_num].rx_ws_sig, 0);
  180. gpio_matrix_in_check_and_set(pin->bck_io_num, i2s_periph_signal[i2s_num].rx_bck_sig, 0);
  181. }
  182. } else {
  183. ESP_RETURN_ON_ERROR(i2s_check_set_mclk(i2s_num, pin->mck_io_num), TAG, "mclk config failed");
  184. if (p_i2s[i2s_num]->mode & I2S_MODE_TX) {
  185. gpio_matrix_out_check_and_set(pin->ws_io_num, i2s_periph_signal[i2s_num].tx_ws_sig, 0, 0);
  186. gpio_matrix_out_check_and_set(pin->bck_io_num, i2s_periph_signal[i2s_num].tx_bck_sig, 0, 0);
  187. } else {
  188. gpio_matrix_out_check_and_set(pin->ws_io_num, i2s_periph_signal[i2s_num].rx_ws_sig, 0, 0);
  189. gpio_matrix_out_check_and_set(pin->bck_io_num, i2s_periph_signal[i2s_num].rx_bck_sig, 0, 0);
  190. }
  191. }
  192. gpio_matrix_out_check_and_set(pin->data_out_num, i2s_periph_signal[i2s_num].data_out_sig, 0, 0);
  193. gpio_matrix_in_check_and_set(pin->data_in_num, i2s_periph_signal[i2s_num].data_in_sig, 0);
  194. return ESP_OK;
  195. }
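/* Typical usage of i2s_set_pin() (illustrative pin numbers, not from the original source):
 *   const i2s_pin_config_t pin_cfg = {
 *       .mck_io_num = GPIO_NUM_0,
 *       .bck_io_num = GPIO_NUM_26,
 *       .ws_io_num = GPIO_NUM_25,
 *       .data_out_num = GPIO_NUM_22,
 *       .data_in_num = I2S_PIN_NO_CHANGE,
 *   };
 *   i2s_set_pin(I2S_NUM_0, &pin_cfg);
 */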
  196. /**************************************************************
  197. * I2S DMA operation *
  198. * - i2s_dma_rx_callback *
  199. * - i2s_dma_tx_callback *
  200. * - i2s_intr_handler_default *
  201. * - i2s_tx_reset *
  202. * - i2s_rx_reset *
  203. * - i2s_tx_start *
  204. * - i2s_rx_start *
  205. * - i2s_tx_stop *
  206. * - i2s_rx_stop *
  207. **************************************************************/
  208. #if SOC_GDMA_SUPPORTED
  209. static bool IRAM_ATTR i2s_dma_rx_callback(gdma_channel_handle_t dma_chan, gdma_event_data_t *event_data, void *user_data)
  210. {
  211. i2s_obj_t *p_i2s = (i2s_obj_t *) user_data;
  212. portBASE_TYPE high_priority_task_awoken = 0;
  213. BaseType_t ret = 0;
  214. int dummy;
  215. i2s_event_t i2s_event;
  216. uint32_t finish_desc;
  217. if (p_i2s->rx) {
  218. finish_desc = event_data->rx_eof_desc_addr;
  219. if (xQueueIsQueueFullFromISR(p_i2s->rx->queue)) {
  220. xQueueReceiveFromISR(p_i2s->rx->queue, &dummy, &high_priority_task_awoken);
  221. }
  222. ret = xQueueSendFromISR(p_i2s->rx->queue, &(((lldesc_t *)finish_desc)->buf), &high_priority_task_awoken);
  223. if (p_i2s->i2s_queue) {
  224. i2s_event.type = (ret == pdPASS) ? I2S_EVENT_RX_DONE : I2S_EVENT_RX_Q_OVF;
  225. if (p_i2s->i2s_queue && xQueueIsQueueFullFromISR(p_i2s->i2s_queue)) {
  226. xQueueReceiveFromISR(p_i2s->i2s_queue, &dummy, &high_priority_task_awoken);
  227. }
  228. xQueueSendFromISR(p_i2s->i2s_queue, (void * )&i2s_event, &high_priority_task_awoken);
  229. }
  230. }
  231. return high_priority_task_awoken;
  232. }
  233. static bool IRAM_ATTR i2s_dma_tx_callback(gdma_channel_handle_t dma_chan, gdma_event_data_t *event_data, void *user_data)
  234. {
  235. i2s_obj_t *p_i2s = (i2s_obj_t *) user_data;
  236. portBASE_TYPE high_priority_task_awoken = 0;
  237. BaseType_t ret;
  238. int dummy;
  239. i2s_event_t i2s_event;
  240. uint32_t finish_desc;
  241. if (p_i2s->tx) {
  242. finish_desc = event_data->tx_eof_desc_addr;
  243. if (xQueueIsQueueFullFromISR(p_i2s->tx->queue)) {
  244. xQueueReceiveFromISR(p_i2s->tx->queue, &dummy, &high_priority_task_awoken);
  245. if (p_i2s->tx_desc_auto_clear) {
  246. memset((void *) dummy, 0, p_i2s->tx->buf_size);
  247. }
  248. }
  249. ret = xQueueSendFromISR(p_i2s->tx->queue, &(((lldesc_t *)finish_desc)->buf), &high_priority_task_awoken);
  250. if (p_i2s->i2s_queue) {
  251. i2s_event.type = (ret == pdPASS) ? I2S_EVENT_TX_DONE : I2S_EVENT_TX_Q_OVF;
  252. if (xQueueIsQueueFullFromISR(p_i2s->i2s_queue)) {
  253. xQueueReceiveFromISR(p_i2s->i2s_queue, &dummy, &high_priority_task_awoken);
  254. }
  255. xQueueSendFromISR(p_i2s->i2s_queue, (void * )&i2s_event, &high_priority_task_awoken);
  256. }
  257. }
  258. return high_priority_task_awoken;
  259. }
  260. #else
  261. static void IRAM_ATTR i2s_intr_handler_default(void *arg)
  262. {
  263. i2s_obj_t *p_i2s = (i2s_obj_t *) arg;
  264. uint32_t status = i2s_hal_get_intr_status(&(p_i2s->hal));
  265. if (status == 0) {
  266. //Avoid spurious interrupt
  267. return;
  268. }
  269. i2s_event_t i2s_event;
  270. int dummy;
  271. portBASE_TYPE high_priority_task_awoken = 0;
  272. uint32_t finish_desc = 0;
  273. if ((status & I2S_INTR_OUT_DSCR_ERR) || (status & I2S_INTR_IN_DSCR_ERR)) {
  274. ESP_EARLY_LOGE(TAG, "dma error, interrupt status: 0x%08x", status);
  275. if (p_i2s->i2s_queue) {
  276. i2s_event.type = I2S_EVENT_DMA_ERROR;
  277. if (xQueueIsQueueFullFromISR(p_i2s->i2s_queue)) {
  278. xQueueReceiveFromISR(p_i2s->i2s_queue, &dummy, &high_priority_task_awoken);
  279. }
  280. xQueueSendFromISR(p_i2s->i2s_queue, (void * )&i2s_event, &high_priority_task_awoken);
  281. }
  282. }
  283. if ((status & I2S_INTR_OUT_EOF) && p_i2s->tx) {
  284. i2s_hal_get_out_eof_des_addr(&(p_i2s->hal), &finish_desc);
  285. // All buffers are empty. This means we have an underflow on our hands.
  286. if (xQueueIsQueueFullFromISR(p_i2s->tx->queue)) {
  287. xQueueReceiveFromISR(p_i2s->tx->queue, &dummy, &high_priority_task_awoken);
  288. // See if tx descriptor needs to be auto cleared:
  289. // This will avoid any kind of noise that may get introduced due to transmission
  290. // of previous data from tx descriptor on I2S line.
  291. if (p_i2s->tx_desc_auto_clear == true) {
  292. memset((void *) dummy, 0, p_i2s->tx->buf_size);
  293. }
  294. }
  295. xQueueSendFromISR(p_i2s->tx->queue, &(((lldesc_t *)finish_desc)->buf), &high_priority_task_awoken);
  296. if (p_i2s->i2s_queue) {
  297. i2s_event.type = I2S_EVENT_TX_DONE;
  298. if (xQueueIsQueueFullFromISR(p_i2s->i2s_queue)) {
  299. xQueueReceiveFromISR(p_i2s->i2s_queue, &dummy, &high_priority_task_awoken);
  300. }
  301. xQueueSendFromISR(p_i2s->i2s_queue, (void * )&i2s_event, &high_priority_task_awoken);
  302. }
  303. }
  304. if ((status & I2S_INTR_IN_SUC_EOF) && p_i2s->rx) {
  305. // All buffers are full. This means we have an overflow.
  306. i2s_hal_get_in_eof_des_addr(&(p_i2s->hal), &finish_desc);
  307. if (xQueueIsQueueFullFromISR(p_i2s->rx->queue)) {
  308. xQueueReceiveFromISR(p_i2s->rx->queue, &dummy, &high_priority_task_awoken);
  309. }
  310. xQueueSendFromISR(p_i2s->rx->queue, &(((lldesc_t *)finish_desc)->buf), &high_priority_task_awoken);
  311. if (p_i2s->i2s_queue) {
  312. i2s_event.type = I2S_EVENT_RX_DONE;
  313. if (p_i2s->i2s_queue && xQueueIsQueueFullFromISR(p_i2s->i2s_queue)) {
  314. xQueueReceiveFromISR(p_i2s->i2s_queue, &dummy, &high_priority_task_awoken);
  315. }
  316. xQueueSendFromISR(p_i2s->i2s_queue, (void * )&i2s_event, &high_priority_task_awoken);
  317. }
  318. }
  319. i2s_hal_clear_intr_status(&(p_i2s->hal), status);
  320. if (high_priority_task_awoken == pdTRUE) {
  321. portYIELD_FROM_ISR();
  322. }
  323. }
  324. #endif
  325. static void i2s_tx_reset(i2s_port_t i2s_num)
  326. {
  327. p_i2s[i2s_num]->tx->curr_ptr = NULL;
  328. p_i2s[i2s_num]->tx->rw_pos = 0;
  329. #if SOC_GDMA_SUPPORTED
  330. // gdma_stop(p_i2s[i2s_num]->tx_dma_chan);
  331. i2s_hal_reset_tx(&(p_i2s[i2s_num]->hal));
  332. gdma_reset(p_i2s[i2s_num]->tx_dma_chan);
  333. i2s_hal_reset_tx_fifo(&(p_i2s[i2s_num]->hal));
  334. #else
  335. // Reset I2S TX module first, and then, reset DMA and FIFO.
  336. i2s_hal_reset_tx(&(p_i2s[i2s_num]->hal));
  337. i2s_hal_reset_txdma(&(p_i2s[i2s_num]->hal));
  338. i2s_hal_reset_tx_fifo(&(p_i2s[i2s_num]->hal));
  339. #endif
  340. }
  341. static void i2s_rx_reset(i2s_port_t i2s_num)
  342. {
  343. p_i2s[i2s_num]->rx->curr_ptr = NULL;
  344. p_i2s[i2s_num]->rx->rw_pos = 0;
  345. #if SOC_GDMA_SUPPORTED
  346. i2s_hal_reset_rx(&(p_i2s[i2s_num]->hal));
  347. gdma_reset(p_i2s[i2s_num]->rx_dma_chan);
  348. i2s_hal_reset_rx_fifo(&(p_i2s[i2s_num]->hal));
  349. #else
  350. // Reset I2S RX module first, and then, reset DMA and FIFO.
  351. i2s_hal_reset_rx(&(p_i2s[i2s_num]->hal));
  352. i2s_hal_reset_rxdma(&(p_i2s[i2s_num]->hal));
  353. i2s_hal_reset_rx_fifo(&(p_i2s[i2s_num]->hal));
  354. #endif
  355. }
  356. static void i2s_tx_start(i2s_port_t i2s_num)
  357. {
  358. #if SOC_GDMA_SUPPORTED
  359. gdma_start(p_i2s[i2s_num]->tx_dma_chan, (uint32_t) p_i2s[i2s_num]->tx->desc[0]);
  360. #else
  361. i2s_hal_enable_tx_dma(&(p_i2s[i2s_num]->hal));
  362. i2s_hal_enable_tx_intr(&(p_i2s[i2s_num]->hal));
  363. i2s_hal_start_tx_link(&(p_i2s[i2s_num]->hal), (uint32_t) p_i2s[i2s_num]->tx->desc[0]);
  364. #endif
  365. i2s_hal_start_tx(&(p_i2s[i2s_num]->hal));
  366. }
  367. static void i2s_rx_start(i2s_port_t i2s_num)
  368. {
  369. #if SOC_GDMA_SUPPORTED
  370. gdma_start(p_i2s[i2s_num]->rx_dma_chan, (uint32_t) p_i2s[i2s_num]->rx->desc[0]);
  371. #else
  372. i2s_hal_enable_rx_dma(&(p_i2s[i2s_num]->hal));
  373. i2s_hal_enable_rx_intr(&(p_i2s[i2s_num]->hal));
  374. i2s_hal_start_rx_link(&(p_i2s[i2s_num]->hal), (uint32_t) p_i2s[i2s_num]->rx->desc[0]);
  375. #endif
  376. i2s_hal_start_rx(&(p_i2s[i2s_num]->hal));
  377. }
  378. static void i2s_tx_stop(i2s_port_t i2s_num)
  379. {
  380. #if SOC_GDMA_SUPPORTED
  381. gdma_stop(p_i2s[i2s_num]->tx_dma_chan);
  382. #else
  383. i2s_hal_stop_tx_link(&(p_i2s[i2s_num]->hal));
  384. i2s_hal_stop_tx(&(p_i2s[i2s_num]->hal));
  385. i2s_hal_disable_tx_intr(&(p_i2s[i2s_num]->hal));
  386. i2s_hal_disable_tx_dma(&(p_i2s[i2s_num]->hal));
  387. #endif
  388. }
  389. static void i2s_rx_stop(i2s_port_t i2s_num)
  390. {
  391. #if SOC_GDMA_SUPPORTED
  392. gdma_stop(p_i2s[i2s_num]->rx_dma_chan);
  393. #else
  394. i2s_hal_stop_rx_link(&(p_i2s[i2s_num]->hal));
  395. i2s_hal_stop_rx(&(p_i2s[i2s_num]->hal));
  396. i2s_hal_disable_rx_intr(&(p_i2s[i2s_num]->hal));
  397. i2s_hal_disable_rx_dma(&(p_i2s[i2s_num]->hal));
  398. #endif
  399. }
  400. /**************************************************************
  401. * I2S buffer operation *
  402. * - i2s_alloc_dma_buffer *
  403. * - i2s_destroy_dma_queue *
  404. * - i2s_create_dma_queue *
  405. * - i2s_zero_dma_buffer *
  406. **************************************************************/
  407. static esp_err_t i2s_alloc_dma_buffer(i2s_port_t i2s_num, int data_bits, int ch)
  408. {
  409. if (p_i2s[i2s_num]->channel_num != ch) {
  410. p_i2s[i2s_num]->channel_num = (ch == 2) ? 2 : 1;
  411. }
  412. i2s_dma_t *save_tx = NULL, *save_rx = NULL;
  413. if (data_bits != p_i2s[i2s_num]->bits_per_sample) {
  414. p_i2s[i2s_num]->bits_per_sample = data_bits;
  415. // Round bytes_per_sample up to next multiple of 16 bits
  416. int halfwords_per_sample = (data_bits + 15) / 16;
  417. p_i2s[i2s_num]->bytes_per_sample = halfwords_per_sample * 2;
// The size of a single DMA buffer is limited to 4092 bytes
  419. if (p_i2s[i2s_num]->dma_buf_len * p_i2s[i2s_num]->bytes_per_sample * p_i2s[i2s_num]->channel_num > 4092) {
  420. p_i2s[i2s_num]->dma_buf_len = 4092 / p_i2s[i2s_num]->bytes_per_sample / p_i2s[i2s_num]->channel_num;
  421. }
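/* Illustrative numbers for the clamp above (not from the original source): 24-bit samples are
 * rounded up to 4 bytes per sample; with 2 channels and dma_buf_len = 1024,
 * 1024 * 4 * 2 = 8192 > 4092, so dma_buf_len is clamped to 4092 / 4 / 2 = 511 frames per buffer. */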
  422. // Re-create TX DMA buffer
  423. if (p_i2s[i2s_num]->mode & I2S_MODE_TX) {
  424. save_tx = p_i2s[i2s_num]->tx;
// Destroy the old TX DMA queue if it exists
  426. if (save_tx) {
  427. i2s_destroy_dma_queue(i2s_num, save_tx);
  428. }
  429. p_i2s[i2s_num]->tx = i2s_create_dma_queue(i2s_num, p_i2s[i2s_num]->dma_buf_count, p_i2s[i2s_num]->dma_buf_len);
  430. if (p_i2s[i2s_num]->tx == NULL) {
  431. ESP_LOGE(TAG, "Failed to create tx dma buffer");
  432. i2s_driver_uninstall(i2s_num);
  433. return ESP_ERR_NO_MEM;
  434. }
  435. }
  436. // Re-create RX DMA buffer
  437. if (p_i2s[i2s_num]->mode & I2S_MODE_RX) {
  438. save_rx = p_i2s[i2s_num]->rx;
// Destroy the old RX DMA queue if it exists
  440. if (save_rx) {
  441. i2s_destroy_dma_queue(i2s_num, save_rx);
  442. }
  443. p_i2s[i2s_num]->rx = i2s_create_dma_queue(i2s_num, p_i2s[i2s_num]->dma_buf_count, p_i2s[i2s_num]->dma_buf_len);
  444. if (p_i2s[i2s_num]->rx == NULL) {
  445. ESP_LOGE(TAG, "Failed to create rx dma buffer");
  446. i2s_driver_uninstall(i2s_num);
  447. return ESP_ERR_NO_MEM;
  448. }
  449. i2s_hal_set_rx_eof_num(&(p_i2s[i2s_num]->hal), p_i2s[i2s_num]->dma_buf_len * p_i2s[i2s_num]->channel_num * p_i2s[i2s_num]->bytes_per_sample);
  450. }
  451. }
  452. return ESP_OK;
  453. }
  454. static esp_err_t i2s_destroy_dma_queue(i2s_port_t i2s_num, i2s_dma_t *dma)
  455. {
  456. int bux_idx;
  457. if (p_i2s[i2s_num] == NULL) {
  458. ESP_LOGE(TAG, "Not initialized yet");
  459. return ESP_ERR_INVALID_ARG;
  460. }
  461. if (dma == NULL) {
  462. ESP_LOGE(TAG, "dma is NULL");
  463. return ESP_ERR_INVALID_ARG;
  464. }
  465. for (bux_idx = 0; bux_idx < p_i2s[i2s_num]->dma_buf_count; bux_idx++) {
  466. if (dma->desc && dma->desc[bux_idx]) {
  467. free(dma->desc[bux_idx]);
  468. }
  469. if (dma->buf && dma->buf[bux_idx]) {
  470. free(dma->buf[bux_idx]);
  471. }
  472. }
  473. if (dma->buf) {
  474. free(dma->buf);
  475. }
  476. if (dma->desc) {
  477. free(dma->desc);
  478. }
  479. ESP_LOGI(TAG, "DMA queue destroyed");
  480. vQueueDelete(dma->queue);
  481. vSemaphoreDelete(dma->mux);
  482. free(dma);
  483. return ESP_OK;
  484. }
  485. static i2s_dma_t *i2s_create_dma_queue(i2s_port_t i2s_num, int dma_buf_count, int dma_buf_len)
  486. {
  487. int bux_idx;
  488. int sample_size = p_i2s[i2s_num]->bytes_per_sample * p_i2s[i2s_num]->channel_num;
  489. i2s_dma_t *dma = (i2s_dma_t *) malloc(sizeof(i2s_dma_t));
  490. if (dma == NULL) {
  491. ESP_LOGE(TAG, "Error malloc i2s_dma_t");
  492. return NULL;
  493. }
  494. memset(dma, 0, sizeof(i2s_dma_t));
  495. dma->buf = (char **)malloc(sizeof(char *) * dma_buf_count);
  496. if (dma->buf == NULL) {
  497. ESP_LOGE(TAG, "Error malloc dma buffer pointer");
  498. free(dma);
  499. return NULL;
  500. }
  501. memset(dma->buf, 0, sizeof(char *) * dma_buf_count);
  502. for (bux_idx = 0; bux_idx < dma_buf_count; bux_idx++) {
  503. dma->buf[bux_idx] = (char *) heap_caps_calloc(1, dma_buf_len * sample_size, MALLOC_CAP_DMA);
  504. if (dma->buf[bux_idx] == NULL) {
  505. ESP_LOGE(TAG, "Error malloc dma buffer");
  506. i2s_destroy_dma_queue(i2s_num, dma);
  507. return NULL;
  508. }
  509. ESP_LOGD(TAG, "Addr[%d] = %d", bux_idx, (int)dma->buf[bux_idx]);
  510. }
  511. dma->desc = (lldesc_t **) malloc(sizeof(lldesc_t *) * dma_buf_count);
  512. if (dma->desc == NULL) {
  513. ESP_LOGE(TAG, "Error malloc dma description");
  514. i2s_destroy_dma_queue(i2s_num, dma);
  515. return NULL;
  516. }
  517. for (bux_idx = 0; bux_idx < dma_buf_count; bux_idx++) {
  518. dma->desc[bux_idx] = (lldesc_t *) heap_caps_malloc(sizeof(lldesc_t), MALLOC_CAP_DMA);
  519. if (dma->desc[bux_idx] == NULL) {
  520. ESP_LOGE(TAG, "Error malloc dma description entry");
  521. i2s_destroy_dma_queue(i2s_num, dma);
  522. return NULL;
  523. }
  524. }
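// Link the descriptors into a ring: each 'empty' field points to the next descriptor and the
// last one wraps back to the first, so the DMA engine cycles through the buffers continuously.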
  525. for (bux_idx = 0; bux_idx < dma_buf_count; bux_idx++) {
  526. dma->desc[bux_idx]->owner = 1;
  527. dma->desc[bux_idx]->eof = 1;
  528. dma->desc[bux_idx]->sosf = 0;
  529. dma->desc[bux_idx]->length = dma_buf_len * sample_size;
  530. dma->desc[bux_idx]->size = dma_buf_len * sample_size;
  531. dma->desc[bux_idx]->buf = (uint8_t *) dma->buf[bux_idx];
  532. dma->desc[bux_idx]->offset = 0;
  533. dma->desc[bux_idx]->empty = (uint32_t)((bux_idx < (dma_buf_count - 1)) ? (dma->desc[bux_idx + 1]) : dma->desc[0]);
  534. }
  535. dma->queue = xQueueCreate(dma_buf_count - 1, sizeof(char *));
  536. dma->mux = xSemaphoreCreateMutex();
  537. dma->buf_size = dma_buf_len * sample_size;
  538. ESP_LOGI(TAG, "DMA Malloc info, datalen=blocksize=%d, dma_buf_count=%d", dma_buf_len * sample_size, dma_buf_count);
  539. return dma;
  540. }
  541. esp_err_t i2s_zero_dma_buffer(i2s_port_t i2s_num)
  542. {
  543. ESP_RETURN_ON_FALSE((i2s_num < I2S_NUM_MAX), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
  544. if (p_i2s[i2s_num]->rx && p_i2s[i2s_num]->rx->buf != NULL && p_i2s[i2s_num]->rx->buf_size != 0) {
  545. for (int i = 0; i < p_i2s[i2s_num]->dma_buf_count; i++) {
  546. memset(p_i2s[i2s_num]->rx->buf[i], 0, p_i2s[i2s_num]->rx->buf_size);
  547. }
  548. }
  549. if (p_i2s[i2s_num]->tx && p_i2s[i2s_num]->tx->buf != NULL && p_i2s[i2s_num]->tx->buf_size != 0) {
  550. int bytes_left = 0;
  551. bytes_left = (p_i2s[i2s_num]->tx->buf_size - p_i2s[i2s_num]->tx->rw_pos) % 4;
  552. if (bytes_left) {
  553. size_t zero_bytes = 0, bytes_written;
  554. i2s_write(i2s_num, (void *)&zero_bytes, bytes_left, &bytes_written, portMAX_DELAY);
  555. }
  556. for (int i = 0; i < p_i2s[i2s_num]->dma_buf_count; i++) {
  557. memset(p_i2s[i2s_num]->tx->buf[i], 0, p_i2s[i2s_num]->tx->buf_size);
  558. }
  559. }
  560. return ESP_OK;
  561. }
  562. /**************************************************************
  563. * I2S clock operation *
  564. * - i2s_get_clk *
  565. * - i2s_apll_get_fi2s *
  566. * - i2s_apll_calculate_fi2s *
  567. * - i2s_fbclk_cal *
  568. **************************************************************/
  569. float i2s_get_clk(i2s_port_t i2s_num)
  570. {
  571. ESP_RETURN_ON_FALSE((i2s_num < I2S_NUM_MAX), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
  572. return (float)p_i2s[i2s_num]->sample_rate;
  573. }
  574. #if SOC_I2S_SUPPORTS_APLL
  575. static float i2s_apll_get_fi2s(int bits_per_sample, int sdm0, int sdm1, int sdm2, int odir)
  576. {
  577. int f_xtal = (int)rtc_clk_xtal_freq_get() * 1000000;
  578. #if CONFIG_IDF_TARGET_ESP32
  579. /* ESP32 rev0 silicon issue for APLL range/accuracy, please see ESP32 ECO document for more information on this */
  580. if (esp_efuse_get_chip_ver() == 0) {
  581. sdm0 = 0;
  582. sdm1 = 0;
  583. }
  584. #endif
  585. float fout = f_xtal * (sdm2 + sdm1 / 256.0f + sdm0 / 65536.0f + 4);
  586. if (fout < SOC_I2S_APLL_MIN_FREQ || fout > SOC_I2S_APLL_MAX_FREQ) {
  587. return SOC_I2S_APLL_MAX_FREQ;
  588. }
  589. float fpll = fout / (2 * (odir + 2)); //== fi2s (N=1, b=0, a=1)
  590. return fpll / 2;
  591. }
/**
 * @brief APLL parameter calculation. The APLL output frequency is given by:
 *
 *        apll_freq = xtal_freq * (4 + sdm2 + sdm1/256 + sdm0/65536) / ((o_div + 2) * 2)
 *                  = fout / ((o_div + 2) * 2)
 *
 *        The dividend (fout) in this expression should be in the range of 240 - 600 MHz.
 *        In rev. 0 of the ESP32, sdm0 and sdm1 are unused and always set to 0.
 *        * sdm0  frequency adjustment parameter, 0..255
 *        * sdm1  frequency adjustment parameter, 0..255
 *        * sdm2  frequency adjustment parameter, 0..63
 *        * o_div frequency divider, 0..31
 *
 *        The most accurate way to find the sdm0..2 and o_div parameters is to loop through them all,
 *        apply the formula above, and keep the closest match to the desired frequency.
 *        However, 256 * 256 * 64 * 32 = 134,217,728 iterations are too slow on the ESP32, so instead:
 *        1. Choose the parameter with the largest influence first: with 350 MHz < fout < 500 MHz,
 *           sdm2 is limited to the range 4 to 9, and the sdm2 whose average output frequency is
 *           closest to the desired frequency is selected.
 *        2. Repeat the same averaging search for the less influential, finer-grained parameters.
 *        3. Finally, sweep the finest parameter exhaustively and keep the best match.
 *
 * @param[in]  rate             The I2S frequency (MCLK)
 * @param[in]  bits_per_sample  The bits per sample
 * @param[out] sdm0             The sdm 0
 * @param[out] sdm1             The sdm 1
 * @param[out] sdm2             The sdm 2
 * @param[out] odir             The odir
 *
 * @return     ESP_ERR_INVALID_ARG or ESP_OK
 */
  625. static esp_err_t i2s_apll_calculate_fi2s(int rate, int bits_per_sample, int *sdm0, int *sdm1, int *sdm2, int *odir)
  626. {
  627. int _odir, _sdm0, _sdm1, _sdm2;
  628. float avg;
  629. float min_rate, max_rate, min_diff;
  630. if (rate / bits_per_sample / 2 / 8 < SOC_I2S_APLL_MIN_RATE) {
  631. return ESP_ERR_INVALID_ARG;
  632. }
  633. *sdm0 = 0;
  634. *sdm1 = 0;
  635. *sdm2 = 0;
  636. *odir = 0;
  637. min_diff = SOC_I2S_APLL_MAX_FREQ;
  638. for (_sdm2 = 4; _sdm2 < 9; _sdm2 ++) {
  639. max_rate = i2s_apll_get_fi2s(bits_per_sample, 255, 255, _sdm2, 0);
  640. min_rate = i2s_apll_get_fi2s(bits_per_sample, 0, 0, _sdm2, 31);
  641. avg = (max_rate + min_rate) / 2;
  642. if (abs(avg - rate) < min_diff) {
  643. min_diff = abs(avg - rate);
  644. *sdm2 = _sdm2;
  645. }
  646. }
  647. min_diff = SOC_I2S_APLL_MAX_FREQ;
  648. for (_odir = 0; _odir < 32; _odir ++) {
  649. max_rate = i2s_apll_get_fi2s(bits_per_sample, 255, 255, *sdm2, _odir);
  650. min_rate = i2s_apll_get_fi2s(bits_per_sample, 0, 0, *sdm2, _odir);
  651. avg = (max_rate + min_rate) / 2;
  652. if (abs(avg - rate) < min_diff) {
  653. min_diff = abs(avg - rate);
  654. *odir = _odir;
  655. }
  656. }
  657. min_diff = SOC_I2S_APLL_MAX_FREQ;
  658. for (_sdm2 = 4; _sdm2 < 9; _sdm2 ++) {
  659. max_rate = i2s_apll_get_fi2s(bits_per_sample, 255, 255, _sdm2, *odir);
  660. min_rate = i2s_apll_get_fi2s(bits_per_sample, 0, 0, _sdm2, *odir);
  661. avg = (max_rate + min_rate) / 2;
  662. if (abs(avg - rate) < min_diff) {
  663. min_diff = abs(avg - rate);
  664. *sdm2 = _sdm2;
  665. }
  666. }
  667. min_diff = SOC_I2S_APLL_MAX_FREQ;
  668. for (_sdm1 = 0; _sdm1 < 256; _sdm1 ++) {
  669. max_rate = i2s_apll_get_fi2s(bits_per_sample, 255, _sdm1, *sdm2, *odir);
  670. min_rate = i2s_apll_get_fi2s(bits_per_sample, 0, _sdm1, *sdm2, *odir);
  671. avg = (max_rate + min_rate) / 2;
  672. if (abs(avg - rate) < min_diff) {
  673. min_diff = abs(avg - rate);
  674. *sdm1 = _sdm1;
  675. }
  676. }
  677. min_diff = SOC_I2S_APLL_MAX_FREQ;
  678. for (_sdm0 = 0; _sdm0 < 256; _sdm0 ++) {
  679. avg = i2s_apll_get_fi2s(bits_per_sample, _sdm0, *sdm1, *sdm2, *odir);
  680. if (abs(avg - rate) < min_diff) {
  681. min_diff = abs(avg - rate);
  682. *sdm0 = _sdm0;
  683. }
  684. }
  685. return ESP_OK;
  686. }
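/* Worked example of the APLL formula above (illustrative values, assuming a 40 MHz crystal):
 *   sdm2 = 6, sdm1 = 128, sdm0 = 0, o_div = 0
 *   fout      = 40 MHz * (4 + 6 + 128/256 + 0/65536) = 420 MHz   (within the 350-500 MHz search window)
 *   apll_freq = 420 MHz / ((0 + 2) * 2)              = 105 MHz
 */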
  687. #endif
  688. static esp_err_t i2s_fbclk_cal(int i2s_num, uint32_t rate, int channel, int channel_bit, uint32_t *sclk, uint32_t *fbck, uint32_t *bck_div)
  689. {
// Select I2S_D2CLK (160 MHz) as the source clock by default
  691. uint32_t _sclk = I2S_LL_BASE_CLK;
  692. uint32_t _fbck = rate * channel * channel_bit;
  693. i2s_mclk_multiple_t multi = p_i2s[i2s_num]->mclk_multiple ? p_i2s[i2s_num]->mclk_multiple : I2S_MCLK_MULTIPLE_256;
  694. uint32_t _bck_div = rate * multi / _fbck;
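/* Illustrative numbers (not from the original source): rate = 44100 Hz, 2 channels, 16-bit slots,
 * MCLK multiple 256:
 *   _fbck    = 44100 * 2 * 16          = 1,411,200 Hz
 *   _bck_div = 44100 * 256 / 1,411,200 = 8
 */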
  695. i2s_clock_src_t clk_src = I2S_CLK_D2CLK;
// ADC/DAC mode is only supported on ESP32
  697. #if SOC_I2S_SUPPORTS_ADC_DAC
  698. if ( p_i2s[i2s_num]->mode & (I2S_MODE_DAC_BUILT_IN | I2S_MODE_ADC_BUILT_IN)) {
  699. _fbck = rate * I2S_LL_AD_BCK_FACTOR * 2;
  700. _bck_div = I2S_LL_AD_BCK_FACTOR;
  701. }
  702. #endif // SOC_I2S_SUPPORTS_ADC_DAC
  703. if ( p_i2s[i2s_num]->mode & I2S_MODE_PDM) {
  704. #if SOC_I2S_SUPPORTS_PDM_TX
  705. if ( p_i2s[i2s_num]->mode & I2S_MODE_TX) {
  706. int fp = i2s_hal_get_tx_pdm_fp(&(p_i2s[i2s_num]->hal));
  707. int fs = i2s_hal_get_tx_pdm_fs(&(p_i2s[i2s_num]->hal));
  708. _fbck = rate * I2S_LL_PDM_BCK_FACTOR * fp / fs;
  709. }
  710. #endif //SOC_I2S_SUPPORTS_PDM_TX
  711. #if SOC_I2S_SUPPORTS_PDM_RX
  712. if ( p_i2s[i2s_num]->mode & I2S_MODE_RX) {
  713. i2s_pdm_dsr_t dsr;
  714. i2s_hal_get_rx_pdm_dsr(&(p_i2s[i2s_num]->hal), &dsr);
  715. _fbck = rate * I2S_LL_PDM_BCK_FACTOR * (dsr == I2S_PDM_DSR_16S ? 2 : 1);
  716. }
  717. #endif // SOC_I2S_SUPPORTS_PDM_RX
  718. _bck_div = 8;
  719. }
  720. #if SOC_I2S_SUPPORTS_APLL
  721. int sdm0 = 0;
  722. int sdm1 = 0;
  723. int sdm2 = 0;
  724. int odir = 0;
  725. //If APLL is specified, try to calculate in APLL
  726. if (p_i2s[i2s_num]->use_apll && i2s_apll_calculate_fi2s(p_i2s[i2s_num]->fixed_mclk, channel_bit, &sdm0, &sdm1, &sdm2, &odir) == ESP_OK) {
  727. _sclk = p_i2s[i2s_num]->fixed_mclk;
  728. clk_src = I2S_CLK_APLL;
  729. ESP_LOGD(TAG, "sdm0=%d, sdm1=%d, sdm2=%d, odir=%d", sdm0, sdm1, sdm2, odir);
  730. rtc_clk_apll_enable(1, sdm0, sdm1, sdm2, odir);
  731. }
  732. #endif // SOC_I2S_SUPPORTS_APLL
  733. if ((_fbck * _bck_div) > _sclk) {
ESP_LOGE(TAG, "sample rate is too large");
  735. return ESP_ERR_INVALID_ARG;
  736. }
  737. i2s_hal_set_clock_src(&(p_i2s[i2s_num]->hal), clk_src);
  738. *sclk = _sclk;
  739. *fbck = _fbck;
  740. *bck_div = _bck_div;
  741. return ESP_OK;
  742. }
  743. /**************************************************************
  744. * I2S configuration *
  745. * - i2s_get_active_chan_num *
  746. * - i2s_set_dac_mode *
  747. * - _i2s_adc_mode_recover *
  748. * - i2s_set_adc_mode *
  749. * - i2s_adc_enable *
  750. * - i2s_adc_disable *
  751. * - i2s_set_sample_rates *
  752. * - i2s_pcm_config *
  753. * - i2s_set_pdm_rx_down_sample *
  754. * - i2s_set_pdm_tx_up_sample *
  755. * - i2s_check_cfg_static *
  756. * - i2s_param_config *
  757. * - i2s_set_clk *
  758. * - i2s_set_mode *
  759. **************************************************************/
  760. static uint32_t i2s_get_active_chan_num(i2s_hal_config_t *hal_cfg)
  761. {
  762. switch (hal_cfg->chan_fmt) {
  763. case I2S_CHANNEL_FMT_RIGHT_LEFT: //fall through
  764. case I2S_CHANNEL_FMT_ALL_RIGHT: //fall through
  765. case I2S_CHANNEL_FMT_ALL_LEFT:
  766. return 2;
  767. case I2S_CHANNEL_FMT_ONLY_RIGHT: //fall through
  768. case I2S_CHANNEL_FMT_ONLY_LEFT:
  769. return 1;
  770. #if SOC_I2S_SUPPORTS_TDM
  771. case I2S_CHANNEL_FMT_MULTIPLE: {
  772. uint32_t num = 0;
  773. uint32_t max_chan = 0;
  774. uint32_t chan_mask = hal_cfg->chan_mask;
  775. for (int i = 0; chan_mask && i < 16; i++, chan_mask >>= 1) {
  776. if ((chan_mask & 0x01) == 1) {
  777. num++;
  778. max_chan = i + 1;
  779. }
  780. }
  781. if (max_chan > hal_cfg->total_chan) {
  782. hal_cfg->total_chan = max_chan;
  783. }
  784. return num;
  785. }
  786. #endif
  787. default:
  788. return 0;
  789. }
  790. }
  791. #if SOC_I2S_SUPPORTS_ADC_DAC
  792. esp_err_t i2s_set_dac_mode(i2s_dac_mode_t dac_mode)
  793. {
  794. ESP_RETURN_ON_FALSE((dac_mode < I2S_DAC_CHANNEL_MAX), ESP_ERR_INVALID_ARG, TAG, "i2s dac mode error");
  795. if (dac_mode == I2S_DAC_CHANNEL_DISABLE) {
  796. dac_output_disable(DAC_CHANNEL_1);
  797. dac_output_disable(DAC_CHANNEL_2);
  798. dac_i2s_disable();
  799. } else {
  800. dac_i2s_enable();
  801. }
  802. if (dac_mode & I2S_DAC_CHANNEL_RIGHT_EN) {
  803. //DAC1, right channel
  804. dac_output_enable(DAC_CHANNEL_1);
  805. }
  806. if (dac_mode & I2S_DAC_CHANNEL_LEFT_EN) {
  807. //DAC2, left channel
  808. dac_output_enable(DAC_CHANNEL_2);
  809. }
  810. return ESP_OK;
  811. }
  812. static esp_err_t _i2s_adc_mode_recover(void)
  813. {
  814. ESP_RETURN_ON_FALSE(((_i2s_adc_unit != -1) && (_i2s_adc_channel != -1)), ESP_ERR_INVALID_ARG, TAG, "i2s ADC recover error, not initialized...");
  815. return adc_i2s_mode_init(_i2s_adc_unit, _i2s_adc_channel);
  816. }
  817. esp_err_t i2s_set_adc_mode(adc_unit_t adc_unit, adc1_channel_t adc_channel)
  818. {
  819. ESP_RETURN_ON_FALSE((adc_unit < ADC_UNIT_2), ESP_ERR_INVALID_ARG, TAG, "i2s ADC unit error, only support ADC1 for now");
  820. // For now, we only support SAR ADC1.
  821. _i2s_adc_unit = adc_unit;
  822. _i2s_adc_channel = adc_channel;
  823. return adc_i2s_mode_init(adc_unit, adc_channel);
  824. }
  825. esp_err_t i2s_adc_enable(i2s_port_t i2s_num)
  826. {
  827. ESP_RETURN_ON_FALSE((i2s_num < I2S_NUM_MAX), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
  828. ESP_RETURN_ON_FALSE((p_i2s[i2s_num] != NULL), ESP_ERR_INVALID_STATE, TAG, "Not initialized yet");
  829. ESP_RETURN_ON_FALSE((p_i2s[i2s_num]->mode & I2S_MODE_ADC_BUILT_IN), ESP_ERR_INVALID_STATE, TAG, "i2s built-in adc not enabled");
  830. adc1_dma_mode_acquire();
  831. _i2s_adc_mode_recover();
  832. i2s_rx_reset(i2s_num);
  833. return i2s_set_clk(i2s_num, p_i2s[i2s_num]->sample_rate, p_i2s[i2s_num]->bits_per_sample, p_i2s[i2s_num]->channel_num);
  834. }
  835. esp_err_t i2s_adc_disable(i2s_port_t i2s_num)
  836. {
  837. ESP_RETURN_ON_FALSE((i2s_num < I2S_NUM_MAX), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
  838. ESP_RETURN_ON_FALSE((p_i2s[i2s_num] != NULL), ESP_ERR_INVALID_STATE, TAG, "Not initialized yet");
  839. ESP_RETURN_ON_FALSE((p_i2s[i2s_num]->mode & I2S_MODE_ADC_BUILT_IN), ESP_ERR_INVALID_STATE, TAG, "i2s built-in adc not enabled");
  840. i2s_hal_stop_rx(&(p_i2s[i2s_num]->hal));
  841. adc1_lock_release();
  842. return ESP_OK;
  843. }
  844. #endif
  845. esp_err_t i2s_set_sample_rates(i2s_port_t i2s_num, uint32_t rate)
  846. {
  847. ESP_RETURN_ON_FALSE((i2s_num < I2S_NUM_MAX), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
  848. ESP_RETURN_ON_FALSE((p_i2s[i2s_num]->bytes_per_sample > 0), ESP_ERR_INVALID_ARG, TAG, "bits_per_sample not set");
  849. return i2s_set_clk(i2s_num, rate, p_i2s[i2s_num]->bits_per_sample, p_i2s[i2s_num]->channel_num);
  850. }
  851. #if SOC_I2S_SUPPORTS_PCM
  852. esp_err_t i2s_pcm_config(i2s_port_t i2s_num, const i2s_pcm_cfg_t *pcm_cfg)
  853. {
  854. ESP_RETURN_ON_FALSE(p_i2s[i2s_num], ESP_FAIL, TAG, "i2s has not installed yet");
  855. ESP_RETURN_ON_FALSE((p_i2s[i2s_num]->communication_format & I2S_COMM_FORMAT_STAND_PCM_SHORT),
  856. ESP_ERR_INVALID_ARG, TAG, "i2s communication mode is not PCM mode");
  857. i2s_stop(i2s_num);
  858. I2S_ENTER_CRITICAL(i2s_num);
  859. if (p_i2s[i2s_num]->mode & I2S_MODE_TX) {
  860. i2s_hal_tx_pcm_cfg(&(p_i2s[i2s_num]->hal), pcm_cfg->pcm_type);
  861. } else if(p_i2s[i2s_num]->mode & I2S_MODE_RX) {
  862. i2s_hal_rx_pcm_cfg(&(p_i2s[i2s_num]->hal), pcm_cfg->pcm_type);
  863. }
  864. I2S_EXIT_CRITICAL(i2s_num);
  865. i2s_start(i2s_num);
  866. return ESP_OK;
  867. }
  868. #endif
  869. #if SOC_I2S_SUPPORTS_PDM_RX
  870. esp_err_t i2s_set_pdm_rx_down_sample(i2s_port_t i2s_num, i2s_pdm_dsr_t downsample)
  871. {
  872. ESP_RETURN_ON_FALSE(p_i2s[i2s_num], ESP_FAIL, TAG, "i2s has not installed yet");
  873. ESP_RETURN_ON_FALSE((p_i2s[i2s_num]->mode & I2S_MODE_PDM), ESP_ERR_INVALID_ARG, TAG, "i2s mode is not PDM mode");
  874. i2s_stop(i2s_num);
  875. i2s_hal_set_rx_pdm_dsr(&(p_i2s[i2s_num]->hal), downsample);
  876. // i2s will start in 'i2s_set_clk'
  877. return i2s_set_clk(i2s_num, p_i2s[i2s_num]->sample_rate, p_i2s[i2s_num]->bits_per_sample, p_i2s[i2s_num]->channel_num);
  878. }
  879. #endif
  880. #if SOC_I2S_SUPPORTS_PDM_TX
  881. esp_err_t i2s_set_pdm_tx_up_sample(i2s_port_t i2s_num, const i2s_pdm_tx_upsample_cfg_t *upsample_cfg)
  882. {
  883. ESP_RETURN_ON_FALSE(p_i2s[i2s_num], ESP_FAIL, TAG, "i2s has not installed yet");
  884. ESP_RETURN_ON_FALSE((p_i2s[i2s_num]->mode & I2S_MODE_PDM), ESP_ERR_INVALID_ARG, TAG, "i2s mode is not PDM mode");
  885. i2s_stop(i2s_num);
  886. i2s_hal_set_tx_pdm_fpfs(&(p_i2s[i2s_num]->hal), upsample_cfg->fp, upsample_cfg->fs);
  887. // i2s will start in 'i2s_set_clk'
  888. return i2s_set_clk(i2s_num, upsample_cfg->sample_rate, p_i2s[i2s_num]->bits_per_sample, p_i2s[i2s_num]->channel_num);
  889. }
  890. #endif
  891. static esp_err_t i2s_check_cfg_static(i2s_port_t i2s_num)
  892. {
  893. i2s_hal_config_t *cfg = &p_i2s[i2s_num]->hal_cfg;
  894. #if SOC_I2S_SUPPORTS_ADC_DAC
// We only check whether the I2S number is invalid when built-in ADC/DAC mode is selected.
  896. ESP_RETURN_ON_FALSE(!((cfg->mode & I2S_MODE_ADC_BUILT_IN) && (i2s_num != I2S_NUM_0)), ESP_ERR_INVALID_ARG, TAG, "I2S ADC built-in only support on I2S0");
  897. ESP_RETURN_ON_FALSE(!((cfg->mode & I2S_MODE_DAC_BUILT_IN) && (i2s_num != I2S_NUM_0)), ESP_ERR_INVALID_ARG, TAG, "I2S DAC built-in only support on I2S0");
  898. return ESP_OK;
  899. #endif
// We only check whether the I2S number is invalid when PDM mode is selected.
ESP_RETURN_ON_FALSE(!((cfg->mode & I2S_MODE_PDM) && (i2s_num != I2S_NUM_0)), ESP_ERR_INVALID_ARG, TAG, "I2S PDM mode is only supported on I2S0");
  903. ESP_RETURN_ON_FALSE(cfg->comm_fmt && (cfg->comm_fmt < I2S_COMM_FORMAT_STAND_MAX), ESP_ERR_INVALID_ARG, TAG, "invalid communication formats");
  904. ESP_RETURN_ON_FALSE(!((cfg->comm_fmt & I2S_COMM_FORMAT_STAND_MSB) && (cfg->comm_fmt & I2S_COMM_FORMAT_STAND_PCM_LONG)), ESP_ERR_INVALID_ARG, TAG, "multiple communication formats specified");
  905. return ESP_OK;
  906. }
  907. static esp_err_t i2s_param_config(i2s_port_t i2s_num)
  908. {
  909. ESP_RETURN_ON_FALSE((i2s_num < I2S_NUM_MAX), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
  910. ESP_RETURN_ON_FALSE((i2s_check_cfg_static(i2s_num) == ESP_OK), ESP_ERR_INVALID_ARG, TAG, "param check error");
  911. i2s_hal_config_t *cfg = &p_i2s[i2s_num]->hal_cfg;
  912. p_i2s[i2s_num]->communication_format = cfg->comm_fmt;
  913. #if SOC_I2S_SUPPORTS_ADC_DAC
  914. if ((cfg->mode & I2S_MODE_DAC_BUILT_IN) || (cfg->mode & I2S_MODE_ADC_BUILT_IN)) {
  915. if (cfg->mode & I2S_MODE_DAC_BUILT_IN) {
  916. i2s_hal_enable_builtin_dac(&(p_i2s[i2s_num]->hal));
  917. }
  918. if (cfg->mode & I2S_MODE_ADC_BUILT_IN) {
// In built-in ADC mode, i2s_set_adc_mode must be called to
// initialize the specific ADC channel.
// At the current stage, only ADC1 and single-channel mode are supported.
// By default, the ADC data is in 12-bit resolution mode.
  923. adc_power_acquire();
  924. i2s_hal_enable_builtin_adc(&(p_i2s[i2s_num]->hal));
  925. }
  926. } else {
  927. i2s_hal_disable_builtin_dac(&(p_i2s[i2s_num]->hal));
  928. i2s_hal_disable_builtin_adc(&(p_i2s[i2s_num]->hal));
  929. #endif
  930. // configure I2S data port interface.
  931. i2s_hal_config_param(&(p_i2s[i2s_num]->hal), cfg);
  932. #if SOC_I2S_SUPPORTS_ADC_DAC
  933. }
  934. #endif
  935. if ((p_i2s[i2s_num]->mode & I2S_MODE_RX) && (p_i2s[i2s_num]->mode & I2S_MODE_TX)) {
  936. i2s_hal_enable_sig_loopback(&(p_i2s[i2s_num]->hal));
  937. if (p_i2s[i2s_num]->mode & I2S_MODE_MASTER) {
  938. i2s_hal_enable_master_fd_mode(&(p_i2s[i2s_num]->hal));
  939. } else {
  940. i2s_hal_enable_slave_fd_mode(&(p_i2s[i2s_num]->hal));
  941. }
  942. }
  943. return ESP_OK;
  944. }
  945. esp_err_t i2s_set_clk(i2s_port_t i2s_num, uint32_t rate, uint32_t bits_cfg, i2s_channel_t ch)
  946. {
  947. ESP_RETURN_ON_FALSE((i2s_num < I2S_NUM_MAX), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
  948. ESP_RETURN_ON_FALSE((p_i2s[i2s_num] != NULL), ESP_ERR_INVALID_ARG, TAG, "Not initialized yet");
  949. i2s_hal_config_t *cfg = &p_i2s[i2s_num]->hal_cfg;
  950. int data_bits = 0;
  951. int chan_bits = 0;
  952. int active_chan_num = 0;
  953. int chan_num = 0;
  954. cfg->ch = ch;
  955. cfg->sample_rate = rate;
  956. cfg->bits_cfg.val = bits_cfg;
  957. cfg->bits_cfg.chan_bits = cfg->bits_cfg.chan_bits < cfg->bits_cfg.sample_bits ?
  958. cfg->bits_cfg.sample_bits : cfg->bits_cfg.chan_bits;
  959. chan_bits = cfg->bits_cfg.chan_bits;
  960. data_bits = cfg->bits_cfg.sample_bits;
  961. #if SOC_I2S_SUPPORTS_TDM
  962. cfg->chan_mask = ch & 0xFFFF;
  963. active_chan_num = i2s_get_active_chan_num(cfg);
  964. chan_num = cfg->total_chan;
  965. #else
  966. active_chan_num = i2s_get_active_chan_num(cfg);
  967. chan_num = ch == I2S_CHANNEL_MONO ? 2 : active_chan_num;
  968. #endif
  969. ESP_RETURN_ON_FALSE((i2s_num < I2S_NUM_MAX), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
  970. if ((data_bits % 8 != 0) || (data_bits > I2S_BITS_PER_SAMPLE_32BIT)) {
  971. ESP_LOGE(TAG, "Invalid bits per sample");
  972. return ESP_ERR_INVALID_ARG;
  973. }
  974. //Stop I2S
  975. i2s_stop(i2s_num);
// Wait for all on-going read/write operations to finish
  977. if ((p_i2s[i2s_num]->mode & I2S_MODE_TX) && p_i2s[i2s_num]->tx) {
  978. xSemaphoreTake(p_i2s[i2s_num]->tx->mux, (portTickType)portMAX_DELAY);
  979. }
  980. if ((p_i2s[i2s_num]->mode & I2S_MODE_RX) && p_i2s[i2s_num]->rx) {
  981. xSemaphoreTake(p_i2s[i2s_num]->rx->mux, (portTickType)portMAX_DELAY);
  982. }
  983. //malloc DMA buffer
  984. if (i2s_alloc_dma_buffer(i2s_num, data_bits, active_chan_num) != ESP_OK ) {
  985. return ESP_ERR_NO_MEM;
  986. }
  987. uint32_t i2s_clk = 0; // I2S source clock
uint32_t i2s_bck = 0; // I2S bit clock (BCK)
  989. uint32_t bck_div = 0; // I2S bck div
  990. //calculate bck_div, f_bck and select source clock
  991. if (i2s_fbclk_cal(i2s_num, rate, chan_num, chan_bits, &i2s_clk, &i2s_bck, &bck_div) != ESP_OK) {
  992. return ESP_FAIL;
  993. }
  994. //configure i2s clock
  995. if (p_i2s[i2s_num]->mode & I2S_MODE_TX) {
  996. i2s_hal_tx_clock_config(&(p_i2s[i2s_num]->hal), i2s_clk, i2s_bck, bck_div);
  997. i2s_hal_set_tx_sample_bit(&(p_i2s[i2s_num]->hal), chan_bits, data_bits);
// Release the TX mutex taken above
  999. if (p_i2s[i2s_num]->tx) {
  1000. xSemaphoreGive(p_i2s[i2s_num]->tx->mux);
  1001. }
  1002. i2s_hal_tx_set_channel_style(&(p_i2s[i2s_num]->hal), &(p_i2s[i2s_num]->hal_cfg));
  1003. }
  1004. if (p_i2s[i2s_num]->mode & I2S_MODE_RX) {
  1005. i2s_hal_rx_clock_config(&(p_i2s[i2s_num]->hal), i2s_clk, i2s_bck, bck_div);
  1006. i2s_hal_set_rx_sample_bit(&(p_i2s[i2s_num]->hal), chan_bits, data_bits);
// Release the RX mutex taken above
  1008. if (p_i2s[i2s_num]->rx) {
  1009. xSemaphoreGive(p_i2s[i2s_num]->rx->mux);
  1010. }
  1011. i2s_hal_rx_set_channel_style(&(p_i2s[i2s_num]->hal), &(p_i2s[i2s_num]->hal_cfg));
  1012. }
// Reset the message queues to avoid receiving stale values, because the old DMA queues have been destroyed
  1014. if (p_i2s[i2s_num]->tx) {
  1015. xQueueReset(p_i2s[i2s_num]->tx->queue);
  1016. }
  1017. if (p_i2s[i2s_num]->rx) {
  1018. xQueueReset(p_i2s[i2s_num]->rx->queue);
  1019. }
  1020. //I2S start
  1021. i2s_start(i2s_num);
  1022. p_i2s[i2s_num]->sample_rate = rate;
  1023. return ESP_OK;
  1024. }
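/* Illustrative call (values are examples, not from the original source):
 *   i2s_set_clk(I2S_NUM_0, 44100, I2S_BITS_PER_SAMPLE_16BIT, I2S_CHANNEL_STEREO);
 * reconfigures the port for 44.1 kHz, 16-bit stereo and restarts it.
 */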
  1025. /**************************************************************
  1026. * I2S driver operation *
  1027. * - i2s_start *
  1028. * - i2s_stop *
  1029. * - i2s_driver_install *
  1030. * - i2s_write *
  1031. * - i2s_write_expand *
  1032. * - i2s_read *
  1033. **************************************************************/
  1034. esp_err_t i2s_start(i2s_port_t i2s_num)
  1035. {
  1036. ESP_RETURN_ON_FALSE((i2s_num < I2S_NUM_MAX), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
  1037. //start DMA link
  1038. I2S_ENTER_CRITICAL(i2s_num);
  1039. if (p_i2s[i2s_num]->mode & I2S_MODE_TX) {
  1040. i2s_tx_reset(i2s_num);
  1041. i2s_tx_start(i2s_num);
  1042. }
  1043. if (p_i2s[i2s_num]->mode & I2S_MODE_RX) {
  1044. i2s_rx_reset(i2s_num);
  1045. i2s_rx_start(i2s_num);
  1046. }
  1047. #if !SOC_GDMA_SUPPORTED
  1048. esp_intr_enable(p_i2s[i2s_num]->i2s_isr_handle);
  1049. #endif
  1050. I2S_EXIT_CRITICAL(i2s_num);
  1051. return ESP_OK;
  1052. }
  1053. esp_err_t i2s_stop(i2s_port_t i2s_num)
  1054. {
  1055. ESP_RETURN_ON_FALSE((i2s_num < I2S_NUM_MAX), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
  1056. I2S_ENTER_CRITICAL(i2s_num);
  1057. #if !SOC_GDMA_SUPPORTED
  1058. esp_intr_disable(p_i2s[i2s_num]->i2s_isr_handle);
  1059. #endif
  1060. if (p_i2s[i2s_num]->mode & I2S_MODE_TX) {
  1061. i2s_tx_stop(i2s_num);
  1062. }
  1063. if (p_i2s[i2s_num]->mode & I2S_MODE_RX) {
  1064. i2s_rx_stop(i2s_num);
  1065. }
  1066. #if !SOC_GDMA_SUPPORTED
  1067. i2s_hal_clear_intr_status(&(p_i2s[i2s_num]->hal), I2S_INTR_MAX);
  1068. #endif
  1069. I2S_EXIT_CRITICAL(i2s_num);
  1070. return ESP_OK;
  1071. }
  1072. esp_err_t i2s_driver_install(i2s_port_t i2s_num, const i2s_config_t *i2s_config, int queue_size, void *i2s_queue)
  1073. {
  1074. esp_err_t ret = ESP_FAIL;
  1075. ESP_RETURN_ON_FALSE((i2s_num < I2S_NUM_MAX), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
ESP_RETURN_ON_FALSE((i2s_config != NULL), ESP_ERR_INVALID_ARG, TAG, "I2S configuration must not be NULL");
ESP_RETURN_ON_FALSE((i2s_config->dma_buf_count >= 2 && i2s_config->dma_buf_count <= 128), ESP_ERR_INVALID_ARG, TAG, "I2S dma_buf_count must be within [2, 128]");
ESP_RETURN_ON_FALSE((i2s_config->dma_buf_len >= 8 && i2s_config->dma_buf_len <= 1024), ESP_ERR_INVALID_ARG, TAG, "I2S dma_buf_len must be within [8, 1024]");
  1079. if (p_i2s[i2s_num] != NULL) {
  1080. ESP_LOGW(TAG, "I2S driver already installed");
  1081. return ESP_OK;
  1082. }
  1083. p_i2s[i2s_num] = (i2s_obj_t *) calloc(1, sizeof(i2s_obj_t));
  1084. if (p_i2s[i2s_num] == NULL) {
  1085. ESP_LOGE(TAG, "Malloc I2S driver error");
  1086. return ESP_ERR_NO_MEM;
  1087. }
  1088. portMUX_TYPE i2s_spinlock_unlocked[1] = {portMUX_INITIALIZER_UNLOCKED};
  1089. for (int x = 0; x < I2S_NUM_MAX; x++) {
  1090. i2s_spinlock[x] = i2s_spinlock_unlocked[0];
  1091. }
// Make sure the peripheral is enabled before any hardware register is accessed.
  1093. periph_module_enable(i2s_periph_signal[i2s_num].module);
  1094. i2s_hal_init(&(p_i2s[i2s_num]->hal), i2s_num);
  1095. // Set I2S HAL configurations
  1096. p_i2s[i2s_num]->hal_cfg.mode = i2s_config->mode;
  1097. p_i2s[i2s_num]->hal_cfg.sample_rate = i2s_config->sample_rate;
  1098. p_i2s[i2s_num]->hal_cfg.comm_fmt = i2s_config->communication_format;
  1099. p_i2s[i2s_num]->hal_cfg.chan_fmt = i2s_config->channel_format;
  1100. p_i2s[i2s_num]->hal_cfg.bits_cfg.sample_bits = i2s_config->bits_per_sample;
  1101. p_i2s[i2s_num]->hal_cfg.bits_cfg.chan_bits = i2s_config->bits_per_chan;
  1102. #if SOC_I2S_SUPPORTS_TDM
  1103. int active_chan = 0;
  1104. switch (i2s_config->channel_format) {
  1105. case I2S_CHANNEL_FMT_RIGHT_LEFT:
  1106. case I2S_CHANNEL_FMT_ALL_RIGHT:
  1107. case I2S_CHANNEL_FMT_ALL_LEFT:
  1108. p_i2s[i2s_num]->hal_cfg.chan_mask = I2S_TDM_ACTIVE_CH0 | I2S_TDM_ACTIVE_CH1;
  1109. p_i2s[i2s_num]->hal_cfg.total_chan = 2;
  1110. active_chan = 2;
  1111. break;
  1112. case I2S_CHANNEL_FMT_ONLY_RIGHT:
  1113. p_i2s[i2s_num]->hal_cfg.chan_mask = i2s_config->left_align ? I2S_TDM_ACTIVE_CH1 : I2S_TDM_ACTIVE_CH0;
  1114. p_i2s[i2s_num]->hal_cfg.total_chan = 1;
  1115. active_chan = 1;
  1116. break;
  1117. case I2S_CHANNEL_FMT_ONLY_LEFT:
  1118. p_i2s[i2s_num]->hal_cfg.chan_mask = i2s_config->left_align ? I2S_TDM_ACTIVE_CH0 : I2S_TDM_ACTIVE_CH1;
  1119. p_i2s[i2s_num]->hal_cfg.total_chan = 1;
  1120. active_chan = 1;
  1121. break;
  1122. case I2S_CHANNEL_FMT_MULTIPLE:
ESP_RETURN_ON_FALSE((i2s_config->chan_mask != 0), ESP_ERR_INVALID_ARG, TAG, "all I2S channels are disabled");
  1124. p_i2s[i2s_num]->hal_cfg.chan_mask = i2s_config->chan_mask;
  1125. i2s_get_active_chan_num(&p_i2s[i2s_num]->hal_cfg);
  1126. break;
  1127. default:
ESP_LOGE(TAG, "wrong I2S channel format, uninstalling I2S.");
  1129. goto err;
  1130. }
  1131. p_i2s[i2s_num]->hal_cfg.left_align = i2s_config->left_align;
  1132. p_i2s[i2s_num]->hal_cfg.big_edin = i2s_config->big_edin;
  1133. p_i2s[i2s_num]->hal_cfg.bit_order_msb = i2s_config->bit_order_msb;
  1134. p_i2s[i2s_num]->hal_cfg.skip_msk = i2s_config->skip_msk;
  1135. #endif
  1136. // Set I2S driver configurations
  1137. p_i2s[i2s_num]->i2s_num = i2s_num;
  1138. p_i2s[i2s_num]->mode = i2s_config->mode;
  1139. p_i2s[i2s_num]->channel_num = i2s_get_active_chan_num(&p_i2s[i2s_num]->hal_cfg);
  1140. p_i2s[i2s_num]->i2s_queue = i2s_queue;
  1141. p_i2s[i2s_num]->bits_per_sample = 0;
  1142. p_i2s[i2s_num]->bytes_per_sample = 0; // Not initialized yet
  1143. p_i2s[i2s_num]->dma_buf_count = i2s_config->dma_buf_count;
  1144. p_i2s[i2s_num]->dma_buf_len = i2s_config->dma_buf_len;
  1145. p_i2s[i2s_num]->mclk_multiple = i2s_config->mclk_multiple;
  1146. #ifdef CONFIG_PM_ENABLE
  1147. #if SOC_I2S_SUPPORTS_APLL
  1148. if (i2s_config->use_apll) {
  1149. ret = esp_pm_lock_create(ESP_PM_NO_LIGHT_SLEEP, 0, "i2s_driver", &p_i2s[i2s_num]->pm_lock);
  1150. } else
  1151. #endif // SOC_I2S_SUPPORTS_APLL
  1152. {
  1153. ret = esp_pm_lock_create(ESP_PM_APB_FREQ_MAX, 0, "i2s_driver", &p_i2s[i2s_num]->pm_lock);
  1154. }
  1155. if (ret != ESP_OK) {
  1156. free(p_i2s[i2s_num]);
  1157. p_i2s[i2s_num] = NULL;
  1158. ESP_LOGE(TAG, "I2S pm lock error");
  1159. return ret;
  1160. }
  1161. #endif //CONFIG_PM_ENABLE
#if SOC_GDMA_SUPPORTED
    ret = ESP_OK;
    gdma_trigger_t trig = {.periph = GDMA_TRIG_PERIPH_I2S};
#if SOC_I2S_NUM > 1
    trig.instance_id = (i2s_num == I2S_NUM_0) ? SOC_GDMA_TRIG_PERIPH_I2S0 : SOC_GDMA_TRIG_PERIPH_I2S1;
#else
    trig.instance_id = SOC_GDMA_TRIG_PERIPH_I2S0;
#endif
    gdma_channel_alloc_config_t dma_cfg = {.flags.reserve_sibling = 1};
    if (p_i2s[i2s_num]->mode & I2S_MODE_RX) {
        dma_cfg.direction = GDMA_CHANNEL_DIRECTION_RX;
        ESP_GOTO_ON_ERROR(gdma_new_channel(&dma_cfg, &p_i2s[i2s_num]->rx_dma_chan), err, TAG, "Register rx dma channel error");
        ESP_GOTO_ON_ERROR(gdma_connect(p_i2s[i2s_num]->rx_dma_chan, trig), err, TAG, "Connect rx dma channel error");
        gdma_rx_event_callbacks_t cb = {.on_recv_eof = i2s_dma_rx_callback};
        gdma_register_rx_event_callbacks(p_i2s[i2s_num]->rx_dma_chan, &cb, p_i2s[i2s_num]);
    }
    if (p_i2s[i2s_num]->mode & I2S_MODE_TX) {
        dma_cfg.direction = GDMA_CHANNEL_DIRECTION_TX;
        ESP_GOTO_ON_ERROR(gdma_new_channel(&dma_cfg, &p_i2s[i2s_num]->tx_dma_chan), err, TAG, "Register tx dma channel error");
        ESP_GOTO_ON_ERROR(gdma_connect(p_i2s[i2s_num]->tx_dma_chan, trig), err, TAG, "Connect tx dma channel error");
        gdma_tx_event_callbacks_t cb = {.on_trans_eof = i2s_dma_tx_callback};
        gdma_register_tx_event_callbacks(p_i2s[i2s_num]->tx_dma_chan, &cb, p_i2s[i2s_num]);
    }
#else
    // Install the I2S interrupt handler
    ret = esp_intr_alloc(i2s_periph_signal[i2s_num].irq, i2s_config->intr_alloc_flags, i2s_intr_handler_default, p_i2s[i2s_num], &p_i2s[i2s_num]->i2s_isr_handle);
    ESP_GOTO_ON_ERROR(ret, err, TAG, "Register I2S Interrupt error");
#endif // SOC_GDMA_SUPPORTED
    i2s_stop(i2s_num);
    p_i2s[i2s_num]->use_apll = i2s_config->use_apll;
    p_i2s[i2s_num]->fixed_mclk = i2s_config->fixed_mclk;
    p_i2s[i2s_num]->tx_desc_auto_clear = i2s_config->tx_desc_auto_clear;
    ret = i2s_param_config(i2s_num);
    ESP_GOTO_ON_ERROR(ret, err, TAG, "I2S param configure error");
    if (i2s_queue) {
        p_i2s[i2s_num]->i2s_queue = xQueueCreate(queue_size, sizeof(i2s_event_t));
        ESP_GOTO_ON_FALSE((p_i2s[i2s_num]->i2s_queue != NULL), ESP_ERR_NO_MEM, err, TAG, "I2S queue create failed");
        *((QueueHandle_t *) i2s_queue) = p_i2s[i2s_num]->i2s_queue;
        ESP_LOGI(TAG, "queue free spaces: %d", uxQueueSpacesAvailable(p_i2s[i2s_num]->i2s_queue));
    } else {
        p_i2s[i2s_num]->i2s_queue = NULL;
    }
    // Set clock and start
#if SOC_I2S_SUPPORTS_TDM
    ret = i2s_set_clk(i2s_num, i2s_config->sample_rate,
                      p_i2s[i2s_num]->hal_cfg.bits_cfg.val,
                      (i2s_channel_t)active_chan);
#else
    ret = i2s_set_clk(i2s_num, i2s_config->sample_rate,
                      p_i2s[i2s_num]->hal_cfg.bits_cfg.val,
                      I2S_CHANNEL_STEREO);
#endif
    ESP_GOTO_ON_ERROR(ret, err, TAG, "I2S set clock failed");
    return ret;
err:
#ifdef CONFIG_PM_ENABLE
    if (p_i2s[i2s_num]->pm_lock) {
        esp_pm_lock_delete(p_i2s[i2s_num]->pm_lock);
        p_i2s[i2s_num]->pm_lock = NULL;
    }
#endif
    i2s_driver_uninstall(i2s_num);
    return ret;
}
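/*
 * i2s_driver_uninstall: stop the port, release the DMA channels (or free the legacy
 * ISR), destroy the TX/RX DMA buffer queues and the event queue, switch back to the
 * default PLL clock source if APLL was in use, delete the PM lock and finally free
 * the driver object.
 */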
esp_err_t i2s_driver_uninstall(i2s_port_t i2s_num)
{
    ESP_RETURN_ON_FALSE((i2s_num < I2S_NUM_MAX), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
    if (p_i2s[i2s_num] == NULL) {
        ESP_LOGI(TAG, "already uninstalled");
        return ESP_OK;
    }
    i2s_stop(i2s_num);
#if SOC_I2S_SUPPORTS_ADC_DAC
    i2s_set_dac_mode(I2S_DAC_CHANNEL_DISABLE);
#endif
#if SOC_GDMA_SUPPORTED
    if (p_i2s[i2s_num]->mode & I2S_MODE_TX) {
        gdma_disconnect(p_i2s[i2s_num]->tx_dma_chan);
        gdma_del_channel(p_i2s[i2s_num]->tx_dma_chan);
    }
    if (p_i2s[i2s_num]->mode & I2S_MODE_RX) {
        gdma_disconnect(p_i2s[i2s_num]->rx_dma_chan);
        gdma_del_channel(p_i2s[i2s_num]->rx_dma_chan);
    }
#else
    esp_intr_free(p_i2s[i2s_num]->i2s_isr_handle);
#endif
    if (p_i2s[i2s_num]->tx != NULL && p_i2s[i2s_num]->mode & I2S_MODE_TX) {
        i2s_destroy_dma_queue(i2s_num, p_i2s[i2s_num]->tx);
        p_i2s[i2s_num]->tx = NULL;
    }
    if (p_i2s[i2s_num]->rx != NULL && p_i2s[i2s_num]->mode & I2S_MODE_RX) {
        i2s_destroy_dma_queue(i2s_num, p_i2s[i2s_num]->rx);
        p_i2s[i2s_num]->rx = NULL;
    }
    if (p_i2s[i2s_num]->i2s_queue) {
        vQueueDelete(p_i2s[i2s_num]->i2s_queue);
        p_i2s[i2s_num]->i2s_queue = NULL;
    }
#if SOC_I2S_SUPPORTS_APLL
    if (p_i2s[i2s_num]->use_apll) {
        // switch back to PLL clock source
        i2s_hal_set_clock_src(&(p_i2s[i2s_num]->hal), I2S_CLK_D2CLK);
        rtc_clk_apll_enable(0, 0, 0, 0, 0);
    }
#endif
#ifdef CONFIG_PM_ENABLE
    if (p_i2s[i2s_num]->pm_lock) {
        esp_pm_lock_delete(p_i2s[i2s_num]->pm_lock);
    }
#endif
    free(p_i2s[i2s_num]);
    p_i2s[i2s_num] = NULL;
#if !SOC_GDMA_SUPPORTED
    periph_module_disable(i2s_periph_signal[i2s_num].module);
#endif
    return ESP_OK;
}
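/*
 * i2s_write copies caller data into the DMA buffers handed back one at a time by the
 * TX queue; it blocks up to ticks_to_wait for each buffer and reports how much data
 * was actually queued through *bytes_written.
 *
 * Illustrative usage only (the port, the `samples` buffer and a prior successful
 * i2s_driver_install() are assumptions, not part of this file):
 *
 *     size_t written = 0;
 *     esp_err_t err = i2s_write(I2S_NUM_0, samples, sizeof(samples), &written, portMAX_DELAY);
 *     if (err != ESP_OK || written < sizeof(samples)) {
 *         // handle a timeout or partial write here
 *     }
 */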
esp_err_t i2s_write(i2s_port_t i2s_num, const void *src, size_t size, size_t *bytes_written, TickType_t ticks_to_wait)
{
    char *data_ptr, *src_byte;
    size_t bytes_can_write;
    *bytes_written = 0;
    ESP_RETURN_ON_FALSE((i2s_num < I2S_NUM_MAX), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
    ESP_RETURN_ON_FALSE((p_i2s[i2s_num]->tx), ESP_ERR_INVALID_ARG, TAG, "tx NULL");
    xSemaphoreTake(p_i2s[i2s_num]->tx->mux, (portTickType)portMAX_DELAY);
#ifdef CONFIG_PM_ENABLE
    esp_pm_lock_acquire(p_i2s[i2s_num]->pm_lock);
#endif
    src_byte = (char *)src;
    while (size > 0) {
        // Get the next writable DMA buffer from the TX queue once the current one is full (or none is held yet)
        if (p_i2s[i2s_num]->tx->rw_pos == p_i2s[i2s_num]->tx->buf_size || p_i2s[i2s_num]->tx->curr_ptr == NULL) {
            if (xQueueReceive(p_i2s[i2s_num]->tx->queue, &p_i2s[i2s_num]->tx->curr_ptr, ticks_to_wait) == pdFALSE) {
                break;
            }
            p_i2s[i2s_num]->tx->rw_pos = 0;
        }
        ESP_LOGD(TAG, "size: %d, rw_pos: %d, buf_size: %d, curr_ptr: %d", size, p_i2s[i2s_num]->tx->rw_pos, p_i2s[i2s_num]->tx->buf_size, (int)p_i2s[i2s_num]->tx->curr_ptr);
        data_ptr = (char *)p_i2s[i2s_num]->tx->curr_ptr;
        data_ptr += p_i2s[i2s_num]->tx->rw_pos;
        bytes_can_write = p_i2s[i2s_num]->tx->buf_size - p_i2s[i2s_num]->tx->rw_pos;
        if (bytes_can_write > size) {
            bytes_can_write = size;
        }
        memcpy(data_ptr, src_byte, bytes_can_write);
        size -= bytes_can_write;
        src_byte += bytes_can_write;
        p_i2s[i2s_num]->tx->rw_pos += bytes_can_write;
        (*bytes_written) += bytes_can_write;
    }
#ifdef CONFIG_PM_ENABLE
    esp_pm_lock_release(p_i2s[i2s_num]->pm_lock);
#endif
    xSemaphoreGive(p_i2s[i2s_num]->tx->mux);
    return ESP_OK;
}
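/*
 * i2s_write_expand writes data whose source sample width (src_bits) is smaller than
 * the width the bus expects (aim_bits): each source sample is copied into the upper
 * bytes of an aim_bits-wide slot and the remaining low bytes are zero-filled, e.g.
 * expanding 16-bit samples into 32-bit slots before they reach the DMA buffers.
 */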
esp_err_t i2s_write_expand(i2s_port_t i2s_num, const void *src, size_t size, size_t src_bits, size_t aim_bits, size_t *bytes_written, TickType_t ticks_to_wait)
{
    char *data_ptr;
    int bytes_can_write, tail;
    int src_bytes, aim_bytes, zero_bytes;
    *bytes_written = 0;
    ESP_RETURN_ON_FALSE((i2s_num < I2S_NUM_MAX), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
    ESP_RETURN_ON_FALSE((size > 0), ESP_ERR_INVALID_ARG, TAG, "size must be greater than zero");
    ESP_RETURN_ON_FALSE((aim_bits >= src_bits), ESP_ERR_INVALID_ARG, TAG, "aim_bits mustn't be less than src_bits");
    ESP_RETURN_ON_FALSE((p_i2s[i2s_num]->tx), ESP_ERR_INVALID_ARG, TAG, "tx NULL");
    if (src_bits < I2S_BITS_PER_SAMPLE_8BIT || aim_bits < I2S_BITS_PER_SAMPLE_8BIT) {
        ESP_LOGE(TAG, "bits mustn't be less than 8, src_bits %d aim_bits %d", src_bits, aim_bits);
        return ESP_ERR_INVALID_ARG;
    }
    if (src_bits > I2S_BITS_PER_SAMPLE_32BIT || aim_bits > I2S_BITS_PER_SAMPLE_32BIT) {
        ESP_LOGE(TAG, "bits mustn't be greater than 32, src_bits %d aim_bits %d", src_bits, aim_bits);
        return ESP_ERR_INVALID_ARG;
    }
    if ((src_bits == I2S_BITS_PER_SAMPLE_16BIT || src_bits == I2S_BITS_PER_SAMPLE_32BIT) && (size % 2 != 0)) {
        ESP_LOGE(TAG, "size must be an even number while src_bits is even, src_bits %d size %d", src_bits, size);
        return ESP_ERR_INVALID_ARG;
    }
    if (src_bits == I2S_BITS_PER_SAMPLE_24BIT && (size % 3 != 0)) {
        ESP_LOGE(TAG, "size must be a multiple of 3 while src_bits is 24, size %d", size);
        return ESP_ERR_INVALID_ARG;
    }
    src_bytes = src_bits / 8;
    aim_bytes = aim_bits / 8;
    zero_bytes = aim_bytes - src_bytes;
    xSemaphoreTake(p_i2s[i2s_num]->tx->mux, (portTickType)portMAX_DELAY);
    size = size * aim_bytes / src_bytes;
    ESP_LOGD(TAG, "aim_bytes %d src_bytes %d size %d", aim_bytes, src_bytes, size);
    while (size > 0) {
        if (p_i2s[i2s_num]->tx->rw_pos == p_i2s[i2s_num]->tx->buf_size || p_i2s[i2s_num]->tx->curr_ptr == NULL) {
            if (xQueueReceive(p_i2s[i2s_num]->tx->queue, &p_i2s[i2s_num]->tx->curr_ptr, ticks_to_wait) == pdFALSE) {
                break;
            }
            p_i2s[i2s_num]->tx->rw_pos = 0;
        }
        data_ptr = (char *)p_i2s[i2s_num]->tx->curr_ptr;
        data_ptr += p_i2s[i2s_num]->tx->rw_pos;
        bytes_can_write = p_i2s[i2s_num]->tx->buf_size - p_i2s[i2s_num]->tx->rw_pos;
        if (bytes_can_write > (int)size) {
            bytes_can_write = size;
        }
        // Only fill whole aim_bytes-wide slots in this pass
        tail = bytes_can_write % aim_bytes;
        bytes_can_write = bytes_can_write - tail;
        memset(data_ptr, 0, bytes_can_write);
        // Zero-fill each slot, then place the source sample in its upper bytes
        for (int j = 0; j < bytes_can_write; j += (aim_bytes - zero_bytes)) {
            j += zero_bytes;
            memcpy(&data_ptr[j], (const char *)(src + *bytes_written), aim_bytes - zero_bytes);
            (*bytes_written) += (aim_bytes - zero_bytes);
        }
        size -= bytes_can_write;
        p_i2s[i2s_num]->tx->rw_pos += bytes_can_write;
    }
    xSemaphoreGive(p_i2s[i2s_num]->tx->mux);
    return ESP_OK;
}
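/*
 * i2s_read drains received data from the DMA buffers handed back by the RX queue into
 * the caller's buffer; if no buffer becomes available within ticks_to_wait it stops
 * early and returns ESP_ERR_TIMEOUT, with *bytes_read reporting how much was copied.
 *
 * Illustrative usage only (the port and `buf` are assumptions, not part of this file):
 *
 *     uint8_t buf[512];
 *     size_t got = 0;
 *     if (i2s_read(I2S_NUM_0, buf, sizeof(buf), &got, pdMS_TO_TICKS(100)) == ESP_OK) {
 *         // process `got` bytes from buf
 *     }
 */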
esp_err_t i2s_read(i2s_port_t i2s_num, void *dest, size_t size, size_t *bytes_read, TickType_t ticks_to_wait)
{
    esp_err_t ret = ESP_OK;
    char *data_ptr, *dest_byte;
    int bytes_can_read;
    *bytes_read = 0;
    dest_byte = (char *)dest;
    ESP_RETURN_ON_FALSE((i2s_num < I2S_NUM_MAX), ESP_ERR_INVALID_ARG, TAG, "i2s_num error");
    ESP_RETURN_ON_FALSE((p_i2s[i2s_num]->rx), ESP_ERR_INVALID_ARG, TAG, "rx NULL");
    xSemaphoreTake(p_i2s[i2s_num]->rx->mux, (portTickType)portMAX_DELAY);
#ifdef CONFIG_PM_ENABLE
    esp_pm_lock_acquire(p_i2s[i2s_num]->pm_lock);
#endif
    while (size > 0) {
        if (p_i2s[i2s_num]->rx->rw_pos == p_i2s[i2s_num]->rx->buf_size || p_i2s[i2s_num]->rx->curr_ptr == NULL) {
            if (xQueueReceive(p_i2s[i2s_num]->rx->queue, &p_i2s[i2s_num]->rx->curr_ptr, ticks_to_wait) == pdFALSE) {
                ret = ESP_ERR_TIMEOUT;
                break;
            }
            p_i2s[i2s_num]->rx->rw_pos = 0;
        }
        data_ptr = (char *)p_i2s[i2s_num]->rx->curr_ptr;
        data_ptr += p_i2s[i2s_num]->rx->rw_pos;
        bytes_can_read = p_i2s[i2s_num]->rx->buf_size - p_i2s[i2s_num]->rx->rw_pos;
        if (bytes_can_read > (int)size) {
            bytes_can_read = size;
        }
        memcpy(dest_byte, data_ptr, bytes_can_read);
        size -= bytes_can_read;
        dest_byte += bytes_can_read;
        p_i2s[i2s_num]->rx->rw_pos += bytes_can_read;
        (*bytes_read) += bytes_can_read;
    }
#ifdef CONFIG_PM_ENABLE
    esp_pm_lock_release(p_i2s[i2s_num]->pm_lock);
#endif
    xSemaphoreGive(p_i2s[i2s_num]->rx->mux);
    return ret;
}