@@ -345,7 +345,6 @@ static esp_err_t i2s_register_channel(i2s_controller_t *i2s_obj, i2s_dir_t dir,
     new_chan->callbacks.on_send_q_ovf = NULL;
     new_chan->dma.rw_pos = 0;
     new_chan->dma.curr_ptr = NULL;
-    new_chan->dma.curr_desc = NULL;
     new_chan->start = NULL;
     new_chan->stop = NULL;
     new_chan->reserve_gpio_mask = 0;
@@ -1193,8 +1192,11 @@ esp_err_t i2s_channel_enable(i2s_chan_handle_t handle)
 #endif
     handle->start(handle);
     handle->state = I2S_CHAN_STATE_RUNNING;
-    /* Reset queue */
-    xQueueReset(handle->msg_queue);
+    if (handle->dir == I2S_DIR_RX) {
+        /* The RX queue is reset when the channel is enabled,
+         * in case stale data was received while the channel was disabled */
+        xQueueReset(handle->msg_queue);
+    }
     xSemaphoreGive(handle->mutex);
     /* Give the binary semaphore to enable reading / writing task */
     xSemaphoreGive(handle->binary);
@@ -1222,9 +1224,13 @@ esp_err_t i2s_channel_disable(i2s_chan_handle_t handle)
     xSemaphoreTake(handle->binary, portMAX_DELAY);
     /* Reset the descriptor pointer */
     handle->dma.curr_ptr = NULL;
-    handle->dma.curr_desc = NULL;
     handle->dma.rw_pos = 0;
     handle->stop(handle);
+    if (handle->dir == I2S_DIR_TX) {
+        /* The TX queue is reset when the channel is disabled,
+         * in case the queue is wrongly reset after the data has been preloaded */
+        xQueueReset(handle->msg_queue);
+    }
 #if CONFIG_PM_ENABLE
     esp_pm_lock_release(handle->pm_lock);
 #endif
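
/* Usage sketch (not part of this diff): with the change above, the RX message queue is
 * flushed only when the channel is (re-)enabled, so descriptors queued while the channel
 * was disabled are discarded before reading resumes. The rx_handle below is assumed to be
 * an already-initialized RX channel handle. */
uint8_t rx_buf[2048];
size_t bytes_read = 0;
ESP_ERROR_CHECK(i2s_channel_enable(rx_handle));   /* stale descriptors from the disabled period are dropped here */
ESP_ERROR_CHECK(i2s_channel_read(rx_handle, rx_buf, sizeof(rx_buf), &bytes_read, 1000));
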
@@ -1246,18 +1252,30 @@ esp_err_t i2s_channel_preload_data(i2s_chan_handle_t tx_handle, const void *src,
     uint8_t *data_ptr = (uint8_t *)src;
     size_t remain_bytes = size;
     size_t total_loaded_bytes = 0;
+    esp_err_t ret = ESP_OK;

     xSemaphoreTake(tx_handle->mutex, portMAX_DELAY);

     /* The pre-load data will be loaded from the first descriptor */
-    if (tx_handle->dma.curr_desc == NULL) {
-        tx_handle->dma.curr_desc = tx_handle->dma.desc[0];
+    if (tx_handle->dma.curr_ptr == NULL) {
+        xQueueReset(tx_handle->msg_queue);
+        /* Push the rest of the descriptors to the queue */
+        for (int i = 1; i < tx_handle->dma.desc_num; i++) {
+            ESP_GOTO_ON_FALSE(xQueueSend(tx_handle->msg_queue, &(tx_handle->dma.desc[i]->buf), 0) == pdTRUE,
+                              ESP_FAIL, err, TAG, "Failed to push the descriptor to the queue");
+        }
         tx_handle->dma.curr_ptr = (void *)tx_handle->dma.desc[0]->buf;
         tx_handle->dma.rw_pos = 0;
     }

     /* Loop until no bytes in the source buffer remain or the descriptors are full */
     while (remain_bytes) {
+        if (tx_handle->dma.rw_pos == tx_handle->dma.buf_size) {
+            if (xQueueReceive(tx_handle->msg_queue, &(tx_handle->dma.curr_ptr), 0) == pdFALSE) {
+                break;
+            }
+            tx_handle->dma.rw_pos = 0;
+        }
         size_t bytes_can_load = remain_bytes > (tx_handle->dma.buf_size - tx_handle->dma.rw_pos) ?
                                 (tx_handle->dma.buf_size - tx_handle->dma.rw_pos) : remain_bytes;
         /* When all the descriptors have loaded data, no more bytes can be loaded, break directly */
@@ -1273,25 +1291,13 @@ esp_err_t i2s_channel_preload_data(i2s_chan_handle_t tx_handle, const void *src,
         total_loaded_bytes += bytes_can_load;     // Add to the total loaded bytes
         remain_bytes -= bytes_can_load;           // Update the remaining bytes to be loaded
         tx_handle->dma.rw_pos += bytes_can_load;  // Move forward the dma buffer position
-        /* When the current position reach the end of the dma buffer */
-        if (tx_handle->dma.rw_pos == tx_handle->dma.buf_size) {
-            /* If the next descriptor is not the first descriptor, keep load to the first descriptor
-             * otherwise all descriptor has been loaded, break directly, the dma buffer position
-             * will remain at the end of the last dma buffer */
-            if (STAILQ_NEXT((lldesc_t *)tx_handle->dma.curr_desc, qe) != tx_handle->dma.desc[0]) {
-                tx_handle->dma.curr_desc = STAILQ_NEXT((lldesc_t *)tx_handle->dma.curr_desc, qe);
-                tx_handle->dma.curr_ptr = (void *)(((lldesc_t *)tx_handle->dma.curr_desc)->buf);
-                tx_handle->dma.rw_pos = 0;
-            } else {
-                break;
-            }
-        }
     }
     *bytes_loaded = total_loaded_bytes;

+err:
     xSemaphoreGive(tx_handle->mutex);

-    return ESP_OK;
+    return ret;
 }

 esp_err_t i2s_channel_write(i2s_chan_handle_t handle, const void *src, size_t size, size_t *bytes_written, uint32_t timeout_ms)
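
/* Usage sketch (not part of this diff): with the reworked preload path above, a caller can
 * fill the TX DMA buffers before the first enable and use bytes_loaded to know how much fit;
 * enabling the channel no longer resets the TX message queue, so the preloaded descriptors
 * are kept. tx_handle, audio_data (const uint8_t *) and audio_size are assumed to be
 * provided by the application, with the channel in READY state (initialized, not enabled). */
size_t offset = 0;
size_t loaded = 0;
do {
    ESP_ERROR_CHECK(i2s_channel_preload_data(tx_handle, audio_data + offset, audio_size - offset, &loaded));
    offset += loaded;                             /* advance by however much actually fit */
} while (loaded > 0 && offset < audio_size);      /* loaded == 0 means all DMA buffers are full */

ESP_ERROR_CHECK(i2s_channel_enable(tx_handle));   /* TX queue is not reset here, preloaded data is kept */

if (offset < audio_size) {                        /* stream whatever did not fit into the DMA buffers */
    size_t written = 0;
    ESP_ERROR_CHECK(i2s_channel_write(tx_handle, audio_data + offset, audio_size - offset, &written, 1000));
}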