 #define XILINX_DMA_MAX_TRANS_LEN_MAX	23
 #define XILINX_DMA_V2_MAX_TRANS_LEN_MAX	26
 #define XILINX_DMA_CR_COALESCE_MAX	GENMASK(23, 16)
+#define XILINX_DMA_CR_DELAY_MAX	GENMASK(31, 24)
 #define XILINX_DMA_CR_CYCLIC_BD_EN_MASK	BIT(4)
 #define XILINX_DMA_CR_COALESCE_SHIFT	16
+#define XILINX_DMA_CR_DELAY_SHIFT	24
 #define XILINX_DMA_BD_SOP		BIT(27)
 #define XILINX_DMA_BD_EOP		BIT(26)
 #define XILINX_DMA_BD_COMP_MASK		BIT(31)
@@ -411,6 +413,7 @@ struct xilinx_dma_tx_descriptor {
  * @stop_transfer: Differentiate b/w DMA IP's quiesce
  * @tdest: TDEST value for mcdma
  * @has_vflip: S2MM vertical flip
+ * @irq_delay: Interrupt delay timeout
  */
 struct xilinx_dma_chan {
 	struct xilinx_dma_device *xdev;
@@ -449,6 +452,7 @@ struct xilinx_dma_chan {
 	int (*stop_transfer)(struct xilinx_dma_chan *chan);
 	u16 tdest;
 	bool has_vflip;
+	u8 irq_delay;
 };
 
 /**
@@ -1561,6 +1565,9 @@ static void xilinx_dma_start_transfer(struct xilinx_dma_chan *chan)
 	if (chan->has_sg)
 		xilinx_write(chan, XILINX_DMA_REG_CURDESC,
 			     head_desc->async_tx.phys);
+	reg &= ~XILINX_DMA_CR_DELAY_MAX;
+	reg |= chan->irq_delay << XILINX_DMA_CR_DELAY_SHIFT;
+	dma_ctrl_write(chan, XILINX_DMA_REG_DMACR, reg);
 
 	xilinx_dma_start(chan);
 
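For illustration only (not part of the patch): the new delay field occupies DMACR bits 31:24, directly above the existing interrupt-coalesce count in bits 23:16, which is what the mask-and-shift sequence above writes. The standalone sketch below packs both fields into a register word. GENMASK() is re-defined locally only so the example compiles outside the kernel, and the starting register contents and field values are made-up numbers, not taken from the hardware documentation.

#include <stdint.h>
#include <stdio.h>

/* Local stand-in for the kernel's GENMASK(); yields the same masks as the defines above. */
#define GENMASK(h, l)		((~0u >> (31 - (h))) & (~0u << (l)))

#define CR_COALESCE_MAX		GENMASK(23, 16)	/* interrupt coalesce count field */
#define CR_COALESCE_SHIFT	16
#define CR_DELAY_MAX		GENMASK(31, 24)	/* interrupt delay timeout field  */
#define CR_DELAY_SHIFT		24

int main(void)
{
	uint32_t reg = 0x00010011;	/* made-up DMACR contents            */
	uint8_t coalesce = 4;		/* example: raise IRQ after 4 BDs    */
	uint8_t irq_delay = 10;		/* example delay timeout value       */

	/* Clear both fields, then place the new values in their bit ranges. */
	reg &= ~(CR_COALESCE_MAX | CR_DELAY_MAX);
	reg |= (uint32_t)coalesce << CR_COALESCE_SHIFT;
	reg |= (uint32_t)irq_delay << CR_DELAY_SHIFT;

	printf("DMACR = 0x%08x\n", reg);	/* prints DMACR = 0x0a040011 */
	return 0;
}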
@@ -1898,15 +1905,8 @@ static irqreturn_t xilinx_dma_irq_handler(int irq, void *data)
 		}
 	}
 
-	if (status & XILINX_DMA_DMASR_DLY_CNT_IRQ) {
-		/*
-		 * Device takes too long to do the transfer when user requires
-		 * responsiveness.
-		 */
-		dev_dbg(chan->dev, "Inter-packet latency too long\n");
-	}
-
-	if (status & XILINX_DMA_DMASR_FRM_CNT_IRQ) {
+	if (status & (XILINX_DMA_DMASR_FRM_CNT_IRQ |
+		      XILINX_DMA_DMASR_DLY_CNT_IRQ)) {
 		spin_lock(&chan->lock);
 		xilinx_dma_complete_descriptor(chan);
 		chan->idle = true;
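Context for the handler change (not part of the patch): the delay-count interrupt indicates that descriptors finished but the coalesce threshold was not reached within the programmed timeout, so folding it into the same branch as the frame-count interrupt completes those pending descriptors promptly instead of only emitting a debug message. A minimal standalone sketch of the folded condition follows; the bit positions are assumptions chosen for the example, not copied from the driver.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Bit positions assumed for this example only. */
#define DMASR_FRM_CNT_IRQ	(1u << 12)	/* coalesce count reached */
#define DMASR_DLY_CNT_IRQ	(1u << 13)	/* delay timeout expired  */

/* Mirrors the folded condition above: either event triggers descriptor completion. */
static bool should_complete(uint32_t status)
{
	return status & (DMASR_FRM_CNT_IRQ | DMASR_DLY_CNT_IRQ);
}

int main(void)
{
	printf("%d\n", should_complete(DMASR_DLY_CNT_IRQ));	/* 1: delay IRQ alone now completes */
	printf("%d\n", should_complete(DMASR_FRM_CNT_IRQ));	/* 1 */
	printf("%d\n", should_complete(0));			/* 0 */
	return 0;
}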
@@ -2833,6 +2833,8 @@ static int xilinx_dma_chan_probe(struct xilinx_dma_device *xdev,
 	/* Retrieve the channel properties from the device tree */
 	has_dre = of_property_read_bool(node, "xlnx,include-dre");
 
+	of_property_read_u8(node, "xlnx,irq-delay", &chan->irq_delay);
+
 	chan->genlock = of_property_read_bool(node, "xlnx,genlock-mode");
 
 	err = of_property_read_u32(node, "xlnx,datawidth", &value);
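A note on the default (not in the patch): of_property_read_u8() leaves its output untouched when the property is absent, so a channel node without "xlnx,irq-delay" keeps irq_delay at its zero-initialized value (assuming the channel structure is zero-allocated in this probe path, as is usual), and the delay field written to DMACR stays cleared, preserving the previous behaviour.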