@@ -140,6 +140,7 @@ struct pci1xxxx_spi_internal {
140
140
int irq [NUM_VEC_PER_INST ];
141
141
int mode ;
142
142
bool spi_xfer_in_progress ;
143
+ atomic_t dma_completion_count ;
143
144
void * rx_buf ;
144
145
bool dma_aborted_rd ;
145
146
u32 bytes_recvd ;
@@ -163,8 +164,10 @@ struct pci1xxxx_spi {
163
164
u8 dev_rev ;
164
165
void __iomem * reg_base ;
165
166
void __iomem * dma_offset_bar ;
166
- /* lock to safely access the DMA registers in isr */
167
- spinlock_t dma_reg_lock ;
167
+ /* lock to safely access the DMA RD registers in isr */
168
+ spinlock_t dma_rd_reg_lock ;
169
+ /* lock to safely access the DMA WR registers in isr */
170
+ spinlock_t dma_wr_reg_lock ;
168
171
bool can_dma ;
169
172
struct pci1xxxx_spi_internal * spi_int [] __counted_by (total_hw_instances );
170
173
};
@@ -330,7 +333,8 @@ static int pci1xxxx_spi_dma_init(struct pci1xxxx_spi *spi_bus, int hw_inst, int
330
333
if (ret )
331
334
return ret ;
332
335
333
- spin_lock_init (& spi_bus -> dma_reg_lock );
336
+ spin_lock_init (& spi_bus -> dma_rd_reg_lock );
337
+ spin_lock_init (& spi_bus -> dma_wr_reg_lock );
334
338
writel (SPI_DMA_ENGINE_EN , spi_bus -> dma_offset_bar + SPI_DMA_GLOBAL_WR_ENGINE_EN );
335
339
writel (SPI_DMA_ENGINE_EN , spi_bus -> dma_offset_bar + SPI_DMA_GLOBAL_RD_ENGINE_EN );
336
340
@@ -464,6 +468,7 @@ static void pci1xxxx_start_spi_xfer(struct pci1xxxx_spi_internal *p)
464
468
{
465
469
u32 regval ;
466
470
471
+ atomic_set (& p -> dma_completion_count , 0 );
467
472
regval = readl (p -> parent -> reg_base + SPI_MST_CTL_REG_OFFSET (p -> hw_inst ));
468
473
regval |= SPI_MST_CTL_GO ;
469
474
writel (regval , p -> parent -> reg_base + SPI_MST_CTL_REG_OFFSET (p -> hw_inst ));
@@ -536,7 +541,6 @@ static int pci1xxxx_spi_transfer_with_dma(struct spi_controller *spi_ctlr,
536
541
{
537
542
struct pci1xxxx_spi_internal * p = spi_controller_get_devdata (spi_ctlr );
538
543
struct pci1xxxx_spi * par = p -> parent ;
539
- dma_addr_t rx_dma_addr = 0 ;
540
544
dma_addr_t tx_dma_addr = 0 ;
541
545
int ret = 0 ;
542
546
u32 regval ;
@@ -545,6 +549,7 @@ static int pci1xxxx_spi_transfer_with_dma(struct spi_controller *spi_ctlr,
545
549
p -> tx_sgl = xfer -> tx_sg .sgl ;
546
550
p -> rx_sgl = xfer -> rx_sg .sgl ;
547
551
p -> rx_buf = xfer -> rx_buf ;
552
+ atomic_set (& p -> dma_completion_count , 1 );
548
553
regval = readl (par -> reg_base + SPI_MST_EVENT_REG_OFFSET (p -> hw_inst ));
549
554
writel (regval , par -> reg_base + SPI_MST_EVENT_REG_OFFSET (p -> hw_inst ));
550
555
@@ -561,13 +566,9 @@ static int pci1xxxx_spi_transfer_with_dma(struct spi_controller *spi_ctlr,
561
566
writel (regval , par -> reg_base + SPI_MST_EVENT_REG_OFFSET (p -> hw_inst ));
562
567
563
568
tx_dma_addr = sg_dma_address (p -> tx_sgl );
564
- rx_dma_addr = sg_dma_address (p -> rx_sgl );
565
569
p -> tx_sgl_len = sg_dma_len (p -> tx_sgl );
566
- p -> rx_sgl_len = sg_dma_len (p -> rx_sgl );
567
570
pci1xxxx_spi_setup (par , p -> hw_inst , p -> mode , p -> clkdiv , p -> tx_sgl_len );
568
571
pci1xxxx_spi_setup_dma_to_io (p , (tx_dma_addr ), p -> tx_sgl_len );
569
- if (rx_dma_addr )
570
- pci1xxxx_spi_setup_dma_from_io (p , rx_dma_addr , p -> rx_sgl_len );
571
572
writel (p -> hw_inst , par -> dma_offset_bar + SPI_DMA_RD_DOORBELL_REG );
572
573
573
574
reinit_completion (& p -> spi_xfer_done );
@@ -657,32 +658,33 @@ static irqreturn_t pci1xxxx_spi_isr_io(int irq, void *dev)
657
658
return spi_int_fired ;
658
659
}
659
660
660
- static void pci1xxxx_spi_setup_next_dma_transfer (struct pci1xxxx_spi_internal * p )
661
+ static void pci1xxxx_spi_setup_next_dma_to_io_transfer (struct pci1xxxx_spi_internal * p )
661
662
{
662
663
dma_addr_t tx_dma_addr = 0 ;
663
- dma_addr_t rx_dma_addr = 0 ;
664
664
u32 prev_len ;
665
665
666
666
p -> tx_sgl = sg_next (p -> tx_sgl );
667
- if (p -> rx_sgl )
668
- p -> rx_sgl = sg_next (p -> rx_sgl );
669
- if (!p -> tx_sgl ) {
670
- /* Clear xfer_done */
671
- complete (& p -> spi_xfer_done );
672
- } else {
667
+ if (p -> tx_sgl ) {
673
668
tx_dma_addr = sg_dma_address (p -> tx_sgl );
674
669
prev_len = p -> tx_sgl_len ;
675
670
p -> tx_sgl_len = sg_dma_len (p -> tx_sgl );
671
+ pci1xxxx_spi_setup_dma_to_io (p , tx_dma_addr , p -> tx_sgl_len );
672
+ writel (p -> hw_inst , p -> parent -> dma_offset_bar + SPI_DMA_RD_DOORBELL_REG );
676
673
if (prev_len != p -> tx_sgl_len )
677
674
pci1xxxx_spi_setup (p -> parent ,
678
675
p -> hw_inst , p -> mode , p -> clkdiv , p -> tx_sgl_len );
679
- pci1xxxx_spi_setup_dma_to_io (p , tx_dma_addr , p -> tx_sgl_len );
680
- if (p -> rx_sgl ) {
681
- rx_dma_addr = sg_dma_address (p -> rx_sgl );
682
- p -> rx_sgl_len = sg_dma_len (p -> rx_sgl );
683
- pci1xxxx_spi_setup_dma_from_io (p , rx_dma_addr , p -> rx_sgl_len );
684
- }
685
- writel (p -> hw_inst , p -> parent -> dma_offset_bar + SPI_DMA_RD_DOORBELL_REG );
676
+ }
677
+ }
678
+
679
+ static void pci1xxxx_spi_setup_next_dma_from_io_transfer (struct pci1xxxx_spi_internal * p )
680
+ {
681
+ dma_addr_t rx_dma_addr = 0 ;
682
+
683
+ if (p -> rx_sgl ) {
684
+ rx_dma_addr = sg_dma_address (p -> rx_sgl );
685
+ p -> rx_sgl_len = sg_dma_len (p -> rx_sgl );
686
+ pci1xxxx_spi_setup_dma_from_io (p , rx_dma_addr , p -> rx_sgl_len );
687
+ writel (p -> hw_inst , p -> parent -> dma_offset_bar + SPI_DMA_WR_DOORBELL_REG );
686
688
}
687
689
}
688
690
@@ -693,22 +695,24 @@ static irqreturn_t pci1xxxx_spi_isr_dma_rd(int irq, void *dev)
693
695
unsigned long flags ;
694
696
u32 regval ;
695
697
696
- spin_lock_irqsave (& p -> parent -> dma_reg_lock , flags );
697
698
/* Clear the DMA RD INT and start spi xfer*/
698
699
regval = readl (p -> parent -> dma_offset_bar + SPI_DMA_INTR_RD_STS );
699
700
if (regval ) {
700
701
if (regval & SPI_DMA_DONE_INT_MASK (p -> hw_inst )) {
701
- pci1xxxx_start_spi_xfer (p );
702
+ /* Start the SPI transfer only if both DMA read and write are completed */
703
+ if (atomic_inc_return (& p -> dma_completion_count ) == 2 )
704
+ pci1xxxx_start_spi_xfer (p );
702
705
spi_int_fired = IRQ_HANDLED ;
703
706
}
704
707
if (regval & SPI_DMA_ABORT_INT_MASK (p -> hw_inst )) {
705
708
p -> dma_aborted_rd = true;
706
709
spi_int_fired = IRQ_HANDLED ;
707
710
}
711
+ spin_lock_irqsave (& p -> parent -> dma_rd_reg_lock , flags );
712
+ writel ((SPI_DMA_DONE_INT_MASK (p -> hw_inst ) | SPI_DMA_ABORT_INT_MASK (p -> hw_inst )),
713
+ p -> parent -> dma_offset_bar + SPI_DMA_INTR_RD_CLR );
714
+ spin_unlock_irqrestore (& p -> parent -> dma_rd_reg_lock , flags );
708
715
}
709
- writel ((SPI_DMA_DONE_INT_MASK (p -> hw_inst ) | SPI_DMA_ABORT_INT_MASK (p -> hw_inst )),
710
- p -> parent -> dma_offset_bar + SPI_DMA_INTR_RD_CLR );
711
- spin_unlock_irqrestore (& p -> parent -> dma_reg_lock , flags );
712
716
return spi_int_fired ;
713
717
}
714
718
@@ -719,22 +723,29 @@ static irqreturn_t pci1xxxx_spi_isr_dma_wr(int irq, void *dev)
719
723
unsigned long flags ;
720
724
u32 regval ;
721
725
722
- spin_lock_irqsave (& p -> parent -> dma_reg_lock , flags );
723
726
/* Clear the DMA WR INT */
724
727
regval = readl (p -> parent -> dma_offset_bar + SPI_DMA_INTR_WR_STS );
725
728
if (regval ) {
726
729
if (regval & SPI_DMA_DONE_INT_MASK (p -> hw_inst )) {
727
- pci1xxxx_spi_setup_next_dma_transfer (p );
728
730
spi_int_fired = IRQ_HANDLED ;
731
+ if (sg_is_last (p -> rx_sgl )) {
732
+ complete (& p -> spi_xfer_done );
733
+ } else {
734
+ p -> rx_sgl = sg_next (p -> rx_sgl );
735
+ if (atomic_inc_return (& p -> dma_completion_count ) == 2 )
736
+ pci1xxxx_start_spi_xfer (p );
737
+ }
738
+
729
739
}
730
740
if (regval & SPI_DMA_ABORT_INT_MASK (p -> hw_inst )) {
731
741
p -> dma_aborted_wr = true;
732
742
spi_int_fired = IRQ_HANDLED ;
733
743
}
744
+ spin_lock_irqsave (& p -> parent -> dma_wr_reg_lock , flags );
745
+ writel ((SPI_DMA_DONE_INT_MASK (p -> hw_inst ) | SPI_DMA_ABORT_INT_MASK (p -> hw_inst )),
746
+ p -> parent -> dma_offset_bar + SPI_DMA_INTR_WR_CLR );
747
+ spin_unlock_irqrestore (& p -> parent -> dma_wr_reg_lock , flags );
734
748
}
735
- writel ((SPI_DMA_DONE_INT_MASK (p -> hw_inst ) | SPI_DMA_ABORT_INT_MASK (p -> hw_inst )),
736
- p -> parent -> dma_offset_bar + SPI_DMA_INTR_WR_CLR );
737
- spin_unlock_irqrestore (& p -> parent -> dma_reg_lock , flags );
738
749
return spi_int_fired ;
739
750
}
740
751
@@ -747,10 +758,11 @@ static irqreturn_t pci1xxxx_spi_isr_dma(int irq, void *dev)
747
758
/* Clear the SPI GO_BIT Interrupt */
748
759
regval = readl (p -> parent -> reg_base + SPI_MST_EVENT_REG_OFFSET (p -> hw_inst ));
749
760
if (regval & SPI_INTR ) {
750
- writel (p -> hw_inst , p -> parent -> dma_offset_bar + SPI_DMA_WR_DOORBELL_REG );
761
+ pci1xxxx_spi_setup_next_dma_from_io_transfer (p );
762
+ pci1xxxx_spi_setup_next_dma_to_io_transfer (p );
751
763
spi_int_fired = IRQ_HANDLED ;
764
+ writel (regval , p -> parent -> reg_base + SPI_MST_EVENT_REG_OFFSET (p -> hw_inst ));
752
765
}
753
- writel (regval , p -> parent -> reg_base + SPI_MST_EVENT_REG_OFFSET (p -> hw_inst ));
754
766
return spi_int_fired ;
755
767
}
756
768
0 commit comments