@@ -57,8 +57,13 @@
/* DMA Related Registers */
#define SPI_DMA_ADDR_BASE (0x1000)
#define SPI_DMA_GLOBAL_WR_ENGINE_EN (SPI_DMA_ADDR_BASE + 0x0C)
+#define SPI_DMA_WR_DOORBELL_REG (SPI_DMA_ADDR_BASE + 0x10)
#define SPI_DMA_GLOBAL_RD_ENGINE_EN (SPI_DMA_ADDR_BASE + 0x2C)
#define SPI_DMA_RD_DOORBELL_REG (SPI_DMA_ADDR_BASE + 0x30)
+#define SPI_DMA_INTR_WR_STS (SPI_DMA_ADDR_BASE + 0x4C)
+#define SPI_DMA_WR_INT_MASK (SPI_DMA_ADDR_BASE + 0x54)
+#define SPI_DMA_INTR_WR_CLR (SPI_DMA_ADDR_BASE + 0x58)
+#define SPI_DMA_ERR_WR_STS (SPI_DMA_ADDR_BASE + 0x5C)
#define SPI_DMA_INTR_IMWR_WDONE_LOW (SPI_DMA_ADDR_BASE + 0x60)
#define SPI_DMA_INTR_IMWR_WDONE_HIGH (SPI_DMA_ADDR_BASE + 0x64)
#define SPI_DMA_INTR_IMWR_WABORT_LOW (SPI_DMA_ADDR_BASE + 0x68)
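The write-side registers added here mirror the read-side set around them: a doorbell to kick the write engine plus status/mask/clear registers for its interrupts. As a hedged sketch (not part of the patch), the status and clear registers are used with the usual read-then-write-back-to-acknowledge pattern; "dma_bar" is a hypothetical __iomem pointer to the DMA register BAR, and readl()/writel() come from <linux/io.h>:

/* Sketch only: acknowledge pending write-engine interrupts. */
static void sketch_ack_wr_interrupts(void __iomem *dma_bar)
{
	u32 sts = readl(dma_bar + SPI_DMA_INTR_WR_STS);

	/* ... dispatch on the done/abort bits reported in sts ... */
	writel(sts, dma_bar + SPI_DMA_INTR_WR_CLR);
}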
@@ -74,7 +79,9 @@
#define SPI_DMA_INTR_IMWR_RABORT_HIGH (SPI_DMA_ADDR_BASE + 0xD8)
#define SPI_DMA_INTR_RD_IMWR_DATA (SPI_DMA_ADDR_BASE + 0xDC)
+#define SPI_DMA_CH0_WR_BASE (SPI_DMA_ADDR_BASE + 0x200)
#define SPI_DMA_CH0_RD_BASE (SPI_DMA_ADDR_BASE + 0x300)
+#define SPI_DMA_CH1_WR_BASE (SPI_DMA_ADDR_BASE + 0x400)
#define SPI_DMA_CH1_RD_BASE (SPI_DMA_ADDR_BASE + 0x500)
#define SPI_DMA_CH_CTL1_OFFSET (0x00)
@@ -129,15 +136,17 @@ struct pci1xxxx_spi_internal {
int irq;
int mode;
bool spi_xfer_in_progress;
+ void *rx_buf;
bool dma_aborted_rd;
u32 bytes_recvd;
u32 tx_sgl_len;
+ u32 rx_sgl_len;
struct scatterlist *tx_sgl, *rx_sgl;
+ bool dma_aborted_wr;
struct completion spi_xfer_done;
struct spi_controller *spi_host;
struct pci1xxxx_spi *parent;
struct spi_transfer *xfer;
- void *rx_buf;
struct {
unsigned int dev_sel : 3;
unsigned int msi_vector_sel : 1;
@@ -351,6 +360,26 @@ static void pci1xxxx_spi_setup_dma_read(struct pci1xxxx_spi_internal *p,
base + SPI_DMA_CH_DAR_HI_OFFSET);
}
+static void pci1xxxx_spi_setup_dma_write(struct pci1xxxx_spi_internal *p,
+ dma_addr_t dma_addr, u32 len)
+{
+ void *base;
+
+ if (!p->hw_inst)
+ base = p->parent->dma_offset_bar + SPI_DMA_CH0_WR_BASE;
+ else
+ base = p->parent->dma_offset_bar + SPI_DMA_CH1_WR_BASE;
+
+ writel(DMA_INTR_EN, base + SPI_DMA_CH_CTL1_OFFSET);
+ writel(len, base + SPI_DMA_CH_XFER_LEN_OFFSET);
+ writel(lower_32_bits(dma_addr), base + SPI_DMA_CH_DAR_LO_OFFSET);
+ writel(upper_32_bits(dma_addr), base + SPI_DMA_CH_DAR_HI_OFFSET);
+ writel(lower_32_bits(SPI_PERI_ADDR_BASE + SPI_MST_RSP_BUF_OFFSET(p->hw_inst)),
+ base + SPI_DMA_CH_SAR_LO_OFFSET);
+ writel(upper_32_bits(SPI_PERI_ADDR_BASE + SPI_MST_RSP_BUF_OFFSET(p->hw_inst)),
+ base + SPI_DMA_CH_SAR_HI_OFFSET);
+}
+
static void pci1xxxx_spi_setup(struct pci1xxxx_spi *par, u8 hw_inst, u32 mode,
u8 clkdiv, u32 len)
{
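The pci1xxxx_spi_setup_dma_write() helper added above is the mirror image of pci1xxxx_spi_setup_dma_read(): the SPI instance's response buffer is programmed as the DMA source and the host rx buffer as the destination. A minimal usage sketch, assuming "p" already carries a DMA-mapped rx scatterlist entry (the real call sites are in pci1xxxx_spi_transfer_with_dma() and the interrupt handlers further down):

/* Sketch only: program and (later) kick one write-side segment. */
static void sketch_kick_dma_write(struct pci1xxxx_spi_internal *p)
{
	dma_addr_t rx_dma_addr = sg_dma_address(p->rx_sgl);

	p->rx_sgl_len = sg_dma_len(p->rx_sgl);
	pci1xxxx_spi_setup_dma_write(p, rx_dma_addr, p->rx_sgl_len);

	/* The doorbell is only rung once the SPI transfer itself is done. */
	writel(p->hw_inst, p->parent->dma_offset_bar + SPI_DMA_WR_DOORBELL_REG);
}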
@@ -444,12 +473,17 @@ static int pci1xxxx_spi_transfer_with_dma(struct spi_controller *spi_ctlr,
{
struct pci1xxxx_spi_internal *p = spi_controller_get_devdata(spi_ctlr);
struct pci1xxxx_spi *par = p->parent;
+ dma_addr_t rx_dma_addr = 0;
dma_addr_t tx_dma_addr = 0;
int ret = 0;
u32 regval;
p->spi_xfer_in_progress = true;
p->tx_sgl = xfer->tx_sg.sgl;
+ p->rx_sgl = xfer->rx_sg.sgl;
+ p->rx_buf = xfer->rx_buf;
+ regval = readl(par->reg_base + SPI_MST_EVENT_REG_OFFSET(p->hw_inst));
+ writel(regval, par->reg_base + SPI_MST_EVENT_REG_OFFSET(p->hw_inst));
if (!xfer->tx_buf || !p->tx_sgl) {
ret = -EINVAL;
@@ -464,10 +498,13 @@ static int pci1xxxx_spi_transfer_with_dma(struct spi_controller *spi_ctlr,
writel(regval, par->reg_base + SPI_MST_EVENT_REG_OFFSET(p->hw_inst));
tx_dma_addr = sg_dma_address(p->tx_sgl);
+ rx_dma_addr = sg_dma_address(p->rx_sgl);
p->tx_sgl_len = sg_dma_len(p->tx_sgl);
+ p->rx_sgl_len = sg_dma_len(p->rx_sgl);
pci1xxxx_spi_setup(par, p->hw_inst, p->mode, p->clkdiv, p->tx_sgl_len);
-
pci1xxxx_spi_setup_dma_read(p, (tx_dma_addr), p->tx_sgl_len);
+ if (rx_dma_addr)
+ pci1xxxx_spi_setup_dma_write(p, rx_dma_addr, p->rx_sgl_len);
writel(p->hw_inst, par->dma_offset_bar + SPI_DMA_RD_DOORBELL_REG);
reinit_completion(&p->spi_xfer_done);
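Note that only the read doorbell is rung here: the write doorbell is deliberately deferred to the interrupt path, so the response buffer is drained only after the SPI core has actually filled it. The caller then just waits on spi_xfer_done; a sketch of that wait, assuming a timeout-based wait like the error handling in the next hunk implies (XFER_TIMEOUT_MS is a placeholder, not a driver define):

	/* Sketch only: park until the ISR chain below signals completion. */
	if (!wait_for_completion_timeout(&p->spi_xfer_done,
					 msecs_to_jiffies(XFER_TIMEOUT_MS)))
		ret = -ETIMEDOUT;	/* abort/engine-reset recovery follows */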
@@ -494,8 +531,30 @@ static int pci1xxxx_spi_transfer_with_dma(struct spi_controller *spi_ctlr,
par->dma_offset_bar + SPI_DMA_GLOBAL_RD_ENGINE_EN);
p->dma_aborted_rd = false;
ret = -ECANCELED;
- goto error;
}
+ if (p->dma_aborted_wr) {
+ writel(SPI_DMA_ENGINE_DIS,
+ par->dma_offset_bar + SPI_DMA_GLOBAL_WR_ENGINE_EN);
+
+ /*
+ * The DMA engine reset takes time if any TLP
+ * completion is in progress; wait until the
+ * engine reset has completed.
+ */
+ ret = readl_poll_timeout(par->dma_offset_bar +
+ SPI_DMA_GLOBAL_WR_ENGINE_EN, regval,
+ (regval == 0x0), 0, USEC_PER_MSEC);
+ if (ret) {
+ ret = -ECANCELED;
+ goto error;
+ }
+
+ writel(SPI_DMA_ENGINE_EN,
+ par->dma_offset_bar + SPI_DMA_GLOBAL_WR_ENGINE_EN);
+ p->dma_aborted_wr = false;
+ ret = -ECANCELED;
+ }
+ goto error;
}
ret = 0;
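The write-abort recovery added above follows the same disable, poll-for-quiesce, re-enable sequence already used for the read engine. A hedged sketch of that shared pattern (not a helper the patch adds; readl_poll_timeout() comes from <linux/iopoll.h>, and "engine_reg" would be SPI_DMA_GLOBAL_RD_ENGINE_EN or SPI_DMA_GLOBAL_WR_ENGINE_EN):

/* Sketch only: reset one DMA engine and wait for it to quiesce. */
static int sketch_reset_dma_engine(void __iomem *dma_bar, u32 engine_reg)
{
	u32 regval;
	int ret;

	writel(SPI_DMA_ENGINE_DIS, dma_bar + engine_reg);
	ret = readl_poll_timeout(dma_bar + engine_reg, regval,
				 regval == 0x0, 0, USEC_PER_MSEC);
	if (ret)
		return -ECANCELED;

	writel(SPI_DMA_ENGINE_EN, dma_bar + engine_reg);
	return 0;
}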
@@ -524,19 +583,50 @@ static irqreturn_t pci1xxxx_spi_isr_io(int irq, void *dev)
regval = readl(p->parent->reg_base + SPI_MST_EVENT_REG_OFFSET(p->hw_inst));
if (regval & SPI_INTR) {
/* Clear xfer_done */
- complete(&p->spi_xfer_done);
+ if (p->parent->can_dma && p->rx_buf)
+ writel(p->hw_inst, p->parent->dma_offset_bar +
+ SPI_DMA_WR_DOORBELL_REG);
+ else
+ complete(&p->parent->spi_int[p->hw_inst]->spi_xfer_done);
spi_int_fired = IRQ_HANDLED;
}
writel(regval, p->parent->reg_base + SPI_MST_EVENT_REG_OFFSET(p->hw_inst));
return spi_int_fired;
}
+static void pci1xxxx_spi_setup_next_dma_transfer(struct pci1xxxx_spi_internal *p)
+{
+ dma_addr_t tx_dma_addr = 0;
+ dma_addr_t rx_dma_addr = 0;
+ u32 prev_len;
+
+ p->tx_sgl = sg_next(p->tx_sgl);
+ if (p->rx_sgl)
+ p->rx_sgl = sg_next(p->rx_sgl);
+ if (!p->tx_sgl) {
+ /* Clear xfer_done */
+ complete(&p->spi_xfer_done);
+ } else {
+ tx_dma_addr = sg_dma_address(p->tx_sgl);
+ prev_len = p->tx_sgl_len;
+ p->tx_sgl_len = sg_dma_len(p->tx_sgl);
+ if (prev_len != p->tx_sgl_len)
+ pci1xxxx_spi_setup(p->parent,
+ p->hw_inst, p->mode, p->clkdiv, p->tx_sgl_len);
+ pci1xxxx_spi_setup_dma_read(p, tx_dma_addr, p->tx_sgl_len);
+ if (p->rx_sgl) {
+ rx_dma_addr = sg_dma_address(p->rx_sgl);
+ p->rx_sgl_len = sg_dma_len(p->rx_sgl);
+ pci1xxxx_spi_setup_dma_write(p, rx_dma_addr, p->rx_sgl_len);
+ }
+ writel(p->hw_inst, p->parent->dma_offset_bar + SPI_DMA_RD_DOORBELL_REG);
+ }
+}
+
static irqreturn_t pci1xxxx_spi_isr_dma(int irq, void *dev)
{
struct pci1xxxx_spi_internal *p = dev;
irqreturn_t spi_int_fired = IRQ_NONE;
- dma_addr_t tx_dma_addr = 0;
- void *rx_buf = NULL;
unsigned long flags;
u32 regval;
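pci1xxxx_spi_setup_next_dma_transfer() added above factors the scatter-gather walk out of the DMA ISR: each write-done interrupt either programs the next tx/rx segment pair and re-rings the read doorbell, or completes spi_xfer_done once the tx list is exhausted. The underlying iteration is the standard sg_next() walk over a DMA-mapped list, which the helper performs one interrupt at a time; a generic sketch of that walk, shown here as a plain loop for reference:

/* Sketch only: sum the segment lengths of a DMA-mapped scatterlist. */
static u32 sketch_total_sgl_len(struct scatterlist *sgl)
{
	struct scatterlist *sg;
	u32 total = 0;

	for (sg = sgl; sg; sg = sg_next(sg))
		total += sg_dma_len(sg);

	return total;
}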
@@ -555,30 +645,30 @@ static irqreturn_t pci1xxxx_spi_isr_dma(int irq, void *dev)
spi_int_fired = IRQ_HANDLED;
}
writel(regval, p->parent->dma_offset_bar + SPI_DMA_INTR_RD_CLR);
+
+ /* Clear the DMA WR INT */
+ regval = readl(p->parent->dma_offset_bar + SPI_DMA_INTR_WR_STS);
+ if (regval & SPI_DMA_DONE_INT_MASK) {
+ if (regval & SPI_DMA_CH0_DONE_INT)
+ pci1xxxx_spi_setup_next_dma_transfer(p->parent->spi_int[SPI0]);
+
+ if (regval & SPI_DMA_CH1_DONE_INT)
+ pci1xxxx_spi_setup_next_dma_transfer(p->parent->spi_int[SPI1]);
+
+ spi_int_fired = IRQ_HANDLED;
+ }
+ if (regval & SPI_DMA_ABORT_INT_MASK) {
+ p->dma_aborted_wr = true;
+ spi_int_fired = IRQ_HANDLED;
+ }
+ writel(regval, p->parent->dma_offset_bar + SPI_DMA_INTR_WR_CLR);
spin_unlock_irqrestore(&p->parent->dma_reg_lock, flags);
/* Clear the SPI GO_BIT Interrupt */
regval = readl(p->parent->reg_base + SPI_MST_EVENT_REG_OFFSET(p->hw_inst));
if (regval & SPI_INTR) {
- rx_buf = p->rx_buf;
- memcpy_fromio(rx_buf + p->bytes_recvd, p->parent->reg_base +
- SPI_MST_RSP_BUF_OFFSET(p->hw_inst), p->tx_sgl_len);
- p->bytes_recvd += p->tx_sgl_len;
-
- p->tx_sgl = sg_next(p->tx_sgl);
- if (!p->tx_sgl) {
- /* Clear xfer_done */
- complete(&p->spi_xfer_done);
- spi_int_fired = IRQ_HANDLED;
- } else {
- tx_dma_addr = sg_dma_address(p->tx_sgl);
- p->tx_sgl_len = sg_dma_len(p->tx_sgl);
- pci1xxxx_spi_setup(p->parent, p->hw_inst, p->mode, p->clkdiv,
- p->tx_sgl_len);
- pci1xxxx_spi_setup_dma_read(p, tx_dma_addr, p->tx_sgl_len);
- writel(p->hw_inst, p->parent->dma_offset_bar +
- SPI_DMA_RD_DOORBELL_REG);
- }
+ writel(p->hw_inst, p->parent->dma_offset_bar + SPI_DMA_WR_DOORBELL_REG);
+ spi_int_fired = IRQ_HANDLED;
}
writel(regval, p->parent->reg_base + SPI_MST_EVENT_REG_OFFSET(p->hw_inst));
return spi_int_fired;
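With these changes the SPI-done interrupt no longer drains the response buffer with memcpy_fromio(); it only rings the write doorbell, the write engine moves the data into host memory, and the write-done interrupt then advances to the next segment or completes the transfer. A condensed sketch of the resulting write-done dispatch, assuming SPI_DMA_CH0_DONE_INT and SPI_DMA_CH1_DONE_INT are the per-channel bits within SPI_DMA_DONE_INT_MASK (as the hunk above implies):

/* Sketch only: route write-done status to the owning SPI instance. */
static void sketch_dispatch_wr_done(struct pci1xxxx_spi *par, u32 sts)
{
	if (sts & SPI_DMA_CH0_DONE_INT)
		pci1xxxx_spi_setup_next_dma_transfer(par->spi_int[SPI0]);
	if (sts & SPI_DMA_CH1_DONE_INT)
		pci1xxxx_spi_setup_next_dma_transfer(par->spi_int[SPI1]);
}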