@@ -208,6 +208,24 @@ static void dw_edma_start_transfer(struct dw_edma_chan *chan)
desc->chunks_alloc--;
}
+static void dw_edma_device_caps(struct dma_chan *dchan,
+ struct dma_slave_caps *caps)
+{
+ struct dw_edma_chan *chan = dchan2dw_edma_chan(dchan);
+
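+ /*
+  * When the eDMA is set up locally (DW_EDMA_CHIP_LOCAL) the driver runs
+  * on the endpoint side: READ channels bring data from the remote peer
+  * into local memory (DEV_TO_MEM) and WRITE channels push local memory
+  * out to the peer (MEM_TO_DEV). When the controller is driven from the
+  * remote side, "memory" is the remote CPU's RAM, so the mapping is
+  * inverted.
+  */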
+ if (chan->dw->chip->flags & DW_EDMA_CHIP_LOCAL) {
+ if (chan->dir == EDMA_DIR_READ)
+ caps->directions = BIT(DMA_DEV_TO_MEM);
+ else
+ caps->directions = BIT(DMA_MEM_TO_DEV);
+ } else {
+ if (chan->dir == EDMA_DIR_WRITE)
+ caps->directions = BIT(DMA_DEV_TO_MEM);
+ else
+ caps->directions = BIT(DMA_MEM_TO_DEV);
+ }
+}
+
static int dw_edma_device_config(struct dma_chan *dchan,
struct dma_slave_config *config)
{
@@ -717,8 +735,7 @@ static void dw_edma_free_chan_resources(struct dma_chan *dchan)
}
}
-static int dw_edma_channel_setup(struct dw_edma *dw, bool write,
- u32 wr_alloc, u32 rd_alloc)
+static int dw_edma_channel_setup(struct dw_edma *dw, u32 wr_alloc, u32 rd_alloc)
{
struct dw_edma_chip *chip = dw->chip;
struct dw_edma_region *dt_region;
@@ -726,27 +743,15 @@ static int dw_edma_channel_setup(struct dw_edma *dw, bool write,
struct dw_edma_chan *chan;
struct dw_edma_irq *irq;
struct dma_device *dma;
- u32 alloc, off_alloc;
- u32 i, j, cnt;
- int err = 0;
+ u32 i, ch_cnt;
u32 pos;
- if (write) {
- i = 0;
- cnt = dw->wr_ch_cnt;
- dma = &dw->wr_edma;
- alloc = wr_alloc;
- off_alloc = 0;
- } else {
- i = dw->wr_ch_cnt;
- cnt = dw->rd_ch_cnt;
- dma = &dw->rd_edma;
- alloc = rd_alloc;
- off_alloc = wr_alloc;
- }
+ ch_cnt = dw->wr_ch_cnt + dw->rd_ch_cnt;
+ dma = &dw->dma;
INIT_LIST_HEAD(&dma->channels);
- for (j = 0; (alloc || dw->nr_irqs == 1) && j < cnt; j++, i++) {
+
+ for (i = 0; i < ch_cnt; i++) {
chan = &dw->chan[i];
dt_region = devm_kzalloc(dev, sizeof(*dt_region), GFP_KERNEL);
@@ -756,52 +761,62 @@ static int dw_edma_channel_setup(struct dw_edma *dw, bool write,
chan->vc.chan.private = dt_region;
chan->dw = dw;
- chan->id = j;
- chan->dir = write ? EDMA_DIR_WRITE : EDMA_DIR_READ;
+
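+ /* Write channels take the first wr_ch_cnt slots, read channels follow */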
+ if (i < dw->wr_ch_cnt) {
+ chan->id = i;
+ chan->dir = EDMA_DIR_WRITE;
+ } else {
+ chan->id = i - dw->wr_ch_cnt;
+ chan->dir = EDMA_DIR_READ;
+ }
+
chan->configured = false;
chan->request = EDMA_REQ_NONE;
chan->status = EDMA_ST_IDLE;
- if (write)
- chan->ll_max = (chip->ll_region_wr[j].sz / EDMA_LL_SZ);
+ if (chan->dir == EDMA_DIR_WRITE)
+ chan->ll_max = (chip->ll_region_wr[chan->id].sz / EDMA_LL_SZ);
else
- chan->ll_max = (chip->ll_region_rd[j].sz / EDMA_LL_SZ);
+ chan->ll_max = (chip->ll_region_rd[chan->id].sz / EDMA_LL_SZ);
chan->ll_max -= 1;
dev_vdbg(dev, "L. List:\tChannel %s[%u] max_cnt=%u\n",
- write ? "write" : "read", j, chan->ll_max);
+ chan->dir == EDMA_DIR_WRITE ? "write" : "read",
+ chan->id, chan->ll_max);
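+ /*
+  * With a single IRQ every channel shares vector 0; otherwise write
+  * channels are spread over the first wr_alloc vectors and read
+  * channels over the next rd_alloc vectors.
+  */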
if (dw->nr_irqs == 1)
pos = 0;
+ else if (chan->dir == EDMA_DIR_WRITE)
+ pos = chan->id % wr_alloc;
else
- pos = off_alloc + (j % alloc);
+ pos = wr_alloc + chan->id % rd_alloc;
irq = &dw->irq[pos];
- if (write)
- irq->wr_mask |= BIT(j);
+ if (chan->dir == EDMA_DIR_WRITE)
+ irq->wr_mask |= BIT(chan->id);
else
- irq->rd_mask |= BIT(j);
+ irq->rd_mask |= BIT(chan->id);
irq->dw = dw;
memcpy(&chan->msi, &irq->msi, sizeof(chan->msi));
dev_vdbg(dev, "MSI:\t\tChannel %s[%u] addr=0x%.8x%.8x, data=0x%.8x\n",
- write ? "write" : "read", j,
+ chan->dir == EDMA_DIR_WRITE ? "write" : "read", chan->id,
chan->msi.address_hi, chan->msi.address_lo,
chan->msi.data);
chan->vc.desc_free = vchan_free_desc;
vchan_init(&chan->vc, dma);
- if (write) {
- dt_region->paddr = chip->dt_region_wr[j].paddr;
- dt_region->vaddr = chip->dt_region_wr[j].vaddr;
- dt_region->sz = chip->dt_region_wr[j].sz;
+ if (chan->dir == EDMA_DIR_WRITE) {
+ dt_region->paddr = chip->dt_region_wr[chan->id].paddr;
+ dt_region->vaddr = chip->dt_region_wr[chan->id].vaddr;
+ dt_region->sz = chip->dt_region_wr[chan->id].sz;
} else {
- dt_region->paddr = chip->dt_region_rd[j].paddr;
- dt_region->vaddr = chip->dt_region_rd[j].vaddr;
- dt_region->sz = chip->dt_region_rd[j].sz;
+ dt_region->paddr = chip->dt_region_rd[chan->id].paddr;
+ dt_region->vaddr = chip->dt_region_rd[chan->id].vaddr;
+ dt_region->sz = chip->dt_region_rd[chan->id].sz;
}
dw_edma_v0_core_device_config(chan);
@@ -813,7 +828,7 @@ static int dw_edma_channel_setup(struct dw_edma *dw, bool write,
dma_cap_set(DMA_CYCLIC, dma->cap_mask);
dma_cap_set(DMA_PRIVATE, dma->cap_mask);
dma_cap_set(DMA_INTERLEAVE, dma->cap_mask);
- dma->directions = BIT(write ? DMA_DEV_TO_MEM : DMA_MEM_TO_DEV);
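+ /* Per-channel directions are reported via dw_edma_device_caps() */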
+ dma->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
dma->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
dma->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
dma->residue_granularity = DMA_RESIDUE_GRANULARITY_DESCRIPTOR;
@@ -822,6 +837,7 @@ static int dw_edma_channel_setup(struct dw_edma *dw, bool write,
dma->dev = chip->dev;
dma->device_alloc_chan_resources = dw_edma_alloc_chan_resources;
dma->device_free_chan_resources = dw_edma_free_chan_resources;
+ dma->device_caps = dw_edma_device_caps;
dma->device_config = dw_edma_device_config;
dma->device_pause = dw_edma_device_pause;
dma->device_resume = dw_edma_device_resume;
@@ -835,9 +851,7 @@ static int dw_edma_channel_setup(struct dw_edma *dw, bool write,
dma_set_max_seg_size(dma->dev, U32_MAX);
/* Register DMA device */
- err = dma_async_device_register(dma);
-
- return err;
+ return dma_async_device_register(dma);
}
static inline void dw_edma_dec_irq_alloc(int *nr_irqs, u32 *alloc, u16 cnt)
@@ -982,13 +996,8 @@ int dw_edma_probe(struct dw_edma_chip *chip)
if (err)
return err;
- /* Setup write channels */
- err = dw_edma_channel_setup(dw, true, wr_alloc, rd_alloc);
- if (err)
- goto err_irq_free;
-
- /* Setup read channels */
- err = dw_edma_channel_setup(dw, false, wr_alloc, rd_alloc);
+ /* Setup write/read channels */
+ err = dw_edma_channel_setup(dw, wr_alloc, rd_alloc);
if (err)
goto err_irq_free;
@@ -1022,15 +1031,8 @@ int dw_edma_remove(struct dw_edma_chip *chip)
free_irq(chip->ops->irq_vector(dev, i), &dw->irq[i]);
/* Deregister eDMA device */
- dma_async_device_unregister(&dw->wr_edma);
- list_for_each_entry_safe(chan, _chan, &dw->wr_edma.channels,
- vc.chan.device_node) {
- tasklet_kill(&chan->vc.task);
- list_del(&chan->vc.chan.device_node);
- }
-
- dma_async_device_unregister(&dw->rd_edma);
- list_for_each_entry_safe(chan, _chan, &dw->rd_edma.channels,
+ dma_async_device_unregister(&dw->dma);
+ list_for_each_entry_safe(chan, _chan, &dw->dma.channels,
vc.chan.device_node) {
tasklet_kill(&chan->vc.task);
list_del(&chan->vc.chan.device_node);
@@ -98,10 +98,9 @@ struct dw_edma_irq {
struct dw_edma {
char name[20];

- struct dma_device wr_edma;
- u16 wr_ch_cnt;
+ struct dma_device dma;

- struct dma_device rd_edma;
+ u16 wr_ch_cnt;
u16 rd_ch_cnt;

struct dw_edma_irq *irq;