On 27-10-20, 14:38, Sia Jee Heng wrote:
> Add support for the device_prep_dma_cyclic() callback function to benefit
> DMA cyclic clients such as ALSA.
> 
> The existing AxiDMA driver only supports memory-to-memory data transfers.
> Device-to-memory and memory-to-device transfers in cyclic mode would fail
> if this interface is not supported by the AxiDMA driver.
> 
> Reviewed-by: Andy Shevchenko <[email protected]>
> Signed-off-by: Sia Jee Heng <[email protected]>
> ---
>  .../dma/dw-axi-dmac/dw-axi-dmac-platform.c    | 182 +++++++++++++++++-
>  drivers/dma/dw-axi-dmac/dw-axi-dmac.h         |   2 +
>  2 files changed, 177 insertions(+), 7 deletions(-)
> 
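(Editor's note: the commit message above targets cyclic clients such as ALSA.
As a hedged illustration, the sketch below shows roughly how such a client
would reach the new callback through the generic dmaengine API; the function
name, callback and context parameters are made up for this example and are not
part of the patch.)

#include <linux/dmaengine.h>

/* Illustrative only: a DEV_TO_MEM cyclic client submitting one cyclic
 * transfer. dmaengine_prep_dma_cyclic() ends up in the driver's new
 * device_prep_dma_cyclic callback. Assumes dmaengine_slave_config() has
 * already been called on @chan.
 */
static int start_cyclic_rx(struct dma_chan *chan, dma_addr_t buf_dma,
			   size_t buf_len, size_t period_len,
			   dma_async_tx_callback period_cb, void *ctx)
{
	struct dma_async_tx_descriptor *txd;

	txd = dmaengine_prep_dma_cyclic(chan, buf_dma, buf_len, period_len,
					DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT);
	if (!txd)
		return -ENOMEM;

	txd->callback = period_cb;	/* invoked once per completed period */
	txd->callback_param = ctx;
	dmaengine_submit(txd);
	dma_async_issue_pending(chan);

	return 0;
}
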
> diff --git a/drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c b/drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c
> index 1124c97025f2..9e574753aaf0 100644
> --- a/drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c
> +++ b/drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c
> @@ -15,6 +15,8 @@
>  #include <linux/err.h>
>  #include <linux/interrupt.h>
>  #include <linux/io.h>
> +#include <linux/iopoll.h>
> +#include <linux/io-64-nonatomic-lo-hi.h>
>  #include <linux/kernel.h>
>  #include <linux/module.h>
>  #include <linux/of.h>
> @@ -575,6 +577,135 @@ dma_chan_prep_dma_memcpy(struct dma_chan *dchan, dma_addr_t dst_adr,
>       return NULL;
>  }
>  
> +static struct dma_async_tx_descriptor *
> +dw_axi_dma_chan_prep_cyclic(struct dma_chan *dchan, dma_addr_t dma_addr,
> +                         size_t buf_len, size_t period_len,
> +                         enum dma_transfer_direction direction,
> +                         unsigned long flags)
> +{
> +     struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan);
> +     u32 data_width = BIT(chan->chip->dw->hdata->m_data_width);
> +     struct axi_dma_hw_desc *hw_desc = NULL;
> +     struct axi_dma_desc *desc = NULL;
> +     dma_addr_t src_addr = dma_addr;
> +     u32 num_periods = buf_len / period_len;
> +     unsigned int reg_width;
> +     unsigned int mem_width;
> +     dma_addr_t reg;
> +     unsigned int i;
> +     u32 ctllo, ctlhi;
> +     size_t block_ts;
> +     u64 llp = 0;
> +     u8 lms = 0; /* Select AXI0 master for LLI fetching */
> +
> +     block_ts = chan->chip->dw->hdata->block_size[chan->id];
> +
> +     mem_width = __ffs(data_width | dma_addr | period_len);
> +     if (mem_width > DWAXIDMAC_TRANS_WIDTH_32)
> +             mem_width = DWAXIDMAC_TRANS_WIDTH_32;
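
(Editor's aside, illustrative numbers only: data_width here is
BIT(m_data_width). If, say, data_width = 0x8, dma_addr = 0x1000 and
period_len = 0x100, then __ffs(0x8 | 0x1000 | 0x100) = __ffs(0x1108) = 3,
which the check above caps at DWAXIDMAC_TRANS_WIDTH_32, i.e. 32-bit
memory-side transfers. The memory-side width is thus bounded by the buffer
address alignment, the period length and the master data bus width.)
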
> +
> +     desc = axi_desc_alloc(num_periods);
> +     if (unlikely(!desc))
> +             goto err_desc_get;
> +
> +     chan->direction = direction;
> +     desc->chan = chan;
> +     chan->cyclic = true;
> +
> +     switch (direction) {
> +     case DMA_MEM_TO_DEV:
> +             reg_width = __ffs(chan->config.dst_addr_width);
> +             reg = chan->config.dst_addr;
> +             ctllo = reg_width << CH_CTL_L_DST_WIDTH_POS |
> +                     DWAXIDMAC_CH_CTL_L_NOINC << CH_CTL_L_DST_INC_POS |
> +                     DWAXIDMAC_CH_CTL_L_INC << CH_CTL_L_SRC_INC_POS;
> +             break;
> +     case DMA_DEV_TO_MEM:
> +             reg_width = __ffs(chan->config.src_addr_width);
> +             reg = chan->config.src_addr;
> +             ctllo = reg_width << CH_CTL_L_SRC_WIDTH_POS |
> +                     DWAXIDMAC_CH_CTL_L_INC << CH_CTL_L_DST_INC_POS |
> +                     DWAXIDMAC_CH_CTL_L_NOINC << CH_CTL_L_SRC_INC_POS;
> +             break;
> +     default:
> +             return NULL;
> +     }
> +
> +     for (i = 0; i < num_periods; i++) {
> +             hw_desc = &desc->hw_desc[i];
> +
> +             hw_desc->lli = axi_desc_get(chan, &hw_desc->llp);
> +             if (unlikely(!hw_desc->lli))
> +                     goto err_desc_get;
> +
> +             if (direction == DMA_MEM_TO_DEV)
> +                     block_ts = period_len >> mem_width;
> +             else
> +                     block_ts = period_len >> reg_width;
> +
> +             ctlhi = CH_CTL_H_LLI_VALID;
> +             if (chan->chip->dw->hdata->restrict_axi_burst_len) {
> +                     u32 burst_len = chan->chip->dw->hdata->axi_rw_burst_len;
> +
> +                     ctlhi |= (CH_CTL_H_ARLEN_EN |
> +                             burst_len << CH_CTL_H_ARLEN_POS |
> +                             CH_CTL_H_AWLEN_EN |
> +                             burst_len << CH_CTL_H_AWLEN_POS);
> +             }
> +
> +             hw_desc->lli->ctl_hi = cpu_to_le32(ctlhi);
> +
> +             if (direction == DMA_MEM_TO_DEV)
> +                     ctllo |= mem_width << CH_CTL_L_SRC_WIDTH_POS;
> +             else
> +                     ctllo |= mem_width << CH_CTL_L_DST_WIDTH_POS;
> +
> +             if (direction == DMA_MEM_TO_DEV) {
> +                     write_desc_sar(hw_desc, src_addr);
> +                     write_desc_dar(hw_desc, reg);
> +             } else {
> +                     write_desc_sar(hw_desc, reg);
> +                     write_desc_dar(hw_desc, src_addr);
> +             }
> +
> +             hw_desc->lli->block_ts_lo = cpu_to_le32(block_ts - 1);
> +
> +             ctllo |= (DWAXIDMAC_BURST_TRANS_LEN_4 << CH_CTL_L_DST_MSIZE_POS |
> +                       DWAXIDMAC_BURST_TRANS_LEN_4 << CH_CTL_L_SRC_MSIZE_POS);
> +             hw_desc->lli->ctl_lo = cpu_to_le32(ctllo);
> +
> +             set_desc_src_master(hw_desc);
> +
> +             /*
> +              * Set end-of-link to the linked descriptor, so that cyclic
> +              * callback function can be triggered during interrupt.
> +              */
> +             set_desc_last(hw_desc);
> +
> +             src_addr += period_len;
> +     }

Apart from this bit and the use of periods instead of an sg_list, this seems
very similar to the slave handler, so can you please move the common bits into
helpers and remove/reduce the duplicate code?
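
(As a non-authoritative sketch of that refactor, with the helper name,
signature and error codes being illustrative rather than taken from the
driver, the shared per-LLI setup from the hunk above could be pulled into
something like the following, leaving each prep callback with only its own
loop over periods or scatterlist entries.)

/*
 * Illustrative only: common per-LLI programming shared by the cyclic and
 * slave_sg prep paths. Each caller keeps its own loop and decides where
 * to set the end-of-link bit.
 */
static int dw_axi_dma_set_hw_desc(struct axi_dma_chan *chan,
				  struct axi_dma_hw_desc *hw_desc,
				  dma_addr_t mem_addr, size_t segment_len)
{
	u32 data_width = BIT(chan->chip->dw->hdata->m_data_width);
	unsigned int reg_width, mem_width;
	dma_addr_t device_addr;
	size_t block_ts;
	u32 ctllo, ctlhi;

	/* Memory-side width bounded by bus width, address and length alignment */
	mem_width = __ffs(data_width | mem_addr | segment_len);
	if (mem_width > DWAXIDMAC_TRANS_WIDTH_32)
		mem_width = DWAXIDMAC_TRANS_WIDTH_32;

	switch (chan->direction) {
	case DMA_MEM_TO_DEV:
		reg_width = __ffs(chan->config.dst_addr_width);
		device_addr = chan->config.dst_addr;
		ctllo = reg_width << CH_CTL_L_DST_WIDTH_POS |
			mem_width << CH_CTL_L_SRC_WIDTH_POS |
			DWAXIDMAC_CH_CTL_L_NOINC << CH_CTL_L_DST_INC_POS |
			DWAXIDMAC_CH_CTL_L_INC << CH_CTL_L_SRC_INC_POS;
		block_ts = segment_len >> mem_width;
		break;
	case DMA_DEV_TO_MEM:
		reg_width = __ffs(chan->config.src_addr_width);
		device_addr = chan->config.src_addr;
		ctllo = reg_width << CH_CTL_L_SRC_WIDTH_POS |
			mem_width << CH_CTL_L_DST_WIDTH_POS |
			DWAXIDMAC_CH_CTL_L_INC << CH_CTL_L_DST_INC_POS |
			DWAXIDMAC_CH_CTL_L_NOINC << CH_CTL_L_SRC_INC_POS;
		block_ts = segment_len >> reg_width;
		break;
	default:
		return -EINVAL;
	}

	hw_desc->lli = axi_desc_get(chan, &hw_desc->llp);
	if (unlikely(!hw_desc->lli))
		return -ENOMEM;

	ctlhi = CH_CTL_H_LLI_VALID;
	if (chan->chip->dw->hdata->restrict_axi_burst_len) {
		u32 burst_len = chan->chip->dw->hdata->axi_rw_burst_len;

		ctlhi |= CH_CTL_H_ARLEN_EN |
			 burst_len << CH_CTL_H_ARLEN_POS |
			 CH_CTL_H_AWLEN_EN |
			 burst_len << CH_CTL_H_AWLEN_POS;
	}

	if (chan->direction == DMA_MEM_TO_DEV) {
		write_desc_sar(hw_desc, mem_addr);
		write_desc_dar(hw_desc, device_addr);
	} else {
		write_desc_sar(hw_desc, device_addr);
		write_desc_dar(hw_desc, mem_addr);
	}

	hw_desc->lli->block_ts_lo = cpu_to_le32(block_ts - 1);
	hw_desc->lli->ctl_hi = cpu_to_le32(ctlhi);

	ctllo |= DWAXIDMAC_BURST_TRANS_LEN_4 << CH_CTL_L_DST_MSIZE_POS |
		 DWAXIDMAC_BURST_TRANS_LEN_4 << CH_CTL_L_SRC_MSIZE_POS;
	hw_desc->lli->ctl_lo = cpu_to_le32(ctllo);

	set_desc_src_master(hw_desc);

	return 0;
}

The cyclic loop above would then reduce to roughly "for each period: call the
helper, set_desc_last(), advance src_addr by period_len", and the slave_sg
path could iterate its scatterlist the same way.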

-- 
~Vinod
