提交 a4b0d348 编写于 作者: M Maxime Ripard 提交者: Vinod Koul

dmaengine: dw: Split device_control

Split the device_control callback of the DesignWare DMA driver to make use
of the newly introduced callbacks, that will eventually be used to retrieve
slave capabilities.
Signed-off-by: Maxime Ripard <maxime.ripard@free-electrons.com>
Signed-off-by: Vinod Koul <vinod.koul@intel.com>
上级 1d4c0b8c
...@@ -955,8 +955,7 @@ static inline void convert_burst(u32 *maxburst) ...@@ -955,8 +955,7 @@ static inline void convert_burst(u32 *maxburst)
*maxburst = 0; *maxburst = 0;
} }
static int static int dwc_config(struct dma_chan *chan, struct dma_slave_config *sconfig)
set_runtime_config(struct dma_chan *chan, struct dma_slave_config *sconfig)
{ {
struct dw_dma_chan *dwc = to_dw_dma_chan(chan); struct dw_dma_chan *dwc = to_dw_dma_chan(chan);
...@@ -973,16 +972,25 @@ set_runtime_config(struct dma_chan *chan, struct dma_slave_config *sconfig) ...@@ -973,16 +972,25 @@ set_runtime_config(struct dma_chan *chan, struct dma_slave_config *sconfig)
return 0; return 0;
} }
static inline void dwc_chan_pause(struct dw_dma_chan *dwc) static int dwc_pause(struct dma_chan *chan)
{ {
u32 cfglo = channel_readl(dwc, CFG_LO); struct dw_dma_chan *dwc = to_dw_dma_chan(chan);
unsigned int count = 20; /* timeout iterations */ unsigned long flags;
unsigned int count = 20; /* timeout iterations */
u32 cfglo;
spin_lock_irqsave(&dwc->lock, flags);
cfglo = channel_readl(dwc, CFG_LO);
channel_writel(dwc, CFG_LO, cfglo | DWC_CFGL_CH_SUSP); channel_writel(dwc, CFG_LO, cfglo | DWC_CFGL_CH_SUSP);
while (!(channel_readl(dwc, CFG_LO) & DWC_CFGL_FIFO_EMPTY) && count--) while (!(channel_readl(dwc, CFG_LO) & DWC_CFGL_FIFO_EMPTY) && count--)
udelay(2); udelay(2);
dwc->paused = true; dwc->paused = true;
spin_unlock_irqrestore(&dwc->lock, flags);
return 0;
} }
static inline void dwc_chan_resume(struct dw_dma_chan *dwc) static inline void dwc_chan_resume(struct dw_dma_chan *dwc)
...@@ -994,53 +1002,48 @@ static inline void dwc_chan_resume(struct dw_dma_chan *dwc) ...@@ -994,53 +1002,48 @@ static inline void dwc_chan_resume(struct dw_dma_chan *dwc)
dwc->paused = false; dwc->paused = false;
} }
static int dwc_control(struct dma_chan *chan, enum dma_ctrl_cmd cmd, static int dwc_resume(struct dma_chan *chan)
unsigned long arg)
{ {
struct dw_dma_chan *dwc = to_dw_dma_chan(chan); struct dw_dma_chan *dwc = to_dw_dma_chan(chan);
struct dw_dma *dw = to_dw_dma(chan->device);
struct dw_desc *desc, *_desc;
unsigned long flags; unsigned long flags;
LIST_HEAD(list);
if (cmd == DMA_PAUSE) { if (!dwc->paused)
spin_lock_irqsave(&dwc->lock, flags); return 0;
dwc_chan_pause(dwc); spin_lock_irqsave(&dwc->lock, flags);
spin_unlock_irqrestore(&dwc->lock, flags); dwc_chan_resume(dwc);
} else if (cmd == DMA_RESUME) {
if (!dwc->paused)
return 0;
spin_lock_irqsave(&dwc->lock, flags); spin_unlock_irqrestore(&dwc->lock, flags);
dwc_chan_resume(dwc); return 0;
}
spin_unlock_irqrestore(&dwc->lock, flags); static int dwc_terminate_all(struct dma_chan *chan)
} else if (cmd == DMA_TERMINATE_ALL) { {
spin_lock_irqsave(&dwc->lock, flags); struct dw_dma_chan *dwc = to_dw_dma_chan(chan);
struct dw_dma *dw = to_dw_dma(chan->device);
struct dw_desc *desc, *_desc;
unsigned long flags;
LIST_HEAD(list);
spin_lock_irqsave(&dwc->lock, flags);
clear_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags); clear_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags);
dwc_chan_disable(dw, dwc); dwc_chan_disable(dw, dwc);
dwc_chan_resume(dwc); dwc_chan_resume(dwc);
/* active_list entries will end up before queued entries */ /* active_list entries will end up before queued entries */
list_splice_init(&dwc->queue, &list); list_splice_init(&dwc->queue, &list);
list_splice_init(&dwc->active_list, &list); list_splice_init(&dwc->active_list, &list);
spin_unlock_irqrestore(&dwc->lock, flags); spin_unlock_irqrestore(&dwc->lock, flags);
/* Flush all pending and queued descriptors */ /* Flush all pending and queued descriptors */
list_for_each_entry_safe(desc, _desc, &list, desc_node) list_for_each_entry_safe(desc, _desc, &list, desc_node)
dwc_descriptor_complete(dwc, desc, false); dwc_descriptor_complete(dwc, desc, false);
} else if (cmd == DMA_SLAVE_CONFIG) {
return set_runtime_config(chan, (struct dma_slave_config *)arg);
} else {
return -ENXIO;
}
return 0; return 0;
} }
...@@ -1659,7 +1662,10 @@ int dw_dma_probe(struct dw_dma_chip *chip, struct dw_dma_platform_data *pdata) ...@@ -1659,7 +1662,10 @@ int dw_dma_probe(struct dw_dma_chip *chip, struct dw_dma_platform_data *pdata)
dw->dma.device_prep_dma_memcpy = dwc_prep_dma_memcpy; dw->dma.device_prep_dma_memcpy = dwc_prep_dma_memcpy;
dw->dma.device_prep_slave_sg = dwc_prep_slave_sg; dw->dma.device_prep_slave_sg = dwc_prep_slave_sg;
dw->dma.device_control = dwc_control; dw->dma.device_config = dwc_config;
dw->dma.device_pause = dwc_pause;
dw->dma.device_resume = dwc_resume;
dw->dma.device_terminate_all = dwc_terminate_all;
dw->dma.device_tx_status = dwc_tx_status; dw->dma.device_tx_status = dwc_tx_status;
dw->dma.device_issue_pending = dwc_issue_pending; dw->dma.device_issue_pending = dwc_issue_pending;
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册