/*
 * ls2x_dma_issue_pending - kick off any queued descriptors on the channel.
 * @chan: DMA channel to service.
 *
 * Holds the vchan lock (IRQ-safe) for the whole body via guard(); the lock
 * is dropped automatically when the function returns.  A new transfer is
 * only started when there are pending descriptors and the hardware is idle
 * (lchan->desc == NULL means nothing is in flight).
 */
static void ls2x_dma_issue_pending(struct dma_chan *chan)
{
	struct ls2x_dma_chan *lchan = to_ldma_chan(chan);

	guard(spinlock_irqsave)(&lchan->vchan.lock);

	if (vchan_issue_pending(&lchan->vchan) && !lchan->desc)
		ls2x_dma_start_transfer(lchan);
}
/*
static int ls2x_dma_terminate_all(struct dma_chan *chan)
{
struct ls2x_dma_chan *lchan = to_ldma_chan(chan);
- unsigned long flags;
LIST_HEAD(head);
- spin_lock_irqsave(&lchan->vchan.lock, flags);
- /* Setting stop cmd */
- ls2x_dma_write_cmd(lchan, LDMA_STOP);
- if (lchan->desc) {
- vchan_terminate_vdesc(&lchan->desc->vdesc);
- lchan->desc = NULL;
- }
+ scoped_guard(spinlock_irqsave, &lchan->vchan.lock) {
+ /* Setting stop cmd */
+ ls2x_dma_write_cmd(lchan, LDMA_STOP);
+ if (lchan->desc) {
+ vchan_terminate_vdesc(&lchan->desc->vdesc);
+ lchan->desc = NULL;
+ }
- vchan_get_all_descriptors(&lchan->vchan, &head);
- spin_unlock_irqrestore(&lchan->vchan.lock, flags);
+ vchan_get_all_descriptors(&lchan->vchan, &head);
+ }
vchan_dma_desc_free_list(&lchan->vchan, &head);
return 0;
static int ls2x_dma_pause(struct dma_chan *chan)
{
struct ls2x_dma_chan *lchan = to_ldma_chan(chan);
- unsigned long flags;
- spin_lock_irqsave(&lchan->vchan.lock, flags);
+ guard(spinlock_irqsave)(&lchan->vchan.lock);
+
if (lchan->desc && lchan->desc->status == DMA_IN_PROGRESS) {
ls2x_dma_write_cmd(lchan, LDMA_STOP);
lchan->desc->status = DMA_PAUSED;
}
- spin_unlock_irqrestore(&lchan->vchan.lock, flags);
return 0;
}
static int ls2x_dma_resume(struct dma_chan *chan)
{
struct ls2x_dma_chan *lchan = to_ldma_chan(chan);
- unsigned long flags;
- spin_lock_irqsave(&lchan->vchan.lock, flags);
+ guard(spinlock_irqsave)(&lchan->vchan.lock);
+
if (lchan->desc && lchan->desc->status == DMA_PAUSED) {
lchan->desc->status = DMA_IN_PROGRESS;
ls2x_dma_write_cmd(lchan, LDMA_START);
}
- spin_unlock_irqrestore(&lchan->vchan.lock, flags);
return 0;
}
struct ls2x_dma_chan *lchan = dev_id;
struct ls2x_dma_desc *desc;
- spin_lock(&lchan->vchan.lock);
- desc = lchan->desc;
- if (desc) {
- if (desc->cyclic) {
- vchan_cyclic_callback(&desc->vdesc);
- } else {
- desc->status = DMA_COMPLETE;
- vchan_cookie_complete(&desc->vdesc);
- ls2x_dma_start_transfer(lchan);
+ scoped_guard(spinlock, &lchan->vchan.lock) {
+ desc = lchan->desc;
+ if (desc) {
+ if (desc->cyclic) {
+ vchan_cyclic_callback(&desc->vdesc);
+ } else {
+ desc->status = DMA_COMPLETE;
+ vchan_cookie_complete(&desc->vdesc);
+ ls2x_dma_start_transfer(lchan);
+ }
+
+ /* ls2x_dma_start_transfer() updates lchan->desc */
+ if (!lchan->desc)
+ ls2x_dma_write_cmd(lchan, LDMA_STOP);
}
-
- /* ls2x_dma_start_transfer() updates lchan->desc */
- if (!lchan->desc)
- ls2x_dma_write_cmd(lchan, LDMA_STOP);
}
- spin_unlock(&lchan->vchan.lock);
return IRQ_HANDLED;
}