
Commit fc514460 authored by Lars-Peter Clausen, committed by Vinod Koul

dma: pl330: Fix cyclic transfers



Allocate a descriptor for each period of a cyclic transfer, not just the first.
Also, since the callback needs to be called for each finished period, make sure
to initialize the callback and callback_param fields of each descriptor in a
cyclic transfer.

Cc: stable@vger.kernel.org
Signed-off-by: Lars-Peter Clausen <lars@metafoo.de>
Signed-off-by: Vinod Koul <vinod.koul@intel.com>
parent 27abb2ff
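
For context, a minimal sketch of how a dmaengine client might drive a cyclic transfer through this driver. This is an illustrative consumer, not part of the patch: struct my_dev and the my_* functions are hypothetical, only the dmaengine_* calls are real kernel API. It shows why the fix matters: the client installs callback and callback_param exactly once, on the single descriptor returned by dmaengine_prep_dma_cyclic(), so pl330_tx_submit() (first hunk below) has to copy both fields onto every per-period descriptor in the chain.

/*
 * Hypothetical dmaengine client (not part of the patch): the my_* names
 * are illustrative, only the dmaengine_* calls are real kernel API.
 */
#include <linux/dmaengine.h>
#include <linux/errno.h>

struct my_dev {
	struct dma_chan *chan;	/* e.g. from dma_request_slave_channel() */
	dma_addr_t buf_phys;	/* DMA address of the ring buffer */
	size_t buf_len;		/* total length, a multiple of period_len */
	size_t period_len;	/* one callback per period */
};

/* Runs once per finished period; with this fix it fires for every
 * period of the cycle, not only for the first descriptor. */
static void my_period_done(void *param)
{
	struct my_dev *mydev = param;

	/* ... advance mydev's ring-buffer pointers, wake up readers ... */
}

static int my_start_cyclic(struct my_dev *mydev)
{
	struct dma_async_tx_descriptor *desc;
	dma_cookie_t cookie;

	desc = dmaengine_prep_dma_cyclic(mydev->chan, mydev->buf_phys,
					 mydev->buf_len, mydev->period_len,
					 DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT);
	if (!desc)
		return -EBUSY;

	/* Set once, on the returned (last) descriptor; the patched
	 * pl330_tx_submit() copies both fields to every node. */
	desc->callback = my_period_done;
	desc->callback_param = mydev;

	cookie = dmaengine_submit(desc);
	if (dma_submit_error(cookie))
		return -EIO;

	dma_async_issue_pending(mydev->chan);

	return 0;
}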
drivers/dma/pl330.c: +67 −26
@@ -2505,6 +2505,10 @@ static dma_cookie_t pl330_tx_submit(struct dma_async_tx_descriptor *tx)
 	/* Assign cookies to all nodes */
 	while (!list_empty(&last->node)) {
 		desc = list_entry(last->node.next, struct dma_pl330_desc, node);
+		if (pch->cyclic) {
+			desc->txd.callback = last->txd.callback;
+			desc->txd.callback_param = last->txd.callback_param;
+		}
 
 		dma_cookie_assign(&desc->txd);
 
@@ -2688,15 +2692,43 @@ static struct dma_async_tx_descriptor *pl330_prep_dma_cyclic(
 		size_t period_len, enum dma_transfer_direction direction,
 		unsigned long flags, void *context)
 {
-	struct dma_pl330_desc *desc;
+	struct dma_pl330_desc *desc = NULL, *first = NULL;
 	struct dma_pl330_chan *pch = to_pchan(chan);
+	struct dma_pl330_dmac *pdmac = pch->dmac;
+	unsigned int i;
 	dma_addr_t dst;
 	dma_addr_t src;
 
+	if (len % period_len != 0)
+		return NULL;
+
+	if (!is_slave_direction(direction)) {
+		dev_err(pch->dmac->pif.dev, "%s:%d Invalid dma direction\n",
+		__func__, __LINE__);
+		return NULL;
+	}
+
+	for (i = 0; i < len / period_len; i++) {
 		desc = pl330_get_desc(pch);
 		if (!desc) {
 			dev_err(pch->dmac->pif.dev, "%s:%d Unable to fetch desc\n",
 				__func__, __LINE__);
+
+			if (!first)
+				return NULL;
+
+			spin_lock_irqsave(&pdmac->pool_lock, flags);
+
+			while (!list_empty(&first->node)) {
+				desc = list_entry(first->node.next,
+						struct dma_pl330_desc, node);
+				list_move_tail(&desc->node, &pdmac->desc_pool);
+			}
+
+			list_move_tail(&first->node, &pdmac->desc_pool);
+
+			spin_unlock_irqrestore(&pdmac->pool_lock, flags);
+
 			return NULL;
 		}
 
@@ -2716,17 +2748,26 @@ static struct dma_async_tx_descriptor *pl330_prep_dma_cyclic(
 			dst = dma_addr;
 			break;
 		default:
-		dev_err(pch->dmac->pif.dev, "%s:%d Invalid dma direction\n",
-		__func__, __LINE__);
-		return NULL;
+			break;
 		}
 
 		desc->rqcfg.brst_size = pch->burst_sz;
 		desc->rqcfg.brst_len = 1;
+		fill_px(&desc->px, dst, src, period_len);
 
-	pch->cyclic = true;
+		if (!first)
+			first = desc;
+		else
+			list_add_tail(&desc->node, &first->node);
 
-	fill_px(&desc->px, dst, src, period_len);
+		dma_addr += period_len;
+	}
+
+	if (!desc)
+		return NULL;
+
+	pch->cyclic = true;
+	desc->txd.flags = flags;
 
 	return &desc->txd;
 }
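
The new allocation-failure path deserves a note: when pl330_get_desc() fails mid-loop, every descriptor already linked behind first has to be returned to the shared pool under pdmac->pool_lock before the function bails out. A condensed sketch of that unwind pattern follows; the helper name __pl330_unwind_chain is hypothetical, the body mirrors the hunk above.

/*
 * Hypothetical helper (not in the patch) condensing the unwind path
 * from the hunk above: give a partially built descriptor chain back
 * to the DMAC's free pool.
 */
static void __pl330_unwind_chain(struct dma_pl330_dmac *pdmac,
				 struct dma_pl330_desc *first)
{
	struct dma_pl330_desc *desc;
	unsigned long flags;

	spin_lock_irqsave(&pdmac->pool_lock, flags);

	/* Peel off every descriptor linked behind the head ... */
	while (!list_empty(&first->node)) {
		desc = list_entry(first->node.next,
				  struct dma_pl330_desc, node);
		list_move_tail(&desc->node, &pdmac->desc_pool);
	}

	/* ... then return the head descriptor itself. */
	list_move_tail(&first->node, &pdmac->desc_pool);

	spin_unlock_irqrestore(&pdmac->pool_lock, flags);
}

Note that the patch itself reuses the caller-supplied flags argument as the spin_lock_irqsave() flags word; that is harmless there because the prep flags are only consumed on the success path (desc->txd.flags = flags), but the sketch uses a dedicated local for clarity.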