Commit f2889fee authored by Vinod Koul

Merge branch 'next' into for-linus

parents 6221f222 aecb7b64
MAINTAINERS +7 −0
@@ -5401,6 +5401,13 @@ L: linux-serial@vger.kernel.org
S:	Maintained
F:	drivers/tty/serial

SYNOPSYS DESIGNWARE DMAC DRIVER
M:	Viresh Kumar <viresh.kumar@st.com>
S:	Maintained
F:	include/linux/dw_dmac.h
F:	drivers/dma/dw_dmac_regs.h
F:	drivers/dma/dw_dmac.c

TIMEKEEPING, NTP
M:	John Stultz <johnstul@us.ibm.com>
M:	Thomas Gleixner <tglx@linutronix.de>
drivers/Makefile +3 −1
@@ -17,6 +17,9 @@ obj-$(CONFIG_SFI) += sfi/
# was used and do nothing if so
obj-$(CONFIG_PNP)		+= pnp/
obj-$(CONFIG_ARM_AMBA)		+= amba/
# Many drivers will want to use DMA so this has to be made available
# really early.
obj-$(CONFIG_DMA_ENGINE)	+= dma/

obj-$(CONFIG_VIRTIO)		+= virtio/
obj-$(CONFIG_XEN)		+= xen/
@@ -92,7 +95,6 @@ obj-$(CONFIG_EISA) += eisa/
obj-y				+= lguest/
obj-$(CONFIG_CPU_FREQ)		+= cpufreq/
obj-$(CONFIG_CPU_IDLE)		+= cpuidle/
obj-$(CONFIG_DMA_ENGINE)	+= dma/
obj-$(CONFIG_MMC)		+= mmc/
obj-$(CONFIG_MEMSTICK)		+= memstick/
obj-$(CONFIG_NEW_LEDS)		+= leds/
drivers/dma/Kconfig +7 −5
@@ -200,16 +200,18 @@ config PL330_DMA
	  platform_data for a dma-pl330 device.

config PCH_DMA
	tristate "Intel EG20T PCH / OKI SEMICONDUCTOR ML7213 IOH DMA support"
	tristate "Intel EG20T PCH / OKI Semi IOH(ML7213/ML7223) DMA support"
	depends on PCI && X86
	select DMA_ENGINE
	help
	  Enable support for Intel EG20T PCH DMA engine.

	  This driver also can be used for OKI SEMICONDUCTOR ML7213 IOH(Input/
	  Output Hub) which is for IVI(In-Vehicle Infotainment) use.
	  ML7213 is companion chip for Intel Atom E6xx series.
	  ML7213 is completely compatible for Intel EG20T PCH.
	  This driver also can be used for OKI SEMICONDUCTOR IOH(Input/
	  Output Hub), ML7213 and ML7223.
	  ML7213 IOH is for IVI(In-Vehicle Infotainment) use and ML7223 IOH is
	  for MP(Media Phone) use.
	  ML7213/ML7223 is companion chip for Intel Atom E6xx series.
	  ML7213/ML7223 is completely compatible for Intel EG20T PCH.

config IMX_SDMA
	tristate "i.MX SDMA support"
drivers/dma/at_hdmac.c +291 −85
@@ -37,8 +37,8 @@

#define	ATC_DEFAULT_CFG		(ATC_FIFOCFG_HALFFIFO)
#define	ATC_DEFAULT_CTRLA	(0)
#define	ATC_DEFAULT_CTRLB	(ATC_SIF(0)	\
				|ATC_DIF(1))
#define	ATC_DEFAULT_CTRLB	(ATC_SIF(AT_DMA_MEM_IF) \
				|ATC_DIF(AT_DMA_MEM_IF))

/*
 * Initial number of descriptors to allocate for each channel. This could
@@ -164,6 +164,29 @@ static void atc_desc_put(struct at_dma_chan *atchan, struct at_desc *desc)
	}
}

/**
 * atc_desc_chain - build chain adding a descriptor
 * @first: address of first descriptor of the chain
 * @prev: address of previous descriptor of the chain
 * @desc: descriptor to queue
 *
 * Called from prep_* functions
 */
static void atc_desc_chain(struct at_desc **first, struct at_desc **prev,
			   struct at_desc *desc)
{
	if (!(*first)) {
		*first = desc;
	} else {
		/* inform the HW lli about chaining */
		(*prev)->lli.dscr = desc->txd.phys;
		/* insert the link descriptor to the LD ring */
		list_add_tail(&desc->desc_node,
				&(*first)->tx_list);
	}
	*prev = desc;
}
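
The helper above replaces the open-coded linking blocks that are dropped from the prep routines later in this diff. As a rough sketch of how a prep function drives it (illustrative only, not from the commit; descriptor field setup and the usual atc_desc_put() unwinding are omitted, and nr_parts is a hypothetical count):

	struct at_desc	*first = NULL;
	struct at_desc	*prev = NULL;
	unsigned int	i;

	for (i = 0; i < nr_parts; i++) {
		struct at_desc *desc = atc_desc_get(atchan);

		if (!desc)
			break;		/* real callers unwind with atc_desc_put() */

		/* fill desc->lli.saddr, .daddr, .ctrla, .ctrlb for this part */

		/* append desc after prev and remember the head in first */
		atc_desc_chain(&first, &prev, desc);
	}
	/* first now heads the list; each lli.dscr points at the next lli */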

/**
 * atc_assign_cookie - compute and assign new cookie
 * @atchan: channel we work on
@@ -237,16 +260,12 @@ static void atc_dostart(struct at_dma_chan *atchan, struct at_desc *first)
static void
atc_chain_complete(struct at_dma_chan *atchan, struct at_desc *desc)
{
	dma_async_tx_callback		callback;
	void				*param;
	struct dma_async_tx_descriptor	*txd = &desc->txd;

	dev_vdbg(chan2dev(&atchan->chan_common),
		"descriptor %u complete\n", txd->cookie);

	atchan->completed_cookie = txd->cookie;
	callback = txd->callback;
	param = txd->callback_param;

	/* move children to free_list */
	list_splice_init(&desc->tx_list, &atchan->free_list);
@@ -278,12 +297,19 @@ atc_chain_complete(struct at_dma_chan *atchan, struct at_desc *desc)
		}
	}

	/* for cyclic transfers,
	 * no need to replay callback function while stopping */
	if (!test_bit(ATC_IS_CYCLIC, &atchan->status)) {
		dma_async_tx_callback	callback = txd->callback;
		void			*param = txd->callback_param;

		/*
		 * The API requires that no submissions are done from a
		 * callback, so we don't need to drop the lock here
		 */
		if (callback)
			callback(param);
	}

	dma_run_dependencies(txd);
}
@@ -419,6 +445,26 @@ static void atc_handle_error(struct at_dma_chan *atchan)
	atc_chain_complete(atchan, bad_desc);
}

/**
 * atc_handle_cyclic - at the end of a period, run callback function
 * @atchan: channel used for cyclic operations
 *
 * Called with atchan->lock held and bh disabled
 */
static void atc_handle_cyclic(struct at_dma_chan *atchan)
{
	struct at_desc			*first = atc_first_active(atchan);
	struct dma_async_tx_descriptor	*txd = &first->txd;
	dma_async_tx_callback		callback = txd->callback;
	void				*param = txd->callback_param;

	dev_vdbg(chan2dev(&atchan->chan_common),
			"new cyclic period llp 0x%08x\n",
			channel_readl(atchan, DSCR));

	if (callback)
		callback(param);
}

/*--  IRQ & Tasklet  ---------------------------------------------------*/

@@ -426,16 +472,11 @@ static void atc_tasklet(unsigned long data)
{
	struct at_dma_chan *atchan = (struct at_dma_chan *)data;

	/* Channel cannot be enabled here */
	if (atc_chan_is_enabled(atchan)) {
		dev_err(chan2dev(&atchan->chan_common),
			"BUG: channel enabled in tasklet\n");
		return;
	}

	spin_lock(&atchan->lock);
	if (test_and_clear_bit(0, &atchan->error_status))
	if (test_and_clear_bit(ATC_IS_ERROR, &atchan->status))
		atc_handle_error(atchan);
	else if (test_bit(ATC_IS_CYCLIC, &atchan->status))
		atc_handle_cyclic(atchan);
	else
		atc_advance_work(atchan);

@@ -464,12 +505,13 @@ static irqreturn_t at_dma_interrupt(int irq, void *dev_id)

		for (i = 0; i < atdma->dma_common.chancnt; i++) {
			atchan = &atdma->chan[i];
			if (pending & (AT_DMA_CBTC(i) | AT_DMA_ERR(i))) {
			if (pending & (AT_DMA_BTC(i) | AT_DMA_ERR(i))) {
				if (pending & AT_DMA_ERR(i)) {
					/* Disable channel on AHB error */
					dma_writel(atdma, CHDR, atchan->mask);
					dma_writel(atdma, CHDR,
						AT_DMA_RES(i) | atchan->mask);
					/* Give information to tasklet */
					set_bit(0, &atchan->error_status);
					set_bit(ATC_IS_ERROR, &atchan->status);
				}
				tasklet_schedule(&atchan->tasklet);
				ret = IRQ_HANDLED;
@@ -549,7 +591,7 @@ atc_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dest, dma_addr_t src,
	}

	ctrla =   ATC_DEFAULT_CTRLA;
	ctrlb =   ATC_DEFAULT_CTRLB
	ctrlb =   ATC_DEFAULT_CTRLB | ATC_IEN
		| ATC_SRC_ADDR_MODE_INCR
		| ATC_DST_ADDR_MODE_INCR
		| ATC_FC_MEM2MEM;
@@ -584,16 +626,7 @@ atc_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dest, dma_addr_t src,

		desc->txd.cookie = 0;

		if (!first) {
			first = desc;
		} else {
			/* inform the HW lli about chaining */
			prev->lli.dscr = desc->txd.phys;
			/* insert the link descriptor to the LD ring */
			list_add_tail(&desc->desc_node,
					&first->tx_list);
		}
		prev = desc;
		atc_desc_chain(&first, &prev, desc);
	}

	/* First descriptor of the chain embeds additional information */
@@ -639,7 +672,8 @@ atc_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
	struct scatterlist	*sg;
	size_t			total_len = 0;

	dev_vdbg(chan2dev(chan), "prep_slave_sg: %s f0x%lx\n",
	dev_vdbg(chan2dev(chan), "prep_slave_sg (%d): %s f0x%lx\n",
			sg_len,
			direction == DMA_TO_DEVICE ? "TO DEVICE" : "FROM DEVICE",
			flags);

@@ -651,14 +685,15 @@ atc_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
	reg_width = atslave->reg_width;

	ctrla = ATC_DEFAULT_CTRLA | atslave->ctrla;
	ctrlb = ATC_DEFAULT_CTRLB | ATC_IEN;
	ctrlb = ATC_IEN;

	switch (direction) {
	case DMA_TO_DEVICE:
		ctrla |=  ATC_DST_WIDTH(reg_width);
		ctrlb |=  ATC_DST_ADDR_MODE_FIXED
			| ATC_SRC_ADDR_MODE_INCR
			| ATC_FC_MEM2PER;
			| ATC_FC_MEM2PER
			| ATC_SIF(AT_DMA_MEM_IF) | ATC_DIF(AT_DMA_PER_IF);
		reg = atslave->tx_reg;
		for_each_sg(sgl, sg, sg_len, i) {
			struct at_desc	*desc;
@@ -682,16 +717,7 @@ atc_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
					| len >> mem_width;
			desc->lli.ctrlb = ctrlb;

			if (!first) {
				first = desc;
			} else {
				/* inform the HW lli about chaining */
				prev->lli.dscr = desc->txd.phys;
				/* insert the link descriptor to the LD ring */
				list_add_tail(&desc->desc_node,
						&first->tx_list);
			}
			prev = desc;
			atc_desc_chain(&first, &prev, desc);
			total_len += len;
		}
		break;
@@ -699,7 +725,8 @@ atc_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
		ctrla |=  ATC_SRC_WIDTH(reg_width);
		ctrlb |=  ATC_DST_ADDR_MODE_INCR
			| ATC_SRC_ADDR_MODE_FIXED
			| ATC_FC_PER2MEM;
			| ATC_FC_PER2MEM
			| ATC_SIF(AT_DMA_PER_IF) | ATC_DIF(AT_DMA_MEM_IF);

		reg = atslave->rx_reg;
		for_each_sg(sgl, sg, sg_len, i) {
@@ -724,16 +751,7 @@ atc_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
					| len >> reg_width;
			desc->lli.ctrlb = ctrlb;

			if (!first) {
				first = desc;
			} else {
				/* inform the HW lli about chaining */
				prev->lli.dscr = desc->txd.phys;
				/* insert the link descriptor to the LD ring */
				list_add_tail(&desc->desc_node,
						&first->tx_list);
			}
			prev = desc;
			atc_desc_chain(&first, &prev, desc);
			total_len += len;
		}
		break;
@@ -759,18 +777,180 @@ atc_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
	return NULL;
}

/**
 * atc_dma_cyclic_check_values
 * Check for too big/unaligned periods and unaligned DMA buffer
 */
static int
atc_dma_cyclic_check_values(unsigned int reg_width, dma_addr_t buf_addr,
		size_t period_len, enum dma_data_direction direction)
{
	if (period_len > (ATC_BTSIZE_MAX << reg_width))
		goto err_out;
	if (unlikely(period_len & ((1 << reg_width) - 1)))
		goto err_out;
	if (unlikely(buf_addr & ((1 << reg_width) - 1)))
		goto err_out;
	if (unlikely(!(direction & (DMA_TO_DEVICE | DMA_FROM_DEVICE))))
		goto err_out;

	return 0;

err_out:
	return -EINVAL;
}
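
The size and alignment rules above are easiest to see with concrete numbers. A standalone sketch of the same arithmetic (plain C, not part of the driver; ATC_BTSIZE_MAX is assumed to be the 0xffff value from at_hdmac_regs.h):

#include <stdio.h>

#define ATC_BTSIZE_MAX	0xffffUL	/* max per-descriptor transfer count (assumed) */

/* mirrors the checks in atc_dma_cyclic_check_values(), minus the direction test */
static int cyclic_period_ok(unsigned int reg_width, unsigned long buf_addr,
			    unsigned long period_len)
{
	if (period_len > (ATC_BTSIZE_MAX << reg_width))
		return 0;	/* one descriptor cannot cover the whole period */
	if (period_len & ((1UL << reg_width) - 1))
		return 0;	/* period is not a multiple of the transfer width */
	if (buf_addr & ((1UL << reg_width) - 1))
		return 0;	/* buffer start is not width-aligned */
	return 1;
}

int main(void)
{
	/* 32-bit transfers (reg_width == 2), 4 KiB period */
	printf("%d\n", cyclic_period_ok(2, 0x20001000UL, 4096));	/* 1: accepted */
	printf("%d\n", cyclic_period_ok(2, 0x20001002UL, 4096));	/* 0: unaligned start */
	printf("%d\n", cyclic_period_ok(2, 0x20000000UL, 0x50000));	/* 0: period too long */
	return 0;
}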

/**
 * atc_dma_cyclic_fill_desc - Fill one period descriptor
 */
static int
atc_dma_cyclic_fill_desc(struct at_dma_slave *atslave, struct at_desc *desc,
		unsigned int period_index, dma_addr_t buf_addr,
		size_t period_len, enum dma_data_direction direction)
{
	u32		ctrla;
	unsigned int	reg_width = atslave->reg_width;

	/* prepare common CTRLA value */
	ctrla =   ATC_DEFAULT_CTRLA | atslave->ctrla
		| ATC_DST_WIDTH(reg_width)
		| ATC_SRC_WIDTH(reg_width)
		| period_len >> reg_width;

	switch (direction) {
	case DMA_TO_DEVICE:
		desc->lli.saddr = buf_addr + (period_len * period_index);
		desc->lli.daddr = atslave->tx_reg;
		desc->lli.ctrla = ctrla;
		desc->lli.ctrlb = ATC_DST_ADDR_MODE_FIXED
				| ATC_SRC_ADDR_MODE_INCR
				| ATC_FC_MEM2PER
				| ATC_SIF(AT_DMA_MEM_IF)
				| ATC_DIF(AT_DMA_PER_IF);
		break;

	case DMA_FROM_DEVICE:
		desc->lli.saddr = atslave->rx_reg;
		desc->lli.daddr = buf_addr + (period_len * period_index);
		desc->lli.ctrla = ctrla;
		desc->lli.ctrlb = ATC_DST_ADDR_MODE_INCR
				| ATC_SRC_ADDR_MODE_FIXED
				| ATC_FC_PER2MEM
				| ATC_SIF(AT_DMA_PER_IF)
				| ATC_DIF(AT_DMA_MEM_IF);
		break;

	default:
		return -EINVAL;
	}

	return 0;
}

/**
 * atc_prep_dma_cyclic - prepare the cyclic DMA transfer
 * @chan: the DMA channel to prepare
 * @buf_addr: physical DMA address where the buffer starts
 * @buf_len: total number of bytes for the entire buffer
 * @period_len: number of bytes for each period
 * @direction: transfer direction, to or from device
 */
static struct dma_async_tx_descriptor *
atc_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len,
		size_t period_len, enum dma_data_direction direction)
{
	struct at_dma_chan	*atchan = to_at_dma_chan(chan);
	struct at_dma_slave	*atslave = chan->private;
	struct at_desc		*first = NULL;
	struct at_desc		*prev = NULL;
	unsigned long		was_cyclic;
	unsigned int		periods = buf_len / period_len;
	unsigned int		i;

	dev_vdbg(chan2dev(chan), "prep_dma_cyclic: %s buf@0x%08x - %d (%d/%d)\n",
			direction == DMA_TO_DEVICE ? "TO DEVICE" : "FROM DEVICE",
			buf_addr,
			periods, buf_len, period_len);

	if (unlikely(!atslave || !buf_len || !period_len)) {
		dev_dbg(chan2dev(chan), "prep_dma_cyclic: length is zero!\n");
		return NULL;
	}

	was_cyclic = test_and_set_bit(ATC_IS_CYCLIC, &atchan->status);
	if (was_cyclic) {
		dev_dbg(chan2dev(chan), "prep_dma_cyclic: channel in use!\n");
		return NULL;
	}

	/* Check for too big/unaligned periods and unaligned DMA buffer */
	if (atc_dma_cyclic_check_values(atslave->reg_width, buf_addr,
					period_len, direction))
		goto err_out;

	/* build cyclic linked list */
	for (i = 0; i < periods; i++) {
		struct at_desc	*desc;

		desc = atc_desc_get(atchan);
		if (!desc)
			goto err_desc_get;

		if (atc_dma_cyclic_fill_desc(atslave, desc, i, buf_addr,
						period_len, direction))
			goto err_desc_get;

		atc_desc_chain(&first, &prev, desc);
	}

	/* let's make a cyclic list */
	prev->lli.dscr = first->txd.phys;

	/* First descriptor of the chain embeds additional information */
	first->txd.cookie = -EBUSY;
	first->len = buf_len;

	return &first->txd;

err_desc_get:
	dev_err(chan2dev(chan), "not enough descriptors available\n");
	atc_desc_put(atchan, first);
err_out:
	clear_bit(ATC_IS_CYCLIC, &atchan->status);
	return NULL;
}
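
From the client side this new callback is reached through the dma_device operations. A minimal sketch of how a client of this kernel era might start a cyclic transmit (illustrative only: start_cyclic_tx() and period_elapsed() are hypothetical names, error handling is trimmed, and the dmaengine_submit() wrapper is assumed to be present in this tree):

#include <linux/dmaengine.h>

/* hypothetical per-period callback, invoked from atc_handle_cyclic() above */
static void period_elapsed(void *arg)
{
	/* e.g. advance a ring-buffer pointer */
}

static int start_cyclic_tx(struct dma_chan *chan, dma_addr_t buf,
			   size_t buf_len, size_t period_len)
{
	struct dma_async_tx_descriptor *txd;

	/* one descriptor per period, looped back on itself by the driver */
	txd = chan->device->device_prep_dma_cyclic(chan, buf, buf_len,
						   period_len, DMA_TO_DEVICE);
	if (!txd)
		return -EBUSY;

	txd->callback = period_elapsed;
	txd->callback_param = NULL;

	/* submitting starts the channel; issue_pending is a no-op for cyclic */
	dmaengine_submit(txd);
	return 0;
}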


static int atc_control(struct dma_chan *chan, enum dma_ctrl_cmd cmd,
		       unsigned long arg)
{
	struct at_dma_chan	*atchan = to_at_dma_chan(chan);
	struct at_dma		*atdma = to_at_dma(chan->device);
	struct at_desc		*desc, *_desc;
	int			chan_id = atchan->chan_common.chan_id;

	LIST_HEAD(list);

	/* Only supports DMA_TERMINATE_ALL */
	if (cmd != DMA_TERMINATE_ALL)
		return -ENXIO;
	dev_vdbg(chan2dev(chan), "atc_control (%d)\n", cmd);

	if (cmd == DMA_PAUSE) {
		spin_lock_bh(&atchan->lock);

		dma_writel(atdma, CHER, AT_DMA_SUSP(chan_id));
		set_bit(ATC_IS_PAUSED, &atchan->status);

		spin_unlock_bh(&atchan->lock);
	} else if (cmd == DMA_RESUME) {
		if (!test_bit(ATC_IS_PAUSED, &atchan->status))
			return 0;

		spin_lock_bh(&atchan->lock);

		dma_writel(atdma, CHDR, AT_DMA_RES(chan_id));
		clear_bit(ATC_IS_PAUSED, &atchan->status);

		spin_unlock_bh(&atchan->lock);
	} else if (cmd == DMA_TERMINATE_ALL) {
		struct at_desc	*desc, *_desc;
		/*
		 * This is only called when something went wrong elsewhere, so
		 * we don't really care about the data. Just disable the
@@ -779,7 +959,8 @@ static int atc_control(struct dma_chan *chan, enum dma_ctrl_cmd cmd,
		 */
		spin_lock_bh(&atchan->lock);

	dma_writel(atdma, CHDR, atchan->mask);
		/* disabling channel: must also remove suspend state */
		dma_writel(atdma, CHDR, AT_DMA_RES(chan_id) | atchan->mask);

		/* confirm that this channel is disabled */
		while (dma_readl(atdma, CHSR) & atchan->mask)
@@ -793,7 +974,14 @@ static int atc_control(struct dma_chan *chan, enum dma_ctrl_cmd cmd,
		list_for_each_entry_safe(desc, _desc, &list, desc_node)
			atc_chain_complete(atchan, desc);

		clear_bit(ATC_IS_PAUSED, &atchan->status);
		/* if channel dedicated to cyclic operations, free it */
		clear_bit(ATC_IS_CYCLIC, &atchan->status);

		spin_unlock_bh(&atchan->lock);
	} else {
		return -ENXIO;
	}

	return 0;
}
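
The new PAUSE/RESUME branches are driven from client code through the device_control operation. A minimal sketch, assuming the dmaengine_pause()/dmaengine_resume() wrappers available in kernels of this era (otherwise chan->device->device_control() can be called directly):

#include <linux/dmaengine.h>

/* suspend or resume a slave channel without discarding its descriptors */
static int throttle_channel(struct dma_chan *chan, bool pause)
{
	if (pause)
		return dmaengine_pause(chan);	/* atc_control(): AT_DMA_SUSP, sets ATC_IS_PAUSED */

	return dmaengine_resume(chan);		/* atc_control(): AT_DMA_RES, clears ATC_IS_PAUSED */
}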
@@ -835,9 +1023,17 @@ atc_tx_status(struct dma_chan *chan,

	spin_unlock_bh(&atchan->lock);

	if (ret != DMA_SUCCESS)
		dma_set_tx_state(txstate, last_complete, last_used,
			atc_first_active(atchan)->len);
	else
		dma_set_tx_state(txstate, last_complete, last_used, 0);
	dev_vdbg(chan2dev(chan), "tx_status: %d (d%d, u%d)\n",
		 cookie, last_complete ? last_complete : 0,

	if (test_bit(ATC_IS_PAUSED, &atchan->status))
		ret = DMA_PAUSED;

	dev_vdbg(chan2dev(chan), "tx_status %d: cookie = %d (d%d, u%d)\n",
		 ret, cookie, last_complete ? last_complete : 0,
		 last_used ? last_used : 0);

	return ret;
@@ -853,6 +1049,10 @@ static void atc_issue_pending(struct dma_chan *chan)

	dev_vdbg(chan2dev(chan), "issue_pending\n");

	/* Not needed for cyclic transfers */
	if (test_bit(ATC_IS_CYCLIC, &atchan->status))
		return;

	spin_lock_bh(&atchan->lock);
	if (!atc_chan_is_enabled(atchan)) {
		atc_advance_work(atchan);
@@ -959,6 +1159,7 @@ static void atc_free_chan_resources(struct dma_chan *chan)
	}
	list_splice_init(&atchan->free_list, &list);
	atchan->descs_allocated = 0;
	atchan->status = 0;

	dev_vdbg(chan2dev(chan), "free_chan_resources: done\n");
}
@@ -1092,10 +1293,15 @@ static int __init at_dma_probe(struct platform_device *pdev)
	if (dma_has_cap(DMA_MEMCPY, atdma->dma_common.cap_mask))
		atdma->dma_common.device_prep_dma_memcpy = atc_prep_dma_memcpy;

	if (dma_has_cap(DMA_SLAVE, atdma->dma_common.cap_mask)) {
	if (dma_has_cap(DMA_SLAVE, atdma->dma_common.cap_mask))
		atdma->dma_common.device_prep_slave_sg = atc_prep_slave_sg;

	if (dma_has_cap(DMA_CYCLIC, atdma->dma_common.cap_mask))
		atdma->dma_common.device_prep_dma_cyclic = atc_prep_dma_cyclic;

	if (dma_has_cap(DMA_SLAVE, atdma->dma_common.cap_mask) ||
	    dma_has_cap(DMA_CYCLIC, atdma->dma_common.cap_mask))
		atdma->dma_common.device_control = atc_control;
	}

	dma_writel(atdma, EN, AT_DMA_ENABLE);

drivers/dma/at_hdmac_regs.h +25 −5
@@ -103,6 +103,10 @@
/* Bitfields in CTRLB */
#define	ATC_SIF(i)		(0x3 & (i))	/* Src tx done via AHB-Lite Interface i */
#define	ATC_DIF(i)		((0x3 & (i)) <<  4)	/* Dst tx done via AHB-Lite Interface i */
				  /* Specify AHB interfaces */
#define AT_DMA_MEM_IF		0 /* interface 0 as memory interface */
#define AT_DMA_PER_IF		1 /* interface 1 as peripheral interface */

#define	ATC_SRC_PIP		(0x1 <<  8)	/* Source Picture-in-Picture enabled */
#define	ATC_DST_PIP		(0x1 << 12)	/* Destination Picture-in-Picture enabled */
#define	ATC_SRC_DSCR_DIS	(0x1 << 16)	/* Src Descriptor fetch disable */
@@ -180,13 +184,24 @@ txd_to_at_desc(struct dma_async_tx_descriptor *txd)

/*--  Channels  --------------------------------------------------------*/

/**
 * atc_status - information bits stored in channel status flag
 *
 * Manipulated with atomic operations.
 */
enum atc_status {
	ATC_IS_ERROR = 0,
	ATC_IS_PAUSED = 1,
	ATC_IS_CYCLIC = 24,
};
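
These flags all live in the channel's single status word and, as the kernel-doc note says, are only touched with atomic bitops. A condensed illustration of the pattern (hypothetical helpers mirroring the uses in at_hdmac.c, not code from the commit):

#include <linux/bitops.h>

/* IRQ handler side: record an AHB error for the tasklet to pick up */
static void note_error(unsigned long *status)
{
	set_bit(ATC_IS_ERROR, status);
}

/* tasklet side: consume the error flag exactly once */
static bool fetch_and_clear_error(unsigned long *status)
{
	return test_and_clear_bit(ATC_IS_ERROR, status);
}

/* prep_dma_cyclic() side: claim the channel for cyclic use, false if taken */
static bool claim_for_cyclic(unsigned long *status)
{
	return !test_and_set_bit(ATC_IS_CYCLIC, status);
}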

/**
 * struct at_dma_chan - internal representation of an Atmel HDMAC channel
 * @chan_common: common dmaengine channel object members
 * @device: parent device
 * @ch_regs: memory mapped register base
 * @mask: channel index in a mask
 * @error_status: transmit error status information from irq handler
 * @status: transmit status information from irq/prep* functions
 *                to tasklet (use atomic operations)
 * @tasklet: bottom half to finish transaction work
 * @lock: serializes enqueue/dequeue operations to descriptors lists
@@ -201,7 +216,7 @@ struct at_dma_chan {
	struct at_dma		*device;
	void __iomem		*ch_regs;
	u8			mask;
	unsigned long		error_status;
	unsigned long		status;
	struct tasklet_struct	tasklet;

	spinlock_t		lock;
@@ -309,8 +324,8 @@ static void atc_setup_irq(struct at_dma_chan *atchan, int on)
	struct at_dma	*atdma = to_at_dma(atchan->chan_common.device);
	u32		ebci;

	/* enable interrupts on buffer chain completion & error */
	ebci =    AT_DMA_CBTC(atchan->chan_common.chan_id)
	/* enable interrupts on buffer transfer completion & error */
	ebci =    AT_DMA_BTC(atchan->chan_common.chan_id)
		| AT_DMA_ERR(atchan->chan_common.chan_id);
	if (on)
		dma_writel(atdma, EBCIER, ebci);
@@ -347,7 +362,12 @@ static inline int atc_chan_is_enabled(struct at_dma_chan *atchan)
 */
static void set_desc_eol(struct at_desc *desc)
{
	desc->lli.ctrlb |= ATC_SRC_DSCR_DIS | ATC_DST_DSCR_DIS;
	u32 ctrlb = desc->lli.ctrlb;

	ctrlb &= ~ATC_IEN;
	ctrlb |= ATC_SRC_DSCR_DIS | ATC_DST_DSCR_DIS;

	desc->lli.ctrlb = ctrlb;
	desc->lli.dscr = 0;
}
