Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit b5e55556 authored by Christoph Hellwig, committed by Jens Axboe
Browse files

libata: switch remaining drivers to use dma_set_mask_and_coherent



Use dma_set_mask_and_coherent instead of separate dma_set_mask and
dma_set_coherent_mask calls.

Signed-off-by: Christoph Hellwig <hch@lst.de>
Signed-off-by: Jens Axboe <axboe@kernel.dk>
parent dcc02c19
Loading
Loading
Loading
Loading
+1 −7
Original line number Diff line number Diff line
@@ -3147,15 +3147,9 @@ void ata_pci_bmdma_init(struct ata_host *host)
	 * ->sff_irq_clear method.  Try to initialize bmdma_addr
	 * regardless of dma masks.
	 */
	rc = dma_set_mask(&pdev->dev, ATA_DMA_MASK);
	rc = dma_set_mask_and_coherent(&pdev->dev, ATA_DMA_MASK);
	if (rc)
		ata_bmdma_nodma(host, "failed to set dma mask");
	if (!rc) {
		rc = dma_set_coherent_mask(&pdev->dev, ATA_DMA_MASK);
		if (rc)
			ata_bmdma_nodma(host,
					"failed to set consistent dma mask");
	}

	/* request and iomap DMA region */
	rc = pcim_iomap_regions(pdev, 1 << 4, dev_driver_string(gdev));
+1 −6
Original line number Diff line number Diff line
@@ -463,12 +463,7 @@ static int atp867x_ata_pci_sff_init_host(struct ata_host *host)

	atp867x_fixup(host);

	rc = dma_set_mask(&pdev->dev, ATA_DMA_MASK);
	if (rc)
		return rc;

	rc = dma_set_coherent_mask(&pdev->dev, ATA_DMA_MASK);
	return rc;
	return dma_set_mask_and_coherent(&pdev->dev, ATA_DMA_MASK);
}

static int atp867x_init_one(struct pci_dev *pdev,
+1 −5
Original line number Diff line number Diff line
@@ -155,14 +155,10 @@ static int cs5520_init_one(struct pci_dev *pdev, const struct pci_device_id *id)
		return -ENODEV;
	}

	if (dma_set_mask(&pdev->dev, DMA_BIT_MASK(32))) {
	if (dma_set_mask_and_coherent(&pdev->dev, DMA_BIT_MASK(32))) {
		printk(KERN_ERR DRV_NAME ": unable to configure DMA mask.\n");
		return -ENODEV;
	}
	if (dma_set_coherent_mask(&pdev->dev, DMA_BIT_MASK(32))) {
		printk(KERN_ERR DRV_NAME ": unable to configure consistent DMA mask.\n");
		return -ENODEV;
	}

	/* Map IO ports and initialize host accordingly */
	iomap[0] = devm_ioport_map(&pdev->dev, cmd_port[0], 8);
+1 −4
Original line number Diff line number Diff line
@@ -221,10 +221,7 @@ static int hpt3x3_init_one(struct pci_dev *pdev, const struct pci_device_id *id)
	if (rc)
		return rc;
	host->iomap = pcim_iomap_table(pdev);
	rc = dma_set_mask(&pdev->dev, ATA_DMA_MASK);
	if (rc)
		return rc;
	rc = dma_set_coherent_mask(&pdev->dev, ATA_DMA_MASK);
	rc = dma_set_mask_and_coherent(&pdev->dev, ATA_DMA_MASK);
	if (rc)
		return rc;

+1 −4
Original line number Diff line number Diff line
@@ -123,10 +123,7 @@ static int ninja32_init_one(struct pci_dev *dev, const struct pci_device_id *id)
		return rc;

	host->iomap = pcim_iomap_table(dev);
	rc = dma_set_mask(&dev->dev, ATA_DMA_MASK);
	if (rc)
		return rc;
	rc = dma_set_coherent_mask(&dev->dev, ATA_DMA_MASK);
	rc = dma_set_mask_and_coherent(&dev->dev, ATA_DMA_MASK);
	if (rc)
		return rc;
	pci_set_master(dev);
Loading