Patch summary: in dma_direct_alloc() (kernel/dma/direct.c) and swiotlb_alloc() (kernel/dma/swiotlb.c), skip the fallback allocation path when the caller passed DMA_ATTR_STRONGLY_ORDERED or DMA_ATTR_NO_KERNEL_MAPPING, since those attributes require an allocation the fallback paths cannot satisfy. Two files changed, +4 −2; the per-file hunks follow below.
--- a/kernel/dma/direct.c
+++ b/kernel/dma/direct.c
@@ -85,7 +85,8 @@ void *dma_direct_alloc(struct device *dev, size_t size, dma_addr_t *dma_handle,
 			page = NULL;
 		}
 	}
-	if (!page)
+	if (!page &&
+	    !(attrs & (DMA_ATTR_STRONGLY_ORDERED | DMA_ATTR_NO_KERNEL_MAPPING)))
 		page = alloc_pages_node(dev_to_node(dev), gfp, page_order);

 	if (page && !dma_coherent_ok(dev, page_to_phys(page), size)) {
--- a/kernel/dma/swiotlb.c
+++ b/kernel/dma/swiotlb.c
@@ -1026,7 +1026,8 @@ void *swiotlb_alloc(struct device *dev, size_t size, dma_addr_t *dma_handle,
 	gfp |= __GFP_NOWARN;

 	vaddr = dma_direct_alloc(dev, size, dma_handle, gfp, attrs);
-	if (!vaddr)
+	if (!vaddr &&
+	    !(attrs & (DMA_ATTR_STRONGLY_ORDERED | DMA_ATTR_NO_KERNEL_MAPPING)))
 		vaddr = swiotlb_alloc_buffer(dev, size, dma_handle, attrs);
 	return vaddr;
 }