kernel/dma/direct.c (+5 −7)

--- a/kernel/dma/direct.c
+++ b/kernel/dma/direct.c
@@ -108,7 +108,7 @@ static inline bool dma_should_free_from_pool(struct device *dev,
 }
 
 static struct page *__dma_direct_alloc_pages(struct device *dev, size_t size,
-		gfp_t gfp, unsigned long attrs)
+		gfp_t gfp)
 {
 	int node = dev_to_node(dev);
 	struct page *page = NULL;
@@ -116,11 +116,6 @@ static struct page *__dma_direct_alloc_pages(struct device *dev, size_t size,
 
 	WARN_ON_ONCE(!PAGE_ALIGNED(size));
 
-	if (attrs & DMA_ATTR_NO_WARN)
-		gfp |= __GFP_NOWARN;
-
-	/* we always manually zero the memory once we are done: */
-	gfp &= ~__GFP_ZERO;
 	gfp |= dma_direct_optimal_gfp_mask(dev, dev->coherent_dma_mask,
 					   &phys_limit);
 	page = dma_alloc_contiguous(dev, size, gfp);
@@ -164,6 +159,8 @@ void *dma_direct_alloc(struct device *dev, size_t size,
 		return arch_dma_alloc(dev, size, dma_handle, gfp, attrs);
 
 	size = PAGE_ALIGN(size);
+	if (attrs & DMA_ATTR_NO_WARN)
+		gfp |= __GFP_NOWARN;
 
 	if (dma_should_alloc_from_pool(dev, gfp, attrs)) {
 		u64 phys_mask;
@@ -177,7 +174,8 @@ void *dma_direct_alloc(struct device *dev, size_t size,
 		goto done;
 	}
 
-	page = __dma_direct_alloc_pages(dev, size, gfp, attrs);
+	/* we always manually zero the memory once we are done */
+	page = __dma_direct_alloc_pages(dev, size, gfp & ~__GFP_ZERO);
 	if (!page)
 		return NULL;
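Taken together, the hunks lift all gfp_t manipulation out of __dma_direct_alloc_pages() into its caller: dma_direct_alloc() now translates DMA_ATTR_NO_WARN into __GFP_NOWARN exactly once and strips __GFP_ZERO at the call site, so the helper receives a fully prepared mask and no longer needs the attrs argument. Below is a minimal sketch of the resulting shape, not the full kernel code: error handling, the dma_should_alloc_from_pool() branch, and helper bodies are elided, and the elided steps are marked as comments.

/* Simplified post-patch shape; bodies and error paths elided. */

/* The helper no longer inspects attrs: it trusts the gfp it is given. */
static struct page *__dma_direct_alloc_pages(struct device *dev, size_t size,
		gfp_t gfp)
{
	/* ... pick an allocation zone and allocate page-aligned memory ... */
}

void *dma_direct_alloc(struct device *dev, size_t size,
		dma_addr_t *dma_handle, gfp_t gfp, unsigned long attrs)
{
	struct page *page;

	size = PAGE_ALIGN(size);

	/* DMA_ATTR_NO_WARN is now translated to __GFP_NOWARN here, once. */
	if (attrs & DMA_ATTR_NO_WARN)
		gfp |= __GFP_NOWARN;

	/*
	 * The buffer is zeroed manually later, so __GFP_ZERO is masked off
	 * at the one remaining call site instead of inside the helper.
	 */
	page = __dma_direct_alloc_pages(dev, size, gfp & ~__GFP_ZERO);
	if (!page)
		return NULL;
	/* ... set *dma_handle, zero the memory, and return the buffer ... */
}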