/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _LINUX_DMA_NONCOHERENT_H
#define _LINUX_DMA_NONCOHERENT_H 1

#include <linux/dma-mapping.h>
#include <asm/pgtable.h>

#ifdef CONFIG_ARCH_HAS_DMA_COHERENCE_H
#include <asm/dma-coherence.h>
#elif defined(CONFIG_ARCH_HAS_SYNC_DMA_FOR_DEVICE) || \
        defined(CONFIG_ARCH_HAS_SYNC_DMA_FOR_CPU) || \
        defined(CONFIG_ARCH_HAS_SYNC_DMA_FOR_CPU_ALL)
/*
 * Architectures that implement any of the cache maintenance hooks keep a
 * per-device coherence flag; everyone else is assumed to be coherent.
 */
static inline bool dev_is_dma_coherent(struct device *dev)
{
        return dev->dma_coherent;
}
#else
static inline bool dev_is_dma_coherent(struct device *dev)
{
        return true;
}
#endif /* CONFIG_ARCH_HAS_DMA_COHERENCE_H */
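
/*
 * Illustrative sketch (not part of the original header): the direct
 * mapping code keys all cache maintenance off dev_is_dma_coherent().
 * A simplified streaming map path looks roughly like this, with
 * example_map_page being a hypothetical name (the real logic lives in
 * kernel/dma/direct.c):
 *
 *      dma_addr_t example_map_page(struct device *dev, struct page *page,
 *                      unsigned long offset, size_t size,
 *                      enum dma_data_direction dir, unsigned long attrs)
 *      {
 *              phys_addr_t phys = page_to_phys(page) + offset;
 *
 *              if (!dev_is_dma_coherent(dev) &&
 *                  !(attrs & DMA_ATTR_SKIP_CPU_SYNC))
 *                      arch_sync_dma_for_device(phys, size, dir);
 *              return phys_to_dma(dev, phys);
 *      }
 */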

#ifdef CONFIG_DMA_COHERENT_HINT_CACHED
/*
 * Optional per-device hint that buffers for this device may be kept
 * cacheable; always false unless the configuration selects
 * CONFIG_DMA_COHERENT_HINT_CACHED.
 */
static inline bool dev_is_dma_coherent_hint_cached(struct device *dev)
{
        return dev->dma_coherent_hint_cached;
}
#else
static inline bool dev_is_dma_coherent_hint_cached(struct device *dev)
{
        return false;
}
#endif /* CONFIG_DMA_COHERENT_HINT_CACHED */
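
/*
 * Illustrative sketch (hypothetical; how the hint is consumed is up to
 * platform and mapping code, not this header): an allocator could keep
 * the kernel mapping cacheable when the hint is set:
 *
 *      pgprot_t prot = PAGE_KERNEL;
 *
 *      if (!dev_is_dma_coherent(dev) &&
 *          !dev_is_dma_coherent_hint_cached(dev))
 *              prot = pgprot_dmacoherent(prot);
 */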

/*
 * Check if an allocation needs to be marked uncached to be coherent.
 */
static __always_inline bool dma_alloc_need_uncached(struct device *dev,
                unsigned long attrs)
{
        /* explicit force attributes override the device setting */
        if (attrs & DMA_ATTR_FORCE_NON_COHERENT)
                return true;
        if (attrs & DMA_ATTR_FORCE_COHERENT)
                return false;
        if (dev_is_dma_coherent(dev))
                return false;
        if (attrs & DMA_ATTR_NO_KERNEL_MAPPING)
                return false;
        if (IS_ENABLED(CONFIG_DMA_NONCOHERENT_CACHE_SYNC) &&
            (attrs & DMA_ATTR_NON_CONSISTENT))
                return false;
        return true;
}
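
/*
 * Illustrative sketch (simplified from kernel/dma/direct.c): the direct
 * allocator uses dma_alloc_need_uncached() to decide whether a buffer
 * must be remapped uncached, or served from the uncached atomic pool
 * when the caller cannot block:
 *
 *      if (IS_ENABLED(CONFIG_DMA_DIRECT_REMAP) &&
 *          dma_alloc_need_uncached(dev, attrs) &&
 *          !gfpflags_allow_blocking(gfp))
 *              return dma_alloc_from_pool(size, &page, gfp);
 *
 * (the exact dma_alloc_from_pool() signature varies by kernel version)
 */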

void *arch_dma_alloc(struct device *dev, size_t size, dma_addr_t *dma_handle,
                gfp_t gfp, unsigned long attrs);
void arch_dma_free(struct device *dev, size_t size, void *cpu_addr,
                dma_addr_t dma_addr, unsigned long attrs);
/* translate a coherent allocation back to a pfn for mmap/get_sgtable */
long arch_dma_coherent_to_pfn(struct device *dev, void *cpu_addr,
                dma_addr_t dma_addr);

#ifdef CONFIG_MMU
/*
 * Page protection so that devices that can't snoop CPU caches can use the
 * memory coherently.  We default to pgprot_noncached, which is usually used
 * for ioremap as a safe bet, but architectures can override this with less
 * strict semantics if possible.
 */
#ifndef pgprot_dmacoherent
#define pgprot_dmacoherent(prot)        pgprot_noncached(prot)
#endif
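
/*
 * arm64, for example, overrides this to a Normal Non-cacheable,
 * execute-never mapping, which is weaker than full Device memory but
 * still safe for non-snooping DMA (quoted approximately from
 * arch/arm64/include/asm/pgtable.h):
 *
 *      #define pgprot_dmacoherent(prot) \
 *              __pgprot_modify(prot, PTE_ATTRINDX_MASK, \
 *                              PTE_ATTRINDX(MT_NORMAL_NC) | PTE_PXN | PTE_UXN)
 */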

pgprot_t dma_pgprot(struct device *dev, pgprot_t prot, unsigned long attrs);
#else
static inline pgprot_t dma_pgprot(struct device *dev, pgprot_t prot,
                unsigned long attrs)
{
        return prot;    /* no protection bits supported without page tables */
}
#endif /* CONFIG_MMU */
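
/*
 * dma_pgprot() is applied when coherent memory is mapped into userspace,
 * e.g. in dma_common_mmap() (simplified from kernel/dma/mapping.c):
 *
 *      vma->vm_page_prot = dma_pgprot(dev, vma->vm_page_prot, attrs);
 */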

#ifdef CONFIG_DMA_NONCOHERENT_CACHE_SYNC
void arch_dma_cache_sync(struct device *dev, void *vaddr, size_t size,
                enum dma_data_direction direction);
#else
static inline void arch_dma_cache_sync(struct device *dev, void *vaddr,
                size_t size, enum dma_data_direction direction)
{
}
#endif /* CONFIG_DMA_NONCOHERENT_CACHE_SYNC */
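
/*
 * Illustrative sketch (simplified from kernel/dma/mapping.c):
 * arch_dma_cache_sync() backs the dma_cache_sync() API for memory
 * allocated with DMA_ATTR_NON_CONSISTENT:
 *
 *      void dma_cache_sync(struct device *dev, void *vaddr, size_t size,
 *                      enum dma_data_direction dir)
 *      {
 *              if (dma_is_direct(get_dma_ops(dev)))
 *                      arch_dma_cache_sync(dev, vaddr, size, dir);
 *      }
 */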

#ifdef CONFIG_ARCH_HAS_SYNC_DMA_FOR_DEVICE
void arch_sync_dma_for_device(phys_addr_t paddr, size_t size,
                enum dma_data_direction dir);
#else
static inline void arch_sync_dma_for_device(phys_addr_t paddr, size_t size,
                enum dma_data_direction dir)
{
}
#endif /* ARCH_HAS_SYNC_DMA_FOR_DEVICE */

#ifdef CONFIG_ARCH_HAS_SYNC_DMA_FOR_CPU
void arch_sync_dma_for_cpu(phys_addr_t paddr, size_t size,
                enum dma_data_direction dir);
#else
static inline void arch_sync_dma_for_cpu(phys_addr_t paddr, size_t size,
                enum dma_data_direction dir)
{
}
#endif /* ARCH_HAS_SYNC_DMA_FOR_CPU */

#ifdef CONFIG_ARCH_HAS_SYNC_DMA_FOR_CPU_ALL
void arch_sync_dma_for_cpu_all(void);
#else
static inline void arch_sync_dma_for_cpu_all(void)
{
}
#endif /* CONFIG_ARCH_HAS_SYNC_DMA_FOR_CPU_ALL */
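
/*
 * Illustrative sketch (simplified from kernel/dma/direct.c): the
 * sync-for-cpu side pairs arch_sync_dma_for_cpu() with the global
 * arch_sync_dma_for_cpu_all() hook, which some platforms (e.g. certain
 * MIPS systems) use for maintenance that is not tied to one address
 * range.  example_sync_single_for_cpu is a hypothetical name:
 *
 *      void example_sync_single_for_cpu(struct device *dev, dma_addr_t addr,
 *                      size_t size, enum dma_data_direction dir)
 *      {
 *              phys_addr_t paddr = dma_to_phys(dev, addr);
 *
 *              if (!dev_is_dma_coherent(dev)) {
 *                      arch_sync_dma_for_cpu(paddr, size, dir);
 *                      arch_sync_dma_for_cpu_all();
 *              }
 *      }
 */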

#ifdef CONFIG_ARCH_HAS_DMA_PREP_COHERENT
void arch_dma_prep_coherent(struct page *page, size_t size);
#else
static inline void arch_dma_prep_coherent(struct page *page, size_t size)
{
}
#endif /* CONFIG_ARCH_HAS_DMA_PREP_COHERENT */
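
/*
 * Illustrative sketch: arch_dma_prep_coherent() is called on freshly
 * allocated pages before they are handed out or remapped uncached, so
 * that no dirty cache lines alias the new buffer:
 *
 *      struct page *page = alloc_pages(gfp, get_order(size));
 *
 *      if (page)
 *              arch_dma_prep_coherent(page, size);
 *      ...then remap with pgprot_dmacoherent(PAGE_KERNEL), or return an
 *      uncached alias of page_address(page)...
 */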

/*
 * Helpers for architectures that map all of kernel memory through a
 * permanent uncached alias (CONFIG_ARCH_HAS_UNCACHED_SEGMENT).
 */
void *uncached_kernel_address(void *addr);
void *cached_kernel_address(void *addr);
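
/*
 * Illustrative sketch (simplified from kernel/dma/direct.c): on such
 * architectures the allocator flushes the buffer once and then returns
 * the uncached address; MIPS, for instance, returns the buffer's
 * uncached-segment alias:
 *
 *      if (IS_ENABLED(CONFIG_ARCH_HAS_UNCACHED_SEGMENT) &&
 *          dma_alloc_need_uncached(dev, attrs)) {
 *              arch_dma_prep_coherent(page, size);
 *              ret = uncached_kernel_address(ret);
 *      }
 */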

#endif /* _LINUX_DMA_NONCOHERENT_H */