Diffstat (limited to 'arch/arm64/include')
-rw-r--r--  arch/arm64/include/asm/cache.h       |  6
-rw-r--r--  arch/arm64/include/asm/dma-direct.h  | 43
2 files changed, 3 insertions(+), 46 deletions(-)
diff --git a/arch/arm64/include/asm/cache.h b/arch/arm64/include/asm/cache.h
index 5df5cfe1c143..9bbffc7a301f 100644
--- a/arch/arm64/include/asm/cache.h
+++ b/arch/arm64/include/asm/cache.h
@@ -33,7 +33,7 @@
#define ICACHE_POLICY_VIPT 2
#define ICACHE_POLICY_PIPT 3

-#define L1_CACHE_SHIFT (6)
+#define L1_CACHE_SHIFT 7
#define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)

/*
@@ -43,7 +43,7 @@
* cache before the transfer is done, causing old data to be seen by
* the CPU.
*/
-#define ARCH_DMA_MINALIGN (128)
+#define ARCH_DMA_MINALIGN L1_CACHE_BYTES

#ifndef __ASSEMBLY__

@@ -77,7 +77,7 @@ static inline u32 cache_type_cwg(void)
static inline int cache_line_size(void)
{
u32 cwg = cache_type_cwg();
- return cwg ? 4 << cwg : ARCH_DMA_MINALIGN;
+ return cwg ? 4 << cwg : L1_CACHE_BYTES;
}

#endif /* __ASSEMBLY__ */
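
Note on the cache.h hunk above: cache_type_cwg() returns the CWG (Cache Writeback Granule) field of the cache type register (CTR_EL0), which encodes the granule as log2 of a number of 4-byte words, so cache_line_size() computes 4 << cwg and, when the field reads as zero, now falls back to L1_CACHE_BYTES (128 bytes after this change) instead of ARCH_DMA_MINALIGN. A minimal, self-contained C sketch of that arithmetic follows; it is illustrative only, using hard-coded sample CWG values rather than a real CTR_EL0 read.

/* Sketch of the cache_line_size() fallback arithmetic; not kernel code. */
#include <stdio.h>

#define L1_CACHE_SHIFT 7
#define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)

/* CWG encodes the writeback granule as 4 << cwg bytes; 0 means "not reported". */
static int cache_line_size_from_cwg(unsigned int cwg)
{
	return cwg ? 4 << cwg : L1_CACHE_BYTES;
}

int main(void)
{
	unsigned int samples[] = { 0, 4, 5 };	/* unreported, 64-byte and 128-byte granules */
	unsigned int i;

	for (i = 0; i < sizeof(samples) / sizeof(samples[0]); i++)
		printf("CWG=%u -> cache line size %d bytes\n",
		       samples[i], cache_line_size_from_cwg(samples[i]));
	return 0;
}
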
diff --git a/arch/arm64/include/asm/dma-direct.h b/arch/arm64/include/asm/dma-direct.h
deleted file mode 100644
index abb1b40ec751..000000000000
--- a/arch/arm64/include/asm/dma-direct.h
+++ /dev/null
@@ -1,43 +0,0 @@
-/* SPDX-License-Identifier: GPL-2.0 */
-#ifndef __ASM_DMA_DIRECT_H
-#define __ASM_DMA_DIRECT_H
-
-#include <linux/jump_label.h>
-#include <linux/swiotlb.h>
-
-#include <asm/cache.h>
-
-DECLARE_STATIC_KEY_FALSE(swiotlb_noncoherent_bounce);
-
-static inline dma_addr_t phys_to_dma(struct device *dev, phys_addr_t paddr)
-{
- dma_addr_t dev_addr = (dma_addr_t)paddr;
-
- return dev_addr - ((dma_addr_t)dev->dma_pfn_offset << PAGE_SHIFT);
-}
-
-static inline phys_addr_t dma_to_phys(struct device *dev, dma_addr_t dev_addr)
-{
- phys_addr_t paddr = (phys_addr_t)dev_addr;
-
- return paddr + ((phys_addr_t)dev->dma_pfn_offset << PAGE_SHIFT);
-}
-
-static inline bool dma_capable(struct device *dev, dma_addr_t addr, size_t size)
-{
- if (!dev->dma_mask)
- return false;
-
- /*
- * Force swiotlb buffer bouncing when ARCH_DMA_MINALIGN < CWG. The
- * swiotlb bounce buffers are aligned to (1 << IO_TLB_SHIFT).
- */
- if (static_branch_unlikely(&swiotlb_noncoherent_bounce) &&
- !is_device_dma_coherent(dev) &&
- !is_swiotlb_buffer(dma_to_phys(dev, addr)))
- return false;
-
- return addr + size - 1 <= *dev->dma_mask;
-}
-
-#endif /* __ASM_DMA_DIRECT_H */
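
The deleted dma-direct.h above provided arm64-specific helpers for the direct DMA mapping: phys_to_dma() subtracts dev->dma_pfn_offset (shifted up by PAGE_SHIFT) from the physical address, dma_to_phys() adds the same offset back, and dma_capable() additionally forced swiotlb bouncing for non-coherent devices via the swiotlb_noncoherent_bounce static key. A minimal sketch of just the offset arithmetic, using a hypothetical 1 GiB bus offset and 4 KiB pages purely for illustration:

/* Sketch of the removed linear phys<->dma translation; values are hypothetical. */
#include <stdio.h>
#include <stdint.h>

#define PAGE_SHIFT 12			/* assumes 4 KiB pages */

typedef uint64_t dma_addr_t;
typedef uint64_t phys_addr_t;

struct device {
	unsigned long dma_pfn_offset;	/* CPU-to-bus offset, in pages */
};

static dma_addr_t phys_to_dma(struct device *dev, phys_addr_t paddr)
{
	return (dma_addr_t)paddr - ((dma_addr_t)dev->dma_pfn_offset << PAGE_SHIFT);
}

static phys_addr_t dma_to_phys(struct device *dev, dma_addr_t dev_addr)
{
	return (phys_addr_t)dev_addr + ((phys_addr_t)dev->dma_pfn_offset << PAGE_SHIFT);
}

int main(void)
{
	/* Hypothetical device whose bus addresses sit 1 GiB below CPU physical addresses. */
	struct device dev = { .dma_pfn_offset = (1UL << 30) >> PAGE_SHIFT };
	phys_addr_t paddr = 0x80000000ULL;	/* example CPU physical address */
	dma_addr_t bus = phys_to_dma(&dev, paddr);

	printf("phys 0x%llx -> dma 0x%llx -> back to phys 0x%llx\n",
	       (unsigned long long)paddr, (unsigned long long)bus,
	       (unsigned long long)dma_to_phys(&dev, bus));
	return 0;
}
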