--- /dev/null
+From 78615c4ddb73bd4a7f13ec4bab60b974b8fc6faa Mon Sep 17 00:00:00 2001
+From: Catalin Marinas <catalin.marinas@arm.com>
+Date: Tue, 13 Jun 2023 16:52:43 +0100
+Subject: powerpc: move the ARCH_DMA_MINALIGN definition to asm/cache.h
+
+From: Catalin Marinas <catalin.marinas@arm.com>
+
+commit 78615c4ddb73bd4a7f13ec4bab60b974b8fc6faa upstream.
+
+Patch series "Move the ARCH_DMA_MINALIGN definition to asm/cache.h".
+
+The ARCH_KMALLOC_MINALIGN reduction series defines a generic
+ARCH_DMA_MINALIGN in linux/cache.h:
+
+https://lore.kernel.org/r/20230612153201.554742-2-catalin.marinas@arm.com/
+
+Unfortunately, this causes a duplicate definition warning for
+microblaze, powerpc (32-bit only) and sh, as these architectures
+define ARCH_DMA_MINALIGN in a file other than asm/cache.h. Move the
+macro to asm/cache.h to avoid this issue and also bring them in line
+with the other architectures.
+
+
+This patch (of 3):
+
+The powerpc architecture defines ARCH_DMA_MINALIGN in asm/page_32.h,
+and only when CONFIG_NOT_COHERENT_CACHE is enabled (32-bit platforms
+only). Move this macro to asm/cache.h to allow a generic
+ARCH_DMA_MINALIGN definition in linux/cache.h without redefinition
+errors/warnings.
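+
+For context, the generic definition added by that series follows a
+"define only if the architecture has not already done so" pattern.
+A minimal sketch of what linux/cache.h relies on (illustrative only;
+the exact upstream hunk is in the series linked above):
+
+  #include <asm/cache.h>    /* an arch override must be visible here */
+
+  /* generic fallback, used when the arch did not define its own value */
+  #ifndef ARCH_DMA_MINALIGN
+  #define ARCH_DMA_MINALIGN __alignof__(unsigned long long)
+  #endif
+
+With powerpc's definition living in asm/page_32.h instead, linux/cache.h
+does not see it, so both definitions end up active and the preprocessor
+warns about the macro being redefined once asm/page_32.h is included.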
+
+Link: https://lkml.kernel.org/r/20230613155245.1228274-1-catalin.marinas@arm.com
+Link: https://lkml.kernel.org/r/20230613155245.1228274-2-catalin.marinas@arm.com
+Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
+Reported-by: kernel test robot <lkp@intel.com>
+Closes: https://lore.kernel.org/oe-kbuild-all/202306131053.1ybvRRhO-lkp@intel.com/
+Cc: Michael Ellerman <mpe@ellerman.id.au>
+Cc: Nicholas Piggin <npiggin@gmail.com>
+Cc: Christophe Leroy <christophe.leroy@csgroup.eu>
+Cc: John Paul Adrian Glaubitz <glaubitz@physik.fu-berlin.de>
+Cc: Michal Simek <monstr@monstr.eu>
+Cc: Rich Felker <dalias@libc.org>
+Cc: Vlastimil Babka <vbabka@suse.cz>
+Cc: Yoshinori Sato <ysato@users.sourceforge.jp>
+Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
+Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
+---
+ arch/powerpc/include/asm/cache.h | 4 ++++
+ arch/powerpc/include/asm/page_32.h | 4 ----
+ 2 files changed, 4 insertions(+), 4 deletions(-)
+
+--- a/arch/powerpc/include/asm/cache.h
++++ b/arch/powerpc/include/asm/cache.h
+@@ -33,6 +33,10 @@
+
+ #define IFETCH_ALIGN_BYTES (1 << IFETCH_ALIGN_SHIFT)
+
++#ifdef CONFIG_NOT_COHERENT_CACHE
++#define ARCH_DMA_MINALIGN L1_CACHE_BYTES
++#endif
++
+ #if !defined(__ASSEMBLY__)
+ #ifdef CONFIG_PPC64
+
+--- a/arch/powerpc/include/asm/page_32.h
++++ b/arch/powerpc/include/asm/page_32.h
+@@ -12,10 +12,6 @@
+
+ #define VM_DATA_DEFAULT_FLAGS VM_DATA_DEFAULT_FLAGS32
+
+-#ifdef CONFIG_NOT_COHERENT_CACHE
+-#define ARCH_DMA_MINALIGN L1_CACHE_BYTES
+-#endif
+-
+ #if defined(CONFIG_PPC_256K_PAGES) || \
+ (defined(CONFIG_PPC_8xx) && defined(CONFIG_PPC_16K_PAGES))
+ #define PTE_SHIFT (PAGE_SHIFT - PTE_T_LOG2 - 2) /* 1/4 of a page */