*
*/
+/** Minimum alignment for coherent DMA allocations
+ *
+ * We set this sufficiently high to ensure that we do not end up with
+ * both cached and uncached uses in the same cacheline.  The value
+ * must therefore be at least the cache line size, and must be a
+ * power of two since the length-rounding arithmetic relies on it.
+ */
+#define RISCV_DMA_ALIGN 256
+
/**
* Map buffer for DMA
*
void *addr;
void *caddr;
+ /* Round up length and alignment */
+ len = ( ( len + RISCV_DMA_ALIGN - 1 ) & ~( RISCV_DMA_ALIGN - 1 ) );
+ if ( align < RISCV_DMA_ALIGN )
+ align = RISCV_DMA_ALIGN;
+
/* Allocate from heap */
addr = malloc_phys ( len, align );
if ( ! addr )
/* Sanity check */
assert ( virt_to_phys ( addr ) == virt_to_phys ( map->token ) );
+ /* Round up length to match allocation */
+ len = ( ( len + RISCV_DMA_ALIGN - 1 ) & ~( RISCV_DMA_ALIGN - 1 ) );
+
/* Free original allocation */
free_phys ( map->token, len );