@@ -280,3 +280,57 @@ void __init early_dma_memcpy(void *pdst, const void *psrc, size_t size)
 	/* We assume that everything is 4 byte aligned, so include
 	 * a basic sanity check
 	 */
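+	/* The transfer is programmed below as 32-bit words (WDSIZE_32,
+	 * x_count = size >> 2), so source, destination, and size must
+	 * all be multiples of 4.
+	 */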
+	BUG_ON(dst % 4);
+	BUG_ON(src % 4);
+	BUG_ON(size % 4);
+
+	src_ch = 0;
+	/* Find an available memDMA channel */
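+	/* Ping-pong between memDMA streams 0 and 1: a stream is reusable
+	 * once its source config reads zero (idle) or its destination
+	 * channel reports DMA_DONE from a finished transfer, in which
+	 * case the stale source config is cleared before reuse.
+	 */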
+	while (1) {
+		if (src_ch == (struct dma_register *)MDMA_S0_NEXT_DESC_PTR) {
+			dst_ch = (struct dma_register *)MDMA_D1_NEXT_DESC_PTR;
+			src_ch = (struct dma_register *)MDMA_S1_NEXT_DESC_PTR;
+		} else {
+			dst_ch = (struct dma_register *)MDMA_D0_NEXT_DESC_PTR;
+			src_ch = (struct dma_register *)MDMA_S0_NEXT_DESC_PTR;
+		}
+
+		if (!DMA_MMR_READ(&src_ch->cfg))
+			break;
+		else if (DMA_MMR_READ(&dst_ch->irq_status) & DMA_DONE) {
+			DMA_MMR_WRITE(&src_ch->cfg, 0);
+			break;
+		}
+	}
+
+	/* Force a sync in case a previous config reset on this channel
+	 * occurred. This is needed so subsequent writes to DMA registers
+	 * are not spuriously lost/corrupted.
+	 */
+	__builtin_bfin_ssync();
+
+	/* Destination */
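+	/* x_count is the number of 32-bit words; x_modify is the 4-byte
+	 * stride per word. Writing DMA_DONE | DMA_ERR to irq_status
+	 * clears any status bits left over from an earlier transfer.
+	 */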
+	bfin_write32(&dst_ch->start_addr, dst);
+	DMA_MMR_WRITE(&dst_ch->x_count, size >> 2);
+	DMA_MMR_WRITE(&dst_ch->x_modify, 1 << 2);
+	DMA_MMR_WRITE(&dst_ch->irq_status, DMA_DONE | DMA_ERR);
+
+	/* Source */
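+	/* Same word count and stride as the destination side. */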
+	bfin_write32(&src_ch->start_addr, src);
+	DMA_MMR_WRITE(&src_ch->x_count, size >> 2);
+	DMA_MMR_WRITE(&src_ch->x_modify, 1 << 2);
+	DMA_MMR_WRITE(&src_ch->irq_status, DMA_DONE | DMA_ERR);
+
+	/* Enable */
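+	/* DMAEN | WDSIZE_32 starts the read side moving 32-bit words;
+	 * the write side adds WNR (memory-write direction) and DI_EN_X
+	 * so DMA_DONE is raised once the X count expires.
+	 */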
+	DMA_MMR_WRITE(&src_ch->cfg, DMAEN | WDSIZE_32);
+	DMA_MMR_WRITE(&dst_ch->cfg, WNR | DI_EN_X | DMAEN | WDSIZE_32);
+
+	/* Since we are atomic now, don't use the workaround ssync */
+	__builtin_bfin_ssync();
+
+#ifdef CONFIG_BF60x
+	/* Work around a possible MDMA anomaly: running two MDMA channels
+	 * to transfer DDR data to L1 SRAM may corrupt the data.
+	 * This should be reverted once the issue has been root-caused.
+	 */
+	while (!(DMA_MMR_READ(&dst_ch->irq_status) & DMA_DONE))