The changes here ensure correct cache maintenance around DMA operations.
One cache flush before a DMA transfer to the device was missing, and two
cache invalidations occurred before the corresponding DMA read rather
than after it, where they are needed.
---
A sketch of the general pattern being applied follows the patch.

 bsps/shared/dev/nand/xnandpsu.c | 21 +++++++++++++++++++++
 1 file changed, 21 insertions(+)
diff --git a/bsps/shared/dev/nand/xnandpsu.c b/bsps/shared/dev/nand/xnandpsu.c
index 89451d19f8..85e6ba8532 100644
--- a/bsps/shared/dev/nand/xnandpsu.c
+++ b/bsps/shared/dev/nand/xnandpsu.c
@@ -2002,6 +2002,14 @@ static s32 XNandPsu_ReadPage(XNandPsu *InstancePtr, u32 Target, u32 Page,
 	Status = XNandPsu_Data_ReadWrite(InstancePtr, Buf, PktCount, PktSize,
 						0, 1);
 
+#ifdef __rtems__
+	if (InstancePtr->DmaMode == XNANDPSU_MDMA) {
+		if (InstancePtr->Config.IsCacheCoherent == 0) {
+			Xil_DCacheInvalidateRange((INTPTR)(void *)Buf, (PktSize * PktCount));
+		}
+	}
+#endif
+
 	/* Check ECC Errors */
 	if (InstancePtr->EccMode == XNANDPSU_HWECC) {
 		/* Hamming Multi Bit Errors */
@@ -2115,6 +2123,14 @@ s32 XNandPsu_ReadSpareBytes(XNandPsu *InstancePtr, u32 Page, u8 *Buf)
 	Status = XNandPsu_Data_ReadWrite(InstancePtr, Buf, PktCount, PktSize,
 						0, 1);
 
+#ifdef __rtems__
+	if (InstancePtr->DmaMode == XNANDPSU_MDMA) {
+		if (InstancePtr->Config.IsCacheCoherent == 0) {
+			Xil_DCacheInvalidateRange((INTPTR)(void *)Buf, (PktSize * PktCount));
+		}
+	}
+#endif
+
 	return Status;
 }
 
@@ -2557,6 +2573,11 @@ static s32 XNandPsu_ChangeWriteColumn(XNandPsu *InstancePtr, u32 Target,
 	if (InstancePtr->DmaMode == XNANDPSU_MDMA) {
 		RegVal = XNANDPSU_INTR_STS_EN_TRANS_COMP_STS_EN_MASK |
 			XNANDPSU_INTR_STS_EN_DMA_INT_STS_EN_MASK;
+#ifdef __rtems__
+		if (InstancePtr->Config.IsCacheCoherent == 0) {
+			Xil_DCacheFlushRange((INTPTR)(void *)Buf, (PktSize * PktCount));
+		}
+#endif
 		XNandPsu_Update_DmaAddr(InstancePtr, Buf);
 	} else {
 		RegVal = XNANDPSU_INTR_STS_EN_BUFF_WR_RDY_STS_EN_MASK;
-- 
2.39.2
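
For reference, below is a minimal sketch of the cache-maintenance pattern
the patch applies, using the Xil_DCacheFlushRange()/Xil_DCacheInvalidateRange()
calls that appear in the diff. The helper functions and their parameters
(cache_before_dma_write, cache_after_dma_read, Len, IsCacheCoherent) are
hypothetical names for illustration only; they are not part of the driver.

#include "xil_types.h"	/* INTPTR, u32 */
#include "xil_cache.h"	/* Xil_DCacheFlushRange, Xil_DCacheInvalidateRange */

/* CPU -> device: flush dirty cache lines back to memory before the DMA
 * engine reads Buf, so it sees the CPU's latest data rather than stale
 * RAM contents. */
static void cache_before_dma_write(const void *Buf, u32 Len,
					u32 IsCacheCoherent)
{
	if (IsCacheCoherent == 0U) {
		Xil_DCacheFlushRange((INTPTR)Buf, Len);
	}
	/* ...start the DMA transfer to the device here... */
}

/* Device -> CPU: invalidate the range after the DMA engine has written
 * Buf, so subsequent CPU reads fetch from memory instead of stale cache
 * lines. Invalidating only before the transfer is insufficient, because
 * speculative accesses can repopulate the lines while DMA is in flight;
 * this is why the patch adds the invalidations after the read. */
static void cache_after_dma_read(void *Buf, u32 Len, u32 IsCacheCoherent)
{
	/* ...wait here for the DMA transfer from the device to complete... */
	if (IsCacheCoherent == 0U) {
		Xil_DCacheInvalidateRange((INTPTR)Buf, Len);
	}
}

The IsCacheCoherent check mirrors the driver's own guard: on systems where
the DMA path is hardware-coherent with the CPU caches, both operations can
be skipped entirely.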