Subject: [PATCH 3/4] dmaengine: stm32-dma: take address into account when computing max width
The DMA_SxPAR and DMA_SxM0AR/M1AR registers have to be aligned on PSIZE and
MSIZE respectively. This means the bus width has to be forced to 1 byte when
the computed width is not aligned with the buffer address.

Signed-off-by: Amelie Delaunay <amelie.delaunay@st.com>
---
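For illustration only (not part of the patch): a minimal, self-contained C
sketch of the alignment rule described above. clamp_width_to_addr() is a
hypothetical helper, not the driver code; it stands in for the do_div()
check added to stm32_dma_get_max_width() and simply falls back to a 1-byte
width whenever the buffer address is not a multiple of the computed width.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical helper mirroring the alignment rule in user space. */
static uint32_t clamp_width_to_addr(uint64_t buf_addr, uint32_t max_width)
{
	/* Address not aligned on the computed width: force 1-byte accesses. */
	if (buf_addr % max_width)
		return 1;
	return max_width;
}

int main(void)
{
	/* 0x20000004 is 4-byte aligned, 0x20000005 is not. */
	printf("%u\n", clamp_width_to_addr(0x20000004, 4)); /* prints 4 */
	printf("%u\n", clamp_width_to_addr(0x20000005, 4)); /* prints 1 */
	return 0;
}
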
drivers/dma/stm32-dma.c | 19 ++++++++++++++-----
1 file changed, 14 insertions(+), 5 deletions(-)

diff --git a/drivers/dma/stm32-dma.c b/drivers/dma/stm32-dma.c
index 62501e5d9e9d..f54ecb123a52 100644
--- a/drivers/dma/stm32-dma.c
+++ b/drivers/dma/stm32-dma.c
@@ -264,9 +264,11 @@ static int stm32_dma_get_width(struct stm32_dma_chan *chan,
 }
 
 static enum dma_slave_buswidth stm32_dma_get_max_width(u32 buf_len,
+                                                       dma_addr_t buf_addr,
                                                        u32 threshold)
 {
         enum dma_slave_buswidth max_width;
+        u64 addr = buf_addr;
 
         if (threshold == STM32_DMA_FIFO_THRESHOLD_FULL)
                 max_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
@@ -277,6 +279,9 @@ static enum dma_slave_buswidth stm32_dma_get_max_width(u32 buf_len,
                max_width > DMA_SLAVE_BUSWIDTH_1_BYTE)
                 max_width = max_width >> 1;
 
+        if (do_div(addr, max_width))
+                max_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
+
         return max_width;
 }
 
@@ -707,7 +712,7 @@ static void stm32_dma_issue_pending(struct dma_chan *c)
 static int stm32_dma_set_xfer_param(struct stm32_dma_chan *chan,
                                     enum dma_transfer_direction direction,
                                     enum dma_slave_buswidth *buswidth,
-                                    u32 buf_len)
+                                    u32 buf_len, dma_addr_t buf_addr)
 {
         enum dma_slave_buswidth src_addr_width, dst_addr_width;
         int src_bus_width, dst_bus_width;
@@ -739,7 +744,8 @@ static int stm32_dma_set_xfer_param(struct stm32_dma_chan *chan,
                 return dst_burst_size;
 
         /* Set memory data size */
-        src_addr_width = stm32_dma_get_max_width(buf_len, fifoth);
+        src_addr_width = stm32_dma_get_max_width(buf_len, buf_addr,
+                                                 fifoth);
         chan->mem_width = src_addr_width;
         src_bus_width = stm32_dma_get_width(chan, src_addr_width);
         if (src_bus_width < 0)
@@ -788,7 +794,8 @@ static int stm32_dma_set_xfer_param(struct stm32_dma_chan *chan,
                 return src_burst_size;
 
         /* Set memory data size */
-        dst_addr_width = stm32_dma_get_max_width(buf_len, fifoth);
+        dst_addr_width = stm32_dma_get_max_width(buf_len, buf_addr,
+                                                 fifoth);
         chan->mem_width = dst_addr_width;
         dst_bus_width = stm32_dma_get_width(chan, dst_addr_width);
         if (dst_bus_width < 0)
@@ -876,7 +883,8 @@ static struct dma_async_tx_descriptor *stm32_dma_prep_slave_sg(
 
         for_each_sg(sgl, sg, sg_len, i) {
                 ret = stm32_dma_set_xfer_param(chan, direction, &buswidth,
-                                               sg_dma_len(sg));
+                                               sg_dma_len(sg),
+                                               sg_dma_address(sg));
                 if (ret < 0)
                         goto err;
 
@@ -944,7 +952,8 @@ static struct dma_async_tx_descriptor *stm32_dma_prep_dma_cyclic(
                 return NULL;
         }
 
-        ret = stm32_dma_set_xfer_param(chan, direction, &buswidth, period_len);
+        ret = stm32_dma_set_xfer_param(chan, direction, &buswidth, period_len,
+                                       buf_addr);
         if (ret < 0)
                 return NULL;
 
--
2.17.1