Commit db89728a authored by Valentin Caron's avatar Valentin Caron Committed by Greg Kroah-Hartman
Browse files

serial: stm32: avoid clearing DMAT bit during transfer



It is advisable to rely on DMA pause/resume instead of clearing/setting
the DMA request enable bit for this purpose, since a DMA
request/acknowledge race may be encountered otherwise. We therefore use
dmaengine_pause() and dmaengine_resume() to pause a DMA transfer when
necessary.

It is also safer to close the DMA channel before resetting DMAT in stm32_usart_shutdown().

Signed-off-by: Valentin Caron <valentin.caron@foss.st.com>
Link: https://lore.kernel.org/r/20230808161906.178996-2-valentin.caron@foss.st.com


Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
parent e112ec42
Loading
Loading
Loading
Loading
+44 −32
Original line number Diff line number Diff line
@@ -506,13 +506,6 @@ static bool stm32_usart_tx_dma_started(struct stm32_port *stm32_port)
	return stm32_port->tx_dma_busy;
}

static bool stm32_usart_tx_dma_enabled(struct stm32_port *stm32_port)
{
	const struct stm32_usart_offsets *ofs = &stm32_port->info->ofs;

	return !!(readl_relaxed(stm32_port->port.membase + ofs->cr3) & USART_CR3_DMAT);
}

static void stm32_usart_tx_dma_complete(void *arg)
{
	struct uart_port *port = arg;
@@ -591,9 +584,6 @@ static void stm32_usart_transmit_chars_pio(struct uart_port *port)
	const struct stm32_usart_offsets *ofs = &stm32_port->info->ofs;
	struct circ_buf *xmit = &port->state->xmit;

	if (stm32_usart_tx_dma_enabled(stm32_port))
		stm32_usart_clr_bits(port, ofs->cr3, USART_CR3_DMAT);

	while (!uart_circ_empty(xmit)) {
		/* Check that TDR is empty before filling FIFO */
		if (!(readl_relaxed(port->membase + ofs->isr) & USART_SR_TXE))
@@ -616,10 +606,16 @@ static void stm32_usart_transmit_chars_dma(struct uart_port *port)
	struct circ_buf *xmit = &port->state->xmit;
	struct dma_async_tx_descriptor *desc = NULL;
	unsigned int count;
	int ret;

	if (stm32_usart_tx_dma_started(stm32port)) {
		if (!stm32_usart_tx_dma_enabled(stm32port))
			stm32_usart_set_bits(port, ofs->cr3, USART_CR3_DMAT);
		if (dmaengine_tx_status(stm32port->tx_ch,
					stm32port->tx_ch->cookie,
					NULL) == DMA_PAUSED) {
			ret = dmaengine_resume(stm32port->tx_ch);
			if (ret < 0)
				goto dma_err;
		}
		return;
	}

@@ -664,11 +660,9 @@ static void stm32_usart_transmit_chars_dma(struct uart_port *port)
	desc->callback_param = port;

	/* Push current DMA TX transaction in the pending queue */
	if (dma_submit_error(dmaengine_submit(desc))) {
		/* dma no yet started, safe to free resources */
		stm32_usart_tx_dma_terminate(stm32port);
		goto fallback_err;
	}
	/* DMA no yet started, safe to free resources */
	if (dma_submit_error(dmaengine_submit(desc)))
		goto dma_err;

	/* Issue pending DMA TX requests */
	dma_async_issue_pending(stm32port->tx_ch);
@@ -679,6 +673,10 @@ static void stm32_usart_transmit_chars_dma(struct uart_port *port)

	return;

dma_err:
	dev_err(port->dev, "DMA failed with error code: %d\n", ret);
	stm32_usart_tx_dma_terminate(stm32port);

fallback_err:
	stm32_usart_transmit_chars_pio(port);
}
@@ -701,9 +699,15 @@ static void stm32_usart_transmit_chars(struct uart_port *port)

	if (port->x_char) {
		if (stm32_usart_tx_dma_started(stm32_port) &&
		    stm32_usart_tx_dma_enabled(stm32_port))
			stm32_usart_clr_bits(port, ofs->cr3, USART_CR3_DMAT);

		    dmaengine_tx_status(stm32_port->tx_ch,
					stm32_port->tx_ch->cookie,
					NULL) == DMA_IN_PROGRESS) {
			ret = dmaengine_pause(stm32_port->tx_ch);
			if (ret < 0) {
				dev_err(port->dev, "DMA failed with error code: %d\n", ret);
				stm32_usart_tx_dma_terminate(stm32_port);
			}
		}
		/* Check that TDR is empty before filling FIFO */
		ret =
		readl_relaxed_poll_timeout_atomic(port->membase + ofs->isr,
@@ -716,8 +720,14 @@ static void stm32_usart_transmit_chars(struct uart_port *port)
		writel_relaxed(port->x_char, port->membase + ofs->tdr);
		port->x_char = 0;
		port->icount.tx++;
		if (stm32_usart_tx_dma_started(stm32_port))
			stm32_usart_set_bits(port, ofs->cr3, USART_CR3_DMAT);

		if (stm32_usart_tx_dma_started(stm32_port)) {
			ret = dmaengine_resume(stm32_port->tx_ch);
			if (ret < 0) {
				dev_err(port->dev, "DMA failed with error code: %d\n", ret);
				stm32_usart_tx_dma_terminate(stm32_port);
			}
		}
		return;
	}

@@ -850,11 +860,16 @@ static void stm32_usart_disable_ms(struct uart_port *port)
static void stm32_usart_stop_tx(struct uart_port *port)
{
	struct stm32_port *stm32_port = to_stm32_port(port);
	const struct stm32_usart_offsets *ofs = &stm32_port->info->ofs;
	int ret;

	stm32_usart_tx_interrupt_disable(port);
	if (stm32_usart_tx_dma_started(stm32_port) && stm32_usart_tx_dma_enabled(stm32_port))
		stm32_usart_clr_bits(port, ofs->cr3, USART_CR3_DMAT);
	if (stm32_usart_tx_dma_started(stm32_port)) {
		ret = dmaengine_pause(stm32_port->tx_ch);
		if (ret < 0) {
			dev_err(port->dev, "DMA failed with error code: %d\n", ret);
			stm32_usart_tx_dma_terminate(stm32_port);
		}
	}

	stm32_usart_rs485_rts_disable(port);
}
@@ -878,12 +893,9 @@ static void stm32_usart_start_tx(struct uart_port *port)
static void stm32_usart_flush_buffer(struct uart_port *port)
{
	struct stm32_port *stm32_port = to_stm32_port(port);
	const struct stm32_usart_offsets *ofs = &stm32_port->info->ofs;

	if (stm32_port->tx_ch) {
	if (stm32_port->tx_ch)
		stm32_usart_tx_dma_terminate(stm32_port);
		stm32_usart_clr_bits(port, ofs->cr3, USART_CR3_DMAT);
	}
}

/* Throttle the remote when input buffer is about to overflow. */
@@ -1042,12 +1054,12 @@ static void stm32_usart_shutdown(struct uart_port *port)
	u32 val, isr;
	int ret;

	if (stm32_usart_tx_dma_enabled(stm32_port))
		stm32_usart_clr_bits(port, ofs->cr3, USART_CR3_DMAT);

	if (stm32_usart_tx_dma_started(stm32_port))
		stm32_usart_tx_dma_terminate(stm32_port);

	if (stm32_port->tx_ch)
		stm32_usart_clr_bits(port, ofs->cr3, USART_CR3_DMAT);

	/* Disable modem control interrupts */
	stm32_usart_disable_ms(port);