 	return secs_to_jiffies(2 * timeout);
 }
 
+static int spi_imx_dma_submit(struct spi_imx_data *spi_imx,
+			      struct spi_transfer *transfer)
+{
+	struct sg_table *tx = &transfer->tx_sg, *rx = &transfer->rx_sg;
+	struct spi_controller *controller = spi_imx->controller;
+	struct dma_async_tx_descriptor *desc_tx, *desc_rx;
+	unsigned long transfer_timeout;
+	unsigned long time_left;
+
+	/*
+	 * The TX DMA setup starts the transfer, so make sure RX is configured
+	 * before TX.
+	 */
+	desc_rx = dmaengine_prep_slave_sg(controller->dma_rx,
+					  rx->sgl, rx->nents, DMA_DEV_TO_MEM,
+					  DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
+	if (!desc_rx) {
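+		/*
+		 * Nothing has been issued yet, so flag the failure and let
+		 * the caller fall back to PIO.
+		 */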
+		transfer->error |= SPI_TRANS_FAIL_NO_START;
+		return -EINVAL;
+	}
+
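+	/* The RX callback completes dma_rx_completion, which is waited on below. */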
+	desc_rx->callback = spi_imx_dma_rx_callback;
+	desc_rx->callback_param = (void *)spi_imx;
+	dmaengine_submit(desc_rx);
+	reinit_completion(&spi_imx->dma_rx_completion);
+	dma_async_issue_pending(controller->dma_rx);
+
+	desc_tx = dmaengine_prep_slave_sg(controller->dma_tx,
+					  tx->sgl, tx->nents, DMA_MEM_TO_DEV,
+					  DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
+	if (!desc_tx) {
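+		/* Tear down both channels; the RX descriptor was already issued. */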
+		dmaengine_terminate_all(controller->dma_tx);
+		dmaengine_terminate_all(controller->dma_rx);
+		return -EINVAL;
+	}
+
+	desc_tx->callback = spi_imx_dma_tx_callback;
+	desc_tx->callback_param = (void *)spi_imx;
+	dmaengine_submit(desc_tx);
+	reinit_completion(&spi_imx->dma_tx_completion);
+	dma_async_issue_pending(controller->dma_tx);
+
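+	/* Both channels are queued; let the controller start the transfer. */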
+	spi_imx->devtype_data->trigger(spi_imx);
+
+	transfer_timeout = spi_imx_calculate_timeout(spi_imx, transfer->len);
+
+	/* Wait for SDMA to finish the data transfer. */
+	time_left = wait_for_completion_timeout(&spi_imx->dma_tx_completion,
+						transfer_timeout);
+	if (!time_left) {
+		dev_err(spi_imx->dev, "I/O Error in DMA TX\n");
+		dmaengine_terminate_all(controller->dma_tx);
+		dmaengine_terminate_all(controller->dma_rx);
+		return -ETIMEDOUT;
+	}
+
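+	/* TX is complete, but RX may still be draining the FIFO. */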
+	time_left = wait_for_completion_timeout(&spi_imx->dma_rx_completion,
+						transfer_timeout);
+	if (!time_left) {
+		dev_err(&controller->dev, "I/O Error in DMA RX\n");
+		spi_imx->devtype_data->reset(spi_imx);
+		dmaengine_terminate_all(controller->dma_rx);
+		return -ETIMEDOUT;
+	}
+
+	return 0;
+}
+
+static void spi_imx_dma_max_wml_find(struct spi_imx_data *spi_imx,
+				     struct spi_transfer *transfer)
+{
+	struct sg_table *rx = &transfer->rx_sg;
+	struct scatterlist *last_sg = sg_last(rx->sgl, rx->nents);
+	unsigned int bytes_per_word, i;
+
+	/* Get the right burst length from the last sg to ensure no tail data */
+	bytes_per_word = spi_imx_bytes_per_word(transfer->bits_per_word);
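+	/*
+	 * Search downward from half the FIFO depth for the largest burst
+	 * that divides the last sg evenly.
+	 */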
+	for (i = spi_imx->devtype_data->fifo_size / 2; i > 0; i--) {
+		if (!(sg_dma_len(last_sg) % (i * bytes_per_word)))
+			break;
+	}
+	/* Use 1 as wml in case no suitable burst length was found */
+	if (i == 0)
+		i = 1;
+
+	spi_imx->wml = i;
+}
+
 static int spi_imx_dma_configure(struct spi_controller *controller)
 {
 	int ret;
 static int spi_imx_dma_transfer(struct spi_imx_data *spi_imx,
 				struct spi_transfer *transfer)
 {
-	struct dma_async_tx_descriptor *desc_tx, *desc_rx;
-	unsigned long transfer_timeout;
-	unsigned long time_left;
 	struct spi_controller *controller = spi_imx->controller;
-	struct sg_table *tx = &transfer->tx_sg, *rx = &transfer->rx_sg;
-	struct scatterlist *last_sg = sg_last(rx->sgl, rx->nents);
-	unsigned int bytes_per_word, i;
 	int ret;
 
-	/* Get the right burst length from the last sg to ensure no tail data */
-	bytes_per_word = spi_imx_bytes_per_word(transfer->bits_per_word);
-	for (i = spi_imx->devtype_data->fifo_size / 2; i > 0; i--) {
-		if (!(sg_dma_len(last_sg) % (i * bytes_per_word)))
-			break;
-	}
-	/* Use 1 as wml in case no available burst length got */
-	if (i == 0)
-		i = 1;
-
-	spi_imx->wml = i;
+	spi_imx_dma_max_wml_find(spi_imx, transfer);
 
 	ret = spi_imx_dma_configure(controller);
 	if (ret)
 		goto dma_failure_no_start;
 
 	if (!spi_imx->devtype_data->setup_wml) {
 		dev_err(spi_imx->dev, "No setup_wml()?\n");
 		ret = -EINVAL;
 		goto dma_failure_no_start;
 	}
 	spi_imx->devtype_data->setup_wml(spi_imx);
-	/*
-	 * The TX DMA setup starts the transfer, so make sure RX is configured
-	 * before TX.
-	 */
-	desc_rx = dmaengine_prep_slave_sg(controller->dma_rx,
-					  rx->sgl, rx->nents, DMA_DEV_TO_MEM,
-					  DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
-	if (!desc_rx) {
-		ret = -EINVAL;
-		goto dma_failure_no_start;
-	}
-
-	desc_rx->callback = spi_imx_dma_rx_callback;
-	desc_rx->callback_param = (void *)spi_imx;
-	dmaengine_submit(desc_rx);
-	reinit_completion(&spi_imx->dma_rx_completion);
-	dma_async_issue_pending(controller->dma_rx);
-
-	desc_tx = dmaengine_prep_slave_sg(controller->dma_tx,
-					  tx->sgl, tx->nents, DMA_MEM_TO_DEV,
-					  DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
-	if (!desc_tx) {
-		dmaengine_terminate_all(controller->dma_tx);
-		dmaengine_terminate_all(controller->dma_rx);
-		return -EINVAL;
-	}
-
-	desc_tx->callback = spi_imx_dma_tx_callback;
-	desc_tx->callback_param = (void *)spi_imx;
-	dmaengine_submit(desc_tx);
-	reinit_completion(&spi_imx->dma_tx_completion);
-	dma_async_issue_pending(controller->dma_tx);
-
-	spi_imx->devtype_data->trigger(spi_imx);
-
-	transfer_timeout = spi_imx_calculate_timeout(spi_imx, transfer->len);
-
-	/* Wait SDMA to finish the data transfer.*/
-	time_left = wait_for_completion_timeout(&spi_imx->dma_tx_completion,
-						transfer_timeout);
-	if (!time_left) {
-		dev_err(spi_imx->dev, "I/O Error in DMA TX\n");
-		dmaengine_terminate_all(controller->dma_tx);
-		dmaengine_terminate_all(controller->dma_rx);
-		return -ETIMEDOUT;
-	}
-
-	time_left = wait_for_completion_timeout(&spi_imx->dma_rx_completion,
-						transfer_timeout);
-	if (!time_left) {
-		dev_err(&controller->dev, "I/O Error in DMA RX\n");
-		spi_imx->devtype_data->reset(spi_imx);
-		dmaengine_terminate_all(controller->dma_rx);
-		return -ETIMEDOUT;
-	}
+	ret = spi_imx_dma_submit(spi_imx, transfer);
+	if (ret)
+		return ret;
 
 	return 0;
 /* fallback to pio */