{
        struct stm32f7_i2c_dev *i2c_dev = arg;
        struct stm32_i2c_dma *dma = i2c_dev->dma;
+       struct stm32f7_i2c_msg *f7_msg = &i2c_dev->f7_msg;

        stm32f7_i2c_disable_dma_req(i2c_dev);
        dmaengine_terminate_async(dma->chan_using);
        dma_unmap_single(i2c_dev->dev, dma->dma_buf, dma->dma_len,
                         dma->dma_data_dir);
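+       /*
+        * For plain I2C transfers, release the DMA safe buffer; passing true
+        * copies read data back to the client buffer when a bounce buffer
+        * was used.
+        */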
+       if (!f7_msg->smbus)
+               i2c_put_dma_safe_msg_buf(f7_msg->buf, i2c_dev->msg, true);
        complete(&dma->dma_complete);
}

{
        struct stm32f7_i2c_msg *f7_msg = &i2c_dev->f7_msg;
        void __iomem *base = i2c_dev->base;
+       u8 *dma_buf;
        u32 cr1, cr2;
        int ret;
        /* Configure DMA or enable RX/TX interrupt */
        i2c_dev->use_dma = false;
-       if (i2c_dev->dma && f7_msg->count >= STM32F7_I2C_DMA_LEN_MIN
-           && !i2c_dev->atomic) {
-               ret = stm32_i2c_prep_dma_xfer(i2c_dev->dev, i2c_dev->dma,
-                                             msg->flags & I2C_M_RD,
-                                             f7_msg->count, f7_msg->buf,
-                                             stm32f7_i2c_dma_callback,
-                                             i2c_dev);
-               if (!ret)
-                       i2c_dev->use_dma = true;
-               else
-                       dev_warn(i2c_dev->dev, "can't use DMA\n");
+       if (i2c_dev->dma && !i2c_dev->atomic) {
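+               /*
+                * i2c_get_dma_safe_msg_buf() returns msg->buf when it is
+                * already DMA safe, a bounce buffer otherwise, or NULL when
+                * the message is shorter than STM32F7_I2C_DMA_LEN_MIN bytes.
+                */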
+               dma_buf = i2c_get_dma_safe_msg_buf(msg, STM32F7_I2C_DMA_LEN_MIN);
+               if (dma_buf) {
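+                       /* Run the transfer out of the DMA safe buffer */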
+                       f7_msg->buf = dma_buf;
+                       ret = stm32_i2c_prep_dma_xfer(i2c_dev->dev, i2c_dev->dma,
+                                                     msg->flags & I2C_M_RD,
+                                                     f7_msg->count, f7_msg->buf,
+                                                     stm32f7_i2c_dma_callback,
+                                                     i2c_dev);
+                       if (ret) {
+                               dev_warn(i2c_dev->dev, "can't use DMA\n");
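+                               /*
+                                * DMA setup failed: drop the DMA safe buffer
+                                * without copy back and fall back to the
+                                * interrupt path on the client buffer.
+                                */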
+                               i2c_put_dma_safe_msg_buf(f7_msg->buf, msg, false);
+                               f7_msg->buf = msg->buf;
+                       } else {
+                               i2c_dev->use_dma = true;
+                       }
+               }
        }

        if (!i2c_dev->use_dma) {