return rc;
}
+/* Tear down the TX inline copy buffer for @txr: unmap it from the
+ * device and release the backing memory.  No-op if no buffer was
+ * ever allocated, so it is safe to call unconditionally on cleanup.
+ */
+static void bnxt_free_tx_inline_buf(struct bnxt_tx_ring_info *txr,
+				    struct pci_dev *pdev)
+{
+	void *buf = txr->tx_inline_buf;
+
+	if (!buf)
+		return;
+
+	/* Unmap before freeing so the device can no longer read the page. */
+	dma_unmap_single(&pdev->dev, txr->tx_inline_dma,
+			 txr->tx_inline_size, DMA_TO_DEVICE);
+	txr->tx_inline_buf = NULL;
+	txr->tx_inline_size = 0;
+	kfree(buf);
+}
+
+/* Allocate a @size-byte TX inline copy buffer for @txr and map it for
+ * device reads (DMA_TO_DEVICE).  Pair with bnxt_free_tx_inline_buf().
+ *
+ * Returns 0 on success, -EINVAL for a zero size, or -ENOMEM on
+ * allocation or mapping failure.  On failure txr->tx_inline_buf is
+ * left NULL and txr->tx_inline_size is untouched.
+ */
+static int __maybe_unused bnxt_alloc_tx_inline_buf(struct bnxt_tx_ring_info *txr,
+						   struct pci_dev *pdev,
+						   unsigned int size)
+{
+	/* kmalloc(0) yields ZERO_SIZE_PTR (non-NULL), which must never be
+	 * handed to dma_map_single(); reject a zero size up front.
+	 */
+	if (!size)
+		return -EINVAL;
+
+	txr->tx_inline_buf = kmalloc(size, GFP_KERNEL);
+	if (!txr->tx_inline_buf)
+		return -ENOMEM;
+
+	txr->tx_inline_dma = dma_map_single(&pdev->dev, txr->tx_inline_buf,
+					    size, DMA_TO_DEVICE);
+	if (dma_mapping_error(&pdev->dev, txr->tx_inline_dma)) {
+		kfree(txr->tx_inline_buf);
+		txr->tx_inline_buf = NULL;
+		return -ENOMEM;
+	}
+	txr->tx_inline_size = size;
+
+	return 0;
+}
+
static void bnxt_free_tx_rings(struct bnxt *bp)
{
int i;
txr->tx_push = NULL;
}
+ bnxt_free_tx_inline_buf(txr, pdev);
+
ring = &txr->tx_ring_struct;
bnxt_free_ring(bp, &ring->ring_mem);