return 0;
 }
 
+/* Build an skb around an already-DMA-unmapped rx buffer @data.
+ *
+ * The rx ring uses two allocation schemes, selected by fp->rx_frag_size
+ * (the paired bnx2x_frag_free() branches on the same field):
+ *   - non-zero: @data is a page fragment, so wrap it with build_skb()
+ *     using the frag truesize;
+ *   - zero: @data presumably came from kmalloc, so slab_build_skb()
+ *     must be used instead — build_skb() on a slab buffer corrupts the
+ *     skb head-frag accounting. (NOTE(review): confirm against the
+ *     allocation side of this ring.)
+ *
+ * Returns the new skb, or NULL on allocation failure; caller owns
+ * freeing @data (via bnx2x_frag_free()) when NULL is returned.
+ */
+static struct sk_buff *
+bnx2x_build_skb(const struct bnx2x_fastpath *fp, void *data)
+{
+       struct sk_buff *skb;
+
+       if (fp->rx_frag_size)
+               skb = build_skb(data, fp->rx_frag_size);
+       else
+               skb = slab_build_skb(data);
+       return skb;
+}
+
 static void bnx2x_frag_free(const struct bnx2x_fastpath *fp, void *data)
 {
        if (fp->rx_frag_size)
        dma_unmap_single(&bp->pdev->dev, dma_unmap_addr(rx_buf, mapping),
                         fp->rx_buf_size, DMA_FROM_DEVICE);
        if (likely(new_data))
-               skb = build_skb(data, fp->rx_frag_size);
+               skb = bnx2x_build_skb(fp, data);
 
        if (likely(skb)) {
 #ifdef BNX2X_STOP_ON_ERROR
                                                 dma_unmap_addr(rx_buf, mapping),
                                                 fp->rx_buf_size,
                                                 DMA_FROM_DEVICE);
-                               skb = build_skb(data, fp->rx_frag_size);
+                               skb = bnx2x_build_skb(fp, data);
                                if (unlikely(!skb)) {
                                        bnx2x_frag_free(fp, data);
                                        bnx2x_fp_qstats(bp, fp)->