		return -ENODEV;
	}
-	hw_dequeue = xhci_get_hw_deq(xhci, dev, ep_index, stream_id);
+	hw_dequeue = xhci_get_hw_deq(xhci, dev, ep_index, stream_id) & TR_DEQ_PTR_MASK;
	new_seg = ep_ring->deq_seg;
	new_deq = ep_ring->dequeue;
	new_cycle = le32_to_cpu(td->end_trb->generic.field[3]) & TRB_CYCLE;
	 */
	do {
		if (!hw_dequeue_found && xhci_trb_virt_to_dma(new_seg, new_deq)
-		    == (dma_addr_t)(hw_dequeue & ~0xf)) {
+		    == (dma_addr_t)hw_dequeue) {
			hw_dequeue_found = true;
			if (td_last_trb_found)
				break;
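
For reference, a standalone sketch of what the changes above centralize: the raw value read back from the endpoint or stream context carries state in its low bits (TRBs are 16-byte aligned, so bits 3:0 are not address bits), and masking it once at the read site lets every later comparison use hw_dequeue directly instead of an open-coded "& ~0xf". The mask value below is an assumption chosen to match the ~0xf it replaces; the driver's real definition lives in xhci.h.

#include <stdint.h>
#include <stdio.h>

/* Assumed value, equivalent to the open-coded ~0xf: keep bits 63:4. */
#define TR_DEQ_PTR_MASK	(~0xfULL)

int main(void)
{
	uint64_t hw_dequeue = 0x12345670ULL | 0x1;	/* address + cycle bit */

	/* Mask once where the value is fetched... */
	uint64_t deq = hw_dequeue & TR_DEQ_PTR_MASK;

	/* ...so later DMA-address comparisons need no local masking. */
	printf("raw %#llx -> ptr %#llx\n",
	       (unsigned long long)hw_dequeue, (unsigned long long)deq);
	return 0;
}
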
		 */
		hw_deq = xhci_get_hw_deq(xhci, ep->vdev, ep->ep_index,
					 td->urb->stream_id);
-		hw_deq &= ~0xf;
+		hw_deq &= TR_DEQ_PTR_MASK;
		if (td->cancel_status == TD_HALTED || trb_in_td(td, hw_deq)) {
			switch (td->cancel_status) {
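
The code above decides how to cancel a TD based on whether the controller may still be processing it; trb_in_td() answers that by treating the masked hw_deq as a plain DMA address and range-checking it against the TD's TRBs. A hedged, single-segment sketch of that containment test follows, with stand-in types and names rather than the driver's (the real helper also walks ring segments, which is omitted here):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Stand-in for a TD's TRB span within one ring segment. */
struct trb_range {
	uint64_t first_trb_dma;	/* DMA address of the TD's first TRB */
	uint64_t last_trb_dma;	/* DMA address of the TD's last TRB */
};

/* True if hw_deq points at one of the TD's TRBs (single-segment case). */
static bool td_contains(const struct trb_range *td, uint64_t hw_deq)
{
	return hw_deq >= td->first_trb_dma && hw_deq <= td->last_trb_dma;
}

int main(void)
{
	/* A TD occupying four 16-byte TRBs starting at 0x1000. */
	struct trb_range td = { .first_trb_dma = 0x1000, .last_trb_dma = 0x1030 };

	printf("0x1020 in td: %d\n", td_contains(&td, 0x1020));	/* 1 */
	printf("0x2000 in td: %d\n", td_contains(&td, 0x2000));	/* 0 */
	return 0;
}
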
	if (!list_empty(&ep->ring->td_list)) { /* Not streams compatible */
		hw_deq = xhci_get_hw_deq(ep->xhci, ep->vdev, ep->ep_index, 0);
-		hw_deq &= ~0xf;
+		hw_deq &= TR_DEQ_PTR_MASK;
		td = list_first_entry(&ep->ring->td_list, struct xhci_td, td_list);
		if (trb_in_td(td, hw_deq))
			return td;
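
list_first_entry() above is the usual container_of() pattern: the ring's td_list links embedded nodes, and the macro recovers the enclosing struct xhci_td from the first node. A minimal userspace illustration with stand-in types (not the driver's real definitions):

#include <stddef.h>
#include <stdio.h>

struct list_head { struct list_head *next, *prev; };

/* Recover the enclosing struct from a pointer to its embedded member. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))
#define list_first_entry(head, type, member) \
	container_of((head)->next, type, member)

struct td {
	int id;
	struct list_head td_list;	/* embedded list node */
};

int main(void)
{
	struct td a = { .id = 1 };
	/* A one-element circular list: head <-> a.td_list. */
	struct list_head ring = { &a.td_list, &a.td_list };

	a.td_list.next = &ring;
	a.td_list.prev = &ring;

	struct td *first = list_first_entry(&ring, struct td, td_list);
	printf("first td id = %d\n", first->id);	/* prints 1 */
	return 0;
}
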
		u64 deq;
		/* 4.6.10 deq ptr is written to the stream ctx for streams */
		if (ep->ep_state & EP_HAS_STREAMS) {
-			deq = le64_to_cpu(stream_ctx->stream_ring) & SCTX_DEQ_MASK;
+			deq = le64_to_cpu(stream_ctx->stream_ring) & TR_DEQ_PTR_MASK;
			/*
			 * Cadence xHCI controllers store some endpoint state
			 * information within the Rsvd0 fields of the Stream
			 * Endpoint context, and a Set TR Dequeue Pointer
			 * command does not clear it, so clear it here.
			 */
			if (xhci->quirks & XHCI_CDNS_SCTX_QUIRK) {
				stream_ctx->reserved[0] = 0;
				stream_ctx->reserved[1] = 0;
			}
		} else {
-			deq = le64_to_cpu(ep_ctx->deq) & ~EP_CTX_CYCLE_MASK;
+			deq = le64_to_cpu(ep_ctx->deq) & TR_DEQ_PTR_MASK;
		}
		xhci_dbg_trace(xhci, trace_xhci_dbg_cancel_urb,
			       "Successful Set TR Deq Ptr cmd, deq = @%08llx", deq);