	struct i40e_tx_queue *txq = tx_queue;
	volatile uint64_t *status;
	uint64_t mask, expect;
-	uint32_t desc;
+	int32_t desc, dd;

	if (unlikely(offset >= txq->nb_tx_desc))
		return -EINVAL;

-	desc = txq->tx_tail + offset;
-	/* go to next desc that has the RS bit */
-	desc = ((desc + txq->tx_rs_thresh - 1) / txq->tx_rs_thresh) *
-		txq->tx_rs_thresh;
-	if (desc >= txq->nb_tx_desc) {
-		desc -= txq->nb_tx_desc;
-		if (desc >= txq->nb_tx_desc)
-			desc -= txq->nb_tx_desc;
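+	/* only nb_tx_desc - nb_tx_free descriptors are still in flight;
+	 * an older descriptor has already been completed and freed
+	 */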
+	if (offset >= txq->nb_tx_desc - txq->nb_tx_free)
+		return RTE_ETH_TX_DESC_DONE;
+
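+	/* offset 0 designates the most recently queued descriptor,
+	 * one slot behind tx_tail (modulo the ring size)
+	 */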
+	desc = txq->tx_tail - offset - 1;
+	if (desc < 0)
+		desc += txq->nb_tx_desc;
+
+	/* offset is too small: the RS descriptor of this batch may not be
+	 * submitted yet, no other way than reading the PCI reg
+	 */
+	if (unlikely(offset < txq->tx_rs_thresh)) {
+		int16_t tx_head, queue_size;
+
+		tx_head = I40E_READ_REG(I40E_VSI_TO_HW(txq->vsi),
+				I40E_QTX_HEAD(txq->reg_idx));
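+		/* descriptors between the HW head and the SW tail are
+		 * still in flight
+		 */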
+		queue_size = txq->tx_tail - tx_head;
+		if (queue_size < 0)
+			queue_size += txq->nb_tx_desc;
+		return queue_size > offset ? RTE_ETH_TX_DESC_FULL :
+				RTE_ETH_TX_DESC_DONE;
	}

-	status = &txq->tx_ring[desc].cmd_type_offset_bsz;
+	/* index of the descriptor whose DD bit must be checked: the last
+	 * descriptor of the tx_rs_thresh batch that contains desc
+	 */
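+	/* e.g. tx_rs_thresh == 32 and desc == 40 give dd == 63 */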
+	dd = (desc / txq->tx_rs_thresh + 1) * txq->tx_rs_thresh - 1;
+
+	/* in full featured mode, the RS bit is only set in the last
+	 * descriptor of a multi-segment packet, so the DD bit must be
+	 * checked on the last descriptor of the packet instead
+	 */
+	if (!((txq->offloads == 0) &&
+			(txq->tx_rs_thresh >= RTE_PMD_I40E_TX_MAX_BURST)))
+		dd = txq->sw_ring[dd].last_id;
+
+	status = &txq->tx_ring[dd].cmd_type_offset_bsz;
	mask = rte_le_to_cpu_64(I40E_TXD_QW1_DTYPE_MASK);
	expect = rte_cpu_to_le_64(
		I40E_TX_DESC_DTYPE_DESC_DONE << I40E_TXD_QW1_DTYPE_SHIFT);