for (i = 0; i < lif->nrxqcqs; i++) {
struct ionic_qcq *rxq = lif->rxqcqs[i];
- if (!rxq->deferred_start) {
+ if (!(rxq->flags & IONIC_QCQ_F_DEFERRED)) {
err = ionic_dev_rx_queue_start(lif->eth_dev, i);
if (err)
for (i = 0; i < lif->ntxqcqs; i++) {
struct ionic_qcq *txq = lif->txqcqs[i];
- if (!txq->deferred_start) {
+ if (!(txq->flags & IONIC_QCQ_F_DEFERRED)) {
err = ionic_dev_tx_queue_start(lif->eth_dev, i);
if (err)
#define IONIC_QCQ_F_SG BIT(1)
#define IONIC_QCQ_F_INTR BIT(2)
#define IONIC_QCQ_F_NOTIFYQ BIT(3)
+#define IONIC_QCQ_F_DEFERRED BIT(4) /* do not start queue in rte_eth_dev_start(); replaces the removed deferred_start bool */
/* Queue / Completion Queue */
struct ionic_qcq {
uint32_t total_size;
uint32_t flags; /* IONIC_QCQ_F_* bits; note callers store (flags & IONIC_QCQ_F_DEFERRED) directly into *_deferred_start — nonzero (16), not normalized to 1; presumably consumers only test truthiness — TODO confirm */
struct ionic_intr_info intr;
- bool deferred_start; /* folded into flags as IONIC_QCQ_F_DEFERRED */
};
#define IONIC_Q_TO_QCQ(q) container_of(q, struct ionic_qcq, q)
qinfo->nb_desc = q->num_descs;
qinfo->conf.offloads = txq->offloads;
- qinfo->conf.tx_deferred_start = txq->deferred_start;
+ qinfo->conf.tx_deferred_start = txq->flags & IONIC_QCQ_F_DEFERRED;
}
static inline void __rte_cold
}
/* Do not start queue with rte_eth_dev_start() */
- txq->deferred_start = tx_conf->tx_deferred_start;
+ if (tx_conf->tx_deferred_start)
+ txq->flags |= IONIC_QCQ_F_DEFERRED;
txq->offloads = offloads;
qinfo->mp = rxq->mb_pool;
qinfo->scattered_rx = dev->data->scattered_rx;
qinfo->nb_desc = q->num_descs;
- qinfo->conf.rx_deferred_start = rxq->deferred_start;
+ qinfo->conf.rx_deferred_start = rxq->flags & IONIC_QCQ_F_DEFERRED;
qinfo->conf.offloads = rxq->offloads;
}
*/
/* Do not start queue with rte_eth_dev_start() */
- rxq->deferred_start = rx_conf->rx_deferred_start;
+ if (rx_conf->rx_deferred_start)
+ rxq->flags |= IONIC_QCQ_F_DEFERRED;
rxq->offloads = offloads;