A future patch will allow Tx scatter/gather to be disabled. Store the
max supported segment count in the queue structure so it can be
changed at runtime based on the configuration.
Signed-off-by: Andrew Boyer <aboyer@pensando.io>
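---
A minimal sketch (not part of this patch) of how a follow-up change
might use the stored value, assuming a hypothetical "sg_enabled"
configuration bit and helper name:

#include <stdbool.h>

/* Hypothetical helper: with SG disabled the queue accepts only
 * single-segment mbufs, so num_segs_fw drops to 1; otherwise it is
 * the V1 SG element limit plus one for the base descriptor.
 */
static void
ionic_tx_qcq_set_segs(struct ionic_tx_qcq *txq, bool sg_enabled)
{
	if (sg_enabled) {
		txq->flags |= IONIC_QCQ_F_SG;
		txq->num_segs_fw = IONIC_TX_MAX_SG_ELEMS_V1 + 1;
	} else {
		txq->flags &= ~IONIC_QCQ_F_SG;
		txq->num_segs_fw = 1;
	}
}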
uint16_t ntxq_descs, struct ionic_tx_qcq **txq_out)
{
struct ionic_tx_qcq *txq;
- uint16_t flags;
+ uint16_t flags, num_segs_fw;
int err;
flags = IONIC_QCQ_F_SG;
+
+	/* SG element limit plus one for the base descriptor */
+	num_segs_fw = IONIC_TX_MAX_SG_ELEMS_V1 + 1;
+
err = ionic_qcq_alloc(lif,
IONIC_QTYPE_TXQ,
sizeof(struct ionic_tx_qcq),
	if (err)
		return err;
txq->flags = flags;
+ txq->num_segs_fw = num_segs_fw;
lif->txqcqs[index] = txq;
*txq_out = txq;
struct ionic_qcq qcq;
/* cacheline2 */
+ uint16_t num_segs_fw; /* # segs supported by current FW */
uint16_t flags;
struct ionic_tx_stats stats;
(PKT_TX_OFFLOAD_MASK ^ IONIC_TX_OFFLOAD_MASK)
uint16_t
-ionic_prep_pkts(void *tx_queue __rte_unused, struct rte_mbuf **tx_pkts,
- uint16_t nb_pkts)
+ionic_prep_pkts(void *tx_queue, struct rte_mbuf **tx_pkts, uint16_t nb_pkts)
{
+ struct ionic_tx_qcq *txq = tx_queue;
struct rte_mbuf *txm;
uint64_t offloads;
int i = 0;
for (i = 0; i < nb_pkts; i++) {
txm = tx_pkts[i];
- if (txm->nb_segs > IONIC_TX_MAX_SG_ELEMS_V1 + 1) {
+ if (txm->nb_segs > txq->num_segs_fw) {
rte_errno = -EINVAL;
break;
}
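
For context, a minimal usage sketch (not part of this patch) showing
how an application reaches this check through the ethdev Tx prepare
API; the send_burst() helper and its parameters are illustrative:

#include <stdio.h>
#include <rte_ethdev.h>
#include <rte_errno.h>

/* rte_eth_tx_prepare() invokes the PMD's tx_pkt_prepare callback,
 * here ionic_prep_pkts(). A packet whose nb_segs exceeds
 * txq->num_segs_fw stops the scan and sets rte_errno.
 */
static uint16_t
send_burst(uint16_t port_id, uint16_t queue_id,
	   struct rte_mbuf **pkts, uint16_t nb)
{
	uint16_t nb_prep = rte_eth_tx_prepare(port_id, queue_id, pkts, nb);

	if (nb_prep != nb)
		printf("pkt %u rejected, rte_errno %d\n",
		       nb_prep, rte_errno);

	/* Transmit only the packets that passed validation */
	return rte_eth_tx_burst(port_id, queue_id, pkts, nb_prep);
}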