aq->rq.qint_idx = rq->qid % nix->qints;
aq->rq.xqe_drop_ena = 1;
+ /* If RED is enabled, fill in pass/drop thresholds for all cases */
+ if (rq->red_pass && (rq->red_pass >= rq->red_drop)) {
+ aq->rq.spb_aura_pass = rq->spb_red_pass;
+ aq->rq.lpb_aura_pass = rq->red_pass;
+
+ aq->rq.spb_aura_drop = rq->spb_red_drop;
+ aq->rq.lpb_aura_drop = rq->red_drop;
+ }
+
if (cfg) {
if (rq->sso_ena) {
/* SSO mode */
aq->rq_mask.rq_int_ena = ~aq->rq_mask.rq_int_ena;
aq->rq_mask.qint_idx = ~aq->rq_mask.qint_idx;
aq->rq_mask.xqe_drop_ena = ~aq->rq_mask.xqe_drop_ena;
+
+ if (rq->red_pass && (rq->red_pass >= rq->red_drop)) {
+ aq->rq_mask.spb_aura_pass = ~aq->rq_mask.spb_aura_pass;
+ aq->rq_mask.lpb_aura_pass = ~aq->rq_mask.lpb_aura_pass;
+
+ aq->rq_mask.spb_aura_drop = ~aq->rq_mask.spb_aura_drop;
+ aq->rq_mask.lpb_aura_drop = ~aq->rq_mask.lpb_aura_drop;
+ }
}
return 0;
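
For orientation, a minimal caller-side sketch (not part of the patch) of how these levels might be filled in before the RQ config path runs. Only the struct roc_nix_rq field names come from the diff above; the helper name and the 90/70 split are illustrative assumptions. The guard above programs RED only when red_pass is non-zero and red_pass >= red_drop, so a caller must preserve that invariant:

static void
rq_red_levels_set(struct roc_nix_rq *rq, uint64_t aura_limit)
{
	/* Illustrative levels; the config path requires red_pass >= red_drop */
	rq->red_pass = (aura_limit * 90) / 100;
	rq->red_drop = (aura_limit * 70) / 100;

	/* The cn9k path also takes separate small-packet-buffer (SPB) levels */
	rq->spb_red_pass = rq->red_pass;
	rq->spb_red_drop = rq->red_drop;
}
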
aq->rq.qint_idx = rq->qid % nix->qints;
aq->rq.xqe_drop_ena = 1;
+ /* If RED is enabled, fill in pass/drop thresholds for all cases */
+ if (rq->red_pass && (rq->red_pass >= rq->red_drop)) {
+ aq->rq.spb_pool_pass = rq->red_pass;
+ aq->rq.spb_aura_pass = rq->red_pass;
+ aq->rq.lpb_pool_pass = rq->red_pass;
+ aq->rq.lpb_aura_pass = rq->red_pass;
+ aq->rq.wqe_pool_pass = rq->red_pass;
+ aq->rq.xqe_pass = rq->red_pass;
+
+ aq->rq.spb_pool_drop = rq->red_drop;
+ aq->rq.spb_aura_drop = rq->red_drop;
+ aq->rq.lpb_pool_drop = rq->red_drop;
+ aq->rq.lpb_aura_drop = rq->red_drop;
+ aq->rq.wqe_pool_drop = rq->red_drop;
+ aq->rq.xqe_drop = rq->red_drop;
+ }
+
if (cfg) {
if (rq->sso_ena) {
/* SSO mode */
aq->rq_mask.rq_int_ena = ~aq->rq_mask.rq_int_ena;
aq->rq_mask.qint_idx = ~aq->rq_mask.qint_idx;
aq->rq_mask.xqe_drop_ena = ~aq->rq_mask.xqe_drop_ena;
+
+ if (rq->red_pass && (rq->red_pass >= rq->red_drop)) {
+ aq->rq_mask.spb_pool_pass = ~aq->rq_mask.spb_pool_pass;
+ aq->rq_mask.spb_aura_pass = ~aq->rq_mask.spb_aura_pass;
+ aq->rq_mask.lpb_pool_pass = ~aq->rq_mask.lpb_pool_pass;
+ aq->rq_mask.lpb_aura_pass = ~aq->rq_mask.lpb_aura_pass;
+ aq->rq_mask.wqe_pool_pass = ~aq->rq_mask.wqe_pool_pass;
+ aq->rq_mask.xqe_pass = ~aq->rq_mask.xqe_pass;
+
+ aq->rq_mask.spb_pool_drop = ~aq->rq_mask.spb_pool_drop;
+ aq->rq_mask.spb_aura_drop = ~aq->rq_mask.spb_aura_drop;
+ aq->rq_mask.lpb_pool_drop = ~aq->rq_mask.lpb_pool_drop;
+ aq->rq_mask.lpb_aura_drop = ~aq->rq_mask.lpb_aura_drop;
+ aq->rq_mask.wqe_pool_drop = ~aq->rq_mask.wqe_pool_drop;
+ aq->rq_mask.xqe_drop = ~aq->rq_mask.xqe_drop;
+ }
}
return 0;
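
The rq_mask writes above follow the admin-queue write-enable convention: the mailbox request pairs the value struct (aq->rq) with an identically laid-out mask struct (aq->rq_mask), and only context bits whose mask bit is set get written to hardware. Complementing a zero-initialized bitfield therefore sets every bit of that one field. A standalone sketch, with illustrative field widths rather than the real NIX RQ layout:

#include <stdint.h>
#include <stdio.h>

struct rq_ctx {
	uint64_t qint_idx : 7; /* widths are illustrative */
	uint64_t xqe_drop_ena : 1;
};

int
main(void)
{
	struct rq_ctx mask = {0};

	/* 0 -> 0x7f: write-enable the whole 7-bit field */
	mask.qint_idx = ~mask.qint_idx;
	printf("qint_idx mask: 0x%x\n", (unsigned)mask.qint_idx);
	return 0;
}
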
/* Update aura fields */
aura->pool_addr = pool_id; /* AF will translate to associated poolctx */
aura->ena = 1;
- aura->shift = __builtin_clz(block_count) - 8;
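+ /* shift = log2(block_count) - 8, floored at 0 for small counts */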
+ aura->shift = plt_log2_u32(block_count);
+ aura->shift = aura->shift < 8 ? 0 : aura->shift - 8;
aura->limit = block_count;
aura->pool_caching = 1;
aura->err_int_ena = BIT(NPA_AURA_ERR_INT_AURA_ADD_OVER);
aura->err_int_ena |= BIT(NPA_AURA_ERR_INT_AURA_ADD_UNDER);
aura->err_int_ena |= BIT(NPA_AURA_ERR_INT_AURA_FREE_UNDER);
aura->err_int_ena |= BIT(NPA_AURA_ERR_INT_POOL_DIS);
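+ /* Averaging constant used for RED level computation */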
+ aura->avg_con = ROC_NPA_AVG_CONT;
/* Many-to-one reduction */
aura->err_qint_idx = aura_id % lf->qints;
pool->ena = 1;
pool->buf_size = block_size / ROC_ALIGN;
pool->stack_max_pages = stack_size;
- pool->shift = __builtin_clz(block_count) - 8;
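+ /* shift = log2(block_count) - 8, floored at 0 for small counts */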
+ pool->shift = plt_log2_u32(block_count);
+ pool->shift = pool->shift < 8 ? 0 : pool->shift - 8;
pool->ptr_start = 0;
pool->ptr_end = ~0;
pool->stack_caching = 1;
pool->err_int_ena = BIT(NPA_POOL_ERR_INT_OVFLS);
pool->err_int_ena |= BIT(NPA_POOL_ERR_INT_RANGE);
pool->err_int_ena |= BIT(NPA_POOL_ERR_INT_PERR);
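+ /* Averaging constant used for RED level computation */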
+ pool->avg_con = ROC_NPA_AVG_CONT;
/* Many-to-one reduction */
pool->err_qint_idx = pool_id % lf->qints;
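
On the shift fix: __builtin_clz() returns the number of leading zero bits, i.e. 31 - floor(log2(v)) for a 32-bit value, so the removed expression shrank as block_count grew — inverted from the intended log2(block_count) - 8. The replacement computes log2 directly and floors the result at 0 so pools smaller than 2^8 buffers don't underflow the shift. A self-contained sketch of the corrected computation; npa_shift() and the log2 stand-in are hypothetical names, and plt_log2_u32() is assumed to return log2 of its argument (exact for power-of-two counts):

#include <stdint.h>
#include <stdio.h>

static inline uint32_t
log2_u32(uint32_t v) /* stand-in for plt_log2_u32(); v must be non-zero */
{
	return 31 - (uint32_t)__builtin_clz(v);
}

static inline uint32_t
npa_shift(uint32_t block_count)
{
	uint32_t shift = log2_u32(block_count);

	return shift < 8 ? 0 : shift - 8; /* floor at 0 for small pools */
}

int
main(void)
{
	/* 128 -> 0, 256 -> 0, 1 << 20 -> 12 */
	printf("%u %u %u\n", npa_shift(128), npa_shift(256), npa_shift(1u << 20));
	return 0;
}
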