Simply replace the rte_smp_mb barriers with a SEQ_CST atomic thread fence
where there are no adjacent load/store operations.
Signed-off-by: Joyce Kong <joyce.kong@arm.com>
Reviewed-by: Ruifeng Wang <ruifeng.wang@arm.com>
*/
used->ring[used->idx & (vq->vring.size - 1)].id = task->req_idx;
used->ring[used->idx & (vq->vring.size - 1)].len = task->data_len;
*/
used->ring[used->idx & (vq->vring.size - 1)].id = task->req_idx;
used->ring[used->idx & (vq->vring.size - 1)].len = task->data_len;
+ rte_atomic_thread_fence(__ATOMIC_SEQ_CST);
+ rte_atomic_thread_fence(__ATOMIC_SEQ_CST);
rte_vhost_clr_inflight_desc_split(task->ctrlr->vid,
vq->id, used->idx, task->req_idx);
rte_vhost_clr_inflight_desc_split(task->ctrlr->vid,
vq->id, used->idx, task->req_idx);
desc->id = task->buffer_id;
desc->addr = 0;
desc->id = task->buffer_id;
desc->addr = 0;
+ rte_atomic_thread_fence(__ATOMIC_SEQ_CST);
if (vq->used_wrap_counter)
desc->flags |= VIRTQ_DESC_F_AVAIL | VIRTQ_DESC_F_USED;
else
desc->flags &= ~(VIRTQ_DESC_F_AVAIL | VIRTQ_DESC_F_USED);
if (vq->used_wrap_counter)
desc->flags |= VIRTQ_DESC_F_AVAIL | VIRTQ_DESC_F_USED;
else
desc->flags &= ~(VIRTQ_DESC_F_AVAIL | VIRTQ_DESC_F_USED);
+ rte_atomic_thread_fence(__ATOMIC_SEQ_CST);
rte_vhost_clr_inflight_desc_packed(task->ctrlr->vid, vq->id,
task->inflight_idx);
rte_vhost_clr_inflight_desc_packed(task->ctrlr->vid, vq->id,
task->inflight_idx);