/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2015 Intel Corporation
 */

#ifndef _I40E_RXTX_COMMON_AVX_H_
#define _I40E_RXTX_COMMON_AVX_H_
#include <stdint.h>
#include <ethdev_driver.h>
#include <rte_malloc.h>

#include "i40e_ethdev.h"
#include "i40e_rxtx.h"

#ifndef __INTEL_COMPILER
#pragma GCC diagnostic ignored "-Wcast-qual"
#endif

#ifdef __AVX2__
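/**
 * Refill RTE_I40E_RXQ_REARM_THRESH RX descriptors with fresh mbufs from the
 * queue's mempool, then advance the hardware tail. The avx512 flag selects
 * the wider-store path when 16-byte descriptors are in use.
 */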
static __rte_always_inline void
i40e_rxq_rearm_common(struct i40e_rx_queue *rxq, __rte_unused bool avx512)
{
	int i;
	uint16_t rx_id;
	volatile union i40e_rx_desc *rxdp;
	struct i40e_rx_entry *rxep = &rxq->sw_ring[rxq->rxrearm_start];

	rxdp = rxq->rx_ring + rxq->rxrearm_start;

	/* Pull 'n' more MBUFs into the software ring */
	if (rte_mempool_get_bulk(rxq->mp,
				 (void *)rxep,
				 RTE_I40E_RXQ_REARM_THRESH) < 0) {
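		/*
		 * Allocation failed: if the ring is about to run dry, park the
		 * next few descriptors on the fake mbuf with zeroed DMA
		 * addresses so the NIC never writes into a stale buffer, then
		 * account the failure and retry on a later rearm.
		 */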
		if (rxq->rxrearm_nb + RTE_I40E_RXQ_REARM_THRESH >=
		    rxq->nb_rx_desc) {
			__m128i dma_addr0;
			dma_addr0 = _mm_setzero_si128();
			for (i = 0; i < RTE_I40E_DESCS_PER_LOOP; i++) {
				rxep[i].mbuf = &rxq->fake_mbuf;
				_mm_store_si128((__m128i *)&rxdp[i].read,
						dma_addr0);
			}
		}
		rte_eth_devices[rxq->port_id].data->rx_mbuf_alloc_failed +=
			RTE_I40E_RXQ_REARM_THRESH;
		return;
	}

#ifndef RTE_LIBRTE_I40E_16BYTE_RX_DESC
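	/*
	 * With the default 32-byte descriptors only the 16-byte "read" half of
	 * each descriptor is rewritten, so descriptors are refilled one
	 * 128-bit store at a time, two mbufs per iteration. The #else branch
	 * below can use wider 256/512-bit stores because 16-byte descriptors
	 * are laid out back to back.
	 */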
	struct rte_mbuf *mb0, *mb1;
	__m128i dma_addr0, dma_addr1;
	__m128i hdr_room = _mm_set_epi64x(RTE_PKTMBUF_HEADROOM,
			RTE_PKTMBUF_HEADROOM);
	/* Initialize the mbufs in vector, process 2 mbufs in one loop */
	for (i = 0; i < RTE_I40E_RXQ_REARM_THRESH; i += 2, rxep += 2) {
		__m128i vaddr0, vaddr1;

		mb0 = rxep[0].mbuf;
		mb1 = rxep[1].mbuf;

		/* load buf_addr(lo 64bit) and buf_iova(hi 64bit) */
		RTE_BUILD_BUG_ON(offsetof(struct rte_mbuf, buf_iova) !=
				offsetof(struct rte_mbuf, buf_addr) + 8);
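		/*
		 * Because buf_iova sits right after buf_addr, one 128-bit load
		 * picks up both fields; _mm_unpackhi_epi64() below duplicates
		 * the IOVA into both 64-bit lanes so pkt_addr and hdr_addr in
		 * the descriptor both end up as buf_iova + headroom.
		 */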
		vaddr0 = _mm_loadu_si128((__m128i *)&mb0->buf_addr);
		vaddr1 = _mm_loadu_si128((__m128i *)&mb1->buf_addr);

		/* convert pa to dma_addr hdr/data */
		dma_addr0 = _mm_unpackhi_epi64(vaddr0, vaddr0);
		dma_addr1 = _mm_unpackhi_epi64(vaddr1, vaddr1);

		/* add headroom to pa values */
		dma_addr0 = _mm_add_epi64(dma_addr0, hdr_room);
		dma_addr1 = _mm_add_epi64(dma_addr1, hdr_room);

		/* flush desc with pa dma_addr */
		_mm_store_si128((__m128i *)&rxdp++->read, dma_addr0);
		_mm_store_si128((__m128i *)&rxdp++->read, dma_addr1);
	}
#else
#ifdef __AVX512VL__
	if (avx512) {
		struct rte_mbuf *mb0, *mb1, *mb2, *mb3;
		struct rte_mbuf *mb4, *mb5, *mb6, *mb7;
		__m512i dma_addr0_3, dma_addr4_7;
		__m512i hdr_room = _mm512_set1_epi64(RTE_PKTMBUF_HEADROOM);
		/* Initialize the mbufs in vector, process 8 mbufs in one loop */
		for (i = 0; i < RTE_I40E_RXQ_REARM_THRESH;
				i += 8, rxep += 8, rxdp += 8) {
			__m128i vaddr0, vaddr1, vaddr2, vaddr3;
			__m128i vaddr4, vaddr5, vaddr6, vaddr7;
			__m256i vaddr0_1, vaddr2_3;
			__m256i vaddr4_5, vaddr6_7;
			__m512i vaddr0_3, vaddr4_7;

			mb0 = rxep[0].mbuf;
			mb1 = rxep[1].mbuf;
			mb2 = rxep[2].mbuf;
			mb3 = rxep[3].mbuf;
			mb4 = rxep[4].mbuf;
			mb5 = rxep[5].mbuf;
			mb6 = rxep[6].mbuf;
			mb7 = rxep[7].mbuf;

			/* load buf_addr(lo 64bit) and buf_iova(hi 64bit) */
			RTE_BUILD_BUG_ON(offsetof(struct rte_mbuf, buf_iova) !=
					offsetof(struct rte_mbuf, buf_addr) + 8);
			vaddr0 = _mm_loadu_si128((__m128i *)&mb0->buf_addr);
			vaddr1 = _mm_loadu_si128((__m128i *)&mb1->buf_addr);
			vaddr2 = _mm_loadu_si128((__m128i *)&mb2->buf_addr);
			vaddr3 = _mm_loadu_si128((__m128i *)&mb3->buf_addr);
			vaddr4 = _mm_loadu_si128((__m128i *)&mb4->buf_addr);
			vaddr5 = _mm_loadu_si128((__m128i *)&mb5->buf_addr);
			vaddr6 = _mm_loadu_si128((__m128i *)&mb6->buf_addr);
			vaddr7 = _mm_loadu_si128((__m128i *)&mb7->buf_addr);

			/**
			 * merge 0 & 1, by casting 0 to 256-bit and inserting 1
			 * into the high lanes. Similarly for 2 & 3, and so on.
			 */
			vaddr0_1 =
				_mm256_inserti128_si256(_mm256_castsi128_si256(vaddr0),
							vaddr1, 1);
			vaddr2_3 =
				_mm256_inserti128_si256(_mm256_castsi128_si256(vaddr2),
							vaddr3, 1);
			vaddr4_5 =
				_mm256_inserti128_si256(_mm256_castsi128_si256(vaddr4),
							vaddr5, 1);
			vaddr6_7 =
				_mm256_inserti128_si256(_mm256_castsi128_si256(vaddr6),
							vaddr7, 1);
			vaddr0_3 =
				_mm512_inserti64x4(_mm512_castsi256_si512(vaddr0_1),
						   vaddr2_3, 1);
			vaddr4_7 =
				_mm512_inserti64x4(_mm512_castsi256_si512(vaddr4_5),
						   vaddr6_7, 1);

			/* convert pa to dma_addr hdr/data */
			dma_addr0_3 = _mm512_unpackhi_epi64(vaddr0_3, vaddr0_3);
			dma_addr4_7 = _mm512_unpackhi_epi64(vaddr4_7, vaddr4_7);

			/* add headroom to pa values */
			dma_addr0_3 = _mm512_add_epi64(dma_addr0_3, hdr_room);
			dma_addr4_7 = _mm512_add_epi64(dma_addr4_7, hdr_room);

			/* flush desc with pa dma_addr */
			_mm512_store_si512((__m512i *)&rxdp->read, dma_addr0_3);
			_mm512_store_si512((__m512i *)&(rxdp + 4)->read, dma_addr4_7);
		}
	} else
#endif /* __AVX512VL__*/
	{
		struct rte_mbuf *mb0, *mb1, *mb2, *mb3;
		__m256i dma_addr0_1, dma_addr2_3;
		__m256i hdr_room = _mm256_set1_epi64x(RTE_PKTMBUF_HEADROOM);
		/* Initialize the mbufs in vector, process 4 mbufs in one loop */
		for (i = 0; i < RTE_I40E_RXQ_REARM_THRESH;
				i += 4, rxep += 4, rxdp += 4) {
			__m128i vaddr0, vaddr1, vaddr2, vaddr3;
			__m256i vaddr0_1, vaddr2_3;

			mb0 = rxep[0].mbuf;
			mb1 = rxep[1].mbuf;
			mb2 = rxep[2].mbuf;
			mb3 = rxep[3].mbuf;

			/* load buf_addr(lo 64bit) and buf_iova(hi 64bit) */
			RTE_BUILD_BUG_ON(offsetof(struct rte_mbuf, buf_iova) !=
					offsetof(struct rte_mbuf, buf_addr) + 8);
			vaddr0 = _mm_loadu_si128((__m128i *)&mb0->buf_addr);
			vaddr1 = _mm_loadu_si128((__m128i *)&mb1->buf_addr);
			vaddr2 = _mm_loadu_si128((__m128i *)&mb2->buf_addr);
			vaddr3 = _mm_loadu_si128((__m128i *)&mb3->buf_addr);

			/**
			 * merge 0 & 1, by casting 0 to 256-bit and inserting 1
			 * into the high lanes. Similarly for 2 & 3
			 */
			vaddr0_1 = _mm256_inserti128_si256
				(_mm256_castsi128_si256(vaddr0), vaddr1, 1);
			vaddr2_3 = _mm256_inserti128_si256
				(_mm256_castsi128_si256(vaddr2), vaddr3, 1);

			/* convert pa to dma_addr hdr/data */
			dma_addr0_1 = _mm256_unpackhi_epi64(vaddr0_1, vaddr0_1);
			dma_addr2_3 = _mm256_unpackhi_epi64(vaddr2_3, vaddr2_3);

			/* add headroom to pa values */
			dma_addr0_1 = _mm256_add_epi64(dma_addr0_1, hdr_room);
			dma_addr2_3 = _mm256_add_epi64(dma_addr2_3, hdr_room);

			/* flush desc with pa dma_addr */
			_mm256_store_si256((__m256i *)&rxdp->read, dma_addr0_1);
			_mm256_store_si256((__m256i *)&(rxdp + 2)->read, dma_addr2_3);
		}
	}

#endif

	rxq->rxrearm_start += RTE_I40E_RXQ_REARM_THRESH;
	if (rxq->rxrearm_start >= rxq->nb_rx_desc)
		rxq->rxrearm_start = 0;

	rxq->rxrearm_nb -= RTE_I40E_RXQ_REARM_THRESH;

	rx_id = (uint16_t)((rxq->rxrearm_start == 0) ?
			     (rxq->nb_rx_desc - 1) : (rxq->rxrearm_start - 1));
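	/*
	 * rx_id is the last rearmed descriptor: one before rxrearm_start, or
	 * the end of the ring when rxrearm_start has just wrapped to 0.
	 */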

	/* Update the tail pointer on the NIC */
	I40E_PCI_REG_WC_WRITE(rxq->qrx_tail, rx_id);
}
#endif /* __AVX2__*/

#endif /*_I40E_RXTX_COMMON_AVX_H_*/