-/*-
- * Copyright (c) 2016 Solarflare Communications Inc.
+/* SPDX-License-Identifier: BSD-3-Clause
+ *
+ * Copyright (c) 2016-2018 Solarflare Communications Inc.
* All rights reserved.
*
* This software was jointly developed between OKTET Labs (under contract
* for Solarflare) and Solarflare Communications, Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
- * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
- * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
- * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef _SFC_COMMON_EFSYS_H
#include <rte_common.h>
#include <rte_malloc.h>
#include <rte_log.h>
+#include <rte_io.h>
#include "sfc_debug.h"
+#include "sfc_log.h"
#ifdef __cplusplus
extern "C" {
#include "efx_types.h"
-#ifndef _NOTE
-#define _NOTE(s)
-#endif
-
typedef bool boolean_t;
#ifndef B_FALSE
#define MIN(v1, v2) ((v1) < (v2) ? (v1) : (v2))
#endif
-/* There are macros for alignment in DPDK, but we need to make a proper
- * correspondence here, if we want to re-use them at all
- */
-#ifndef IS_P2ALIGNED
-#define IS_P2ALIGNED(v, a) ((((uintptr_t)(v)) & ((uintptr_t)(a) - 1)) == 0)
-#endif
-
-#ifndef P2ROUNDUP
-#define P2ROUNDUP(x, align) (-(-(x) & -(align)))
-#endif
-
-#ifndef P2ALIGN
-#define P2ALIGN(_x, _a) ((_x) & -(_a))
-#endif
-
-#ifndef IS2P
+#ifndef ISP2
#define ISP2(x) rte_is_power_of_2(x)
#endif
rte_prefetch_non_temporal(addr);
}
-/* Modifiers used for Windows builds */
-#define __in
-#define __in_opt
-#define __in_ecount(_n)
-#define __in_ecount_opt(_n)
-#define __in_bcount(_n)
-#define __in_bcount_opt(_n)
-
-#define __out
-#define __out_opt
-#define __out_ecount(_n)
-#define __out_ecount_opt(_n)
-#define __out_bcount(_n)
-#define __out_bcount_opt(_n)
-
-#define __deref_out
-
-#define __inout
-#define __inout_opt
-#define __inout_ecount(_n)
-#define __inout_ecount_opt(_n)
-#define __inout_bcount(_n)
-#define __inout_bcount_opt(_n)
-#define __inout_bcount_full_opt(_n)
-
-#define __deref_out_bcount_opt(n)
-
-#define __checkReturn
-#define __success(_x)
-
-#define __drv_when(_p, _c)
-
/* Code inclusion options */
#define EFSYS_OPT_HUNTINGTON 1
/* Enable SFN8xxx support */
#define EFSYS_OPT_MEDFORD 1
+/* Enable X2xxx support */
+#define EFSYS_OPT_MEDFORD2 1
#ifdef RTE_LIBRTE_SFC_EFX_DEBUG
#define EFSYS_OPT_CHECK_REG 1
#else
/* MCDI is required for SFN7xxx and SFN8xxx */
#define EFSYS_OPT_MCDI 1
#define EFSYS_OPT_MCDI_LOGGING 1
-#define EFSYS_OPT_MCDI_PROXY_AUTH 0
+#define EFSYS_OPT_MCDI_PROXY_AUTH 1
#define EFSYS_OPT_MAC_STATS 1
-#define EFSYS_OPT_LOOPBACK 0
+#define EFSYS_OPT_LOOPBACK 1
#define EFSYS_OPT_MON_MCDI 0
#define EFSYS_OPT_MON_STATS 0
#define EFSYS_OPT_VPD 0
#define EFSYS_OPT_NVRAM 0
#define EFSYS_OPT_BOOTCFG 0
+#define EFSYS_OPT_IMAGE_LAYOUT 0
#define EFSYS_OPT_DIAG 0
#define EFSYS_OPT_RX_SCALE 1
#define EFSYS_OPT_RX_PACKED_STREAM 0
+#define EFSYS_OPT_RX_ES_SUPER_BUFFER 1
+
+#define EFSYS_OPT_TUNNEL 1
+
+#define EFSYS_OPT_FW_SUBVARIANT_AWARE 1
+
+#define EFSYS_OPT_EVB 0
+
+#define EFSYS_OPT_MCDI_PROXY_AUTH_SERVER 0
+
/* ID */
typedef struct __efsys_identifier_s efsys_identifier_t;
/* DMA */
-typedef phys_addr_t efsys_dma_addr_t;
+typedef rte_iova_t efsys_dma_addr_t;
typedef struct efsys_mem_s {
const struct rte_memzone *esm_mz;
volatile uint32_t *_addr; \
\
_NOTE(CONSTANTCONDITION); \
- SFC_ASSERT(IS_P2ALIGNED(_offset, sizeof(efx_dword_t))); \
+ SFC_ASSERT(EFX_IS_P2ALIGNED(size_t, _offset, \
+ sizeof(efx_dword_t))); \
\
_addr = (volatile uint32_t *)(_base + (_offset)); \
(_edp)->ed_u32[0] = _addr[0]; \
volatile uint64_t *_addr; \
\
_NOTE(CONSTANTCONDITION); \
- SFC_ASSERT(IS_P2ALIGNED(_offset, sizeof(efx_qword_t))); \
+ SFC_ASSERT(EFX_IS_P2ALIGNED(size_t, _offset, \
+ sizeof(efx_qword_t))); \
\
_addr = (volatile uint64_t *)(_base + (_offset)); \
(_eqp)->eq_u64[0] = _addr[0]; \
volatile __m128i *_addr; \
\
_NOTE(CONSTANTCONDITION); \
- SFC_ASSERT(IS_P2ALIGNED(_offset, sizeof(efx_oword_t))); \
+ SFC_ASSERT(EFX_IS_P2ALIGNED(size_t, _offset, \
+ sizeof(efx_oword_t))); \
\
_addr = (volatile __m128i *)(_base + (_offset)); \
(_eop)->eo_u128[0] = _addr[0]; \
volatile uint32_t *_addr; \
\
_NOTE(CONSTANTCONDITION); \
- SFC_ASSERT(IS_P2ALIGNED(_offset, sizeof(efx_dword_t))); \
+ SFC_ASSERT(EFX_IS_P2ALIGNED(size_t, _offset, \
+ sizeof(efx_dword_t))); \
\
EFSYS_PROBE2(mem_writed, unsigned int, (_offset), \
uint32_t, (_edp)->ed_u32[0]); \
volatile uint64_t *_addr; \
\
_NOTE(CONSTANTCONDITION); \
- SFC_ASSERT(IS_P2ALIGNED(_offset, sizeof(efx_qword_t))); \
+ SFC_ASSERT(EFX_IS_P2ALIGNED(size_t, _offset, \
+ sizeof(efx_qword_t))); \
\
EFSYS_PROBE3(mem_writeq, unsigned int, (_offset), \
uint32_t, (_eqp)->eq_u32[1], \
volatile __m128i *_addr; \
\
_NOTE(CONSTANTCONDITION); \
- SFC_ASSERT(IS_P2ALIGNED(_offset, sizeof(efx_oword_t))); \
+ SFC_ASSERT(EFX_IS_P2ALIGNED(size_t, _offset, \
+ sizeof(efx_oword_t))); \
\
\
EFSYS_PROBE5(mem_writeo, unsigned int, (_offset), \
} while (B_FALSE)
+#define EFSYS_MEM_SIZE(_esmp) \
+ ((_esmp)->esm_mz->len)
+
#define EFSYS_MEM_ADDR(_esmp) \
((_esmp)->esm_addr)
volatile uint32_t *_addr; \
\
_NOTE(CONSTANTCONDITION); \
- SFC_ASSERT(IS_P2ALIGNED(_offset, sizeof(efx_dword_t))); \
+ SFC_ASSERT(EFX_IS_P2ALIGNED(size_t, _offset, \
+ sizeof(efx_dword_t))); \
_NOTE(CONSTANTCONDITION); \
if (_lock) \
SFC_BAR_LOCK(_esbp); \
\
_addr = (volatile uint32_t *)(_base + (_offset)); \
rte_rmb(); \
- (_edp)->ed_u32[0] = _addr[0]; \
+ (_edp)->ed_u32[0] = rte_read32_relaxed(_addr); \
\
EFSYS_PROBE2(bar_readd, unsigned int, (_offset), \
uint32_t, (_edp)->ed_u32[0]); \
volatile uint64_t *_addr; \
\
_NOTE(CONSTANTCONDITION); \
- SFC_ASSERT(IS_P2ALIGNED(_offset, sizeof(efx_qword_t))); \
+ SFC_ASSERT(EFX_IS_P2ALIGNED(size_t, _offset, \
+ sizeof(efx_qword_t))); \
\
SFC_BAR_LOCK(_esbp); \
\
_addr = (volatile uint64_t *)(_base + (_offset)); \
rte_rmb(); \
- (_eqp)->eq_u64[0] = _addr[0]; \
+ (_eqp)->eq_u64[0] = rte_read64_relaxed(_addr); \
\
EFSYS_PROBE3(bar_readq, unsigned int, (_offset), \
uint32_t, (_eqp)->eq_u32[1], \
volatile __m128i *_addr; \
\
_NOTE(CONSTANTCONDITION); \
- SFC_ASSERT(IS_P2ALIGNED(_offset, sizeof(efx_oword_t))); \
+ SFC_ASSERT(EFX_IS_P2ALIGNED(size_t, _offset, \
+ sizeof(efx_oword_t))); \
\
_NOTE(CONSTANTCONDITION); \
if (_lock) \
\
_addr = (volatile __m128i *)(_base + (_offset)); \
rte_rmb(); \
+ /* There is no rte_read128_relaxed() yet */ \
(_eop)->eo_u128[0] = _addr[0]; \
\
EFSYS_PROBE5(bar_reado, unsigned int, (_offset), \
volatile uint32_t *_addr; \
\
_NOTE(CONSTANTCONDITION); \
- SFC_ASSERT(IS_P2ALIGNED(_offset, sizeof(efx_dword_t))); \
+ SFC_ASSERT(EFX_IS_P2ALIGNED(size_t, _offset, \
+ sizeof(efx_dword_t))); \
\
_NOTE(CONSTANTCONDITION); \
if (_lock) \
uint32_t, (_edp)->ed_u32[0]); \
\
_addr = (volatile uint32_t *)(_base + (_offset)); \
- _addr[0] = (_edp)->ed_u32[0]; \
+ rte_write32_relaxed((_edp)->ed_u32[0], _addr); \
rte_wmb(); \
\
_NOTE(CONSTANTCONDITION); \
volatile uint64_t *_addr; \
\
_NOTE(CONSTANTCONDITION); \
- SFC_ASSERT(IS_P2ALIGNED(_offset, sizeof(efx_qword_t))); \
+ SFC_ASSERT(EFX_IS_P2ALIGNED(size_t, _offset, \
+ sizeof(efx_qword_t))); \
\
SFC_BAR_LOCK(_esbp); \
\
uint32_t, (_eqp)->eq_u32[0]); \
\
_addr = (volatile uint64_t *)(_base + (_offset)); \
- _addr[0] = (_eqp)->eq_u64[0]; \
+ rte_write64_relaxed((_eqp)->eq_u64[0], _addr); \
rte_wmb(); \
\
SFC_BAR_UNLOCK(_esbp); \
volatile __m128i *_addr; \
\
_NOTE(CONSTANTCONDITION); \
- SFC_ASSERT(IS_P2ALIGNED(_offset, sizeof(efx_oword_t))); \
+ SFC_ASSERT(EFX_IS_P2ALIGNED(size_t, _offset, \
+ sizeof(efx_oword_t))); \
\
_NOTE(CONSTANTCONDITION); \
if (_lock) \
uint32_t, (_eop)->eo_u32[0]); \
\
_addr = (volatile __m128i *)(_base + (_offset)); \
+ /* There is no rte_write128_relaxed() yet */ \
_addr[0] = (_eop)->eo_u128[0]; \
rte_wmb(); \
\
/* BARRIERS */
#define EFSYS_MEM_READ_BARRIER() rte_rmb()
-#define EFSYS_PIO_WRITE_BARRIER() rte_wmb()
+#define EFSYS_PIO_WRITE_BARRIER() rte_io_wmb()
/* DMA SYNC */
*/
#define EFSYS_DMA_SYNC_FOR_KERNEL(_esmp, _offset, _size) ((void)0)
-#define EFSYS_DMA_SYNC_FOR_DEVICE(_esmp, _offset, _size) ((void)0)
+
+/* Just avoid store and compiler (implicitly) reordering */
+#define EFSYS_DMA_SYNC_FOR_DEVICE(_esmp, _offset, _size) rte_wmb()
/* TIMESTAMP */
#define EFSYS_ERR(_esip, _code, _dword0, _dword1) \
do { \
(void)(_esip); \
- RTE_LOG(ERR, PMD, "FATAL ERROR #%u (0x%08x%08x)\n", \
+ SFC_GENERIC_LOG(ERR, "FATAL ERROR #%u (0x%08x%08x)", \
(_code), (_dword0), (_dword1)); \
_NOTE(CONSTANTCONDITION); \
} while (B_FALSE)