/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2014-2018 Broadcom
 * All rights reserved.
 */
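
/*
 * Completion ring entries carry a "valid" bit whose expected state toggles on
 * each pass through the ring.  CMP_VALID() checks that bit against the phase
 * implied by the raw consumer index, CMPL_VALID() checks it against a
 * caller-tracked expected value, and NQ_CMP_VALID() performs the same phase
 * check for notification queue (NQ) entries.
 */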
#define CMP_VALID(cmp, raw_cons, ring) \
	(!!(rte_le_to_cpu_32(((struct cmpl_base *)(cmp))->info3_v) & \
	    CMPL_BASE_V) == !((raw_cons) & ((ring)->ring_size)))

#define CMPL_VALID(cmp, v) \
	(!!(rte_le_to_cpu_32(((struct cmpl_base *)(cmp))->info3_v) & \
	    CMPL_BASE_V) == !!(v))

#define NQ_CMP_VALID(nqcmp, raw_cons, ring) \
	(!!((nqcmp)->v & rte_cpu_to_le_32(NQ_CN_V)) == \
	 !((raw_cons) & ((ring)->ring_size)))
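
/* Extract the completion type from the common completion header. */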
#define CMP_TYPE(cmp) \
	(((struct cmpl_base *)cmp)->type & CMPL_BASE_TYPE_MASK)
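
/*
 * Consumer index helpers: the raw index increases monotonically and is only
 * masked down to a ring offset when a descriptor is actually accessed.
 * FLIP_VALID() flips the expected valid value once the consumer index reaches
 * or passes the ring mask, i.e. on wrap-around.
 */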
#define ADV_RAW_CMP(idx, n)		((idx) + (n))
#define NEXT_RAW_CMP(idx)		ADV_RAW_CMP(idx, 1)
#define RING_CMP(ring, idx)		((idx) & (ring)->ring_mask)
#define RING_CMPL(ring_mask, idx)	((idx) & (ring_mask))
#define NEXT_CMP(idx)			RING_CMP(ADV_RAW_CMP(idx, 1))
#define FLIP_VALID(cons, mask, val)	((cons) >= (mask) ? !(val) : (val))
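
/*
 * Doorbell flag combinations: DB_CP_REARM_FLAGS re-arms the completion ring
 * interrupt, while DB_CP_FLAGS additionally sets DB_IRQ_DIS to leave the
 * interrupt disabled (poll mode).
 */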
#define DB_CP_REARM_FLAGS	(DB_KEY_CP | DB_IDX_VALID)
#define DB_CP_FLAGS		(DB_KEY_CP | DB_IDX_VALID | DB_IRQ_DIS)
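
/*
 * Advance the completion index by "inc", flipping the expected valid bit and
 * resetting the index when the end of the ring is reached.
 */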
#define NEXT_CMPL(cpr, idx, v, inc)	do { \
	(idx) += (inc); \
	if (unlikely((idx) >= (cpr)->cp_ring_struct->ring_size)) { \
		(v) = !(v); \
		(idx) = 0; \
	} \
} while (0)
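
/*
 * Completion ring doorbell writes.  B_CP_DB_REARM() acknowledges completions
 * up to "raw_cons" and re-arms the interrupt; B_CP_DIS_DB() and B_CP_DB()
 * acknowledge completions while keeping the interrupt disabled; the
 * ARM/DISARM variants only change the interrupt state of the ring, with the
 * _IDX_ forms also writing an explicit consumer index.
 */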
#define B_CP_DB_REARM(cpr, raw_cons) \
	rte_write32((DB_CP_REARM_FLAGS | \
		     RING_CMP(((cpr)->cp_ring_struct), raw_cons)), \
		    ((cpr)->cp_db.doorbell))

#define B_CP_DB_ARM(cpr)	rte_write32((DB_KEY_CP), \
					    ((cpr)->cp_db.doorbell))

#define B_CP_DB_DISARM(cpr)	(*(uint32_t *)((cpr)->cp_db.doorbell) = \
				 DB_KEY_CP | DB_IRQ_DIS)

#define B_CP_DB_IDX_ARM(cpr, cons) \
	(*(uint32_t *)((cpr)->cp_db.doorbell) = (DB_CP_REARM_FLAGS | \
						 (cons)))

#define B_CP_DB_IDX_DISARM(cpr, cons)	do { \
	rte_smp_wmb(); \
	(*(uint32_t *)((cpr)->cp_db.doorbell) = (DB_CP_FLAGS | \
						 (cons))); \
} while (0)

#define B_CP_DIS_DB(cpr, raw_cons) \
	rte_write32((DB_CP_FLAGS | \
		     RING_CMP(((cpr)->cp_ring_struct), raw_cons)), \
		    ((cpr)->cp_db.doorbell))

#define B_CP_DB(cpr, raw_cons, ring_mask) \
	rte_write32((DB_CP_FLAGS | \
		     RING_CMPL((ring_mask), raw_cons)), \
		    ((cpr)->cp_db.doorbell))
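
/*
 * Per completion ring state: the descriptor ring and its IOVA mapping, the
 * associated doorbell, the hardware statistics context and the underlying
 * ring bookkeeping structure.
 */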
struct bnxt_cp_ring_info {
	uint32_t		cp_raw_cons;

	struct cmpl_base	*cp_desc_ring;
	struct bnxt_db_info	cp_db;
	rte_iova_t		cp_desc_mapping;

	struct ctx_hw_stats	*hw_stats;
	rte_iova_t		hw_stats_map;
	uint32_t		hw_stats_ctx_id;

	struct bnxt_ring	*cp_ring_struct;
};
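
/*
 * Illustrative poll-loop sketch (not part of this header; shown only to
 * document how the macros and structure above fit together).  A consumer
 * typically checks the entry at RING_CMP(ring, raw_cons) with CMP_VALID(),
 * handles it based on CMP_TYPE(), advances with NEXT_RAW_CMP(), and finally
 * acknowledges the work through the doorbell:
 *
 *	uint32_t raw_cons = cpr->cp_raw_cons;
 *	struct cmpl_base *cmp;
 *
 *	while (1) {
 *		cmp = &cpr->cp_desc_ring[RING_CMP(cpr->cp_ring_struct,
 *						  raw_cons)];
 *		if (!CMP_VALID(cmp, raw_cons, cpr->cp_ring_struct))
 *			break;
 *		switch (CMP_TYPE(cmp)) { ... }
 *		raw_cons = NEXT_RAW_CMP(raw_cons);
 *	}
 *
 *	cpr->cp_raw_cons = raw_cons;
 *	B_CP_DIS_DB(cpr, raw_cons);	// or B_CP_DB_REARM(cpr, raw_cons)
 */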

#define RX_CMP_L2_ERRORS \
	(RX_PKT_CMPL_ERRORS_BUFFER_ERROR_MASK | RX_PKT_CMPL_ERRORS_CRC_ERROR)
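
/* Async event, forwarded request and error recovery handling. */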
void bnxt_handle_async_event(struct bnxt *bp, struct cmpl_base *cmp);
void bnxt_handle_fwd_req(struct bnxt *bp, struct cmpl_base *cmp);
int bnxt_event_hwrm_resp_handler(struct bnxt *bp, struct cmpl_base *cmp);
void bnxt_dev_reset_and_resume(void *arg);
void bnxt_wait_for_device_shutdown(struct bnxt *bp);
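
/*
 * Short aliases for the HWRM async event completion constants used by the
 * error recovery code.
 */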
#define EVENT_DATA1_REASON_CODE_FW_EXCEPTION_FATAL \
	HWRM_ASYNC_EVENT_CMPL_RESET_NOTIFY_EVENT_DATA1_REASON_CODE_FW_EXCEPTION_FATAL
#define EVENT_DATA1_REASON_CODE_MASK \
	HWRM_ASYNC_EVENT_CMPL_RESET_NOTIFY_EVENT_DATA1_REASON_CODE_MASK

#define EVENT_DATA1_FLAGS_MASK \
	HWRM_ASYNC_EVENT_CMPL_ERROR_RECOVERY_EVENT_DATA1_FLAGS_MASK

#define EVENT_DATA1_FLAGS_MASTER_FUNC \
	HWRM_ASYNC_EVENT_CMPL_ERROR_RECOVERY_EVENT_DATA1_FLAGS_MASTER_FUNC

#define EVENT_DATA1_FLAGS_RECOVERY_ENABLED \
	HWRM_ASYNC_EVENT_CMPL_ERROR_RECOVERY_EVENT_DATA1_FLAGS_RECOVERY_ENABLED

bool bnxt_is_recovery_enabled(struct bnxt *bp);
bool bnxt_is_master_func(struct bnxt *bp);