/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2018 Intel Corporation
 */

#include "ifpga_feature_dev.h"
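/*
 * Global Error (GLOBAL_ERR) private feature of the FPGA Management Engine:
 * FME/PCIe/RAS error status reporting, error clearing, RAS error injection
 * and the error interrupt binding.
 */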
static int fme_err_get_errors(struct ifpga_fme_hw *fme, u64 *val)
{
	struct feature_fme_err *fme_err
		= get_fme_feature_ioaddr_by_index(fme,
				FME_FEATURE_ID_GLOBAL_ERR);
	struct feature_fme_error0 fme_error0;

	fme_error0.csr = readq(&fme_err->fme_err);
	*val = fme_error0.csr;

	return 0;
}
static int fme_err_get_first_error(struct ifpga_fme_hw *fme, u64 *val)
{
	struct feature_fme_err *fme_err
		= get_fme_feature_ioaddr_by_index(fme,
				FME_FEATURE_ID_GLOBAL_ERR);
	struct feature_fme_first_error fme_first_err;

	fme_first_err.csr = readq(&fme_err->fme_first_err);
	*val = fme_first_err.err_reg_status;

	return 0;
}
static int fme_err_get_next_error(struct ifpga_fme_hw *fme, u64 *val)
{
	struct feature_fme_err *fme_err
		= get_fme_feature_ioaddr_by_index(fme,
				FME_FEATURE_ID_GLOBAL_ERR);
	struct feature_fme_next_error fme_next_err;

	fme_next_err.csr = readq(&fme_err->fme_next_err);
	*val = fme_next_err.err_reg_status;

	return 0;
}
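/*
 * Clear latched FME errors. The caller passes the error value it last read;
 * if the hardware value has changed in the meantime the clear is refused so
 * that an error seen only by the hardware is not silently discarded.
 */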
static int fme_err_set_clear(struct ifpga_fme_hw *fme, u64 val)
{
	struct feature_fme_err *fme_err
		= get_fme_feature_ioaddr_by_index(fme,
				FME_FEATURE_ID_GLOBAL_ERR);
	struct feature_fme_error0 fme_error0;
	struct feature_fme_first_error fme_first_err;
	struct feature_fme_next_error fme_next_err;
	int ret = 0;

	spinlock_lock(&fme->lock);
	/* mask all FME errors while the error registers are cleared */
	writeq(GENMASK_ULL(63, 0), &fme_err->fme_err_mask);

	fme_error0.csr = readq(&fme_err->fme_err);
	if (val != fme_error0.csr) {
		ret = -EBUSY;
		goto exit;
	}

	fme_first_err.csr = readq(&fme_err->fme_first_err);
	fme_next_err.csr = readq(&fme_err->fme_next_err);

	writeq(fme_error0.csr, &fme_err->fme_err);
	writeq(fme_first_err.csr & FME_FIRST_ERROR_MASK,
			&fme_err->fme_first_err);
	writeq(fme_next_err.csr & FME_NEXT_ERROR_MASK,
			&fme_err->fme_next_err);

exit:
	writeq(FME_ERROR0_MASK_DEFAULT, &fme_err->fme_err_mask);
	spinlock_unlock(&fme->lock);

	return ret;
}
static int fme_err_get_revision(struct ifpga_fme_hw *fme, u64 *val)
{
	struct feature_fme_err *fme_err
		= get_fme_feature_ioaddr_by_index(fme,
				FME_FEATURE_ID_GLOBAL_ERR);
	struct feature_header header;

	header.csr = readq(&fme_err->header);
	*val = header.revision;

	return 0;
}
static int fme_err_get_pcie0_errors(struct ifpga_fme_hw *fme, u64 *val)
{
	struct feature_fme_err *fme_err
		= get_fme_feature_ioaddr_by_index(fme,
				FME_FEATURE_ID_GLOBAL_ERR);
	struct feature_fme_pcie0_error pcie0_err;

	pcie0_err.csr = readq(&fme_err->pcie0_err);
	*val = pcie0_err.csr;

	return 0;
}
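/*
 * Clear PCIe0 errors: mask reporting, check that the caller-supplied value
 * still matches the live register, write the matching value back to clear
 * it, then unmask again.
 */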
static int fme_err_set_pcie0_errors(struct ifpga_fme_hw *fme, u64 val)
{
	struct feature_fme_err *fme_err
		= get_fme_feature_ioaddr_by_index(fme,
				FME_FEATURE_ID_GLOBAL_ERR);
	struct feature_fme_pcie0_error pcie0_err;
	int ret = 0;

	spinlock_lock(&fme->lock);
	writeq(FME_PCIE0_ERROR_MASK, &fme_err->pcie0_err_mask);

	pcie0_err.csr = readq(&fme_err->pcie0_err);
	if (val != pcie0_err.csr)
		ret = -EBUSY;
	else
		writeq(pcie0_err.csr & FME_PCIE0_ERROR_MASK,
				&fme_err->pcie0_err);

	writeq(0UL, &fme_err->pcie0_err_mask);
	spinlock_unlock(&fme->lock);

	return ret;
}
static int fme_err_get_pcie1_errors(struct ifpga_fme_hw *fme, u64 *val)
{
	struct feature_fme_err *fme_err
		= get_fme_feature_ioaddr_by_index(fme,
				FME_FEATURE_ID_GLOBAL_ERR);
	struct feature_fme_pcie1_error pcie1_err;

	pcie1_err.csr = readq(&fme_err->pcie1_err);
	*val = pcie1_err.csr;

	return 0;
}
static int fme_err_set_pcie1_errors(struct ifpga_fme_hw *fme, u64 val)
{
	struct feature_fme_err *fme_err
		= get_fme_feature_ioaddr_by_index(fme,
				FME_FEATURE_ID_GLOBAL_ERR);
	struct feature_fme_pcie1_error pcie1_err;
	int ret = 0;

	spinlock_lock(&fme->lock);
	writeq(FME_PCIE1_ERROR_MASK, &fme_err->pcie1_err_mask);

	pcie1_err.csr = readq(&fme_err->pcie1_err);
	if (val != pcie1_err.csr)
		ret = -EBUSY;
	else
		writeq(pcie1_err.csr & FME_PCIE1_ERROR_MASK,
				&fme_err->pcie1_err);

	writeq(0UL, &fme_err->pcie1_err_mask);
	spinlock_unlock(&fme->lock);

	return ret;
}
static int fme_err_get_nonfatal_errors(struct ifpga_fme_hw *fme, u64 *val)
{
	struct feature_fme_err *fme_err
		= get_fme_feature_ioaddr_by_index(fme,
				FME_FEATURE_ID_GLOBAL_ERR);
	struct feature_fme_ras_nonfaterror ras_nonfaterr;

	ras_nonfaterr.csr = readq(&fme_err->ras_nonfaterr);
	*val = ras_nonfaterr.csr;

	return 0;
}
static int fme_err_get_catfatal_errors(struct ifpga_fme_hw *fme, u64 *val)
{
	struct feature_fme_err *fme_err
		= get_fme_feature_ioaddr_by_index(fme,
				FME_FEATURE_ID_GLOBAL_ERR);
	struct feature_fme_ras_catfaterror ras_catfaterr;

	ras_catfaterr.csr = readq(&fme_err->ras_catfaterr);
	*val = ras_catfaterr.csr;

	return 0;
}
static int fme_err_get_inject_errors(struct ifpga_fme_hw *fme, u64 *val)
{
	struct feature_fme_err *fme_err
		= get_fme_feature_ioaddr_by_index(fme,
				FME_FEATURE_ID_GLOBAL_ERR);
	struct feature_fme_ras_error_inj ras_error_inj;

	ras_error_inj.csr = readq(&fme_err->ras_error_inj);
	*val = ras_error_inj.csr & FME_RAS_ERROR_INJ_MASK;

	return 0;
}
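/* Error injection accepts only values within FME_RAS_ERROR_INJ_MASK. */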
static int fme_err_set_inject_errors(struct ifpga_fme_hw *fme, u64 val)
{
	struct feature_fme_err *fme_err
		= get_fme_feature_ioaddr_by_index(fme,
				FME_FEATURE_ID_GLOBAL_ERR);
	struct feature_fme_ras_error_inj ras_error_inj;

	spinlock_lock(&fme->lock);
	ras_error_inj.csr = readq(&fme_err->ras_error_inj);

	if (val <= FME_RAS_ERROR_INJ_MASK) {
		ras_error_inj.csr = val;
	} else {
		spinlock_unlock(&fme->lock);
		return -EINVAL;
	}

	writeq(ras_error_inj.csr, &fme_err->ras_error_inj);
	spinlock_unlock(&fme->lock);

	return 0;
}
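/*
 * Enable error reporting with the default masks: FME errors get
 * FME_ERROR0_MASK_DEFAULT, all other error sources are fully unmasked.
 */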
static void fme_error_enable(struct ifpga_fme_hw *fme)
{
	struct feature_fme_err *fme_err
		= get_fme_feature_ioaddr_by_index(fme,
				FME_FEATURE_ID_GLOBAL_ERR);

	writeq(FME_ERROR0_MASK_DEFAULT, &fme_err->fme_err_mask);
	writeq(0UL, &fme_err->pcie0_err_mask);
	writeq(0UL, &fme_err->pcie1_err_mask);
	writeq(0UL, &fme_err->ras_nonfat_mask);
	writeq(0UL, &fme_err->ras_catfat_mask);
}
static int fme_global_error_init(struct ifpga_feature *feature)
{
	struct ifpga_fme_hw *fme = feature->parent;

	fme_error_enable(fme);

	if (feature->ctx_num)
		fme->capability |= FPGA_FME_CAP_ERR_IRQ;

	return 0;
}
static void fme_global_error_uinit(struct ifpga_feature *feature)
{
	UNUSED(feature);
}
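/*
 * SEU (single event upset) error reporting is optional; consult the error
 * capability register before reading the SEU error message registers.
 */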
static int fme_err_check_seu(struct feature_fme_err *fme_err)
{
	struct feature_fme_error_capability error_cap;

	error_cap.csr = readq(&fme_err->fme_err_capability);

	return error_cap.seu_support ? 1 : 0;
}
static int fme_err_get_seu_emr(struct ifpga_fme_hw *fme,
		u64 *val, bool high)
{
	struct feature_fme_err *fme_err
		= get_fme_feature_ioaddr_by_index(fme,
				FME_FEATURE_ID_GLOBAL_ERR);

	if (!fme_err_check_seu(fme_err))
		return -ENODEV;

	if (high)
		*val = readq(&fme_err->seu_emr_h);
	else
		*val = readq(&fme_err->seu_emr_l);

	return 0;
}
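/*
 * Property dispatch: prop_id is split into PROP_TOP/PROP_SUB/PROP_ID fields.
 * ERR_PROP_TOP_FME_ERR selects the FME error group below; ERR_PROP_TOP_UNUSED
 * selects the root group (revision, PCIe, RAS and injection registers).
 */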
static int fme_err_fme_err_get_prop(struct ifpga_feature *feature,
		struct feature_prop *prop)
{
	struct ifpga_fme_hw *fme = feature->parent;
	u16 id = GET_FIELD(PROP_ID, prop->prop_id);

	switch (id) {
	case 0x1: /* ERRORS */
		return fme_err_get_errors(fme, &prop->data);
	case 0x2: /* FIRST_ERROR */
		return fme_err_get_first_error(fme, &prop->data);
	case 0x3: /* NEXT_ERROR */
		return fme_err_get_next_error(fme, &prop->data);
	case 0x5: /* SEU EMR LOW */
		return fme_err_get_seu_emr(fme, &prop->data, 0);
	case 0x6: /* SEU EMR HIGH */
		return fme_err_get_seu_emr(fme, &prop->data, 1);
	}

	return -ENOENT;
}
static int fme_err_root_get_prop(struct ifpga_feature *feature,
		struct feature_prop *prop)
{
	struct ifpga_fme_hw *fme = feature->parent;
	u16 id = GET_FIELD(PROP_ID, prop->prop_id);

	switch (id) {
	case 0x5: /* REVISION */
		return fme_err_get_revision(fme, &prop->data);
	case 0x6: /* PCIE0_ERRORS */
		return fme_err_get_pcie0_errors(fme, &prop->data);
	case 0x7: /* PCIE1_ERRORS */
		return fme_err_get_pcie1_errors(fme, &prop->data);
	case 0x8: /* NONFATAL_ERRORS */
		return fme_err_get_nonfatal_errors(fme, &prop->data);
	case 0x9: /* CATFATAL_ERRORS */
		return fme_err_get_catfatal_errors(fme, &prop->data);
	case 0xa: /* INJECT_ERRORS */
		return fme_err_get_inject_errors(fme, &prop->data);
	case 0xb: /* REVISION */
		return fme_err_get_revision(fme, &prop->data);
	}

	return -ENOENT;
}
static int fme_global_error_get_prop(struct ifpga_feature *feature,
		struct feature_prop *prop)
{
	u8 top = GET_FIELD(PROP_TOP, prop->prop_id);
	u8 sub = GET_FIELD(PROP_SUB, prop->prop_id);

	/* PROP_SUB is never used */
	if (sub != PROP_SUB_UNUSED)
		return -ENOENT;

	switch (top) {
	case ERR_PROP_TOP_FME_ERR:
		return fme_err_fme_err_get_prop(feature, prop);
	case ERR_PROP_TOP_UNUSED:
		return fme_err_root_get_prop(feature, prop);
	}

	return -ENOENT;
}
static int fme_err_fme_err_set_prop(struct ifpga_feature *feature,
		struct feature_prop *prop)
{
	struct ifpga_fme_hw *fme = feature->parent;
	u16 id = GET_FIELD(PROP_ID, prop->prop_id);

	switch (id) {
	case 0x4: /* CLEAR */
		return fme_err_set_clear(fme, prop->data);
	}

	return -ENOENT;
}
static int fme_err_root_set_prop(struct ifpga_feature *feature,
		struct feature_prop *prop)
{
	struct ifpga_fme_hw *fme = feature->parent;
	u16 id = GET_FIELD(PROP_ID, prop->prop_id);

	switch (id) {
	case 0x6: /* PCIE0_ERRORS */
		return fme_err_set_pcie0_errors(fme, prop->data);
	case 0x7: /* PCIE1_ERRORS */
		return fme_err_set_pcie1_errors(fme, prop->data);
	case 0xa: /* INJECT_ERRORS */
		return fme_err_set_inject_errors(fme, prop->data);
	}

	return -ENOENT;
}
static int fme_global_error_set_prop(struct ifpga_feature *feature,
		struct feature_prop *prop)
{
	u8 top = GET_FIELD(PROP_TOP, prop->prop_id);
	u8 sub = GET_FIELD(PROP_SUB, prop->prop_id);

	/* PROP_SUB is never used */
	if (sub != PROP_SUB_UNUSED)
		return -ENOENT;

	switch (top) {
	case ERR_PROP_TOP_FME_ERR:
		return fme_err_fme_err_set_prop(feature, prop);
	case ERR_PROP_TOP_UNUSED:
		return fme_err_root_set_prop(feature, prop);
	}

	return -ENOENT;
}
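/*
 * Bind the caller-supplied event fd to the error interrupt via
 * fpga_msix_set_block(); only valid when the ERR_IRQ capability was
 * detected at init time.
 */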
static int fme_global_err_set_irq(struct ifpga_feature *feature, void *irq_set)
{
	struct fpga_fme_err_irq_set *err_irq_set = irq_set;
	struct ifpga_fme_hw *fme;
	int ret;

	fme = (struct ifpga_fme_hw *)feature->parent;

	if (!(fme->capability & FPGA_FME_CAP_ERR_IRQ))
		return -ENODEV;

	spinlock_lock(&fme->lock);
	ret = fpga_msix_set_block(feature, 0, 1, &err_irq_set->evtfd);
	spinlock_unlock(&fme->lock);

	return ret;
}
struct ifpga_feature_ops fme_global_err_ops = {
	.init = fme_global_error_init,
	.uinit = fme_global_error_uinit,
	.get_prop = fme_global_error_get_prop,
	.set_prop = fme_global_error_set_prop,
	.set_irq = fme_global_err_set_irq,
};