+/* SPDX-License-Identifier: BSD-3-Clause
+ * Copyright(c) 2017 Cavium, Inc
+ */
/*
* Reciprocal divide
*
uint8_t sh1, sh2;
};
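+/*
+ * The 64-bit variant mirrors rte_reciprocal: "m" is the precomputed
+ * magic multiplier for the divisor and "sh1"/"sh2" are the two
+ * post-shifts applied by rte_reciprocal_divide_u64().
+ */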
+struct rte_reciprocal_u64 {
+ uint64_t m;
+ uint8_t sh1, sh2;
+};
+
static inline uint32_t rte_reciprocal_divide(uint32_t a, struct rte_reciprocal R)
{
uint32_t t = (uint32_t)(((uint64_t)a * R.m) >> 32);
return (t + ((a - t) >> R.sh1)) >> R.sh2;
}
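+
+/*
+ * Return the upper 64 bits of the 128-bit product x * y. A native
+ * __uint128_t multiply is used when the compiler provides one;
+ * otherwise the product is assembled from 32-bit halves below.
+ */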
+static __rte_always_inline uint64_t
+mullhi_u64(uint64_t x, uint64_t y)
+{
+#ifdef __SIZEOF_INT128__
+ __uint128_t xl = x;
+ __uint128_t rl = xl * y;
+
+ return (rl >> 64);
+#else
+ /* Portable fallback: schoolbook multiplication on 32-bit halves. */
+ uint64_t u0, u1, v0, v1, k, t;
+ uint64_t w1, w2;
+ uint64_t whi;
+
+ /* Split both operands into high and low 32-bit digits. */
+ u1 = x >> 32; u0 = x & 0xFFFFFFFF;
+ v1 = y >> 32; v0 = y & 0xFFFFFFFF;
+
+ t = u0*v0;
+ k = t >> 32; /* Carry out of the low partial product. */
+
+ t = u1*v0 + k;
+ w1 = t & 0xFFFFFFFF;
+ w2 = t >> 32;
+
+ t = u0*v1 + w1;
+ k = t >> 32;
+
+ whi = u1*v1 + w2 + k; /* High 64 bits of the 128-bit product. */
+
+ return whi;
+#endif
+}
+
+static __rte_always_inline uint64_t
+rte_reciprocal_divide_u64(uint64_t a, const struct rte_reciprocal_u64 *R)
+{
+ uint64_t t = mullhi_u64(a, R->m);
+
+ return (t + ((a - t) >> R->sh1)) >> R->sh2;
+}
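+
+/*
+ * Illustrative usage sketch (variable names are placeholders, not part
+ * of this patch): compute the reciprocal once for a divisor that is
+ * fixed at run time, then replace each fast-path division with the
+ * multiply/shift sequence above:
+ *
+ *   struct rte_reciprocal_u64 R = rte_reciprocal_value_u64(divisor);
+ *   ...
+ *   quotient = rte_reciprocal_divide_u64(value, &R);
+ */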
+
struct rte_reciprocal rte_reciprocal_value(uint32_t d);
+struct rte_reciprocal_u64 rte_reciprocal_value_u64(uint64_t d);
#endif /* _RTE_RECIPROCAL_H_ */
+/* SPDX-License-Identifier: BSD-3-Clause
+ * Copyright(c) 2017 Cavium, Inc
+ */
/*-
* BSD LICENSE
*
return R;
}
+
+/*
+ * Code taken from Hacker's Delight:
+ * http://www.hackersdelight.org/hdcodetxt/divlu.c.txt
+ * License permits inclusion here per:
+ * http://www.hackersdelight.org/permissions.htm
+ */
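+/*
+ * Divide the 128-bit value (u1:u0) by the 64-bit divisor v and return
+ * the 64-bit quotient; the remainder is written through r when r is
+ * not NULL. The caller must guarantee u1 < v, otherwise the quotient
+ * does not fit in 64 bits and (uint64_t)-1 is returned instead.
+ */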
+static uint64_t
+divide_128_div_64_to_64(uint64_t u1, uint64_t u0, uint64_t v, uint64_t *r)
+{
+ const uint64_t b = (1ULL << 32); /* Number base (32 bits). */
+ uint64_t un1, un0, /* Norm. dividend LSD's. */
+ vn1, vn0, /* Norm. divisor digits. */
+ q1, q0, /* Quotient digits. */
+ un64, un21, un10, /* Dividend digit pairs. */
+ rhat; /* A remainder. */
+ int s; /* Shift amount for norm. */
+
+ /* If overflow, set rem. to an impossible value. */
+ if (u1 >= v) {
+ if (r != NULL)
+ *r = (uint64_t) -1;
+ return (uint64_t) -1;
+ }
+
+ /* Count leading zeros. */
+ s = __builtin_clzll(v);
+ if (s > 0) {
+ /* Normalize the divisor and shift the dividend to match. */
+ v = v << s;
+ un64 = (u1 << s) | (u0 >> (64 - s));
+ un10 = u0 << s;
+ } else {
+ /* Divisor is already normalized. */
+ un64 = u1;
+ un10 = u0;
+ }
+
+ vn1 = v >> 32;
+ vn0 = v & 0xFFFFFFFF;
+
+ un1 = un10 >> 32;
+ un0 = un10 & 0xFFFFFFFF;
+
+ /* Compute the first quotient digit, q1, and its remainder. */
+ q1 = un64/vn1;
+ rhat = un64 - q1*vn1;
+again1:
+ if (q1 >= b || q1*vn0 > b*rhat + un1) {
+ q1 = q1 - 1;
+ rhat = rhat + vn1;
+ if (rhat < b)
+ goto again1;
+ }
+
+ /* Multiply and subtract to get the remaining dividend digits. */
+ un21 = un64*b + un1 - q1*v;
+
+ /* Compute the second quotient digit, q0. */
+ q0 = un21/vn1;
+ rhat = un21 - q0*vn1;
+again2:
+ if (q0 >= b || q0*vn0 > b*rhat + un0) {
+ q0 = q0 - 1;
+ rhat = rhat + vn1;
+ if (rhat < b)
+ goto again2;
+ }
+
+ if (r != NULL)
+ *r = (un21*b + un0 - q0*v) >> s;
+ return q1*b + q0;
+}
+
+struct rte_reciprocal_u64
+rte_reciprocal_value_u64(uint64_t d)
+{
+ struct rte_reciprocal_u64 R;
+ uint64_t m;
+ int l;
+
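+ /*
+ * As in the 32-bit case: l = floor(log2(d)), and m holds the low
+ * 64 bits of a slightly up-rounded approximation of 2^(64+l+1)/d;
+ * the missing top bit is recovered by the "a - t" step in
+ * rte_reciprocal_divide_u64(). A power-of-two divisor degenerates
+ * to m = 1, and the shifts reduce the whole operation to a >> l.
+ */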
+ l = 63 - __builtin_clzll(d);
+
+ m = divide_128_div_64_to_64((1ULL << l), 0, d, NULL) << 1;
+ m = (d == (1ULL << l)) ? 1 : m + 2; /* Power-of-two d: shift only. */
+ R.m = m;
+
+ R.sh1 = l > 1 ? 1 : l;
+ R.sh2 = (l > 0) ? l : 0;
+ R.sh2 -= R.sh2 && (m == 1) ? 1 : 0; /* Power of two: sh1 already covers one shift. */
+
+ return R;
+}
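+
+/*
+ * Worked example (illustration only): d = 10 gives l = 3,
+ * m = 0x999999999999999A, sh1 = 1, sh2 = 3. Dividing a = 100 then
+ * yields t = mullhi_u64(100, m) = 60 and
+ * (60 + ((100 - 60) >> 1)) >> 3 = 80 >> 3 = 10, as expected.
+ */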