Fix compilation on 32-bit platforms by truncation: a 64-bit modulus there is lowered by the compiler to a libgcc helper call that the kernel does not provide. The jitter for the table distribution is limited to a 32-bit value because the random numbers are scaled as a 32-bit value.
Also fix some whitespace.

Fixes: 99803171ef04 ("netem: add uapi to express delay and jitter in nanoseconds")
Reported-by: Randy Dunlap <rdun...@infradead.org>
Signed-off-by: Stephen Hemminger <step...@networkplumber.org>
---
 net/sched/sch_netem.c | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/net/sched/sch_netem.c b/net/sched/sch_netem.c
index 644323d6081c..dd70924cbcdf 100644
--- a/net/sched/sch_netem.c
+++ b/net/sched/sch_netem.c
@@ -312,9 +312,9 @@ static bool loss_event(struct netem_sched_data *q)
  * std deviation sigma. Uses table lookup to approximate the desired
  * distribution, and a uniformly-distributed pseudo-random source.
  */
-static s64 tabledist(s64 mu, s64 sigma,
+static s64 tabledist(s64 mu, s32 sigma,
 		     struct crndstate *state,
-			const struct disttable *dist)
+		     const struct disttable *dist)
 {
 	s64 x;
 	long t;
@@ -327,7 +327,7 @@ static s64 tabledist(s64 mu, s64 sigma,
 
 	/* default uniform distribution */
 	if (dist == NULL)
-		return (rnd % (2*sigma)) - sigma + mu;
+		return (rnd % (2 * sigma)) - sigma + mu;
 
 	t = dist->table[rnd % dist->size];
 	x = (sigma % NETEM_DIST_SCALE) * t;
-- 
2.11.0
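
Not part of the patch, just for context: a minimal userspace sketch (function
names here are made up, this is not kernel code) of why the type of sigma
decides whether the modulus in tabledist() compiles on a 32-bit target. With
an s64 sigma the compiler lowers the modulus to a libgcc helper, which the
kernel does not link against; with an s32 sigma it stays a native 32-bit
operation.

/* Userspace demo only; build with: gcc -m32 -O2 sketch.c */
#include <stdio.h>
#include <stdint.h>

/* 64-bit sigma: the modulus is a 64-bit operation and, on a 32-bit
 * target, becomes a call to a libgcc helper such as __moddi3.
 */
static int64_t mod_with_s64_sigma(uint32_t rnd, int64_t sigma)
{
	return rnd % (2 * sigma);
}

/* 32-bit sigma: the modulus stays a native 32-bit instruction. */
static int64_t mod_with_s32_sigma(uint32_t rnd, int32_t sigma)
{
	return rnd % (2 * sigma);
}

int main(void)
{
	uint32_t rnd = 123456789u;	/* stands in for get_crandom() */
	int32_t sigma = 10000;		/* jitter, now capped at 32 bits */

	printf("s64 sigma: %lld\n", (long long)mod_with_s64_sigma(rnd, sigma));
	printf("s32 sigma: %lld\n", (long long)mod_with_s32_sigma(rnd, sigma));
	return 0;
}

Disassembling an -m32 build should show the helper call only in the s64
variant, which is why sigma is narrowed to s32 in the patch.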