Hi,
I incorrectly ran "systat netstat -N" instead of "systat -N netstat" and
got confused why it wasn't resolving host names. The "-N" ends up being
parsed by atof as a 0s delay, which is then clamped to 5s. The patch below instead
prints an error if the delay cannot be parsed. I think the <= 0 case
should also produce an error but I left the existing behaviour of
setting it to 5s.
Index: usr.bin/systat/main.c
===================================================================
RCS file: /cvs/src/usr.bin/systat/main.c,v
retrieving revision 1.72
diff -u -p -u -p -r1.72 main.c
--- usr.bin/systat/main.c 12 Jan 2020 20:51:08 -0000 1.72
+++ usr.bin/systat/main.c 25 Jan 2021 08:07:34 -0000
@@ -415,6 +415,22 @@ gethz(void)
}
int
+parse_delay(const char *str, double *delay)
+{
+	char *endptr = NULL;
+	double value;
+
+	value = strtod(str, &endptr);
+	/* reject empty string as well as trailing garbage */
+	if (endptr == str || *endptr != '\0')
+		return 1;
+	if (value <= 0)
+		value = 5;
+	*delay = value;
+	return 0;
+}
+
+int
main(int argc, char *argv[])
{
char errbuf[_POSIX2_LINE_MAX];
@@ -475,9 +491,8 @@ main(int argc, char *argv[])
nflag = 1;
break;
case 's':
- delay = atof(optarg);
- if (delay <= 0)
- delay = 5;
+ if (parse_delay(optarg, &delay))
+ errx(1, "-s %s: invalid delay value", optarg);
break;
case 'w':
rawwidth = strtonum(optarg, 1, MAX_LINE_BUF-1, &errstr);
@@ -497,16 +512,14 @@ main(int argc, char *argv[])
argv += optind;
if (argc == 1) {
- double del = atof(argv[0]);
- if (del == 0)
+ if (parse_delay(argv[0], &delay))
viewstr = argv[0];
- else
- delay = del;
} else if (argc == 2) {
viewstr = argv[0];
- delay = atof(argv[1]);
- if (delay <= 0)
- delay = 5;
+ if (parse_delay(argv[1], &delay))
+ errx(1, "invalid delay value: %s", argv[1]);
+ } else {
+ usage();
}
udelay = (useconds_t)(delay * 1000000.0);