Skip to content

Commit 67aba03

Browse files
committed
Use a common datetime part when naming output files across all formats
- the datetime is fetched just once; it is then passed to the functions that start the output files, always as the first argument (arg1)
1 parent d66b67b commit 67aba03

1 file changed

Lines changed: 26 additions & 20 deletions

File tree

testssl.sh

Lines changed: 26 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -1453,6 +1453,7 @@ fileout() {
14531453

14541454

14551455
json_header() {
1456+
local datetime_filename_part="$1"
14561457
local fname_prefix
14571458
local filename_provided=false
14581459

@@ -1481,9 +1482,9 @@ json_header() {
14811482
fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}"
14821483
fi
14831484
if [[ -z "$JSONFILE" ]]; then
1484-
JSONFILE="$fname_prefix-$(date +"%Y%m%d-%H%M".json)"
1485+
JSONFILE="$fname_prefix-${datetime_filename_part}.json"
14851486
elif [[ -d "$JSONFILE" ]]; then
1486-
JSONFILE="$JSONFILE/${fname_prefix}-$(date +"%Y%m%d-%H%M".json)"
1487+
JSONFILE="$JSONFILE/${fname_prefix}-${datetime_filename_part}.json"
14871488
fi
14881489
# Silently reset APPEND var if the file doesn't exist as otherwise it won't be created
14891490
if "$APPEND" && [[ ! -s "$JSONFILE" ]]; then
@@ -1504,6 +1505,7 @@ json_header() {
15041505

15051506

15061507
csv_header() {
1508+
local datetime_filename_part="$1"
15071509
local fname_prefix
15081510
local filename_provided=false
15091511

@@ -1529,9 +1531,9 @@ csv_header() {
15291531
fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}"
15301532
fi
15311533
if [[ -z "$CSVFILE" ]]; then
1532-
CSVFILE="${fname_prefix}-$(date +"%Y%m%d-%H%M".csv)"
1534+
CSVFILE="${fname_prefix}-${datetime_filename_part}.csv"
15331535
elif [[ -d "$CSVFILE" ]]; then
1534-
CSVFILE="$CSVFILE/${fname_prefix}-$(date +"%Y%m%d-%H%M".csv)"
1536+
CSVFILE="$CSVFILE/${fname_prefix}-${datetime_filename_part}.csv"
15351537
fi
15361538
# Silently reset APPEND var if the file doesn't exist as otherwise it won't be created
15371539
if "$APPEND" && [[ ! -s "$CSVFILE" ]]; then
@@ -1558,6 +1560,7 @@ csv_header() {
15581560
################# END JSON file functions. START HTML functions ####################
15591561

15601562
html_header() {
1563+
local datetime_filename_part="$1"
15611564
local fname_prefix
15621565
local filename_provided=false
15631566

@@ -1586,9 +1589,9 @@ html_header() {
15861589
fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}"
15871590
fi
15881591
if [[ -z "$HTMLFILE" ]]; then
1589-
HTMLFILE="$fname_prefix-$(date +"%Y%m%d-%H%M".html)"
1592+
HTMLFILE="$fname_prefix-${datetime_filename_part}.html"
15901593
elif [[ -d "$HTMLFILE" ]]; then
1591-
HTMLFILE="$HTMLFILE/$fname_prefix-$(date +"%Y%m%d-%H%M".html)"
1594+
HTMLFILE="$HTMLFILE/$fname_prefix-${datetime_filename_part}.html"
15921595
fi
15931596
# Silently reset APPEND var if the file doesn't exist as otherwise it won't be created
15941597
if "$APPEND" && [[ ! -s "$HTMLFILE" ]]; then
@@ -1636,8 +1639,9 @@ html_footer() {
16361639
################# END HTML file functions ####################
16371640

16381641
prepare_logging() {
1639-
# arg1: for testing mx records name we put a name of logfile in here, otherwise we get strange file names
1640-
local fname_prefix="$1"
1642+
local datetime_filename_part="$1"
1643+
# arg2: for testing mx records name we put a name of logfile in here, otherwise we get strange file names
1644+
local fname_prefix="$2"
16411645
local filename_provided=false
16421646

16431647
if [[ -n "$PARENT_LOGFILE" ]]; then
@@ -1654,10 +1658,10 @@ prepare_logging() {
16541658
[[ -z "$fname_prefix" ]] && fname_prefix="${FNAME_PREFIX}${NODE}_p${PORT}"
16551659

16561660
if [[ -z "$LOGFILE" ]]; then
1657-
LOGFILE="$fname_prefix-$(date +"%Y%m%d-%H%M".log)"
1661+
LOGFILE="$fname_prefix-${datetime_filename_part}.log"
16581662
elif [[ -d "$LOGFILE" ]]; then
16591663
# actually we were instructed to place all files in a DIR instead of the current working dir
1660-
LOGFILE="$LOGFILE/$fname_prefix-$(date +"%Y%m%d-%H%M".log)"
1664+
LOGFILE="$LOGFILE/$fname_prefix-${datetime_filename_part}.log"
16611665
else
16621666
: # just for clarity: a log file was specified, no need to do anything else
16631667
fi
@@ -23412,25 +23416,26 @@ draw_line() {
2341223416

2341323417

2341423418
run_mx_all_ips() {
23419+
local datetime_filename_part="$1"
2341523420
local mxs mx
2341623421
local mxport
2341723422
local -i ret=0
2341823423
local word=""
2341923424

2342023425
STARTTLS_PROTOCOL="smtp"
2342123426
# test first higher priority servers
23422-
mxs=$(get_mx_record "$1" | sort -n | sed -e 's/^.* //' -e 's/\.$//' | tr '\n' ' ')
23427+
mxs=$(get_mx_record "$2" | sort -n | sed -e 's/^.* //' -e 's/\.$//' | tr '\n' ' ')
2342323428
if [[ $CMDLINE_IP == one ]]; then
2342423429
word="as instructed one" # with highest priority
2342523430
mxs=${mxs%% *}
2342623431
else
2342723432
word="the only"
2342823433
fi
23429-
mxport=${2:-25}
23434+
mxport=${3:-25}
2343023435
if [[ -n "$LOGFILE" ]] || [[ -n "$PARENT_LOGFILE" ]]; then
23431-
prepare_logging
23436+
prepare_logging "${datetime_filename_part}"
2343223437
else
23433-
prepare_logging "${FNAME_PREFIX}mx-$1"
23438+
prepare_logging "${datetime_filename_part}" "${FNAME_PREFIX}mx-$1"
2343423439
fi
2343523440
if [[ -n "$mxs" ]] && [[ "$mxs" != ' ' ]]; then
2343623441
[[ $(count_words "$mxs") -gt 1 ]] && MULTIPLE_CHECKS=true
@@ -25324,6 +25329,7 @@ lets_roll() {
2532425329
RET=0 # this is a global as we can have a function main(), see #705. Should we toss then all local $ret?
2532525330
ip=""
2532625331
stopwatch start
25332+
DATETIME_FILENAME_PART="$(date +"%Y%m%d-%H%M")"
2532725333

2532825334
lets_roll init
2532925335
initialize_globals
@@ -25334,9 +25340,9 @@ lets_roll() {
2533425340
# html_header() needs to be called early! Otherwise if html_out() is called before html_header() and the
2533525341
# command line contains --htmlfile <htmlfile> or --html, it'll make problems with html output, see #692.
2533625342
# json_header and csv_header could be called later but for context reasons we'll leave it here
25337-
html_header
25338-
json_header
25339-
csv_header
25343+
html_header "${DATETIME_FILENAME_PART}"
25344+
json_header "${DATETIME_FILENAME_PART}"
25345+
csv_header "${DATETIME_FILENAME_PART}"
2534025346
get_install_dir
2534125347
# see #705, we need to source TLS_DATA_FILE here instead of in get_install_dir(), see #705
2534225348
[[ -r "$TLS_DATA_FILE" ]] && . "$TLS_DATA_FILE"
@@ -25361,7 +25367,7 @@ lets_roll() {
2536125367
fileout_banner
2536225368

2536325369
if "$do_mass_testing"; then
25364-
prepare_logging
25370+
prepare_logging "${DATETIME_FILENAME_PART}"
2536525371
if [[ "$MASS_TESTING_MODE" == parallel ]]; then
2536625372
run_mass_testing_parallel
2536725373
else
@@ -25376,12 +25382,12 @@ lets_roll() {
2537625382
#FIXME: do we need this really here?
2537725383
count_do_variables # if we have just 1x "do_*" --> we do a standard run -- otherwise just the one specified
2537825384
[[ $? -eq 1 ]] && set_scanning_defaults
25379-
run_mx_all_ips "${URI}" $PORT # we should reduce run_mx_all_ips to what's necessary as below we have similar code
25385+
run_mx_all_ips "${DATETIME_FILENAME_PART}" "${URI}" $PORT # we should reduce run_mx_all_ips to what's necessary as below we have similar code
2538025386
exit $?
2538125387
fi
2538225388

2538325389
[[ -z "$NODE" ]] && parse_hn_port "${URI}" # NODE, URL_PATH, PORT, IPADDRs2CHECK and IPADDRs2SHOW is set now
25384-
prepare_logging
25390+
prepare_logging "${DATETIME_FILENAME_PART}"
2538525391

2538625392
if [[ -n "$PROXY" ]] && $DNS_VIA_PROXY; then
2538725393
NODEIP="$NODE"

0 commit comments

Comments (0)