@@ -32,11 +32,13 @@
 import argparse
 import logging
 import os
+import glob
+import re
+from pathlib import Path
 import time
 from collections import OrderedDict
 from contextlib import suppress
 from datetime import datetime
-import itertools
 import dllogger

 import torch
@@ -1008,6 +1010,11 @@ def _parse_args():
     args_text = yaml.safe_dump(args.__dict__, default_flow_style=False)
     return args, args_text

+def unique_log_fpath(fpath):
+    """Return a unique log filename for every separate run."""
+    log_num = max([0] + [int(re.search(r"\.(\d+)", Path(f).suffix).group(1))
+                         for f in glob.glob(f"{fpath}.*")])
+    return f"{fpath}.{log_num + 1}"

 def main():

@@ -1101,16 +1108,10 @@ def main():
     if dllogger_dir and not os.path.exists(dllogger_dir):
         os.makedirs(dllogger_dir, exist_ok=True)
     log_path = args.dllogger_name
-    original_log_path = log_path
-    if os.path.exists(log_path):
-        for i in itertools.count():
-            s_fname = original_log_path.split('.')
-            log_path = '.'.join(s_fname[:-1]) + f'_{i}.' + s_fname[-1]
-            if not os.path.exists(log_path):
-                break
     dllogger.init(
         backends=[
-            dllogger.JSONStreamBackend(verbosity=1, filename=log_path),
+            dllogger.JSONStreamBackend(verbosity=1, filename=log_path, append=True),
+            dllogger.JSONStreamBackend(verbosity=1, filename=unique_log_fpath(log_path)),
             dllogger.StdOutBackend(verbosity=0),
         ]
     )
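
For context, a minimal, self-contained sketch of how the new log-file naming behaves. It copies the unique_log_fpath helper from the patch and uses a hypothetical log name "log.json" (a stand-in for args.dllogger_name) inside a temporary directory: the cumulative file is the one dllogger appends to via append=True, while each run also gets its own numbered copy.

# Minimal sketch, not part of the patch: mirrors the unique_log_fpath helper
# and shows which per-run filename it would pick for a hypothetical "log.json".
import glob
import re
import tempfile
from pathlib import Path


def unique_log_fpath(fpath):
    """Return fpath with the next unused numeric suffix appended."""
    log_num = max([0] + [int(re.search(r"\.(\d+)", Path(f).suffix).group(1))
                         for f in glob.glob(f"{fpath}.*")])
    return f"{fpath}.{log_num + 1}"


with tempfile.TemporaryDirectory() as d:
    # Simulate two earlier runs: the cumulative log plus two per-run copies.
    for name in ("log.json", "log.json.1", "log.json.2"):
        Path(d, name).touch()
    # The next run would append to log.json and write its own copy here:
    print(unique_log_fpath(str(Path(d, "log.json"))))  # .../log.json.3

The net effect of the patch is that log.json accumulates results across restarts, while log.json.1, log.json.2, ... each hold exactly one run.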