commit 096c2bb23d79033be79503754e180145879d3449
Author: Ana C. Custura <ana@netstat.org.uk>
Date:   Wed Jun 5 15:07:10 2019 +0100

    Allows source to be given as a parameter on the commandline
---
 onionperf/onionperf       |  9 ++++++++-
 onionperf/reprocessing.py | 42 ++++++++++++++++++++++++------------------
 2 files changed, 32 insertions(+), 19 deletions(-)
diff --git a/onionperf/onionperf b/onionperf/onionperf
index fab805b..45ead3c 100755
--- a/onionperf/onionperf
+++ b/onionperf/onionperf
@@ -385,6 +385,13 @@ files generated by this script will be written""",
         action="store", dest="date_filter",
         default=None)
 
+    reprocess_parser.add_argument('-n', '--nickname',
+        help="""a nickname STRING that identifies the machine where the input logfiles were produced""",
+        metavar="STRING", type=str,
+        action="store", dest="nickname",
+        default=None)
+
+
     # get args and call the command handler for the chosen mode
     args = main_parser.parse_args()
     args.func(args)
@@ -495,7 +502,7 @@ def reprocess(args):
     torctl_logs = reprocessing.collect_logs(args.torctl_dirpath, '*torctl.log')
     log_pairs = reprocessing.match(tgen_logs, torctl_logs, args.date_filter)
     logging.info("Found {0} matching log pairs to be reprocessed".format(len(log_pairs)))
-    reprocessing.multiprocess_logs(log_pairs, args.prefix)
+    reprocessing.multiprocess_logs(log_pairs, args.prefix, args.nickname)
 
 def type_nonnegative_integer(value):
     i = int(value)
diff --git a/onionperf/reprocessing.py b/onionperf/reprocessing.py
index 25111ba..66b82c7 100644
--- a/onionperf/reprocessing.py
+++ b/onionperf/reprocessing.py
@@ -7,6 +7,8 @@ import fnmatch
 import logging
 import os
 import re
+import sys
+
 
 def collect_logs(dirpath, pattern):
     logs = []
@@ -14,7 +16,7 @@ def collect_logs(dirpath, pattern):
         for filename in fnmatch.filter(filenames, pattern):
             logs.append(os.path.join(root, filename))
     return logs
-
+
 
 def match(tgen_logs, tor_logs, date_filter):
     log_pairs = []
@@ -24,41 +26,47 @@ def match(tgen_logs, tor_logs, date_filter):
             date = m.group(0)
             fdate = datetime.datetime.strptime(date, "%Y-%m-%d")
             found = False
-            if date_filter is None or util.do_dates_match(date_filter,fdate):
+            if date_filter is None or util.do_dates_match(date_filter, fdate):
                 for tor_log in tor_logs:
                     if date in tor_log:
                         log_pairs.append((tgen_log, tor_log, fdate))
                         found = True
-                        break
+                        break
             if not found:
-                logging.warning('Skipping file {0}, could not find a match for it'.format(tgen_log))
-
+                logging.warning(
+                    'Skipping file {0}, could not find a match for it'.
+                    format(tgen_log))
+
         else:
-            logging.warning('Filename {0} does not contain a date'.format(tgen_log))
+            logging.warning(
+                'Filename {0} does not contain a date'.format(tgen_log))
     if not log_pairs:
-        logging.warning('Could not find any log matches. No analyses will be performed')
+        logging.warning(
+            'Could not find any log matches. No analyses will be performed')
     return log_pairs
-
-
-def analyze_func(prefix, pair):
-    analysis = Analysis()
+
+
+def analyze_func(prefix, nick, pair):
+    analysis = Analysis(nickname=nick)
     logging.info('Analysing pair for date {0}'.format(pair[2]))
     analysis.add_tgen_file(pair[0])
     analysis.add_torctl_file(pair[1])
     analysis.analyze(do_simple=False, date_filter=pair[2])
     analysis.save(output_prefix=prefix)
-    analysis.export_torperf_version_1_1(output_prefix=prefix, do_compress=False)
+    analysis.export_torperf_version_1_1(
+        output_prefix=prefix, do_compress=False)
     return 1
-
-
-def multiprocess_logs(log_pairs, prefix):
+
+
+def multiprocess_logs(log_pairs, prefix, nick):
     pool = Pool(cpu_count())
     analyses = None
    try:
-        func = partial(analyze_func, prefix)
+        func = partial(analyze_func, prefix, nick)
         mr = pool.map_async(func, log_pairs)
         pool.close()
-        while not mr.ready(): mr.wait(1)
+        while not mr.ready():
+            mr.wait(1)
     except KeyboardInterrupt:
         logging.info("interrupted, terminating process pool")
         pool.terminate()
@@ -66,5 +74,3 @@ def multiprocess_logs(log_pairs, prefix):
         sys.exit()
     except Exception as e:
         logging.error(e)
-
-
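
If the flag carries over as written above, a reprocessing run could presumably be tagged with the name of the machine that produced the logs via something like `onionperf reprocess ... --nickname op-ab` (the other reprocess-mode options are unchanged and not shown in this diff, and "op-ab" is just an example value); the value then ends up in each exported analysis through `Analysis(nickname=nick)`.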
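
For readers following the multiprocess_logs() change: the extra nickname argument has to be pre-bound with functools.partial because Pool.map_async() hands each worker call exactly one item from the iterable (a log pair). A minimal standalone sketch of that pattern, using a stand-in worker and made-up prefix/nickname values rather than the real Analysis code:

    from functools import partial
    from multiprocessing import Pool, cpu_count

    def analyze_func(prefix, nick, pair):
        # Stand-in for the real worker: prefix and nick are fixed for the
        # whole run, while each call receives one (tgen, torctl, date) pair.
        return "{0} {1} {2}".format(prefix, nick, pair)

    if __name__ == '__main__':
        log_pairs = [('tgen.log.a', 'torctl.log.a', '2019-06-01'),
                     ('tgen.log.b', 'torctl.log.b', '2019-06-02')]
        pool = Pool(cpu_count())
        # partial() fixes the first two arguments, so map_async() can keep
        # feeding each worker exactly one element of log_pairs.
        func = partial(analyze_func, 'out/op-', 'op-nickname')
        mr = pool.map_async(func, log_pairs)
        pool.close()
        pool.join()
        print(mr.get())

Binding the fixed arguments up front keeps the worker signature compatible with map_async(), which is why nick is threaded through partial() rather than packed into each log pair.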