commit 0535edd8931c7821f7deff8d148648ed976790b6
Author: Ana Custura <ana@netstat.org.uk>
Date:   Thu May 14 14:02:07 2020 +0100
    Removes torperf arguments from analysis and reprocessing functions
---
 onionperf/onionperf                  |  4 +---
 onionperf/reprocessing.py            |  9 +++------
 onionperf/tests/test_reprocessing.py | 15 +++------------
 3 files changed, 7 insertions(+), 21 deletions(-)
diff --git a/onionperf/onionperf b/onionperf/onionperf
index a61fd73..1e0aa66 100755
--- a/onionperf/onionperf
+++ b/onionperf/onionperf
@@ -421,8 +421,6 @@ def analyze(args):
         analysis.add_torctl_file(args.torctl_logpath)
         analysis.analyze(args.do_simple, date_filter=args.date_filter)
         analysis.save(output_prefix=args.prefix)
-        if args.save_torperf:
-            analysis.export_torperf_version_1_1(output_prefix=args.prefix, do_compress=False)
 
     elif args.tgen_logpath is not None and os.path.isdir(args.tgen_logpath) and args.torctl_logpath is not None and os.path.isdir(args.torctl_logpath):
         from onionperf import reprocessing
@@ -430,7 +428,7 @@ def analyze(args):
         torctl_logs = reprocessing.collect_logs(args.torctl_logpath, '*torctl.log*')
         log_pairs = reprocessing.match(tgen_logs, torctl_logs, args.date_filter)
         logging.info("Found {0} matching log pairs to be reprocessed".format(len(log_pairs)))
-        reprocessing.multiprocess_logs(log_pairs, args.prefix, args.nickname, args.save_torperf, args.do_simple)
+        reprocessing.multiprocess_logs(log_pairs, args.prefix, args.nickname, args.do_simple)
 
     else:
         logging.error("Given paths were an unrecognized mix of file and directory paths, nothing will be analyzed")
diff --git a/onionperf/reprocessing.py b/onionperf/reprocessing.py
index 48f67bb..f88f311 100644
--- a/onionperf/reprocessing.py
+++ b/onionperf/reprocessing.py
@@ -46,24 +46,21 @@ def match(tgen_logs, tor_logs, date_filter):
     return log_pairs
 
 
-def analyze_func(prefix, nick, save_torperf, do_simple, pair):
+def analyze_func(prefix, nick, do_simple, pair):
     analysis = Analysis(nickname=nick)
     logging.info('Analysing pair for date {0}'.format(pair[2]))
     analysis.add_tgen_file(pair[0])
     analysis.add_torctl_file(pair[1])
     analysis.analyze(do_simple=do_simple, date_filter=pair[2])
     analysis.save(output_prefix=prefix)
-    if save_torperf:
-        analysis.export_torperf_version_1_1(
-            output_prefix=prefix, do_compress=False)
     return 1
 
 
-def multiprocess_logs(log_pairs, prefix, nick=None, save_torperf=False, do_simple=False):
+def multiprocess_logs(log_pairs, prefix, nick=None, do_simple=False):
     pool = Pool(cpu_count())
     analyses = None
     try:
-        func = partial(analyze_func, prefix, nick, save_torperf, do_simple)
+        func = partial(analyze_func, prefix, nick, do_simple)
         mr = pool.map_async(func, log_pairs)
         pool.close()
         while not mr.ready():
diff --git a/onionperf/tests/test_reprocessing.py b/onionperf/tests/test_reprocessing.py
index a120587..5e758d2 100644
--- a/onionperf/tests/test_reprocessing.py
+++ b/onionperf/tests/test_reprocessing.py
@@ -61,23 +61,17 @@ def test_log_match_with_wrong_filter_date():
 def test_analyze_func_json():
     pair = (DATA_DIR + 'logs/onionperf_2019-01-10_23:59:59.tgen.log', DATA_DIR + 'logs/onionperf_2019-01-10_23:59:59.torctl.log', datetime.datetime(2019, 1, 10, 0, 0))
     work_dir = tempfile.mkdtemp()
-    reprocessing.analyze_func(work_dir, None, True, False, pair)
+    reprocessing.analyze_func(work_dir, None, False, pair)
     json_file = os.path.join(work_dir, "2019-01-10.onionperf.analysis.json.xz")
     assert(os.path.exists(json_file))
-    for i in ['51200', '5242880', '1048576']:
-        torperf_file = os.path.join(work_dir, "op-ab-{0}-2019-01-10.tpf".format(i))
-        assert(os.path.exists(torperf_file))
     shutil.rmtree(work_dir)
 
 def test_multiprocess_logs():
     pairs = [(DATA_DIR + 'logs/onionperf_2019-01-10_23:59:59.tgen.log', DATA_DIR + 'logs/onionperf_2019-01-10_23:59:59.torctl.log', datetime.datetime(2019, 1, 10, 0, 0))]
     work_dir = tempfile.mkdtemp()
-    reprocessing.multiprocess_logs(pairs, work_dir, save_torperf=True)
+    reprocessing.multiprocess_logs(pairs, work_dir)
     json_file = os.path.join(work_dir, "2019-01-10.onionperf.analysis.json.xz")
     assert(os.path.exists(json_file))
-    for i in ['51200', '5242880', '1048576']:
-        torperf_file = os.path.join(work_dir, "op-ab-{0}-2019-01-10.tpf".format(i))
-        assert(os.path.exists(torperf_file))
     shutil.rmtree(work_dir)
 
 def test_end_to_end():
@@ -85,10 +79,7 @@
     torctl_logs = reprocessing.collect_logs(DATA_DIR, '*torctl.log')
     log_pairs = reprocessing.match(tgen_logs, torctl_logs, None)
     work_dir = tempfile.mkdtemp()
-    reprocessing.multiprocess_logs(log_pairs, work_dir, save_torperf=True)
+    reprocessing.multiprocess_logs(log_pairs, work_dir)
     json_file = os.path.join(work_dir, "2019-01-10.onionperf.analysis.json.xz")
     assert(os.path.exists(json_file))
-    for i in ['51200', '5242880', '1048576']:
-        torperf_file = os.path.join(work_dir, "op-ab-{0}-2019-01-10.tpf".format(i))
-        assert(os.path.exists(torperf_file))
     shutil.rmtree(work_dir)
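
For reference, a minimal sketch (not part of the patch) of driving the reprocessing entry points with their post-change signatures, mirroring the call pattern in onionperf/onionperf above; the log directory and output prefix below are placeholder paths:

    # Sketch only: "/path/to/logs" and "/path/to/output" are placeholders.
    from onionperf import reprocessing

    tgen_logs = reprocessing.collect_logs("/path/to/logs", '*tgen.log')
    torctl_logs = reprocessing.collect_logs("/path/to/logs", '*torctl.log*')
    log_pairs = reprocessing.match(tgen_logs, torctl_logs, None)  # None: no date filter, as in the tests

    # save_torperf is gone; only the output prefix, nickname and do_simple remain.
    reprocessing.multiprocess_logs(log_pairs, "/path/to/output", nick=None, do_simple=False)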