[tor-commits] [onionperf/develop] Also accept a directory in `onionperf filter -i`.

karsten at torproject.org
Wed Sep 16 15:15:08 UTC 2020


commit 053da92c69d4b3628f33e1dd610a1bc8f601cbe0
Author: Karsten Loesing <karsten.loesing at gmx.net>
Date:   Thu Aug 27 09:59:59 2020 +0200

    Also accept a directory in `onionperf filter -i`.
    
    And clarify that we're leaving statistics unchanged as part of the
    filtering.
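
For reference, the refactored `Filtering` API is now driven in a single step per
input file. A minimal sketch of the file case, with illustrative file names that
are not taken from the repository:

    import os
    from onionperf.filtering import Filtering

    filtering = Filtering()
    # Optional: exclude measurements using relays listed in a fingerprint
    # file (one 40-character hex fingerprint per line, with or without a
    # leading `$`); the path here is hypothetical.
    filtering.exclude_fingerprints("excluded-fingerprints.txt")

    # apply_filters() now loads the analysis, applies the configured
    # fingerprint filters, and writes the filtered analysis, replacing the
    # earlier read_input()/apply_filters()/write_output() sequence.
    input_path = os.path.abspath("onionperf.analysis.json.xz")
    output_dir, output_file = os.path.split(
        os.path.abspath("filtered.analysis.json.xz"))
    filtering.apply_filters(input_path=input_path, output_dir=output_dir,
                            output_file=output_file)
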
---
 CHANGELOG.md           |  4 ++--
 onionperf/filtering.py | 10 +++-------
 onionperf/onionperf    | 35 ++++++++++++++++++++++++-----------
 3 files changed, 29 insertions(+), 20 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index c57695e..ebd43e1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,8 +1,8 @@
 # Changes in version 0.7 - 2020-??-??
 
  - Add a new `onionperf filter` mode that takes an OnionPerf analysis
-   results file as input, applies filters, and produces a new
-   OnionPerf analysis results file as output.
+   results file or directory as input, applies filters, and produces
+   new OnionPerf analysis results file(s) as output.
 
 # Changes in version 0.6 - 2020-08-08
 
diff --git a/onionperf/filtering.py b/onionperf/filtering.py
index ab96f1e..b431eb9 100644
--- a/onionperf/filtering.py
+++ b/onionperf/filtering.py
@@ -15,9 +15,6 @@ class Filtering(object):
         self.fingerprints_to_exclude = None
         self.fingerprint_pattern = re.compile("\$?([0-9a-fA-F]{40})")
 
-    def read_input(self, path):
-        self.analysis = OPAnalysis.load(filename=path)
-
     def include_fingerprints(self, path):
         self.fingerprints_to_include = []
         with open(path, 'rt') as f:
@@ -36,7 +33,8 @@ class Filtering(object):
                     fingerprint = fingerprint_match.group(1).upper()
                     self.fingerprints_to_exclude.append(fingerprint)
 
-    def apply_filters(self):
+    def apply_filters(self, input_path, output_dir, output_file):
+        self.analysis = OPAnalysis.load(filename=input_path)
         if self.fingerprints_to_include is None and self.fingerprints_to_exclude is None:
             return
         for source in self.analysis.get_nodes():
@@ -94,7 +92,5 @@ class Filtering(object):
                         retained_tgen_transfers[transfer_id] = transfer_data
             self.analysis.set_tgen_streams(source, retained_tgen_streams)
             self.analysis.set_tgen_transfers(source, retained_tgen_transfers)
-
-    def write_output(self, path):
-        self.analysis.save(filename=path)
+        self.analysis.save(filename=output_file, output_prefix=output_dir)
 
diff --git a/onionperf/onionperf b/onionperf/onionperf
index 96c6869..7c16aea 100755
--- a/onionperf/onionperf
+++ b/onionperf/onionperf
@@ -76,8 +76,11 @@ Analyze Tor and TGen output
 """
 
 DESC_FILTER = """
-Takes an OnionPerf analysis results file as input, applies filters,
-and produces a new OnionPerf analysis results file as output.
+Takes an OnionPerf analysis results file or directory as input, applies filters,
+and produces new OnionPerf analysis results file(s) as output.
+
+This subcommand only filters measurements in `data/[source]/tgen/transfers`
+and `data/[source]/tgen/streams`, but leaves any summaries unchanged.
 """
 HELP_FILTER = """
 Filter OnionPerf analysis results
@@ -295,7 +298,8 @@ files generated by this script will be written""",
     filter_parser.set_defaults(func=filter, formatter_class=my_formatter_class)
 
     filter_parser.add_argument('-i', '--input',
-        help="""read the OnionPerf analysis results at PATH as input""",
+        help="""a file or directory PATH from which OnionPerf analysis results
+                files are read""",
         metavar="PATH", required="True",
         action="store", dest="input")
 
@@ -314,8 +318,8 @@ files generated by this script will be written""",
         default=None)
 
     filter_parser.add_argument('-o', '--output',
-        help="""write the filtered output OnionPerf analysis results file to
-                PATH""",
+        help="""a file or directory PATH where filtered output OnionPerf
+                analysis results files are written""",
         metavar="PATH", required="True",
         action="store", dest="output")
 
@@ -439,17 +443,26 @@ def analyze(args):
 def filter(args):
     from onionperf.filtering import Filtering
 
-    p = os.path.abspath(os.path.expanduser(args.input))
-    if not os.path.exists(p):
-        raise argparse.ArgumentTypeError("path '%s' does not exist" % args.input)
+    input_path = os.path.abspath(os.path.expanduser(args.input))
+    if not os.path.exists(input_path):
+        raise argparse.ArgumentTypeError("input path '%s' does not exist" % args.input)
+    output_path = os.path.abspath(os.path.expanduser(args.output))
+    if os.path.exists(output_path):
+        raise argparse.ArgumentTypeError("output path '%s' already exists" % args.output)
     filtering = Filtering()
-    filtering.read_input(args.input)
     if args.include_fingerprints is not None:
         filtering.include_fingerprints(args.include_fingerprints)
     if args.exclude_fingerprints is not None:
         filtering.exclude_fingerprints(args.exclude_fingerprints)
-    filtering.apply_filters()
-    filtering.write_output(args.output)
+    if os.path.isfile(input_path):
+        output_dir, output_file = os.path.split(output_path)
+        filtering.apply_filters(input_path=input_path, output_dir=output_dir, output_file=output_file)
+    else:
+        for dirpath, dirnames, filenames in os.walk(input_path):
+            for filename in filenames:
+                input_file = os.path.join(dirpath, filename)
+                output_dir = os.path.join(output_path, os.path.relpath(dirpath, input_path))
+                filtering.apply_filters(input_path=input_file, output_dir=output_dir, output_file=filename)
 
 def visualize(args):
     from onionperf.visualization import TGenVisualization

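The directory case mirrors the input tree into the output tree. This is the same
path arithmetic as in the new `filter()` walk above, pulled out into a standalone
helper for clarity (the helper name is not part of the patch):

    import os

    def iter_mirrored_paths(input_path, output_path):
        # Yield (input_file, output_dir, output_file) for every file below
        # input_path, preserving the relative directory layout so that the
        # filtered results land in the same structure under output_path.
        for dirpath, dirnames, filenames in os.walk(input_path):
            for filename in filenames:
                input_file = os.path.join(dirpath, filename)
                output_dir = os.path.join(output_path,
                                          os.path.relpath(dirpath, input_path))
                yield input_file, output_dir, filename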