[tor-commits] [metrics-web/release] Add graphs on circuit build times and latencies.

karsten at torproject.org
Wed Sep 26 15:20:35 UTC 2018


commit 2761d1f733989a5c5aa5d9a4af69ea9153bc4b8f
Author: Karsten Loesing <karsten.loesing at gmx.net>
Date:   Tue Jul 3 17:38:15 2018 +0200

    Add graphs on circuit build times and latencies.
    
    Implements #25774.
---
 src/main/R/rserver/graphs.R                        |  75 ++++++++++++++
 .../torproject/metrics/stats/onionperf/Main.java   | 112 ++++++++++++++++++---
 src/main/resources/web.xml                         |   8 ++
 src/main/resources/web/json/categories.json        |   4 +-
 src/main/resources/web/json/metrics.json           |  24 +++++
 src/main/sql/onionperf/init-onionperf.sql          |  68 +++++++++++++
 6 files changed, 275 insertions(+), 16 deletions(-)

diff --git a/src/main/R/rserver/graphs.R b/src/main/R/rserver/graphs.R
index d5eeff5..56c8a55 100644
--- a/src/main/R/rserver/graphs.R
+++ b/src/main/R/rserver/graphs.R
@@ -721,6 +721,81 @@ write_torperf_failures <- function(start_p = NULL, end_p = NULL,
     write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
 }
 
+prepare_onionperf_buildtimes <- function(start_p, end_p, source_p) {
+    read.csv(paste(stats_dir, "buildtimes.csv", sep = ""),
+    colClasses = c("date" = "Date")) %>%
+    filter(if (!is.null(start_p)) date >= as.Date(start_p) else TRUE) %>%
+    filter(if (!is.null(end_p)) date <= as.Date(end_p) else TRUE) %>%
+    filter(if (!is.null(source_p))
+        source == ifelse(source_p == "all", "", source_p) else TRUE)
+}
+
+write_onionperf_buildtimes <- function(start_p = NULL, end_p = NULL,
+    source_p = NULL, path_p) {
+  prepare_onionperf_buildtimes(start_p, end_p, source_p) %>%
+    write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
+}
+
+plot_onionperf_buildtimes <- function(start_p, end_p, source_p, path_p) {
+  prepare_onionperf_buildtimes(start_p, end_p, source_p) %>%
+    mutate(date = as.Date(date),
+      position = factor(position, levels = seq(1, 3, 1),
+        labels = c("1st hop", "2nd hop", "3rd hop"))) %>%
+    ggplot(aes(x = date, y = md, colour = position, fill = position)) +
+    geom_line(size = 0.75) +
+    geom_ribbon(aes(x = as.Date(date), ymin = q1, ymax = q3, alpha = 0.5),
+      show.legend = FALSE) +
+    scale_x_date(name = "", breaks = custom_breaks,
+      labels = custom_labels, minor_breaks = custom_minor_breaks) +
+    scale_y_continuous(name = "", labels = unit_format(unit = "ms"),
+      limits = c(0, NA)) +
+    scale_colour_hue(name = "Medians and interquartile ranges") +
+    scale_fill_hue(name = "Medians and interquartile ranges") +
+    ggtitle(ifelse(source_p == "all", "Circuit build times on all sources",
+        paste("Circuit build times on", source_p))) +
+    labs(caption = copyright_notice) +
+    theme(legend.position = "top")
+  ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
+}
+
+prepare_onionperf_latencies <- function(start_p, end_p, source_p) {
+    read.csv(paste(stats_dir, "latencies.csv", sep = ""),
+    colClasses = c("date" = "Date")) %>%
+    filter(if (!is.null(start_p)) date >= as.Date(start_p) else TRUE) %>%
+    filter(if (!is.null(end_p)) date <= as.Date(end_p) else TRUE) %>%
+    filter(if (!is.null(source_p))
+        source == ifelse(source_p == "all", "", source_p) else TRUE)
+}
+
+write_onionperf_latencies <- function(start_p = NULL, end_p = NULL,
+    source_p = NULL, path_p) {
+  prepare_onionperf_latencies(start_p, end_p, source_p) %>%
+    write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
+}
+
+plot_onionperf_latencies <- function(start_p, end_p, source_p, path_p) {
+  prepare_onionperf_latencies(start_p, end_p, source_p) %>%
+    mutate(date = as.Date(date),
+      server = factor(server, levels = c("public", "onion"),
+        labels = c("public server", "onion server"))) %>%
+    ggplot(aes(x = date, y = md, colour = server, fill = server)) +
+    geom_line(size = 0.75) +
+    geom_ribbon(aes(x = as.Date(date), ymin = q1, ymax = q3, alpha = 0.5),
+      show.legend = FALSE) +
+    scale_x_date(name = "", breaks = custom_breaks,
+      labels = custom_labels, minor_breaks = custom_minor_breaks) +
+    scale_y_continuous(name = "", labels = unit_format(unit = "ms"),
+      limits = c(0, NA)) +
+    scale_colour_hue(name = "Medians and interquartile ranges") +
+    scale_fill_hue(name = "Medians and interquartile ranges") +
+    ggtitle(ifelse(source_p == "all",
+        "Circuit round-trip latencies on all sources",
+        paste("Circuit round-trip latencies on", source_p))) +
+    labs(caption = copyright_notice) +
+    theme(legend.position = "top")
+  ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
+}
+
 prepare_connbidirect <- function(start_p, end_p) {
   read.csv(paste(stats_dir, "connbidirect2.csv", sep = ""),
     colClasses = c("date" = "Date", "direction" = "factor")) %>%
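
The four new R functions follow the existing prepare/write/plot pattern in graphs.R. A minimal usage sketch, assuming graphs.R has been sourced and stats_dir points at the directory containing the buildtimes.csv and latencies.csv files produced by the Java module below; the output paths and the "op-hk" source name are hypothetical:

  # Plot circuit build times across all sources for the first half of 2018.
  plot_onionperf_buildtimes("2018-01-01", "2018-06-30", "all",
    "/tmp/onionperf-buildtimes.png")

  # Write the underlying per-hop quartiles to CSV for a single source.
  write_onionperf_buildtimes(start_p = "2018-01-01", end_p = "2018-06-30",
    source_p = "op-hk", path_p = "/tmp/onionperf-buildtimes.csv")

  # Same pattern for circuit round-trip latencies.
  plot_onionperf_latencies("2018-01-01", "2018-06-30", "all",
    "/tmp/onionperf-latencies.png")
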
diff --git a/src/main/java/org/torproject/metrics/stats/onionperf/Main.java b/src/main/java/org/torproject/metrics/stats/onionperf/Main.java
index 76c2809..5bd55ab 100644
--- a/src/main/java/org/torproject/metrics/stats/onionperf/Main.java
+++ b/src/main/java/org/torproject/metrics/stats/onionperf/Main.java
@@ -26,11 +26,11 @@ import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Calendar;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Locale;
-import java.util.SortedSet;
+import java.util.Set;
 import java.util.TimeZone;
-import java.util.TreeSet;
 
 public class Main {
 
@@ -43,8 +43,12 @@ public class Main {
     String dbUrlString = "jdbc:postgresql:onionperf";
     Connection connection = connectToDatabase(dbUrlString);
     importOnionPerfFiles(connection);
-    SortedSet<String> statistics = queryOnionPerf(connection);
-    writeStatistics(Paths.get("stats", "torperf-1.1.csv"), statistics);
+    writeStatistics(Paths.get("stats", "torperf-1.1.csv"),
+        queryOnionPerf(connection));
+    writeStatistics(Paths.get("stats", "buildtimes.csv"),
+        queryBuildTimes(connection));
+    writeStatistics(Paths.get("stats", "latencies.csv"),
+        queryLatencies(connection));
     disconnectFromDatabase(connection);
     log.info("Terminated onionperf module.");
   }
@@ -77,6 +81,13 @@ public class Main {
         + "?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, "
         + "?, ?, ?, ?, ?, ?)", Statement.RETURN_GENERATED_KEYS);
 
+    PreparedStatement psBuildTimesSelect = connection.prepareStatement(
+        "SELECT position FROM buildtimes WHERE measurement_id = ?");
+
+    PreparedStatement psBuildTimesInsert = connection.prepareStatement(
+        "INSERT INTO buildtimes (measurement_id, position, buildtime, delta) "
+            + "VALUES (?, ?, ?, ?)");
+
     Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
     DescriptorReader dr = DescriptorSourceFactory.createDescriptorReader();
     for (Descriptor d : dr.readDescriptors(
@@ -184,7 +195,30 @@ public class Main {
           }
         }
       }
-      /* Could use measurementId to insert path. */
+      if (null != tr.getBuildTimes()) {
+        psBuildTimesSelect.clearParameters();
+        psBuildTimesSelect.setInt(1, measurementId);
+        Set<Integer> skipPositions = new HashSet<>();
+        try (ResultSet rs = psBuildTimesSelect.executeQuery()) {
+          while (rs.next()) {
+            skipPositions.add(rs.getInt(1));
+          }
+        }
+        int position = 1;
+        long previousBuildTime = 0L;
+        for (long buildtime : tr.getBuildTimes()) {
+          if (!skipPositions.contains(position)) {
+            psBuildTimesInsert.clearParameters();
+            psBuildTimesInsert.setInt(1, measurementId);
+            psBuildTimesInsert.setInt(2, position);
+            psBuildTimesInsert.setInt(3, (int) buildtime);
+            psBuildTimesInsert.setInt(4, (int) (buildtime - previousBuildTime));
+            psBuildTimesInsert.execute();
+          }
+          position++;
+          previousBuildTime = buildtime;
+        }
+      }
       connection.commit();
     }
   }
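
The inserted rows store both the reported build time and a per-hop delta, computed above as buildtime - previousBuildTime; that is, the reported values are treated as cumulative times until each hop is built. A conceptual sketch in R with invented values (milliseconds, matching the unit used in the graphs):

  buildtimes <- c(520, 1210, 1790)    # hypothetical cumulative build times per hop
  deltas <- diff(c(0, buildtimes))    # 520, 690, 580 -> per-hop increments
  data.frame(position = seq_along(buildtimes),
    buildtime = buildtimes, delta = deltas)
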
@@ -197,10 +231,12 @@ public class Main {
     return originalString;
   }
 
-  static SortedSet<String> queryOnionPerf(Connection connection)
+  static List<String> queryOnionPerf(Connection connection)
       throws SQLException {
     log.info("Querying statistics from database.");
-    SortedSet<String> statistics = new TreeSet<>();
+    List<String> statistics = new ArrayList<>();
+    statistics
+        .add("date,filesize,source,server,q1,md,q3,timeouts,failures,requests");
     Statement st = connection.createStatement();
     String queryString = "SELECT date, filesize, source, server, q1, md, q3, "
         + "timeouts, failures, requests FROM onionperf";
@@ -225,20 +261,66 @@ public class Main {
     return statistics;
   }
 
+  static List<String> queryBuildTimes(Connection connection)
+      throws SQLException {
+    log.info("Querying buildtime statistics from database.");
+    List<String> statistics = new ArrayList<>();
+    statistics.add("date,source,position,q1,md,q3");
+    Statement st = connection.createStatement();
+    String queryString = "SELECT date, source, position, q1, md, q3 "
+        + "FROM buildtimes_stats";
+    DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd", Locale.US);
+    dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+    Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
+    try (ResultSet rs = st.executeQuery(queryString)) {
+      while (rs.next()) {
+        statistics.add(String.format("%s,%s,%d,%d,%d,%d",
+            dateFormat.format(rs.getDate("date", calendar)),
+            emptyNull(rs.getString("source")),
+            rs.getInt("position"),
+            rs.getInt("q1"),
+            rs.getInt("md"),
+            rs.getInt("q3")));
+      }
+    }
+    return statistics;
+  }
+
+  static List<String> queryLatencies(Connection connection)
+      throws SQLException {
+    log.info("Querying latency statistics from database.");
+    List<String> statistics = new ArrayList<>();
+    statistics.add("date,source,server,q1,md,q3");
+    Statement st = connection.createStatement();
+    String queryString = "SELECT date, source, server, q1, md, q3 "
+        + "FROM latencies_stats";
+    DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd", Locale.US);
+    dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+    Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
+    try (ResultSet rs = st.executeQuery(queryString)) {
+      while (rs.next()) {
+        statistics.add(String.format("%s,%s,%s,%d,%d,%d",
+            dateFormat.format(rs.getDate("date", calendar)),
+            emptyNull(rs.getString("source")),
+            rs.getString("server"),
+            rs.getInt("q1"),
+            rs.getInt("md"),
+            rs.getInt("q3")));
+      }
+    }
+    return statistics;
+  }
+
   private static String emptyNull(String text) {
     return null == text ? "" : text;
   }
 
-  static void writeStatistics(Path webstatsPath,
-      SortedSet<String> statistics) throws IOException {
+  static void writeStatistics(Path webstatsPath, List<String> statistics)
+      throws IOException {
     webstatsPath.toFile().getParentFile().mkdirs();
-    List<String> lines = new ArrayList<>();
-    lines
-        .add("date,filesize,source,server,q1,md,q3,timeouts,failures,requests");
-    lines.addAll(statistics);
-    log.info("Writing {} lines to {}.", lines.size(),
+    log.info("Writing {} lines to {}.", statistics.size(),
         webstatsPath.toFile().getAbsolutePath());
-    Files.write(webstatsPath, lines, StandardCharsets.UTF_8);
+    Files.write(webstatsPath, statistics, StandardCharsets.UTF_8);
   }
 
   private static void disconnectFromDatabase(Connection connection)
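
With this change the header row comes from queryBuildTimes()/queryLatencies() rather than from writeStatistics(), and an empty source field encodes the all-sources aggregate (which is why prepare_onionperf_buildtimes() maps source_p == "all" to ""). A sketch of the resulting CSV interface between the Java module and graphs.R, with invented rows:

  csv <- paste("date,source,position,q1,md,q3",
    "2018-06-30,,1,180,260,390",
    "2018-06-30,,2,70,110,180",
    "2018-06-30,,3,60,90,150", sep = "\n")
  read.csv(text = csv, colClasses = c("date" = "Date"))
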
diff --git a/src/main/resources/web.xml b/src/main/resources/web.xml
index 9c83591..1fe51b9 100644
--- a/src/main/resources/web.xml
+++ b/src/main/resources/web.xml
@@ -43,6 +43,8 @@
     <url-pattern>/userstats-bridge-version.html</url-pattern>
     <url-pattern>/torperf.html</url-pattern>
     <url-pattern>/torperf-failures.html</url-pattern>
+    <url-pattern>/onionperf-buildtimes.html</url-pattern>
+    <url-pattern>/onionperf-latencies.html</url-pattern>
     <url-pattern>/connbidirect.html</url-pattern>
     <url-pattern>/hidserv-dir-onions-seen.html</url-pattern>
     <url-pattern>/hidserv-rend-relayed-cells.html</url-pattern>
@@ -152,6 +154,12 @@
     <url-pattern>/torperf-failures.png</url-pattern>
     <url-pattern>/torperf-failures.pdf</url-pattern>
     <url-pattern>/torperf-failures.csv</url-pattern>
+    <url-pattern>/onionperf-buildtimes.png</url-pattern>
+    <url-pattern>/onionperf-buildtimes.pdf</url-pattern>
+    <url-pattern>/onionperf-buildtimes.csv</url-pattern>
+    <url-pattern>/onionperf-latencies.png</url-pattern>
+    <url-pattern>/onionperf-latencies.pdf</url-pattern>
+    <url-pattern>/onionperf-latencies.csv</url-pattern>
     <url-pattern>/connbidirect.png</url-pattern>
     <url-pattern>/connbidirect.pdf</url-pattern>
     <url-pattern>/connbidirect.csv</url-pattern>
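
The new URL patterns expose the graphs and their underlying data alongside the existing Torperf resources. Assuming the deployed endpoints accept the same start/end/source parameters declared for them in metrics.json, the CSV data could be fetched directly, for example:

  # Sketch only; URL scheme assumed to follow the existing Tor Metrics graphs.
  read.csv(paste0("https://metrics.torproject.org/onionperf-latencies.csv",
    "?start=2018-01-01&end=2018-06-30&source=all"), comment.char = "#")
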
diff --git a/src/main/resources/web/json/categories.json b/src/main/resources/web/json/categories.json
index b323c79..23e6ee4 100644
--- a/src/main/resources/web/json/categories.json
+++ b/src/main/resources/web/json/categories.json
@@ -61,7 +61,9 @@
     "description": "We use <a href=\"https://gitweb.torproject.org/torperf.git\">Torperf</a> and <a href=\"https://github.com/robgjansen/onionperf\">OnionPerf</a> to run performance measurements. Both work by fetching files of different sizes over Tor and measuring how long that takes.",
     "metrics": [
       "torperf",
-      "torperf-failures"
+      "torperf-failures",
+      "onionperf-buildtimes",
+      "onionperf-latencies"
     ]
   },
   {
diff --git a/src/main/resources/web/json/metrics.json b/src/main/resources/web/json/metrics.json
index 8dbdc66..0f85a28 100644
--- a/src/main/resources/web/json/metrics.json
+++ b/src/main/resources/web/json/metrics.json
@@ -310,6 +310,30 @@
     ]
   },
   {
+    "id": "onionperf-buildtimes",
+    "title": "Circuit build times",
+    "type": "Graph",
+    "description": "<p>This graph shows build times of circuits used for downloading static files of different sizes over Tor. The graph shows the range of measurements from first to third quartile, and highlights the median. The slowest and fastest quarter of measurements are omitted from the graph.</p>",
+    "function": "onionperf_buildtimes",
+    "parameters": [
+      "start",
+      "end",
+      "source"
+    ]
+  },
+  {
+    "id": "onionperf-latencies",
+    "title": "Circuit round-trip latencies",
+    "type": "Graph",
+    "description": "<p>This graph shows round-trip latencies of circuits used for downloading static files of different sizes over Tor, either from a server on the public internet or from a version 2 onion server. Round-trip latencies are measured as the time between sending the HTTP request and receiving the HTTP response header. The graph shows the range of measurements from first to third quartile, and highlights the median. The slowest and fastest quarter of measurements are omitted from the graph.</p>",
+    "function": "onionperf_latencies",
+    "parameters": [
+      "start",
+      "end",
+      "source"
+    ]
+  },
+  {
     "id": "connbidirect",
     "title": "Fraction of connections used uni-/bidirectionally",
     "type": "Graph",
diff --git a/src/main/sql/onionperf/init-onionperf.sql b/src/main/sql/onionperf/init-onionperf.sql
index 557eabb..fad6bef 100644
--- a/src/main/sql/onionperf/init-onionperf.sql
+++ b/src/main/sql/onionperf/init-onionperf.sql
@@ -43,6 +43,14 @@ CREATE TABLE IF NOT EXISTS measurements (
   UNIQUE (source, filesize, start)
 );
 
+CREATE TABLE IF NOT EXISTS buildtimes (
+  measurement_id INTEGER REFERENCES measurements (measurement_id) NOT NULL,
+  position INTEGER NOT NULL,
+  buildtime INTEGER NOT NULL,
+  delta INTEGER NOT NULL,
+  UNIQUE (measurement_id, position)
+);
+
 CREATE TYPE server AS ENUM ('public', 'onion');
 
 CREATE OR REPLACE VIEW onionperf AS
@@ -88,3 +96,63 @@ WHERE DATE(start) < current_date - 1
 GROUP BY date, filesize, 3, server) sub
 ORDER BY date, filesize, source, server;
 
+CREATE OR REPLACE VIEW buildtimes_stats AS
+SELECT date,
+  source,
+  position,
+  TRUNC(q[1]) AS q1,
+  TRUNC(q[2]) AS md,
+  TRUNC(q[3]) AS q3
+FROM (
+SELECT DATE(start) AS date,
+  source,
+  position,
+  PERCENTILE_CONT(ARRAY[0.25,0.5,0.75]) WITHIN GROUP(ORDER BY delta) AS q
+FROM measurements NATURAL JOIN buildtimes
+WHERE DATE(start) < current_date - 1
+AND position <= 3
+GROUP BY date, source, position
+UNION
+SELECT DATE(start) AS date,
+  '' AS source,
+  position,
+  PERCENTILE_CONT(ARRAY[0.25,0.5,0.75]) WITHIN GROUP(ORDER BY delta) AS q
+FROM measurements NATURAL JOIN buildtimes
+WHERE DATE(start) < current_date - 1
+AND position <= 3
+GROUP BY date, 2, position) sub
+ORDER BY date, source, position;
+
+CREATE OR REPLACE VIEW latencies_stats AS
+SELECT date,
+  source,
+  server,
+  TRUNC(q[1]) AS q1,
+  TRUNC(q[2]) AS md,
+  TRUNC(q[3]) AS q3
+FROM (
+SELECT DATE(start) AS date,
+  source,
+  CASE WHEN endpointremote LIKE '%.onion%' THEN 'onion'
+    ELSE 'public' END AS server,
+  PERCENTILE_CONT(ARRAY[0.25,0.5,0.75])
+  WITHIN GROUP(ORDER BY dataresponse - datarequest) AS q
+FROM measurements
+WHERE DATE(start) < current_date - 1
+AND datarequest > 0
+AND dataresponse > 0
+GROUP BY date, source, server
+UNION
+SELECT DATE(start) AS date,
+  '' AS source,
+  CASE WHEN endpointremote LIKE '%.onion%' THEN 'onion'
+    ELSE 'public' END AS server,
+  PERCENTILE_CONT(ARRAY[0.25,0.5,0.75])
+  WITHIN GROUP(ORDER BY dataresponse - datarequest) AS q
+FROM measurements
+WHERE DATE(start) < current_date - 1
+AND datarequest > 0
+AND dataresponse > 0
+GROUP BY date, 2, server) sub
+ORDER BY date, source, server;
+
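
Both views aggregate per day and source with PERCENTILE_CONT(ARRAY[0.25,0.5,0.75]), i.e. linearly interpolated quartiles, and truncate the results to integers with TRUNC. The interpolation corresponds to R's default quantile() (type 7); a sketch with invented per-hop deltas for one day and source:

  deltas <- c(310, 470, 520, 680, 910)
  quantile(deltas, probs = c(0.25, 0.5, 0.75))
  #  25%  50%  75%
  #  470  520  680
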