tor-commits
Threads by month
- ----- 2025 -----
- June
- May
- April
- March
- February
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
April 2014
- 22 participants
- 2020 discussions

11 Apr '14
commit 19bbe6ec48f3732d749287efed545975fda138bb
Author: David Fifield <david(a)bamsoftware.com>
Date: Thu Apr 10 08:59:12 2014 -0700
Fix meek-client-torbrowser makefile target.
---
meek-client-torbrowser/Makefile | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/meek-client-torbrowser/Makefile b/meek-client-torbrowser/Makefile
index 7ce013b..9d0a70f 100644
--- a/meek-client-torbrowser/Makefile
+++ b/meek-client-torbrowser/Makefile
@@ -4,9 +4,9 @@ BINDIR = $(PREFIX)/bin
GOBUILDFLAGS =
-all: meek-client
+all: meek-client-torbrowser
-meek-client: *.go
+meek-client-torbrowser: *.go
go build $(GOBUILDFLAGS)
clean:
1
0

11 Apr '14
commit b99f5ca97ec222e01601ed125d7c25618a35683c
Author: Translation commit bot <translation(a)torproject.org>
Date: Fri Apr 11 09:45:48 2014 +0000
Update translations for tails-iuk
---
hu.po | 57 +++++++++++++++++++++++++++++----------------------------
1 file changed, 29 insertions(+), 28 deletions(-)
diff --git a/hu.po b/hu.po
index fe45c05..064312a 100644
--- a/hu.po
+++ b/hu.po
@@ -3,13 +3,14 @@
# This file is distributed under the same license as the PACKAGE package.
#
# Translators:
+# lajos <mrlajos(a)gmail.com>, 2014
msgid ""
msgstr ""
"Project-Id-Version: The Tor Project\n"
"Report-Msgid-Bugs-To: Tails developers <tails(a)boum.org>\n"
"POT-Creation-Date: 2014-03-05 15:11+0100\n"
-"PO-Revision-Date: 2014-03-06 08:17+0000\n"
-"Last-Translator: runasand <runa.sandvik(a)gmail.com>\n"
+"PO-Revision-Date: 2014-04-11 09:40+0000\n"
+"Last-Translator: lajos <mrlajos(a)gmail.com>\n"
"Language-Team: Hungarian (http://www.transifex.com/projects/p/torproject/language/hu/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
@@ -24,7 +25,7 @@ msgstr ""
#: ../lib/Tails/IUK/Frontend.pm:216
msgid "Error while checking for upgrades"
-msgstr ""
+msgstr "Hiba frissítések ellenőrzése közben"
#: ../lib/Tails/IUK/Frontend.pm:219
msgid ""
@@ -37,15 +38,15 @@ msgstr ""
#: ../lib/Tails/IUK/Frontend.pm:234
msgid "no automatic upgrade is available from our website for this version"
-msgstr ""
+msgstr "Nincs elérhető automatikus frissítés a weboldalunkon ehhez a verzióhoz"
#: ../lib/Tails/IUK/Frontend.pm:240
msgid "your device was not created using Tails Installer"
-msgstr ""
+msgstr "Az eszközöd nem a Tails telepítővel volt létrehozva"
#: ../lib/Tails/IUK/Frontend.pm:245
msgid "Tails was started from a DVD or a read-only device"
-msgstr ""
+msgstr "A Tails DVD-ről vagy egy csak olvasható eszközről lett indítva"
#: ../lib/Tails/IUK/Frontend.pm:250
msgid "there is not enough free space on the Tails system partition"
@@ -58,15 +59,15 @@ msgstr ""
#: ../lib/Tails/IUK/Frontend.pm:261
#, perl-brace-format
msgid "No explanation available for reason '%{reason}s'."
-msgstr ""
+msgstr "Nincs elérhető magyarázat erre '%{reason}s'."
#: ../lib/Tails/IUK/Frontend.pm:281
msgid "The system is up-to-date"
-msgstr ""
+msgstr "A rendszer naprakész"
#: ../lib/Tails/IUK/Frontend.pm:286
msgid "This version of Tails is outdated, and may have security issues."
-msgstr ""
+msgstr "A Tails ezen verziója nem naprakész és biztonsági hibákat tartalmazhat."
#: ../lib/Tails/IUK/Frontend.pm:318
#, perl-brace-format
@@ -90,7 +91,7 @@ msgstr ""
#: ../lib/Tails/IUK/Frontend.pm:360
msgid "Error while detecting available upgrades"
-msgstr ""
+msgstr "Hiba történt elérhető frissítések keresésekor"
#: ../lib/Tails/IUK/Frontend.pm:370
#, perl-brace-format
@@ -110,15 +111,15 @@ msgstr ""
#: ../lib/Tails/IUK/Frontend.pm:385
msgid "Upgrade available"
-msgstr ""
+msgstr "Frissítés elérhető"
#: ../lib/Tails/IUK/Frontend.pm:386
msgid "Upgrade now"
-msgstr ""
+msgstr "Frissítés most"
#: ../lib/Tails/IUK/Frontend.pm:387
msgid "Upgrade later"
-msgstr ""
+msgstr "Frissítés később"
#: ../lib/Tails/IUK/Frontend.pm:395
#, perl-brace-format
@@ -134,16 +135,16 @@ msgstr ""
#: ../lib/Tails/IUK/Frontend.pm:411
msgid "New version available"
-msgstr ""
+msgstr "Új verzió elérhető"
#: ../lib/Tails/IUK/Frontend.pm:468
msgid "Downloading upgrade"
-msgstr ""
+msgstr "Frissítés letöltése"
#: ../lib/Tails/IUK/Frontend.pm:471
#, perl-brace-format
msgid "Downloading the upgrade to %{name}s %{version}s..."
-msgstr ""
+msgstr "Frissítés letöltése %{name}s %{version}s..."
#: ../lib/Tails/IUK/Frontend.pm:512
msgid ""
@@ -155,7 +156,7 @@ msgstr ""
#: ../lib/Tails/IUK/Frontend.pm:528 ../lib/Tails/IUK/Frontend.pm:547
msgid "Error while downloading the upgrade"
-msgstr ""
+msgstr "Hiba frissítés letöltésekor"
#: ../lib/Tails/IUK/Frontend.pm:540
#, perl-brace-format
@@ -166,11 +167,11 @@ msgstr ""
#: ../lib/Tails/IUK/Frontend.pm:559
msgid "Error while creating temporary downloading directory"
-msgstr ""
+msgstr "Hiba történt az ideiglenes letöltési könyvtár létrehozásakor"
#: ../lib/Tails/IUK/Frontend.pm:562
msgid "Failed to create temporary download directory"
-msgstr ""
+msgstr "Nem lehet létrehozni az ideiglenes letöltési könyvtárat"
#: ../lib/Tails/IUK/Frontend.pm:574
msgid ""
@@ -184,35 +185,35 @@ msgstr ""
#: ../lib/Tails/IUK/Frontend.pm:579
msgid "Restart Tails"
-msgstr ""
+msgstr "Tails újraindítása"
#: ../lib/Tails/IUK/Frontend.pm:580
msgid "Restart now"
-msgstr ""
+msgstr "Újraindítás most"
#: ../lib/Tails/IUK/Frontend.pm:581
msgid "Restart later"
-msgstr ""
+msgstr "Újraindítás később"
#: ../lib/Tails/IUK/Frontend.pm:592
msgid "Error while restarting the system"
-msgstr ""
+msgstr "Hiba a rendszer újraindításakor"
#: ../lib/Tails/IUK/Frontend.pm:595
msgid "Failed to restart the system"
-msgstr ""
+msgstr "Nem sikerült a rendszer újraindítása"
#: ../lib/Tails/IUK/Frontend.pm:610
msgid "Error while shutting down the network"
-msgstr ""
+msgstr "Hiba a hálózat leállításakor"
#: ../lib/Tails/IUK/Frontend.pm:613
msgid "Failed to shutdown network"
-msgstr ""
+msgstr "Nem sikerült a hálózat leállítása"
#: ../lib/Tails/IUK/Frontend.pm:623
msgid "Upgrading the system"
-msgstr ""
+msgstr "Rendszer frissítés"
#: ../lib/Tails/IUK/Frontend.pm:625
msgid ""
@@ -231,4 +232,4 @@ msgstr ""
#: ../lib/Tails/IUK/Frontend.pm:672
msgid "Error while installing the upgrade"
-msgstr ""
+msgstr "Hiba frissítés telepítésekor"
1
0

r26703: {website} Update links to TorStatus websites. (website/trunk/projects/en)
by Karsten Loesing 11 Apr '14
11 Apr '14
Author: kloesing
Date: 2014-04-11 08:13:12 +0000 (Fri, 11 Apr 2014)
New Revision: 26703
Modified:
website/trunk/projects/en/onionoo.wml
Log:
Update links to TorStatus websites.
Modified: website/trunk/projects/en/onionoo.wml
===================================================================
--- website/trunk/projects/en/onionoo.wml 2014-04-11 00:52:48 UTC (rev 26702)
+++ website/trunk/projects/en/onionoo.wml 2014-04-11 08:13:12 UTC (rev 26703)
@@ -103,8 +103,9 @@
<a href="https://svn.torproject.org/svn/torstatus/trunk/">unmaintained</a>
website that displays Tor relay information similar to
<a href="http://atlas.torproject.org/">Atlas</a>. There are still a
- few <a href="http://torstatus.all.de/">TorStatus websites</a>
- running.</p>
+ few <a href="http://torstatus.blutmagie.de/">TorStatus</a>
+ <a href="https://torstatus.rueckgr.at/">websites</a>
+ <a href="http://tns.hermetix.org/">running</a>.</p>
<p>There's another project from summer 2011 called TorStatus which is
a <a href="https://gitweb.torproject.org/torstatus.git">rewrite</a> of
1
0

11 Apr '14
commit 4afcccf3de9d923332d6b043c9dd43bb2ee82b22
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Thu Apr 10 18:35:12 2014 +0200
Split bandwidth data writer into two classes.
---
.../torproject/onionoo/BandwidthDataWriter.java | 395 --------------------
.../onionoo/BandwidthDocumentWriter.java | 199 ++++++++++
src/org/torproject/onionoo/BandwidthStatus.java | 74 +++-
.../torproject/onionoo/BandwidthStatusUpdater.java | 152 ++++++++
src/org/torproject/onionoo/DocumentStore.java | 6 +-
src/org/torproject/onionoo/Main.java | 11 +-
6 files changed, 435 insertions(+), 402 deletions(-)
diff --git a/src/org/torproject/onionoo/BandwidthDataWriter.java b/src/org/torproject/onionoo/BandwidthDataWriter.java
deleted file mode 100644
index 227df2b..0000000
--- a/src/org/torproject/onionoo/BandwidthDataWriter.java
+++ /dev/null
@@ -1,395 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.onionoo;
-
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Locale;
-import java.util.Scanner;
-import java.util.SortedMap;
-import java.util.SortedSet;
-import java.util.TimeZone;
-import java.util.TreeMap;
-
-import org.torproject.descriptor.Descriptor;
-import org.torproject.descriptor.ExtraInfoDescriptor;
-
-/* Write bandwidth data files to disk and delete bandwidth files of relays
- * or bridges that fell out of the summary list.
- *
- * Bandwidth history data is available in different resolutions, depending
- * on the considered time interval. Data for the past 72 hours is
- * available for 15 minute detail, data for the past week in 1 hour
- * detail, data for the past month in 4 hour detail, data for the past 3
- * months in 12 hour detail, data for the past year in 2 day detail, and
- * earlier data in 10 day detail. These detail levels have been chosen to
- * provide between 92 and 192 data points for graphing the bandwidth of
- * the past day, past week, past month, past three months, past year, and
- * past five years.
- *
- * Only update bandwidth data files for which new bandwidth histories are
- * available. There's no point in updating bandwidth documents when we
- * don't have newer bandwidth data to add. This means that, e.g., the
- * last 3 days in the bandwidth document may not be equivalent to the last
- * 3 days as of publishing the document, but that's something clients can
- * work around. */
-public class BandwidthDataWriter implements DescriptorListener,
- StatusUpdater, FingerprintListener, DocumentWriter {
-
- private DescriptorSource descriptorSource;
-
- private DocumentStore documentStore;
-
- private long now;
-
- private SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
- "yyyy-MM-dd HH:mm:ss");
-
- public BandwidthDataWriter(DescriptorSource descriptorSource,
- DocumentStore documentStore, Time time) {
- this.descriptorSource = descriptorSource;
- this.documentStore = documentStore;
- this.now = time.currentTimeMillis();
- this.dateTimeFormat.setLenient(false);
- this.dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- this.registerDescriptorListeners();
- this.registerFingerprintListeners();
- }
-
- private void registerDescriptorListeners() {
- this.descriptorSource.registerDescriptorListener(this,
- DescriptorType.RELAY_EXTRA_INFOS);
- this.descriptorSource.registerDescriptorListener(this,
- DescriptorType.BRIDGE_EXTRA_INFOS);
- }
-
- private void registerFingerprintListeners() {
- /* TODO Not used yet.
- this.descriptorSource.registerFingerprintListener(this,
- DescriptorType.RELAY_EXTRA_INFOS);
- this.descriptorSource.registerFingerprintListener(this,
- DescriptorType.BRIDGE_EXTRA_INFOS);*/
- }
-
- public void processDescriptor(Descriptor descriptor, boolean relay) {
- if (descriptor instanceof ExtraInfoDescriptor) {
- this.parseDescriptor((ExtraInfoDescriptor) descriptor);
- }
- }
-
- public void processFingerprints(SortedSet<String> fingerprints,
- boolean relay) {
- /* TODO Not used yet. */
- }
-
- public void updateStatuses() {
- /* Status files are already updated while processing descriptors. */
- }
-
- public void writeDocuments() {
- /* Document files are already updated while processing descriptors. */
- }
-
- private void parseDescriptor(ExtraInfoDescriptor descriptor) {
- String fingerprint = descriptor.getFingerprint();
- boolean updateHistory = false;
- SortedMap<Long, long[]> writeHistory = new TreeMap<Long, long[]>(),
- readHistory = new TreeMap<Long, long[]>();
- if (descriptor.getWriteHistory() != null) {
- parseHistoryLine(descriptor.getWriteHistory().getLine(),
- writeHistory);
- updateHistory = true;
- }
- if (descriptor.getReadHistory() != null) {
- parseHistoryLine(descriptor.getReadHistory().getLine(),
- readHistory);
- updateHistory = true;
- }
- if (updateHistory) {
- this.readHistoryFromDisk(fingerprint, writeHistory, readHistory);
- this.compressHistory(writeHistory);
- this.compressHistory(readHistory);
- this.writeHistoryToDisk(fingerprint, writeHistory, readHistory);
- this.writeBandwidthDataFileToDisk(fingerprint, writeHistory,
- readHistory);
- }
- }
-
- private void parseHistoryLine(String line,
- SortedMap<Long, long[]> history) {
- String[] parts = line.split(" ");
- if (parts.length < 6) {
- return;
- }
- try {
- long endMillis = this.dateTimeFormat.parse(parts[1] + " "
- + parts[2]).getTime();
- long intervalMillis = Long.parseLong(parts[3].substring(1)) * 1000L;
- String[] values = parts[5].split(",");
- for (int i = values.length - 1; i >= 0; i--) {
- long bandwidthValue = Long.parseLong(values[i]);
- long startMillis = endMillis - intervalMillis;
- history.put(startMillis, new long[] { startMillis, endMillis,
- bandwidthValue });
- endMillis -= intervalMillis;
- }
- } catch (ParseException e) {
- System.err.println("Could not parse timestamp in line '" + line
- + "'. Skipping.");
- }
- }
-
- private void readHistoryFromDisk(String fingerprint,
- SortedMap<Long, long[]> writeHistory,
- SortedMap<Long, long[]> readHistory) {
- BandwidthStatus bandwidthStatus = this.documentStore.retrieve(
- BandwidthStatus.class, false, fingerprint);
- if (bandwidthStatus == null) {
- return;
- }
- String historyString = bandwidthStatus.documentString;
- try {
- Scanner s = new Scanner(historyString);
- while (s.hasNextLine()) {
- String line = s.nextLine();
- String[] parts = line.split(" ");
- if (parts.length != 6) {
- System.err.println("Illegal line '" + line + "' in bandwidth "
- + "history for fingerprint '" + fingerprint + "'. "
- + "Skipping this line.");
- continue;
- }
- SortedMap<Long, long[]> history = parts[0].equals("r")
- ? readHistory : writeHistory;
- long startMillis = this.dateTimeFormat.parse(parts[1] + " "
- + parts[2]).getTime();
- long endMillis = this.dateTimeFormat.parse(parts[3] + " "
- + parts[4]).getTime();
- long bandwidth = Long.parseLong(parts[5]);
- long previousEndMillis = history.headMap(startMillis).isEmpty()
- ? startMillis
- : history.get(history.headMap(startMillis).lastKey())[1];
- long nextStartMillis = history.tailMap(startMillis).isEmpty()
- ? endMillis : history.tailMap(startMillis).firstKey();
- if (previousEndMillis <= startMillis &&
- nextStartMillis >= endMillis) {
- history.put(startMillis, new long[] { startMillis, endMillis,
- bandwidth });
- }
- }
- s.close();
- } catch (ParseException e) {
- System.err.println("Could not parse timestamp while reading "
- + "bandwidth history for fingerprint '" + fingerprint + "'. "
- + "Skipping.");
- e.printStackTrace();
- }
- }
-
- private void compressHistory(
- SortedMap<Long, long[]> history) {
- SortedMap<Long, long[]> uncompressedHistory =
- new TreeMap<Long, long[]>(history);
- history.clear();
- long lastStartMillis = 0L, lastEndMillis = 0L, lastBandwidth = 0L;
- SimpleDateFormat dateTimeFormat = new SimpleDateFormat("yyyy-MM");
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- String lastMonthString = "1970-01";
- for (long[] v : uncompressedHistory.values()) {
- long startMillis = v[0], endMillis = v[1], bandwidth = v[2];
- long intervalLengthMillis;
- if (this.now - endMillis <= 72L * 60L * 60L * 1000L) {
- intervalLengthMillis = 15L * 60L * 1000L;
- } else if (this.now - endMillis <= 7L * 24L * 60L * 60L * 1000L) {
- intervalLengthMillis = 60L * 60L * 1000L;
- } else if (this.now - endMillis <= 31L * 24L * 60L * 60L * 1000L) {
- intervalLengthMillis = 4L * 60L * 60L * 1000L;
- } else if (this.now - endMillis <= 92L * 24L * 60L * 60L * 1000L) {
- intervalLengthMillis = 12L * 60L * 60L * 1000L;
- } else if (this.now - endMillis <= 366L * 24L * 60L * 60L * 1000L) {
- intervalLengthMillis = 2L * 24L * 60L * 60L * 1000L;
- } else {
- intervalLengthMillis = 10L * 24L * 60L * 60L * 1000L;
- }
- String monthString = dateTimeFormat.format(startMillis);
- if (lastEndMillis == startMillis &&
- ((lastEndMillis - 1L) / intervalLengthMillis) ==
- ((endMillis - 1L) / intervalLengthMillis) &&
- lastMonthString.equals(monthString)) {
- lastEndMillis = endMillis;
- lastBandwidth += bandwidth;
- } else {
- if (lastStartMillis > 0L) {
- history.put(lastStartMillis, new long[] { lastStartMillis,
- lastEndMillis, lastBandwidth });
- }
- lastStartMillis = startMillis;
- lastEndMillis = endMillis;
- lastBandwidth = bandwidth;
- }
- lastMonthString = monthString;
- }
- if (lastStartMillis > 0L) {
- history.put(lastStartMillis, new long[] { lastStartMillis,
- lastEndMillis, lastBandwidth });
- }
- }
-
- private void writeHistoryToDisk(String fingerprint,
- SortedMap<Long, long[]> writeHistory,
- SortedMap<Long, long[]> readHistory) {
- StringBuilder sb = new StringBuilder();
- for (long[] v : writeHistory.values()) {
- sb.append("w " + this.dateTimeFormat.format(v[0]) + " "
- + this.dateTimeFormat.format(v[1]) + " "
- + String.valueOf(v[2]) + "\n");
- }
- for (long[] v : readHistory.values()) {
- sb.append("r " + this.dateTimeFormat.format(v[0]) + " "
- + this.dateTimeFormat.format(v[1]) + " "
- + String.valueOf(v[2]) + "\n");
- }
- BandwidthStatus bandwidthStatus = new BandwidthStatus();
- bandwidthStatus.documentString = sb.toString();
- this.documentStore.store(bandwidthStatus, fingerprint);
- }
-
- private void writeBandwidthDataFileToDisk(String fingerprint,
- SortedMap<Long, long[]> writeHistory,
- SortedMap<Long, long[]> readHistory) {
- String writeHistoryString = formatHistoryString(writeHistory);
- String readHistoryString = formatHistoryString(readHistory);
- StringBuilder sb = new StringBuilder();
- sb.append("{\"fingerprint\":\"" + fingerprint + "\",\n"
- + "\"write_history\":{\n" + writeHistoryString + "},\n"
- + "\"read_history\":{\n" + readHistoryString + "}}\n");
- BandwidthDocument bandwidthDocument = new BandwidthDocument();
- bandwidthDocument.documentString = sb.toString();
- this.documentStore.store(bandwidthDocument, fingerprint);
- }
-
- private String[] graphNames = new String[] {
- "3_days",
- "1_week",
- "1_month",
- "3_months",
- "1_year",
- "5_years" };
-
- private long[] graphIntervals = new long[] {
- 72L * 60L * 60L * 1000L,
- 7L * 24L * 60L * 60L * 1000L,
- 31L * 24L * 60L * 60L * 1000L,
- 92L * 24L * 60L * 60L * 1000L,
- 366L * 24L * 60L * 60L * 1000L,
- 5L * 366L * 24L * 60L * 60L * 1000L };
-
- private long[] dataPointIntervals = new long[] {
- 15L * 60L * 1000L,
- 60L * 60L * 1000L,
- 4L * 60L * 60L * 1000L,
- 12L * 60L * 60L * 1000L,
- 2L * 24L * 60L * 60L * 1000L,
- 10L * 24L * 60L * 60L * 1000L };
-
- private String formatHistoryString(SortedMap<Long, long[]> history) {
- StringBuilder sb = new StringBuilder();
- for (int i = 0; i < this.graphIntervals.length; i++) {
- String graphName = this.graphNames[i];
- long graphInterval = this.graphIntervals[i];
- long dataPointInterval = this.dataPointIntervals[i];
- List<Long> dataPoints = new ArrayList<Long>();
- long intervalStartMillis = ((this.now - graphInterval)
- / dataPointInterval) * dataPointInterval;
- long totalMillis = 0L, totalBandwidth = 0L;
- for (long[] v : history.values()) {
- long startMillis = v[0], endMillis = v[1], bandwidth = v[2];
- if (endMillis < intervalStartMillis) {
- continue;
- }
- while ((intervalStartMillis / dataPointInterval) !=
- (endMillis / dataPointInterval)) {
- dataPoints.add(totalMillis * 5L < dataPointInterval
- ? -1L : (totalBandwidth * 1000L) / totalMillis);
- totalBandwidth = 0L;
- totalMillis = 0L;
- intervalStartMillis += dataPointInterval;
- }
- totalBandwidth += bandwidth;
- totalMillis += (endMillis - startMillis);
- }
- dataPoints.add(totalMillis * 5L < dataPointInterval
- ? -1L : (totalBandwidth * 1000L) / totalMillis);
- long maxValue = 1L;
- int firstNonNullIndex = -1, lastNonNullIndex = -1;
- for (int j = 0; j < dataPoints.size(); j++) {
- long dataPoint = dataPoints.get(j);
- if (dataPoint >= 0L) {
- if (firstNonNullIndex < 0) {
- firstNonNullIndex = j;
- }
- lastNonNullIndex = j;
- if (dataPoint > maxValue) {
- maxValue = dataPoint;
- }
- }
- }
- if (firstNonNullIndex < 0) {
- continue;
- }
- long firstDataPointMillis = (((this.now - graphInterval)
- / dataPointInterval) + firstNonNullIndex) * dataPointInterval
- + dataPointInterval / 2L;
- if (i > 0 &&
- firstDataPointMillis >= this.now - graphIntervals[i - 1]) {
- /* Skip bandwidth history object, because it doesn't contain
- * anything new that wasn't already contained in the last
- * bandwidth history object(s). */
- continue;
- }
- long lastDataPointMillis = firstDataPointMillis
- + (lastNonNullIndex - firstNonNullIndex) * dataPointInterval;
- double factor = ((double) maxValue) / 999.0;
- int count = lastNonNullIndex - firstNonNullIndex + 1;
- StringBuilder sb2 = new StringBuilder();
- sb2.append("\"" + graphName + "\":{"
- + "\"first\":\""
- + this.dateTimeFormat.format(firstDataPointMillis) + "\","
- + "\"last\":\""
- + this.dateTimeFormat.format(lastDataPointMillis) + "\","
- +"\"interval\":" + String.valueOf(dataPointInterval / 1000L)
- + ",\"factor\":" + String.format(Locale.US, "%.3f", factor)
- + ",\"count\":" + String.valueOf(count) + ",\"values\":[");
- int written = 0, previousNonNullIndex = -2;
- boolean foundTwoAdjacentDataPoints = false;
- for (int j = firstNonNullIndex; j <= lastNonNullIndex; j++) {
- long dataPoint = dataPoints.get(j);
- if (dataPoint >= 0L) {
- if (j - previousNonNullIndex == 1) {
- foundTwoAdjacentDataPoints = true;
- }
- previousNonNullIndex = j;
- }
- sb2.append((written++ > 0 ? "," : "") + (dataPoint < 0L ? "null" :
- String.valueOf((dataPoint * 999L) / maxValue)));
- }
- sb2.append("]},\n");
- if (foundTwoAdjacentDataPoints) {
- sb.append(sb2.toString());
- }
- }
- String result = sb.toString();
- if (result.length() >= 2) {
- result = result.substring(0, result.length() - 2) + "\n";
- }
- return result;
- }
-
- public String getStatsString() {
- /* TODO Add statistics string. */
- return null;
- }
-}
-
diff --git a/src/org/torproject/onionoo/BandwidthDocumentWriter.java b/src/org/torproject/onionoo/BandwidthDocumentWriter.java
new file mode 100644
index 0000000..754c8f3
--- /dev/null
+++ b/src/org/torproject/onionoo/BandwidthDocumentWriter.java
@@ -0,0 +1,199 @@
+/* Copyright 2011--2014 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.onionoo;
+
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.SortedSet;
+import java.util.TimeZone;
+
+public class BandwidthDocumentWriter implements FingerprintListener,
+ DocumentWriter{
+
+ private DescriptorSource descriptorSource;
+
+ private DocumentStore documentStore;
+
+ private long now;
+
+ private SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
+ "yyyy-MM-dd HH:mm:ss");
+
+ public BandwidthDocumentWriter(DescriptorSource descriptorSource,
+ DocumentStore documentStore, Time time) {
+ this.descriptorSource = descriptorSource;
+ this.documentStore = documentStore;
+ this.now = time.currentTimeMillis();
+ this.dateTimeFormat.setLenient(false);
+ this.dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ this.registerFingerprintListeners();
+ }
+
+ private void registerFingerprintListeners() {
+ this.descriptorSource.registerFingerprintListener(this,
+ DescriptorType.RELAY_EXTRA_INFOS);
+ this.descriptorSource.registerFingerprintListener(this,
+ DescriptorType.BRIDGE_EXTRA_INFOS);
+ }
+
+ private Set<String> updateBandwidthDocuments = new HashSet<String>();
+
+ public void processFingerprints(SortedSet<String> fingerprints,
+ boolean relay) {
+ this.updateBandwidthDocuments.addAll(fingerprints);
+ }
+
+ public void writeDocuments() {
+ for (String fingerprint : this.updateBandwidthDocuments) {
+ BandwidthStatus bandwidthStatus = this.documentStore.retrieve(
+ BandwidthStatus.class, true, fingerprint);
+ if (bandwidthStatus == null) {
+ continue;
+ }
+ this.writeBandwidthDataFileToDisk(fingerprint,
+ bandwidthStatus.writeHistory, bandwidthStatus.readHistory);
+ }
+ Logger.printStatusTime("Wrote bandwidth document files");
+ }
+
+ private void writeBandwidthDataFileToDisk(String fingerprint,
+ SortedMap<Long, long[]> writeHistory,
+ SortedMap<Long, long[]> readHistory) {
+ String writeHistoryString = formatHistoryString(writeHistory);
+ String readHistoryString = formatHistoryString(readHistory);
+ StringBuilder sb = new StringBuilder();
+ sb.append("{\"fingerprint\":\"" + fingerprint + "\",\n"
+ + "\"write_history\":{\n" + writeHistoryString + "},\n"
+ + "\"read_history\":{\n" + readHistoryString + "}}\n");
+ BandwidthDocument bandwidthDocument = new BandwidthDocument();
+ bandwidthDocument.documentString = sb.toString();
+ this.documentStore.store(bandwidthDocument, fingerprint);
+ }
+
+ private String[] graphNames = new String[] {
+ "3_days",
+ "1_week",
+ "1_month",
+ "3_months",
+ "1_year",
+ "5_years" };
+
+ private long[] graphIntervals = new long[] {
+ 72L * 60L * 60L * 1000L,
+ 7L * 24L * 60L * 60L * 1000L,
+ 31L * 24L * 60L * 60L * 1000L,
+ 92L * 24L * 60L * 60L * 1000L,
+ 366L * 24L * 60L * 60L * 1000L,
+ 5L * 366L * 24L * 60L * 60L * 1000L };
+
+ private long[] dataPointIntervals = new long[] {
+ 15L * 60L * 1000L,
+ 60L * 60L * 1000L,
+ 4L * 60L * 60L * 1000L,
+ 12L * 60L * 60L * 1000L,
+ 2L * 24L * 60L * 60L * 1000L,
+ 10L * 24L * 60L * 60L * 1000L };
+
+ private String formatHistoryString(SortedMap<Long, long[]> history) {
+ StringBuilder sb = new StringBuilder();
+ for (int i = 0; i < this.graphIntervals.length; i++) {
+ String graphName = this.graphNames[i];
+ long graphInterval = this.graphIntervals[i];
+ long dataPointInterval = this.dataPointIntervals[i];
+ List<Long> dataPoints = new ArrayList<Long>();
+ long intervalStartMillis = ((this.now - graphInterval)
+ / dataPointInterval) * dataPointInterval;
+ long totalMillis = 0L, totalBandwidth = 0L;
+ for (long[] v : history.values()) {
+ long startMillis = v[0], endMillis = v[1], bandwidth = v[2];
+ if (endMillis < intervalStartMillis) {
+ continue;
+ }
+ while ((intervalStartMillis / dataPointInterval) !=
+ (endMillis / dataPointInterval)) {
+ dataPoints.add(totalMillis * 5L < dataPointInterval
+ ? -1L : (totalBandwidth * 1000L) / totalMillis);
+ totalBandwidth = 0L;
+ totalMillis = 0L;
+ intervalStartMillis += dataPointInterval;
+ }
+ totalBandwidth += bandwidth;
+ totalMillis += (endMillis - startMillis);
+ }
+ dataPoints.add(totalMillis * 5L < dataPointInterval
+ ? -1L : (totalBandwidth * 1000L) / totalMillis);
+ long maxValue = 1L;
+ int firstNonNullIndex = -1, lastNonNullIndex = -1;
+ for (int j = 0; j < dataPoints.size(); j++) {
+ long dataPoint = dataPoints.get(j);
+ if (dataPoint >= 0L) {
+ if (firstNonNullIndex < 0) {
+ firstNonNullIndex = j;
+ }
+ lastNonNullIndex = j;
+ if (dataPoint > maxValue) {
+ maxValue = dataPoint;
+ }
+ }
+ }
+ if (firstNonNullIndex < 0) {
+ continue;
+ }
+ long firstDataPointMillis = (((this.now - graphInterval)
+ / dataPointInterval) + firstNonNullIndex) * dataPointInterval
+ + dataPointInterval / 2L;
+ if (i > 0 &&
+ firstDataPointMillis >= this.now - graphIntervals[i - 1]) {
+ /* Skip bandwidth history object, because it doesn't contain
+ * anything new that wasn't already contained in the last
+ * bandwidth history object(s). */
+ continue;
+ }
+ long lastDataPointMillis = firstDataPointMillis
+ + (lastNonNullIndex - firstNonNullIndex) * dataPointInterval;
+ double factor = ((double) maxValue) / 999.0;
+ int count = lastNonNullIndex - firstNonNullIndex + 1;
+ StringBuilder sb2 = new StringBuilder();
+ sb2.append("\"" + graphName + "\":{"
+ + "\"first\":\""
+ + this.dateTimeFormat.format(firstDataPointMillis) + "\","
+ + "\"last\":\""
+ + this.dateTimeFormat.format(lastDataPointMillis) + "\","
+ +"\"interval\":" + String.valueOf(dataPointInterval / 1000L)
+ + ",\"factor\":" + String.format(Locale.US, "%.3f", factor)
+ + ",\"count\":" + String.valueOf(count) + ",\"values\":[");
+ int written = 0, previousNonNullIndex = -2;
+ boolean foundTwoAdjacentDataPoints = false;
+ for (int j = firstNonNullIndex; j <= lastNonNullIndex; j++) {
+ long dataPoint = dataPoints.get(j);
+ if (dataPoint >= 0L) {
+ if (j - previousNonNullIndex == 1) {
+ foundTwoAdjacentDataPoints = true;
+ }
+ previousNonNullIndex = j;
+ }
+ sb2.append((written++ > 0 ? "," : "") + (dataPoint < 0L ? "null" :
+ String.valueOf((dataPoint * 999L) / maxValue)));
+ }
+ sb2.append("]},\n");
+ if (foundTwoAdjacentDataPoints) {
+ sb.append(sb2.toString());
+ }
+ }
+ String result = sb.toString();
+ if (result.length() >= 2) {
+ result = result.substring(0, result.length() - 2) + "\n";
+ }
+ return result;
+ }
+
+ public String getStatsString() {
+ /* TODO Add statistics string. */
+ return null;
+ }
+}
diff --git a/src/org/torproject/onionoo/BandwidthStatus.java b/src/org/torproject/onionoo/BandwidthStatus.java
index bf6f504..fd3c36e 100644
--- a/src/org/torproject/onionoo/BandwidthStatus.java
+++ b/src/org/torproject/onionoo/BandwidthStatus.java
@@ -1,7 +1,79 @@
-/* Copyright 2013 The Tor Project
+/* Copyright 2013--2014 The Tor Project
* See LICENSE for licensing information */
package org.torproject.onionoo;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Scanner;
+import java.util.SortedMap;
+import java.util.TimeZone;
+import java.util.TreeMap;
+
class BandwidthStatus extends Document {
+
+ SortedMap<Long, long[]> writeHistory = new TreeMap<Long, long[]>();
+
+ SortedMap<Long, long[]> readHistory = new TreeMap<Long, long[]>();
+
+ public void fromDocumentString(String documentString) {
+ SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
+ "yyyy-MM-dd HH:mm:ss");
+ dateTimeFormat.setLenient(false);
+ dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ try {
+ Scanner s = new Scanner(documentString);
+ while (s.hasNextLine()) {
+ String line = s.nextLine();
+ String[] parts = line.split(" ");
+ if (parts.length != 6) {
+ System.err.println("Illegal line '" + line + "' in bandwidth "
+ + "history. Skipping this line.");
+ continue;
+ }
+ SortedMap<Long, long[]> history = parts[0].equals("r")
+ ? readHistory : writeHistory;
+ long startMillis = dateTimeFormat.parse(parts[1] + " "
+ + parts[2]).getTime();
+ long endMillis = dateTimeFormat.parse(parts[3] + " "
+ + parts[4]).getTime();
+ long bandwidth = Long.parseLong(parts[5]);
+ long previousEndMillis = history.headMap(startMillis).isEmpty()
+ ? startMillis
+ : history.get(history.headMap(startMillis).lastKey())[1];
+ long nextStartMillis = history.tailMap(startMillis).isEmpty()
+ ? endMillis : history.tailMap(startMillis).firstKey();
+ if (previousEndMillis <= startMillis &&
+ nextStartMillis >= endMillis) {
+ history.put(startMillis, new long[] { startMillis, endMillis,
+ bandwidth });
+ }
+ }
+ s.close();
+ } catch (ParseException e) {
+ System.err.println("Could not parse timestamp while reading "
+ + "bandwidth history. Skipping.");
+ e.printStackTrace();
+ }
+
+ }
+
+ public String toDocumentString() {
+ SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
+ "yyyy-MM-dd HH:mm:ss");
+ dateTimeFormat.setLenient(false);
+ dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ StringBuilder sb = new StringBuilder();
+ for (long[] v : writeHistory.values()) {
+ sb.append("w " + dateTimeFormat.format(v[0]) + " "
+ + dateTimeFormat.format(v[1]) + " "
+ + String.valueOf(v[2]) + "\n");
+ }
+ for (long[] v : readHistory.values()) {
+ sb.append("r " + dateTimeFormat.format(v[0]) + " "
+ + dateTimeFormat.format(v[1]) + " "
+ + String.valueOf(v[2]) + "\n");
+ }
+ return sb.toString();
+ }
}
diff --git a/src/org/torproject/onionoo/BandwidthStatusUpdater.java b/src/org/torproject/onionoo/BandwidthStatusUpdater.java
new file mode 100644
index 0000000..6254260
--- /dev/null
+++ b/src/org/torproject/onionoo/BandwidthStatusUpdater.java
@@ -0,0 +1,152 @@
+/* Copyright 2011--2014 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.onionoo;
+
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.SortedMap;
+import java.util.TimeZone;
+import java.util.TreeMap;
+
+import org.torproject.descriptor.Descriptor;
+import org.torproject.descriptor.ExtraInfoDescriptor;
+
+public class BandwidthStatusUpdater implements DescriptorListener,
+ StatusUpdater {
+
+ private DescriptorSource descriptorSource;
+
+ private DocumentStore documentStore;
+
+ private long now;
+
+ private SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
+ "yyyy-MM-dd HH:mm:ss");
+
+ public BandwidthStatusUpdater(DescriptorSource descriptorSource,
+ DocumentStore documentStore, Time time) {
+ this.descriptorSource = descriptorSource;
+ this.documentStore = documentStore;
+ this.now = time.currentTimeMillis();
+ this.dateTimeFormat.setLenient(false);
+ this.dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ this.registerDescriptorListeners();
+ }
+
+ private void registerDescriptorListeners() {
+ this.descriptorSource.registerDescriptorListener(this,
+ DescriptorType.RELAY_EXTRA_INFOS);
+ this.descriptorSource.registerDescriptorListener(this,
+ DescriptorType.BRIDGE_EXTRA_INFOS);
+ }
+
+ public void processDescriptor(Descriptor descriptor, boolean relay) {
+ if (descriptor instanceof ExtraInfoDescriptor) {
+ this.parseDescriptor((ExtraInfoDescriptor) descriptor);
+ }
+ }
+
+ public void updateStatuses() {
+ /* Status files are already updated while processing descriptors. */
+ }
+
+ private void parseDescriptor(ExtraInfoDescriptor descriptor) {
+ String fingerprint = descriptor.getFingerprint();
+ BandwidthStatus bandwidthStatus = this.documentStore.retrieve(
+ BandwidthStatus.class, true, fingerprint);
+ if (bandwidthStatus == null) {
+ bandwidthStatus = new BandwidthStatus();
+ }
+ if (descriptor.getWriteHistory() != null) {
+ parseHistoryLine(descriptor.getWriteHistory().getLine(),
+ bandwidthStatus.writeHistory);
+ }
+ if (descriptor.getReadHistory() != null) {
+ parseHistoryLine(descriptor.getReadHistory().getLine(),
+ bandwidthStatus.readHistory);
+ }
+ this.compressHistory(bandwidthStatus.writeHistory);
+ this.compressHistory(bandwidthStatus.readHistory);
+ this.documentStore.store(bandwidthStatus, fingerprint);
+ }
+
+ private void parseHistoryLine(String line,
+ SortedMap<Long, long[]> history) {
+ String[] parts = line.split(" ");
+ if (parts.length < 6) {
+ return;
+ }
+ try {
+ long endMillis = this.dateTimeFormat.parse(parts[1] + " "
+ + parts[2]).getTime();
+ long intervalMillis = Long.parseLong(parts[3].substring(1)) * 1000L;
+ String[] values = parts[5].split(",");
+ for (int i = values.length - 1; i >= 0; i--) {
+ long bandwidthValue = Long.parseLong(values[i]);
+ long startMillis = endMillis - intervalMillis;
+ /* TODO Should we first check whether an interval is already
+ * contained in history? */
+ history.put(startMillis, new long[] { startMillis, endMillis,
+ bandwidthValue });
+ endMillis -= intervalMillis;
+ }
+ } catch (ParseException e) {
+ System.err.println("Could not parse timestamp in line '" + line
+ + "'. Skipping.");
+ }
+ }
+
+ private void compressHistory(SortedMap<Long, long[]> history) {
+ SortedMap<Long, long[]> uncompressedHistory =
+ new TreeMap<Long, long[]>(history);
+ history.clear();
+ long lastStartMillis = 0L, lastEndMillis = 0L, lastBandwidth = 0L;
+ SimpleDateFormat dateTimeFormat = new SimpleDateFormat("yyyy-MM");
+ dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ String lastMonthString = "1970-01";
+ for (long[] v : uncompressedHistory.values()) {
+ long startMillis = v[0], endMillis = v[1], bandwidth = v[2];
+ long intervalLengthMillis;
+ if (this.now - endMillis <= 72L * 60L * 60L * 1000L) {
+ intervalLengthMillis = 15L * 60L * 1000L;
+ } else if (this.now - endMillis <= 7L * 24L * 60L * 60L * 1000L) {
+ intervalLengthMillis = 60L * 60L * 1000L;
+ } else if (this.now - endMillis <= 31L * 24L * 60L * 60L * 1000L) {
+ intervalLengthMillis = 4L * 60L * 60L * 1000L;
+ } else if (this.now - endMillis <= 92L * 24L * 60L * 60L * 1000L) {
+ intervalLengthMillis = 12L * 60L * 60L * 1000L;
+ } else if (this.now - endMillis <= 366L * 24L * 60L * 60L * 1000L) {
+ intervalLengthMillis = 2L * 24L * 60L * 60L * 1000L;
+ } else {
+ intervalLengthMillis = 10L * 24L * 60L * 60L * 1000L;
+ }
+ String monthString = dateTimeFormat.format(startMillis);
+ if (lastEndMillis == startMillis &&
+ ((lastEndMillis - 1L) / intervalLengthMillis) ==
+ ((endMillis - 1L) / intervalLengthMillis) &&
+ lastMonthString.equals(monthString)) {
+ lastEndMillis = endMillis;
+ lastBandwidth += bandwidth;
+ } else {
+ if (lastStartMillis > 0L) {
+ history.put(lastStartMillis, new long[] { lastStartMillis,
+ lastEndMillis, lastBandwidth });
+ }
+ lastStartMillis = startMillis;
+ lastEndMillis = endMillis;
+ lastBandwidth = bandwidth;
+ }
+ lastMonthString = monthString;
+ }
+ if (lastStartMillis > 0L) {
+ history.put(lastStartMillis, new long[] { lastStartMillis,
+ lastEndMillis, lastBandwidth });
+ }
+ }
+
+ public String getStatsString() {
+ /* TODO Add statistics string. */
+ return null;
+ }
+}
+
diff --git a/src/org/torproject/onionoo/DocumentStore.java b/src/org/torproject/onionoo/DocumentStore.java
index 5da7267..be6abd5 100644
--- a/src/org/torproject/onionoo/DocumentStore.java
+++ b/src/org/torproject/onionoo/DocumentStore.java
@@ -196,7 +196,8 @@ public class DocumentStore {
document instanceof UptimeDocument) {
Gson gson = new Gson();
documentString = gson.toJson(this);
- } else if (document instanceof WeightsStatus ||
+ } else if (document instanceof BandwidthStatus ||
+ document instanceof WeightsStatus ||
document instanceof ClientsStatus ||
document instanceof UptimeStatus) {
documentString = document.toDocumentString();
@@ -290,7 +291,8 @@ public class DocumentStore {
documentType.equals(UptimeDocument.class)) {
return this.retrieveParsedDocumentFile(documentType,
documentString);
- } else if (documentType.equals(WeightsStatus.class) ||
+ } else if (documentType.equals(BandwidthStatus.class) ||
+ documentType.equals(WeightsStatus.class) ||
documentType.equals(ClientsStatus.class) ||
documentType.equals(UptimeStatus.class)) {
return this.retrieveParsedStatusFile(documentType, documentString);
diff --git a/src/org/torproject/onionoo/Main.java b/src/org/torproject/onionoo/Main.java
index 434d90c..60db116 100644
--- a/src/org/torproject/onionoo/Main.java
+++ b/src/org/torproject/onionoo/Main.java
@@ -32,16 +32,18 @@ public class Main {
Logger.printStatusTime("Initialized reverse domain name resolver");
NodeDataWriter ndw = new NodeDataWriter(dso, rdnr, ls, ds, t);
Logger.printStatusTime("Initialized node data writer");
- BandwidthDataWriter bdw = new BandwidthDataWriter(dso, ds, t);
- Logger.printStatusTime("Initialized bandwidth data writer");
+ BandwidthStatusUpdater bsu = new BandwidthStatusUpdater(dso, ds, t);
+ Logger.printStatusTime("Initialized bandwidth status updater");
WeightsStatusUpdater wsu = new WeightsStatusUpdater(dso, ds, t);
Logger.printStatusTime("Initialized weights status updater");
ClientsStatusUpdater csu = new ClientsStatusUpdater(dso, ds, t);
Logger.printStatusTime("Initialized clients status updater");
UptimeStatusUpdater usu = new UptimeStatusUpdater(dso, ds);
Logger.printStatusTime("Initialized uptime status updater");
- StatusUpdater[] sus = new StatusUpdater[] { ndw, bdw, wsu, csu, usu };
+ StatusUpdater[] sus = new StatusUpdater[] { ndw, bsu, wsu, csu, usu };
+ BandwidthDocumentWriter bdw = new BandwidthDocumentWriter(dso, ds, t);
+ Logger.printStatusTime("Initialized bandwidth document writer");
WeightsDocumentWriter wdw = new WeightsDocumentWriter(dso, ds, t);
Logger.printStatusTime("Initialized weights document writer");
ClientsDocumentWriter cdw = new ClientsDocumentWriter(dso, ds, t);
@@ -95,7 +97,8 @@ public class Main {
}
/* TODO Print status updater statistics for *all* status updaters once
* all data writers have been separated. */
- for (DocumentWriter dw : new DocumentWriter[] { wdw, cdw, udw }) {
+ for (DocumentWriter dw : new DocumentWriter[] { bdw, wdw, cdw,
+ udw }) {
String statsString = dw.getStatsString();
if (statsString != null) {
Logger.printStatistics(dw.getClass().getSimpleName(),
1
0

11 Apr '14
commit e152b6761b125ddcbfe57fc8713043e2af0ad2a3
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Fri Mar 14 15:53:54 2014 +0100
Split node data writer into two classes.
---
.../torproject/onionoo/DetailsDocumentWriter.java | 380 ++++++++
src/org/torproject/onionoo/Main.java | 15 +-
src/org/torproject/onionoo/NodeDataWriter.java | 959 --------------------
.../onionoo/NodeDetailsStatusUpdater.java | 634 +++++++++++++
4 files changed, 1022 insertions(+), 966 deletions(-)
diff --git a/src/org/torproject/onionoo/DetailsDocumentWriter.java b/src/org/torproject/onionoo/DetailsDocumentWriter.java
new file mode 100644
index 0000000..0fd47c5
--- /dev/null
+++ b/src/org/torproject/onionoo/DetailsDocumentWriter.java
@@ -0,0 +1,380 @@
+package org.torproject.onionoo;
+
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Scanner;
+import java.util.Set;
+import java.util.SortedSet;
+import java.util.TimeZone;
+import java.util.TreeSet;
+
+import org.apache.commons.lang.StringEscapeUtils;
+import org.torproject.descriptor.Descriptor;
+import org.torproject.descriptor.ExitList;
+import org.torproject.descriptor.ExitListEntry;
+
+public class DetailsDocumentWriter implements DescriptorListener,
+ FingerprintListener, DocumentWriter {
+
+ private DescriptorSource descriptorSource;
+
+ private DocumentStore documentStore;
+
+ private long now;
+
+ public DetailsDocumentWriter(DescriptorSource descriptorSource,
+ DocumentStore documentStore, Time time) {
+ this.descriptorSource = descriptorSource;
+ this.documentStore = documentStore;
+ this.now = time.currentTimeMillis();
+ this.registerDescriptorListeners();
+ this.registerFingerprintListeners();
+ }
+
+ private void registerDescriptorListeners() {
+ this.descriptorSource.registerDescriptorListener(this,
+ DescriptorType.EXIT_LISTS);
+ }
+
+ public void processDescriptor(Descriptor descriptor, boolean relay) {
+ if (descriptor instanceof ExitList) {
+ this.processExitList((ExitList) descriptor);
+ }
+ }
+
+ private Map<String, Set<ExitListEntry>> exitListEntries =
+ new HashMap<String, Set<ExitListEntry>>();
+
+ /* TODO Processing descriptors should really be done in
+ * NodeDetailsStatusUpdater, not here. This is also a bug, because
+ * we're only considering newly published exit lists. */
+ private void processExitList(ExitList exitList) {
+ for (ExitListEntry exitListEntry : exitList.getExitListEntries()) {
+ if (exitListEntry.getScanMillis() <
+ this.now - 24L * 60L * 60L * 1000L) {
+ continue;
+ }
+ String fingerprint = exitListEntry.getFingerprint();
+ if (!this.exitListEntries.containsKey(fingerprint)) {
+ this.exitListEntries.put(fingerprint,
+ new HashSet<ExitListEntry>());
+ }
+ this.exitListEntries.get(fingerprint).add(exitListEntry);
+ }
+ }
+
+ private void registerFingerprintListeners() {
+ this.descriptorSource.registerFingerprintListener(this,
+ DescriptorType.RELAY_CONSENSUSES);
+ this.descriptorSource.registerFingerprintListener(this,
+ DescriptorType.RELAY_SERVER_DESCRIPTORS);
+ this.descriptorSource.registerFingerprintListener(this,
+ DescriptorType.BRIDGE_STATUSES);
+ this.descriptorSource.registerFingerprintListener(this,
+ DescriptorType.BRIDGE_SERVER_DESCRIPTORS);
+ this.descriptorSource.registerFingerprintListener(this,
+ DescriptorType.BRIDGE_POOL_ASSIGNMENTS);
+ this.descriptorSource.registerFingerprintListener(this,
+ DescriptorType.EXIT_LISTS);
+ }
+
+ private SortedSet<String> newRelays = new TreeSet<String>(),
+ newBridges = new TreeSet<String>();
+
+ public void processFingerprints(SortedSet<String> fingerprints,
+ boolean relay) {
+ if (relay) {
+ this.newRelays.addAll(fingerprints);
+ } else {
+ this.newBridges.addAll(fingerprints);
+ }
+ }
+
+ public void writeDocuments() {
+ this.updateRelayDetailsFiles();
+ this.updateBridgeDetailsFiles();
+ Logger.printStatusTime("Wrote details document files");
+ }
+
+ private void updateRelayDetailsFiles() {
+ SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
+ "yyyy-MM-dd HH:mm:ss");
+ dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ for (String fingerprint : this.newRelays) {
+
+ /* Generate network-status-specific part. */
+ NodeStatus entry = this.documentStore.retrieve(NodeStatus.class,
+ true, fingerprint);
+ if (entry == null) {
+ continue;
+ }
+ String nickname = entry.getNickname();
+ String address = entry.getAddress();
+ List<String> orAddresses = new ArrayList<String>();
+ orAddresses.add(address + ":" + entry.getOrPort());
+ orAddresses.addAll(entry.getOrAddressesAndPorts());
+ StringBuilder orAddressesAndPortsBuilder = new StringBuilder();
+ int addressesWritten = 0;
+ for (String orAddress : orAddresses) {
+ orAddressesAndPortsBuilder.append(
+ (addressesWritten++ > 0 ? "," : "") + "\""
+ + orAddress.toLowerCase() + "\"");
+ }
+ String lastSeen = dateTimeFormat.format(entry.getLastSeenMillis());
+ String firstSeen = dateTimeFormat.format(
+ entry.getFirstSeenMillis());
+ String lastChangedOrAddress = dateTimeFormat.format(
+ entry.getLastChangedOrAddress());
+ String running = entry.getRunning() ? "true" : "false";
+ int dirPort = entry.getDirPort();
+ String countryCode = entry.getCountryCode();
+ String latitude = entry.getLatitude();
+ String longitude = entry.getLongitude();
+ String countryName = entry.getCountryName();
+ String regionName = entry.getRegionName();
+ String cityName = entry.getCityName();
+ String aSNumber = entry.getASNumber();
+ String aSName = entry.getASName();
+ long consensusWeight = entry.getConsensusWeight();
+ String hostName = entry.getHostName();
+ double advertisedBandwidthFraction =
+ entry.getAdvertisedBandwidthFraction();
+ double consensusWeightFraction = entry.getConsensusWeightFraction();
+ double guardProbability = entry.getGuardProbability();
+ double middleProbability = entry.getMiddleProbability();
+ double exitProbability = entry.getExitProbability();
+ String defaultPolicy = entry.getDefaultPolicy();
+ String portList = entry.getPortList();
+ Boolean recommendedVersion = entry.getRecommendedVersion();
+ StringBuilder sb = new StringBuilder();
+ sb.append("{\"version\":1,\n"
+ + "\"nickname\":\"" + nickname + "\",\n"
+ + "\"fingerprint\":\"" + fingerprint + "\",\n"
+ + "\"or_addresses\":[" + orAddressesAndPortsBuilder.toString()
+ + "]");
+ if (dirPort != 0) {
+ sb.append(",\n\"dir_address\":\"" + address + ":" + dirPort
+ + "\"");
+ }
+ sb.append(",\n\"last_seen\":\"" + lastSeen + "\"");
+ sb.append(",\n\"first_seen\":\"" + firstSeen + "\"");
+ sb.append(",\n\"last_changed_address_or_port\":\""
+ + lastChangedOrAddress + "\"");
+ sb.append(",\n\"running\":" + running);
+ SortedSet<String> relayFlags = entry.getRelayFlags();
+ if (!relayFlags.isEmpty()) {
+ sb.append(",\n\"flags\":[");
+ int written = 0;
+ for (String relayFlag : relayFlags) {
+ sb.append((written++ > 0 ? "," : "") + "\"" + relayFlag + "\"");
+ }
+ sb.append("]");
+ }
+ if (countryCode != null) {
+ sb.append(",\n\"country\":\"" + countryCode + "\"");
+ }
+ if (latitude != null) {
+ sb.append(",\n\"latitude\":" + latitude);
+ }
+ if (longitude != null) {
+ sb.append(",\n\"longitude\":" + longitude);
+ }
+ if (countryName != null) {
+ sb.append(",\n\"country_name\":\""
+ + escapeJSON(countryName) + "\"");
+ }
+ if (regionName != null) {
+ sb.append(",\n\"region_name\":\""
+ + escapeJSON(regionName) + "\"");
+ }
+ if (cityName != null) {
+ sb.append(",\n\"city_name\":\""
+ + escapeJSON(cityName) + "\"");
+ }
+ if (aSNumber != null) {
+ sb.append(",\n\"as_number\":\""
+ + escapeJSON(aSNumber) + "\"");
+ }
+ if (aSName != null) {
+ sb.append(",\n\"as_name\":\""
+ + escapeJSON(aSName) + "\"");
+ }
+ if (consensusWeight >= 0L) {
+ sb.append(",\n\"consensus_weight\":"
+ + String.valueOf(consensusWeight));
+ }
+ if (hostName != null) {
+ sb.append(",\n\"host_name\":\""
+ + escapeJSON(hostName) + "\"");
+ }
+ if (advertisedBandwidthFraction >= 0.0) {
+ sb.append(String.format(
+ ",\n\"advertised_bandwidth_fraction\":%.9f",
+ advertisedBandwidthFraction));
+ }
+ if (consensusWeightFraction >= 0.0) {
+ sb.append(String.format(",\n\"consensus_weight_fraction\":%.9f",
+ consensusWeightFraction));
+ }
+ if (guardProbability >= 0.0) {
+ sb.append(String.format(",\n\"guard_probability\":%.9f",
+ guardProbability));
+ }
+ if (middleProbability >= 0.0) {
+ sb.append(String.format(",\n\"middle_probability\":%.9f",
+ middleProbability));
+ }
+ if (exitProbability >= 0.0) {
+ sb.append(String.format(",\n\"exit_probability\":%.9f",
+ exitProbability));
+ }
+ if (defaultPolicy != null && (defaultPolicy.equals("accept") ||
+ defaultPolicy.equals("reject")) && portList != null) {
+ sb.append(",\n\"exit_policy_summary\":{\"" + defaultPolicy
+ + "\":[");
+ int portsWritten = 0;
+ for (String portOrPortRange : portList.split(",")) {
+ sb.append((portsWritten++ > 0 ? "," : "")
+ + "\"" + portOrPortRange + "\"");
+ }
+ sb.append("]}");
+ }
+ if (recommendedVersion != null) {
+ sb.append(",\n\"recommended_version\":" + (recommendedVersion ?
+ "true" : "false"));
+ }
+
+ /* Add exit addresses if at least one of them is distinct from the
+ * onion-routing addresses. */
+ if (exitListEntries.containsKey(fingerprint)) {
+ for (ExitListEntry exitListEntry :
+ exitListEntries.get(fingerprint)) {
+ entry.addExitAddress(exitListEntry.getExitAddress());
+ }
+ }
+ if (!entry.getExitAddresses().isEmpty()) {
+ sb.append(",\n\"exit_addresses\":[");
+ int written = 0;
+ for (String exitAddress : entry.getExitAddresses()) {
+ sb.append((written++ > 0 ? "," : "") + "\""
+ + exitAddress.toLowerCase() + "\"");
+ }
+ sb.append("]");
+ }
+
+ /* Append descriptor-specific part from details status file, and
+ * update contact in node status. */
+ DetailsStatus detailsStatus = this.documentStore.retrieve(
+ DetailsStatus.class, false, fingerprint);
+ if (detailsStatus != null &&
+ detailsStatus.documentString.length() > 0) {
+ sb.append(",\n" + detailsStatus.documentString);
+ String contact = null;
+ Scanner s = new Scanner(detailsStatus.documentString);
+ while (s.hasNextLine()) {
+ String line = s.nextLine();
+ if (!line.startsWith("\"contact\":")) {
+ continue;
+ }
+ int start = "\"contact\":\"".length(), end = line.length() - 1;
+ if (line.endsWith(",")) {
+ end--;
+ }
+ contact = unescapeJSON(line.substring(start, end));
+ break;
+ }
+ s.close();
+ entry.setContact(contact);
+ }
+
+ /* Finish details string. */
+ sb.append("\n}\n");
+
+ /* Write details file to disk. */
+ DetailsDocument detailsDocument = new DetailsDocument();
+ detailsDocument.documentString = sb.toString();
+ this.documentStore.store(detailsDocument, fingerprint);
+ }
+ }
+
+ private void updateBridgeDetailsFiles() {
+ SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
+ "yyyy-MM-dd HH:mm:ss");
+ dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ for (String fingerprint : this.newBridges) {
+
+ /* Generate network-status-specific part. */
+ NodeStatus entry = this.documentStore.retrieve(NodeStatus.class,
+ true, fingerprint);
+ if (entry == null) {
+ continue;
+ }
+ String nickname = entry.getNickname();
+ String lastSeen = dateTimeFormat.format(entry.getLastSeenMillis());
+ String firstSeen = dateTimeFormat.format(
+ entry.getFirstSeenMillis());
+ String running = entry.getRunning() ? "true" : "false";
+ String address = entry.getAddress();
+ List<String> orAddresses = new ArrayList<String>();
+ orAddresses.add(address + ":" + entry.getOrPort());
+ orAddresses.addAll(entry.getOrAddressesAndPorts());
+ StringBuilder orAddressesAndPortsBuilder = new StringBuilder();
+ int addressesWritten = 0;
+ for (String orAddress : orAddresses) {
+ orAddressesAndPortsBuilder.append(
+ (addressesWritten++ > 0 ? "," : "") + "\""
+ + orAddress.toLowerCase() + "\"");
+ }
+ StringBuilder sb = new StringBuilder();
+ sb.append("{\"version\":1,\n"
+ + "\"nickname\":\"" + nickname + "\",\n"
+ + "\"hashed_fingerprint\":\"" + fingerprint + "\",\n"
+ + "\"or_addresses\":[" + orAddressesAndPortsBuilder.toString()
+ + "],\n\"last_seen\":\"" + lastSeen + "\",\n\"first_seen\":\""
+ + firstSeen + "\",\n\"running\":" + running);
+
+ SortedSet<String> relayFlags = entry.getRelayFlags();
+ if (!relayFlags.isEmpty()) {
+ sb.append(",\n\"flags\":[");
+ int written = 0;
+ for (String relayFlag : relayFlags) {
+ sb.append((written++ > 0 ? "," : "") + "\"" + relayFlag + "\"");
+ }
+ sb.append("]");
+ }
+
+ /* Append descriptor-specific part from details status file. */
+ DetailsStatus detailsStatus = this.documentStore.retrieve(
+ DetailsStatus.class, false, fingerprint);
+ if (detailsStatus != null &&
+ detailsStatus.documentString.length() > 0) {
+ sb.append(",\n" + detailsStatus.documentString);
+ }
+
+ /* Finish details string. */
+ sb.append("\n}\n");
+
+ /* Write details file to disk. */
+ DetailsDocument detailsDocument = new DetailsDocument();
+ detailsDocument.documentString = sb.toString();
+ this.documentStore.store(detailsDocument, fingerprint);
+ }
+ }
+
+ private static String escapeJSON(String s) {
+ return StringEscapeUtils.escapeJavaScript(s).replaceAll("\\\\'", "'");
+ }
+
+ private static String unescapeJSON(String s) {
+ return StringEscapeUtils.unescapeJavaScript(s.replaceAll("'", "\\'"));
+ }
+
+ public String getStatsString() {
+ /* TODO Add statistics string. */
+ return null;
+ }
+}
diff --git a/src/org/torproject/onionoo/Main.java b/src/org/torproject/onionoo/Main.java
index 60db116..355baac 100644
--- a/src/org/torproject/onionoo/Main.java
+++ b/src/org/torproject/onionoo/Main.java
@@ -30,7 +30,8 @@ public class Main {
Logger.printStatusTime("Initialized Geoip lookup service");
ReverseDomainNameResolver rdnr = new ReverseDomainNameResolver(t);
Logger.printStatusTime("Initialized reverse domain name resolver");
- NodeDataWriter ndw = new NodeDataWriter(dso, rdnr, ls, ds, t);
+ NodeDetailsStatusUpdater ndsu = new NodeDetailsStatusUpdater(dso,
+ rdnr, ls, ds, t);
Logger.printStatusTime("Initialized node data writer");
BandwidthStatusUpdater bsu = new BandwidthStatusUpdater(dso, ds, t);
Logger.printStatusTime("Initialized bandwidth status updater");
@@ -40,8 +41,11 @@ public class Main {
Logger.printStatusTime("Initialized clients status updater");
UptimeStatusUpdater usu = new UptimeStatusUpdater(dso, ds);
Logger.printStatusTime("Initialized uptime status updater");
- StatusUpdater[] sus = new StatusUpdater[] { ndw, bsu, wsu, csu, usu };
+ StatusUpdater[] sus = new StatusUpdater[] { ndsu, bsu, wsu, csu,
+ usu };
+ DetailsDocumentWriter ddw = new DetailsDocumentWriter(dso, ds, t);
+ Logger.printStatusTime("Initialized details document writer");
BandwidthDocumentWriter bdw = new BandwidthDocumentWriter(dso, ds, t);
Logger.printStatusTime("Initialized bandwidth document writer");
WeightsDocumentWriter wdw = new WeightsDocumentWriter(dso, ds, t);
@@ -50,7 +54,7 @@ public class Main {
Logger.printStatusTime("Initialized clients document writer");
UptimeDocumentWriter udw = new UptimeDocumentWriter(dso, ds, t);
Logger.printStatusTime("Initialized uptime document writer");
- DocumentWriter[] dws = new DocumentWriter[] { ndw, bdw, wdw, cdw,
+ DocumentWriter[] dws = new DocumentWriter[] { ddw, bdw, wdw, cdw,
udw };
Logger.printStatus("Reading descriptors.");
@@ -95,10 +99,7 @@ public class Main {
statsString);
}
}
- /* TODO Print status updater statistics for *all* status updaters once
- * all data writers have been separated. */
- for (DocumentWriter dw : new DocumentWriter[] { bdw, wdw, cdw,
- udw }) {
+ for (DocumentWriter dw : dws) {
String statsString = dw.getStatsString();
if (statsString != null) {
Logger.printStatistics(dw.getClass().getSimpleName(),
diff --git a/src/org/torproject/onionoo/NodeDataWriter.java b/src/org/torproject/onionoo/NodeDataWriter.java
deleted file mode 100644
index 5941b1c..0000000
--- a/src/org/torproject/onionoo/NodeDataWriter.java
+++ /dev/null
@@ -1,959 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.onionoo;
-
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Scanner;
-import java.util.Set;
-import java.util.SortedMap;
-import java.util.SortedSet;
-import java.util.TimeZone;
-import java.util.TreeMap;
-import java.util.TreeSet;
-
-import org.apache.commons.lang.StringEscapeUtils;
-import org.torproject.descriptor.BridgeNetworkStatus;
-import org.torproject.descriptor.BridgePoolAssignment;
-import org.torproject.descriptor.Descriptor;
-import org.torproject.descriptor.ExitList;
-import org.torproject.descriptor.ExitListEntry;
-import org.torproject.descriptor.NetworkStatusEntry;
-import org.torproject.descriptor.RelayNetworkStatusConsensus;
-import org.torproject.descriptor.ServerDescriptor;
-import org.torproject.onionoo.LookupService.LookupResult;
-
-/* Write updated summary and details data files to disk.
- *
- * The parts of details files coming from server descriptors always come
- * from the last known descriptor of a relay or bridge, not from the
- * descriptor that was last referenced in a network status. */
-public class NodeDataWriter implements DescriptorListener, StatusUpdater,
- FingerprintListener, DocumentWriter {
-
- private DescriptorSource descriptorSource;
-
- private ReverseDomainNameResolver reverseDomainNameResolver;
-
- private LookupService lookupService;
-
- private DocumentStore documentStore;
-
- private long now;
-
- private SortedMap<String, NodeStatus> knownNodes =
- new TreeMap<String, NodeStatus>();
-
- private SortedMap<String, NodeStatus> relays;
-
- private SortedMap<String, NodeStatus> bridges;
-
- private long relaysLastValidAfterMillis = -1L;
-
- private long bridgesLastPublishedMillis = -1L;
-
- private SortedMap<String, Integer> lastBandwidthWeights = null;
-
- private int relayConsensusesProcessed = 0, bridgeStatusesProcessed = 0;
-
- public NodeDataWriter(DescriptorSource descriptorSource,
- ReverseDomainNameResolver reverseDomainNameResolver,
- LookupService lookupService, DocumentStore documentStore,
- Time time) {
- this.descriptorSource = descriptorSource;
- this.reverseDomainNameResolver = reverseDomainNameResolver;
- this.lookupService = lookupService;
- this.documentStore = documentStore;
- this.now = time.currentTimeMillis();
- this.registerDescriptorListeners();
- this.registerFingerprintListeners();
- }
-
- private void registerDescriptorListeners() {
- this.descriptorSource.registerDescriptorListener(this,
- DescriptorType.RELAY_CONSENSUSES);
- this.descriptorSource.registerDescriptorListener(this,
- DescriptorType.RELAY_SERVER_DESCRIPTORS);
- this.descriptorSource.registerDescriptorListener(this,
- DescriptorType.BRIDGE_STATUSES);
- this.descriptorSource.registerDescriptorListener(this,
- DescriptorType.BRIDGE_SERVER_DESCRIPTORS);
- this.descriptorSource.registerDescriptorListener(this,
- DescriptorType.BRIDGE_POOL_ASSIGNMENTS);
- this.descriptorSource.registerDescriptorListener(this,
- DescriptorType.EXIT_LISTS);
- }
-
- private void registerFingerprintListeners() {
- /* TODO Not used yet.
- this.descriptorSource.registerFingerprintListener(this,
- DescriptorType.RELAY_CONSENSUSES);
- this.descriptorSource.registerFingerprintListener(this,
- DescriptorType.RELAY_SERVER_DESCRIPTORS);
- this.descriptorSource.registerFingerprintListener(this,
- DescriptorType.BRIDGE_STATUSES);
- this.descriptorSource.registerFingerprintListener(this,
- DescriptorType.BRIDGE_SERVER_DESCRIPTORS);
- this.descriptorSource.registerFingerprintListener(this,
- DescriptorType.BRIDGE_POOL_ASSIGNMENTS);
- this.descriptorSource.registerFingerprintListener(this,
- DescriptorType.EXIT_LISTS);*/
- }
-
- public void processDescriptor(Descriptor descriptor, boolean relay) {
- if (descriptor instanceof RelayNetworkStatusConsensus) {
- this.updateRelayNetworkStatusConsensus(
- (RelayNetworkStatusConsensus) descriptor);
- } else if (descriptor instanceof ServerDescriptor && relay) {
- this.processRelayServerDescriptor((ServerDescriptor) descriptor);
- } else if (descriptor instanceof BridgeNetworkStatus) {
- this.updateBridgeNetworkStatus((BridgeNetworkStatus) descriptor);
- } else if (descriptor instanceof ServerDescriptor && !relay) {
- this.processBridgeServerDescriptor((ServerDescriptor) descriptor);
- } else if (descriptor instanceof BridgePoolAssignment) {
- this.processBridgePoolAssignment((BridgePoolAssignment) descriptor);
- } else if (descriptor instanceof ExitList) {
- this.processExitList((ExitList) descriptor);
- }
- }
-
- public void processFingerprints(SortedSet<String> fingerprints,
- boolean relay) {
- /* TODO Not used yet. */
- }
-
- public void updateStatuses() {
- this.readStatusSummary();
- Logger.printStatusTime("Read status summary");
- this.setCurrentNodes();
- Logger.printStatusTime("Set current node fingerprints");
- this.startReverseDomainNameLookups();
- Logger.printStatusTime("Started reverse domain name lookups");
- this.lookUpCitiesAndASes();
- Logger.printStatusTime("Looked up cities and ASes");
- this.setRunningBits();
- Logger.printStatusTime("Set running bits");
- this.calculatePathSelectionProbabilities();
- Logger.printStatusTime("Calculated path selection probabilities");
- this.finishReverseDomainNameLookups();
- Logger.printStatusTime("Finished reverse domain name lookups");
- this.writeStatusSummary();
- Logger.printStatusTime("Wrote status summary");
- this.writeOutSummary();
- Logger.printStatusTime("Wrote out summary");
- }
-
- public void writeDocuments() {
- this.writeOutDetails();
- Logger.printStatusTime("Wrote detail data files");
- }
-
- private void updateRelayNetworkStatusConsensus(
- RelayNetworkStatusConsensus consensus) {
- long validAfterMillis = consensus.getValidAfterMillis();
- if (validAfterMillis > this.relaysLastValidAfterMillis) {
- this.relaysLastValidAfterMillis = validAfterMillis;
- }
- Set<String> recommendedVersions = null;
- if (consensus.getRecommendedServerVersions() != null) {
- recommendedVersions = new HashSet<String>();
- for (String recommendedVersion :
- consensus.getRecommendedServerVersions()) {
- recommendedVersions.add("Tor " + recommendedVersion);
- }
- }
- for (NetworkStatusEntry entry :
- consensus.getStatusEntries().values()) {
- String nickname = entry.getNickname();
- String fingerprint = entry.getFingerprint();
- String address = entry.getAddress();
- SortedSet<String> orAddressesAndPorts = new TreeSet<String>(
- entry.getOrAddresses());
- int orPort = entry.getOrPort();
- int dirPort = entry.getDirPort();
- SortedSet<String> relayFlags = entry.getFlags();
- long consensusWeight = entry.getBandwidth();
- String defaultPolicy = entry.getDefaultPolicy();
- String portList = entry.getPortList();
- Boolean recommendedVersion = (recommendedVersions == null ||
- entry.getVersion() == null) ? null :
- recommendedVersions.contains(entry.getVersion());
- NodeStatus newNodeStatus = new NodeStatus(true, nickname,
- fingerprint, address, orAddressesAndPorts, null,
- validAfterMillis, orPort, dirPort, relayFlags, consensusWeight,
- null, null, -1L, defaultPolicy, portList, validAfterMillis,
- validAfterMillis, null, null, recommendedVersion);
- if (this.knownNodes.containsKey(fingerprint)) {
- this.knownNodes.get(fingerprint).update(newNodeStatus);
- } else {
- this.knownNodes.put(fingerprint, newNodeStatus);
- }
- }
- this.relayConsensusesProcessed++;
- if (this.relaysLastValidAfterMillis == validAfterMillis) {
- this.lastBandwidthWeights = consensus.getBandwidthWeights();
- }
- }
-
- private void updateBridgeNetworkStatus(BridgeNetworkStatus status) {
- long publishedMillis = status.getPublishedMillis();
- if (publishedMillis > this.bridgesLastPublishedMillis) {
- this.bridgesLastPublishedMillis = publishedMillis;
- }
- for (NetworkStatusEntry entry : status.getStatusEntries().values()) {
- String nickname = entry.getNickname();
- String fingerprint = entry.getFingerprint();
- String address = entry.getAddress();
- SortedSet<String> orAddressesAndPorts = new TreeSet<String>(
- entry.getOrAddresses());
- int orPort = entry.getOrPort();
- int dirPort = entry.getDirPort();
- SortedSet<String> relayFlags = entry.getFlags();
- NodeStatus newNodeStatus = new NodeStatus(false, nickname,
- fingerprint, address, orAddressesAndPorts, null,
- publishedMillis, orPort, dirPort, relayFlags, -1L, "??", null,
- -1L, null, null, publishedMillis, -1L, null, null, null);
- if (this.knownNodes.containsKey(fingerprint)) {
- this.knownNodes.get(fingerprint).update(newNodeStatus);
- } else {
- this.knownNodes.put(fingerprint, newNodeStatus);
- }
- }
- this.bridgeStatusesProcessed++;
- }
-
- private void readStatusSummary() {
- SortedSet<String> fingerprints = this.documentStore.list(
- NodeStatus.class, true);
- for (String fingerprint : fingerprints) {
- NodeStatus node = this.documentStore.retrieve(NodeStatus.class,
- true, fingerprint);
- if (node.isRelay()) {
- this.relaysLastValidAfterMillis = Math.max(
- this.relaysLastValidAfterMillis, node.getLastSeenMillis());
- } else {
- this.bridgesLastPublishedMillis = Math.max(
- this.bridgesLastPublishedMillis, node.getLastSeenMillis());
- }
- if (this.knownNodes.containsKey(fingerprint)) {
- this.knownNodes.get(fingerprint).update(node);
- } else {
- this.knownNodes.put(fingerprint, node);
- }
- }
- }
-
- private void setRunningBits() {
- for (NodeStatus node : this.knownNodes.values()) {
- if (node.isRelay() && node.getRelayFlags().contains("Running") &&
- node.getLastSeenMillis() == this.relaysLastValidAfterMillis) {
- node.setRunning(true);
- }
- if (!node.isRelay() && node.getRelayFlags().contains("Running") &&
- node.getLastSeenMillis() == this.bridgesLastPublishedMillis) {
- node.setRunning(true);
- }
- }
- }
-
- private void lookUpCitiesAndASes() {
- SortedSet<String> addressStrings = new TreeSet<String>();
- for (NodeStatus node : this.knownNodes.values()) {
- if (node.isRelay()) {
- addressStrings.add(node.getAddress());
- }
- }
- if (addressStrings.isEmpty()) {
- System.err.println("No relay IP addresses to resolve to cities or "
- + "ASN.");
- return;
- }
- SortedMap<String, LookupResult> lookupResults =
- this.lookupService.lookup(addressStrings);
- for (NodeStatus node : knownNodes.values()) {
- if (!node.isRelay()) {
- continue;
- }
- String addressString = node.getAddress();
- if (lookupResults.containsKey(addressString)) {
- LookupResult lookupResult = lookupResults.get(addressString);
- node.setCountryCode(lookupResult.countryCode);
- node.setCountryName(lookupResult.countryName);
- node.setRegionName(lookupResult.regionName);
- node.setCityName(lookupResult.cityName);
- node.setLatitude(lookupResult.latitude);
- node.setLongitude(lookupResult.longitude);
- node.setASNumber(lookupResult.aSNumber);
- node.setASName(lookupResult.aSName);
- }
- }
- }
-
- private void writeStatusSummary() {
- this.writeSummary(true);
- }
-
- private void writeOutSummary() {
- this.writeSummary(false);
- }
-
- private void writeSummary(boolean includeArchive) {
- SortedMap<String, NodeStatus> nodes = includeArchive
- ? this.knownNodes : this.getCurrentNodes();
- for (Map.Entry<String, NodeStatus> e : nodes.entrySet()) {
- this.documentStore.store(e.getValue(), e.getKey());
- }
- }
-
- private SortedMap<String, NodeStatus> getCurrentNodes() {
- long cutoff = Math.max(this.relaysLastValidAfterMillis,
- this.bridgesLastPublishedMillis) - 7L * 24L * 60L * 60L * 1000L;
- SortedMap<String, NodeStatus> currentNodes =
- new TreeMap<String, NodeStatus>();
- for (Map.Entry<String, NodeStatus> e : this.knownNodes.entrySet()) {
- if (e.getValue().getLastSeenMillis() >= cutoff) {
- currentNodes.put(e.getKey(), e.getValue());
- }
- }
- return currentNodes;
- }
-
- private void processRelayServerDescriptor(
- ServerDescriptor descriptor) {
- String fingerprint = descriptor.getFingerprint();
- DetailsStatus detailsStatus = this.documentStore.retrieve(
- DetailsStatus.class, false, fingerprint);
- SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
- "yyyy-MM-dd HH:mm:ss");
- dateTimeFormat.setLenient(false);
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- String publishedDateTime =
- dateTimeFormat.format(descriptor.getPublishedMillis());
- if (detailsStatus != null) {
- String detailsString = detailsStatus.documentString;
- String descPublishedLine = "\"desc_published\":\""
- + publishedDateTime + "\",";
- Scanner s = new Scanner(detailsString);
- while (s.hasNextLine()) {
- String line = s.nextLine();
- if (line.startsWith("\"desc_published\":\"")) {
- if (descPublishedLine.compareTo(line) < 0) {
- return;
- } else {
- break;
- }
- }
- }
- s.close();
- }
- StringBuilder sb = new StringBuilder();
- String lastRestartedString = dateTimeFormat.format(
- descriptor.getPublishedMillis() - descriptor.getUptime() * 1000L);
- int bandwidthRate = descriptor.getBandwidthRate();
- int bandwidthBurst = descriptor.getBandwidthBurst();
- int observedBandwidth = descriptor.getBandwidthObserved();
- int advertisedBandwidth = Math.min(bandwidthRate,
- Math.min(bandwidthBurst, observedBandwidth));
- sb.append("\"desc_published\":\"" + publishedDateTime + "\",\n"
- + "\"last_restarted\":\"" + lastRestartedString + "\",\n"
- + "\"bandwidth_rate\":" + bandwidthRate + ",\n"
- + "\"bandwidth_burst\":" + bandwidthBurst + ",\n"
- + "\"observed_bandwidth\":" + observedBandwidth + ",\n"
- + "\"advertised_bandwidth\":" + advertisedBandwidth + ",\n"
- + "\"exit_policy\":[");
- int written = 0;
- for (String exitPolicyLine : descriptor.getExitPolicyLines()) {
- sb.append((written++ > 0 ? "," : "") + "\n \"" + exitPolicyLine
- + "\"");
- }
- sb.append("\n]");
- if (descriptor.getContact() != null) {
- sb.append(",\n\"contact\":\""
- + escapeJSON(descriptor.getContact()) + "\"");
- }
- if (descriptor.getPlatform() != null) {
- sb.append(",\n\"platform\":\""
- + escapeJSON(descriptor.getPlatform()) + "\"");
- }
- if (descriptor.getFamilyEntries() != null) {
- sb.append(",\n\"family\":[");
- written = 0;
- for (String familyEntry : descriptor.getFamilyEntries()) {
- sb.append((written++ > 0 ? "," : "") + "\n \"" + familyEntry
- + "\"");
- }
- sb.append("\n]");
- }
- if (descriptor.getIpv6DefaultPolicy() != null &&
- (descriptor.getIpv6DefaultPolicy().equals("accept") ||
- descriptor.getIpv6DefaultPolicy().equals("reject")) &&
- descriptor.getIpv6PortList() != null) {
- sb.append(",\n\"exit_policy_v6_summary\":{\""
- + descriptor.getIpv6DefaultPolicy() + "\":[");
- int portsWritten = 0;
- for (String portOrPortRange :
- descriptor.getIpv6PortList().split(",")) {
- sb.append((portsWritten++ > 0 ? "," : "") + "\"" + portOrPortRange
- + "\"");
- }
- sb.append("]}");
- }
- if (descriptor.isHibernating()) {
- sb.append(",\n\"hibernating\":true");
- }
- detailsStatus = new DetailsStatus();
- detailsStatus.documentString = sb.toString();
- this.documentStore.store(detailsStatus, fingerprint);
- }
-
- private void processBridgeServerDescriptor(
- ServerDescriptor descriptor) {
- String fingerprint = descriptor.getFingerprint();
- DetailsStatus detailsStatus = this.documentStore.retrieve(
- DetailsStatus.class, false, fingerprint);
- SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
- "yyyy-MM-dd HH:mm:ss");
- dateTimeFormat.setLenient(false);
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- String publishedDateTime =
- dateTimeFormat.format(descriptor.getPublishedMillis());
- String poolAssignmentLine = null;
- if (detailsStatus != null) {
- String detailsString = detailsStatus.documentString;
- String descPublishedLine = "\"desc_published\":\""
- + publishedDateTime + "\",";
- Scanner s = new Scanner(detailsString);
- while (s.hasNextLine()) {
- String line = s.nextLine();
- if (line.startsWith("\"pool_assignment\":")) {
- poolAssignmentLine = line;
- } else if (line.startsWith("\"desc_published\":") &&
- descPublishedLine.compareTo(line) < 0) {
- return;
- }
- }
- s.close();
- }
- StringBuilder sb = new StringBuilder();
- String lastRestartedString = dateTimeFormat.format(
- descriptor.getPublishedMillis() - descriptor.getUptime() * 1000L);
- int advertisedBandwidth = Math.min(descriptor.getBandwidthRate(),
- Math.min(descriptor.getBandwidthBurst(),
- descriptor.getBandwidthObserved()));
- sb.append("\"desc_published\":\"" + publishedDateTime + "\",\n"
- + "\"last_restarted\":\"" + lastRestartedString + "\",\n"
- + "\"advertised_bandwidth\":" + advertisedBandwidth + ",\n"
- + "\"platform\":\"" + escapeJSON(descriptor.getPlatform())
- + "\"");
- if (poolAssignmentLine != null) {
- sb.append(",\n" + poolAssignmentLine);
- }
- detailsStatus = new DetailsStatus();
- detailsStatus.documentString = sb.toString();
- this.documentStore.store(detailsStatus, fingerprint);
- }
-
- private void processBridgePoolAssignment(
- BridgePoolAssignment bridgePoolAssignment) {
- for (Map.Entry<String, String> e :
- bridgePoolAssignment.getEntries().entrySet()) {
- String fingerprint = e.getKey();
- String details = e.getValue();
- StringBuilder sb = new StringBuilder();
- DetailsStatus detailsStatus = this.documentStore.retrieve(
- DetailsStatus.class, false, fingerprint);
- if (detailsStatus != null) {
- String detailsString = detailsStatus.documentString;
- Scanner s = new Scanner(detailsString);
- int linesWritten = 0;
- boolean endsWithComma = false;
- while (s.hasNextLine()) {
- String line = s.nextLine();
- if (!line.startsWith("\"pool_assignment\":")) {
- sb.append((linesWritten++ > 0 ? "\n" : "") + line);
- endsWithComma = line.endsWith(",");
- }
- }
- s.close();
- if (sb.length() > 0) {
- sb.append((endsWithComma ? "" : ",") + "\n");
- }
- }
- sb.append("\"pool_assignment\":\"" + details + "\"");
- detailsStatus = new DetailsStatus();
- detailsStatus.documentString = sb.toString();
- this.documentStore.store(detailsStatus, fingerprint);
- }
- }
-
- private Map<String, Set<ExitListEntry>> exitListEntries =
- new HashMap<String, Set<ExitListEntry>>();
-
- private void processExitList(ExitList exitList) {
- for (ExitListEntry exitListEntry : exitList.getExitListEntries()) {
- if (exitListEntry.getScanMillis() <
- this.now - 24L * 60L * 60L * 1000L) {
- continue;
- }
- String fingerprint = exitListEntry.getFingerprint();
- if (!this.exitListEntries.containsKey(fingerprint)) {
- this.exitListEntries.put(fingerprint,
- new HashSet<ExitListEntry>());
- }
- this.exitListEntries.get(fingerprint).add(exitListEntry);
- }
- }
-
- private void setCurrentNodes() {
- SortedMap<String, NodeStatus> currentNodes = this.getCurrentNodes();
- this.relays = new TreeMap<String, NodeStatus>();
- this.bridges = new TreeMap<String, NodeStatus>();
- for (Map.Entry<String, NodeStatus> e : currentNodes.entrySet()) {
- if (e.getValue().isRelay()) {
- this.relays.put(e.getKey(), e.getValue());
- } else {
- this.bridges.put(e.getKey(), e.getValue());
- }
- }
- }
-
- private void startReverseDomainNameLookups() {
- Map<String, Long> addressLastLookupTimes =
- new HashMap<String, Long>();
- for (NodeStatus relay : relays.values()) {
- addressLastLookupTimes.put(relay.getAddress(),
- relay.getLastRdnsLookup());
- }
- this.reverseDomainNameResolver.setAddresses(addressLastLookupTimes);
- this.reverseDomainNameResolver.startReverseDomainNameLookups();
- }
-
- private void finishReverseDomainNameLookups() {
- this.reverseDomainNameResolver.finishReverseDomainNameLookups();
- Map<String, String> lookupResults =
- this.reverseDomainNameResolver.getLookupResults();
- long startedRdnsLookups =
- this.reverseDomainNameResolver.getLookupStartMillis();
- for (NodeStatus relay : relays.values()) {
- if (lookupResults.containsKey(relay.getAddress())) {
- relay.setHostName(lookupResults.get(relay.getAddress()));
- relay.setLastRdnsLookup(startedRdnsLookups);
- }
- }
- }
-
- private void calculatePathSelectionProbabilities() {
- boolean consensusContainsBandwidthWeights = false;
- double wgg = 0.0, wgd = 0.0, wmg = 0.0, wmm = 0.0, wme = 0.0,
- wmd = 0.0, wee = 0.0, wed = 0.0;
- if (this.lastBandwidthWeights != null) {
- SortedSet<String> weightKeys = new TreeSet<String>(Arrays.asList(
- "Wgg,Wgd,Wmg,Wmm,Wme,Wmd,Wee,Wed".split(",")));
- weightKeys.removeAll(this.lastBandwidthWeights.keySet());
- if (weightKeys.isEmpty()) {
- consensusContainsBandwidthWeights = true;
- wgg = ((double) this.lastBandwidthWeights.get("Wgg")) / 10000.0;
- wgd = ((double) this.lastBandwidthWeights.get("Wgd")) / 10000.0;
- wmg = ((double) this.lastBandwidthWeights.get("Wmg")) / 10000.0;
- wmm = ((double) this.lastBandwidthWeights.get("Wmm")) / 10000.0;
- wme = ((double) this.lastBandwidthWeights.get("Wme")) / 10000.0;
- wmd = ((double) this.lastBandwidthWeights.get("Wmd")) / 10000.0;
- wee = ((double) this.lastBandwidthWeights.get("Wee")) / 10000.0;
- wed = ((double) this.lastBandwidthWeights.get("Wed")) / 10000.0;
- }
- } else {
- System.err.println("Could not determine most recent Wxx parameter "
- + "values, probably because we didn't parse a consensus in "
- + "this execution. All relays' guard/middle/exit weights are "
- + "going to be 0.0.");
- }
- SortedMap<String, Double>
- advertisedBandwidths = new TreeMap<String, Double>(),
- consensusWeights = new TreeMap<String, Double>(),
- guardWeights = new TreeMap<String, Double>(),
- middleWeights = new TreeMap<String, Double>(),
- exitWeights = new TreeMap<String, Double>();
- double totalAdvertisedBandwidth = 0.0;
- double totalConsensusWeight = 0.0;
- double totalGuardWeight = 0.0;
- double totalMiddleWeight = 0.0;
- double totalExitWeight = 0.0;
- for (Map.Entry<String, NodeStatus> e : this.relays.entrySet()) {
- String fingerprint = e.getKey();
- NodeStatus relay = e.getValue();
- if (!relay.getRunning()) {
- continue;
- }
- boolean isExit = relay.getRelayFlags().contains("Exit") &&
- !relay.getRelayFlags().contains("BadExit");
- boolean isGuard = relay.getRelayFlags().contains("Guard");
- DetailsStatus detailsStatus = this.documentStore.retrieve(
- DetailsStatus.class, false, fingerprint);
- if (detailsStatus != null) {
- double advertisedBandwidth = -1.0;
- String detailsString = detailsStatus.documentString;
- Scanner s = new Scanner(detailsString);
- while (s.hasNextLine()) {
- String line = s.nextLine();
- if (!line.startsWith("\"advertised_bandwidth\":")) {
- continue;
- }
- try {
- advertisedBandwidth = (double) Integer.parseInt(
- line.split(":")[1].replaceAll(",", ""));
- } catch (NumberFormatException ex) {
- /* Handle below. */
- }
- break;
- }
- s.close();
- if (advertisedBandwidth >= 0.0) {
- advertisedBandwidths.put(fingerprint, advertisedBandwidth);
- totalAdvertisedBandwidth += advertisedBandwidth;
- }
- }
- double consensusWeight = (double) relay.getConsensusWeight();
- consensusWeights.put(fingerprint, consensusWeight);
- totalConsensusWeight += consensusWeight;
- if (consensusContainsBandwidthWeights) {
- double guardWeight = consensusWeight,
- middleWeight = consensusWeight,
- exitWeight = consensusWeight;
- if (isGuard && isExit) {
- guardWeight *= wgd;
- middleWeight *= wmd;
- exitWeight *= wed;
- } else if (isGuard) {
- guardWeight *= wgg;
- middleWeight *= wmg;
- exitWeight = 0.0;
- } else if (isExit) {
- guardWeight = 0.0;
- middleWeight *= wme;
- exitWeight *= wee;
- } else {
- guardWeight = 0.0;
- middleWeight *= wmm;
- exitWeight = 0.0;
- }
- guardWeights.put(fingerprint, guardWeight);
- middleWeights.put(fingerprint, middleWeight);
- exitWeights.put(fingerprint, exitWeight);
- totalGuardWeight += guardWeight;
- totalMiddleWeight += middleWeight;
- totalExitWeight += exitWeight;
- }
- }
- for (Map.Entry<String, NodeStatus> e : this.relays.entrySet()) {
- String fingerprint = e.getKey();
- NodeStatus relay = e.getValue();
- if (advertisedBandwidths.containsKey(fingerprint)) {
- relay.setAdvertisedBandwidthFraction(advertisedBandwidths.get(
- fingerprint) / totalAdvertisedBandwidth);
- }
- if (consensusWeights.containsKey(fingerprint)) {
- relay.setConsensusWeightFraction(consensusWeights.get(fingerprint)
- / totalConsensusWeight);
- }
- if (guardWeights.containsKey(fingerprint)) {
- relay.setGuardProbability(guardWeights.get(fingerprint)
- / totalGuardWeight);
- }
- if (middleWeights.containsKey(fingerprint)) {
- relay.setMiddleProbability(middleWeights.get(fingerprint)
- / totalMiddleWeight);
- }
- if (exitWeights.containsKey(fingerprint)) {
- relay.setExitProbability(exitWeights.get(fingerprint)
- / totalExitWeight);
- }
- }
- }
-
- private void writeOutDetails() {
- this.updateRelayDetailsFiles();
- this.updateBridgeDetailsFiles();
- }
-
- private static String escapeJSON(String s) {
- return StringEscapeUtils.escapeJavaScript(s).replaceAll("\\\\'", "'");
- }
-
- private static String unescapeJSON(String s) {
- return StringEscapeUtils.unescapeJavaScript(s.replaceAll("'", "\\'"));
- }
-
- private void updateRelayDetailsFiles() {
- SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
- "yyyy-MM-dd HH:mm:ss");
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- for (Map.Entry<String, NodeStatus> relay : this.relays.entrySet()) {
- String fingerprint = relay.getKey();
-
- /* Generate network-status-specific part. */
- NodeStatus entry = relay.getValue();
- String nickname = entry.getNickname();
- String address = entry.getAddress();
- List<String> orAddresses = new ArrayList<String>();
- orAddresses.add(address + ":" + entry.getOrPort());
- orAddresses.addAll(entry.getOrAddressesAndPorts());
- StringBuilder orAddressesAndPortsBuilder = new StringBuilder();
- int addressesWritten = 0;
- for (String orAddress : orAddresses) {
- orAddressesAndPortsBuilder.append(
- (addressesWritten++ > 0 ? "," : "") + "\""
- + orAddress.toLowerCase() + "\"");
- }
- String lastSeen = dateTimeFormat.format(entry.getLastSeenMillis());
- String firstSeen = dateTimeFormat.format(
- entry.getFirstSeenMillis());
- String lastChangedOrAddress = dateTimeFormat.format(
- entry.getLastChangedOrAddress());
- String running = entry.getRunning() ? "true" : "false";
- int dirPort = entry.getDirPort();
- String countryCode = entry.getCountryCode();
- String latitude = entry.getLatitude();
- String longitude = entry.getLongitude();
- String countryName = entry.getCountryName();
- String regionName = entry.getRegionName();
- String cityName = entry.getCityName();
- String aSNumber = entry.getASNumber();
- String aSName = entry.getASName();
- long consensusWeight = entry.getConsensusWeight();
- String hostName = entry.getHostName();
- double advertisedBandwidthFraction =
- entry.getAdvertisedBandwidthFraction();
- double consensusWeightFraction = entry.getConsensusWeightFraction();
- double guardProbability = entry.getGuardProbability();
- double middleProbability = entry.getMiddleProbability();
- double exitProbability = entry.getExitProbability();
- String defaultPolicy = entry.getDefaultPolicy();
- String portList = entry.getPortList();
- Boolean recommendedVersion = entry.getRecommendedVersion();
- StringBuilder sb = new StringBuilder();
- sb.append("{\"version\":1,\n"
- + "\"nickname\":\"" + nickname + "\",\n"
- + "\"fingerprint\":\"" + fingerprint + "\",\n"
- + "\"or_addresses\":[" + orAddressesAndPortsBuilder.toString()
- + "]");
- if (dirPort != 0) {
- sb.append(",\n\"dir_address\":\"" + address + ":" + dirPort
- + "\"");
- }
- sb.append(",\n\"last_seen\":\"" + lastSeen + "\"");
- sb.append(",\n\"first_seen\":\"" + firstSeen + "\"");
- sb.append(",\n\"last_changed_address_or_port\":\""
- + lastChangedOrAddress + "\"");
- sb.append(",\n\"running\":" + running);
- SortedSet<String> relayFlags = entry.getRelayFlags();
- if (!relayFlags.isEmpty()) {
- sb.append(",\n\"flags\":[");
- int written = 0;
- for (String relayFlag : relayFlags) {
- sb.append((written++ > 0 ? "," : "") + "\"" + relayFlag + "\"");
- }
- sb.append("]");
- }
- if (countryCode != null) {
- sb.append(",\n\"country\":\"" + countryCode + "\"");
- }
- if (latitude != null) {
- sb.append(",\n\"latitude\":" + latitude);
- }
- if (longitude != null) {
- sb.append(",\n\"longitude\":" + longitude);
- }
- if (countryName != null) {
- sb.append(",\n\"country_name\":\""
- + escapeJSON(countryName) + "\"");
- }
- if (regionName != null) {
- sb.append(",\n\"region_name\":\""
- + escapeJSON(regionName) + "\"");
- }
- if (cityName != null) {
- sb.append(",\n\"city_name\":\""
- + escapeJSON(cityName) + "\"");
- }
- if (aSNumber != null) {
- sb.append(",\n\"as_number\":\""
- + escapeJSON(aSNumber) + "\"");
- }
- if (aSName != null) {
- sb.append(",\n\"as_name\":\""
- + escapeJSON(aSName) + "\"");
- }
- if (consensusWeight >= 0L) {
- sb.append(",\n\"consensus_weight\":"
- + String.valueOf(consensusWeight));
- }
- if (hostName != null) {
- sb.append(",\n\"host_name\":\""
- + escapeJSON(hostName) + "\"");
- }
- if (advertisedBandwidthFraction >= 0.0) {
- sb.append(String.format(
- ",\n\"advertised_bandwidth_fraction\":%.9f",
- advertisedBandwidthFraction));
- }
- if (consensusWeightFraction >= 0.0) {
- sb.append(String.format(",\n\"consensus_weight_fraction\":%.9f",
- consensusWeightFraction));
- }
- if (guardProbability >= 0.0) {
- sb.append(String.format(",\n\"guard_probability\":%.9f",
- guardProbability));
- }
- if (middleProbability >= 0.0) {
- sb.append(String.format(",\n\"middle_probability\":%.9f",
- middleProbability));
- }
- if (exitProbability >= 0.0) {
- sb.append(String.format(",\n\"exit_probability\":%.9f",
- exitProbability));
- }
- if (defaultPolicy != null && (defaultPolicy.equals("accept") ||
- defaultPolicy.equals("reject")) && portList != null) {
- sb.append(",\n\"exit_policy_summary\":{\"" + defaultPolicy
- + "\":[");
- int portsWritten = 0;
- for (String portOrPortRange : portList.split(",")) {
- sb.append((portsWritten++ > 0 ? "," : "")
- + "\"" + portOrPortRange + "\"");
- }
- sb.append("]}");
- }
- if (recommendedVersion != null) {
- sb.append(",\n\"recommended_version\":" + (recommendedVersion ?
- "true" : "false"));
- }
-
- /* Add exit addresses if at least one of them is distinct from the
- * onion-routing addresses. */
- if (exitListEntries.containsKey(fingerprint)) {
- for (ExitListEntry exitListEntry :
- exitListEntries.get(fingerprint)) {
- entry.addExitAddress(exitListEntry.getExitAddress());
- }
- }
- if (!entry.getExitAddresses().isEmpty()) {
- sb.append(",\n\"exit_addresses\":[");
- int written = 0;
- for (String exitAddress : entry.getExitAddresses()) {
- sb.append((written++ > 0 ? "," : "") + "\""
- + exitAddress.toLowerCase() + "\"");
- }
- sb.append("]");
- }
-
- /* Append descriptor-specific part from details status file, and
- * update contact in node status. */
- DetailsStatus detailsStatus = this.documentStore.retrieve(
- DetailsStatus.class, false, fingerprint);
- if (detailsStatus != null &&
- detailsStatus.documentString.length() > 0) {
- sb.append(",\n" + detailsStatus.documentString);
- String contact = null;
- Scanner s = new Scanner(detailsStatus.documentString);
- while (s.hasNextLine()) {
- String line = s.nextLine();
- if (!line.startsWith("\"contact\":")) {
- continue;
- }
- int start = "\"contact\":\"".length(), end = line.length() - 1;
- if (line.endsWith(",")) {
- end--;
- }
- contact = unescapeJSON(line.substring(start, end));
- break;
- }
- s.close();
- entry.setContact(contact);
- }
-
- /* Finish details string. */
- sb.append("\n}\n");
-
- /* Write details file to disk. */
- DetailsDocument detailsDocument = new DetailsDocument();
- detailsDocument.documentString = sb.toString();
- this.documentStore.store(detailsDocument, fingerprint);
- }
- }
-
- private void updateBridgeDetailsFiles() {
- SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
- "yyyy-MM-dd HH:mm:ss");
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- for (Map.Entry<String, NodeStatus> bridge : this.bridges.entrySet()) {
- String fingerprint = bridge.getKey();
-
- /* Generate network-status-specific part. */
- NodeStatus entry = bridge.getValue();
- String nickname = entry.getNickname();
- String lastSeen = dateTimeFormat.format(entry.getLastSeenMillis());
- String firstSeen = dateTimeFormat.format(
- entry.getFirstSeenMillis());
- String running = entry.getRunning() ? "true" : "false";
- String address = entry.getAddress();
- List<String> orAddresses = new ArrayList<String>();
- orAddresses.add(address + ":" + entry.getOrPort());
- orAddresses.addAll(entry.getOrAddressesAndPorts());
- StringBuilder orAddressesAndPortsBuilder = new StringBuilder();
- int addressesWritten = 0;
- for (String orAddress : orAddresses) {
- orAddressesAndPortsBuilder.append(
- (addressesWritten++ > 0 ? "," : "") + "\""
- + orAddress.toLowerCase() + "\"");
- }
- StringBuilder sb = new StringBuilder();
- sb.append("{\"version\":1,\n"
- + "\"nickname\":\"" + nickname + "\",\n"
- + "\"hashed_fingerprint\":\"" + fingerprint + "\",\n"
- + "\"or_addresses\":[" + orAddressesAndPortsBuilder.toString()
- + "],\n\"last_seen\":\"" + lastSeen + "\",\n\"first_seen\":\""
- + firstSeen + "\",\n\"running\":" + running);
-
- SortedSet<String> relayFlags = entry.getRelayFlags();
- if (!relayFlags.isEmpty()) {
- sb.append(",\n\"flags\":[");
- int written = 0;
- for (String relayFlag : relayFlags) {
- sb.append((written++ > 0 ? "," : "") + "\"" + relayFlag + "\"");
- }
- sb.append("]");
- }
-
- /* Append descriptor-specific part from details status file. */
- DetailsStatus detailsStatus = this.documentStore.retrieve(
- DetailsStatus.class, false, fingerprint);
- if (detailsStatus != null &&
- detailsStatus.documentString.length() > 0) {
- sb.append(",\n" + detailsStatus.documentString);
- }
-
- /* Finish details string. */
- sb.append("\n}\n");
-
- /* Write details file to disk. */
- DetailsDocument detailsDocument = new DetailsDocument();
- detailsDocument.documentString = sb.toString();
- this.documentStore.store(detailsDocument, fingerprint);
- }
- }
-
- public String getStatsString() {
- StringBuilder sb = new StringBuilder();
- sb.append(" " + Logger.formatDecimalNumber(
- relayConsensusesProcessed) + " relay consensuses processed\n");
- sb.append(" " + Logger.formatDecimalNumber(bridgeStatusesProcessed)
- + " bridge statuses processed\n");
- return sb.toString();
- }
-}
-
diff --git a/src/org/torproject/onionoo/NodeDetailsStatusUpdater.java b/src/org/torproject/onionoo/NodeDetailsStatusUpdater.java
new file mode 100644
index 0000000..3c67aea
--- /dev/null
+++ b/src/org/torproject/onionoo/NodeDetailsStatusUpdater.java
@@ -0,0 +1,634 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.onionoo;
+
+import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Scanner;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.SortedSet;
+import java.util.TimeZone;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
+import org.apache.commons.lang.StringEscapeUtils;
+import org.torproject.descriptor.BridgeNetworkStatus;
+import org.torproject.descriptor.BridgePoolAssignment;
+import org.torproject.descriptor.Descriptor;
+import org.torproject.descriptor.NetworkStatusEntry;
+import org.torproject.descriptor.RelayNetworkStatusConsensus;
+import org.torproject.descriptor.ServerDescriptor;
+import org.torproject.onionoo.LookupService.LookupResult;
+
+public class NodeDetailsStatusUpdater implements DescriptorListener,
+ StatusUpdater {
+
+ private DescriptorSource descriptorSource;
+
+ private ReverseDomainNameResolver reverseDomainNameResolver;
+
+ private LookupService lookupService;
+
+ private DocumentStore documentStore;
+
+ private long now;
+
+ private SortedMap<String, NodeStatus> knownNodes =
+ new TreeMap<String, NodeStatus>();
+
+ private SortedMap<String, NodeStatus> relays;
+
+ private SortedMap<String, NodeStatus> bridges;
+
+ private long relaysLastValidAfterMillis = -1L;
+
+ private long bridgesLastPublishedMillis = -1L;
+
+ private SortedMap<String, Integer> lastBandwidthWeights = null;
+
+ private int relayConsensusesProcessed = 0, bridgeStatusesProcessed = 0;
+
+ public NodeDetailsStatusUpdater(DescriptorSource descriptorSource,
+ ReverseDomainNameResolver reverseDomainNameResolver,
+ LookupService lookupService, DocumentStore documentStore,
+ Time time) {
+ this.descriptorSource = descriptorSource;
+ this.reverseDomainNameResolver = reverseDomainNameResolver;
+ this.lookupService = lookupService;
+ this.documentStore = documentStore;
+ this.now = time.currentTimeMillis();
+ this.registerDescriptorListeners();
+ }
+
+ private void registerDescriptorListeners() {
+ this.descriptorSource.registerDescriptorListener(this,
+ DescriptorType.RELAY_CONSENSUSES);
+ this.descriptorSource.registerDescriptorListener(this,
+ DescriptorType.RELAY_SERVER_DESCRIPTORS);
+ this.descriptorSource.registerDescriptorListener(this,
+ DescriptorType.BRIDGE_STATUSES);
+ this.descriptorSource.registerDescriptorListener(this,
+ DescriptorType.BRIDGE_SERVER_DESCRIPTORS);
+ this.descriptorSource.registerDescriptorListener(this,
+ DescriptorType.BRIDGE_POOL_ASSIGNMENTS);
+ this.descriptorSource.registerDescriptorListener(this,
+ DescriptorType.EXIT_LISTS);
+ }
+
+ public void processDescriptor(Descriptor descriptor, boolean relay) {
+ if (descriptor instanceof RelayNetworkStatusConsensus) {
+ this.processRelayNetworkStatusConsensus(
+ (RelayNetworkStatusConsensus) descriptor);
+ } else if (descriptor instanceof ServerDescriptor && relay) {
+ this.processRelayServerDescriptor((ServerDescriptor) descriptor);
+ } else if (descriptor instanceof BridgeNetworkStatus) {
+ this.processBridgeNetworkStatus((BridgeNetworkStatus) descriptor);
+ } else if (descriptor instanceof ServerDescriptor && !relay) {
+ this.processBridgeServerDescriptor((ServerDescriptor) descriptor);
+ } else if (descriptor instanceof BridgePoolAssignment) {
+ this.processBridgePoolAssignment((BridgePoolAssignment) descriptor);
+ }
+ }
+
+ public void updateStatuses() {
+ this.readStatusSummary();
+ Logger.printStatusTime("Read status summary");
+ this.setCurrentNodes();
+ Logger.printStatusTime("Set current node fingerprints");
+ this.startReverseDomainNameLookups();
+ Logger.printStatusTime("Started reverse domain name lookups");
+ this.lookUpCitiesAndASes();
+ Logger.printStatusTime("Looked up cities and ASes");
+ this.setRunningBits();
+ Logger.printStatusTime("Set running bits");
+ this.calculatePathSelectionProbabilities();
+ Logger.printStatusTime("Calculated path selection probabilities");
+ this.finishReverseDomainNameLookups();
+ Logger.printStatusTime("Finished reverse domain name lookups");
+ this.writeStatusSummary();
+ Logger.printStatusTime("Wrote status summary");
+ /* TODO Does anything break if we take the following out?
+ * Like, does DocumentStore make sure there's a status/summary with
+ * all node statuses and an out/summary with only recent ones?
+ this.writeOutSummary();
+ Logger.printStatusTime("Wrote out summary");*/
+ }
+
+ private void processRelayNetworkStatusConsensus(
+ RelayNetworkStatusConsensus consensus) {
+ long validAfterMillis = consensus.getValidAfterMillis();
+ if (validAfterMillis > this.relaysLastValidAfterMillis) {
+ this.relaysLastValidAfterMillis = validAfterMillis;
+ }
+ Set<String> recommendedVersions = null;
+ if (consensus.getRecommendedServerVersions() != null) {
+ recommendedVersions = new HashSet<String>();
+ for (String recommendedVersion :
+ consensus.getRecommendedServerVersions()) {
+ recommendedVersions.add("Tor " + recommendedVersion);
+ }
+ }
+ for (NetworkStatusEntry entry :
+ consensus.getStatusEntries().values()) {
+ String nickname = entry.getNickname();
+ String fingerprint = entry.getFingerprint();
+ String address = entry.getAddress();
+ SortedSet<String> orAddressesAndPorts = new TreeSet<String>(
+ entry.getOrAddresses());
+ int orPort = entry.getOrPort();
+ int dirPort = entry.getDirPort();
+ SortedSet<String> relayFlags = entry.getFlags();
+ long consensusWeight = entry.getBandwidth();
+ String defaultPolicy = entry.getDefaultPolicy();
+ String portList = entry.getPortList();
+ Boolean recommendedVersion = (recommendedVersions == null ||
+ entry.getVersion() == null) ? null :
+ recommendedVersions.contains(entry.getVersion());
+ NodeStatus newNodeStatus = new NodeStatus(true, nickname,
+ fingerprint, address, orAddressesAndPorts, null,
+ validAfterMillis, orPort, dirPort, relayFlags, consensusWeight,
+ null, null, -1L, defaultPolicy, portList, validAfterMillis,
+ validAfterMillis, null, null, recommendedVersion);
+ if (this.knownNodes.containsKey(fingerprint)) {
+ this.knownNodes.get(fingerprint).update(newNodeStatus);
+ } else {
+ this.knownNodes.put(fingerprint, newNodeStatus);
+ }
+ }
+ this.relayConsensusesProcessed++;
+ if (this.relaysLastValidAfterMillis == validAfterMillis) {
+ this.lastBandwidthWeights = consensus.getBandwidthWeights();
+ }
+ }
+
+ private void processBridgeNetworkStatus(BridgeNetworkStatus status) {
+ long publishedMillis = status.getPublishedMillis();
+ if (publishedMillis > this.bridgesLastPublishedMillis) {
+ this.bridgesLastPublishedMillis = publishedMillis;
+ }
+ for (NetworkStatusEntry entry : status.getStatusEntries().values()) {
+ String nickname = entry.getNickname();
+ String fingerprint = entry.getFingerprint();
+ String address = entry.getAddress();
+ SortedSet<String> orAddressesAndPorts = new TreeSet<String>(
+ entry.getOrAddresses());
+ int orPort = entry.getOrPort();
+ int dirPort = entry.getDirPort();
+ SortedSet<String> relayFlags = entry.getFlags();
+ NodeStatus newNodeStatus = new NodeStatus(false, nickname,
+ fingerprint, address, orAddressesAndPorts, null,
+ publishedMillis, orPort, dirPort, relayFlags, -1L, "??", null,
+ -1L, null, null, publishedMillis, -1L, null, null, null);
+ if (this.knownNodes.containsKey(fingerprint)) {
+ this.knownNodes.get(fingerprint).update(newNodeStatus);
+ } else {
+ this.knownNodes.put(fingerprint, newNodeStatus);
+ }
+ }
+ this.bridgeStatusesProcessed++;
+ }
+
+ private void readStatusSummary() {
+ SortedSet<String> fingerprints = this.documentStore.list(
+ NodeStatus.class, true);
+ for (String fingerprint : fingerprints) {
+ NodeStatus node = this.documentStore.retrieve(NodeStatus.class,
+ true, fingerprint);
+ if (node.isRelay()) {
+ this.relaysLastValidAfterMillis = Math.max(
+ this.relaysLastValidAfterMillis, node.getLastSeenMillis());
+ } else {
+ this.bridgesLastPublishedMillis = Math.max(
+ this.bridgesLastPublishedMillis, node.getLastSeenMillis());
+ }
+ if (this.knownNodes.containsKey(fingerprint)) {
+ this.knownNodes.get(fingerprint).update(node);
+ } else {
+ this.knownNodes.put(fingerprint, node);
+ }
+ }
+ }
+
+ private void setRunningBits() {
+ for (NodeStatus node : this.knownNodes.values()) {
+ if (node.isRelay() && node.getRelayFlags().contains("Running") &&
+ node.getLastSeenMillis() == this.relaysLastValidAfterMillis) {
+ node.setRunning(true);
+ }
+ if (!node.isRelay() && node.getRelayFlags().contains("Running") &&
+ node.getLastSeenMillis() == this.bridgesLastPublishedMillis) {
+ node.setRunning(true);
+ }
+ }
+ }
+
+ private void lookUpCitiesAndASes() {
+ SortedSet<String> addressStrings = new TreeSet<String>();
+ for (NodeStatus node : this.knownNodes.values()) {
+ if (node.isRelay()) {
+ addressStrings.add(node.getAddress());
+ }
+ }
+ if (addressStrings.isEmpty()) {
+ System.err.println("No relay IP addresses to resolve to cities or "
+ + "ASN.");
+ return;
+ }
+ SortedMap<String, LookupResult> lookupResults =
+ this.lookupService.lookup(addressStrings);
+ for (NodeStatus node : knownNodes.values()) {
+ if (!node.isRelay()) {
+ continue;
+ }
+ String addressString = node.getAddress();
+ if (lookupResults.containsKey(addressString)) {
+ LookupResult lookupResult = lookupResults.get(addressString);
+ node.setCountryCode(lookupResult.countryCode);
+ node.setCountryName(lookupResult.countryName);
+ node.setRegionName(lookupResult.regionName);
+ node.setCityName(lookupResult.cityName);
+ node.setLatitude(lookupResult.latitude);
+ node.setLongitude(lookupResult.longitude);
+ node.setASNumber(lookupResult.aSNumber);
+ node.setASName(lookupResult.aSName);
+ }
+ }
+ }
+
+ private void writeStatusSummary() {
+ this.writeSummary(true);
+ }
+
+ private void writeSummary(boolean includeArchive) {
+ SortedMap<String, NodeStatus> nodes = includeArchive
+ ? this.knownNodes : this.getCurrentNodes();
+ for (Map.Entry<String, NodeStatus> e : nodes.entrySet()) {
+ this.documentStore.store(e.getValue(), e.getKey());
+ }
+ }
+
+ private SortedMap<String, NodeStatus> getCurrentNodes() {
+ long cutoff = Math.max(this.relaysLastValidAfterMillis,
+ this.bridgesLastPublishedMillis) - 7L * 24L * 60L * 60L * 1000L;
+ SortedMap<String, NodeStatus> currentNodes =
+ new TreeMap<String, NodeStatus>();
+ for (Map.Entry<String, NodeStatus> e : this.knownNodes.entrySet()) {
+ if (e.getValue().getLastSeenMillis() >= cutoff) {
+ currentNodes.put(e.getKey(), e.getValue());
+ }
+ }
+ return currentNodes;
+ }
+
+ private void processRelayServerDescriptor(
+ ServerDescriptor descriptor) {
+ String fingerprint = descriptor.getFingerprint();
+ DetailsStatus detailsStatus = this.documentStore.retrieve(
+ DetailsStatus.class, false, fingerprint);
+ SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
+ "yyyy-MM-dd HH:mm:ss");
+ dateTimeFormat.setLenient(false);
+ dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ String publishedDateTime =
+ dateTimeFormat.format(descriptor.getPublishedMillis());
+ if (detailsStatus != null) {
+ String detailsString = detailsStatus.documentString;
+ String descPublishedLine = "\"desc_published\":\""
+ + publishedDateTime + "\",";
+ Scanner s = new Scanner(detailsString);
+ while (s.hasNextLine()) {
+ String line = s.nextLine();
+ if (line.startsWith("\"desc_published\":\"")) {
+ if (descPublishedLine.compareTo(line) < 0) {
+ return;
+ } else {
+ break;
+ }
+ }
+ }
+ s.close();
+ }
+ StringBuilder sb = new StringBuilder();
+ String lastRestartedString = dateTimeFormat.format(
+ descriptor.getPublishedMillis() - descriptor.getUptime() * 1000L);
+ int bandwidthRate = descriptor.getBandwidthRate();
+ int bandwidthBurst = descriptor.getBandwidthBurst();
+ int observedBandwidth = descriptor.getBandwidthObserved();
+ int advertisedBandwidth = Math.min(bandwidthRate,
+ Math.min(bandwidthBurst, observedBandwidth));
+ sb.append("\"desc_published\":\"" + publishedDateTime + "\",\n"
+ + "\"last_restarted\":\"" + lastRestartedString + "\",\n"
+ + "\"bandwidth_rate\":" + bandwidthRate + ",\n"
+ + "\"bandwidth_burst\":" + bandwidthBurst + ",\n"
+ + "\"observed_bandwidth\":" + observedBandwidth + ",\n"
+ + "\"advertised_bandwidth\":" + advertisedBandwidth + ",\n"
+ + "\"exit_policy\":[");
+ int written = 0;
+ for (String exitPolicyLine : descriptor.getExitPolicyLines()) {
+ sb.append((written++ > 0 ? "," : "") + "\n \"" + exitPolicyLine
+ + "\"");
+ }
+ sb.append("\n]");
+ if (descriptor.getContact() != null) {
+ sb.append(",\n\"contact\":\""
+ + escapeJSON(descriptor.getContact()) + "\"");
+ }
+ if (descriptor.getPlatform() != null) {
+ sb.append(",\n\"platform\":\""
+ + escapeJSON(descriptor.getPlatform()) + "\"");
+ }
+ if (descriptor.getFamilyEntries() != null) {
+ sb.append(",\n\"family\":[");
+ written = 0;
+ for (String familyEntry : descriptor.getFamilyEntries()) {
+ sb.append((written++ > 0 ? "," : "") + "\n \"" + familyEntry
+ + "\"");
+ }
+ sb.append("\n]");
+ }
+ if (descriptor.getIpv6DefaultPolicy() != null &&
+ (descriptor.getIpv6DefaultPolicy().equals("accept") ||
+ descriptor.getIpv6DefaultPolicy().equals("reject")) &&
+ descriptor.getIpv6PortList() != null) {
+ sb.append(",\n\"exit_policy_v6_summary\":{\""
+ + descriptor.getIpv6DefaultPolicy() + "\":[");
+ int portsWritten = 0;
+ for (String portOrPortRange :
+ descriptor.getIpv6PortList().split(",")) {
+ sb.append((portsWritten++ > 0 ? "," : "") + "\"" + portOrPortRange
+ + "\"");
+ }
+ sb.append("]}");
+ }
+ if (descriptor.isHibernating()) {
+ sb.append(",\n\"hibernating\":true");
+ }
+ detailsStatus = new DetailsStatus();
+ detailsStatus.documentString = sb.toString();
+ this.documentStore.store(detailsStatus, fingerprint);
+ }
+
+ private void processBridgeServerDescriptor(
+ ServerDescriptor descriptor) {
+ String fingerprint = descriptor.getFingerprint();
+ DetailsStatus detailsStatus = this.documentStore.retrieve(
+ DetailsStatus.class, false, fingerprint);
+ SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
+ "yyyy-MM-dd HH:mm:ss");
+ dateTimeFormat.setLenient(false);
+ dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ String publishedDateTime =
+ dateTimeFormat.format(descriptor.getPublishedMillis());
+ String poolAssignmentLine = null;
+ if (detailsStatus != null) {
+ String detailsString = detailsStatus.documentString;
+ String descPublishedLine = "\"desc_published\":\""
+ + publishedDateTime + "\",";
+ Scanner s = new Scanner(detailsString);
+ while (s.hasNextLine()) {
+ String line = s.nextLine();
+ if (line.startsWith("\"pool_assignment\":")) {
+ poolAssignmentLine = line;
+ } else if (line.startsWith("\"desc_published\":") &&
+ descPublishedLine.compareTo(line) < 0) {
+ return;
+ }
+ }
+ s.close();
+ }
+ StringBuilder sb = new StringBuilder();
+ String lastRestartedString = dateTimeFormat.format(
+ descriptor.getPublishedMillis() - descriptor.getUptime() * 1000L);
+ int advertisedBandwidth = Math.min(descriptor.getBandwidthRate(),
+ Math.min(descriptor.getBandwidthBurst(),
+ descriptor.getBandwidthObserved()));
+ sb.append("\"desc_published\":\"" + publishedDateTime + "\",\n"
+ + "\"last_restarted\":\"" + lastRestartedString + "\",\n"
+ + "\"advertised_bandwidth\":" + advertisedBandwidth + ",\n"
+ + "\"platform\":\"" + escapeJSON(descriptor.getPlatform())
+ + "\"");
+ if (poolAssignmentLine != null) {
+ sb.append(",\n" + poolAssignmentLine);
+ }
+ detailsStatus = new DetailsStatus();
+ detailsStatus.documentString = sb.toString();
+ this.documentStore.store(detailsStatus, fingerprint);
+ }
+
+ private static String escapeJSON(String s) {
+ return StringEscapeUtils.escapeJavaScript(s).replaceAll("\\\\'", "'");
+ }
+
+ private void processBridgePoolAssignment(
+ BridgePoolAssignment bridgePoolAssignment) {
+ for (Map.Entry<String, String> e :
+ bridgePoolAssignment.getEntries().entrySet()) {
+ String fingerprint = e.getKey();
+ String details = e.getValue();
+ StringBuilder sb = new StringBuilder();
+ DetailsStatus detailsStatus = this.documentStore.retrieve(
+ DetailsStatus.class, false, fingerprint);
+ if (detailsStatus != null) {
+ String detailsString = detailsStatus.documentString;
+ Scanner s = new Scanner(detailsString);
+ int linesWritten = 0;
+ boolean endsWithComma = false;
+ while (s.hasNextLine()) {
+ String line = s.nextLine();
+ if (!line.startsWith("\"pool_assignment\":")) {
+ sb.append((linesWritten++ > 0 ? "\n" : "") + line);
+ endsWithComma = line.endsWith(",");
+ }
+ }
+ s.close();
+ if (sb.length() > 0) {
+ sb.append((endsWithComma ? "" : ",") + "\n");
+ }
+ }
+ sb.append("\"pool_assignment\":\"" + details + "\"");
+ detailsStatus = new DetailsStatus();
+ detailsStatus.documentString = sb.toString();
+ this.documentStore.store(detailsStatus, fingerprint);
+ }
+ }
+
+ private void setCurrentNodes() {
+ SortedMap<String, NodeStatus> currentNodes = this.getCurrentNodes();
+ this.relays = new TreeMap<String, NodeStatus>();
+ this.bridges = new TreeMap<String, NodeStatus>();
+ for (Map.Entry<String, NodeStatus> e : currentNodes.entrySet()) {
+ if (e.getValue().isRelay()) {
+ this.relays.put(e.getKey(), e.getValue());
+ } else {
+ this.bridges.put(e.getKey(), e.getValue());
+ }
+ }
+ }
+
+ private void startReverseDomainNameLookups() {
+ Map<String, Long> addressLastLookupTimes =
+ new HashMap<String, Long>();
+ for (NodeStatus relay : relays.values()) {
+ addressLastLookupTimes.put(relay.getAddress(),
+ relay.getLastRdnsLookup());
+ }
+ this.reverseDomainNameResolver.setAddresses(addressLastLookupTimes);
+ this.reverseDomainNameResolver.startReverseDomainNameLookups();
+ }
+
+ private void finishReverseDomainNameLookups() {
+ this.reverseDomainNameResolver.finishReverseDomainNameLookups();
+ Map<String, String> lookupResults =
+ this.reverseDomainNameResolver.getLookupResults();
+ long startedRdnsLookups =
+ this.reverseDomainNameResolver.getLookupStartMillis();
+ for (NodeStatus relay : relays.values()) {
+ if (lookupResults.containsKey(relay.getAddress())) {
+ relay.setHostName(lookupResults.get(relay.getAddress()));
+ relay.setLastRdnsLookup(startedRdnsLookups);
+ }
+ }
+ }
+
+ private void calculatePathSelectionProbabilities() {
+ boolean consensusContainsBandwidthWeights = false;
+ double wgg = 0.0, wgd = 0.0, wmg = 0.0, wmm = 0.0, wme = 0.0,
+ wmd = 0.0, wee = 0.0, wed = 0.0;
+ if (this.lastBandwidthWeights != null) {
+ SortedSet<String> weightKeys = new TreeSet<String>(Arrays.asList(
+ "Wgg,Wgd,Wmg,Wmm,Wme,Wmd,Wee,Wed".split(",")));
+ weightKeys.removeAll(this.lastBandwidthWeights.keySet());
+ if (weightKeys.isEmpty()) {
+ consensusContainsBandwidthWeights = true;
+ wgg = ((double) this.lastBandwidthWeights.get("Wgg")) / 10000.0;
+ wgd = ((double) this.lastBandwidthWeights.get("Wgd")) / 10000.0;
+ wmg = ((double) this.lastBandwidthWeights.get("Wmg")) / 10000.0;
+ wmm = ((double) this.lastBandwidthWeights.get("Wmm")) / 10000.0;
+ wme = ((double) this.lastBandwidthWeights.get("Wme")) / 10000.0;
+ wmd = ((double) this.lastBandwidthWeights.get("Wmd")) / 10000.0;
+ wee = ((double) this.lastBandwidthWeights.get("Wee")) / 10000.0;
+ wed = ((double) this.lastBandwidthWeights.get("Wed")) / 10000.0;
+ }
+ } else {
+ System.err.println("Could not determine most recent Wxx parameter "
+ + "values, probably because we didn't parse a consensus in "
+ + "this execution. All relays' guard/middle/exit weights are "
+ + "going to be 0.0.");
+ }
+ SortedMap<String, Double>
+ advertisedBandwidths = new TreeMap<String, Double>(),
+ consensusWeights = new TreeMap<String, Double>(),
+ guardWeights = new TreeMap<String, Double>(),
+ middleWeights = new TreeMap<String, Double>(),
+ exitWeights = new TreeMap<String, Double>();
+ double totalAdvertisedBandwidth = 0.0;
+ double totalConsensusWeight = 0.0;
+ double totalGuardWeight = 0.0;
+ double totalMiddleWeight = 0.0;
+ double totalExitWeight = 0.0;
+ for (Map.Entry<String, NodeStatus> e : this.relays.entrySet()) {
+ String fingerprint = e.getKey();
+ NodeStatus relay = e.getValue();
+ if (!relay.getRunning()) {
+ continue;
+ }
+ boolean isExit = relay.getRelayFlags().contains("Exit") &&
+ !relay.getRelayFlags().contains("BadExit");
+ boolean isGuard = relay.getRelayFlags().contains("Guard");
+ DetailsStatus detailsStatus = this.documentStore.retrieve(
+ DetailsStatus.class, false, fingerprint);
+ if (detailsStatus != null) {
+ double advertisedBandwidth = -1.0;
+ String detailsString = detailsStatus.documentString;
+ Scanner s = new Scanner(detailsString);
+ while (s.hasNextLine()) {
+ String line = s.nextLine();
+ if (!line.startsWith("\"advertised_bandwidth\":")) {
+ continue;
+ }
+ try {
+ advertisedBandwidth = (double) Integer.parseInt(
+ line.split(":")[1].replaceAll(",", ""));
+ } catch (NumberFormatException ex) {
+ /* Handle below. */
+ }
+ break;
+ }
+ s.close();
+ if (advertisedBandwidth >= 0.0) {
+ advertisedBandwidths.put(fingerprint, advertisedBandwidth);
+ totalAdvertisedBandwidth += advertisedBandwidth;
+ }
+ }
+ double consensusWeight = (double) relay.getConsensusWeight();
+ consensusWeights.put(fingerprint, consensusWeight);
+ totalConsensusWeight += consensusWeight;
+ if (consensusContainsBandwidthWeights) {
+ double guardWeight = consensusWeight,
+ middleWeight = consensusWeight,
+ exitWeight = consensusWeight;
+ if (isGuard && isExit) {
+ guardWeight *= wgd;
+ middleWeight *= wmd;
+ exitWeight *= wed;
+ } else if (isGuard) {
+ guardWeight *= wgg;
+ middleWeight *= wmg;
+ exitWeight = 0.0;
+ } else if (isExit) {
+ guardWeight = 0.0;
+ middleWeight *= wme;
+ exitWeight *= wee;
+ } else {
+ guardWeight = 0.0;
+ middleWeight *= wmm;
+ exitWeight = 0.0;
+ }
+ guardWeights.put(fingerprint, guardWeight);
+ middleWeights.put(fingerprint, middleWeight);
+ exitWeights.put(fingerprint, exitWeight);
+ totalGuardWeight += guardWeight;
+ totalMiddleWeight += middleWeight;
+ totalExitWeight += exitWeight;
+ }
+ }
+ for (Map.Entry<String, NodeStatus> e : this.relays.entrySet()) {
+ String fingerprint = e.getKey();
+ NodeStatus relay = e.getValue();
+ if (advertisedBandwidths.containsKey(fingerprint)) {
+ relay.setAdvertisedBandwidthFraction(advertisedBandwidths.get(
+ fingerprint) / totalAdvertisedBandwidth);
+ }
+ if (consensusWeights.containsKey(fingerprint)) {
+ relay.setConsensusWeightFraction(consensusWeights.get(fingerprint)
+ / totalConsensusWeight);
+ }
+ if (guardWeights.containsKey(fingerprint)) {
+ relay.setGuardProbability(guardWeights.get(fingerprint)
+ / totalGuardWeight);
+ }
+ if (middleWeights.containsKey(fingerprint)) {
+ relay.setMiddleProbability(middleWeights.get(fingerprint)
+ / totalMiddleWeight);
+ }
+ if (exitWeights.containsKey(fingerprint)) {
+ relay.setExitProbability(exitWeights.get(fingerprint)
+ / totalExitWeight);
+ }
+ }
+ }
+
+ public String getStatsString() {
+ StringBuilder sb = new StringBuilder();
+ sb.append(" " + Logger.formatDecimalNumber(
+ relayConsensusesProcessed) + " relay consensuses processed\n");
+ sb.append(" " + Logger.formatDecimalNumber(bridgeStatusesProcessed)
+ + " bridge statuses processed\n");
+ return sb.toString();
+ }
+}
+
1
0

[onionoo/master] Simplify weights status updater by taking out threading code.
by karsten@torproject.org 11 Apr '14
by karsten@torproject.org 11 Apr '14
11 Apr '14
commit 97fbdee60d66a627fa9b378fc20767864bf275cc
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Fri Mar 14 12:10:39 2014 +0100
Simplify weights status updater by taking out threading code.
The threading code was never used, because it's broken. No reason to keep
it.
---
.../torproject/onionoo/WeightsStatusUpdater.java | 69 +++++---------------
1 file changed, 15 insertions(+), 54 deletions(-)
diff --git a/src/org/torproject/onionoo/WeightsStatusUpdater.java b/src/org/torproject/onionoo/WeightsStatusUpdater.java
index 5e890ef..79296d3 100644
--- a/src/org/torproject/onionoo/WeightsStatusUpdater.java
+++ b/src/org/torproject/onionoo/WeightsStatusUpdater.java
@@ -3,11 +3,9 @@
package org.torproject.onionoo;
import java.text.SimpleDateFormat;
-import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
-import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
@@ -66,6 +64,7 @@ public class WeightsStatusUpdater implements DescriptorListener,
private void processRelayNetworkConsensus(
RelayNetworkStatusConsensus consensus) {
+ // TODO This does not scale for bulk imports.
this.consensuses.add(consensus);
}
@@ -107,62 +106,24 @@ public class WeightsStatusUpdater implements DescriptorListener,
}
}
- // TODO Use 4 workers once threading problems are solved.
- private static final int HISTORY_UPDATER_WORKERS_NUM = 1;
private void updateWeightsHistory(long validAfterMillis,
long freshUntilMillis,
SortedMap<String, double[]> pathSelectionWeights) {
- List<HistoryUpdateWorker> historyUpdateWorkers =
- new ArrayList<HistoryUpdateWorker>();
- for (int i = 0; i < HISTORY_UPDATER_WORKERS_NUM; i++) {
- HistoryUpdateWorker historyUpdateWorker =
- new HistoryUpdateWorker(validAfterMillis, freshUntilMillis,
- pathSelectionWeights, this);
- historyUpdateWorkers.add(historyUpdateWorker);
- historyUpdateWorker.setDaemon(true);
- historyUpdateWorker.start();
- }
- for (HistoryUpdateWorker historyUpdateWorker : historyUpdateWorkers) {
- try {
- historyUpdateWorker.join();
- } catch (InterruptedException e) {
- /* This is not something that we can take care of. Just leave the
- * worker thread alone. */
- }
- }
- }
-
- private class HistoryUpdateWorker extends Thread {
- private long validAfterMillis;
- private long freshUntilMillis;
- private SortedMap<String, double[]> pathSelectionWeights;
- private WeightsStatusUpdater parent;
- public HistoryUpdateWorker(long validAfterMillis,
- long freshUntilMillis,
- SortedMap<String, double[]> pathSelectionWeights,
- WeightsStatusUpdater parent) {
- this.validAfterMillis = validAfterMillis;
- this.freshUntilMillis = freshUntilMillis;
- this.pathSelectionWeights = pathSelectionWeights;
- this.parent = parent;
- }
- public void run() {
- String fingerprint = null;
- double[] weights = null;
- do {
- fingerprint = null;
- synchronized (pathSelectionWeights) {
- if (!pathSelectionWeights.isEmpty()) {
- fingerprint = pathSelectionWeights.firstKey();
- weights = pathSelectionWeights.remove(fingerprint);
- }
- }
- if (fingerprint != null) {
- this.parent.addToHistory(fingerprint, this.validAfterMillis,
- this.freshUntilMillis, weights);
+ String fingerprint = null;
+ double[] weights = null;
+ do {
+ fingerprint = null;
+ synchronized (pathSelectionWeights) {
+ if (!pathSelectionWeights.isEmpty()) {
+ fingerprint = pathSelectionWeights.firstKey();
+ weights = pathSelectionWeights.remove(fingerprint);
}
- } while (fingerprint != null);
- }
+ }
+ if (fingerprint != null) {
+ this.addToHistory(fingerprint, validAfterMillis,
+ freshUntilMillis, weights);
+ }
+ } while (fingerprint != null);
}
private SortedMap<String, double[]> calculatePathSelectionProbabilities(
1
0

11 Apr '14
commit beb3a4e7e9983d42d4044d11c6589e6bcc58edaa
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Thu Apr 10 20:09:56 2014 +0200
Split weights data writer into two classes.
---
src/org/torproject/onionoo/Main.java | 10 +-
src/org/torproject/onionoo/WeightsDataWriter.java | 582 --------------------
.../torproject/onionoo/WeightsDocumentWriter.java | 222 ++++++++
.../torproject/onionoo/WeightsStatusUpdater.java | 394 +++++++++++++
4 files changed, 622 insertions(+), 586 deletions(-)
diff --git a/src/org/torproject/onionoo/Main.java b/src/org/torproject/onionoo/Main.java
index af33124..434d90c 100644
--- a/src/org/torproject/onionoo/Main.java
+++ b/src/org/torproject/onionoo/Main.java
@@ -34,14 +34,16 @@ public class Main {
Logger.printStatusTime("Initialized node data writer");
BandwidthDataWriter bdw = new BandwidthDataWriter(dso, ds, t);
Logger.printStatusTime("Initialized bandwidth data writer");
- WeightsDataWriter wdw = new WeightsDataWriter(dso, ds, t);
- Logger.printStatusTime("Initialized weights data writer");
+ WeightsStatusUpdater wsu = new WeightsStatusUpdater(dso, ds, t);
+ Logger.printStatusTime("Initialized weights status updater");
ClientsStatusUpdater csu = new ClientsStatusUpdater(dso, ds, t);
Logger.printStatusTime("Initialized clients status updater");
UptimeStatusUpdater usu = new UptimeStatusUpdater(dso, ds);
Logger.printStatusTime("Initialized uptime status updater");
- StatusUpdater[] sus = new StatusUpdater[] { ndw, bdw, wdw, csu, usu };
+ StatusUpdater[] sus = new StatusUpdater[] { ndw, bdw, wsu, csu, usu };
+ WeightsDocumentWriter wdw = new WeightsDocumentWriter(dso, ds, t);
+ Logger.printStatusTime("Initialized weights document writer");
ClientsDocumentWriter cdw = new ClientsDocumentWriter(dso, ds, t);
Logger.printStatusTime("Initialized clients document writer");
UptimeDocumentWriter udw = new UptimeDocumentWriter(dso, ds, t);
@@ -93,7 +95,7 @@ public class Main {
}
/* TODO Print status updater statistics for *all* status updaters once
* all data writers have been separated. */
- for (DocumentWriter dw : new DocumentWriter[] { cdw, udw }) {
+ for (DocumentWriter dw : new DocumentWriter[] { wdw, cdw, udw }) {
String statsString = dw.getStatsString();
if (statsString != null) {
Logger.printStatistics(dw.getClass().getSimpleName(),
diff --git a/src/org/torproject/onionoo/WeightsDataWriter.java b/src/org/torproject/onionoo/WeightsDataWriter.java
deleted file mode 100644
index 0d7b815..0000000
--- a/src/org/torproject/onionoo/WeightsDataWriter.java
+++ /dev/null
@@ -1,582 +0,0 @@
-/* Copyright 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.onionoo;
-
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Set;
-import java.util.SortedMap;
-import java.util.SortedSet;
-import java.util.TimeZone;
-import java.util.TreeMap;
-import java.util.TreeSet;
-
-import org.torproject.descriptor.Descriptor;
-import org.torproject.descriptor.NetworkStatusEntry;
-import org.torproject.descriptor.RelayNetworkStatusConsensus;
-import org.torproject.descriptor.ServerDescriptor;
-
-public class WeightsDataWriter implements DescriptorListener,
- StatusUpdater, FingerprintListener, DocumentWriter {
-
- private DescriptorSource descriptorSource;
-
- private DocumentStore documentStore;
-
- private long now;
-
- public WeightsDataWriter(DescriptorSource descriptorSource,
- DocumentStore documentStore, Time time) {
- this.descriptorSource = descriptorSource;
- this.documentStore = documentStore;
- this.now = time.currentTimeMillis();
- this.registerDescriptorListeners();
- this.registerFingerprintListeners();
- }
-
- private void registerDescriptorListeners() {
- this.descriptorSource.registerDescriptorListener(this,
- DescriptorType.RELAY_CONSENSUSES);
- this.descriptorSource.registerDescriptorListener(this,
- DescriptorType.RELAY_SERVER_DESCRIPTORS);
- }
-
- private void registerFingerprintListeners() {
- this.descriptorSource.registerFingerprintListener(this,
- DescriptorType.RELAY_CONSENSUSES);
- this.descriptorSource.registerFingerprintListener(this,
- DescriptorType.RELAY_SERVER_DESCRIPTORS);
- }
-
- public void processDescriptor(Descriptor descriptor, boolean relay) {
- if (descriptor instanceof ServerDescriptor) {
- this.processRelayServerDescriptor((ServerDescriptor) descriptor);
- } else if (descriptor instanceof RelayNetworkStatusConsensus) {
- this.processRelayNetworkConsensus(
- (RelayNetworkStatusConsensus) descriptor);
- }
- }
-
- public void updateStatuses() {
- this.updateWeightsHistories();
- Logger.printStatusTime("Updated weights histories");
- this.updateWeightsStatuses();
- Logger.printStatusTime("Updated weights status files");
- }
-
- public void writeDocuments() {
- this.writeWeightsDataFiles();
- Logger.printStatusTime("Wrote weights document files");
- }
-
- private Set<RelayNetworkStatusConsensus> consensuses =
- new HashSet<RelayNetworkStatusConsensus>();
-
- private void processRelayNetworkConsensus(
- RelayNetworkStatusConsensus consensus) {
- this.consensuses.add(consensus);
- }
-
- private Set<String> updateWeightsStatuses = new HashSet<String>();
-
- private Set<String> updateWeightsDocuments = new HashSet<String>();
-
- private Map<String, Set<String>> descriptorDigestsByFingerprint =
- new HashMap<String, Set<String>>();
-
- private Map<String, Integer> advertisedBandwidths =
- new HashMap<String, Integer>();
-
- private void processRelayServerDescriptor(
- ServerDescriptor serverDescriptor) {
- String digest = serverDescriptor.getServerDescriptorDigest().
- toUpperCase();
- int advertisedBandwidth = Math.min(Math.min(
- serverDescriptor.getBandwidthBurst(),
- serverDescriptor.getBandwidthObserved()),
- serverDescriptor.getBandwidthRate());
- this.advertisedBandwidths.put(digest, advertisedBandwidth);
- String fingerprint = serverDescriptor.getFingerprint();
- this.updateWeightsStatuses.add(fingerprint);
- if (!this.descriptorDigestsByFingerprint.containsKey(
- fingerprint)) {
- this.descriptorDigestsByFingerprint.put(fingerprint,
- new HashSet<String>());
- }
- this.descriptorDigestsByFingerprint.get(fingerprint).add(digest);
- }
-
- private void updateWeightsHistories() {
- for (RelayNetworkStatusConsensus consensus : this.consensuses) {
- long validAfterMillis = consensus.getValidAfterMillis(),
- freshUntilMillis = consensus.getFreshUntilMillis();
- SortedMap<String, double[]> pathSelectionWeights =
- this.calculatePathSelectionProbabilities(consensus);
- this.updateWeightsHistory(validAfterMillis, freshUntilMillis,
- pathSelectionWeights);
- }
- }
-
- // TODO Use 4 workers once threading problems are solved.
- private static final int HISTORY_UPDATER_WORKERS_NUM = 1;
- private void updateWeightsHistory(long validAfterMillis,
- long freshUntilMillis,
- SortedMap<String, double[]> pathSelectionWeights) {
- List<HistoryUpdateWorker> historyUpdateWorkers =
- new ArrayList<HistoryUpdateWorker>();
- for (int i = 0; i < HISTORY_UPDATER_WORKERS_NUM; i++) {
- HistoryUpdateWorker historyUpdateWorker =
- new HistoryUpdateWorker(validAfterMillis, freshUntilMillis,
- pathSelectionWeights, this);
- historyUpdateWorkers.add(historyUpdateWorker);
- historyUpdateWorker.setDaemon(true);
- historyUpdateWorker.start();
- }
- for (HistoryUpdateWorker historyUpdateWorker : historyUpdateWorkers) {
- try {
- historyUpdateWorker.join();
- } catch (InterruptedException e) {
- /* This is not something that we can take care of. Just leave the
- * worker thread alone. */
- }
- }
- }
-
- private class HistoryUpdateWorker extends Thread {
- private long validAfterMillis;
- private long freshUntilMillis;
- private SortedMap<String, double[]> pathSelectionWeights;
- private WeightsDataWriter parent;
- public HistoryUpdateWorker(long validAfterMillis,
- long freshUntilMillis,
- SortedMap<String, double[]> pathSelectionWeights,
- WeightsDataWriter parent) {
- this.validAfterMillis = validAfterMillis;
- this.freshUntilMillis = freshUntilMillis;
- this.pathSelectionWeights = pathSelectionWeights;
- this.parent = parent;
- }
- public void run() {
- String fingerprint = null;
- double[] weights = null;
- do {
- fingerprint = null;
- synchronized (pathSelectionWeights) {
- if (!pathSelectionWeights.isEmpty()) {
- fingerprint = pathSelectionWeights.firstKey();
- weights = pathSelectionWeights.remove(fingerprint);
- }
- }
- if (fingerprint != null) {
- this.parent.addToHistory(fingerprint, this.validAfterMillis,
- this.freshUntilMillis, weights);
- }
- } while (fingerprint != null);
- }
- }
-
- private SortedMap<String, double[]> calculatePathSelectionProbabilities(
- RelayNetworkStatusConsensus consensus) {
- double wgg = 1.0, wgd = 1.0, wmg = 1.0, wmm = 1.0, wme = 1.0,
- wmd = 1.0, wee = 1.0, wed = 1.0;
- SortedMap<String, Integer> bandwidthWeights =
- consensus.getBandwidthWeights();
- if (bandwidthWeights != null) {
- SortedSet<String> missingWeightKeys = new TreeSet<String>(
- Arrays.asList("Wgg,Wgd,Wmg,Wmm,Wme,Wmd,Wee,Wed".split(",")));
- missingWeightKeys.removeAll(bandwidthWeights.keySet());
- if (missingWeightKeys.isEmpty()) {
- wgg = ((double) bandwidthWeights.get("Wgg")) / 10000.0;
- wgd = ((double) bandwidthWeights.get("Wgd")) / 10000.0;
- wmg = ((double) bandwidthWeights.get("Wmg")) / 10000.0;
- wmm = ((double) bandwidthWeights.get("Wmm")) / 10000.0;
- wme = ((double) bandwidthWeights.get("Wme")) / 10000.0;
- wmd = ((double) bandwidthWeights.get("Wmd")) / 10000.0;
- wee = ((double) bandwidthWeights.get("Wee")) / 10000.0;
- wed = ((double) bandwidthWeights.get("Wed")) / 10000.0;
- }
- }
- SortedMap<String, Double>
- advertisedBandwidths = new TreeMap<String, Double>(),
- consensusWeights = new TreeMap<String, Double>(),
- guardWeights = new TreeMap<String, Double>(),
- middleWeights = new TreeMap<String, Double>(),
- exitWeights = new TreeMap<String, Double>();
- double totalAdvertisedBandwidth = 0.0;
- double totalConsensusWeight = 0.0;
- double totalGuardWeight = 0.0;
- double totalMiddleWeight = 0.0;
- double totalExitWeight = 0.0;
- for (NetworkStatusEntry relay :
- consensus.getStatusEntries().values()) {
- String fingerprint = relay.getFingerprint();
- if (!relay.getFlags().contains("Running")) {
- continue;
- }
- boolean isExit = relay.getFlags().contains("Exit") &&
- !relay.getFlags().contains("BadExit");
- boolean isGuard = relay.getFlags().contains("Guard");
- String serverDescriptorDigest = relay.getDescriptor().
- toUpperCase();
- double advertisedBandwidth = 0.0;
- if (!this.advertisedBandwidths.containsKey(
- serverDescriptorDigest)) {
- WeightsStatus weightsStatus = this.documentStore.retrieve(
- WeightsStatus.class, true, fingerprint);
- if (weightsStatus != null) {
- if (!this.descriptorDigestsByFingerprint.containsKey(
- fingerprint)) {
- this.descriptorDigestsByFingerprint.put(fingerprint,
- new HashSet<String>());
- }
- this.descriptorDigestsByFingerprint.get(fingerprint).addAll(
- weightsStatus.advertisedBandwidths.keySet());
- this.advertisedBandwidths.putAll(
- weightsStatus.advertisedBandwidths);
- }
- }
- if (this.advertisedBandwidths.containsKey(
- serverDescriptorDigest)) {
- advertisedBandwidth = (double) this.advertisedBandwidths.get(
- serverDescriptorDigest);
- }
- double consensusWeight = (double) relay.getBandwidth();
- double guardWeight = (double) relay.getBandwidth();
- double middleWeight = (double) relay.getBandwidth();
- double exitWeight = (double) relay.getBandwidth();
- if (isGuard && isExit) {
- guardWeight *= wgd;
- middleWeight *= wmd;
- exitWeight *= wed;
- } else if (isGuard) {
- guardWeight *= wgg;
- middleWeight *= wmg;
- exitWeight = 0.0;
- } else if (isExit) {
- guardWeight = 0.0;
- middleWeight *= wme;
- exitWeight *= wee;
- } else {
- guardWeight = 0.0;
- middleWeight *= wmm;
- exitWeight = 0.0;
- }
- advertisedBandwidths.put(fingerprint, advertisedBandwidth);
- consensusWeights.put(fingerprint, consensusWeight);
- guardWeights.put(fingerprint, guardWeight);
- middleWeights.put(fingerprint, middleWeight);
- exitWeights.put(fingerprint, exitWeight);
- totalAdvertisedBandwidth += advertisedBandwidth;
- totalConsensusWeight += consensusWeight;
- totalGuardWeight += guardWeight;
- totalMiddleWeight += middleWeight;
- totalExitWeight += exitWeight;
- }
- SortedMap<String, double[]> pathSelectionProbabilities =
- new TreeMap<String, double[]>();
- for (NetworkStatusEntry relay :
- consensus.getStatusEntries().values()) {
- String fingerprint = relay.getFingerprint();
- double[] probabilities = new double[] {
- advertisedBandwidths.get(fingerprint)
- / totalAdvertisedBandwidth,
- consensusWeights.get(fingerprint) / totalConsensusWeight,
- guardWeights.get(fingerprint) / totalGuardWeight,
- middleWeights.get(fingerprint) / totalMiddleWeight,
- exitWeights.get(fingerprint) / totalExitWeight };
- pathSelectionProbabilities.put(fingerprint, probabilities);
- }
- return pathSelectionProbabilities;
- }
-
- private void addToHistory(String fingerprint, long validAfterMillis,
- long freshUntilMillis, double[] weights) {
- WeightsStatus weightsStatus = this.documentStore.retrieve(
- WeightsStatus.class, true, fingerprint);
- if (weightsStatus == null) {
- weightsStatus = new WeightsStatus();
- }
- SortedMap<long[], double[]> history = weightsStatus.history;
- long[] interval = new long[] { validAfterMillis, freshUntilMillis };
- if ((history.headMap(interval).isEmpty() ||
- history.headMap(interval).lastKey()[1] <= validAfterMillis) &&
- (history.tailMap(interval).isEmpty() ||
- history.tailMap(interval).firstKey()[0] >= freshUntilMillis)) {
- history.put(interval, weights);
- this.compressHistory(weightsStatus);
- this.addAdvertisedBandwidths(weightsStatus, fingerprint);
- this.documentStore.store(weightsStatus, fingerprint);
- this.updateWeightsStatuses.remove(fingerprint);
- }
- }
-
- private void addAdvertisedBandwidths(WeightsStatus weightsStatus,
- String fingerprint) {
- if (this.descriptorDigestsByFingerprint.containsKey(fingerprint)) {
- for (String descriptorDigest :
- this.descriptorDigestsByFingerprint.get(fingerprint)) {
- if (this.advertisedBandwidths.containsKey(descriptorDigest)) {
- int advertisedBandwidth =
- this.advertisedBandwidths.get(descriptorDigest);
- weightsStatus.advertisedBandwidths.put(descriptorDigest,
- advertisedBandwidth);
- }
- }
- }
- }
-
- private void compressHistory(WeightsStatus weightsStatus) {
- SortedMap<long[], double[]> history = weightsStatus.history;
- SortedMap<long[], double[]> compressedHistory =
- new TreeMap<long[], double[]>(history.comparator());
- long lastStartMillis = 0L, lastEndMillis = 0L;
- double[] lastWeights = null;
- SimpleDateFormat dateTimeFormat = new SimpleDateFormat("yyyy-MM");
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- String lastMonthString = "1970-01";
- for (Map.Entry<long[], double[]> e : history.entrySet()) {
- long startMillis = e.getKey()[0], endMillis = e.getKey()[1];
- double[] weights = e.getValue();
- long intervalLengthMillis;
- if (this.now - endMillis <= 7L * 24L * 60L * 60L * 1000L) {
- intervalLengthMillis = 60L * 60L * 1000L;
- } else if (this.now - endMillis <= 31L * 24L * 60L * 60L * 1000L) {
- intervalLengthMillis = 4L * 60L * 60L * 1000L;
- } else if (this.now - endMillis <= 92L * 24L * 60L * 60L * 1000L) {
- intervalLengthMillis = 12L * 60L * 60L * 1000L;
- } else if (this.now - endMillis <= 366L * 24L * 60L * 60L * 1000L) {
- intervalLengthMillis = 2L * 24L * 60L * 60L * 1000L;
- } else {
- intervalLengthMillis = 10L * 24L * 60L * 60L * 1000L;
- }
- String monthString = dateTimeFormat.format(startMillis);
- if (lastEndMillis == startMillis &&
- ((lastEndMillis - 1L) / intervalLengthMillis) ==
- ((endMillis - 1L) / intervalLengthMillis) &&
- lastMonthString.equals(monthString)) {
- double lastIntervalInHours = (double) ((lastEndMillis
- - lastStartMillis) / 60L * 60L * 1000L);
- double currentIntervalInHours = (double) ((endMillis
- - startMillis) / 60L * 60L * 1000L);
- double newIntervalInHours = (double) ((endMillis
- - lastStartMillis) / 60L * 60L * 1000L);
- for (int i = 0; i < lastWeights.length; i++) {
- lastWeights[i] *= lastIntervalInHours;
- lastWeights[i] += weights[i] * currentIntervalInHours;
- lastWeights[i] /= newIntervalInHours;
- }
- lastEndMillis = endMillis;
- } else {
- if (lastStartMillis > 0L) {
- compressedHistory.put(new long[] { lastStartMillis,
- lastEndMillis }, lastWeights);
- }
- lastStartMillis = startMillis;
- lastEndMillis = endMillis;
- lastWeights = weights;
- }
- lastMonthString = monthString;
- }
- if (lastStartMillis > 0L) {
- compressedHistory.put(new long[] { lastStartMillis, lastEndMillis },
- lastWeights);
- }
- weightsStatus.history = compressedHistory;
- }
-
- public void processFingerprints(SortedSet<String> fingerprints,
- boolean relay) {
- if (relay) {
- this.updateWeightsDocuments.addAll(fingerprints);
- }
- }
-
- private void writeWeightsDataFiles() {
- for (String fingerprint : this.updateWeightsDocuments) {
- WeightsStatus weightsStatus = this.documentStore.retrieve(
- WeightsStatus.class, true, fingerprint);
- if (weightsStatus == null) {
- continue;
- }
- SortedMap<long[], double[]> history = weightsStatus.history;
- WeightsDocument weightsDocument = new WeightsDocument();
- weightsDocument.documentString = this.formatHistoryString(
- fingerprint, history);
- this.documentStore.store(weightsDocument, fingerprint);
- }
- Logger.printStatusTime("Wrote weights document files");
- }
-
- private String[] graphTypes = new String[] {
- "advertised_bandwidth_fraction",
- "consensus_weight_fraction",
- "guard_probability",
- "middle_probability",
- "exit_probability"
- };
-
- private String[] graphNames = new String[] {
- "1_week",
- "1_month",
- "3_months",
- "1_year",
- "5_years" };
-
- private long[] graphIntervals = new long[] {
- 7L * 24L * 60L * 60L * 1000L,
- 31L * 24L * 60L * 60L * 1000L,
- 92L * 24L * 60L * 60L * 1000L,
- 366L * 24L * 60L * 60L * 1000L,
- 5L * 366L * 24L * 60L * 60L * 1000L };
-
- private long[] dataPointIntervals = new long[] {
- 60L * 60L * 1000L,
- 4L * 60L * 60L * 1000L,
- 12L * 60L * 60L * 1000L,
- 2L * 24L * 60L * 60L * 1000L,
- 10L * 24L * 60L * 60L * 1000L };
-
- private String formatHistoryString(String fingerprint,
- SortedMap<long[], double[]> history) {
- StringBuilder sb = new StringBuilder();
- sb.append("{\"fingerprint\":\"" + fingerprint + "\"");
- for (int graphTypeIndex = 0; graphTypeIndex < this.graphTypes.length;
- graphTypeIndex++) {
- String graphType = this.graphTypes[graphTypeIndex];
- sb.append(",\n\"" + graphType + "\":{");
- int graphIntervalsWritten = 0;
- for (int graphIntervalIndex = 0; graphIntervalIndex <
- this.graphIntervals.length; graphIntervalIndex++) {
- String timeline = this.formatTimeline(graphTypeIndex,
- graphIntervalIndex, history);
- if (timeline != null) {
- sb.append((graphIntervalsWritten++ > 0 ? "," : "") + "\n"
- + timeline);
- }
- }
- sb.append("}");
- }
- sb.append("\n}\n");
- return sb.toString();
- }
-
- private String formatTimeline(int graphTypeIndex,
- int graphIntervalIndex, SortedMap<long[], double[]> history) {
- String graphName = this.graphNames[graphIntervalIndex];
- long graphInterval = this.graphIntervals[graphIntervalIndex];
- long dataPointInterval =
- this.dataPointIntervals[graphIntervalIndex];
- List<Double> dataPoints = new ArrayList<Double>();
- long intervalStartMillis = ((this.now - graphInterval)
- / dataPointInterval) * dataPointInterval;
- long totalMillis = 0L;
- double totalWeightTimesMillis = 0.0;
- for (Map.Entry<long[], double[]> e : history.entrySet()) {
- long startMillis = e.getKey()[0], endMillis = e.getKey()[1];
- double weight = e.getValue()[graphTypeIndex];
- if (endMillis < intervalStartMillis) {
- continue;
- }
- while ((intervalStartMillis / dataPointInterval) !=
- (endMillis / dataPointInterval)) {
- dataPoints.add(totalMillis * 5L < dataPointInterval
- ? -1.0 : totalWeightTimesMillis / (double) totalMillis);
- totalWeightTimesMillis = 0.0;
- totalMillis = 0L;
- intervalStartMillis += dataPointInterval;
- }
- totalWeightTimesMillis += weight
- * ((double) (endMillis - startMillis));
- totalMillis += (endMillis - startMillis);
- }
- dataPoints.add(totalMillis * 5L < dataPointInterval
- ? -1.0 : totalWeightTimesMillis / (double) totalMillis);
- double maxValue = 0.0;
- int firstNonNullIndex = -1, lastNonNullIndex = -1;
- for (int dataPointIndex = 0; dataPointIndex < dataPoints.size();
- dataPointIndex++) {
- double dataPoint = dataPoints.get(dataPointIndex);
- if (dataPoint >= 0.0) {
- if (firstNonNullIndex < 0) {
- firstNonNullIndex = dataPointIndex;
- }
- lastNonNullIndex = dataPointIndex;
- if (dataPoint > maxValue) {
- maxValue = dataPoint;
- }
- }
- }
- if (firstNonNullIndex < 0) {
- return null;
- }
- long firstDataPointMillis = (((this.now - graphInterval)
- / dataPointInterval) + firstNonNullIndex) * dataPointInterval
- + dataPointInterval / 2L;
- if (graphIntervalIndex > 0 && firstDataPointMillis >=
- this.now - graphIntervals[graphIntervalIndex - 1]) {
- /* Skip weights history object, because it doesn't contain
- * anything new that wasn't already contained in the last
- * weights history object(s). */
- return null;
- }
- long lastDataPointMillis = firstDataPointMillis
- + (lastNonNullIndex - firstNonNullIndex) * dataPointInterval;
- double factor = ((double) maxValue) / 999.0;
- int count = lastNonNullIndex - firstNonNullIndex + 1;
- StringBuilder sb = new StringBuilder();
- SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
- "yyyy-MM-dd HH:mm:ss");
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- sb.append("\"" + graphName + "\":{"
- + "\"first\":\"" + dateTimeFormat.format(firstDataPointMillis)
- + "\",\"last\":\"" + dateTimeFormat.format(lastDataPointMillis)
- + "\",\"interval\":" + String.valueOf(dataPointInterval / 1000L)
- + ",\"factor\":" + String.format(Locale.US, "%.9f", factor)
- + ",\"count\":" + String.valueOf(count) + ",\"values\":[");
- int dataPointsWritten = 0, previousNonNullIndex = -2;
- boolean foundTwoAdjacentDataPoints = false;
- for (int dataPointIndex = firstNonNullIndex; dataPointIndex <=
- lastNonNullIndex; dataPointIndex++) {
- double dataPoint = dataPoints.get(dataPointIndex);
- if (dataPoint >= 0.0) {
- if (dataPointIndex - previousNonNullIndex == 1) {
- foundTwoAdjacentDataPoints = true;
- }
- previousNonNullIndex = dataPointIndex;
- }
- sb.append((dataPointsWritten++ > 0 ? "," : "")
- + (dataPoint < 0.0 ? "null" :
- String.valueOf((long) ((dataPoint * 999.0) / maxValue))));
- }
- sb.append("]}");
- if (foundTwoAdjacentDataPoints) {
- return sb.toString();
- } else {
- return null;
- }
- }
-
- private void updateWeightsStatuses() {
- for (String fingerprint : this.updateWeightsStatuses) {
- WeightsStatus weightsStatus = this.documentStore.retrieve(
- WeightsStatus.class, true, fingerprint);
- if (weightsStatus == null) {
- weightsStatus = new WeightsStatus();
- }
- this.addAdvertisedBandwidths(weightsStatus, fingerprint);
- this.documentStore.store(weightsStatus, fingerprint);
- }
- }
-
- public String getStatsString() {
- /* TODO Add statistics string. */
- return null;
- }
-}
-
diff --git a/src/org/torproject/onionoo/WeightsDocumentWriter.java b/src/org/torproject/onionoo/WeightsDocumentWriter.java
new file mode 100644
index 0000000..9e5355f
--- /dev/null
+++ b/src/org/torproject/onionoo/WeightsDocumentWriter.java
@@ -0,0 +1,222 @@
+/* Copyright 2012--2014 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.onionoo;
+
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.SortedSet;
+import java.util.TimeZone;
+
+public class WeightsDocumentWriter implements FingerprintListener,
+ DocumentWriter {
+
+ private DescriptorSource descriptorSource;
+
+ private DocumentStore documentStore;
+
+ private long now;
+
+ public WeightsDocumentWriter(DescriptorSource descriptorSource,
+ DocumentStore documentStore, Time time) {
+ this.descriptorSource = descriptorSource;
+ this.documentStore = documentStore;
+ this.now = time.currentTimeMillis();
+ this.registerFingerprintListeners();
+ }
+
+ private void registerFingerprintListeners() {
+ this.descriptorSource.registerFingerprintListener(this,
+ DescriptorType.RELAY_CONSENSUSES);
+ this.descriptorSource.registerFingerprintListener(this,
+ DescriptorType.RELAY_SERVER_DESCRIPTORS);
+ }
+
+ private Set<String> updateWeightsDocuments = new HashSet<String>();
+
+ public void processFingerprints(SortedSet<String> fingerprints,
+ boolean relay) {
+ if (relay) {
+ this.updateWeightsDocuments.addAll(fingerprints);
+ }
+ }
+
+ public void writeDocuments() {
+ this.writeWeightsDataFiles();
+ Logger.printStatusTime("Wrote weights document files");
+ }
+
+ private void writeWeightsDataFiles() {
+ for (String fingerprint : this.updateWeightsDocuments) {
+ WeightsStatus weightsStatus = this.documentStore.retrieve(
+ WeightsStatus.class, true, fingerprint);
+ if (weightsStatus == null) {
+ continue;
+ }
+ SortedMap<long[], double[]> history = weightsStatus.history;
+ WeightsDocument weightsDocument = new WeightsDocument();
+ weightsDocument.documentString = this.formatHistoryString(
+ fingerprint, history);
+ this.documentStore.store(weightsDocument, fingerprint);
+ }
+ }
+
+ private String[] graphTypes = new String[] {
+ "advertised_bandwidth_fraction",
+ "consensus_weight_fraction",
+ "guard_probability",
+ "middle_probability",
+ "exit_probability"
+ };
+
+ private String[] graphNames = new String[] {
+ "1_week",
+ "1_month",
+ "3_months",
+ "1_year",
+ "5_years" };
+
+ private long[] graphIntervals = new long[] {
+ 7L * 24L * 60L * 60L * 1000L,
+ 31L * 24L * 60L * 60L * 1000L,
+ 92L * 24L * 60L * 60L * 1000L,
+ 366L * 24L * 60L * 60L * 1000L,
+ 5L * 366L * 24L * 60L * 60L * 1000L };
+
+ private long[] dataPointIntervals = new long[] {
+ 60L * 60L * 1000L,
+ 4L * 60L * 60L * 1000L,
+ 12L * 60L * 60L * 1000L,
+ 2L * 24L * 60L * 60L * 1000L,
+ 10L * 24L * 60L * 60L * 1000L };
+
+ private String formatHistoryString(String fingerprint,
+ SortedMap<long[], double[]> history) {
+ StringBuilder sb = new StringBuilder();
+ sb.append("{\"fingerprint\":\"" + fingerprint + "\"");
+ for (int graphTypeIndex = 0; graphTypeIndex < this.graphTypes.length;
+ graphTypeIndex++) {
+ String graphType = this.graphTypes[graphTypeIndex];
+ sb.append(",\n\"" + graphType + "\":{");
+ int graphIntervalsWritten = 0;
+ for (int graphIntervalIndex = 0; graphIntervalIndex <
+ this.graphIntervals.length; graphIntervalIndex++) {
+ String timeline = this.formatTimeline(graphTypeIndex,
+ graphIntervalIndex, history);
+ if (timeline != null) {
+ sb.append((graphIntervalsWritten++ > 0 ? "," : "") + "\n"
+ + timeline);
+ }
+ }
+ sb.append("}");
+ }
+ sb.append("\n}\n");
+ return sb.toString();
+ }
+
+ private String formatTimeline(int graphTypeIndex,
+ int graphIntervalIndex, SortedMap<long[], double[]> history) {
+ String graphName = this.graphNames[graphIntervalIndex];
+ long graphInterval = this.graphIntervals[graphIntervalIndex];
+ long dataPointInterval =
+ this.dataPointIntervals[graphIntervalIndex];
+ List<Double> dataPoints = new ArrayList<Double>();
+ long intervalStartMillis = ((this.now - graphInterval)
+ / dataPointInterval) * dataPointInterval;
+ long totalMillis = 0L;
+ double totalWeightTimesMillis = 0.0;
+ for (Map.Entry<long[], double[]> e : history.entrySet()) {
+ long startMillis = e.getKey()[0], endMillis = e.getKey()[1];
+ double weight = e.getValue()[graphTypeIndex];
+ if (endMillis < intervalStartMillis) {
+ continue;
+ }
+ while ((intervalStartMillis / dataPointInterval) !=
+ (endMillis / dataPointInterval)) {
+ dataPoints.add(totalMillis * 5L < dataPointInterval
+ ? -1.0 : totalWeightTimesMillis / (double) totalMillis);
+ totalWeightTimesMillis = 0.0;
+ totalMillis = 0L;
+ intervalStartMillis += dataPointInterval;
+ }
+ totalWeightTimesMillis += weight
+ * ((double) (endMillis - startMillis));
+ totalMillis += (endMillis - startMillis);
+ }
+ dataPoints.add(totalMillis * 5L < dataPointInterval
+ ? -1.0 : totalWeightTimesMillis / (double) totalMillis);
+ double maxValue = 0.0;
+ int firstNonNullIndex = -1, lastNonNullIndex = -1;
+ for (int dataPointIndex = 0; dataPointIndex < dataPoints.size();
+ dataPointIndex++) {
+ double dataPoint = dataPoints.get(dataPointIndex);
+ if (dataPoint >= 0.0) {
+ if (firstNonNullIndex < 0) {
+ firstNonNullIndex = dataPointIndex;
+ }
+ lastNonNullIndex = dataPointIndex;
+ if (dataPoint > maxValue) {
+ maxValue = dataPoint;
+ }
+ }
+ }
+ if (firstNonNullIndex < 0) {
+ return null;
+ }
+ long firstDataPointMillis = (((this.now - graphInterval)
+ / dataPointInterval) + firstNonNullIndex) * dataPointInterval
+ + dataPointInterval / 2L;
+ if (graphIntervalIndex > 0 && firstDataPointMillis >=
+ this.now - graphIntervals[graphIntervalIndex - 1]) {
+ /* Skip weights history object, because it doesn't contain
+ * anything new that wasn't already contained in the last
+ * weights history object(s). */
+ return null;
+ }
+ long lastDataPointMillis = firstDataPointMillis
+ + (lastNonNullIndex - firstNonNullIndex) * dataPointInterval;
+ double factor = ((double) maxValue) / 999.0;
+ int count = lastNonNullIndex - firstNonNullIndex + 1;
+ StringBuilder sb = new StringBuilder();
+ SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
+ "yyyy-MM-dd HH:mm:ss");
+ dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ sb.append("\"" + graphName + "\":{"
+ + "\"first\":\"" + dateTimeFormat.format(firstDataPointMillis)
+ + "\",\"last\":\"" + dateTimeFormat.format(lastDataPointMillis)
+ + "\",\"interval\":" + String.valueOf(dataPointInterval / 1000L)
+ + ",\"factor\":" + String.format(Locale.US, "%.9f", factor)
+ + ",\"count\":" + String.valueOf(count) + ",\"values\":[");
+ int dataPointsWritten = 0, previousNonNullIndex = -2;
+ boolean foundTwoAdjacentDataPoints = false;
+ for (int dataPointIndex = firstNonNullIndex; dataPointIndex <=
+ lastNonNullIndex; dataPointIndex++) {
+ double dataPoint = dataPoints.get(dataPointIndex);
+ if (dataPoint >= 0.0) {
+ if (dataPointIndex - previousNonNullIndex == 1) {
+ foundTwoAdjacentDataPoints = true;
+ }
+ previousNonNullIndex = dataPointIndex;
+ }
+ sb.append((dataPointsWritten++ > 0 ? "," : "")
+ + (dataPoint < 0.0 ? "null" :
+ String.valueOf((long) ((dataPoint * 999.0) / maxValue))));
+ }
+ sb.append("]}");
+ if (foundTwoAdjacentDataPoints) {
+ return sb.toString();
+ } else {
+ return null;
+ }
+ }
+
+ public String getStatsString() {
+ /* TODO Add statistics string. */
+ return null;
+ }
+}
diff --git a/src/org/torproject/onionoo/WeightsStatusUpdater.java b/src/org/torproject/onionoo/WeightsStatusUpdater.java
new file mode 100644
index 0000000..5e890ef
--- /dev/null
+++ b/src/org/torproject/onionoo/WeightsStatusUpdater.java
@@ -0,0 +1,394 @@
+/* Copyright 2012--2014 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.onionoo;
+
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.SortedSet;
+import java.util.TimeZone;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
+import org.torproject.descriptor.Descriptor;
+import org.torproject.descriptor.NetworkStatusEntry;
+import org.torproject.descriptor.RelayNetworkStatusConsensus;
+import org.torproject.descriptor.ServerDescriptor;
+
+public class WeightsStatusUpdater implements DescriptorListener,
+ StatusUpdater {
+
+ private DescriptorSource descriptorSource;
+
+ private DocumentStore documentStore;
+
+ private long now;
+
+ public WeightsStatusUpdater(DescriptorSource descriptorSource,
+ DocumentStore documentStore, Time time) {
+ this.descriptorSource = descriptorSource;
+ this.documentStore = documentStore;
+ this.now = time.currentTimeMillis();
+ this.registerDescriptorListeners();
+ }
+
+ private void registerDescriptorListeners() {
+ this.descriptorSource.registerDescriptorListener(this,
+ DescriptorType.RELAY_CONSENSUSES);
+ this.descriptorSource.registerDescriptorListener(this,
+ DescriptorType.RELAY_SERVER_DESCRIPTORS);
+ }
+
+ public void processDescriptor(Descriptor descriptor, boolean relay) {
+ if (descriptor instanceof ServerDescriptor) {
+ this.processRelayServerDescriptor((ServerDescriptor) descriptor);
+ } else if (descriptor instanceof RelayNetworkStatusConsensus) {
+ this.processRelayNetworkConsensus(
+ (RelayNetworkStatusConsensus) descriptor);
+ }
+ }
+
+ public void updateStatuses() {
+ this.updateWeightsHistories();
+ Logger.printStatusTime("Updated weights histories");
+ this.updateWeightsStatuses();
+ Logger.printStatusTime("Updated weights status files");
+ }
+
+ private Set<RelayNetworkStatusConsensus> consensuses =
+ new HashSet<RelayNetworkStatusConsensus>();
+
+ private void processRelayNetworkConsensus(
+ RelayNetworkStatusConsensus consensus) {
+ this.consensuses.add(consensus);
+ }
+
+ private Set<String> updateWeightsStatuses = new HashSet<String>();
+
+ private Map<String, Set<String>> descriptorDigestsByFingerprint =
+ new HashMap<String, Set<String>>();
+
+ private Map<String, Integer> advertisedBandwidths =
+ new HashMap<String, Integer>();
+
+ private void processRelayServerDescriptor(
+ ServerDescriptor serverDescriptor) {
+ String digest = serverDescriptor.getServerDescriptorDigest().
+ toUpperCase();
+ int advertisedBandwidth = Math.min(Math.min(
+ serverDescriptor.getBandwidthBurst(),
+ serverDescriptor.getBandwidthObserved()),
+ serverDescriptor.getBandwidthRate());
+ this.advertisedBandwidths.put(digest, advertisedBandwidth);
+ String fingerprint = serverDescriptor.getFingerprint();
+ this.updateWeightsStatuses.add(fingerprint);
+ if (!this.descriptorDigestsByFingerprint.containsKey(
+ fingerprint)) {
+ this.descriptorDigestsByFingerprint.put(fingerprint,
+ new HashSet<String>());
+ }
+ this.descriptorDigestsByFingerprint.get(fingerprint).add(digest);
+ }
+
+ private void updateWeightsHistories() {
+ for (RelayNetworkStatusConsensus consensus : this.consensuses) {
+ long validAfterMillis = consensus.getValidAfterMillis(),
+ freshUntilMillis = consensus.getFreshUntilMillis();
+ SortedMap<String, double[]> pathSelectionWeights =
+ this.calculatePathSelectionProbabilities(consensus);
+ this.updateWeightsHistory(validAfterMillis, freshUntilMillis,
+ pathSelectionWeights);
+ }
+ }
+
+ // TODO Use 4 workers once threading problems are solved.
+ private static final int HISTORY_UPDATER_WORKERS_NUM = 1;
+ private void updateWeightsHistory(long validAfterMillis,
+ long freshUntilMillis,
+ SortedMap<String, double[]> pathSelectionWeights) {
+ List<HistoryUpdateWorker> historyUpdateWorkers =
+ new ArrayList<HistoryUpdateWorker>();
+ for (int i = 0; i < HISTORY_UPDATER_WORKERS_NUM; i++) {
+ HistoryUpdateWorker historyUpdateWorker =
+ new HistoryUpdateWorker(validAfterMillis, freshUntilMillis,
+ pathSelectionWeights, this);
+ historyUpdateWorkers.add(historyUpdateWorker);
+ historyUpdateWorker.setDaemon(true);
+ historyUpdateWorker.start();
+ }
+ for (HistoryUpdateWorker historyUpdateWorker : historyUpdateWorkers) {
+ try {
+ historyUpdateWorker.join();
+ } catch (InterruptedException e) {
+ /* This is not something that we can take care of. Just leave the
+ * worker thread alone. */
+ }
+ }
+ }
+
+ private class HistoryUpdateWorker extends Thread {
+ private long validAfterMillis;
+ private long freshUntilMillis;
+ private SortedMap<String, double[]> pathSelectionWeights;
+ private WeightsStatusUpdater parent;
+ public HistoryUpdateWorker(long validAfterMillis,
+ long freshUntilMillis,
+ SortedMap<String, double[]> pathSelectionWeights,
+ WeightsStatusUpdater parent) {
+ this.validAfterMillis = validAfterMillis;
+ this.freshUntilMillis = freshUntilMillis;
+ this.pathSelectionWeights = pathSelectionWeights;
+ this.parent = parent;
+ }
+ public void run() {
+ String fingerprint = null;
+ double[] weights = null;
+ do {
+ fingerprint = null;
+ synchronized (pathSelectionWeights) {
+ if (!pathSelectionWeights.isEmpty()) {
+ fingerprint = pathSelectionWeights.firstKey();
+ weights = pathSelectionWeights.remove(fingerprint);
+ }
+ }
+ if (fingerprint != null) {
+ this.parent.addToHistory(fingerprint, this.validAfterMillis,
+ this.freshUntilMillis, weights);
+ }
+ } while (fingerprint != null);
+ }
+ }
+
+ private SortedMap<String, double[]> calculatePathSelectionProbabilities(
+ RelayNetworkStatusConsensus consensus) {
+ double wgg = 1.0, wgd = 1.0, wmg = 1.0, wmm = 1.0, wme = 1.0,
+ wmd = 1.0, wee = 1.0, wed = 1.0;
+ SortedMap<String, Integer> bandwidthWeights =
+ consensus.getBandwidthWeights();
+ if (bandwidthWeights != null) {
+ SortedSet<String> missingWeightKeys = new TreeSet<String>(
+ Arrays.asList("Wgg,Wgd,Wmg,Wmm,Wme,Wmd,Wee,Wed".split(",")));
+ missingWeightKeys.removeAll(bandwidthWeights.keySet());
+ if (missingWeightKeys.isEmpty()) {
+ wgg = ((double) bandwidthWeights.get("Wgg")) / 10000.0;
+ wgd = ((double) bandwidthWeights.get("Wgd")) / 10000.0;
+ wmg = ((double) bandwidthWeights.get("Wmg")) / 10000.0;
+ wmm = ((double) bandwidthWeights.get("Wmm")) / 10000.0;
+ wme = ((double) bandwidthWeights.get("Wme")) / 10000.0;
+ wmd = ((double) bandwidthWeights.get("Wmd")) / 10000.0;
+ wee = ((double) bandwidthWeights.get("Wee")) / 10000.0;
+ wed = ((double) bandwidthWeights.get("Wed")) / 10000.0;
+ }
+ }
+ SortedMap<String, Double>
+ advertisedBandwidths = new TreeMap<String, Double>(),
+ consensusWeights = new TreeMap<String, Double>(),
+ guardWeights = new TreeMap<String, Double>(),
+ middleWeights = new TreeMap<String, Double>(),
+ exitWeights = new TreeMap<String, Double>();
+ double totalAdvertisedBandwidth = 0.0;
+ double totalConsensusWeight = 0.0;
+ double totalGuardWeight = 0.0;
+ double totalMiddleWeight = 0.0;
+ double totalExitWeight = 0.0;
+ for (NetworkStatusEntry relay :
+ consensus.getStatusEntries().values()) {
+ String fingerprint = relay.getFingerprint();
+ if (!relay.getFlags().contains("Running")) {
+ continue;
+ }
+ boolean isExit = relay.getFlags().contains("Exit") &&
+ !relay.getFlags().contains("BadExit");
+ boolean isGuard = relay.getFlags().contains("Guard");
+ String serverDescriptorDigest = relay.getDescriptor().
+ toUpperCase();
+ double advertisedBandwidth = 0.0;
+ if (!this.advertisedBandwidths.containsKey(
+ serverDescriptorDigest)) {
+ WeightsStatus weightsStatus = this.documentStore.retrieve(
+ WeightsStatus.class, true, fingerprint);
+ if (weightsStatus != null) {
+ if (!this.descriptorDigestsByFingerprint.containsKey(
+ fingerprint)) {
+ this.descriptorDigestsByFingerprint.put(fingerprint,
+ new HashSet<String>());
+ }
+ this.descriptorDigestsByFingerprint.get(fingerprint).addAll(
+ weightsStatus.advertisedBandwidths.keySet());
+ this.advertisedBandwidths.putAll(
+ weightsStatus.advertisedBandwidths);
+ }
+ }
+ if (this.advertisedBandwidths.containsKey(
+ serverDescriptorDigest)) {
+ advertisedBandwidth = (double) this.advertisedBandwidths.get(
+ serverDescriptorDigest);
+ }
+ double consensusWeight = (double) relay.getBandwidth();
+ double guardWeight = (double) relay.getBandwidth();
+ double middleWeight = (double) relay.getBandwidth();
+ double exitWeight = (double) relay.getBandwidth();
+ if (isGuard && isExit) {
+ guardWeight *= wgd;
+ middleWeight *= wmd;
+ exitWeight *= wed;
+ } else if (isGuard) {
+ guardWeight *= wgg;
+ middleWeight *= wmg;
+ exitWeight = 0.0;
+ } else if (isExit) {
+ guardWeight = 0.0;
+ middleWeight *= wme;
+ exitWeight *= wee;
+ } else {
+ guardWeight = 0.0;
+ middleWeight *= wmm;
+ exitWeight = 0.0;
+ }
+ advertisedBandwidths.put(fingerprint, advertisedBandwidth);
+ consensusWeights.put(fingerprint, consensusWeight);
+ guardWeights.put(fingerprint, guardWeight);
+ middleWeights.put(fingerprint, middleWeight);
+ exitWeights.put(fingerprint, exitWeight);
+ totalAdvertisedBandwidth += advertisedBandwidth;
+ totalConsensusWeight += consensusWeight;
+ totalGuardWeight += guardWeight;
+ totalMiddleWeight += middleWeight;
+ totalExitWeight += exitWeight;
+ }
+ SortedMap<String, double[]> pathSelectionProbabilities =
+ new TreeMap<String, double[]>();
+ for (NetworkStatusEntry relay :
+ consensus.getStatusEntries().values()) {
+ String fingerprint = relay.getFingerprint();
+ double[] probabilities = new double[] {
+ advertisedBandwidths.get(fingerprint)
+ / totalAdvertisedBandwidth,
+ consensusWeights.get(fingerprint) / totalConsensusWeight,
+ guardWeights.get(fingerprint) / totalGuardWeight,
+ middleWeights.get(fingerprint) / totalMiddleWeight,
+ exitWeights.get(fingerprint) / totalExitWeight };
+ pathSelectionProbabilities.put(fingerprint, probabilities);
+ }
+ return pathSelectionProbabilities;
+ }
+
+ private void addToHistory(String fingerprint, long validAfterMillis,
+ long freshUntilMillis, double[] weights) {
+ WeightsStatus weightsStatus = this.documentStore.retrieve(
+ WeightsStatus.class, true, fingerprint);
+ if (weightsStatus == null) {
+ weightsStatus = new WeightsStatus();
+ }
+ SortedMap<long[], double[]> history = weightsStatus.history;
+ long[] interval = new long[] { validAfterMillis, freshUntilMillis };
+ if ((history.headMap(interval).isEmpty() ||
+ history.headMap(interval).lastKey()[1] <= validAfterMillis) &&
+ (history.tailMap(interval).isEmpty() ||
+ history.tailMap(interval).firstKey()[0] >= freshUntilMillis)) {
+ history.put(interval, weights);
+ this.compressHistory(weightsStatus);
+ this.addAdvertisedBandwidths(weightsStatus, fingerprint);
+ this.documentStore.store(weightsStatus, fingerprint);
+ this.updateWeightsStatuses.remove(fingerprint);
+ }
+ }
+
+ private void addAdvertisedBandwidths(WeightsStatus weightsStatus,
+ String fingerprint) {
+ if (this.descriptorDigestsByFingerprint.containsKey(fingerprint)) {
+ for (String descriptorDigest :
+ this.descriptorDigestsByFingerprint.get(fingerprint)) {
+ if (this.advertisedBandwidths.containsKey(descriptorDigest)) {
+ int advertisedBandwidth =
+ this.advertisedBandwidths.get(descriptorDigest);
+ weightsStatus.advertisedBandwidths.put(descriptorDigest,
+ advertisedBandwidth);
+ }
+ }
+ }
+ }
+
+ private void compressHistory(WeightsStatus weightsStatus) {
+ SortedMap<long[], double[]> history = weightsStatus.history;
+ SortedMap<long[], double[]> compressedHistory =
+ new TreeMap<long[], double[]>(history.comparator());
+ long lastStartMillis = 0L, lastEndMillis = 0L;
+ double[] lastWeights = null;
+ SimpleDateFormat dateTimeFormat = new SimpleDateFormat("yyyy-MM");
+ dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ String lastMonthString = "1970-01";
+ for (Map.Entry<long[], double[]> e : history.entrySet()) {
+ long startMillis = e.getKey()[0], endMillis = e.getKey()[1];
+ double[] weights = e.getValue();
+ long intervalLengthMillis;
+ if (this.now - endMillis <= 7L * 24L * 60L * 60L * 1000L) {
+ intervalLengthMillis = 60L * 60L * 1000L;
+ } else if (this.now - endMillis <= 31L * 24L * 60L * 60L * 1000L) {
+ intervalLengthMillis = 4L * 60L * 60L * 1000L;
+ } else if (this.now - endMillis <= 92L * 24L * 60L * 60L * 1000L) {
+ intervalLengthMillis = 12L * 60L * 60L * 1000L;
+ } else if (this.now - endMillis <= 366L * 24L * 60L * 60L * 1000L) {
+ intervalLengthMillis = 2L * 24L * 60L * 60L * 1000L;
+ } else {
+ intervalLengthMillis = 10L * 24L * 60L * 60L * 1000L;
+ }
+ String monthString = dateTimeFormat.format(startMillis);
+ if (lastEndMillis == startMillis &&
+ ((lastEndMillis - 1L) / intervalLengthMillis) ==
+ ((endMillis - 1L) / intervalLengthMillis) &&
+ lastMonthString.equals(monthString)) {
+ double lastIntervalInHours = (double) ((lastEndMillis
+ - lastStartMillis) / 60L * 60L * 1000L);
+ double currentIntervalInHours = (double) ((endMillis
+ - startMillis) / 60L * 60L * 1000L);
+ double newIntervalInHours = (double) ((endMillis
+ - lastStartMillis) / 60L * 60L * 1000L);
+ for (int i = 0; i < lastWeights.length; i++) {
+ lastWeights[i] *= lastIntervalInHours;
+ lastWeights[i] += weights[i] * currentIntervalInHours;
+ lastWeights[i] /= newIntervalInHours;
+ }
+ lastEndMillis = endMillis;
+ } else {
+ if (lastStartMillis > 0L) {
+ compressedHistory.put(new long[] { lastStartMillis,
+ lastEndMillis }, lastWeights);
+ }
+ lastStartMillis = startMillis;
+ lastEndMillis = endMillis;
+ lastWeights = weights;
+ }
+ lastMonthString = monthString;
+ }
+ if (lastStartMillis > 0L) {
+ compressedHistory.put(new long[] { lastStartMillis, lastEndMillis },
+ lastWeights);
+ }
+ weightsStatus.history = compressedHistory;
+ }
+
+ private void updateWeightsStatuses() {
+ for (String fingerprint : this.updateWeightsStatuses) {
+ WeightsStatus weightsStatus = this.documentStore.retrieve(
+ WeightsStatus.class, true, fingerprint);
+ if (weightsStatus == null) {
+ weightsStatus = new WeightsStatus();
+ }
+ this.addAdvertisedBandwidths(weightsStatus, fingerprint);
+ this.documentStore.store(weightsStatus, fingerprint);
+ }
+ }
+
+ public String getStatsString() {
+ /* TODO Add statistics string. */
+ return null;
+ }
+}
+
1
0

11 Apr '14
commit 864c148564a6ec3be0d979b126b8ca88f01c4912
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Tue Apr 8 21:13:02 2014 +0200
Split clients data writer into two classes.
---
src/org/torproject/onionoo/ClientsDataWriter.java | 486 --------------------
.../torproject/onionoo/ClientsDocumentWriter.java | 302 ++++++++++++
.../torproject/onionoo/ClientsStatusUpdater.java | 230 +++++++++
src/org/torproject/onionoo/Main.java | 10 +-
4 files changed, 538 insertions(+), 490 deletions(-)
diff --git a/src/org/torproject/onionoo/ClientsDataWriter.java b/src/org/torproject/onionoo/ClientsDataWriter.java
deleted file mode 100644
index 7662f54..0000000
--- a/src/org/torproject/onionoo/ClientsDataWriter.java
+++ /dev/null
@@ -1,486 +0,0 @@
-/* Copyright 2014 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.onionoo;
-
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.SortedMap;
-import java.util.SortedSet;
-import java.util.TimeZone;
-import java.util.TreeMap;
-import java.util.TreeSet;
-
-import org.torproject.descriptor.Descriptor;
-import org.torproject.descriptor.ExtraInfoDescriptor;
-
-/*
- * Example extra-info descriptor used as input:
- *
- * extra-info ndnop2 DE6397A047ABE5F78B4C87AF725047831B221AAB
- * dirreq-stats-end 2014-02-16 16:42:11 (86400 s)
- * dirreq-v3-resp ok=856,not-enough-sigs=0,unavailable=0,not-found=0,
- * not-modified=40,busy=0
- * bridge-stats-end 2014-02-16 16:42:17 (86400 s)
- * bridge-ips ??=8,in=8,se=8
- * bridge-ip-versions v4=8,v6=0
- *
- * Clients status file produced as intermediate output:
- *
- * 2014-02-15 16:42:11 2014-02-16 00:00:00
- * 259.042 in=86.347,se=86.347 v4=259.042
- * 2014-02-16 00:00:00 2014-02-16 16:42:11
- * 592.958 in=197.653,se=197.653 v4=592.958
- *
- * Clients document file produced as output:
- *
- * "1_month":{
- * "first":"2014-02-03 12:00:00",
- * "last":"2014-02-28 12:00:00",
- * "interval":86400,
- * "factor":0.139049349,
- * "count":26,
- * "values":[371,354,349,374,432,null,485,458,493,536,null,null,524,576,
- * 607,622,null,635,null,566,774,999,945,690,656,681],
- * "countries":{"cn":0.0192,"in":0.1768,"ir":0.2487,"ru":0.0104,
- * "se":0.1698,"sy":0.0325,"us":0.0406},
- * "transports":{"obfs2":0.4581},
- * "versions":{"v4":1.0000}}
- */
-public class ClientsDataWriter implements DescriptorListener,
- StatusUpdater, FingerprintListener, DocumentWriter {
-
- private DescriptorSource descriptorSource;
-
- private DocumentStore documentStore;
-
- private long now;
-
- public ClientsDataWriter(DescriptorSource descriptorSource,
- DocumentStore documentStore, Time time) {
- this.descriptorSource = descriptorSource;
- this.documentStore = documentStore;
- this.now = time.currentTimeMillis();
- this.registerDescriptorListeners();
- this.registerFingerprintListeners();
- }
-
- private void registerDescriptorListeners() {
- this.descriptorSource.registerDescriptorListener(this,
- DescriptorType.BRIDGE_EXTRA_INFOS);
- }
-
- private void registerFingerprintListeners() {
- this.descriptorSource.registerFingerprintListener(this,
- DescriptorType.BRIDGE_EXTRA_INFOS);
- }
-
- public void processDescriptor(Descriptor descriptor, boolean relay) {
- if (descriptor instanceof ExtraInfoDescriptor && !relay) {
- this.processBridgeExtraInfoDescriptor(
- (ExtraInfoDescriptor) descriptor);
- }
- }
-
- private static final long ONE_HOUR_MILLIS = 60L * 60L * 1000L,
- ONE_DAY_MILLIS = 24L * ONE_HOUR_MILLIS;
-
- private SortedMap<String, SortedSet<ClientsHistory>> newResponses =
- new TreeMap<String, SortedSet<ClientsHistory>>();
-
- private void processBridgeExtraInfoDescriptor(
- ExtraInfoDescriptor descriptor) {
- long dirreqStatsEndMillis = descriptor.getDirreqStatsEndMillis();
- long dirreqStatsIntervalLengthMillis =
- descriptor.getDirreqStatsIntervalLength() * 1000L;
- SortedMap<String, Integer> responses = descriptor.getDirreqV3Resp();
- if (dirreqStatsEndMillis < 0L ||
- dirreqStatsIntervalLengthMillis != ONE_DAY_MILLIS ||
- responses == null || !responses.containsKey("ok")) {
- return;
- }
- double okResponses = (double) (responses.get("ok") - 4);
- if (okResponses < 0.0) {
- return;
- }
- String hashedFingerprint = descriptor.getFingerprint().toUpperCase();
- long dirreqStatsStartMillis = dirreqStatsEndMillis
- - dirreqStatsIntervalLengthMillis;
- long utcBreakMillis = (dirreqStatsEndMillis / ONE_DAY_MILLIS)
- * ONE_DAY_MILLIS;
- for (int i = 0; i < 2; i++) {
- long startMillis = i == 0 ? dirreqStatsStartMillis : utcBreakMillis;
- long endMillis = i == 0 ? utcBreakMillis : dirreqStatsEndMillis;
- if (startMillis >= endMillis) {
- continue;
- }
- double totalResponses = okResponses
- * ((double) (endMillis - startMillis))
- / ((double) ONE_DAY_MILLIS);
- SortedMap<String, Double> responsesByCountry =
- this.weightResponsesWithUniqueIps(totalResponses,
- descriptor.getBridgeIps(), "??");
- SortedMap<String, Double> responsesByTransport =
- this.weightResponsesWithUniqueIps(totalResponses,
- descriptor.getBridgeIpTransports(), "<??>");
- SortedMap<String, Double> responsesByVersion =
- this.weightResponsesWithUniqueIps(totalResponses,
- descriptor.getBridgeIpVersions(), "");
- ClientsHistory newResponseHistory = new ClientsHistory(
- startMillis, endMillis, totalResponses, responsesByCountry,
- responsesByTransport, responsesByVersion);
- if (!this.newResponses.containsKey(hashedFingerprint)) {
- this.newResponses.put(hashedFingerprint,
- new TreeSet<ClientsHistory>());
- }
- this.newResponses.get(hashedFingerprint).add(
- newResponseHistory);
- }
- }
-
- private SortedMap<String, Double> weightResponsesWithUniqueIps(
- double totalResponses, SortedMap<String, Integer> uniqueIps,
- String omitString) {
- SortedMap<String, Double> weightedResponses =
- new TreeMap<String, Double>();
- int totalUniqueIps = 0;
- if (uniqueIps != null) {
- for (Map.Entry<String, Integer> e : uniqueIps.entrySet()) {
- if (e.getValue() > 4) {
- totalUniqueIps += e.getValue() - 4;
- }
- }
- }
- if (totalUniqueIps > 0) {
- for (Map.Entry<String, Integer> e : uniqueIps.entrySet()) {
- if (!e.getKey().equals(omitString) && e.getValue() > 4) {
- weightedResponses.put(e.getKey(),
- (((double) (e.getValue() - 4)) * totalResponses)
- / ((double) totalUniqueIps));
- }
- }
- }
- return weightedResponses;
- }
-
- public void updateStatuses() {
- for (Map.Entry<String, SortedSet<ClientsHistory>> e :
- this.newResponses.entrySet()) {
- String hashedFingerprint = e.getKey();
- ClientsStatus clientsStatus = this.documentStore.retrieve(
- ClientsStatus.class, true, hashedFingerprint);
- if (clientsStatus == null) {
- clientsStatus = new ClientsStatus();
- }
- this.addToHistory(clientsStatus, e.getValue());
- this.compressHistory(clientsStatus);
- this.documentStore.store(clientsStatus, hashedFingerprint);
- }
- Logger.printStatusTime("Updated clients status files");
- }
-
- private void addToHistory(ClientsStatus clientsStatus,
- SortedSet<ClientsHistory> newIntervals) {
- SortedSet<ClientsHistory> history = clientsStatus.history;
- for (ClientsHistory interval : newIntervals) {
- if ((history.headSet(interval).isEmpty() ||
- history.headSet(interval).last().endMillis <=
- interval.startMillis) &&
- (history.tailSet(interval).isEmpty() ||
- history.tailSet(interval).first().startMillis >=
- interval.endMillis)) {
- history.add(interval);
- }
- }
- }
-
- private void compressHistory(ClientsStatus clientsStatus) {
- SortedSet<ClientsHistory> history = clientsStatus.history;
- SortedSet<ClientsHistory> compressedHistory =
- new TreeSet<ClientsHistory>();
- ClientsHistory lastResponses = null;
- SimpleDateFormat dateTimeFormat = new SimpleDateFormat("yyyy-MM");
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- String lastMonthString = "1970-01";
- for (ClientsHistory responses : history) {
- long intervalLengthMillis;
- if (this.now - responses.endMillis <=
- 92L * 24L * 60L * 60L * 1000L) {
- intervalLengthMillis = 24L * 60L * 60L * 1000L;
- } else if (this.now - responses.endMillis <=
- 366L * 24L * 60L * 60L * 1000L) {
- intervalLengthMillis = 2L * 24L * 60L * 60L * 1000L;
- } else {
- intervalLengthMillis = 10L * 24L * 60L * 60L * 1000L;
- }
- String monthString = dateTimeFormat.format(responses.startMillis);
- if (lastResponses != null &&
- lastResponses.endMillis == responses.startMillis &&
- ((lastResponses.endMillis - 1L) / intervalLengthMillis) ==
- ((responses.endMillis - 1L) / intervalLengthMillis) &&
- lastMonthString.equals(monthString)) {
- lastResponses.addResponses(responses);
- } else {
- if (lastResponses != null) {
- compressedHistory.add(lastResponses);
- }
- lastResponses = responses;
- }
- lastMonthString = monthString;
- }
- if (lastResponses != null) {
- compressedHistory.add(lastResponses);
- }
- clientsStatus.history = compressedHistory;
- }
-
- public void processFingerprints(SortedSet<String> fingerprints,
- boolean relay) {
- if (!relay) {
- this.updateDocuments.addAll(fingerprints);
- }
- }
-
- private SortedSet<String> updateDocuments = new TreeSet<String>();
-
- public void writeDocuments() {
- for (String hashedFingerprint : this.updateDocuments) {
- ClientsStatus clientsStatus = this.documentStore.retrieve(
- ClientsStatus.class, true, hashedFingerprint);
- if (clientsStatus == null) {
- continue;
- }
- SortedSet<ClientsHistory> history = clientsStatus.history;
- ClientsDocument clientsDocument = new ClientsDocument();
- clientsDocument.documentString = this.formatHistoryString(
- hashedFingerprint, history);
- this.documentStore.store(clientsDocument, hashedFingerprint);
- }
- Logger.printStatusTime("Wrote clients document files");
- }
-
- private String[] graphNames = new String[] {
- "1_week",
- "1_month",
- "3_months",
- "1_year",
- "5_years" };
-
- private long[] graphIntervals = new long[] {
- 7L * 24L * 60L * 60L * 1000L,
- 31L * 24L * 60L * 60L * 1000L,
- 92L * 24L * 60L * 60L * 1000L,
- 366L * 24L * 60L * 60L * 1000L,
- 5L * 366L * 24L * 60L * 60L * 1000L };
-
- private long[] dataPointIntervals = new long[] {
- 24L * 60L * 60L * 1000L,
- 24L * 60L * 60L * 1000L,
- 24L * 60L * 60L * 1000L,
- 2L * 24L * 60L * 60L * 1000L,
- 10L * 24L * 60L * 60L * 1000L };
-
- private String formatHistoryString(String hashedFingerprint,
- SortedSet<ClientsHistory> history) {
- StringBuilder sb = new StringBuilder();
- sb.append("{\"fingerprint\":\"" + hashedFingerprint + "\"");
- sb.append(",\n\"average_clients\":{");
- int graphIntervalsWritten = 0;
- for (int graphIntervalIndex = 0; graphIntervalIndex <
- this.graphIntervals.length; graphIntervalIndex++) {
- String timeline = this.formatTimeline(graphIntervalIndex, history);
- if (timeline != null) {
- sb.append((graphIntervalsWritten++ > 0 ? "," : "") + "\n"
- + timeline);
- }
- }
- sb.append("}");
- sb.append("\n}\n");
- return sb.toString();
- }
-
- private String formatTimeline(int graphIntervalIndex,
- SortedSet<ClientsHistory> history) {
- String graphName = this.graphNames[graphIntervalIndex];
- long graphInterval = this.graphIntervals[graphIntervalIndex];
- long dataPointInterval =
- this.dataPointIntervals[graphIntervalIndex];
- List<Double> dataPoints = new ArrayList<Double>();
- long intervalStartMillis = ((this.now - graphInterval)
- / dataPointInterval) * dataPointInterval;
- long millis = 0L;
- double responses = 0.0, totalResponses = 0.0;
- SortedMap<String, Double>
- totalResponsesByCountry = new TreeMap<String, Double>(),
- totalResponsesByTransport = new TreeMap<String, Double>(),
- totalResponsesByVersion = new TreeMap<String, Double>();
- for (ClientsHistory hist : history) {
- if (hist.endMillis < intervalStartMillis) {
- continue;
- }
- while ((intervalStartMillis / dataPointInterval) !=
- (hist.endMillis / dataPointInterval)) {
- dataPoints.add(millis * 2L < dataPointInterval
- ? -1.0 : responses * ((double) ONE_DAY_MILLIS)
- / (((double) millis) * 10.0));
- responses = 0.0;
- millis = 0L;
- intervalStartMillis += dataPointInterval;
- }
- responses += hist.totalResponses;
- totalResponses += hist.totalResponses;
- for (Map.Entry<String, Double> e :
- hist.responsesByCountry.entrySet()) {
- if (!totalResponsesByCountry.containsKey(e.getKey())) {
- totalResponsesByCountry.put(e.getKey(), 0.0);
- }
- totalResponsesByCountry.put(e.getKey(), e.getValue()
- + totalResponsesByCountry.get(e.getKey()));
- }
- for (Map.Entry<String, Double> e :
- hist.responsesByTransport.entrySet()) {
- if (!totalResponsesByTransport.containsKey(e.getKey())) {
- totalResponsesByTransport.put(e.getKey(), 0.0);
- }
- totalResponsesByTransport.put(e.getKey(), e.getValue()
- + totalResponsesByTransport.get(e.getKey()));
- }
- for (Map.Entry<String, Double> e :
- hist.responsesByVersion.entrySet()) {
- if (!totalResponsesByVersion.containsKey(e.getKey())) {
- totalResponsesByVersion.put(e.getKey(), 0.0);
- }
- totalResponsesByVersion.put(e.getKey(), e.getValue()
- + totalResponsesByVersion.get(e.getKey()));
- }
- millis += (hist.endMillis - hist.startMillis);
- }
- dataPoints.add(millis * 2L < dataPointInterval
- ? -1.0 : responses * ((double) ONE_DAY_MILLIS)
- / (((double) millis) * 10.0));
- double maxValue = 0.0;
- int firstNonNullIndex = -1, lastNonNullIndex = -1;
- for (int dataPointIndex = 0; dataPointIndex < dataPoints.size();
- dataPointIndex++) {
- double dataPoint = dataPoints.get(dataPointIndex);
- if (dataPoint >= 0.0) {
- if (firstNonNullIndex < 0) {
- firstNonNullIndex = dataPointIndex;
- }
- lastNonNullIndex = dataPointIndex;
- if (dataPoint > maxValue) {
- maxValue = dataPoint;
- }
- }
- }
- if (firstNonNullIndex < 0) {
- return null;
- }
- long firstDataPointMillis = (((this.now - graphInterval)
- / dataPointInterval) + firstNonNullIndex) * dataPointInterval
- + dataPointInterval / 2L;
- if (graphIntervalIndex > 0 && firstDataPointMillis >=
- this.now - graphIntervals[graphIntervalIndex - 1]) {
- /* Skip clients history object, because it doesn't contain
- * anything new that wasn't already contained in the last
- * clients history object(s). */
- return null;
- }
- long lastDataPointMillis = firstDataPointMillis
- + (lastNonNullIndex - firstNonNullIndex) * dataPointInterval;
- double factor = ((double) maxValue) / 999.0;
- int count = lastNonNullIndex - firstNonNullIndex + 1;
- StringBuilder sb = new StringBuilder();
- SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
- "yyyy-MM-dd HH:mm:ss");
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- sb.append("\"" + graphName + "\":{"
- + "\"first\":\"" + dateTimeFormat.format(firstDataPointMillis)
- + "\",\"last\":\"" + dateTimeFormat.format(lastDataPointMillis)
- + "\",\"interval\":" + String.valueOf(dataPointInterval / 1000L)
- + ",\"factor\":" + String.format(Locale.US, "%.9f", factor)
- + ",\"count\":" + String.valueOf(count) + ",\"values\":[");
- int dataPointsWritten = 0, previousNonNullIndex = -2;
- boolean foundTwoAdjacentDataPoints = false;
- for (int dataPointIndex = firstNonNullIndex; dataPointIndex <=
- lastNonNullIndex; dataPointIndex++) {
- double dataPoint = dataPoints.get(dataPointIndex);
- if (dataPoint >= 0.0) {
- if (dataPointIndex - previousNonNullIndex == 1) {
- foundTwoAdjacentDataPoints = true;
- }
- previousNonNullIndex = dataPointIndex;
- }
- sb.append((dataPointsWritten++ > 0 ? "," : "")
- + (dataPoint < 0.0 ? "null" :
- String.valueOf((long) ((dataPoint * 999.0) / maxValue))));
- }
- sb.append("]");
- if (!totalResponsesByCountry.isEmpty()) {
- sb.append(",\"countries\":{");
- int written = 0;
- for (Map.Entry<String, Double> e :
- totalResponsesByCountry.entrySet()) {
- if (e.getValue() > totalResponses / 100.0) {
- sb.append((written++ > 0 ? "," : "") + "\"" + e.getKey()
- + "\":" + String.format(Locale.US, "%.4f",
- e.getValue() / totalResponses));
- }
- }
- sb.append("}");
- }
- if (!totalResponsesByTransport.isEmpty()) {
- sb.append(",\"transports\":{");
- int written = 0;
- for (Map.Entry<String, Double> e :
- totalResponsesByTransport.entrySet()) {
- if (e.getValue() > totalResponses / 100.0) {
- sb.append((written++ > 0 ? "," : "") + "\"" + e.getKey()
- + "\":" + String.format(Locale.US, "%.4f",
- e.getValue() / totalResponses));
- }
- }
- sb.append("}");
- }
- if (!totalResponsesByVersion.isEmpty()) {
- sb.append(",\"versions\":{");
- int written = 0;
- for (Map.Entry<String, Double> e :
- totalResponsesByVersion.entrySet()) {
- if (e.getValue() > totalResponses / 100.0) {
- sb.append((written++ > 0 ? "," : "") + "\"" + e.getKey()
- + "\":" + String.format(Locale.US, "%.4f",
- e.getValue() / totalResponses));
- }
- }
- sb.append("}");
- }
- sb.append("}");
- if (foundTwoAdjacentDataPoints) {
- return sb.toString();
- } else {
- return null;
- }
- }
-
- public String getStatsString() {
- int newIntervals = 0;
- for (SortedSet<ClientsHistory> hist : this.newResponses.values()) {
- newIntervals += hist.size();
- }
- StringBuilder sb = new StringBuilder();
- sb.append(" "
- + Logger.formatDecimalNumber(newIntervals / 2)
- + " client statistics processed from extra-info descriptors\n");
- sb.append(" "
- + Logger.formatDecimalNumber(this.newResponses.size())
- + " client status files updated\n");
- sb.append(" "
- + Logger.formatDecimalNumber(this.updateDocuments.size())
- + " client document files updated\n");
- return sb.toString();
- }
-}
-
diff --git a/src/org/torproject/onionoo/ClientsDocumentWriter.java b/src/org/torproject/onionoo/ClientsDocumentWriter.java
new file mode 100644
index 0000000..9d3b8dc
--- /dev/null
+++ b/src/org/torproject/onionoo/ClientsDocumentWriter.java
@@ -0,0 +1,302 @@
+/* Copyright 2014 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.onionoo;
+
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.SortedMap;
+import java.util.SortedSet;
+import java.util.TimeZone;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
+/*
+ * Clients status file produced as intermediate output:
+ *
+ * 2014-02-15 16:42:11 2014-02-16 00:00:00
+ * 259.042 in=86.347,se=86.347 v4=259.042
+ * 2014-02-16 00:00:00 2014-02-16 16:42:11
+ * 592.958 in=197.653,se=197.653 v4=592.958
+ *
+ * Clients document file produced as output:
+ *
+ * "1_month":{
+ * "first":"2014-02-03 12:00:00",
+ * "last":"2014-02-28 12:00:00",
+ * "interval":86400,
+ * "factor":0.139049349,
+ * "count":26,
+ * "values":[371,354,349,374,432,null,485,458,493,536,null,null,524,576,
+ * 607,622,null,635,null,566,774,999,945,690,656,681],
+ * "countries":{"cn":0.0192,"in":0.1768,"ir":0.2487,"ru":0.0104,
+ * "se":0.1698,"sy":0.0325,"us":0.0406},
+ * "transports":{"obfs2":0.4581},
+ * "versions":{"v4":1.0000}}
+ */
+public class ClientsDocumentWriter implements FingerprintListener,
+ DocumentWriter {
+
+ private DescriptorSource descriptorSource;
+
+ private DocumentStore documentStore;
+
+ private long now;
+
+ public ClientsDocumentWriter(DescriptorSource descriptorSource,
+ DocumentStore documentStore, Time time) {
+ this.descriptorSource = descriptorSource;
+ this.documentStore = documentStore;
+ this.now = time.currentTimeMillis();
+ this.registerFingerprintListeners();
+ }
+
+ private void registerFingerprintListeners() {
+ this.descriptorSource.registerFingerprintListener(this,
+ DescriptorType.BRIDGE_EXTRA_INFOS);
+ }
+
+ private SortedSet<String> updateDocuments = new TreeSet<String>();
+
+ public void processFingerprints(SortedSet<String> fingerprints,
+ boolean relay) {
+ if (!relay) {
+ this.updateDocuments.addAll(fingerprints);
+ }
+ }
+
+ private int writtenDocuments = 0;
+
+ public void writeDocuments() {
+ for (String hashedFingerprint : this.updateDocuments) {
+ ClientsStatus clientsStatus = this.documentStore.retrieve(
+ ClientsStatus.class, true, hashedFingerprint);
+ if (clientsStatus == null) {
+ continue;
+ }
+ SortedSet<ClientsHistory> history = clientsStatus.history;
+ ClientsDocument clientsDocument = new ClientsDocument();
+ clientsDocument.documentString = this.formatHistoryString(
+ hashedFingerprint, history);
+ this.documentStore.store(clientsDocument, hashedFingerprint);
+ this.writtenDocuments++;
+ }
+ Logger.printStatusTime("Wrote clients document files");
+ }
+
+ private String[] graphNames = new String[] {
+ "1_week",
+ "1_month",
+ "3_months",
+ "1_year",
+ "5_years" };
+
+ private long[] graphIntervals = new long[] {
+ 7L * 24L * 60L * 60L * 1000L,
+ 31L * 24L * 60L * 60L * 1000L,
+ 92L * 24L * 60L * 60L * 1000L,
+ 366L * 24L * 60L * 60L * 1000L,
+ 5L * 366L * 24L * 60L * 60L * 1000L };
+
+ private long[] dataPointIntervals = new long[] {
+ 24L * 60L * 60L * 1000L,
+ 24L * 60L * 60L * 1000L,
+ 24L * 60L * 60L * 1000L,
+ 2L * 24L * 60L * 60L * 1000L,
+ 10L * 24L * 60L * 60L * 1000L };
+
+ private String formatHistoryString(String hashedFingerprint,
+ SortedSet<ClientsHistory> history) {
+ StringBuilder sb = new StringBuilder();
+ sb.append("{\"fingerprint\":\"" + hashedFingerprint + "\"");
+ sb.append(",\n\"average_clients\":{");
+ int graphIntervalsWritten = 0;
+ for (int graphIntervalIndex = 0; graphIntervalIndex <
+ this.graphIntervals.length; graphIntervalIndex++) {
+ String timeline = this.formatTimeline(graphIntervalIndex, history);
+ if (timeline != null) {
+ sb.append((graphIntervalsWritten++ > 0 ? "," : "") + "\n"
+ + timeline);
+ }
+ }
+ sb.append("}");
+ sb.append("\n}\n");
+ return sb.toString();
+ }
+
+ private static final long ONE_HOUR_MILLIS = 60L * 60L * 1000L,
+ ONE_DAY_MILLIS = 24L * ONE_HOUR_MILLIS;
+
+ private String formatTimeline(int graphIntervalIndex,
+ SortedSet<ClientsHistory> history) {
+ String graphName = this.graphNames[graphIntervalIndex];
+ long graphInterval = this.graphIntervals[graphIntervalIndex];
+ long dataPointInterval =
+ this.dataPointIntervals[graphIntervalIndex];
+ List<Double> dataPoints = new ArrayList<Double>();
+ long intervalStartMillis = ((this.now - graphInterval)
+ / dataPointInterval) * dataPointInterval;
+ long millis = 0L;
+ double responses = 0.0, totalResponses = 0.0;
+ SortedMap<String, Double>
+ totalResponsesByCountry = new TreeMap<String, Double>(),
+ totalResponsesByTransport = new TreeMap<String, Double>(),
+ totalResponsesByVersion = new TreeMap<String, Double>();
+ for (ClientsHistory hist : history) {
+ if (hist.endMillis < intervalStartMillis) {
+ continue;
+ }
+ while ((intervalStartMillis / dataPointInterval) !=
+ (hist.endMillis / dataPointInterval)) {
+ dataPoints.add(millis * 2L < dataPointInterval
+ ? -1.0 : responses * ((double) ONE_DAY_MILLIS)
+ / (((double) millis) * 10.0));
+ responses = 0.0;
+ millis = 0L;
+ intervalStartMillis += dataPointInterval;
+ }
+ responses += hist.totalResponses;
+ totalResponses += hist.totalResponses;
+ for (Map.Entry<String, Double> e :
+ hist.responsesByCountry.entrySet()) {
+ if (!totalResponsesByCountry.containsKey(e.getKey())) {
+ totalResponsesByCountry.put(e.getKey(), 0.0);
+ }
+ totalResponsesByCountry.put(e.getKey(), e.getValue()
+ + totalResponsesByCountry.get(e.getKey()));
+ }
+ for (Map.Entry<String, Double> e :
+ hist.responsesByTransport.entrySet()) {
+ if (!totalResponsesByTransport.containsKey(e.getKey())) {
+ totalResponsesByTransport.put(e.getKey(), 0.0);
+ }
+ totalResponsesByTransport.put(e.getKey(), e.getValue()
+ + totalResponsesByTransport.get(e.getKey()));
+ }
+ for (Map.Entry<String, Double> e :
+ hist.responsesByVersion.entrySet()) {
+ if (!totalResponsesByVersion.containsKey(e.getKey())) {
+ totalResponsesByVersion.put(e.getKey(), 0.0);
+ }
+ totalResponsesByVersion.put(e.getKey(), e.getValue()
+ + totalResponsesByVersion.get(e.getKey()));
+ }
+ millis += (hist.endMillis - hist.startMillis);
+ }
+ dataPoints.add(millis * 2L < dataPointInterval
+ ? -1.0 : responses * ((double) ONE_DAY_MILLIS)
+ / (((double) millis) * 10.0));
+ double maxValue = 0.0;
+ int firstNonNullIndex = -1, lastNonNullIndex = -1;
+ for (int dataPointIndex = 0; dataPointIndex < dataPoints.size();
+ dataPointIndex++) {
+ double dataPoint = dataPoints.get(dataPointIndex);
+ if (dataPoint >= 0.0) {
+ if (firstNonNullIndex < 0) {
+ firstNonNullIndex = dataPointIndex;
+ }
+ lastNonNullIndex = dataPointIndex;
+ if (dataPoint > maxValue) {
+ maxValue = dataPoint;
+ }
+ }
+ }
+ if (firstNonNullIndex < 0) {
+ return null;
+ }
+ long firstDataPointMillis = (((this.now - graphInterval)
+ / dataPointInterval) + firstNonNullIndex) * dataPointInterval
+ + dataPointInterval / 2L;
+ if (graphIntervalIndex > 0 && firstDataPointMillis >=
+ this.now - graphIntervals[graphIntervalIndex - 1]) {
+ /* Skip clients history object, because it doesn't contain
+ * anything new that wasn't already contained in the last
+ * clients history object(s). */
+ return null;
+ }
+ long lastDataPointMillis = firstDataPointMillis
+ + (lastNonNullIndex - firstNonNullIndex) * dataPointInterval;
+ double factor = ((double) maxValue) / 999.0;
+ int count = lastNonNullIndex - firstNonNullIndex + 1;
+ StringBuilder sb = new StringBuilder();
+ SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
+ "yyyy-MM-dd HH:mm:ss");
+ dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ sb.append("\"" + graphName + "\":{"
+ + "\"first\":\"" + dateTimeFormat.format(firstDataPointMillis)
+ + "\",\"last\":\"" + dateTimeFormat.format(lastDataPointMillis)
+ + "\",\"interval\":" + String.valueOf(dataPointInterval / 1000L)
+ + ",\"factor\":" + String.format(Locale.US, "%.9f", factor)
+ + ",\"count\":" + String.valueOf(count) + ",\"values\":[");
+ int dataPointsWritten = 0, previousNonNullIndex = -2;
+ boolean foundTwoAdjacentDataPoints = false;
+ for (int dataPointIndex = firstNonNullIndex; dataPointIndex <=
+ lastNonNullIndex; dataPointIndex++) {
+ double dataPoint = dataPoints.get(dataPointIndex);
+ if (dataPoint >= 0.0) {
+ if (dataPointIndex - previousNonNullIndex == 1) {
+ foundTwoAdjacentDataPoints = true;
+ }
+ previousNonNullIndex = dataPointIndex;
+ }
+ sb.append((dataPointsWritten++ > 0 ? "," : "")
+ + (dataPoint < 0.0 ? "null" :
+ String.valueOf((long) ((dataPoint * 999.0) / maxValue))));
+ }
+ sb.append("]");
+ if (!totalResponsesByCountry.isEmpty()) {
+ sb.append(",\"countries\":{");
+ int written = 0;
+ for (Map.Entry<String, Double> e :
+ totalResponsesByCountry.entrySet()) {
+ if (e.getValue() > totalResponses / 100.0) {
+ sb.append((written++ > 0 ? "," : "") + "\"" + e.getKey()
+ + "\":" + String.format(Locale.US, "%.4f",
+ e.getValue() / totalResponses));
+ }
+ }
+ sb.append("}");
+ }
+ if (!totalResponsesByTransport.isEmpty()) {
+ sb.append(",\"transports\":{");
+ int written = 0;
+ for (Map.Entry<String, Double> e :
+ totalResponsesByTransport.entrySet()) {
+ if (e.getValue() > totalResponses / 100.0) {
+ sb.append((written++ > 0 ? "," : "") + "\"" + e.getKey()
+ + "\":" + String.format(Locale.US, "%.4f",
+ e.getValue() / totalResponses));
+ }
+ }
+ sb.append("}");
+ }
+ if (!totalResponsesByVersion.isEmpty()) {
+ sb.append(",\"versions\":{");
+ int written = 0;
+ for (Map.Entry<String, Double> e :
+ totalResponsesByVersion.entrySet()) {
+ if (e.getValue() > totalResponses / 100.0) {
+ sb.append((written++ > 0 ? "," : "") + "\"" + e.getKey()
+ + "\":" + String.format(Locale.US, "%.4f",
+ e.getValue() / totalResponses));
+ }
+ }
+ sb.append("}");
+ }
+ sb.append("}");
+ if (foundTwoAdjacentDataPoints) {
+ return sb.toString();
+ } else {
+ return null;
+ }
+ }
+
+ public String getStatsString() {
+ StringBuilder sb = new StringBuilder();
+ sb.append(" " + Logger.formatDecimalNumber(this.writtenDocuments)
+ + " clients document files updated\n");
+ return sb.toString();
+ }
+}
diff --git a/src/org/torproject/onionoo/ClientsStatusUpdater.java b/src/org/torproject/onionoo/ClientsStatusUpdater.java
new file mode 100644
index 0000000..e15c11a
--- /dev/null
+++ b/src/org/torproject/onionoo/ClientsStatusUpdater.java
@@ -0,0 +1,230 @@
+/* Copyright 2014 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.onionoo;
+
+import java.text.SimpleDateFormat;
+import java.util.Map;
+import java.util.SortedMap;
+import java.util.SortedSet;
+import java.util.TimeZone;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
+import org.torproject.descriptor.Descriptor;
+import org.torproject.descriptor.ExtraInfoDescriptor;
+
+/*
+ * Example extra-info descriptor used as input:
+ *
+ * extra-info ndnop2 DE6397A047ABE5F78B4C87AF725047831B221AAB
+ * dirreq-stats-end 2014-02-16 16:42:11 (86400 s)
+ * dirreq-v3-resp ok=856,not-enough-sigs=0,unavailable=0,not-found=0,
+ * not-modified=40,busy=0
+ * bridge-stats-end 2014-02-16 16:42:17 (86400 s)
+ * bridge-ips ??=8,in=8,se=8
+ * bridge-ip-versions v4=8,v6=0
+ *
+ * Clients status file produced as intermediate output:
+ *
+ * 2014-02-15 16:42:11 2014-02-16 00:00:00
+ * 259.042 in=86.347,se=86.347 v4=259.042
+ * 2014-02-16 00:00:00 2014-02-16 16:42:11
+ * 592.958 in=197.653,se=197.653 v4=592.958
+ */
+public class ClientsStatusUpdater implements DescriptorListener,
+ StatusUpdater {
+
+ private DescriptorSource descriptorSource;
+
+ private DocumentStore documentStore;
+
+ private long now;
+
+ public ClientsStatusUpdater(DescriptorSource descriptorSource,
+ DocumentStore documentStore, Time time) {
+ this.descriptorSource = descriptorSource;
+ this.documentStore = documentStore;
+ this.now = time.currentTimeMillis();
+ this.registerDescriptorListeners();
+ }
+
+ private void registerDescriptorListeners() {
+ this.descriptorSource.registerDescriptorListener(this,
+ DescriptorType.BRIDGE_EXTRA_INFOS);
+ }
+
+ public void processDescriptor(Descriptor descriptor, boolean relay) {
+ if (descriptor instanceof ExtraInfoDescriptor && !relay) {
+ this.processBridgeExtraInfoDescriptor(
+ (ExtraInfoDescriptor) descriptor);
+ }
+ }
+
+ private static final long ONE_HOUR_MILLIS = 60L * 60L * 1000L,
+ ONE_DAY_MILLIS = 24L * ONE_HOUR_MILLIS;
+
+ private SortedMap<String, SortedSet<ClientsHistory>> newResponses =
+ new TreeMap<String, SortedSet<ClientsHistory>>();
+
+ private void processBridgeExtraInfoDescriptor(
+ ExtraInfoDescriptor descriptor) {
+ long dirreqStatsEndMillis = descriptor.getDirreqStatsEndMillis();
+ long dirreqStatsIntervalLengthMillis =
+ descriptor.getDirreqStatsIntervalLength() * 1000L;
+ SortedMap<String, Integer> responses = descriptor.getDirreqV3Resp();
+ if (dirreqStatsEndMillis < 0L ||
+ dirreqStatsIntervalLengthMillis != ONE_DAY_MILLIS ||
+ responses == null || !responses.containsKey("ok")) {
+ return;
+ }
+ double okResponses = (double) (responses.get("ok") - 4);
+ if (okResponses < 0.0) {
+ return;
+ }
+ String hashedFingerprint = descriptor.getFingerprint().toUpperCase();
+ long dirreqStatsStartMillis = dirreqStatsEndMillis
+ - dirreqStatsIntervalLengthMillis;
+ long utcBreakMillis = (dirreqStatsEndMillis / ONE_DAY_MILLIS)
+ * ONE_DAY_MILLIS;
+ for (int i = 0; i < 2; i++) {
+ long startMillis = i == 0 ? dirreqStatsStartMillis : utcBreakMillis;
+ long endMillis = i == 0 ? utcBreakMillis : dirreqStatsEndMillis;
+ if (startMillis >= endMillis) {
+ continue;
+ }
+ double totalResponses = okResponses
+ * ((double) (endMillis - startMillis))
+ / ((double) ONE_DAY_MILLIS);
+ SortedMap<String, Double> responsesByCountry =
+ this.weightResponsesWithUniqueIps(totalResponses,
+ descriptor.getBridgeIps(), "??");
+ SortedMap<String, Double> responsesByTransport =
+ this.weightResponsesWithUniqueIps(totalResponses,
+ descriptor.getBridgeIpTransports(), "<??>");
+ SortedMap<String, Double> responsesByVersion =
+ this.weightResponsesWithUniqueIps(totalResponses,
+ descriptor.getBridgeIpVersions(), "");
+ ClientsHistory newResponseHistory = new ClientsHistory(
+ startMillis, endMillis, totalResponses, responsesByCountry,
+ responsesByTransport, responsesByVersion);
+ if (!this.newResponses.containsKey(hashedFingerprint)) {
+ this.newResponses.put(hashedFingerprint,
+ new TreeSet<ClientsHistory>());
+ }
+ this.newResponses.get(hashedFingerprint).add(
+ newResponseHistory);
+ }
+ }
+
+ private SortedMap<String, Double> weightResponsesWithUniqueIps(
+ double totalResponses, SortedMap<String, Integer> uniqueIps,
+ String omitString) {
+ SortedMap<String, Double> weightedResponses =
+ new TreeMap<String, Double>();
+ int totalUniqueIps = 0;
+ if (uniqueIps != null) {
+ for (Map.Entry<String, Integer> e : uniqueIps.entrySet()) {
+ if (e.getValue() > 4) {
+ totalUniqueIps += e.getValue() - 4;
+ }
+ }
+ }
+ if (totalUniqueIps > 0) {
+ for (Map.Entry<String, Integer> e : uniqueIps.entrySet()) {
+ if (!e.getKey().equals(omitString) && e.getValue() > 4) {
+ weightedResponses.put(e.getKey(),
+ (((double) (e.getValue() - 4)) * totalResponses)
+ / ((double) totalUniqueIps));
+ }
+ }
+ }
+ return weightedResponses;
+ }
+
+ public void updateStatuses() {
+ for (Map.Entry<String, SortedSet<ClientsHistory>> e :
+ this.newResponses.entrySet()) {
+ String hashedFingerprint = e.getKey();
+ ClientsStatus clientsStatus = this.documentStore.retrieve(
+ ClientsStatus.class, true, hashedFingerprint);
+ if (clientsStatus == null) {
+ clientsStatus = new ClientsStatus();
+ }
+ this.addToHistory(clientsStatus, e.getValue());
+ this.compressHistory(clientsStatus);
+ this.documentStore.store(clientsStatus, hashedFingerprint);
+ }
+ Logger.printStatusTime("Updated clients status files");
+ }
+
+ private void addToHistory(ClientsStatus clientsStatus,
+ SortedSet<ClientsHistory> newIntervals) {
+ SortedSet<ClientsHistory> history = clientsStatus.history;
+ for (ClientsHistory interval : newIntervals) {
+ if ((history.headSet(interval).isEmpty() ||
+ history.headSet(interval).last().endMillis <=
+ interval.startMillis) &&
+ (history.tailSet(interval).isEmpty() ||
+ history.tailSet(interval).first().startMillis >=
+ interval.endMillis)) {
+ history.add(interval);
+ }
+ }
+ }
+
+ private void compressHistory(ClientsStatus clientsStatus) {
+ SortedSet<ClientsHistory> history = clientsStatus.history;
+ SortedSet<ClientsHistory> compressedHistory =
+ new TreeSet<ClientsHistory>();
+ ClientsHistory lastResponses = null;
+ SimpleDateFormat dateTimeFormat = new SimpleDateFormat("yyyy-MM");
+ dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ String lastMonthString = "1970-01";
+ for (ClientsHistory responses : history) {
+ long intervalLengthMillis;
+ if (this.now - responses.endMillis <=
+ 92L * 24L * 60L * 60L * 1000L) {
+ intervalLengthMillis = 24L * 60L * 60L * 1000L;
+ } else if (this.now - responses.endMillis <=
+ 366L * 24L * 60L * 60L * 1000L) {
+ intervalLengthMillis = 2L * 24L * 60L * 60L * 1000L;
+ } else {
+ intervalLengthMillis = 10L * 24L * 60L * 60L * 1000L;
+ }
+ String monthString = dateTimeFormat.format(responses.startMillis);
+ if (lastResponses != null &&
+ lastResponses.endMillis == responses.startMillis &&
+ ((lastResponses.endMillis - 1L) / intervalLengthMillis) ==
+ ((responses.endMillis - 1L) / intervalLengthMillis) &&
+ lastMonthString.equals(monthString)) {
+ lastResponses.addResponses(responses);
+ } else {
+ if (lastResponses != null) {
+ compressedHistory.add(lastResponses);
+ }
+ lastResponses = responses;
+ }
+ lastMonthString = monthString;
+ }
+ if (lastResponses != null) {
+ compressedHistory.add(lastResponses);
+ }
+ clientsStatus.history = compressedHistory;
+ }
+
+ public String getStatsString() {
+ int newIntervals = 0;
+ for (SortedSet<ClientsHistory> hist : this.newResponses.values()) {
+ newIntervals += hist.size();
+ }
+ StringBuilder sb = new StringBuilder();
+ sb.append(" "
+ + Logger.formatDecimalNumber(newIntervals / 2)
+ + " client statistics processed from extra-info descriptors\n");
+ sb.append(" "
+ + Logger.formatDecimalNumber(this.newResponses.size())
+ + " client status files updated\n");
+ return sb.toString();
+ }
+}
+
diff --git a/src/org/torproject/onionoo/Main.java b/src/org/torproject/onionoo/Main.java
index 1a5d083..af33124 100644
--- a/src/org/torproject/onionoo/Main.java
+++ b/src/org/torproject/onionoo/Main.java
@@ -36,12 +36,14 @@ public class Main {
Logger.printStatusTime("Initialized bandwidth data writer");
WeightsDataWriter wdw = new WeightsDataWriter(dso, ds, t);
Logger.printStatusTime("Initialized weights data writer");
- ClientsDataWriter cdw = new ClientsDataWriter(dso, ds, t);
- Logger.printStatusTime("Initialized clients data writer");
+ ClientsStatusUpdater csu = new ClientsStatusUpdater(dso, ds, t);
+ Logger.printStatusTime("Initialized clients status updater");
UptimeStatusUpdater usu = new UptimeStatusUpdater(dso, ds);
Logger.printStatusTime("Initialized uptime status updater");
- StatusUpdater[] sus = new StatusUpdater[] { ndw, bdw, wdw, cdw, usu };
+ StatusUpdater[] sus = new StatusUpdater[] { ndw, bdw, wdw, csu, usu };
+ ClientsDocumentWriter cdw = new ClientsDocumentWriter(dso, ds, t);
+ Logger.printStatusTime("Initialized clients document writer");
UptimeDocumentWriter udw = new UptimeDocumentWriter(dso, ds, t);
Logger.printStatusTime("Initialized uptime document writer");
DocumentWriter[] dws = new DocumentWriter[] { ndw, bdw, wdw, cdw,
@@ -91,7 +93,7 @@ public class Main {
}
/* TODO Print status updater statistics for *all* status updaters once
* all data writers have been separated. */
- for (DocumentWriter dw : new DocumentWriter[] { udw }) {
+ for (DocumentWriter dw : new DocumentWriter[] { cdw, udw }) {
String statsString = dw.getStatsString();
if (statsString != null) {
Logger.printStatistics(dw.getClass().getSimpleName(),
1
0

[onionoo/master] Move (de-)serialization code into WeightsStatus.
by karsten@torproject.org 11 Apr '14
by karsten@torproject.org 11 Apr '14
11 Apr '14
commit d3638d4c9cf350ab5eb6f319e453a94163838236
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Tue Apr 8 23:58:18 2014 +0200
Move (de-)serialization code into WeightsStatus.
---
src/org/torproject/onionoo/DocumentStore.java | 6 +-
src/org/torproject/onionoo/WeightsDataWriter.java | 161 ++++++---------------
src/org/torproject/onionoo/WeightsStatus.java | 91 ++++++++++++
3 files changed, 143 insertions(+), 115 deletions(-)
diff --git a/src/org/torproject/onionoo/DocumentStore.java b/src/org/torproject/onionoo/DocumentStore.java
index 387f34c..5da7267 100644
--- a/src/org/torproject/onionoo/DocumentStore.java
+++ b/src/org/torproject/onionoo/DocumentStore.java
@@ -196,7 +196,8 @@ public class DocumentStore {
document instanceof UptimeDocument) {
Gson gson = new Gson();
documentString = gson.toJson(this);
- } else if (document instanceof ClientsStatus ||
+ } else if (document instanceof WeightsStatus ||
+ document instanceof ClientsStatus ||
document instanceof UptimeStatus) {
documentString = document.toDocumentString();
} else {
@@ -289,7 +290,8 @@ public class DocumentStore {
documentType.equals(UptimeDocument.class)) {
return this.retrieveParsedDocumentFile(documentType,
documentString);
- } else if (documentType.equals(ClientsStatus.class) ||
+ } else if (documentType.equals(WeightsStatus.class) ||
+ documentType.equals(ClientsStatus.class) ||
documentType.equals(UptimeStatus.class)) {
return this.retrieveParsedStatusFile(documentType, documentString);
} else {
diff --git a/src/org/torproject/onionoo/WeightsDataWriter.java b/src/org/torproject/onionoo/WeightsDataWriter.java
index d230662..0d7b815 100644
--- a/src/org/torproject/onionoo/WeightsDataWriter.java
+++ b/src/org/torproject/onionoo/WeightsDataWriter.java
@@ -2,17 +2,14 @@
* See LICENSE for licensing information */
package org.torproject.onionoo;
-import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
-import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
-import java.util.Scanner;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
@@ -230,7 +227,19 @@ public class WeightsDataWriter implements DescriptorListener,
double advertisedBandwidth = 0.0;
if (!this.advertisedBandwidths.containsKey(
serverDescriptorDigest)) {
- this.readHistoryFromDisk(fingerprint);
+ WeightsStatus weightsStatus = this.documentStore.retrieve(
+ WeightsStatus.class, true, fingerprint);
+ if (weightsStatus != null) {
+ if (!this.descriptorDigestsByFingerprint.containsKey(
+ fingerprint)) {
+ this.descriptorDigestsByFingerprint.put(fingerprint,
+ new HashSet<String>());
+ }
+ this.descriptorDigestsByFingerprint.get(fingerprint).addAll(
+ weightsStatus.advertisedBandwidths.keySet());
+ this.advertisedBandwidths.putAll(
+ weightsStatus.advertisedBandwidths);
+ }
}
if (this.advertisedBandwidths.containsKey(
serverDescriptorDigest)) {
@@ -288,93 +297,42 @@ public class WeightsDataWriter implements DescriptorListener,
private void addToHistory(String fingerprint, long validAfterMillis,
long freshUntilMillis, double[] weights) {
- SortedMap<long[], double[]> history =
- this.readHistoryFromDisk(fingerprint);
+ WeightsStatus weightsStatus = this.documentStore.retrieve(
+ WeightsStatus.class, true, fingerprint);
+ if (weightsStatus == null) {
+ weightsStatus = new WeightsStatus();
+ }
+ SortedMap<long[], double[]> history = weightsStatus.history;
long[] interval = new long[] { validAfterMillis, freshUntilMillis };
if ((history.headMap(interval).isEmpty() ||
history.headMap(interval).lastKey()[1] <= validAfterMillis) &&
(history.tailMap(interval).isEmpty() ||
history.tailMap(interval).firstKey()[0] >= freshUntilMillis)) {
history.put(interval, weights);
- history = this.compressHistory(history);
- this.writeHistoryToDisk(fingerprint, history);
+ this.compressHistory(weightsStatus);
+ this.addAdvertisedBandwidths(weightsStatus, fingerprint);
+ this.documentStore.store(weightsStatus, fingerprint);
this.updateWeightsStatuses.remove(fingerprint);
}
}
- private SortedMap<long[], double[]> readHistoryFromDisk(
+ private void addAdvertisedBandwidths(WeightsStatus weightsStatus,
String fingerprint) {
- SortedMap<long[], double[]> history =
- new TreeMap<long[], double[]>(new Comparator<long[]>() {
- public int compare(long[] a, long[] b) {
- return a[0] < b[0] ? -1 : a[0] > b[0] ? 1 : 0;
- }
- });
- WeightsStatus weightsStatus = this.documentStore.retrieve(
- WeightsStatus.class, false, fingerprint);
- if (weightsStatus != null) {
- String historyString = weightsStatus.documentString;
- SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
- "yyyy-MM-dd HH:mm:ss");
- dateTimeFormat.setLenient(false);
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- try {
- Scanner s = new Scanner(historyString);
- while (s.hasNextLine()) {
- String line = s.nextLine();
- String[] parts = line.split(" ");
- if (parts.length == 2) {
- String descriptorDigest = parts[0];
- int advertisedBandwidth = Integer.parseInt(parts[1]);
- if (!this.descriptorDigestsByFingerprint.containsKey(
- fingerprint)) {
- this.descriptorDigestsByFingerprint.put(fingerprint,
- new HashSet<String>());
- }
- this.descriptorDigestsByFingerprint.get(fingerprint).add(
- descriptorDigest);
- this.advertisedBandwidths.put(descriptorDigest,
- advertisedBandwidth);
- continue;
- }
- if (parts.length != 9) {
- System.err.println("Illegal line '" + line + "' in weights "
- + "history for fingerprint '" + fingerprint + "'. "
- + "Skipping this line.");
- continue;
- }
- if (parts[4].equals("NaN")) {
- /* Remove corrupt lines written on 2013-07-07 and the days
- * after. */
- continue;
- }
- long validAfterMillis = dateTimeFormat.parse(parts[0]
- + " " + parts[1]).getTime();
- long freshUntilMillis = dateTimeFormat.parse(parts[2]
- + " " + parts[3]).getTime();
- long[] interval = new long[] { validAfterMillis,
- freshUntilMillis };
- double[] weights = new double[] {
- Double.parseDouble(parts[4]),
- Double.parseDouble(parts[5]),
- Double.parseDouble(parts[6]),
- Double.parseDouble(parts[7]),
- Double.parseDouble(parts[8]) };
- history.put(interval, weights);
+ if (this.descriptorDigestsByFingerprint.containsKey(fingerprint)) {
+ for (String descriptorDigest :
+ this.descriptorDigestsByFingerprint.get(fingerprint)) {
+ if (this.advertisedBandwidths.containsKey(descriptorDigest)) {
+ int advertisedBandwidth =
+ this.advertisedBandwidths.get(descriptorDigest);
+ weightsStatus.advertisedBandwidths.put(descriptorDigest,
+ advertisedBandwidth);
}
- s.close();
- } catch (ParseException e) {
- System.err.println("Could not parse timestamp while reading "
- + "weights history for fingerprint '" + fingerprint + "'. "
- + "Skipping.");
- e.printStackTrace();
}
}
- return history;
}
- private SortedMap<long[], double[]> compressHistory(
- SortedMap<long[], double[]> history) {
+ private void compressHistory(WeightsStatus weightsStatus) {
+ SortedMap<long[], double[]> history = weightsStatus.history;
SortedMap<long[], double[]> compressedHistory =
new TreeMap<long[], double[]>(history.comparator());
long lastStartMillis = 0L, lastEndMillis = 0L;
@@ -429,39 +387,7 @@ public class WeightsDataWriter implements DescriptorListener,
compressedHistory.put(new long[] { lastStartMillis, lastEndMillis },
lastWeights);
}
- return compressedHistory;
- }
-
- private void writeHistoryToDisk(String fingerprint,
- SortedMap<long[], double[]> history) {
- StringBuilder sb = new StringBuilder();
- if (this.descriptorDigestsByFingerprint.containsKey(fingerprint)) {
- for (String descriptorDigest :
- this.descriptorDigestsByFingerprint.get(fingerprint)) {
- if (this.advertisedBandwidths.containsKey(descriptorDigest)) {
- int advertisedBandwidth =
- this.advertisedBandwidths.get(descriptorDigest);
- sb.append(descriptorDigest + " "
- + String.valueOf(advertisedBandwidth) + "\n");
- }
- }
- }
- SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
- "yyyy-MM-dd HH:mm:ss");
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- for (Map.Entry<long[], double[]> e : history.entrySet()) {
- long[] fresh = e.getKey();
- double[] weights = e.getValue();
- sb.append(dateTimeFormat.format(fresh[0]) + " "
- + dateTimeFormat.format(fresh[1]));
- for (double weight : weights) {
- sb.append(String.format(" %.12f", weight));
- }
- sb.append("\n");
- }
- WeightsStatus weightsStatus = new WeightsStatus();
- weightsStatus.documentString = sb.toString();
- this.documentStore.store(weightsStatus, fingerprint);
+ weightsStatus.history = compressedHistory;
}
public void processFingerprints(SortedSet<String> fingerprints,
@@ -473,13 +399,18 @@ public class WeightsDataWriter implements DescriptorListener,
private void writeWeightsDataFiles() {
for (String fingerprint : this.updateWeightsDocuments) {
- SortedMap<long[], double[]> history =
- this.readHistoryFromDisk(fingerprint);
+ WeightsStatus weightsStatus = this.documentStore.retrieve(
+ WeightsStatus.class, true, fingerprint);
+ if (weightsStatus == null) {
+ continue;
+ }
+ SortedMap<long[], double[]> history = weightsStatus.history;
WeightsDocument weightsDocument = new WeightsDocument();
weightsDocument.documentString = this.formatHistoryString(
fingerprint, history);
this.documentStore.store(weightsDocument, fingerprint);
}
+ Logger.printStatusTime("Wrote weights document files");
}
private String[] graphTypes = new String[] {
@@ -633,9 +564,13 @@ public class WeightsDataWriter implements DescriptorListener,
private void updateWeightsStatuses() {
for (String fingerprint : this.updateWeightsStatuses) {
- SortedMap<long[], double[]> history =
- this.readHistoryFromDisk(fingerprint);
- this.writeHistoryToDisk(fingerprint, history);
+ WeightsStatus weightsStatus = this.documentStore.retrieve(
+ WeightsStatus.class, true, fingerprint);
+ if (weightsStatus == null) {
+ weightsStatus = new WeightsStatus();
+ }
+ this.addAdvertisedBandwidths(weightsStatus, fingerprint);
+ this.documentStore.store(weightsStatus, fingerprint);
}
}
diff --git a/src/org/torproject/onionoo/WeightsStatus.java b/src/org/torproject/onionoo/WeightsStatus.java
index f8f78ad..4d92f30 100644
--- a/src/org/torproject/onionoo/WeightsStatus.java
+++ b/src/org/torproject/onionoo/WeightsStatus.java
@@ -1,5 +1,96 @@
package org.torproject.onionoo;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Scanner;
+import java.util.SortedMap;
+import java.util.TimeZone;
+import java.util.TreeMap;
+
class WeightsStatus extends Document {
+
+  /* History of weight values: maps an interval
+   * { validAfterMillis, freshUntilMillis } to the five weight values
+   * parsed from a nine-token status line.  The comparator orders
+   * intervals by start time only, so two intervals with the same
+   * valid-after time collide as a single key. */
+  SortedMap<long[], double[]> history = new TreeMap<long[], double[]>(
+      new Comparator<long[]>() {
+        public int compare(long[] a, long[] b) {
+          return a[0] < b[0] ? -1 : a[0] > b[0] ? 1 : 0;
+        }
+      });
+
+  /* Advertised bandwidths by descriptor digest, read from two-token
+   * "<descriptorDigest> <advertisedBandwidth>" lines. */
+  Map<String, Integer> advertisedBandwidths =
+      new HashMap<String, Integer>();
+
+  /* Initialize this status from the given document string.  Two line
+   * formats are accepted: two tokens (descriptor digest and
+   * advertised bandwidth) and nine tokens (valid-after date/time,
+   * fresh-until date/time, and five weight values).  Any other line
+   * is reported to stderr and skipped.
+   * NOTE(review): NumberFormatException from parseInt/parseDouble is
+   * not caught here and would propagate to the caller. */
+  public void fromDocumentString(String documentString) {
+    SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
+        "yyyy-MM-dd HH:mm:ss");
+    dateTimeFormat.setLenient(false);
+    dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+    try {
+      Scanner s = new Scanner(documentString);
+      while (s.hasNextLine()) {
+        String line = s.nextLine();
+        String[] parts = line.split(" ");
+        if (parts.length == 2) {
+          /* Advertised-bandwidth line. */
+          String descriptorDigest = parts[0];
+          int advertisedBandwidth = Integer.parseInt(parts[1]);
+          this.advertisedBandwidths.put(descriptorDigest,
+              advertisedBandwidth);
+          continue;
+        }
+        if (parts.length != 9) {
+          System.err.println("Illegal line '" + line + "' in weights "
+              + "status file. Skipping this line.");
+          continue;
+        }
+        if (parts[4].equals("NaN")) {
+          /* Remove corrupt lines written on 2013-07-07 and the days
+           * after. */
+          continue;
+        }
+        long validAfterMillis = dateTimeFormat.parse(parts[0]
+            + " " + parts[1]).getTime();
+        long freshUntilMillis = dateTimeFormat.parse(parts[2]
+            + " " + parts[3]).getTime();
+        long[] interval = new long[] { validAfterMillis,
+            freshUntilMillis };
+        double[] weights = new double[] {
+            Double.parseDouble(parts[4]),
+            Double.parseDouble(parts[5]),
+            Double.parseDouble(parts[6]),
+            Double.parseDouble(parts[7]),
+            Double.parseDouble(parts[8]) };
+        this.history.put(interval, weights);
+      }
+      /* NOTE(review): s is not closed if a ParseException is thrown,
+       * because this close() sits inside the try block. */
+      s.close();
+    } catch (ParseException e) {
+      System.err.println("Could not parse timestamp while reading "
+          + "weights status file. Skipping.");
+      e.printStackTrace();
+    }
+  }
+
+  /* Serialize this status in the same two line formats parsed by
+   * fromDocumentString(): advertised-bandwidth lines first, then one
+   * line per history interval with UTC timestamps and weights
+   * formatted to twelve decimal places. */
+  public String toDocumentString() {
+    StringBuilder sb = new StringBuilder();
+    for (Map.Entry<String, Integer> e :
+        this.advertisedBandwidths.entrySet()) {
+      sb.append(e.getKey() + " " + String.valueOf(e.getValue()) + "\n");
+    }
+    SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
+        "yyyy-MM-dd HH:mm:ss");
+    dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+    for (Map.Entry<long[], double[]> e : history.entrySet()) {
+      long[] fresh = e.getKey();
+      double[] weights = e.getValue();
+      sb.append(dateTimeFormat.format(fresh[0]) + " "
+          + dateTimeFormat.format(fresh[1]));
+      for (double weight : weights) {
+        sb.append(String.format(" %.12f", weight));
+      }
+      sb.append("\n");
+    }
+    return sb.toString();
+  }
}
1
0

[onionoo/master] Split DataWriter interface into two interfaces.
by karsten@torproject.org 11 Apr '14
commit 09f83c84dd160d86c75cd4957625aefe1ee5c8ec
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Thu Mar 13 15:35:40 2014 +0100
Split DataWriter interface into two interfaces.
Classes implementing DataWriter perform two tasks: update internal status
files and rewrite output document files. These tasks could also be
performed by two separate classes, and there may be cases when we want to
rewrite output document files using already existing internal status
files. As the first step in splitting up these classes, split the
DataWriter interface into two interfaces StatusUpdater and DocumentWriter.
The current classes that implement DataWriter need to implement the two
new interfaces.
---
.../torproject/onionoo/BandwidthDataWriter.java | 6 +++---
src/org/torproject/onionoo/ClientsDataWriter.java | 6 +++---
src/org/torproject/onionoo/DataWriter.java | 11 -----------
src/org/torproject/onionoo/DocumentWriter.java | 11 +++++++++++
src/org/torproject/onionoo/Main.java | 20 ++++++++++++--------
src/org/torproject/onionoo/NodeDataWriter.java | 6 +++---
src/org/torproject/onionoo/StatusUpdater.java | 11 +++++++++++
src/org/torproject/onionoo/UptimeDataWriter.java | 6 +++---
src/org/torproject/onionoo/WeightsDataWriter.java | 6 +++---
9 files changed, 49 insertions(+), 34 deletions(-)
diff --git a/src/org/torproject/onionoo/BandwidthDataWriter.java b/src/org/torproject/onionoo/BandwidthDataWriter.java
index 67dc427..227df2b 100644
--- a/src/org/torproject/onionoo/BandwidthDataWriter.java
+++ b/src/org/torproject/onionoo/BandwidthDataWriter.java
@@ -35,8 +35,8 @@ import org.torproject.descriptor.ExtraInfoDescriptor;
* last 3 days in the bandwidth document may not be equivalent to the last
* 3 days as of publishing the document, but that's something clients can
* work around. */
-public class BandwidthDataWriter implements DataWriter,
- DescriptorListener, FingerprintListener {
+public class BandwidthDataWriter implements DescriptorListener,
+ StatusUpdater, FingerprintListener, DocumentWriter {
private DescriptorSource descriptorSource;
@@ -88,7 +88,7 @@ public class BandwidthDataWriter implements DataWriter,
/* Status files are already updated while processing descriptors. */
}
- public void updateDocuments() {
+ public void writeDocuments() {
/* Document files are already updated while processing descriptors. */
}
diff --git a/src/org/torproject/onionoo/ClientsDataWriter.java b/src/org/torproject/onionoo/ClientsDataWriter.java
index b045bfa..2ccdff7 100644
--- a/src/org/torproject/onionoo/ClientsDataWriter.java
+++ b/src/org/torproject/onionoo/ClientsDataWriter.java
@@ -51,8 +51,8 @@ import org.torproject.descriptor.ExtraInfoDescriptor;
* "transports":{"obfs2":0.4581},
* "versions":{"v4":1.0000}}
*/
-public class ClientsDataWriter implements DataWriter, DescriptorListener,
- FingerprintListener {
+public class ClientsDataWriter implements DescriptorListener,
+ StatusUpdater, FingerprintListener, DocumentWriter {
private static class ResponseHistory
implements Comparable<ResponseHistory> {
@@ -418,7 +418,7 @@ public class ClientsDataWriter implements DataWriter, DescriptorListener,
private SortedSet<String> updateDocuments = new TreeSet<String>();
- public void updateDocuments() {
+ public void writeDocuments() {
for (String hashedFingerprint : this.updateDocuments) {
SortedSet<ResponseHistory> history =
this.readHistory(hashedFingerprint);
diff --git a/src/org/torproject/onionoo/DataWriter.java b/src/org/torproject/onionoo/DataWriter.java
deleted file mode 100644
index c374153..0000000
--- a/src/org/torproject/onionoo/DataWriter.java
+++ /dev/null
@@ -1,11 +0,0 @@
-package org.torproject.onionoo;
-
-public interface DataWriter {
-
- public void updateStatuses();
-
- public void updateDocuments();
-
- public String getStatsString();
-}
-
diff --git a/src/org/torproject/onionoo/DocumentWriter.java b/src/org/torproject/onionoo/DocumentWriter.java
new file mode 100644
index 0000000..4cdeef9
--- /dev/null
+++ b/src/org/torproject/onionoo/DocumentWriter.java
@@ -0,0 +1,11 @@
+/* Copyright 2014 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.onionoo;
+
+/* Rewrites output document files, typically from internal status
+ * files.  One of the two interfaces replacing the former DataWriter
+ * interface (the other being StatusUpdater). */
+public interface DocumentWriter {
+
+  /* Write all output document files handled by this writer. */
+  public abstract void writeDocuments();
+
+  /* Return a statistics string for logging; may be null when there
+   * are no statistics to report (callers null-check before logging). */
+  public abstract String getStatsString();
+}
+
diff --git a/src/org/torproject/onionoo/Main.java b/src/org/torproject/onionoo/Main.java
index 5631a5e..abae5cf 100644
--- a/src/org/torproject/onionoo/Main.java
+++ b/src/org/torproject/onionoo/Main.java
@@ -40,7 +40,9 @@ public class Main {
Logger.printStatusTime("Initialized clients data writer");
UptimeDataWriter udw = new UptimeDataWriter(dso, ds, t);
Logger.printStatusTime("Initialized uptime data writer");
- DataWriter[] dws = new DataWriter[] { ndw, bdw, wdw, cdw, udw };
+ StatusUpdater[] sus = new StatusUpdater[] { ndw, bdw, wdw, cdw, udw };
+ DocumentWriter[] dws = new DocumentWriter[] { ndw, bdw, wdw, cdw,
+ udw };
Logger.printStatus("Reading descriptors.");
dso.readRelayNetworkConsensuses();
@@ -61,13 +63,13 @@ public class Main {
Logger.printStatusTime("Read bridge-pool assignments");
Logger.printStatus("Updating internal status files.");
- for (DataWriter dw : dws) {
- dw.updateStatuses();
+ for (StatusUpdater su : sus) {
+ su.updateStatuses();
}
Logger.printStatus("Updating document files.");
- for (DataWriter dw : dws) {
- dw.updateDocuments();
+ for (DocumentWriter dw : dws) {
+ dw.writeDocuments();
}
Logger.printStatus("Shutting down.");
@@ -77,13 +79,15 @@ public class Main {
Logger.printStatusTime("Flushed document cache");
Logger.printStatus("Gathering statistics.");
- for (DataWriter dw : dws) {
- String statsString = dw.getStatsString();
+ for (StatusUpdater su : sus) {
+ String statsString = su.getStatsString();
if (statsString != null) {
- Logger.printStatistics(dw.getClass().getSimpleName(),
+ Logger.printStatistics(su.getClass().getSimpleName(),
statsString);
}
}
+ /* TODO Print status updater statistics once all data writers have
+ * been separated. */
Logger.printStatistics("Descriptor source", dso.getStatsString());
Logger.printStatistics("Document store", ds.getStatsString());
Logger.printStatistics("GeoIP lookup service", ls.getStatsString());
diff --git a/src/org/torproject/onionoo/NodeDataWriter.java b/src/org/torproject/onionoo/NodeDataWriter.java
index b59500a..5941b1c 100644
--- a/src/org/torproject/onionoo/NodeDataWriter.java
+++ b/src/org/torproject/onionoo/NodeDataWriter.java
@@ -33,8 +33,8 @@ import org.torproject.onionoo.LookupService.LookupResult;
* The parts of details files coming from server descriptors always come
* from the last known descriptor of a relay or bridge, not from the
* descriptor that was last referenced in a network status. */
-public class NodeDataWriter implements DataWriter, DescriptorListener,
- FingerprintListener {
+public class NodeDataWriter implements DescriptorListener, StatusUpdater,
+ FingerprintListener, DocumentWriter {
private DescriptorSource descriptorSource;
@@ -148,7 +148,7 @@ public class NodeDataWriter implements DataWriter, DescriptorListener,
Logger.printStatusTime("Wrote out summary");
}
- public void updateDocuments() {
+ public void writeDocuments() {
this.writeOutDetails();
Logger.printStatusTime("Wrote detail data files");
}
diff --git a/src/org/torproject/onionoo/StatusUpdater.java b/src/org/torproject/onionoo/StatusUpdater.java
new file mode 100644
index 0000000..fb82182
--- /dev/null
+++ b/src/org/torproject/onionoo/StatusUpdater.java
@@ -0,0 +1,11 @@
+/* Copyright 2014 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.onionoo;
+
+/* Updates internal status files from processed descriptors.  One of
+ * the two interfaces replacing the former DataWriter interface (the
+ * other being DocumentWriter). */
+public interface StatusUpdater {
+
+  /* Update all internal status files handled by this updater. */
+  public abstract void updateStatuses();
+
+  /* Return a statistics string for logging; may be null when there
+   * are no statistics to report (callers null-check before logging). */
+  public abstract String getStatsString();
+}
+}
+
diff --git a/src/org/torproject/onionoo/UptimeDataWriter.java b/src/org/torproject/onionoo/UptimeDataWriter.java
index e2b1bee..4692b18 100644
--- a/src/org/torproject/onionoo/UptimeDataWriter.java
+++ b/src/org/torproject/onionoo/UptimeDataWriter.java
@@ -20,8 +20,8 @@ import org.torproject.descriptor.Descriptor;
import org.torproject.descriptor.NetworkStatusEntry;
import org.torproject.descriptor.RelayNetworkStatusConsensus;
-public class UptimeDataWriter implements DataWriter, DescriptorListener,
- FingerprintListener {
+public class UptimeDataWriter implements DescriptorListener,
+ StatusUpdater, FingerprintListener, DocumentWriter {
private DescriptorSource descriptorSource;
@@ -305,7 +305,7 @@ public class UptimeDataWriter implements DataWriter, DescriptorListener,
}
}
- public void updateDocuments() {
+ public void writeDocuments() {
SortedSet<UptimeHistory>
knownRelayStatuses = new TreeSet<UptimeHistory>(),
knownBridgeStatuses = new TreeSet<UptimeHistory>();
diff --git a/src/org/torproject/onionoo/WeightsDataWriter.java b/src/org/torproject/onionoo/WeightsDataWriter.java
index 21c3272..d230662 100644
--- a/src/org/torproject/onionoo/WeightsDataWriter.java
+++ b/src/org/torproject/onionoo/WeightsDataWriter.java
@@ -25,8 +25,8 @@ import org.torproject.descriptor.NetworkStatusEntry;
import org.torproject.descriptor.RelayNetworkStatusConsensus;
import org.torproject.descriptor.ServerDescriptor;
-public class WeightsDataWriter implements DataWriter, DescriptorListener,
- FingerprintListener {
+public class WeightsDataWriter implements DescriptorListener,
+ StatusUpdater, FingerprintListener, DocumentWriter {
private DescriptorSource descriptorSource;
@@ -73,7 +73,7 @@ public class WeightsDataWriter implements DataWriter, DescriptorListener,
Logger.printStatusTime("Updated weights status files");
}
- public void updateDocuments() {
+ public void writeDocuments() {
this.writeWeightsDataFiles();
Logger.printStatusTime("Wrote weights document files");
}
1
0