tor-commits
Threads by month
- ----- 2025 -----
- June
- May
- April
- March
- February
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
June 2013
- 19 participants
- 1571 discussions

[tor-browser-bundle/master] Force locale to be 'C' for purposes of deterministic sort.
by mikeperry@torproject.org 28 Jun '13
by mikeperry@torproject.org 28 Jun '13
28 Jun '13
commit 144fac56e4b4cb5ba29f92448463e2bfa5e73627
Author: Mike Perry <mikeperry-git(a)torproject.org>
Date: Fri Jun 28 12:36:08 2013 -0700
Force locale to be 'C' for purposes of deterministic sort.
Non-English builders were sometimes sorting in their native locales, and
sometimes not. I have no idea what was up with that...
---
gitian/build-helpers/dzip.sh | 3 ++-
gitian/build-helpers/re-dzip.sh | 3 ++-
gitian/descriptors/linux/gitian-bundle.yml | 1 +
gitian/descriptors/linux/gitian-firefox.yml | 1 +
gitian/descriptors/linux/gitian-tor.yml | 1 +
gitian/descriptors/mac/gitian-bundle.yml | 1 +
gitian/descriptors/mac/gitian-firefox.yml | 1 +
gitian/descriptors/mac/gitian-tor.yml | 1 +
gitian/descriptors/windows/gitian-bundle.yml | 1 +
gitian/descriptors/windows/gitian-firefox.yml | 1 +
gitian/descriptors/windows/gitian-tor.yml | 1 +
11 files changed, 13 insertions(+), 2 deletions(-)
diff --git a/gitian/build-helpers/dzip.sh b/gitian/build-helpers/dzip.sh
index daac42b..5f20abf 100755
--- a/gitian/build-helpers/dzip.sh
+++ b/gitian/build-helpers/dzip.sh
@@ -1,5 +1,6 @@
#!/bin/sh
-# Crappy determistic zip wrapper
+# Crappy deterministic zip wrapper
+export LC_ALL=C
ZIPFILE=$1
shift
diff --git a/gitian/build-helpers/re-dzip.sh b/gitian/build-helpers/re-dzip.sh
index 0367844..78d1d8d 100755
--- a/gitian/build-helpers/re-dzip.sh
+++ b/gitian/build-helpers/re-dzip.sh
@@ -1,5 +1,6 @@
#!/bin/sh
-# Crappy determistic zip repackager
+# Crappy deterministic zip repackager
+export LC_ALL=C
ZIPFILE=`basename $1`
diff --git a/gitian/descriptors/linux/gitian-bundle.yml b/gitian/descriptors/linux/gitian-bundle.yml
index 7d61b94..b733766 100644
--- a/gitian/descriptors/linux/gitian-bundle.yml
+++ b/gitian/descriptors/linux/gitian-bundle.yml
@@ -43,6 +43,7 @@ script: |
export LD_PRELOAD=/usr/lib/faketime/libfaketime.so.1
export FAKETIME=$REFERENCE_DATETIME
export TZ=UTC
+ export LC_ALL=C
export TORBROWSER_VERSION=`cat bare-version`
umask 0022
#
diff --git a/gitian/descriptors/linux/gitian-firefox.yml b/gitian/descriptors/linux/gitian-firefox.yml
index 41a5562..f45d71e 100644
--- a/gitian/descriptors/linux/gitian-firefox.yml
+++ b/gitian/descriptors/linux/gitian-firefox.yml
@@ -39,6 +39,7 @@ script: |
export TZ=UTC
export LD_PRELOAD=/usr/lib/faketime/libfaketime.so.1
export FAKETIME=$REFERENCE_DATETIME
+ export LC_ALL=C
umask 0022
#
# Config options for hardening-wrapper
diff --git a/gitian/descriptors/linux/gitian-tor.yml b/gitian/descriptors/linux/gitian-tor.yml
index 2ab9b20..4a4f7e4 100644
--- a/gitian/descriptors/linux/gitian-tor.yml
+++ b/gitian/descriptors/linux/gitian-tor.yml
@@ -31,6 +31,7 @@ script: |
export LD_PRELOAD=/usr/lib/faketime/libfaketime.so.1
export FAKETIME=$REFERENCE_DATETIME
export TZ=UTC
+ export LC_ALL=C
umask 0022
#
# Config options for hardening-wrapper
diff --git a/gitian/descriptors/mac/gitian-bundle.yml b/gitian/descriptors/mac/gitian-bundle.yml
index d696fdb..1871ff9 100644
--- a/gitian/descriptors/mac/gitian-bundle.yml
+++ b/gitian/descriptors/mac/gitian-bundle.yml
@@ -40,6 +40,7 @@ script: |
export FAKETIME=$REFERENCE_DATETIME
export TZ=UTC
export TORBROWSER_VERSION=`cat bare-version`
+ export LC_ALL=C
umask 0022
#
mkdir -p $OUTDIR/
diff --git a/gitian/descriptors/mac/gitian-firefox.yml b/gitian/descriptors/mac/gitian-firefox.yml
index b7361be..8a683aa 100644
--- a/gitian/descriptors/mac/gitian-firefox.yml
+++ b/gitian/descriptors/mac/gitian-firefox.yml
@@ -37,6 +37,7 @@ script: |
export CXXFLAGS=$CFLAGS
export LDFLAGS=$CFLAGS
export PATH="$PATH:/usr/apple-osx/bin/"
+ export LC_ALL=C
umask 0022
#
mkdir -p $INSTDIR/TorBrowser.app/Contents/MacOS/
diff --git a/gitian/descriptors/mac/gitian-tor.yml b/gitian/descriptors/mac/gitian-tor.yml
index d2ccd90..f3f5b91 100644
--- a/gitian/descriptors/mac/gitian-tor.yml
+++ b/gitian/descriptors/mac/gitian-tor.yml
@@ -34,6 +34,7 @@ script: |
export LD_PRELOAD=/usr/lib/faketime/libfaketime.so.1
export FAKETIME=$REFERENCE_DATETIME
export TZ=UTC
+ export LC_ALL=C
umask 0022
#
#export CFLAGS="-isysroot /usr/lib/apple/SDKs/MacOSX10.6.sdk/"
diff --git a/gitian/descriptors/windows/gitian-bundle.yml b/gitian/descriptors/windows/gitian-bundle.yml
index 714a173..c4a686b 100644
--- a/gitian/descriptors/windows/gitian-bundle.yml
+++ b/gitian/descriptors/windows/gitian-bundle.yml
@@ -42,6 +42,7 @@ script: |
export FAKETIME=$REFERENCE_DATETIME
export TZ=UTC
export TORBROWSER_VERSION=`cat bare-version`
+ export LC_ALL=C
umask 0022
#
mkdir -p $OUTDIR/
diff --git a/gitian/descriptors/windows/gitian-firefox.yml b/gitian/descriptors/windows/gitian-firefox.yml
index a881440..af827c9 100644
--- a/gitian/descriptors/windows/gitian-firefox.yml
+++ b/gitian/descriptors/windows/gitian-firefox.yml
@@ -33,6 +33,7 @@ script: |
export LD_PRELOAD=/usr/lib/faketime/libfaketime.so.1
export FAKETIME=$REFERENCE_DATETIME
export TZ=UTC
+ export LC_ALL=C
umask 0022
#
mkdir -p $INSTDIR/Data/profile/preferences
diff --git a/gitian/descriptors/windows/gitian-tor.yml b/gitian/descriptors/windows/gitian-tor.yml
index aec8c30..e0cb3b5 100644
--- a/gitian/descriptors/windows/gitian-tor.yml
+++ b/gitian/descriptors/windows/gitian-tor.yml
@@ -30,6 +30,7 @@ script: |
export LD_PRELOAD=/usr/lib/faketime/libfaketime.so.1
export FAKETIME=$REFERENCE_DATETIME
export TZ=UTC
+ export LC_ALL=C
export CFLAGS="-mwindows"
export LDFLAGS="-mwindows"
# XXX: Hardening options cause the exe's to crash.. not sure why
1
0

28 Jun '13
commit c728cc6871664c61cd5efbc672282160e7f511de
Author: David Fifield <david(a)bamsoftware.com>
Date: Fri Jun 28 09:52:49 2013 -0700
Add another public key pin for www.google.com.
1e3f66cfa0eb03136297fdb238ad6619c30ff375. It shouldn't be necessary to
update this so frequently. See
https://trac.torproject.org/projects/tor/ticket/9167 for a ticket to
find out what's going wrong.
---
flashproxy-reg-appspot | 1 +
1 file changed, 1 insertion(+)
diff --git a/flashproxy-reg-appspot b/flashproxy-reg-appspot
index bb9a610..27bb09b 100755
--- a/flashproxy-reg-appspot
+++ b/flashproxy-reg-appspot
@@ -66,6 +66,7 @@ PUBKEY_SHA1 = tuple(x.decode("hex") for x in (
"c70ccd442ff4528c603aefef85206fd693990e09",
"1697e17a8a3317f031721b7b6293cd50643bbbd3",
"291e750bafedac444486327e50f26f64d840991a",
+ "1e3f66cfa0eb03136297fdb238ad6619c30ff375",
))
class options(object):
1
0

28 Jun '13
commit add5e770ef22a6fbf95bb0ee2f4bbb13094b942e
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Fri Jun 28 19:03:25 2013 +0200
Add Runa's TBB forensics analysis.
---
techreports.bib | 11 +++++++++++
1 file changed, 11 insertions(+)
diff --git a/techreports.bib b/techreports.bib
index 523a02d..e81e948 100644
--- a/techreports.bib
+++ b/techreports.bib
@@ -1,3 +1,14 @@
+@techreport{tor-2013-06-001,
+ author = {Runa A. Sandvik},
+ title = {Forensic Analysis of the {Tor Browser Bundle} on {OS X},
+ {Linux}, and {Windows}},
+ institution = {The Tor Project},
+ number = {2013-06-001},
+ year = {2013},
+ month = {June},
+ url = {https://research.torproject.org/techreports/tbb-forensic-analysis-2013-06-28.pdf},
+}
+
@techreport{tor-2013-02-001,
author = {Philipp Winter},
title = {Design Requirements for a {Tor} Censorship Analysis Tool},
1
0

28 Jun '13
commit 3c6e9e3e5876280616cba520744fe5386b4a318d
Author: Runa A. Sandvik <runa.sandvik(a)gmail.com>
Date: Fri Jun 28 12:18:00 2013 -0400
add tbb-forensic-analysis report
---
2013/tbb-forensic-analysis/.gitignore | 3 +
.../tbb-forensic-analysis.tex | 654 ++++++++++++++++++++
2013/tbb-forensic-analysis/tortechrep.cls | 1 +
3 files changed, 658 insertions(+)
diff --git a/2013/tbb-forensic-analysis/.gitignore b/2013/tbb-forensic-analysis/.gitignore
new file mode 100644
index 0000000..622d4bc
--- /dev/null
+++ b/2013/tbb-forensic-analysis/.gitignore
@@ -0,0 +1,3 @@
+tbb-forensic-analysis.pdf
+tbb-forensic-analysis-2013-06-28.pdf
+
diff --git a/2013/tbb-forensic-analysis/tbb-forensic-analysis.tex b/2013/tbb-forensic-analysis/tbb-forensic-analysis.tex
new file mode 100644
index 0000000..bb022df
--- /dev/null
+++ b/2013/tbb-forensic-analysis/tbb-forensic-analysis.tex
@@ -0,0 +1,654 @@
+\documentclass{tortechrep}
+\usepackage{url}
+\usepackage{graphicx}
+\usepackage{enumerate}
+\usepackage{hyperref}
+
+\begin{document}
+
+\title{
+ Forensic Analysis of the Tor Browser Bundle \\
+ on OS X, Linux, and Windows
+}
+
+\author{Runa A. Sandvik}
+
+\contact{\href{mailto:runa@torproject.org}{runa@torproject.org}}
+\reportid{2013-06-001}
+\date{June 28, 2013}
+
+\maketitle
+
+\section{Introduction}
+% motivation
+With an estimated 100,000 downloads every
+month\footnote{\url{https://webstats.torproject.org/webalizer/www.torproject.org/usage\_201305.html}}
+the Tor Browser Bundle is the most popular software package offered on
+the Tor Project website. A lot of work has been put into making the Tor
+Browser safe for use with Tor\footnote{\url{https://www.torproject.org/projects/torbrowser/design/}},
+including the use of extra patches against this browser to enhance
+privacy and security. The Tor Browser Bundle also aims to ensure that
+the user is able to completely and safely remove the bundle without
+leaving other traces on her computer.
+
+In an effort to further enhance the security of the Tor Browser Bundle,
+we performed a forensic analysis of the bundle (version 2.3.25-6,
+64-bit) on three different operating systems: OS X 10.8, Debian 6.0
+Squeeze Linux, and Windows 7. Our objective was to find traces left by
+the Tor Browser Bundle and then find ways to counter forensic analysis
+in three different scenarios:
+
+\begin{enumerate}[(a)]
+ \item On a machine that the user does not own, such as a machine in
+ a library or Internet caf\'e.
+\end{enumerate}
+\begin{enumerate}[(b)]
+ \item On a machine that the user does own, but does not have
+ administrative rights on.
+\end{enumerate}
+\begin{enumerate}[(c)]
+ \item On a machine that the user does have administrative rights on,
+ but where the user is non-technical and does not know where to
+ find traces of the Tor Browser Bundle or how to remove them.
+\end{enumerate}
+
+In the following, we discuss the objective, scope, and limitations for
+this analysis. We then look into the traces found on the different
+operating systems and suggest possible mitigations for some of them. We
+conclude with ideas for further analysis work.
+
+\section{Scope}
+The primary scope of this forensic analysis was to set up, use, and
+analyze three operating systems for any changes that may have been made
+specifically by the use of the Tor Browser Bundle. We built three
+separate virtual machines, one for each operating system, with default
+installation settings. We did not download the Tor Browser Bundle using
+a browser, but instead connected an external drive which we then copied
+the bundle from. We made a decision to only consider traces left by the
+Tor Browser Bundle after the bundle had been deleted and the system had
+been completely shut down.
+
+\section{Limitations}
+The objective, scope, and tools used during this analysis introduced a
+few limitations that we feel is worth considering when reading this
+report. Additionally, we had to assume a number of things about the end
+user, her system, and how she is using the Tor Browser Bundle.
+
+\subsection{Objective}
+The objective assumes that the user either does not have administrative
+rights on the machine, or does not know how to find and remove traces of
+the Tor Browser Bundle. A technical user with administrative rights on
+her system will be able to mitigate a number of the traces found.
+
+\subsection{Scope}
+All three operating systems were installed with default settings and
+values. The Tor Browser Bundle was copied from an attached external
+drive to the user's Desktop or home directory. Once the user finished
+browsing, the Tor Browser Bundle directory and archive was moved to the
+trash can, and the trash can was then emptied. The system was completely
+shut down once the bundle had been deleted.
+
+We did not consider traces which are not directly related to the Tor
+Browser Bundle, such as the presence of an external drive. Additionally,
+we did not consider traces left after using the Tor Browser Bundle while
+the bundle was still present on the system, or the system had not been
+completely shut down.
+
+We believe it is likely that a different scenario would reveal
+additional traces of the Tor Browser Bundle on the user's system.
+
+\subsection{Tools}
+We used a range of different tools to perform the forensic analysis, all
+of which are free and available online. The following three tools were
+all used both before and after we ran the Tor Browser Bundle:
+
+\begin{itemize}
+ \item
+ \textbf{dd}\footnote{\url{http://www.debianhelp.co.uk/ddcommand.htm}} -
+ create a backup image of the virtual drive.
+ \item
+ \textbf{rsync}\footnote{\url{http://packages.debian.org/squeeze/rsync}} -
+ copy all the files on the system over to an external drive.
+ \item \textbf{md5deep} and
+ \textbf{hashdeep}\footnote{\url{http://packages.debian.org/squeeze/md5deep}}
+ - compute hashes for every file on the drive, and later compare hashes
+ of the clean image against hashes of the tainted image. A new or
+ changed hash indicates a new or changed file.
+\end{itemize}
+
+We also performed a run-time analysis of the Tor Browser Bundle on
+Windows 7 using
+Noriben\footnote{\url{https://www.novainfosec.com/2013/04/17/noriben-your-personal-portable-malware-sandbox/}}
+and
+procmon\footnote{\url{http://technet.microsoft.com/en-us/sysinternals/bb896645.aspx}}.
+This allowed us to create a report of everything the Tor Browser Bundle
+did while it was running. A similar analysis was not performed on OS X
+or Linux due to time constraints.
+
+An analyst with access to a different set of tools, such as commercial
+tools, might find traces which we were unable to find.
+
+\section{Process}
+We followed roughly the same testing process for all three operating
+systems. We set up a separate virtual machine for each operating system,
+logged in with the account we created during the installation process,
+installed available updates and shut it down cleanly. We used a normal
+user account on Linux, a non-root administrative account on OS X, and an
+administrative account on Windows.
+
+Once the operating system had been set up, we connected the virtual
+drive to another virtual machine, used dd to create an image of the
+drive, used hashdeep to compute hashes for every file on the drive, and
+then rsync to copy all the files over to an external drive. It is
+important to note that we used hashdeep and rsync on the original
+virtual drive, not on the copy we created with dd.
+
+After having secured a copy of the clean virtual machine, we rebooted
+the system, connected an external drive, and copied the Tor Browser
+Bundle from the external drive to the Desktop or user's home directory.
+
+We started the Tor Browser Bundle by clicking on the package archive to
+extract it, and then clicking on the Tor Browser Bundle executable to
+run it. On Debian Linux, we also used the command line to extract the
+archive with \textit{tar -zxvf} and start the bundle with
+\textit{./start-tor-browser}.
+
+We waited for the Tor Browser to confirm we were connected to the
+network by loading \url{https://check.torproject.org/}. We then browsed a
+couple of different pages and clicked on a few links before shutting it
+down by closing the Tor Browser and clicking on the \textit{Exit}-button in
+Vidalia. The Tor Browser did not crash and we did not see any error
+messages.
+
+We deleted the Tor Browser Bundle folder and package archive by moving
+all components into the Trash/Recycle Bin, clicking on it and choosing
+Empty Trash/Empty Recycle Bin. On Linux, we also deleted the Tor Browser
+folder and package archive using \textit{rm -rf} on the command line.
+
+We repeated the steps with dd, rsync, and hashdeep to create a copy of
+the tainted virtual machine. On Windows, we also used Noriben and
+procmon as previously noted.
+
+\section{Results}
+The following sections list the traces found which directly relate to
+the Tor Browser Bundle. Each issue has its own ticket in the bug
+tracker\footnote{\url{https://bugs.torproject.org/}}. The full list of
+traces can be found in
+\href{https://bugs.torproject.org/8166}{\#8166} for
+Linux,
+\href{https://bugs.torproject.org/6846}{\#6846} for
+OS X, and
+\href{https://bugs.torproject.org/6845}{\#6845} for
+Windows.
+
+The majority of the issues found show traces of the Tor Browser Bundle
+package on the user's system. \textit{Issue 6} describes the only known
+instance of browsing history leakage, other than perhaps swap
+files/partitions.
+
+A number of the issues below are related to default operating system
+behavior, such as the use of Spotlight on OS X and Windows Search. The
+easiest way to avoid leaving traces on a computer system is to use
+\textit{The Amnesic Incognito Live System (TAILS)}\footnote{\url{https://tails.boum.org/}}.
+
+\subsection{OS X}
+\subsubsection{Issue 1: Apple System Log (ASL)}
+The Apple System Log is a background process that allows messages from
+different parts of the operating system to be recorded in several ways.
+We were able to find traces of the Tor Browser Bundle in the following
+files:
+
+\begin{itemize}
+ \item /var/log/asl/2013.05.22.U0.G80.asl
+ \item /var/log/asl/2013.05.22.U501.asl
+\end{itemize}
+
+We were not able to examine the following files, but they may contain
+traces of the bundle:
+
+\begin{itemize}
+ \item /var/log/asl/StoreData
+ \item /var/log/asl/SweepStore
+\end{itemize}
+
+This issue has been documented as
+\href{https://bugs.torproject.org/8982}{\#8982}.
+
+\subsubsection{Issue 2: Crash Reporter and Diagnostic Messages}
+The Crash Reporter on OS X will collect information about any
+application that crashes or hangs. We did not encounter any problems
+when running the Tor Browser Bundle, but we still found traces of the
+bundle in the following files:
+
+\begin{itemize}
+ \item /Library/Application Support/CrashReporter/ \\ Intervals\_00000000-0000-1000-8000-000C2976590B.plist
+ \item /var/log/DiagnosticMessages/2013.05.22.asl
+\end{itemize}
+
+We were not able to examine the following file, but it might contain
+traces of the bundle:
+
+\begin{itemize}
+ \item /var/log/DiagnosticMessages/StoreData
+\end{itemize}
+
+This issue has been documented as
+\href{https://bugs.torproject.org/8983}{\#8983}.
+
+\subsubsection{Issue 3: FSEvents API}
+The FSEvents API allows applications to register for notifications of
+changes to a given directory tree. Whenever the filesystem is changed,
+the kernel passes notifications to a process called fseventsd.
+
+The following file contains the path to the attached external drive, the
+path to the Tor Browser Bundle on the Desktop, and the path to the Tor
+Browser Bundle in the Trash:
+
+\begin{itemize}
+ \item /.fseventsd/0000000000172019
+\end{itemize}
+
+We were not able to examine the other files in the \textit{.fseventsd}
+directory, which may also contain traces of the bundle. This issue has
+been documented as
+\href{https://bugs.torproject.org/8984}{\#8984}.
+
+\subsubsection{Issue 4: HFS+}
+HFS+ is the default filesystem on OS X; it supports journaling, quotas,
+Finder information in metadata, hard and symbolic links, aliases, etc.
+HFS+ also supports hot file clustering, which tracks read-only files
+that are frequently requested and then moves them into a "hot zone". The
+hot file clustering scheme uses an on-disk B-Tree file for tracking.
+
+We were not able to examine the following files, which may contain
+traces of the bundle:
+
+\begin{itemize}
+ \item /.hotfiles.btree
+ \item /.journal
+\end{itemize}
+
+This issue has been documented as
+\href{https://bugs.torproject.org/8985}{\#8985}.
+
+\subsubsection{Issue 5: Preferences}
+OS X applications store preference settings in plist files, and the
+files below are related to system fonts, the file manager, recent items,
+and the Tor Browser Bundle. These files all contain traces of the Tor
+Browser Bundle:
+
+\begin{itemize}
+ \item /Users/runa/Library/Preferences/com.apple.ATS.plist
+ \item /Users/runa/Library/Preferences/com.apple.finder.plist
+ \item /Users/runa/Library/Preferences/com.apple.recentitems.plist
+ \item /Users/runa/Library/Preferences/org.mozilla.torbrowser.plist
+\end{itemize}
+
+This issue has been documented as
+\href{https://bugs.torproject.org/8986}{\#8986}.
+
+\subsubsection{Issue 6: Saved Application State}
+Resume is one of the new features in OS X 10.7 and 10.8. The feature
+allows applications to save their last known state when they are closed,
+and then return to this state when they are later reopened.
+
+While the Tor Browser does not use this feature, it does leak
+information in the files which are written to the
+\textit{/Users/runa/Library/Saved Application State/} directory:
+
+\begin{itemize}
+ \item /Users/runa/Library/Saved Application State/org.mozilla.torbrowser.savedState/data.data
+ \item /Users/runa/Library/Saved Application State/org.mozilla.torbrowser.savedState/window\_3.data
+ \item /Users/runa/Library/Saved Application State/org.mozilla.torbrowser.savedState/windows.plist
+\end{itemize}
+
+The \textit{windows.plist} file contains the HTML title tag of the last
+active tab in the Tor Browser (or currently active tab, if the browser
+is still open). This has been documented as
+\href{https://bugs.torproject.org/8987}{\#8987}.
+
+Thanks to community review of our findings, we have a potential fix for
+this issue which we will include in version 3.0alpha2 of the Tor Browser
+Bundle.
+
+\subsubsection{Issue 7: Spotlight}
+Spotlight, and the Metadata Server (mds), indexes all items and files on
+a system and allows the user to perform system-wide searches for all
+sorts of items; documents, pictures, applications, system preferences,
+etc.
+
+We were not able to examine the following files, but it is likely that
+Spotlight and mds picked up the Tor Browser Bundle at some point:
+
+\begin{itemize}
+ \item /.Spotlight-V100/Store-V2/5D1FD6C7-8789-4860-9B72-6325801BFADD/.store.db
+ \item /.Spotlight-V100/Store-V2/5D1FD6C7-8789-4860-9B72-6325801BFADD/0.indexGroups
+ \item /.Spotlight-V100/Store-V2/5D1FD6C7-8789-4860-9B72-6325801BFADD/0.indexHead
+ \item /.Spotlight-V100/Store-V2/5D1FD6C7-8789-4860-9B72-6325801BFADD/0.indexIds
+ \item /.Spotlight-V100/Store-V2/5D1FD6C7-8789-4860-9B72-6325801BFADD/0.indexUpdates
+ \item /.Spotlight-V100/Store-V2/5D1FD6C7-8789-4860-9B72-6325801BFADD/journalAttr.3
+ \item /.Spotlight-V100/Store-V2/5D1FD6C7-8789-4860-9B72-6325801BFADD/journals.live/journal.20916
+ \item /.Spotlight-V100/Store-V2/5D1FD6C7-8789-4860-9B72-6325801BFADD/journals.live/journal.21051
+ \item /.Spotlight-V100/Store-V2/5D1FD6C7-8789-4860-9B72-6325801BFADD/live.0.indexGroups
+ \item /.Spotlight-V100/Store-V2/5D1FD6C7-8789-4860-9B72-6325801BFADD/live.0.indexHead
+ \item /.Spotlight-V100/Store-V2/5D1FD6C7-8789-4860-9B72-6325801BFADD/live.0.indexIds
+ \item /.Spotlight-V100/Store-V2/5D1FD6C7-8789-4860-9B72-6325801BFADD/live.0.indexUpdates
+ \item /.Spotlight-V100/Store-V2/5D1FD6C7-8789-4860-9B72-6325801BFADD/permStore
+ \item /.Spotlight-V100/Store-V2/5D1FD6C7-8789-4860-9B72-6325801BFADD/reverseDirectoryStore
+ \item /.Spotlight-V100/Store-V2/5D1FD6C7-8789-4860-9B72-6325801BFADD/reverseStore.updates
+ \item /.Spotlight-V100/Store-V2/5D1FD6C7-8789-4860-9B72-6325801BFADD/shutdown\_time
+ \item /.Spotlight-V100/Store-V2/5D1FD6C7-8789-4860-9B72-6325801BFADD/store.updates
+ \item /.Spotlight-V100/Store-V2/5D1FD6C7-8789-4860-9B72-6325801BFADD/tmp.spotlight.loc
+ \item /var/db/mds/messages/se\_SecurityMessages
+\end{itemize}
+
+This issue has been documented as
+\href{https://bugs.torproject.org/8988}{\#8988}.
+
+\subsubsection{Issue 8: Swap}
+OS X relies on swap files and paging for memory and cache management. We
+were not able to examine the swap file, but it is likely that the
+following file contains traces of the bundle:
+
+\begin{itemize}
+ \item /var/vm/swapfile0
+\end{itemize}
+
+This issue has been documented as
+\href{https://bugs.torproject.org/8989}{\#8989}.
+
+\subsubsection{Issue 9: Temporary data}
+OS X stores per-user temporary files and caches in \textit{/var/folders/}. The
+following files contain the path to the Tor Browser Bundle on the Desktop and
+in the Trash:
+
+\begin{itemize}
+ \item /var/folders/fb/v5wqpgls029d8tp\_pcjy0yth0000gn/C/com.apple.LaunchServices-036501.csstore
+ \item /var/folders/fb/v5wqpgls029d8tp\_pcjy0yth0000gn/C/ \\ com.apple.QuickLook.thumbnailcache/index.sqlite
+ \item /var/folders/zz/zyxvpxvq6csfxvn\_n0000000000000/C/com.apple.LaunchServices-0360.csstore
+ \item /var/folders/fb/v5wqpgls029d8tp\_pcjy0yth0000gn/C/ \\ com.apple.QuickLook.thumbnailcache/thumbnails.data
+\end{itemize}
+
+These files also contain strings such as
+\textit{org.torproject.torbrowserbundle}, \textit{org.mozilla.torbrowser},
+\textit{torbrowser\_en-us.app}, \textit{torbrowser.app},
+\textit{net.vidalia-project.vidalia}, and \textit{vidalia.app}.
+
+We were not able to examine the last file, \textit{thumbnails.data}, but it
+might contain traces of the bundle as well. This issue has been
+documented as
+\href{https://bugs.torproject.org/8990}{\#8990}.
+
+\subsection{Debian GNU/Linux with GNOME}
+\subsubsection{Issue 10: Bash History}
+Bash is the default shell/command processor on Linux and keeps a record
+of commands typed by the user. The file below contains lines showing we
+extracted and ran the Tor Browser Bundle. This trace is specific to the
+user shell being \textit{/bin/bash}. Other shells and window managers
+will give different results:
+
+\begin{itemize}
+ \item /home/runa/.bash\_history:
+\end{itemize}
+
+This issue has been documented as
+\href{https://bugs.torproject.org/8697}{\#8697}.
+
+\subsubsection{Issue 11: GVFS}
+GVFS is the virtual filesystem for the GNOME desktop. This result will
+vary depending on the window manager used. The following file contains
+the filename of the Tor Browser Bundle tarball,
+\textit{tor-browser-gnu-linux-x86\_64-2.3.25-5-dev-en-US.tar.gz}:
+
+\begin{itemize}
+ \item /home/runa/.local/share/gvfs-metadata/home
+\end{itemize}
+
+This issue has been documented as
+\href{https://bugs.torproject.org/8695}{\#8695}.
+
+After deleting the Tor Browser Bundle by moving the folder and package
+archive into the Trash/Recycle Bin, clicking on it and choosing Empty
+Trash/Empty Recycle Bin, we noticed that the following file contained
+lines indicating that the Tor Browser Bundle had been deleted:
+
+\begin{itemize}
+ \item /home/runa/.local/share/gvfs-metadata/home-c0ca7993.log
+\end{itemize}
+
+Traces in this file include lines such as
+\textit{/.local/share/Trash/expunged/3864782161/start-tor-browser} and
+\textit{/.local/share/Trash/expunged/3864782161/App/tor}. This issue has
+been documented as
+\href{https://bugs.torproject.org/8707}{\#8707}.
+
+\subsubsection{Issue 12: Recently Used}
+The following file contains information about recently used files,
+including the Tor Browser Bundle. The file contains the filename of the
+Tor Browser Bundle tarball,
+\textit{tor-browser-gnu-linux-x86\_64-2.3.25-5-dev-en-US.tar.gz}, as
+well as the time and date the bundle was added, modified, and visited:
+
+\begin{itemize}
+ \item /home/runa/.recently-used.xbel
+\end{itemize}
+
+The file \textit{.recently-used} could also exist. This issue has been
+documented as \href{https://bugs.torproject.org/8706}{\#8706}.
+
+\subsubsection{Issue 13: X Session Manager}
+In the X Window System, an X session manager is a session management
+program, a program that can save and restore the current state of a set
+of running applications. The file listed below contains the following
+string, \textit{"Window manager warning: Buggy client sent a
+\_NET\_ACTIVE\_WINDOW message with a timestamp of 0 for 0x3800089 (Tor
+Browse)"}:
+
+\begin{itemize}
+ \item /home/runa/.xsession-errors
+\end{itemize}
+
+The file \textit{.xsession-errors.old} could also exist. This issue has been
+documented as \href{https://bugs.torproject.org/8696}{\#8696}.
+
+\subsection{Windows}
+\subsubsection{Issue 14: Prefetch}
+Windows keeps track of the way the system starts and which programs the
+user commonly opens. This information is saved as a number of small
+files in the \textit{Prefetch} folder. The files below may contain data
+and elements of executable code:
+
+\begin{itemize}
+ \item C:\textbackslash{}Windows\textbackslash{}Prefetch\textbackslash{}START TOR BROWSER.EXE-F5557FAC.pf
+ \item C:\textbackslash{}Windows\textbackslash{}Prefetch\textbackslash{}TBB-FIREFOX.EXE-350502C5.pf
+ \item C:\textbackslash{}Windows\textbackslash{}Prefetch\textbackslash{}TOR-BROWSER-2.3.25-6\_EN-US.EX-1354A499.pf
+ \item C:\textbackslash{}Windows\textbackslash{}Prefetch\textbackslash{}TOR.EXE-D7159D93.pf
+ \item C:\textbackslash{}Windows\textbackslash{}Prefetch\textbackslash{}VIDALIA.EXE-5167E0BC.pf
+\end{itemize}
+
+The following cache files are most likely similar to prefetch files. We
+were not able to examine these files, but they may contain traces of the
+Tor Browser Bundle:
+
+\begin{itemize}
+ \item C:\textbackslash{}Users\textbackslash{}runa\textbackslash{}AppData\textbackslash{}Local\textbackslash{}Microsoft\textbackslash{}Windows\textbackslash{}Caches\textbackslash{}cversions.1.db
+ \item C:\textbackslash{}Users\textbackslash{}runa\textbackslash{}AppData\textbackslash{}Local\textbackslash{}Microsoft\textbackslash{}Windows\textbackslash{}Caches\{AFBF9F1A-8EE8-4C77-AF34-C647E37CA0D9\}.1.ver0x0000000000000006.db
+ \item C:\textbackslash{}Windows\textbackslash{}AppCompat\textbackslash{}Programs\textbackslash{}RecentFileCache.bcf
+\end{itemize}
+
+This issue has been documented as
+\href{https://bugs.torproject.org/8916}{\#8916}.
+
+\subsubsection{Issue 15: Thumbnail Cache}
+Windows stores thumbnails of graphics files, and certain document and
+movie files, in Thumbnail Cache files. The following files contain the
+Onion Logo icon associated with the Tor Browser Bundle:
+
+\begin{itemize}
+ \item C:\textbackslash{}Users\textbackslash{}Runa\textbackslash{}AppData\textbackslash{}Local\textbackslash{}Microsoft\textbackslash{}Windows\textbackslash{}Explorer\textbackslash{}thumbcache\_32.db
+ \item C:\textbackslash{}Users\textbackslash{}Runa\textbackslash{}AppData\textbackslash{}Local\textbackslash{}Microsoft\textbackslash{}Windows\textbackslash{}Explorer\textbackslash{}thumbcache\_96.db
+ \item C:\textbackslash{}Users\textbackslash{}Runa\textbackslash{}AppData\textbackslash{}Local\textbackslash{}Microsoft\textbackslash{}Windows\textbackslash{}Explorer\textbackslash{}thumbcache\_256.db
+\end{itemize}
+
+Other Thumbnail Cache files, such as \textit{thumbcache\_1024.db},
+\textit{thumbcache\_sr.db}, \textit{thumbcache\_idx.db}, and
+\textit{IconCache.db}, may also contain the Onion Logo icon. This issue
+has been documented as
+\href{https://bugs.torproject.org/8921}{\#8921}.
+
+One possible solution would be to drop the Onion Logo icon and use a
+standard Windows icon instead, assuming this does not confuse our
+Windows users too much.
+
+\subsubsection{Issue 16: Windows Paging File}
 +Microsoft Windows uses a paging file, called \textit{pagefile.sys}, to store
+frames of memory that do not currently fit into physical memory. The
+file \textit{C:\textbackslash{}pagefile.sys} contains information about
+the attached external drive, as well as the filename for the Tor Browser
+Bundle executable. This issue has been documented as
+\href{https://bugs.torproject.org/8918}{\#8918}.
+
+\subsubsection{Issue 17: Windows Registry}
+The Windows Registry is a database that stores various configuration
+settings and options for the operating system. \textit{HKEY\_CURRENT\_USER},
+abbreviated \textit{HKCU}, stores settings that are specific to the currently
+logged-in user. Each user's settings are stored in files called
+\textit{NTUSER.DAT} and \textit{UsrClass.dat}.
+
+The path to the Tor Browser Bundle executable is listed in the following
+two files:
+
+\begin{itemize}
+ \item C:\textbackslash{}Users\textbackslash{}runa\textbackslash{}AppData\textbackslash{}Local\textbackslash{}Microsoft\textbackslash{}Windows\textbackslash{}UsrClass.dat
+ \item C:\textbackslash{}Users\textbackslash{}runa\textbackslash{}AppData\textbackslash{}Local\textbackslash{}Microsoft\textbackslash{}Windows\textbackslash{}UsrClass.dat.LOG1
+\end{itemize}
+
+We did not find traces of the Tor Browser Bundle in any of the
+\textit{NTUSER.DAT} files. It is likely that we would have seen
+different results had we used Windows XP, due to a change in registry
+handling between Windows XP/Vista and Windows 7. This issue has been
+documented as \href{https://bugs.torproject.org/8919}{\#8919}.
+
+\subsubsection{Issue 18: Windows Search}
+Windows Search, which is enabled by default, builds a full-text index of
+files on the computer. One component of Windows Search is the Indexer,
+which crawls the file system on initial setup, and then listens for file
+system notifications to index changed files. Windows Search writes a
+number of files to
+\textit{C:\textbackslash{}ProgramData\textbackslash{}Microsoft\textbackslash{}Search\textbackslash{}Data\textbackslash{}Applications\textbackslash{}Windows\textbackslash{}}:
+
+\begin{itemize}
+ \item
+ C:\textbackslash{}ProgramData\textbackslash{}Microsoft\textbackslash{}Search\textbackslash{}Data\textbackslash{}Applications\textbackslash{}Windows\textbackslash{}
+ \\ GatherLogs\textbackslash{}SystemIndex\textbackslash{}SystemIndex.1.Crwl
+ \item
+ C:\textbackslash{}ProgramData\textbackslash{}Microsoft\textbackslash{}Search\textbackslash{}Data\textbackslash{}Applications\textbackslash{}Windows\textbackslash{}
+ \\ GatherLogs\textbackslash{}SystemIndex\textbackslash{}SystemIndex.1.gthr
+ \item C:\textbackslash{}ProgramData\textbackslash{}Microsoft\textbackslash{}Search\textbackslash{}Data\textbackslash{}Applications\textbackslash{}Windows\textbackslash{}MSS.chk
+ \item C:\textbackslash{}ProgramData\textbackslash{}Microsoft\textbackslash{}Search\textbackslash{}Data\textbackslash{}Applications\textbackslash{}Windows\textbackslash{}MSS.log
+ \item C:\textbackslash{}ProgramData\textbackslash{}Microsoft\textbackslash{}Search\textbackslash{}Data\textbackslash{}Applications\textbackslash{}Windows\textbackslash{}MSS00007.log
+ \item C:\textbackslash{}ProgramData\textbackslash{}Microsoft\textbackslash{}Search\textbackslash{}Data\textbackslash{}Applications\textbackslash{}Windows\textbackslash{}MSS00008.log
+ \item
+ C:\textbackslash{}ProgramData\textbackslash{}Microsoft\textbackslash{}Search\textbackslash{}Data\textbackslash{}Applications\textbackslash{}Windows\textbackslash{}Projects\textbackslash{}SystemIndex\textbackslash{}
+ \\ Indexer\textbackslash{}CiFiles\textbackslash{}00010004.ci
+ \item
+ C:\textbackslash{}ProgramData\textbackslash{}Microsoft\textbackslash{}Search\textbackslash{}Data\textbackslash{}Applications\textbackslash{}Windows\textbackslash{}Projects\textbackslash{}SystemIndex\textbackslash{}
+ \\ Indexer\textbackslash{}CiFiles\textbackslash{}00010004.dir
+ \item
+ C:\textbackslash{}ProgramData\textbackslash{}Microsoft\textbackslash{}Search\textbackslash{}Data\textbackslash{}Applications\textbackslash{}Windows\textbackslash{}Projects\textbackslash{}SystemIndex\textbackslash{}
+ \\ Indexer\textbackslash{}CiFiles\textbackslash{}00010004.wid
+ \item
+ C:\textbackslash{}ProgramData\textbackslash{}Microsoft\textbackslash{}Search\textbackslash{}Data\textbackslash{}Applications\textbackslash{}Windows\textbackslash{}Projects\textbackslash{}SystemIndex\textbackslash{}
+ \\ Indexer\textbackslash{}CiFiles\textbackslash{}00010004.wsb
+ \item
+ C:\textbackslash{}ProgramData\textbackslash{}Microsoft\textbackslash{}Search\textbackslash{}Data\textbackslash{}Applications\textbackslash{}Windows\textbackslash{}Projects\textbackslash{}SystemIndex\textbackslash{}
+ \\ Indexer\textbackslash{}CiFiles\textbackslash{}CiAB0002.001
+ \item
+ C:\textbackslash{}ProgramData\textbackslash{}Microsoft\textbackslash{}Search\textbackslash{}Data\textbackslash{}Applications\textbackslash{}Windows\textbackslash{}Projects\textbackslash{}SystemIndex\textbackslash{}
+ \\ Indexer\textbackslash{}CiFiles\textbackslash{}CiAB0002.002
+ \item
+ C:\textbackslash{}ProgramData\textbackslash{}Microsoft\textbackslash{}Search\textbackslash{}Data\textbackslash{}Applications\textbackslash{}Windows\textbackslash{}Projects\textbackslash{}SystemIndex\textbackslash{}
+ \\ Indexer\textbackslash{}CiFiles\textbackslash{}CiAD0002.001
+ \item
+ C:\textbackslash{}ProgramData\textbackslash{}Microsoft\textbackslash{}Search\textbackslash{}Data\textbackslash{}Applications\textbackslash{}Windows\textbackslash{}Projects\textbackslash{}SystemIndex\textbackslash{}
+ \\ Indexer\textbackslash{}CiFiles\textbackslash{}CiAD0002.002
+ \item
+ C:\textbackslash{}ProgramData\textbackslash{}Microsoft\textbackslash{}Search\textbackslash{}Data\textbackslash{}Applications\textbackslash{}Windows\textbackslash{}Projects\textbackslash{}SystemIndex\textbackslash{}
+ \\ Indexer\textbackslash{}CiFiles\textbackslash{}INDEX.000
+ \item
+ C:\textbackslash{}ProgramData\textbackslash{}Microsoft\textbackslash{}Search\textbackslash{}Data\textbackslash{}Applications\textbackslash{}Windows\textbackslash{}Projects\textbackslash{}SystemIndex\textbackslash{}
+ \\ Indexer\textbackslash{}CiFiles\textbackslash{}INDEX.001
+ \item
+ C:\textbackslash{}ProgramData\textbackslash{}Microsoft\textbackslash{}Search\textbackslash{}Data\textbackslash{}Applications\textbackslash{}Windows\textbackslash{}Projects\textbackslash{}SystemIndex\textbackslash{}
+ \\ Indexer\textbackslash{}CiFiles\textbackslash{}INDEX.002
+ \item
+ C:\textbackslash{}ProgramData\textbackslash{}Microsoft\textbackslash{}Search\textbackslash{}Data\textbackslash{}Applications\textbackslash{}Windows\textbackslash{}Projects\textbackslash{}SystemIndex\textbackslash{}
+ \\ PropMap\textbackslash{}CiPT0000.000
+ \item
+ C:\textbackslash{}ProgramData\textbackslash{}Microsoft\textbackslash{}Search\textbackslash{}Data\textbackslash{}Applications\textbackslash{}Windows\textbackslash{}Projects\textbackslash{}SystemIndex\textbackslash{}
+ \\ PropMap\textbackslash{}CiPT0000.001
+ \item
+ C:\textbackslash{}ProgramData\textbackslash{}Microsoft\textbackslash{}Search\textbackslash{}Data\textbackslash{}Applications\textbackslash{}Windows\textbackslash{}Projects\textbackslash{}SystemIndex\textbackslash{}
+ \\ PropMap\textbackslash{}CiPT0000.002
+ \item
+ C:\textbackslash{}ProgramData\textbackslash{}Microsoft\textbackslash{}Search\textbackslash{}Data\textbackslash{}Applications\textbackslash{}Windows\textbackslash{}Projects\textbackslash{}SystemIndex\textbackslash{}
+ \\ SecStore\textbackslash{}CiST0000.000
+ \item
+ C:\textbackslash{}ProgramData\textbackslash{}Microsoft\textbackslash{}Search\textbackslash{}Data\textbackslash{}Applications\textbackslash{}Windows\textbackslash{}Projects\textbackslash{}SystemIndex\textbackslash{}
+ \\ SecStore\textbackslash{}CiST0000.001
+ \item
+ C:\textbackslash{}ProgramData\textbackslash{}Microsoft\textbackslash{}Search\textbackslash{}Data\textbackslash{}Applications\textbackslash{}Windows\textbackslash{}Projects\textbackslash{}SystemIndex\textbackslash{}
+ \\ SecStore\textbackslash{}CiST0000.002
+ \item C:\textbackslash{}ProgramData\textbackslash{}Microsoft\textbackslash{}Search\textbackslash{}Data\textbackslash{}Applications\textbackslash{}Windows\textbackslash{}Windows.edb
+\end{itemize}
+
+We were not able to examine the Windows Search database files, but
+it is likely that Windows Search picked up the Tor Browser Bundle at
+some point. This issue has been documented as
+\href{https://bugs.torproject.org/8920}{\#8920}.
+
+\section{Further work}
+The Tor Browser Bundle aims to ensure that no traces are left on the
+user's system. However, a number of the traces listed in this report are
+related to default operating system settings, some of which the bundle
+might not be able to remove. We therefore propose the creation of a
+document which lists steps our users can take to mitigate these traces
+on the different operating systems.
+
+The scope of this analysis covered traces left by the Tor Browser Bundle
+itself, not traces left by other applications while downloading the
+bundle. The results in this report would have been slightly different
+had we included traces of downloading the bundle from a browser. We
+propose to expand the scope of a future analysis to also include
+downloading the Tor Browser Bundle with a default browser.
+
+The goal of this analysis was to identify traces left behind by the Tor
+Browser Bundle after extracting, using, and deleting the bundle. The Tor
+Browser Bundle uses Firefox Private Browsing mode by default, which
+should prevent browsing history from being written to disk. We propose
+to watch the Tor Browser Bundle directory itself for browsing history
+leaks, before the bundle is deleted, for example via automated tests to
+watch for regressions by either Mozilla or us.
+
+The forensic analysis was performed with one specific version of the Tor
+Browser Bundle. Other packages, such as the Pluggable Transports Tor
+Browser
+Bundle\footnote{\url{https://www.torproject.org/docs/pluggable-transports.html.en\#download}}
+and the experimental Tor Browser Bundle without
+Vidalia\footnote{\url{https://blog.torproject.org/blog/announcing-tor-browser-bundle-30alpha1}},
+and newer versions of the bundle may leave a different set of traces on
+the user's system. We propose to include forensic analysis in our build
+infrastructure so that we can test a number of Tor Browser Bundle
+packages on a regular basis.
+
+As noted in the tools section, we performed a run-time analysis of the
+Tor Browser Bundle on Windows 7. We were not able to perform a similar
+analysis on OS X and Linux due to time constraints. We propose to
+perform a run-time analysis of the Tor Browser Bundle on OS X and Linux
+to rule out any additional traces.
+
+\section*{Acknowledgments}
+Thanks to Mike Perry, Philipp Winter, and Steve Lord, for providing
+valuable feedback for this technical report.
+
+\end{document}
diff --git a/2013/tbb-forensic-analysis/tortechrep.cls b/2013/tbb-forensic-analysis/tortechrep.cls
new file mode 120000
index 0000000..4c24db2
--- /dev/null
+++ b/2013/tbb-forensic-analysis/tortechrep.cls
@@ -0,0 +1 @@
+../../tortechrep.cls
\ No newline at end of file
1
0

[onionoo/master] Refactor more parts in preparation for moving to a database.
by karsten@torproject.org 28 Jun '13
by karsten@torproject.org 28 Jun '13
28 Jun '13
commit 4ce5a2bb8ab487176642dd0ebfd5ef1d960bc5ea
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Thu Jun 20 18:06:42 2013 +0200
Refactor more parts in preparation for moving to a database.
- Introduce a new document type hierarchy for internal text-based status
documents (*Status) and JSON-formatted output documents (*Document).
Prepares moving all formatting and parsing code to DocumentStore or
Document subtypes, so that *Writer classes won't have to worry about
document formats anymore.
- Rename Node to NodeStatus and make it handle its own formatting and
parsing. DocumentStore now returns NodeStatus objects instead of one
large summary status documents containing all nodes. Prepares moving to
a database design where each NodeStatus object will have its own
database entry. DocumentStore still reads and writes a single summary
status document, so that file formats don't change, but keeps a local
NodeStatus cache in memory and writes this file to disk before exiting.
- Don't share CurrentNodes code between back-end and front-end. Move
back-end-only functionality to new NodeDataWriter and front-end-only
functionality to ResourceServlet.
- Rename DetailDataWriter to DetailsDataWriter for consistency with other
document types.
---
.../torproject/onionoo/BandwidthDataWriter.java | 35 +-
src/org/torproject/onionoo/BandwidthDocument.java | 30 +
src/org/torproject/onionoo/BandwidthStatus.java | 7 +
src/org/torproject/onionoo/CurrentNodes.java | 562 --------------
src/org/torproject/onionoo/DescriptorSource.java | 2 +
src/org/torproject/onionoo/DetailDataWriter.java | 810 -------------------
src/org/torproject/onionoo/DetailsDataWriter.java | 821 ++++++++++++++++++++
src/org/torproject/onionoo/DetailsDocument.java | 71 ++
src/org/torproject/onionoo/Document.java | 8 +
src/org/torproject/onionoo/DocumentStore.java | 360 +++++++--
src/org/torproject/onionoo/LookupService.java | 1 -
src/org/torproject/onionoo/Main.java | 60 +-
src/org/torproject/onionoo/Node.java | 280 -------
src/org/torproject/onionoo/NodeDataWriter.java | 237 ++++++
src/org/torproject/onionoo/NodeStatus.java | 482 ++++++++++++
src/org/torproject/onionoo/ResourceServlet.java | 64 +-
src/org/torproject/onionoo/SummaryDocument.java | 27 +
src/org/torproject/onionoo/UpdateStatus.java | 7 +
src/org/torproject/onionoo/WeightsDataWriter.java | 35 +-
src/org/torproject/onionoo/WeightsDocument.java | 31 +
src/org/torproject/onionoo/WeightsStatus.java | 5 +
21 files changed, 2144 insertions(+), 1791 deletions(-)
diff --git a/src/org/torproject/onionoo/BandwidthDataWriter.java b/src/org/torproject/onionoo/BandwidthDataWriter.java
index 664c050..7203b6d 100644
--- a/src/org/torproject/onionoo/BandwidthDataWriter.java
+++ b/src/org/torproject/onionoo/BandwidthDataWriter.java
@@ -42,6 +42,8 @@ public class BandwidthDataWriter {
private DocumentStore documentStore;
+ private SortedSet<String> currentFingerprints = new TreeSet<String>();
+
public BandwidthDataWriter(DescriptorSource descriptorSource,
DocumentStore documentStore) {
this.descriptorSource = descriptorSource;
@@ -55,12 +57,9 @@ public class BandwidthDataWriter {
this.dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
}
- private SortedSet<String> currentFingerprints = new TreeSet<String>();
- public void setCurrentRelays(SortedMap<String, Node> currentRelays) {
- this.currentFingerprints.addAll(currentRelays.keySet());
- }
- public void setCurrentBridges(SortedMap<String, Node> currentBridges) {
- this.currentFingerprints.addAll(currentBridges.keySet());
+ public void setCurrentNodes(
+ SortedMap<String, NodeStatus> currentNodes) {
+ this.currentFingerprints.addAll(currentNodes.keySet());
}
public void readExtraInfoDescriptors() {
@@ -131,11 +130,12 @@ public class BandwidthDataWriter {
private void readHistoryFromDisk(String fingerprint,
SortedMap<Long, long[]> writeHistory,
SortedMap<Long, long[]> readHistory) {
- String historyString = this.documentStore.retrieve(
- DocumentType.STATUS_BANDWIDTH, fingerprint);
- if (historyString == null) {
+ BandwidthStatus bandwidthStatus = this.documentStore.retrieve(
+ BandwidthStatus.class, false, fingerprint);
+ if (bandwidthStatus == null) {
return;
}
+ String historyString = bandwidthStatus.documentString;
try {
Scanner s = new Scanner(historyString);
while (s.hasNextLine()) {
@@ -232,9 +232,9 @@ public class BandwidthDataWriter {
+ this.dateTimeFormat.format(v[1]) + " "
+ String.valueOf(v[2]) + "\n");
}
- String historyString = sb.toString();
- this.documentStore.store(historyString, DocumentType.STATUS_BANDWIDTH,
- fingerprint);
+ BandwidthStatus bandwidthStatus = new BandwidthStatus();
+ bandwidthStatus.documentString = sb.toString();
+ this.documentStore.store(bandwidthStatus, fingerprint);
}
private void writeBandwidthDataFileToDisk(String fingerprint,
@@ -253,9 +253,9 @@ public class BandwidthDataWriter {
sb.append("{\"fingerprint\":\"" + fingerprint + "\",\n"
+ "\"write_history\":{\n" + writeHistoryString + "},\n"
+ "\"read_history\":{\n" + readHistoryString + "}}\n");
- String historyString = sb.toString();
- this.documentStore.store(historyString, DocumentType.OUT_BANDWIDTH,
- fingerprint);
+ BandwidthDocument bandwidthDocument = new BandwidthDocument();
+ bandwidthDocument.documentString = sb.toString();
+ this.documentStore.store(bandwidthDocument, fingerprint);
}
private String[] graphNames = new String[] {
@@ -377,15 +377,14 @@ public class BandwidthDataWriter {
public void deleteObsoleteBandwidthFiles() {
SortedSet<String> obsoleteBandwidthFiles = this.documentStore.list(
- DocumentType.OUT_BANDWIDTH);
+ BandwidthDocument.class, false);
for (String fingerprint : this.currentFingerprints) {
if (obsoleteBandwidthFiles.contains(fingerprint)) {
obsoleteBandwidthFiles.remove(fingerprint);
}
}
for (String fingerprint : obsoleteBandwidthFiles) {
- this.documentStore.remove(DocumentType.OUT_BANDWIDTH,
- fingerprint);
+ this.documentStore.remove(BandwidthDocument.class, fingerprint);
}
}
}
diff --git a/src/org/torproject/onionoo/BandwidthDocument.java b/src/org/torproject/onionoo/BandwidthDocument.java
new file mode 100644
index 0000000..01b87de
--- /dev/null
+++ b/src/org/torproject/onionoo/BandwidthDocument.java
@@ -0,0 +1,30 @@
+/* Copyright 2013 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.onionoo;
+
+import java.util.List;
+import java.util.Map;
+
+class BandwidthDocument extends Document {
+
+ class BandwidthHistory {
+ String first;
+ String last;
+ Integer interval;
+ Double factor;
+ Integer count;
+ List<Integer> values;
+ }
+
+ class NodeBandwidth {
+ String fingerprint;
+ Map<String, BandwidthHistory> write_history;
+ Map<String, BandwidthHistory> read_history;
+ }
+
+ String relays_published;
+ List<NodeBandwidth> relays;
+ String bridges_published;
+ List<NodeBandwidth> bridges;
+}
+
diff --git a/src/org/torproject/onionoo/BandwidthStatus.java b/src/org/torproject/onionoo/BandwidthStatus.java
new file mode 100644
index 0000000..bf6f504
--- /dev/null
+++ b/src/org/torproject/onionoo/BandwidthStatus.java
@@ -0,0 +1,7 @@
+/* Copyright 2013 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.onionoo;
+
+class BandwidthStatus extends Document {
+}
+
diff --git a/src/org/torproject/onionoo/CurrentNodes.java b/src/org/torproject/onionoo/CurrentNodes.java
deleted file mode 100644
index 9e27f5b..0000000
--- a/src/org/torproject/onionoo/CurrentNodes.java
+++ /dev/null
@@ -1,562 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.onionoo;
-
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Scanner;
-import java.util.Set;
-import java.util.SortedMap;
-import java.util.SortedSet;
-import java.util.TimeZone;
-import java.util.TreeMap;
-import java.util.TreeSet;
-
-import org.torproject.descriptor.BridgeNetworkStatus;
-import org.torproject.descriptor.Descriptor;
-import org.torproject.descriptor.NetworkStatusEntry;
-import org.torproject.descriptor.RelayNetworkStatusConsensus;
-import org.torproject.onionoo.LookupService.LookupResult;
-
-/* Store relays and bridges that have been running in the past seven
- * days. */
-public class CurrentNodes {
-
- private DescriptorSource descriptorSource;
-
- private LookupService lookupService;
-
- private DocumentStore documentStore;
-
- /* Initialize an instance for the back-end that is read-only and doesn't
- * support parsing new descriptor contents. */
- public CurrentNodes(DocumentStore documentStore) {
- this(null, null, documentStore);
- }
-
- public CurrentNodes(DescriptorSource descriptorSource,
- LookupService lookupService, DocumentStore documentStore) {
- this.descriptorSource = descriptorSource;
- this.lookupService = lookupService;
- this.documentStore = documentStore;
- }
-
- public void readStatusSummary() {
- String summaryString = this.documentStore.retrieve(
- DocumentType.STATUS_SUMMARY);
- this.initializeFromSummaryString(summaryString);
- }
-
- public void readOutSummary() {
- String summaryString = this.documentStore.retrieve(
- DocumentType.OUT_SUMMARY);
- this.initializeFromSummaryString(summaryString);
- }
-
- private void initializeFromSummaryString(String summaryString) {
- if (summaryString == null) {
- return;
- }
- Scanner s = new Scanner(summaryString);
- while (s.hasNextLine()) {
- String line = s.nextLine();
- this.parseSummaryFileLine(line);
- }
- s.close();
- }
-
- private void parseSummaryFileLine(String line) {
- boolean isRelay;
- String nickname, fingerprint, address, countryCode = "??",
- hostName = null, defaultPolicy = null, portList = null,
- aSNumber = null;
- SortedSet<String> orAddressesAndPorts, exitAddresses, relayFlags;
- long publishedOrValidAfterMillis, consensusWeight = -1L,
- lastRdnsLookup = -1L, firstSeenMillis, lastChangedAddresses;
- int orPort, dirPort;
- try {
- SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
- "yyyy-MM-dd HH:mm:ss");
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- String[] parts = line.split(" ");
- isRelay = parts[0].equals("r");
- if (parts.length < 9) {
- System.err.println("Too few space-separated values in line '"
- + line + "'. Skipping.");
- return;
- }
- nickname = parts[1];
- fingerprint = parts[2];
- String addresses = parts[3];
- orAddressesAndPorts = new TreeSet<String>();
- exitAddresses = new TreeSet<String>();
- if (addresses.contains(";")) {
- String[] addressParts = addresses.split(";", -1);
- if (addressParts.length != 3) {
- System.err.println("Invalid addresses entry in line '" + line
- + "'. Skipping.");
- return;
- }
- address = addressParts[0];
- if (addressParts[1].length() > 0) {
- orAddressesAndPorts.addAll(Arrays.asList(
- addressParts[1].split("\\+")));
- }
- if (addressParts[2].length() > 0) {
- exitAddresses.addAll(Arrays.asList(
- addressParts[2].split("\\+")));
- }
- } else {
- address = addresses;
- }
- publishedOrValidAfterMillis = dateTimeFormat.parse(
- parts[4] + " " + parts[5]).getTime();
- orPort = Integer.parseInt(parts[6]);
- dirPort = Integer.parseInt(parts[7]);
- relayFlags = new TreeSet<String>(
- Arrays.asList(parts[8].split(",")));
- if (parts.length > 9) {
- consensusWeight = Long.parseLong(parts[9]);
- }
- if (parts.length > 10) {
- countryCode = parts[10];
- }
- if (parts.length > 12) {
- hostName = parts[11].equals("null") ? null : parts[11];
- lastRdnsLookup = Long.parseLong(parts[12]);
- }
- if (parts.length > 14) {
- if (!parts[13].equals("null")) {
- defaultPolicy = parts[13];
- }
- if (!parts[14].equals("null")) {
- portList = parts[14];
- }
- }
- firstSeenMillis = publishedOrValidAfterMillis;
- if (parts.length > 16) {
- firstSeenMillis = dateTimeFormat.parse(parts[15] + " "
- + parts[16]).getTime();
- }
- lastChangedAddresses = publishedOrValidAfterMillis;
- if (parts.length > 18 && !parts[17].equals("null")) {
- lastChangedAddresses = dateTimeFormat.parse(parts[17] + " "
- + parts[18]).getTime();
- }
- if (parts.length > 19) {
- aSNumber = parts[19];
- }
- } catch (NumberFormatException e) {
- System.err.println("Number format exception while parsing line '"
- + line + "': " + e.getMessage() + ". Skipping.");
- return;
- } catch (ParseException e) {
- System.err.println("Parse exception while parsing line '" + line
- + "': " + e.getMessage() + ". Skipping.");
- return;
- } catch (Exception e) {
- /* This catch block is only here to handle yet unknown errors. It
- * should go away once we're sure what kind of errors can occur. */
- System.err.println("Unknown exception while parsing line '" + line
- + "': " + e.getMessage() + ". Skipping.");
- return;
- }
- if (isRelay) {
- this.addRelay(nickname, fingerprint, address,
- orAddressesAndPorts, exitAddresses,
- publishedOrValidAfterMillis, orPort, dirPort, relayFlags,
- consensusWeight, countryCode, hostName, lastRdnsLookup,
- defaultPolicy, portList, firstSeenMillis,
- lastChangedAddresses, aSNumber);
- } else {
- this.addBridge(nickname, fingerprint, address,
- orAddressesAndPorts, exitAddresses,
- publishedOrValidAfterMillis, orPort, dirPort, relayFlags,
- consensusWeight, countryCode, hostName, lastRdnsLookup,
- defaultPolicy, portList, firstSeenMillis,
- lastChangedAddresses, aSNumber);
- }
- }
-
- public void writeStatusSummary() {
- String summaryString = this.writeSummaryString(true);
- this.documentStore.store(summaryString, DocumentType.STATUS_SUMMARY);
- }
-
- public void writeOutSummary() {
- String summaryString = this.writeSummaryString(false);
- this.documentStore.store(summaryString, DocumentType.OUT_SUMMARY);
- this.documentStore.store(String.valueOf(System.currentTimeMillis()),
- DocumentType.OUT_UPDATE);
- }
-
- /* Write internal relay search data to a string. */
- private String writeSummaryString(boolean includeOldNodes) {
- StringBuilder sb = new StringBuilder();
- SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
- "yyyy-MM-dd HH:mm:ss");
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- Collection<Node> relays = includeOldNodes
- ? this.knownRelays.values() : this.getCurrentRelays().values();
- for (Node entry : relays) {
- String nickname = entry.getNickname();
- String fingerprint = entry.getFingerprint();
- String address = entry.getAddress();
- StringBuilder addressesBuilder = new StringBuilder();
- addressesBuilder.append(address + ";");
- int written = 0;
- for (String orAddressAndPort : entry.getOrAddressesAndPorts()) {
- addressesBuilder.append((written++ > 0 ? "+" : "") +
- orAddressAndPort);
- }
- addressesBuilder.append(";");
- written = 0;
- for (String exitAddress : entry.getExitAddresses()) {
- addressesBuilder.append((written++ > 0 ? "+" : "")
- + exitAddress);
- }
- String lastSeen = dateTimeFormat.format(entry.getLastSeenMillis());
- String orPort = String.valueOf(entry.getOrPort());
- String dirPort = String.valueOf(entry.getDirPort());
- StringBuilder flagsBuilder = new StringBuilder();
- written = 0;
- for (String relayFlag : entry.getRelayFlags()) {
- flagsBuilder.append((written++ > 0 ? "," : "") + relayFlag);
- }
- String consensusWeight = String.valueOf(entry.getConsensusWeight());
- String countryCode = entry.getCountryCode() != null
- ? entry.getCountryCode() : "??";
- String hostName = entry.getHostName() != null
- ? entry.getHostName() : "null";
- long lastRdnsLookup = entry.getLastRdnsLookup();
- String defaultPolicy = entry.getDefaultPolicy() != null
- ? entry.getDefaultPolicy() : "null";
- String portList = entry.getPortList() != null
- ? entry.getPortList() : "null";
- String firstSeen = dateTimeFormat.format(
- entry.getFirstSeenMillis());
- String lastChangedAddresses = dateTimeFormat.format(
- entry.getLastChangedOrAddress());
- String aSNumber = entry.getASNumber() != null
- ? entry.getASNumber() : "null";
- sb.append("r " + nickname + " " + fingerprint + " "
- + addressesBuilder.toString() + " " + lastSeen + " "
- + orPort + " " + dirPort + " " + flagsBuilder.toString() + " "
- + consensusWeight + " " + countryCode + " " + hostName + " "
- + String.valueOf(lastRdnsLookup) + " " + defaultPolicy + " "
- + portList + " " + firstSeen + " " + lastChangedAddresses
- + " " + aSNumber + "\n");
- }
- Collection<Node> bridges = includeOldNodes
- ? this.knownBridges.values() : this.getCurrentBridges().values();
- for (Node entry : bridges) {
- String nickname = entry.getNickname();
- String fingerprint = entry.getFingerprint();
- String published = dateTimeFormat.format(
- entry.getLastSeenMillis());
- String address = entry.getAddress();
- StringBuilder addressesBuilder = new StringBuilder();
- addressesBuilder.append(address + ";");
- int written = 0;
- for (String orAddressAndPort : entry.getOrAddressesAndPorts()) {
- addressesBuilder.append((written++ > 0 ? "+" : "") +
- orAddressAndPort);
- }
- addressesBuilder.append(";");
- String orPort = String.valueOf(entry.getOrPort());
- String dirPort = String.valueOf(entry.getDirPort());
- StringBuilder flagsBuilder = new StringBuilder();
- written = 0;
- for (String relayFlag : entry.getRelayFlags()) {
- flagsBuilder.append((written++ > 0 ? "," : "") + relayFlag);
- }
- String firstSeen = dateTimeFormat.format(
- entry.getFirstSeenMillis());
- sb.append("b " + nickname + " " + fingerprint + " "
- + addressesBuilder.toString() + " " + published + " " + orPort
- + " " + dirPort + " " + flagsBuilder.toString()
- + " -1 ?? null -1 null null " + firstSeen + " null null "
- + "null\n");
- }
- return sb.toString();
- }
-
- private long lastValidAfterMillis = 0L;
- private long lastPublishedMillis = 0L;
-
- public void readRelayNetworkConsensuses() {
- if (this.descriptorSource == null) {
- System.err.println("Not configured to read relay network "
- + "consensuses.");
- return;
- }
- DescriptorQueue descriptorQueue =
- this.descriptorSource.getDescriptorQueue(
- DescriptorType.RELAY_CONSENSUSES,
- DescriptorHistory.RELAY_CONSENSUS_HISTORY);
- Descriptor descriptor;
- while ((descriptor = descriptorQueue.nextDescriptor()) != null) {
- if (descriptor instanceof RelayNetworkStatusConsensus) {
- updateRelayNetworkStatusConsensus(
- (RelayNetworkStatusConsensus) descriptor);
- }
- }
- }
-
- public void setRelayRunningBits() {
- if (this.lastValidAfterMillis > 0L) {
- for (Node entry : this.knownRelays.values()) {
- entry.setRunning(entry.getLastSeenMillis() ==
- this.lastValidAfterMillis);
- }
- }
- }
-
- SortedMap<String, Integer> lastBandwidthWeights = null;
- public SortedMap<String, Integer> getLastBandwidthWeights() {
- return this.lastBandwidthWeights;
- }
- private void updateRelayNetworkStatusConsensus(
- RelayNetworkStatusConsensus consensus) {
- long validAfterMillis = consensus.getValidAfterMillis();
- for (NetworkStatusEntry entry :
- consensus.getStatusEntries().values()) {
- String nickname = entry.getNickname();
- String fingerprint = entry.getFingerprint();
- String address = entry.getAddress();
- SortedSet<String> orAddressesAndPorts = new TreeSet<String>(
- entry.getOrAddresses());
- int orPort = entry.getOrPort();
- int dirPort = entry.getDirPort();
- SortedSet<String> relayFlags = entry.getFlags();
- long consensusWeight = entry.getBandwidth();
- String defaultPolicy = entry.getDefaultPolicy();
- String portList = entry.getPortList();
- this.addRelay(nickname, fingerprint, address, orAddressesAndPorts,
- null, validAfterMillis, orPort, dirPort, relayFlags,
- consensusWeight, null, null, -1L, defaultPolicy, portList,
- validAfterMillis, validAfterMillis, null);
- }
- if (this.lastValidAfterMillis == validAfterMillis) {
- this.lastBandwidthWeights = consensus.getBandwidthWeights();
- }
- }
-
- public void addRelay(String nickname, String fingerprint,
- String address, SortedSet<String> orAddressesAndPorts,
- SortedSet<String> exitAddresses, long lastSeenMillis, int orPort,
- int dirPort, SortedSet<String> relayFlags, long consensusWeight,
- String countryCode, String hostName, long lastRdnsLookup,
- String defaultPolicy, String portList, long firstSeenMillis,
- long lastChangedAddresses, String aSNumber) {
- /* Remember addresses and OR/dir ports that the relay advertised at
- * the given time. */
- SortedMap<Long, Set<String>> lastAddresses =
- new TreeMap<Long, Set<String>>(Collections.reverseOrder());
- Set<String> addresses = new HashSet<String>();
- addresses.add(address + ":" + orPort);
- if (dirPort > 0) {
- addresses.add(address + ":" + dirPort);
- }
- addresses.addAll(orAddressesAndPorts);
- lastAddresses.put(lastChangedAddresses, addresses);
- /* See if there's already an entry for this relay. */
- if (this.knownRelays.containsKey(fingerprint)) {
- Node existingEntry = this.knownRelays.get(fingerprint);
- if (lastSeenMillis < existingEntry.getLastSeenMillis()) {
- /* Use latest information for nickname, current addresses, etc. */
- nickname = existingEntry.getNickname();
- address = existingEntry.getAddress();
- orAddressesAndPorts = existingEntry.getOrAddressesAndPorts();
- exitAddresses = existingEntry.getExitAddresses();
- lastSeenMillis = existingEntry.getLastSeenMillis();
- orPort = existingEntry.getOrPort();
- dirPort = existingEntry.getDirPort();
- relayFlags = existingEntry.getRelayFlags();
- consensusWeight = existingEntry.getConsensusWeight();
- countryCode = existingEntry.getCountryCode();
- defaultPolicy = existingEntry.getDefaultPolicy();
- portList = existingEntry.getPortList();
- }
- if (hostName == null &&
- existingEntry.getAddress().equals(address)) {
- /* Re-use reverse DNS lookup results if available. */
- hostName = existingEntry.getHostName();
- lastRdnsLookup = existingEntry.getLastRdnsLookup();
- }
- /* Update relay-history fields. */
- firstSeenMillis = Math.min(firstSeenMillis,
- existingEntry.getFirstSeenMillis());
- lastAddresses.putAll(existingEntry.getLastAddresses());
- }
- /* Add or update entry. */
- Node entry = new Node(nickname, fingerprint, address,
- orAddressesAndPorts, exitAddresses, lastSeenMillis, orPort,
- dirPort, relayFlags, consensusWeight, countryCode, hostName,
- lastRdnsLookup, defaultPolicy, portList, firstSeenMillis,
- lastAddresses, aSNumber);
- this.knownRelays.put(fingerprint, entry);
- /* If this entry comes from a new consensus, update our global last
- * valid-after time. */
- if (lastSeenMillis > this.lastValidAfterMillis) {
- this.lastValidAfterMillis = lastSeenMillis;
- }
- }
-
- public void lookUpCitiesAndASes() {
- SortedSet<String> addressStrings = new TreeSet<String>();
- for (Node relay : this.knownRelays.values()) {
- addressStrings.add(relay.getAddress());
- }
- if (addressStrings.isEmpty()) {
- System.err.println("No relay IP addresses to resolve to cities or "
- + "ASN.");
- return;
- }
- SortedMap<String, LookupResult> lookupResults =
- this.lookupService.lookup(addressStrings);
- for (Node relay : knownRelays.values()) {
- String addressString = relay.getAddress();
- if (lookupResults.containsKey(addressString)) {
- LookupResult lookupResult = lookupResults.get(addressString);
- relay.setCountryCode(lookupResult.countryCode);
- relay.setCountryName(lookupResult.countryName);
- relay.setRegionName(lookupResult.regionName);
- relay.setCityName(lookupResult.cityName);
- relay.setLatitude(lookupResult.latitude);
- relay.setLongitude(lookupResult.longitude);
- relay.setASNumber(lookupResult.aSNumber);
- relay.setASName(lookupResult.aSName);
- }
- }
- }
-
- public void readBridgeNetworkStatuses() {
- if (this.descriptorSource == null) {
- System.err.println("Not configured to read bridge network "
- + "statuses.");
- return;
- }
- DescriptorQueue descriptorQueue =
- this.descriptorSource.getDescriptorQueue(
- DescriptorType.BRIDGE_STATUSES,
- DescriptorHistory.BRIDGE_STATUS_HISTORY);
- Descriptor descriptor;
- while ((descriptor = descriptorQueue.nextDescriptor()) != null) {
- if (descriptor instanceof BridgeNetworkStatus) {
- updateBridgeNetworkStatus((BridgeNetworkStatus) descriptor);
- }
- }
- }
-
- public void setBridgeRunningBits() {
- if (this.lastPublishedMillis > 0L) {
- for (Node entry : this.knownBridges.values()) {
- entry.setRunning(entry.getRelayFlags().contains("Running") &&
- entry.getLastSeenMillis() == this.lastPublishedMillis);
- }
- }
- }
-
- private void updateBridgeNetworkStatus(BridgeNetworkStatus status) {
- long publishedMillis = status.getPublishedMillis();
- for (NetworkStatusEntry entry : status.getStatusEntries().values()) {
- String nickname = entry.getNickname();
- String fingerprint = entry.getFingerprint();
- String address = entry.getAddress();
- SortedSet<String> orAddressesAndPorts = new TreeSet<String>(
- entry.getOrAddresses());
- int orPort = entry.getOrPort();
- int dirPort = entry.getDirPort();
- SortedSet<String> relayFlags = entry.getFlags();
- this.addBridge(nickname, fingerprint, address, orAddressesAndPorts,
- null, publishedMillis, orPort, dirPort, relayFlags, -1, "??",
- null, -1L, null, null, publishedMillis, -1L, null);
- }
- }
-
- public void addBridge(String nickname, String fingerprint,
- String address, SortedSet<String> orAddressesAndPorts,
- SortedSet<String> exitAddresses, long lastSeenMillis, int orPort,
- int dirPort, SortedSet<String> relayFlags, long consensusWeight,
- String countryCode, String hostname, long lastRdnsLookup,
- String defaultPolicy, String portList, long firstSeenMillis,
- long lastChangedAddresses, String aSNumber) {
- /* See if there's already an entry for this bridge. */
- if (this.knownBridges.containsKey(fingerprint)) {
- Node existingEntry = this.knownBridges.get(fingerprint);
- if (lastSeenMillis < existingEntry.getLastSeenMillis()) {
- /* Use latest information for nickname, current addresses, etc. */
- nickname = existingEntry.getNickname();
- address = existingEntry.getAddress();
- orAddressesAndPorts = existingEntry.getOrAddressesAndPorts();
- exitAddresses = existingEntry.getExitAddresses();
- lastSeenMillis = existingEntry.getLastSeenMillis();
- orPort = existingEntry.getOrPort();
- dirPort = existingEntry.getDirPort();
- relayFlags = existingEntry.getRelayFlags();
- consensusWeight = existingEntry.getConsensusWeight();
- countryCode = existingEntry.getCountryCode();
- defaultPolicy = existingEntry.getDefaultPolicy();
- portList = existingEntry.getPortList();
- aSNumber = existingEntry.getASNumber();
- }
- /* Update relay-history fields. */
- firstSeenMillis = Math.min(firstSeenMillis,
- existingEntry.getFirstSeenMillis());
- }
- /* Add or update entry. */
- Node entry = new Node(nickname, fingerprint, address,
- orAddressesAndPorts, exitAddresses, lastSeenMillis, orPort,
- dirPort, relayFlags, consensusWeight, countryCode, hostname,
- lastRdnsLookup, defaultPolicy, portList, firstSeenMillis, null,
- aSNumber);
- this.knownBridges.put(fingerprint, entry);
- /* If this entry comes from a new status, update our global last
- * published time. */
- if (lastSeenMillis > this.lastPublishedMillis) {
- this.lastPublishedMillis = lastSeenMillis;
- }
- }
-
- private SortedMap<String, Node> knownRelays =
- new TreeMap<String, Node>();
- public SortedMap<String, Node> getCurrentRelays() {
- long cutoff = this.lastValidAfterMillis
- - 7L * 24L * 60L * 60L * 1000L;
- SortedMap<String, Node> currentRelays = new TreeMap<String, Node>();
- for (Map.Entry<String, Node> e : this.knownRelays.entrySet()) {
- if (e.getValue().getLastSeenMillis() >= cutoff) {
- currentRelays.put(e.getKey(), e.getValue());
- }
- }
- return currentRelays;
- }
-
- private SortedMap<String, Node> knownBridges =
- new TreeMap<String, Node>();
- public SortedMap<String, Node> getCurrentBridges() {
- long cutoff = this.lastPublishedMillis - 7L * 24L * 60L * 60L * 1000L;
- SortedMap<String, Node> currentBridges = new TreeMap<String, Node>();
- for (Map.Entry<String, Node> e : this.knownBridges.entrySet()) {
- if (e.getValue().getLastSeenMillis() >= cutoff) {
- currentBridges.put(e.getKey(), e.getValue());
- }
- }
- return currentBridges;
- }
-
- public long getLastValidAfterMillis() {
- return this.lastValidAfterMillis;
- }
-
- public long getLastPublishedMillis() {
- return this.lastPublishedMillis;
- }
-}
-
diff --git a/src/org/torproject/onionoo/DescriptorSource.java b/src/org/torproject/onionoo/DescriptorSource.java
index 5936a93..23febc8 100644
--- a/src/org/torproject/onionoo/DescriptorSource.java
+++ b/src/org/torproject/onionoo/DescriptorSource.java
@@ -1,3 +1,5 @@
+/* Copyright 2013 The Tor Project
+ * See LICENSE for licensing information */
package org.torproject.onionoo;
import java.io.BufferedReader;
diff --git a/src/org/torproject/onionoo/DetailDataWriter.java b/src/org/torproject/onionoo/DetailDataWriter.java
deleted file mode 100644
index e95a919..0000000
--- a/src/org/torproject/onionoo/DetailDataWriter.java
+++ /dev/null
@@ -1,810 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.onionoo;
-
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Scanner;
-import java.util.Set;
-import java.util.SortedMap;
-import java.util.SortedSet;
-import java.util.TimeZone;
-import java.util.TreeMap;
-import java.util.TreeSet;
-
-import org.apache.commons.lang.StringEscapeUtils;
-
-import org.torproject.descriptor.BridgePoolAssignment;
-import org.torproject.descriptor.Descriptor;
-import org.torproject.descriptor.ExitList;
-import org.torproject.descriptor.ExitListEntry;
-import org.torproject.descriptor.ServerDescriptor;
-
-/* Write updated detail data files to disk and delete files of relays or
- * bridges that fell out of the summary list.
- *
- * The parts of details files coming from server descriptors always come
- * from the last known descriptor of a relay or bridge, not from the
- * descriptor that was last referenced in a network status. */
-public class DetailDataWriter {
-
- private DescriptorSource descriptorSource;
-
- private DocumentStore documentStore;
-
- public DetailDataWriter(DescriptorSource descriptorSource,
- DocumentStore documentStore) {
- this.descriptorSource = descriptorSource;
- this.documentStore = documentStore;
- }
-
- private SortedMap<String, Node> relays;
- public void setCurrentRelays(SortedMap<String, Node> currentRelays) {
- this.relays = currentRelays;
- }
-
- private SortedMap<String, Node> bridges;
- public void setCurrentBridges(SortedMap<String, Node> currentBridges) {
- this.bridges = currentBridges;
- }
-
- private static final long RDNS_LOOKUP_MAX_REQUEST_MILLIS = 10L * 1000L;
- private static final long RDNS_LOOKUP_MAX_DURATION_MILLIS = 5L * 60L
- * 1000L;
- private static final long RDNS_LOOKUP_MAX_AGE_MILLIS = 12L * 60L * 60L
- * 1000L;
- private static final int RDNS_LOOKUP_WORKERS_NUM = 5;
- private Set<String> rdnsLookupJobs;
- private Map<String, String> rdnsLookupResults;
- private long startedRdnsLookups;
- private List<RdnsLookupWorker> rdnsLookupWorkers;
- public void startReverseDomainNameLookups() {
- this.startedRdnsLookups = System.currentTimeMillis();
- this.rdnsLookupJobs = new HashSet<String>();
- for (Node relay : relays.values()) {
- if (relay.getLastRdnsLookup() < this.startedRdnsLookups
- - RDNS_LOOKUP_MAX_AGE_MILLIS) {
- this.rdnsLookupJobs.add(relay.getAddress());
- }
- }
- this.rdnsLookupResults = new HashMap<String, String>();
- this.rdnsLookupWorkers = new ArrayList<RdnsLookupWorker>();
- for (int i = 0; i < RDNS_LOOKUP_WORKERS_NUM; i++) {
- RdnsLookupWorker rdnsLookupWorker = new RdnsLookupWorker();
- this.rdnsLookupWorkers.add(rdnsLookupWorker);
- rdnsLookupWorker.setDaemon(true);
- rdnsLookupWorker.start();
- }
- }
-
- public void finishReverseDomainNameLookups() {
- for (RdnsLookupWorker rdnsLookupWorker : this.rdnsLookupWorkers) {
- try {
- rdnsLookupWorker.join();
- } catch (InterruptedException e) {
- /* This is not something that we can take care of. Just leave the
- * worker thread alone. */
- }
- }
- synchronized (this.rdnsLookupResults) {
- for (Node relay : relays.values()) {
- if (this.rdnsLookupResults.containsKey(relay.getAddress())) {
- relay.setHostName(this.rdnsLookupResults.get(
- relay.getAddress()));
- relay.setLastRdnsLookup(this.startedRdnsLookups);
- }
- }
- }
- }
-
- private class RdnsLookupWorker extends Thread {
- public void run() {
- while (System.currentTimeMillis() - RDNS_LOOKUP_MAX_DURATION_MILLIS
- <= startedRdnsLookups) {
- String rdnsLookupJob = null;
- synchronized (rdnsLookupJobs) {
- for (String job : rdnsLookupJobs) {
- rdnsLookupJob = job;
- rdnsLookupJobs.remove(job);
- break;
- }
- }
- if (rdnsLookupJob == null) {
- break;
- }
- RdnsLookupRequest request = new RdnsLookupRequest(this,
- rdnsLookupJob);
- request.setDaemon(true);
- request.start();
- try {
- Thread.sleep(RDNS_LOOKUP_MAX_REQUEST_MILLIS);
- } catch (InterruptedException e) {
- /* Getting interrupted should be the default case. */
- }
- String hostName = request.getHostName();
- if (hostName != null) {
- synchronized (rdnsLookupResults) {
- rdnsLookupResults.put(rdnsLookupJob, hostName);
- }
- }
- }
- }
- }
-
- private class RdnsLookupRequest extends Thread {
- RdnsLookupWorker parent;
- String address, hostName;
- public RdnsLookupRequest(RdnsLookupWorker parent, String address) {
- this.parent = parent;
- this.address = address;
- }
- public void run() {
- try {
- String result = InetAddress.getByName(this.address).getHostName();
- synchronized (this) {
- this.hostName = result;
- }
- } catch (UnknownHostException e) {
- /* We'll try again the next time. */
- }
- this.parent.interrupt();
- }
- public synchronized String getHostName() {
- return hostName;
- }
- }
-
- private Map<String, ServerDescriptor> relayServerDescriptors =
- new HashMap<String, ServerDescriptor>();
- public void readRelayServerDescriptors() {
- /* Don't remember which server descriptors we already parsed. If we
- * parse a server descriptor now and first learn about the relay in a
- * later consensus, we'll never write the descriptor content anywhere.
- * The result would be details files containing no descriptor parts
- * until the relay publishes the next descriptor. */
- DescriptorQueue descriptorQueue =
- this.descriptorSource.getDescriptorQueue(
- DescriptorType.RELAY_SERVER_DESCRIPTORS);
- Descriptor descriptor;
- while ((descriptor = descriptorQueue.nextDescriptor()) != null) {
- if (descriptor instanceof ServerDescriptor) {
- ServerDescriptor serverDescriptor = (ServerDescriptor) descriptor;
- String fingerprint = serverDescriptor.getFingerprint();
- if (!this.relayServerDescriptors.containsKey(fingerprint) ||
- this.relayServerDescriptors.get(fingerprint).
- getPublishedMillis()
- < serverDescriptor.getPublishedMillis()) {
- this.relayServerDescriptors.put(fingerprint,
- serverDescriptor);
- }
- }
- }
- }
-
- public void calculatePathSelectionProbabilities(
- SortedMap<String, Integer> bandwidthWeights) {
- boolean consensusContainsBandwidthWeights = false;
- double wgg = 0.0, wgd = 0.0, wmg = 0.0, wmm = 0.0, wme = 0.0,
- wmd = 0.0, wee = 0.0, wed = 0.0;
- if (bandwidthWeights != null) {
- SortedSet<String> weightKeys = new TreeSet<String>(Arrays.asList(
- "Wgg,Wgd,Wmg,Wmm,Wme,Wmd,Wee,Wed".split(",")));
- weightKeys.removeAll(bandwidthWeights.keySet());
- if (weightKeys.isEmpty()) {
- consensusContainsBandwidthWeights = true;
- wgg = ((double) bandwidthWeights.get("Wgg")) / 10000.0;
- wgd = ((double) bandwidthWeights.get("Wgd")) / 10000.0;
- wmg = ((double) bandwidthWeights.get("Wmg")) / 10000.0;
- wmm = ((double) bandwidthWeights.get("Wmm")) / 10000.0;
- wme = ((double) bandwidthWeights.get("Wme")) / 10000.0;
- wmd = ((double) bandwidthWeights.get("Wmd")) / 10000.0;
- wee = ((double) bandwidthWeights.get("Wee")) / 10000.0;
- wed = ((double) bandwidthWeights.get("Wed")) / 10000.0;
- }
- } else {
- System.err.println("Could not determine most recent Wxx parameter "
- + "values, probably because we didn't parse a consensus in "
- + "this execution. All relays' guard/middle/exit weights are "
- + "going to be 0.0.");
- }
- SortedMap<String, Double>
- advertisedBandwidths = new TreeMap<String, Double>(),
- consensusWeights = new TreeMap<String, Double>(),
- guardWeights = new TreeMap<String, Double>(),
- middleWeights = new TreeMap<String, Double>(),
- exitWeights = new TreeMap<String, Double>();
- double totalAdvertisedBandwidth = 0.0;
- double totalConsensusWeight = 0.0;
- double totalGuardWeight = 0.0;
- double totalMiddleWeight = 0.0;
- double totalExitWeight = 0.0;
- for (Map.Entry<String, Node> e : this.relays.entrySet()) {
- String fingerprint = e.getKey();
- Node relay = e.getValue();
- if (!relay.getRunning()) {
- continue;
- }
- boolean isExit = relay.getRelayFlags().contains("Exit") &&
- !relay.getRelayFlags().contains("BadExit");
- boolean isGuard = relay.getRelayFlags().contains("Guard");
- if (this.relayServerDescriptors.containsKey(fingerprint)) {
- ServerDescriptor serverDescriptor =
- this.relayServerDescriptors.get(fingerprint);
- double advertisedBandwidth = (double) Math.min(Math.min(
- serverDescriptor.getBandwidthBurst(),
- serverDescriptor.getBandwidthObserved()),
- serverDescriptor.getBandwidthRate());
- advertisedBandwidths.put(fingerprint, advertisedBandwidth);
- totalAdvertisedBandwidth += advertisedBandwidth;
- }
- double consensusWeight = (double) relay.getConsensusWeight();
- consensusWeights.put(fingerprint, consensusWeight);
- totalConsensusWeight += consensusWeight;
- if (consensusContainsBandwidthWeights) {
- double guardWeight = consensusWeight,
- middleWeight = consensusWeight,
- exitWeight = consensusWeight;
- if (isGuard && isExit) {
- guardWeight *= wgd;
- middleWeight *= wmd;
- exitWeight *= wed;
- } else if (isGuard) {
- guardWeight *= wgg;
- middleWeight *= wmg;
- exitWeight = 0.0;
- } else if (isExit) {
- guardWeight = 0.0;
- middleWeight *= wme;
- exitWeight *= wee;
- } else {
- guardWeight = 0.0;
- middleWeight *= wmm;
- exitWeight = 0.0;
- }
- guardWeights.put(fingerprint, guardWeight);
- middleWeights.put(fingerprint, middleWeight);
- exitWeights.put(fingerprint, exitWeight);
- totalGuardWeight += guardWeight;
- totalMiddleWeight += middleWeight;
- totalExitWeight += exitWeight;
- }
- }
- for (Map.Entry<String, Node> e : this.relays.entrySet()) {
- String fingerprint = e.getKey();
- Node relay = e.getValue();
- if (advertisedBandwidths.containsKey(fingerprint)) {
- relay.setAdvertisedBandwidthFraction(advertisedBandwidths.get(
- fingerprint) / totalAdvertisedBandwidth);
- }
- if (consensusWeights.containsKey(fingerprint)) {
- relay.setConsensusWeightFraction(consensusWeights.get(fingerprint)
- / totalConsensusWeight);
- }
- if (guardWeights.containsKey(fingerprint)) {
- relay.setGuardProbability(guardWeights.get(fingerprint)
- / totalGuardWeight);
- }
- if (middleWeights.containsKey(fingerprint)) {
- relay.setMiddleProbability(middleWeights.get(fingerprint)
- / totalMiddleWeight);
- }
- if (exitWeights.containsKey(fingerprint)) {
- relay.setExitProbability(exitWeights.get(fingerprint)
- / totalExitWeight);
- }
- }
- }
-
- private long now = System.currentTimeMillis();
- private Map<String, Set<ExitListEntry>> exitListEntries =
- new HashMap<String, Set<ExitListEntry>>();
- public void readExitLists() {
- DescriptorQueue descriptorQueue =
- this.descriptorSource.getDescriptorQueue(
- DescriptorType.EXIT_LISTS, DescriptorHistory.EXIT_LIST_HISTORY);
- Descriptor descriptor;
- while ((descriptor = descriptorQueue.nextDescriptor()) != null) {
- if (descriptor instanceof ExitList) {
- ExitList exitList = (ExitList) descriptor;
- for (ExitListEntry exitListEntry :
- exitList.getExitListEntries()) {
- if (exitListEntry.getScanMillis() <
- this.now - 24L * 60L * 60L * 1000L) {
- continue;
- }
- String fingerprint = exitListEntry.getFingerprint();
- if (!this.exitListEntries.containsKey(fingerprint)) {
- this.exitListEntries.put(fingerprint,
- new HashSet<ExitListEntry>());
- }
- this.exitListEntries.get(fingerprint).add(exitListEntry);
- }
- }
- }
- }
-
- private Map<String, ServerDescriptor> bridgeServerDescriptors =
- new HashMap<String, ServerDescriptor>();
- public void readBridgeServerDescriptors() {
- /* Don't remember which server descriptors we already parsed. If we
- * parse a server descriptor now and first learn about the relay in a
- * later status, we'll never write the descriptor content anywhere.
- * The result would be details files containing no descriptor parts
- * until the bridge publishes the next descriptor. */
- DescriptorQueue descriptorQueue =
- this.descriptorSource.getDescriptorQueue(
- DescriptorType.BRIDGE_SERVER_DESCRIPTORS);
- Descriptor descriptor;
- while ((descriptor = descriptorQueue.nextDescriptor()) != null) {
- if (descriptor instanceof ServerDescriptor) {
- ServerDescriptor serverDescriptor = (ServerDescriptor) descriptor;
- String fingerprint = serverDescriptor.getFingerprint();
- if (!this.bridgeServerDescriptors.containsKey(fingerprint) ||
- this.bridgeServerDescriptors.get(fingerprint).
- getPublishedMillis()
- < serverDescriptor.getPublishedMillis()) {
- this.bridgeServerDescriptors.put(fingerprint,
- serverDescriptor);
- }
- }
- }
- }
-
- private Map<String, String> bridgePoolAssignments =
- new HashMap<String, String>();
- public void readBridgePoolAssignments() {
- DescriptorQueue descriptorQueue =
- this.descriptorSource.getDescriptorQueue(
- DescriptorType.BRIDGE_POOL_ASSIGNMENTS,
- DescriptorHistory.BRIDGE_POOLASSIGN_HISTORY);
- Descriptor descriptor;
- while ((descriptor = descriptorQueue.nextDescriptor()) != null) {
- if (descriptor instanceof BridgePoolAssignment) {
- BridgePoolAssignment bridgePoolAssignment =
- (BridgePoolAssignment) descriptor;
- for (Map.Entry<String, String> e :
- bridgePoolAssignment.getEntries().entrySet()) {
- String fingerprint = e.getKey();
- String details = e.getValue();
- this.bridgePoolAssignments.put(fingerprint, details);
- }
- }
- }
- }
-
- public void writeOutDetails() {
- SortedSet<String> remainingDetailsFiles = new TreeSet<String>();
- remainingDetailsFiles.addAll(this.documentStore.list(
- DocumentType.OUT_DETAILS));
- this.updateRelayDetailsFiles(remainingDetailsFiles);
- this.updateBridgeDetailsFiles(remainingDetailsFiles);
- this.deleteDetailsFiles(remainingDetailsFiles);
- }
-
- private static String escapeJSON(String s) {
- return StringEscapeUtils.escapeJavaScript(s).replaceAll("\\\\'", "'");
- }
-
- private void updateRelayDetailsFiles(
- SortedSet<String> remainingDetailsFiles) {
- SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
- "yyyy-MM-dd HH:mm:ss");
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- for (Map.Entry<String, Node> relay : this.relays.entrySet()) {
- String fingerprint = relay.getKey();
-
- /* Read details file for this relay if it exists. */
- String descriptorParts = null;
- long publishedMillis = -1L;
- if (remainingDetailsFiles.contains(fingerprint)) {
- remainingDetailsFiles.remove(fingerprint);
- String documentString = this.documentStore.retrieve(
- DocumentType.OUT_DETAILS, fingerprint);
- if (documentString != null) {
- try {
- boolean copyDescriptorParts = false;
- StringBuilder sb = new StringBuilder();
- Scanner s = new Scanner(documentString);
- while (s.hasNextLine()) {
- String line = s.nextLine();
- if (line.startsWith("\"desc_published\":")) {
- String published = line.substring(
- "\"desc_published\":\"".length(),
- "\"desc_published\":\"1970-01-01 00:00:00".length());
- publishedMillis = dateTimeFormat.parse(published).
- getTime();
- copyDescriptorParts = true;
- }
- if (copyDescriptorParts) {
- sb.append(line + "\n");
- }
- }
- s.close();
- if (sb.length() > 0) {
- descriptorParts = sb.toString();
- }
- } catch (ParseException e) {
- System.err.println("Could not parse timestamp in details.json "
- + "file for '" + fingerprint + "'. Ignoring.");
- e.printStackTrace();
- publishedMillis = -1L;
- descriptorParts = null;
- }
- }
- }
-
- /* Generate new descriptor-specific part if we have a more recent
- * descriptor or if the part we read didn't contain a last_restarted
- * line. */
- if (this.relayServerDescriptors.containsKey(fingerprint) &&
- (this.relayServerDescriptors.get(fingerprint).
- getPublishedMillis() > publishedMillis)) {
- ServerDescriptor descriptor = this.relayServerDescriptors.get(
- fingerprint);
- StringBuilder sb = new StringBuilder();
- String publishedDateTime = dateTimeFormat.format(
- descriptor.getPublishedMillis());
- String lastRestartedString = dateTimeFormat.format(
- descriptor.getPublishedMillis()
- - descriptor.getUptime() * 1000L);
- int bandwidthRate = descriptor.getBandwidthRate();
- int bandwidthBurst = descriptor.getBandwidthBurst();
- int observedBandwidth = descriptor.getBandwidthObserved();
- int advertisedBandwidth = Math.min(bandwidthRate,
- Math.min(bandwidthBurst, observedBandwidth));
- sb.append("\"desc_published\":\"" + publishedDateTime + "\",\n"
- + "\"last_restarted\":\"" + lastRestartedString + "\",\n"
- + "\"bandwidth_rate\":" + bandwidthRate + ",\n"
- + "\"bandwidth_burst\":" + bandwidthBurst + ",\n"
- + "\"observed_bandwidth\":" + observedBandwidth + ",\n"
- + "\"advertised_bandwidth\":" + advertisedBandwidth + ",\n"
- + "\"exit_policy\":[");
- int written = 0;
- for (String exitPolicyLine : descriptor.getExitPolicyLines()) {
- sb.append((written++ > 0 ? "," : "") + "\n \"" + exitPolicyLine
- + "\"");
- }
- sb.append("\n]");
- if (descriptor.getContact() != null) {
- sb.append(",\n\"contact\":\""
- + escapeJSON(descriptor.getContact()) + "\"");
- }
- if (descriptor.getPlatform() != null) {
- sb.append(",\n\"platform\":\""
- + escapeJSON(descriptor.getPlatform()) + "\"");
- }
- if (descriptor.getFamilyEntries() != null) {
- sb.append(",\n\"family\":[");
- written = 0;
- for (String familyEntry : descriptor.getFamilyEntries()) {
- sb.append((written++ > 0 ? "," : "") + "\n \"" + familyEntry
- + "\"");
- }
- sb.append("\n]");
- }
- sb.append("\n}\n");
- descriptorParts = sb.toString();
- }
-
- /* Generate network-status-specific part. */
- Node entry = relay.getValue();
- String nickname = entry.getNickname();
- String address = entry.getAddress();
- List<String> orAddresses = new ArrayList<String>();
- orAddresses.add(address + ":" + entry.getOrPort());
- orAddresses.addAll(entry.getOrAddressesAndPorts());
- StringBuilder orAddressesAndPortsBuilder = new StringBuilder();
- int addressesWritten = 0;
- for (String orAddress : orAddresses) {
- orAddressesAndPortsBuilder.append(
- (addressesWritten++ > 0 ? "," : "") + "\""
- + orAddress.toLowerCase() + "\"");
- }
- String lastSeen = dateTimeFormat.format(entry.getLastSeenMillis());
- String firstSeen = dateTimeFormat.format(
- entry.getFirstSeenMillis());
- String lastChangedOrAddress = dateTimeFormat.format(
- entry.getLastChangedOrAddress());
- String running = entry.getRunning() ? "true" : "false";
- int dirPort = entry.getDirPort();
- String countryCode = entry.getCountryCode();
- String latitude = entry.getLatitude();
- String longitude = entry.getLongitude();
- String countryName = entry.getCountryName();
- String regionName = entry.getRegionName();
- String cityName = entry.getCityName();
- String aSNumber = entry.getASNumber();
- String aSName = entry.getASName();
- long consensusWeight = entry.getConsensusWeight();
- String hostName = entry.getHostName();
- double advertisedBandwidthFraction =
- entry.getAdvertisedBandwidthFraction();
- double consensusWeightFraction = entry.getConsensusWeightFraction();
- double guardProbability = entry.getGuardProbability();
- double middleProbability = entry.getMiddleProbability();
- double exitProbability = entry.getExitProbability();
- String defaultPolicy = entry.getDefaultPolicy();
- String portList = entry.getPortList();
- StringBuilder sb = new StringBuilder();
- sb.append("{\"version\":1,\n"
- + "\"nickname\":\"" + nickname + "\",\n"
- + "\"fingerprint\":\"" + fingerprint + "\",\n"
- + "\"or_addresses\":[" + orAddressesAndPortsBuilder.toString()
- + "]");
- if (dirPort != 0) {
- sb.append(",\n\"dir_address\":\"" + address + ":" + dirPort
- + "\"");
- }
- sb.append(",\n\"last_seen\":\"" + lastSeen + "\"");
- sb.append(",\n\"first_seen\":\"" + firstSeen + "\"");
- sb.append(",\n\"last_changed_address_or_port\":\""
- + lastChangedOrAddress + "\"");
- sb.append(",\n\"running\":" + running);
- SortedSet<String> relayFlags = entry.getRelayFlags();
- if (!relayFlags.isEmpty()) {
- sb.append(",\n\"flags\":[");
- int written = 0;
- for (String relayFlag : relayFlags) {
- sb.append((written++ > 0 ? "," : "") + "\"" + relayFlag + "\"");
- }
- sb.append("]");
- }
- if (countryCode != null) {
- sb.append(",\n\"country\":\"" + countryCode + "\"");
- }
- if (latitude != null) {
- sb.append(",\n\"latitude\":" + latitude);
- }
- if (longitude != null) {
- sb.append(",\n\"longitude\":" + longitude);
- }
- if (countryName != null) {
- sb.append(",\n\"country_name\":\""
- + escapeJSON(countryName) + "\"");
- }
- if (regionName != null) {
- sb.append(",\n\"region_name\":\""
- + escapeJSON(regionName) + "\"");
- }
- if (cityName != null) {
- sb.append(",\n\"city_name\":\""
- + escapeJSON(cityName) + "\"");
- }
- if (aSNumber != null) {
- sb.append(",\n\"as_number\":\""
- + escapeJSON(aSNumber) + "\"");
- }
- if (aSName != null) {
- sb.append(",\n\"as_name\":\""
- + escapeJSON(aSName) + "\"");
- }
- if (consensusWeight >= 0L) {
- sb.append(",\n\"consensus_weight\":"
- + String.valueOf(consensusWeight));
- }
- if (hostName != null) {
- sb.append(",\n\"host_name\":\""
- + escapeJSON(hostName) + "\"");
- }
- if (advertisedBandwidthFraction >= 0.0) {
- sb.append(String.format(
- ",\n\"advertised_bandwidth_fraction\":%.9f",
- advertisedBandwidthFraction));
- }
- if (consensusWeightFraction >= 0.0) {
- sb.append(String.format(",\n\"consensus_weight_fraction\":%.9f",
- consensusWeightFraction));
- }
- if (guardProbability >= 0.0) {
- sb.append(String.format(",\n\"guard_probability\":%.9f",
- guardProbability));
- }
- if (middleProbability >= 0.0) {
- sb.append(String.format(",\n\"middle_probability\":%.9f",
- middleProbability));
- }
- if (exitProbability >= 0.0) {
- sb.append(String.format(",\n\"exit_probability\":%.9f",
- exitProbability));
- }
- if (defaultPolicy != null && (defaultPolicy.equals("accept") ||
- defaultPolicy.equals("reject")) && portList != null) {
- sb.append(",\n\"exit_policy_summary\":{\"" + defaultPolicy
- + "\":[");
- int portsWritten = 0;
- for (String portOrPortRange : portList.split(",")) {
- sb.append((portsWritten++ > 0 ? "," : "")
- + "\"" + portOrPortRange + "\"");
- }
- sb.append("]}");
- }
-
- /* Add exit addresses if at least one of them is distinct from the
- * onion-routing addresses. */
- if (exitListEntries.containsKey(fingerprint)) {
- for (ExitListEntry exitListEntry :
- exitListEntries.get(fingerprint)) {
- entry.addExitAddress(exitListEntry.getExitAddress());
- }
- }
- if (!entry.getExitAddresses().isEmpty()) {
- sb.append(",\n\"exit_addresses\":[");
- int written = 0;
- for (String exitAddress : entry.getExitAddresses()) {
- sb.append((written++ > 0 ? "," : "") + "\""
- + exitAddress.toLowerCase() + "\"");
- }
- sb.append("]");
- }
-
- /* Add descriptor parts. */
- if (descriptorParts != null) {
- sb.append(",\n" + descriptorParts);
- } else {
- sb.append("\n}\n");
- }
-
- /* Write details file to disk. */
- String detailsLines = sb.toString();
- this.documentStore.store(detailsLines, DocumentType.OUT_DETAILS,
- fingerprint);
- }
- }
-
- private void updateBridgeDetailsFiles(
- SortedSet<String> remainingDetailsFiles) {
- SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
- "yyyy-MM-dd HH:mm:ss");
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- for (Map.Entry<String, Node> bridge : this.bridges.entrySet()) {
- String fingerprint = bridge.getKey();
-
- /* Read details file for this bridge if it exists. */
- String descriptorParts = null, bridgePoolAssignment = null;
- long publishedMillis = -1L;
- if (remainingDetailsFiles.contains(fingerprint)) {
- remainingDetailsFiles.remove(fingerprint);
- String documentString = this.documentStore.retrieve(
- DocumentType.OUT_DETAILS, fingerprint);
- if (documentString != null) {
- try {
- boolean copyDescriptorParts = false;
- StringBuilder sb = new StringBuilder();
- Scanner s = new Scanner(documentString);
- while (s.hasNextLine()) {
- String line = s.nextLine();
- if (line.startsWith("\"desc_published\":")) {
- String published = line.substring(
- "\"desc_published\":\"".length(),
- "\"desc_published\":\"1970-01-01 00:00:00".length());
- publishedMillis = dateTimeFormat.parse(published).
- getTime();
- copyDescriptorParts = true;
- } else if (line.startsWith("\"pool_assignment\":")) {
- bridgePoolAssignment = line;
- copyDescriptorParts = false;
- } else if (line.equals("}")) {
- copyDescriptorParts = false;
- }
- if (copyDescriptorParts) {
- sb.append(line + "\n");
- }
- }
- s.close();
- descriptorParts = sb.toString();
- if (descriptorParts.endsWith(",\n")) {
- descriptorParts = descriptorParts.substring(0,
- descriptorParts.length() - 2);
- } else if (descriptorParts.endsWith("\n")) {
- descriptorParts = descriptorParts.substring(0,
- descriptorParts.length() - 1);
- }
- } catch (ParseException e) {
- System.err.println("Could not parse timestamp in "
- + "details.json file for '" + fingerprint + "'. "
- + "Ignoring.");
- e.printStackTrace();
- publishedMillis = -1L;
- descriptorParts = null;
- }
- }
- }
-
- /* Generate new descriptor-specific part if we have a more recent
- * descriptor. */
- if (this.bridgeServerDescriptors.containsKey(fingerprint) &&
- this.bridgeServerDescriptors.get(fingerprint).
- getPublishedMillis() > publishedMillis) {
- ServerDescriptor descriptor = this.bridgeServerDescriptors.get(
- fingerprint);
- StringBuilder sb = new StringBuilder();
- String publishedDateTime = dateTimeFormat.format(
- descriptor.getPublishedMillis());
- String lastRestartedString = dateTimeFormat.format(
- descriptor.getPublishedMillis()
- - descriptor.getUptime() * 1000L);
- int advertisedBandwidth = Math.min(descriptor.getBandwidthRate(),
- Math.min(descriptor.getBandwidthBurst(),
- descriptor.getBandwidthObserved()));
- sb.append("\"desc_published\":\"" + publishedDateTime + "\",\n"
- + "\"last_restarted\":\"" + lastRestartedString + "\",\n"
- + "\"advertised_bandwidth\":" + advertisedBandwidth + ",\n"
- + "\"platform\":\"" + escapeJSON(descriptor.getPlatform())
- + "\"");
- descriptorParts = sb.toString();
- }
-
- /* Look up bridge pool assignment. */
- if (this.bridgePoolAssignments.containsKey(fingerprint)) {
- bridgePoolAssignment = "\"pool_assignment\":\""
- + this.bridgePoolAssignments.get(fingerprint) + "\"";
- }
-
- /* Generate network-status-specific part. */
- Node entry = bridge.getValue();
- String nickname = entry.getNickname();
- String lastSeen = dateTimeFormat.format(entry.getLastSeenMillis());
- String firstSeen = dateTimeFormat.format(
- entry.getFirstSeenMillis());
- String running = entry.getRunning() ? "true" : "false";
- String address = entry.getAddress();
- List<String> orAddresses = new ArrayList<String>();
- orAddresses.add(address + ":" + entry.getOrPort());
- orAddresses.addAll(entry.getOrAddressesAndPorts());
- StringBuilder orAddressesAndPortsBuilder = new StringBuilder();
- int addressesWritten = 0;
- for (String orAddress : orAddresses) {
- orAddressesAndPortsBuilder.append(
- (addressesWritten++ > 0 ? "," : "") + "\""
- + orAddress.toLowerCase() + "\"");
- }
- StringBuilder sb = new StringBuilder();
- sb.append("{\"version\":1,\n"
- + "\"nickname\":\"" + nickname + "\",\n"
- + "\"hashed_fingerprint\":\"" + fingerprint + "\",\n"
- + "\"or_addresses\":[" + orAddressesAndPortsBuilder.toString()
- + "],\n\"last_seen\":\"" + lastSeen + "\",\n\"first_seen\":\""
- + firstSeen + "\",\n\"running\":" + running);
-
- SortedSet<String> relayFlags = entry.getRelayFlags();
- if (!relayFlags.isEmpty()) {
- sb.append(",\n\"flags\":[");
- int written = 0;
- for (String relayFlag : relayFlags) {
- sb.append((written++ > 0 ? "," : "") + "\"" + relayFlag + "\"");
- }
- sb.append("]");
- }
-
- /* Append descriptor and bridge pool assignment parts. */
- if (descriptorParts != null && descriptorParts.length() != 0) {
- sb.append(",\n" + descriptorParts);
- }
- if (bridgePoolAssignment != null) {
- sb.append(",\n" + bridgePoolAssignment);
- }
- sb.append("\n}\n");
-
- /* Write details file to disk. */
- String detailsLines = sb.toString();
- this.documentStore.store(detailsLines, DocumentType.OUT_DETAILS,
- fingerprint);
- }
- }
-
- private void deleteDetailsFiles(
- SortedSet<String> remainingDetailsFiles) {
- for (String fingerprint : remainingDetailsFiles) {
- this.documentStore.remove(DocumentType.OUT_DETAILS, fingerprint);
- }
- }
-}
-
diff --git a/src/org/torproject/onionoo/DetailsDataWriter.java b/src/org/torproject/onionoo/DetailsDataWriter.java
new file mode 100644
index 0000000..3d49169
--- /dev/null
+++ b/src/org/torproject/onionoo/DetailsDataWriter.java
@@ -0,0 +1,821 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.onionoo;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Scanner;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.SortedSet;
+import java.util.TimeZone;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
+import org.apache.commons.lang.StringEscapeUtils;
+
+import org.torproject.descriptor.BridgePoolAssignment;
+import org.torproject.descriptor.Descriptor;
+import org.torproject.descriptor.ExitList;
+import org.torproject.descriptor.ExitListEntry;
+import org.torproject.descriptor.ServerDescriptor;
+
+/* Write updated detail data files to disk and delete files of relays or
+ * bridges that fell out of the summary list.
+ *
+ * The parts of details files coming from server descriptors always come
+ * from the last known descriptor of a relay or bridge, not from the
+ * descriptor that was last referenced in a network status. */
+public class DetailsDataWriter {
+
+ private DescriptorSource descriptorSource;
+
+ private DocumentStore documentStore;
+
+ private SortedMap<String, NodeStatus> relays;
+
+ private SortedMap<String, NodeStatus> bridges;
+
+ public DetailsDataWriter(DescriptorSource descriptorSource,
+ DocumentStore documentStore) {
+ this.descriptorSource = descriptorSource;
+ this.documentStore = documentStore;
+ }
+
+ public void setCurrentNodes(
+ SortedMap<String, NodeStatus> currentNodes) {
+ this.relays = new TreeMap<String, NodeStatus>();
+ this.bridges = new TreeMap<String, NodeStatus>();
+ for (Map.Entry<String, NodeStatus> e : currentNodes.entrySet()) {
+ if (e.getValue().isRelay()) {
+ this.relays.put(e.getKey(), e.getValue());
+ } else {
+ this.bridges.put(e.getKey(), e.getValue());
+ }
+ }
+ }
+
+ private static final long RDNS_LOOKUP_MAX_REQUEST_MILLIS = 10L * 1000L;
+ private static final long RDNS_LOOKUP_MAX_DURATION_MILLIS = 5L * 60L
+ * 1000L;
+ private static final long RDNS_LOOKUP_MAX_AGE_MILLIS = 12L * 60L * 60L
+ * 1000L;
+ private static final int RDNS_LOOKUP_WORKERS_NUM = 5;
+ private Set<String> rdnsLookupJobs;
+ private Map<String, String> rdnsLookupResults;
+ private long startedRdnsLookups;
+ private List<RdnsLookupWorker> rdnsLookupWorkers;
+ public void startReverseDomainNameLookups() {
+ this.startedRdnsLookups = System.currentTimeMillis();
+ this.rdnsLookupJobs = new HashSet<String>();
+ for (NodeStatus relay : relays.values()) {
+ if (relay.getLastRdnsLookup() < this.startedRdnsLookups
+ - RDNS_LOOKUP_MAX_AGE_MILLIS) {
+ this.rdnsLookupJobs.add(relay.getAddress());
+ }
+ }
+ this.rdnsLookupResults = new HashMap<String, String>();
+ this.rdnsLookupWorkers = new ArrayList<RdnsLookupWorker>();
+ for (int i = 0; i < RDNS_LOOKUP_WORKERS_NUM; i++) {
+ RdnsLookupWorker rdnsLookupWorker = new RdnsLookupWorker();
+ this.rdnsLookupWorkers.add(rdnsLookupWorker);
+ rdnsLookupWorker.setDaemon(true);
+ rdnsLookupWorker.start();
+ }
+ }
+
+ public void finishReverseDomainNameLookups() {
+ for (RdnsLookupWorker rdnsLookupWorker : this.rdnsLookupWorkers) {
+ try {
+ rdnsLookupWorker.join();
+ } catch (InterruptedException e) {
+ /* This is not something that we can take care of. Just leave the
+ * worker thread alone. */
+ }
+ }
+ synchronized (this.rdnsLookupResults) {
+ for (NodeStatus relay : relays.values()) {
+ if (this.rdnsLookupResults.containsKey(relay.getAddress())) {
+ relay.setHostName(this.rdnsLookupResults.get(
+ relay.getAddress()));
+ relay.setLastRdnsLookup(this.startedRdnsLookups);
+ }
+ }
+ }
+ }
+
+ private class RdnsLookupWorker extends Thread {
+ public void run() {
+ while (System.currentTimeMillis() - RDNS_LOOKUP_MAX_DURATION_MILLIS
+ <= startedRdnsLookups) {
+ String rdnsLookupJob = null;
+ synchronized (rdnsLookupJobs) {
+ for (String job : rdnsLookupJobs) {
+ rdnsLookupJob = job;
+ rdnsLookupJobs.remove(job);
+ break;
+ }
+ }
+ if (rdnsLookupJob == null) {
+ break;
+ }
+ RdnsLookupRequest request = new RdnsLookupRequest(this,
+ rdnsLookupJob);
+ request.setDaemon(true);
+ request.start();
+ try {
+ Thread.sleep(RDNS_LOOKUP_MAX_REQUEST_MILLIS);
+ } catch (InterruptedException e) {
+ /* Getting interrupted should be the default case. */
+ }
+ String hostName = request.getHostName();
+ if (hostName != null) {
+ synchronized (rdnsLookupResults) {
+ rdnsLookupResults.put(rdnsLookupJob, hostName);
+ }
+ }
+ }
+ }
+ }
+
+ private class RdnsLookupRequest extends Thread {
+ RdnsLookupWorker parent;
+ String address, hostName;
+ public RdnsLookupRequest(RdnsLookupWorker parent, String address) {
+ this.parent = parent;
+ this.address = address;
+ }
+ public void run() {
+ try {
+ String result = InetAddress.getByName(this.address).getHostName();
+ synchronized (this) {
+ this.hostName = result;
+ }
+ } catch (UnknownHostException e) {
+ /* We'll try again the next time. */
+ }
+ this.parent.interrupt();
+ }
+ public synchronized String getHostName() {
+ return hostName;
+ }
+ }
+
+ private Map<String, ServerDescriptor> relayServerDescriptors =
+ new HashMap<String, ServerDescriptor>();
+ public void readRelayServerDescriptors() {
+ /* Don't remember which server descriptors we already parsed. If we
+ * parse a server descriptor now and first learn about the relay in a
+ * later consensus, we'll never write the descriptor content anywhere.
+ * The result would be details files containing no descriptor parts
+ * until the relay publishes the next descriptor. */
+ DescriptorQueue descriptorQueue =
+ this.descriptorSource.getDescriptorQueue(
+ DescriptorType.RELAY_SERVER_DESCRIPTORS);
+ Descriptor descriptor;
+ while ((descriptor = descriptorQueue.nextDescriptor()) != null) {
+ if (descriptor instanceof ServerDescriptor) {
+ ServerDescriptor serverDescriptor = (ServerDescriptor) descriptor;
+ String fingerprint = serverDescriptor.getFingerprint();
+ if (!this.relayServerDescriptors.containsKey(fingerprint) ||
+ this.relayServerDescriptors.get(fingerprint).
+ getPublishedMillis()
+ < serverDescriptor.getPublishedMillis()) {
+ this.relayServerDescriptors.put(fingerprint,
+ serverDescriptor);
+ }
+ }
+ }
+ }
+
+ public void calculatePathSelectionProbabilities(
+ SortedMap<String, Integer> bandwidthWeights) {
+ boolean consensusContainsBandwidthWeights = false;
+ double wgg = 0.0, wgd = 0.0, wmg = 0.0, wmm = 0.0, wme = 0.0,
+ wmd = 0.0, wee = 0.0, wed = 0.0;
+ if (bandwidthWeights != null) {
+ SortedSet<String> weightKeys = new TreeSet<String>(Arrays.asList(
+ "Wgg,Wgd,Wmg,Wmm,Wme,Wmd,Wee,Wed".split(",")));
+ weightKeys.removeAll(bandwidthWeights.keySet());
+ if (weightKeys.isEmpty()) {
+ consensusContainsBandwidthWeights = true;
+ wgg = ((double) bandwidthWeights.get("Wgg")) / 10000.0;
+ wgd = ((double) bandwidthWeights.get("Wgd")) / 10000.0;
+ wmg = ((double) bandwidthWeights.get("Wmg")) / 10000.0;
+ wmm = ((double) bandwidthWeights.get("Wmm")) / 10000.0;
+ wme = ((double) bandwidthWeights.get("Wme")) / 10000.0;
+ wmd = ((double) bandwidthWeights.get("Wmd")) / 10000.0;
+ wee = ((double) bandwidthWeights.get("Wee")) / 10000.0;
+ wed = ((double) bandwidthWeights.get("Wed")) / 10000.0;
+ }
+ } else {
+ System.err.println("Could not determine most recent Wxx parameter "
+ + "values, probably because we didn't parse a consensus in "
+ + "this execution. All relays' guard/middle/exit weights are "
+ + "going to be 0.0.");
+ }
+ SortedMap<String, Double>
+ advertisedBandwidths = new TreeMap<String, Double>(),
+ consensusWeights = new TreeMap<String, Double>(),
+ guardWeights = new TreeMap<String, Double>(),
+ middleWeights = new TreeMap<String, Double>(),
+ exitWeights = new TreeMap<String, Double>();
+ double totalAdvertisedBandwidth = 0.0;
+ double totalConsensusWeight = 0.0;
+ double totalGuardWeight = 0.0;
+ double totalMiddleWeight = 0.0;
+ double totalExitWeight = 0.0;
+ for (Map.Entry<String, NodeStatus> e : this.relays.entrySet()) {
+ String fingerprint = e.getKey();
+ NodeStatus relay = e.getValue();
+ if (!relay.getRunning()) {
+ continue;
+ }
+ boolean isExit = relay.getRelayFlags().contains("Exit") &&
+ !relay.getRelayFlags().contains("BadExit");
+ boolean isGuard = relay.getRelayFlags().contains("Guard");
+ if (this.relayServerDescriptors.containsKey(fingerprint)) {
+ ServerDescriptor serverDescriptor =
+ this.relayServerDescriptors.get(fingerprint);
+ double advertisedBandwidth = (double) Math.min(Math.min(
+ serverDescriptor.getBandwidthBurst(),
+ serverDescriptor.getBandwidthObserved()),
+ serverDescriptor.getBandwidthRate());
+ advertisedBandwidths.put(fingerprint, advertisedBandwidth);
+ totalAdvertisedBandwidth += advertisedBandwidth;
+ }
+ double consensusWeight = (double) relay.getConsensusWeight();
+ consensusWeights.put(fingerprint, consensusWeight);
+ totalConsensusWeight += consensusWeight;
+ if (consensusContainsBandwidthWeights) {
+ double guardWeight = consensusWeight,
+ middleWeight = consensusWeight,
+ exitWeight = consensusWeight;
+ if (isGuard && isExit) {
+ guardWeight *= wgd;
+ middleWeight *= wmd;
+ exitWeight *= wed;
+ } else if (isGuard) {
+ guardWeight *= wgg;
+ middleWeight *= wmg;
+ exitWeight = 0.0;
+ } else if (isExit) {
+ guardWeight = 0.0;
+ middleWeight *= wme;
+ exitWeight *= wee;
+ } else {
+ guardWeight = 0.0;
+ middleWeight *= wmm;
+ exitWeight = 0.0;
+ }
+ guardWeights.put(fingerprint, guardWeight);
+ middleWeights.put(fingerprint, middleWeight);
+ exitWeights.put(fingerprint, exitWeight);
+ totalGuardWeight += guardWeight;
+ totalMiddleWeight += middleWeight;
+ totalExitWeight += exitWeight;
+ }
+ }
+ for (Map.Entry<String, NodeStatus> e : this.relays.entrySet()) {
+ String fingerprint = e.getKey();
+ NodeStatus relay = e.getValue();
+ if (advertisedBandwidths.containsKey(fingerprint)) {
+ relay.setAdvertisedBandwidthFraction(advertisedBandwidths.get(
+ fingerprint) / totalAdvertisedBandwidth);
+ }
+ if (consensusWeights.containsKey(fingerprint)) {
+ relay.setConsensusWeightFraction(consensusWeights.get(fingerprint)
+ / totalConsensusWeight);
+ }
+ if (guardWeights.containsKey(fingerprint)) {
+ relay.setGuardProbability(guardWeights.get(fingerprint)
+ / totalGuardWeight);
+ }
+ if (middleWeights.containsKey(fingerprint)) {
+ relay.setMiddleProbability(middleWeights.get(fingerprint)
+ / totalMiddleWeight);
+ }
+ if (exitWeights.containsKey(fingerprint)) {
+ relay.setExitProbability(exitWeights.get(fingerprint)
+ / totalExitWeight);
+ }
+ }
+ }
+
+ private long now = System.currentTimeMillis();
+ private Map<String, Set<ExitListEntry>> exitListEntries =
+ new HashMap<String, Set<ExitListEntry>>();
+ public void readExitLists() {
+ DescriptorQueue descriptorQueue =
+ this.descriptorSource.getDescriptorQueue(
+ DescriptorType.EXIT_LISTS, DescriptorHistory.EXIT_LIST_HISTORY);
+ Descriptor descriptor;
+ while ((descriptor = descriptorQueue.nextDescriptor()) != null) {
+ if (descriptor instanceof ExitList) {
+ ExitList exitList = (ExitList) descriptor;
+ for (ExitListEntry exitListEntry :
+ exitList.getExitListEntries()) {
+ if (exitListEntry.getScanMillis() <
+ this.now - 24L * 60L * 60L * 1000L) {
+ continue;
+ }
+ String fingerprint = exitListEntry.getFingerprint();
+ if (!this.exitListEntries.containsKey(fingerprint)) {
+ this.exitListEntries.put(fingerprint,
+ new HashSet<ExitListEntry>());
+ }
+ this.exitListEntries.get(fingerprint).add(exitListEntry);
+ }
+ }
+ }
+ }
+
+ private Map<String, ServerDescriptor> bridgeServerDescriptors =
+ new HashMap<String, ServerDescriptor>();
+ public void readBridgeServerDescriptors() {
+ /* Don't remember which server descriptors we already parsed. If we
+ * parse a server descriptor now and first learn about the relay in a
+ * later status, we'll never write the descriptor content anywhere.
+ * The result would be details files containing no descriptor parts
+ * until the bridge publishes the next descriptor. */
+ DescriptorQueue descriptorQueue =
+ this.descriptorSource.getDescriptorQueue(
+ DescriptorType.BRIDGE_SERVER_DESCRIPTORS);
+ Descriptor descriptor;
+ while ((descriptor = descriptorQueue.nextDescriptor()) != null) {
+ if (descriptor instanceof ServerDescriptor) {
+ ServerDescriptor serverDescriptor = (ServerDescriptor) descriptor;
+ String fingerprint = serverDescriptor.getFingerprint();
+ if (!this.bridgeServerDescriptors.containsKey(fingerprint) ||
+ this.bridgeServerDescriptors.get(fingerprint).
+ getPublishedMillis()
+ < serverDescriptor.getPublishedMillis()) {
+ this.bridgeServerDescriptors.put(fingerprint,
+ serverDescriptor);
+ }
+ }
+ }
+ }
+
+ private Map<String, String> bridgePoolAssignments =
+ new HashMap<String, String>();
+ public void readBridgePoolAssignments() {
+ DescriptorQueue descriptorQueue =
+ this.descriptorSource.getDescriptorQueue(
+ DescriptorType.BRIDGE_POOL_ASSIGNMENTS,
+ DescriptorHistory.BRIDGE_POOLASSIGN_HISTORY);
+ Descriptor descriptor;
+ while ((descriptor = descriptorQueue.nextDescriptor()) != null) {
+ if (descriptor instanceof BridgePoolAssignment) {
+ BridgePoolAssignment bridgePoolAssignment =
+ (BridgePoolAssignment) descriptor;
+ for (Map.Entry<String, String> e :
+ bridgePoolAssignment.getEntries().entrySet()) {
+ String fingerprint = e.getKey();
+ String details = e.getValue();
+ this.bridgePoolAssignments.put(fingerprint, details);
+ }
+ }
+ }
+ }
+
+ public void writeOutDetails() {
+ SortedSet<String> remainingDetailsFiles = new TreeSet<String>();
+ remainingDetailsFiles.addAll(this.documentStore.list(
+ DetailsDocument.class, false));
+ this.updateRelayDetailsFiles(remainingDetailsFiles);
+ this.updateBridgeDetailsFiles(remainingDetailsFiles);
+ this.deleteDetailsFiles(remainingDetailsFiles);
+ }
+
+ private static String escapeJSON(String s) {
+ return StringEscapeUtils.escapeJavaScript(s).replaceAll("\\\\'", "'");
+ }
+
+ private void updateRelayDetailsFiles(
+ SortedSet<String> remainingDetailsFiles) {
+ SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
+ "yyyy-MM-dd HH:mm:ss");
+ dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ for (Map.Entry<String, NodeStatus> relay : this.relays.entrySet()) {
+ String fingerprint = relay.getKey();
+
+ /* Read details file for this relay if it exists. */
+ String descriptorParts = null;
+ long publishedMillis = -1L;
+ if (remainingDetailsFiles.contains(fingerprint)) {
+ remainingDetailsFiles.remove(fingerprint);
+ // TODO Use parsed details document here.
+ DetailsDocument detailsDocument = this.documentStore.retrieve(
+ DetailsDocument.class, false, fingerprint);
+ String documentString = detailsDocument.documentString;
+ if (documentString != null) {
+ try {
+ boolean copyDescriptorParts = false;
+ StringBuilder sb = new StringBuilder();
+ Scanner s = new Scanner(documentString);
+ while (s.hasNextLine()) {
+ String line = s.nextLine();
+ if (line.startsWith("\"desc_published\":")) {
+ String published = line.substring(
+ "\"desc_published\":\"".length(),
+ "\"desc_published\":\"1970-01-01 00:00:00".length());
+ publishedMillis = dateTimeFormat.parse(published).
+ getTime();
+ copyDescriptorParts = true;
+ }
+ if (copyDescriptorParts) {
+ sb.append(line + "\n");
+ }
+ }
+ s.close();
+ if (sb.length() > 0) {
+ descriptorParts = sb.toString();
+ }
+ } catch (ParseException e) {
+ System.err.println("Could not parse timestamp in details.json "
+ + "file for '" + fingerprint + "'. Ignoring.");
+ e.printStackTrace();
+ publishedMillis = -1L;
+ descriptorParts = null;
+ }
+ }
+ }
+
+ /* Generate new descriptor-specific part if we have a more recent
+ * descriptor or if the part we read didn't contain a last_restarted
+ * line. */
+ if (this.relayServerDescriptors.containsKey(fingerprint) &&
+ (this.relayServerDescriptors.get(fingerprint).
+ getPublishedMillis() > publishedMillis)) {
+ ServerDescriptor descriptor = this.relayServerDescriptors.get(
+ fingerprint);
+ StringBuilder sb = new StringBuilder();
+ String publishedDateTime = dateTimeFormat.format(
+ descriptor.getPublishedMillis());
+ String lastRestartedString = dateTimeFormat.format(
+ descriptor.getPublishedMillis()
+ - descriptor.getUptime() * 1000L);
+ int bandwidthRate = descriptor.getBandwidthRate();
+ int bandwidthBurst = descriptor.getBandwidthBurst();
+ int observedBandwidth = descriptor.getBandwidthObserved();
+ int advertisedBandwidth = Math.min(bandwidthRate,
+ Math.min(bandwidthBurst, observedBandwidth));
+ sb.append("\"desc_published\":\"" + publishedDateTime + "\",\n"
+ + "\"last_restarted\":\"" + lastRestartedString + "\",\n"
+ + "\"bandwidth_rate\":" + bandwidthRate + ",\n"
+ + "\"bandwidth_burst\":" + bandwidthBurst + ",\n"
+ + "\"observed_bandwidth\":" + observedBandwidth + ",\n"
+ + "\"advertised_bandwidth\":" + advertisedBandwidth + ",\n"
+ + "\"exit_policy\":[");
+ int written = 0;
+ for (String exitPolicyLine : descriptor.getExitPolicyLines()) {
+ sb.append((written++ > 0 ? "," : "") + "\n \"" + exitPolicyLine
+ + "\"");
+ }
+ sb.append("\n]");
+ if (descriptor.getContact() != null) {
+ sb.append(",\n\"contact\":\""
+ + escapeJSON(descriptor.getContact()) + "\"");
+ }
+ if (descriptor.getPlatform() != null) {
+ sb.append(",\n\"platform\":\""
+ + escapeJSON(descriptor.getPlatform()) + "\"");
+ }
+ if (descriptor.getFamilyEntries() != null) {
+ sb.append(",\n\"family\":[");
+ written = 0;
+ for (String familyEntry : descriptor.getFamilyEntries()) {
+ sb.append((written++ > 0 ? "," : "") + "\n \"" + familyEntry
+ + "\"");
+ }
+ sb.append("\n]");
+ }
+ sb.append("\n}\n");
+ descriptorParts = sb.toString();
+ }
+
+ /* Generate network-status-specific part. */
+ NodeStatus entry = relay.getValue();
+ String nickname = entry.getNickname();
+ String address = entry.getAddress();
+ List<String> orAddresses = new ArrayList<String>();
+ orAddresses.add(address + ":" + entry.getOrPort());
+ orAddresses.addAll(entry.getOrAddressesAndPorts());
+ StringBuilder orAddressesAndPortsBuilder = new StringBuilder();
+ int addressesWritten = 0;
+ for (String orAddress : orAddresses) {
+ orAddressesAndPortsBuilder.append(
+ (addressesWritten++ > 0 ? "," : "") + "\""
+ + orAddress.toLowerCase() + "\"");
+ }
+ String lastSeen = dateTimeFormat.format(entry.getLastSeenMillis());
+ String firstSeen = dateTimeFormat.format(
+ entry.getFirstSeenMillis());
+ String lastChangedOrAddress = dateTimeFormat.format(
+ entry.getLastChangedOrAddress());
+ String running = entry.getRunning() ? "true" : "false";
+ int dirPort = entry.getDirPort();
+ String countryCode = entry.getCountryCode();
+ String latitude = entry.getLatitude();
+ String longitude = entry.getLongitude();
+ String countryName = entry.getCountryName();
+ String regionName = entry.getRegionName();
+ String cityName = entry.getCityName();
+ String aSNumber = entry.getASNumber();
+ String aSName = entry.getASName();
+ long consensusWeight = entry.getConsensusWeight();
+ String hostName = entry.getHostName();
+ double advertisedBandwidthFraction =
+ entry.getAdvertisedBandwidthFraction();
+ double consensusWeightFraction = entry.getConsensusWeightFraction();
+ double guardProbability = entry.getGuardProbability();
+ double middleProbability = entry.getMiddleProbability();
+ double exitProbability = entry.getExitProbability();
+ String defaultPolicy = entry.getDefaultPolicy();
+ String portList = entry.getPortList();
+ StringBuilder sb = new StringBuilder();
+ sb.append("{\"version\":1,\n"
+ + "\"nickname\":\"" + nickname + "\",\n"
+ + "\"fingerprint\":\"" + fingerprint + "\",\n"
+ + "\"or_addresses\":[" + orAddressesAndPortsBuilder.toString()
+ + "]");
+ if (dirPort != 0) {
+ sb.append(",\n\"dir_address\":\"" + address + ":" + dirPort
+ + "\"");
+ }
+ sb.append(",\n\"last_seen\":\"" + lastSeen + "\"");
+ sb.append(",\n\"first_seen\":\"" + firstSeen + "\"");
+ sb.append(",\n\"last_changed_address_or_port\":\""
+ + lastChangedOrAddress + "\"");
+ sb.append(",\n\"running\":" + running);
+ SortedSet<String> relayFlags = entry.getRelayFlags();
+ if (!relayFlags.isEmpty()) {
+ sb.append(",\n\"flags\":[");
+ int written = 0;
+ for (String relayFlag : relayFlags) {
+ sb.append((written++ > 0 ? "," : "") + "\"" + relayFlag + "\"");
+ }
+ sb.append("]");
+ }
+ if (countryCode != null) {
+ sb.append(",\n\"country\":\"" + countryCode + "\"");
+ }
+ if (latitude != null) {
+ sb.append(",\n\"latitude\":" + latitude);
+ }
+ if (longitude != null) {
+ sb.append(",\n\"longitude\":" + longitude);
+ }
+ if (countryName != null) {
+ sb.append(",\n\"country_name\":\""
+ + escapeJSON(countryName) + "\"");
+ }
+ if (regionName != null) {
+ sb.append(",\n\"region_name\":\""
+ + escapeJSON(regionName) + "\"");
+ }
+ if (cityName != null) {
+ sb.append(",\n\"city_name\":\""
+ + escapeJSON(cityName) + "\"");
+ }
+ if (aSNumber != null) {
+ sb.append(",\n\"as_number\":\""
+ + escapeJSON(aSNumber) + "\"");
+ }
+ if (aSName != null) {
+ sb.append(",\n\"as_name\":\""
+ + escapeJSON(aSName) + "\"");
+ }
+ if (consensusWeight >= 0L) {
+ sb.append(",\n\"consensus_weight\":"
+ + String.valueOf(consensusWeight));
+ }
+ if (hostName != null) {
+ sb.append(",\n\"host_name\":\""
+ + escapeJSON(hostName) + "\"");
+ }
+ if (advertisedBandwidthFraction >= 0.0) {
+ sb.append(String.format(
+ ",\n\"advertised_bandwidth_fraction\":%.9f",
+ advertisedBandwidthFraction));
+ }
+ if (consensusWeightFraction >= 0.0) {
+ sb.append(String.format(",\n\"consensus_weight_fraction\":%.9f",
+ consensusWeightFraction));
+ }
+ if (guardProbability >= 0.0) {
+ sb.append(String.format(",\n\"guard_probability\":%.9f",
+ guardProbability));
+ }
+ if (middleProbability >= 0.0) {
+ sb.append(String.format(",\n\"middle_probability\":%.9f",
+ middleProbability));
+ }
+ if (exitProbability >= 0.0) {
+ sb.append(String.format(",\n\"exit_probability\":%.9f",
+ exitProbability));
+ }
+ if (defaultPolicy != null && (defaultPolicy.equals("accept") ||
+ defaultPolicy.equals("reject")) && portList != null) {
+ sb.append(",\n\"exit_policy_summary\":{\"" + defaultPolicy
+ + "\":[");
+ int portsWritten = 0;
+ for (String portOrPortRange : portList.split(",")) {
+ sb.append((portsWritten++ > 0 ? "," : "")
+ + "\"" + portOrPortRange + "\"");
+ }
+ sb.append("]}");
+ }
+
+ /* Add exit addresses if at least one of them is distinct from the
+ * onion-routing addresses. */
+ if (exitListEntries.containsKey(fingerprint)) {
+ for (ExitListEntry exitListEntry :
+ exitListEntries.get(fingerprint)) {
+ entry.addExitAddress(exitListEntry.getExitAddress());
+ }
+ }
+ if (!entry.getExitAddresses().isEmpty()) {
+ sb.append(",\n\"exit_addresses\":[");
+ int written = 0;
+ for (String exitAddress : entry.getExitAddresses()) {
+ sb.append((written++ > 0 ? "," : "") + "\""
+ + exitAddress.toLowerCase() + "\"");
+ }
+ sb.append("]");
+ }
+
+ /* Add descriptor parts. */
+ if (descriptorParts != null) {
+ sb.append(",\n" + descriptorParts);
+ } else {
+ sb.append("\n}\n");
+ }
+
+ /* Write details file to disk. */
+ DetailsDocument detailsDocument = new DetailsDocument();
+ detailsDocument.documentString = sb.toString();
+ this.documentStore.store(detailsDocument, fingerprint);
+ }
+ }
+
+ private void updateBridgeDetailsFiles(
+ SortedSet<String> remainingDetailsFiles) {
+ SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
+ "yyyy-MM-dd HH:mm:ss");
+ dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ for (Map.Entry<String, NodeStatus> bridge : this.bridges.entrySet()) {
+ String fingerprint = bridge.getKey();
+
+ /* Read details file for this bridge if it exists. */
+ String descriptorParts = null, bridgePoolAssignment = null;
+ long publishedMillis = -1L;
+ if (remainingDetailsFiles.contains(fingerprint)) {
+ remainingDetailsFiles.remove(fingerprint);
+ // TODO Use parsed details document here.
+ DetailsDocument detailsDocument = this.documentStore.retrieve(
+ DetailsDocument.class, false, fingerprint);
+ String documentString = detailsDocument.documentString;
+ if (documentString != null) {
+ try {
+ boolean copyDescriptorParts = false;
+ StringBuilder sb = new StringBuilder();
+ Scanner s = new Scanner(documentString);
+ while (s.hasNextLine()) {
+ String line = s.nextLine();
+ if (line.startsWith("\"desc_published\":")) {
+ String published = line.substring(
+ "\"desc_published\":\"".length(),
+ "\"desc_published\":\"1970-01-01 00:00:00".length());
+ publishedMillis = dateTimeFormat.parse(published).
+ getTime();
+ copyDescriptorParts = true;
+ } else if (line.startsWith("\"pool_assignment\":")) {
+ bridgePoolAssignment = line;
+ copyDescriptorParts = false;
+ } else if (line.equals("}")) {
+ copyDescriptorParts = false;
+ }
+ if (copyDescriptorParts) {
+ sb.append(line + "\n");
+ }
+ }
+ s.close();
+ descriptorParts = sb.toString();
+ if (descriptorParts.endsWith(",\n")) {
+ descriptorParts = descriptorParts.substring(0,
+ descriptorParts.length() - 2);
+ } else if (descriptorParts.endsWith("\n")) {
+ descriptorParts = descriptorParts.substring(0,
+ descriptorParts.length() - 1);
+ }
+ } catch (ParseException e) {
+ System.err.println("Could not parse timestamp in "
+ + "details.json file for '" + fingerprint + "'. "
+ + "Ignoring.");
+ e.printStackTrace();
+ publishedMillis = -1L;
+ descriptorParts = null;
+ }
+ }
+ }
+
+ /* Generate new descriptor-specific part if we have a more recent
+ * descriptor. */
+ if (this.bridgeServerDescriptors.containsKey(fingerprint) &&
+ this.bridgeServerDescriptors.get(fingerprint).
+ getPublishedMillis() > publishedMillis) {
+ ServerDescriptor descriptor = this.bridgeServerDescriptors.get(
+ fingerprint);
+ StringBuilder sb = new StringBuilder();
+ String publishedDateTime = dateTimeFormat.format(
+ descriptor.getPublishedMillis());
+ String lastRestartedString = dateTimeFormat.format(
+ descriptor.getPublishedMillis()
+ - descriptor.getUptime() * 1000L);
+ int advertisedBandwidth = Math.min(descriptor.getBandwidthRate(),
+ Math.min(descriptor.getBandwidthBurst(),
+ descriptor.getBandwidthObserved()));
+ sb.append("\"desc_published\":\"" + publishedDateTime + "\",\n"
+ + "\"last_restarted\":\"" + lastRestartedString + "\",\n"
+ + "\"advertised_bandwidth\":" + advertisedBandwidth + ",\n"
+ + "\"platform\":\"" + escapeJSON(descriptor.getPlatform())
+ + "\"");
+ descriptorParts = sb.toString();
+ }
+
+ /* Look up bridge pool assignment. */
+ if (this.bridgePoolAssignments.containsKey(fingerprint)) {
+ bridgePoolAssignment = "\"pool_assignment\":\""
+ + this.bridgePoolAssignments.get(fingerprint) + "\"";
+ }
+
+ /* Generate network-status-specific part. */
+ NodeStatus entry = bridge.getValue();
+ String nickname = entry.getNickname();
+ String lastSeen = dateTimeFormat.format(entry.getLastSeenMillis());
+ String firstSeen = dateTimeFormat.format(
+ entry.getFirstSeenMillis());
+ String running = entry.getRunning() ? "true" : "false";
+ String address = entry.getAddress();
+ List<String> orAddresses = new ArrayList<String>();
+ orAddresses.add(address + ":" + entry.getOrPort());
+ orAddresses.addAll(entry.getOrAddressesAndPorts());
+ StringBuilder orAddressesAndPortsBuilder = new StringBuilder();
+ int addressesWritten = 0;
+ for (String orAddress : orAddresses) {
+ orAddressesAndPortsBuilder.append(
+ (addressesWritten++ > 0 ? "," : "") + "\""
+ + orAddress.toLowerCase() + "\"");
+ }
+ StringBuilder sb = new StringBuilder();
+ sb.append("{\"version\":1,\n"
+ + "\"nickname\":\"" + nickname + "\",\n"
+ + "\"hashed_fingerprint\":\"" + fingerprint + "\",\n"
+ + "\"or_addresses\":[" + orAddressesAndPortsBuilder.toString()
+ + "],\n\"last_seen\":\"" + lastSeen + "\",\n\"first_seen\":\""
+ + firstSeen + "\",\n\"running\":" + running);
+
+ SortedSet<String> relayFlags = entry.getRelayFlags();
+ if (!relayFlags.isEmpty()) {
+ sb.append(",\n\"flags\":[");
+ int written = 0;
+ for (String relayFlag : relayFlags) {
+ sb.append((written++ > 0 ? "," : "") + "\"" + relayFlag + "\"");
+ }
+ sb.append("]");
+ }
+
+ /* Append descriptor and bridge pool assignment parts. */
+ if (descriptorParts != null && descriptorParts.length() != 0) {
+ sb.append(",\n" + descriptorParts);
+ }
+ if (bridgePoolAssignment != null) {
+ sb.append(",\n" + bridgePoolAssignment);
+ }
+ sb.append("\n}\n");
+
+ /* Write details file to disk. */
+ DetailsDocument detailsDocument = new DetailsDocument();
+ detailsDocument.documentString = sb.toString();
+ this.documentStore.store(detailsDocument, fingerprint);
+ }
+ }
+
+ private void deleteDetailsFiles(
+ SortedSet<String> remainingDetailsFiles) {
+ for (String fingerprint : remainingDetailsFiles) {
+ this.documentStore.remove(DetailsDocument.class, fingerprint);
+ }
+ }
+}
+
diff --git a/src/org/torproject/onionoo/DetailsDocument.java b/src/org/torproject/onionoo/DetailsDocument.java
new file mode 100644
index 0000000..9b81bbd
--- /dev/null
+++ b/src/org/torproject/onionoo/DetailsDocument.java
@@ -0,0 +1,71 @@
+/* Copyright 2013 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.onionoo;
+
+import java.util.List;
+
+class DetailsDocument extends Document {
+
+ class ExitPolicySummary {
+ List<String> reject;
+ List<String> accept;
+ }
+
+ class RelayDetails {
+ String nickname;
+ String fingerprint;
+ List<String> or_addresses;
+ List<String> exit_addresses;
+ String dir_address;
+ String last_seen;
+ String last_changed_address_or_port;
+ String first_seen;
+ Boolean running;
+ List<String> flags;
+ String country;
+ String country_name;
+ String region_name;
+ String city_name;
+ Double latitude;
+ Double longitude;
+ String as_number;
+ String as_name;
+ Double consensus_weight;
+ String host_name;
+ String last_restarted;
+ Integer bandwidth_rate;
+ Integer bandwidth_burst;
+ Integer observed_bandwidth;
+ Integer advertised_bandwidth;
+ List<String> exit_policy;
+ ExitPolicySummary exit_policy_summary;
+ String contact;
+ String platform;
+ List<String> family;
+ Double advertised_bandwidth_fraction;
+ Double consensus_weight_fraction;
+ Double guard_probability;
+ Double middle_probability;
+ Double exit_probability;
+ }
+
+ class BridgeDetails {
+ String nickname;
+ String hashed_fingerprint;
+ List<String> or_addresses;
+ String last_seen;
+ String first_seen;
+ Boolean running;
+ List<String> flags;
+ String last_restarted;
+ Integer advertised_bandwidth;
+ String platform;
+ String pool_assignment;
+ }
+
+ String relays_published;
+ List<RelayDetails> relays;
+ String bridges_published;
+ List<BridgeDetails> bridges;
+}
+
diff --git a/src/org/torproject/onionoo/Document.java b/src/org/torproject/onionoo/Document.java
new file mode 100644
index 0000000..9730d66
--- /dev/null
+++ b/src/org/torproject/onionoo/Document.java
@@ -0,0 +1,8 @@
+/* Copyright 2013 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.onionoo;
+
+abstract class Document {
+ transient String documentString;
+}
+
diff --git a/src/org/torproject/onionoo/DocumentStore.java b/src/org/torproject/onionoo/DocumentStore.java
index c1097a0..829f35f 100644
--- a/src/org/torproject/onionoo/DocumentStore.java
+++ b/src/org/torproject/onionoo/DocumentStore.java
@@ -1,6 +1,5 @@
/* Copyright 2013 The Tor Project
* See LICENSE for licensing information */
-
package org.torproject.onionoo;
import java.io.BufferedReader;
@@ -10,20 +9,15 @@ import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Arrays;
+import java.util.Map;
+import java.util.SortedMap;
import java.util.SortedSet;
import java.util.Stack;
+import java.util.TreeMap;
import java.util.TreeSet;
-enum DocumentType {
- STATUS_SUMMARY,
- STATUS_BANDWIDTH,
- STATUS_WEIGHTS,
- OUT_UPDATE,
- OUT_SUMMARY,
- OUT_DETAILS,
- OUT_BANDWIDTH,
- OUT_WEIGHTS;
-}
+import com.google.gson.Gson;
+import com.google.gson.JsonParseException;
// TODO For later migration from disk to database, do the following:
// - read from database and then from disk if not found
@@ -36,10 +30,19 @@ public class DocumentStore {
private File outDir;
+ boolean listedArchivedNodeStatuses = false,
+ listedCurrentNodeStatuses = false;
+
long listOperations = 0L, listedFiles = 0L, storedFiles = 0L,
storedBytes = 0L, retrievedFiles = 0L, retrievedBytes = 0L,
removedFiles = 0L;
+ /* Node statuses are cached in memory, as opposed to all other document
+ * types. This cache is initialized when listing NodeStatus documents,
+ * either including or excluding archived node statuses. Later retrieve
+ * operations depend on which NodeStatus documents were listed. */
+ private SortedMap<String, NodeStatus> cachedNodeStatuses;
+
public DocumentStore(File outDir) {
this.outDir = outDir;
}
@@ -49,32 +52,75 @@ public class DocumentStore {
this.outDir = outDir;
}
- public SortedSet<String> list(DocumentType documentType) {
+ public <T extends Document> SortedSet<String> list(
+ Class<T> documentType, boolean includeArchive) {
+ if (documentType.equals(NodeStatus.class)) {
+ return this.listNodeStatuses(includeArchive);
+ } else {
+ return this.listDocumentFiles(documentType);
+ }
+ }
+
+ private SortedSet<String> listNodeStatuses(boolean includeArchive) {
+ SortedMap<String, NodeStatus> parsedNodeStatuses =
+ new TreeMap<String, NodeStatus>();
+ File directory = includeArchive ? this.statusDir : this.outDir;
+ if (directory != null) {
+ File summaryFile = new File(directory, "summary");
+ if (summaryFile.exists()) {
+ try {
+ BufferedReader br = new BufferedReader(new FileReader(
+ summaryFile));
+ String line;
+ while ((line = br.readLine()) != null) {
+ if (line.length() == 0) {
+ continue;
+ }
+ NodeStatus node = NodeStatus.fromString(line);
+ if (node != null) {
+ parsedNodeStatuses.put(node.getFingerprint(), node);
+ }
+ }
+ br.close();
+ this.listedFiles += parsedNodeStatuses.size();
+ this.listOperations++;
+ } catch (IOException e) {
+ System.err.println("Could not read file '"
+ + summaryFile.getAbsolutePath() + "'.");
+ e.printStackTrace();
+ return null;
+ }
+ }
+ }
+ if (includeArchive) {
+ this.listedArchivedNodeStatuses = true;
+ } else {
+ this.listedCurrentNodeStatuses = true;
+ }
+ this.cachedNodeStatuses = parsedNodeStatuses;
+ return new TreeSet<String>(this.cachedNodeStatuses.keySet());
+ }
+
+ private <T extends Document> SortedSet<String> listDocumentFiles(
+ Class<T> documentType) {
SortedSet<String> fingerprints = new TreeSet<String>();
File directory = null;
String subdirectory = null;
- switch (documentType) {
- case STATUS_BANDWIDTH:
+ if (documentType.equals(BandwidthStatus.class)) {
directory = this.statusDir;
subdirectory = "bandwidth";
- break;
- case STATUS_WEIGHTS:
+ } else if (documentType.equals(WeightsStatus.class)) {
directory = this.statusDir;
subdirectory = "weights";
- break;
- case OUT_DETAILS:
+ } else if (documentType.equals(DetailsDocument.class)) {
directory = this.outDir;
subdirectory = "details";
- break;
- case OUT_BANDWIDTH:
+ } else if (documentType.equals(BandwidthDocument.class)) {
directory = this.outDir;
subdirectory = "bandwidth";
- break;
- case OUT_WEIGHTS:
+ } else if (documentType.equals(WeightsDocument.class)) {
directory = this.outDir;
- break;
- default:
- break;
+ subdirectory = "weights";
}
if (directory != null && subdirectory != null) {
Stack<File> files = new Stack<File>();
@@ -93,16 +139,45 @@ public class DocumentStore {
return fingerprints;
}
- public boolean store(String documentString, DocumentType documentType) {
- return this.store(documentString, documentType, null);
+ public <T extends Document> boolean store(T document) {
+ return this.store(document, null);
}
- public boolean store(String documentString, DocumentType documentType,
+ public <T extends Document> boolean store(T document,
String fingerprint) {
- File documentFile = this.getDocumentFile(documentType, fingerprint);
+ if (document instanceof NodeStatus) {
+ return this.storeNodeStatus((NodeStatus) document, fingerprint);
+ } else {
+ return this.storeDocumentFile(document, fingerprint);
+ }
+ }
+
+ private <T extends Document> boolean storeNodeStatus(
+ NodeStatus nodeStatus, String fingerprint) {
+ this.cachedNodeStatuses.put(fingerprint, nodeStatus);
+ return true;
+ }
+
+ private <T extends Document> boolean storeDocumentFile(T document,
+ String fingerprint) {
+ File documentFile = this.getDocumentFile(document.getClass(),
+ fingerprint);
if (documentFile == null) {
return false;
}
+ String documentString;
+ if (document.documentString != null) {
+ documentString = document.documentString;
+ } else if (document instanceof DetailsDocument ||
+ document instanceof BandwidthDocument ||
+ document instanceof WeightsDocument) {
+ Gson gson = new Gson();
+      documentString = gson.toJson(document);
+ } else {
+ System.err.println("Serializing is not supported for type "
+ + document.getClass().getName() + ".");
+ return false;
+ }
try {
documentFile.getParentFile().mkdirs();
File documentTempFile = new File(
@@ -124,11 +199,26 @@ public class DocumentStore {
return true;
}
- public String retrieve(DocumentType documentType) {
- return this.retrieve(documentType, null);
+ public <T extends Document> T retrieve(Class<T> documentType,
+ boolean parse) {
+ return this.retrieve(documentType, parse, null);
+ }
+
+ public <T extends Document> T retrieve(Class<T> documentType,
+ boolean parse, String fingerprint) {
+ if (documentType.equals(NodeStatus.class)) {
+ return documentType.cast(this.retrieveNodeStatus(fingerprint));
+ } else {
+ return this.retrieveDocumentFile(documentType, parse, fingerprint);
+ }
+ }
+
+ private NodeStatus retrieveNodeStatus(String fingerprint) {
+ return this.cachedNodeStatuses.get(fingerprint);
}
- public String retrieve(DocumentType documentType, String fingerprint) {
+ private <T extends Document> T retrieveDocumentFile(
+ Class<T> documentType, boolean parse, String fingerprint) {
File documentFile = this.getDocumentFile(documentType, fingerprint);
if (documentFile == null || !documentFile.exists()) {
return null;
@@ -138,6 +228,7 @@ public class DocumentStore {
+ "directory.");
return null;
}
+ String documentString = null;
try {
BufferedReader br = new BufferedReader(new FileReader(
documentFile));
@@ -149,20 +240,82 @@ public class DocumentStore {
br.close();
this.retrievedFiles++;
this.retrievedBytes += sb.length();
- return sb.toString();
+ documentString = sb.toString();
} catch (IOException e) {
System.err.println("Could not read file '"
+ documentFile.getAbsolutePath() + "'.");
e.printStackTrace();
return null;
}
+ T result = null;
+ if (!parse) {
+ return this.retrieveUnparsedDocumentFile(documentType,
+ documentString);
+ } else if (documentType.equals(DetailsDocument.class) ||
+ documentType.equals(BandwidthDocument.class) ||
+ documentType.equals(WeightsDocument.class)) {
+ return this.retrieveParsedDocumentFile(documentType,
+ documentString);
+ } else {
+ System.err.println("Parsing is not supported for type "
+ + documentType.getName() + ".");
+ }
+ return result;
+ }
+
+ private <T extends Document> T retrieveParsedDocumentFile(
+ Class<T> documentType, String documentString) {
+ T result = null;
+ Gson gson = new Gson();
+ try {
+ result = gson.fromJson(documentString, documentType);
+ result.documentString = documentString;
+ } catch (JsonParseException e) {
+ /* Handle below. */
+ e.printStackTrace();
+ }
+ if (result == null) {
+ System.err.println("Could not initialize parsed document of type "
+ + documentType.getName() + ".");
+ }
+ return result;
+ }
+
+ private <T extends Document> T retrieveUnparsedDocumentFile(
+ Class<T> documentType, String documentString) {
+ T result = null;
+ try {
+ result = documentType.newInstance();
+ result.documentString = documentString;
+ } catch (InstantiationException e) {
+ /* Handle below. */
+ e.printStackTrace();
+ } catch (IllegalAccessException e) {
+ /* Handle below. */
+ e.printStackTrace();
+ }
+ if (result == null) {
+ System.err.println("Could not initialize unparsed document of type "
+ + documentType.getName() + ".");
+ }
+ return result;
}
- public boolean remove(DocumentType documentType) {
+ public <T extends Document> boolean remove(Class<T> documentType) {
return this.remove(documentType, null);
}
- public boolean remove(DocumentType documentType, String fingerprint) {
+ public <T extends Document> boolean remove(Class<T> documentType,
+ String fingerprint) {
+ if (documentType.equals(NodeStatus.class)) {
+ return this.cachedNodeStatuses.remove(fingerprint) != null;
+ } else {
+ return this.removeDocumentFile(documentType, fingerprint);
+ }
+ }
+
+ private <T extends Document> boolean removeDocumentFile(
+ Class<T> documentType, String fingerprint) {
File documentFile = this.getDocumentFile(documentType, fingerprint);
if (documentFile == null || !documentFile.delete()) {
System.err.println("Could not delete file '"
@@ -173,54 +326,46 @@ public class DocumentStore {
return true;
}
- private File getDocumentFile(DocumentType documentType,
+ private <T extends Document> File getDocumentFile(Class<T> documentType,
String fingerprint) {
File documentFile = null;
- if (fingerprint == null && !(
- documentType == DocumentType.STATUS_SUMMARY ||
- documentType == DocumentType.OUT_UPDATE||
- documentType == DocumentType.OUT_SUMMARY)) {
+ if (fingerprint == null &&
+ !documentType.equals(UpdateStatus.class)) {
+ // TODO Instead of using the update file workaround, add new method
+ // lastModified(Class<T> documentType) that serves a similar
+ // purpose. Once that's implemented, make fingerprint mandatory for
+ // all methods.
return null;
}
File directory = null;
String fileName = null;
- switch (documentType) {
- case STATUS_SUMMARY:
- directory = this.statusDir;
- fileName = "summary";
- break;
- case STATUS_BANDWIDTH:
+ if (documentType.equals(BandwidthStatus.class)) {
directory = this.statusDir;
fileName = String.format("bandwidth/%s/%s/%s",
fingerprint.substring(0, 1), fingerprint.substring(1, 2),
fingerprint);
- break;
- case STATUS_WEIGHTS:
+ } else if (documentType.equals(WeightsStatus.class)) {
directory = this.statusDir;
fileName = String.format("weights/%s/%s/%s",
fingerprint.substring(0, 1), fingerprint.substring(1, 2),
fingerprint);
- break;
- case OUT_UPDATE:
+ } else if (documentType.equals(UpdateStatus.class)) {
directory = this.outDir;
fileName = "update";
- break;
- case OUT_SUMMARY:
- directory = this.outDir;
- fileName = "summary";
- break;
- case OUT_DETAILS:
+ // TODO Taken out, because storing/retrieving summary documents is not
+ // supported yet.
+ //} else if (documentType.equals(SummaryDocument.class)) {
+ // directory = this.outDir;
+ // fileName = "summary";
+ } else if (documentType.equals(DetailsDocument.class)) {
directory = this.outDir;
fileName = String.format("details/%s", fingerprint);
- break;
- case OUT_BANDWIDTH:
+ } else if (documentType.equals(BandwidthDocument.class)) {
directory = this.outDir;
fileName = String.format("bandwidth/%s", fingerprint);
- break;
- case OUT_WEIGHTS:
+ } else if (documentType.equals(WeightsDocument.class)) {
directory = this.outDir;
fileName = String.format("weights/%s", fingerprint);
- break;
}
if (directory != null && fileName != null) {
documentFile = new File(directory, fileName);
@@ -228,6 +373,99 @@ public class DocumentStore {
return documentFile;
}
+ public void flushDocumentCache() {
+ if (this.listedArchivedNodeStatuses) {
+ this.writeNodeStatuses(false);
+ this.writeNodeStatuses(true);
+ this.writeUpdateStatus();
+ } else if (this.listedCurrentNodeStatuses) {
+ this.writeNodeStatuses(false);
+ this.writeUpdateStatus();
+ }
+ }
+
+ private void writeNodeStatuses(boolean includeArchive) {
+ File directory = includeArchive ? this.statusDir : this.outDir;
+ if (directory == null) {
+ return;
+ }
+ File summaryFile = new File(directory, "summary");
+ SortedMap<String, NodeStatus>
+ cachedRelays = new TreeMap<String, NodeStatus>(),
+ cachedBridges = new TreeMap<String, NodeStatus>();
+ long cutoff = 0L;
+ if (!includeArchive) {
+ long maxLastSeenMillis = 0L;
+ for (NodeStatus node : this.cachedNodeStatuses.values()) {
+ if (node.getLastSeenMillis() > maxLastSeenMillis) {
+ maxLastSeenMillis = node.getLastSeenMillis();
+ }
+ }
+ cutoff = maxLastSeenMillis - 7L * 24L * 60L * 60L * 1000L;
+ }
+ for (Map.Entry<String, NodeStatus> e :
+ this.cachedNodeStatuses.entrySet()) {
+ if (e.getValue().getLastSeenMillis() < cutoff) {
+ continue;
+ }
+ if (e.getValue().isRelay()) {
+ cachedRelays.put(e.getKey(), e.getValue());
+ } else {
+ cachedBridges.put(e.getKey(), e.getValue());
+ }
+ }
+ StringBuilder sb = new StringBuilder();
+ for (NodeStatus relay : cachedRelays.values()) {
+ String line = relay.toString();
+ if (line != null) {
+ sb.append(line + "\n");
+ } else {
+ System.err.println("Could not serialize relay node status '"
+ + relay.getFingerprint() + "'");
+ }
+ }
+ for (NodeStatus bridge : cachedBridges.values()) {
+ String line = bridge.toString();
+ if (line != null) {
+ sb.append(line + "\n");
+ } else {
+ System.err.println("Could not serialize bridge node status '"
+ + bridge.getFingerprint() + "'");
+ }
+ }
+ String documentString = sb.toString();
+ try {
+ BufferedWriter bw = new BufferedWriter(new FileWriter(summaryFile));
+ bw.write(documentString);
+ bw.close();
+ this.storedFiles++;
+ this.storedBytes += documentString.length();
+ } catch (IOException e) {
+ System.err.println("Could not write file '"
+ + summaryFile.getAbsolutePath() + "'.");
+ e.printStackTrace();
+ }
+ }
+
+ private void writeUpdateStatus() {
+ if (this.outDir == null) {
+ return;
+ }
+ File updateFile = new File(this.outDir, "update");
+ String documentString = String.valueOf(System.currentTimeMillis());
+ try {
+ BufferedWriter bw = new BufferedWriter(new FileWriter(updateFile));
+ bw.write(documentString);
+ bw.close();
+ this.storedFiles++;
+ this.storedBytes += documentString.length();
+ } catch (IOException e) {
+ System.err.println("Could not write file '"
+ + updateFile.getAbsolutePath() + "'.");
+ e.printStackTrace();
+ }
+ }
+
public String getStatsString() {
StringBuilder sb = new StringBuilder();
sb.append(" " + formatDecimalNumber(listOperations)
diff --git a/src/org/torproject/onionoo/LookupService.java b/src/org/torproject/onionoo/LookupService.java
index bf3131e..4071e26 100644
--- a/src/org/torproject/onionoo/LookupService.java
+++ b/src/org/torproject/onionoo/LookupService.java
@@ -1,6 +1,5 @@
/* Copyright 2013 The Tor Project
* See LICENSE for licensing information */
-
package org.torproject.onionoo;
import java.io.BufferedReader;
diff --git a/src/org/torproject/onionoo/Main.java b/src/org/torproject/onionoo/Main.java
index 3086054..2232d81 100644
--- a/src/org/torproject/onionoo/Main.java
+++ b/src/org/torproject/onionoo/Main.java
@@ -4,6 +4,7 @@ package org.torproject.onionoo;
import java.io.File;
import java.util.Date;
+import java.util.SortedMap;
/* Update search data and status data files. */
public class Main {
@@ -24,34 +25,37 @@ public class Main {
printStatusTime("Initialized Geoip lookup service");
printStatus("Updating internal node list.");
- CurrentNodes cn = new CurrentNodes(dso, ls, ds);
- cn.readStatusSummary();
+ NodeDataWriter ndw = new NodeDataWriter(dso, ls, ds);
+ ndw.readStatusSummary();
printStatusTime("Read status summary");
- cn.readRelayNetworkConsensuses();
+ ndw.readRelayNetworkConsensuses();
printStatusTime("Read network status consensuses");
- cn.setRelayRunningBits();
- printStatusTime("Set relay running bits");
- cn.lookUpCitiesAndASes();
+ ndw.lookUpCitiesAndASes();
printStatusTime("Looked up cities and ASes");
- cn.readBridgeNetworkStatuses();
+ ndw.readBridgeNetworkStatuses();
printStatusTime("Read bridge network statuses");
- cn.setBridgeRunningBits();
- printStatusTime("Set bridge running bits");
- cn.writeStatusSummary();
+ ndw.setRunningBits();
+ printStatusTime("Set running bits");
+ ndw.writeStatusSummary();
printStatusTime("Wrote status summary");
+ SortedMap<String, NodeStatus> currentNodes = ndw.getCurrentNodes();
+ SortedMap<String, Integer> lastBandwidthWeights =
+ ndw.getLastBandwidthWeights();
// TODO Could write statistics here, too.
printStatus("Updating detail data.");
- DetailDataWriter ddw = new DetailDataWriter(dso, ds);
- ddw.setCurrentRelays(cn.getCurrentRelays());
- printStatusTime("Set current relays");
- ddw.setCurrentBridges(cn.getCurrentBridges());
- printStatusTime("Set current bridges");
+ DetailsDataWriter ddw = new DetailsDataWriter(dso, ds);
+ // TODO Instead of using ndw's currentNodes and lastBandwidthWeights,
+ // parse statuses once again, keeping separate parse history. Allows
+ // us to run ndw and ddw in parallel in the future. Alternatively,
+ // merge ndw and ddw, because they're doing similar things anyway.
+ ddw.setCurrentNodes(currentNodes);
+ printStatusTime("Set current node fingerprints");
ddw.startReverseDomainNameLookups();
printStatusTime("Started reverse domain name lookups");
ddw.readRelayServerDescriptors();
printStatusTime("Read relay server descriptors");
- ddw.calculatePathSelectionProbabilities(cn.getLastBandwidthWeights());
+ ddw.calculatePathSelectionProbabilities(lastBandwidthWeights);
printStatusTime("Calculated path selection probabilities");
ddw.readExitLists();
printStatusTime("Read exit lists");
@@ -67,35 +71,43 @@ public class Main {
printStatus("Updating bandwidth data.");
BandwidthDataWriter bdw = new BandwidthDataWriter(dso, ds);
- bdw.setCurrentRelays(cn.getCurrentRelays());
- printStatusTime("Set current relays");
- bdw.setCurrentBridges(cn.getCurrentBridges());
- printStatusTime("Set current bridges");
+ bdw.setCurrentNodes(currentNodes);
+ printStatusTime("Set current node fingerprints");
bdw.readExtraInfoDescriptors();
printStatusTime("Read extra-info descriptors");
+ // TODO Evaluate overhead of not deleting obsolete bandwidth files.
+ // An advantage would be that we don't need ndw's currentNodes
+ // anymore, which allows us to run ndw and bdw in parallel in the
+ // future.
bdw.deleteObsoleteBandwidthFiles();
printStatusTime("Deleted obsolete bandwidth files");
// TODO Could write statistics here, too.
printStatus("Updating weights data.");
WeightsDataWriter wdw = new WeightsDataWriter(dso, ds);
- wdw.setCurrentRelays(cn.getCurrentRelays());
- printStatusTime("Set current relays");
+ wdw.setCurrentNodes(currentNodes);
+ printStatusTime("Set current node fingerprints");
wdw.readRelayServerDescriptors();
printStatusTime("Read relay server descriptors");
wdw.readRelayNetworkConsensuses();
printStatusTime("Read relay network consensuses");
wdw.writeWeightsDataFiles();
printStatusTime("Wrote weights data files");
+ // TODO Evaluate overhead of not deleting obsolete weights files. An
+ // advantage would be that we don't need ndw's currentNodes anymore,
+ // which allows us to run ndw and wdw in parallel in the future.
wdw.deleteObsoleteWeightsDataFiles();
printStatusTime("Deleted obsolete weights files");
// TODO Could write statistics here, too.
printStatus("Updating summary data.");
- cn.writeOutSummary();
+ ndw.writeOutSummary();
printStatusTime("Wrote out summary");
// TODO Could write statistics here, too.
+ // TODO "Shut down" lookup service and write statistics about number
+ // of (successfully) looked up addresses.
+
printStatus("Shutting down descriptor source.");
dso.writeHistoryFiles();
printStatusTime("Wrote parse histories");
@@ -103,6 +115,8 @@ public class Main {
printStatusTime("Shut down descriptor source");
printStatus("Shutting down document store.");
+ ds.flushDocumentCache();
+ printStatusTime("Flushed document cache");
printStatistics(ds.getStatsString());
printStatusTime("Shut down document store");
diff --git a/src/org/torproject/onionoo/Node.java b/src/org/torproject/onionoo/Node.java
deleted file mode 100644
index 35d81c3..0000000
--- a/src/org/torproject/onionoo/Node.java
+++ /dev/null
@@ -1,280 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.onionoo;
-
-import java.util.Map;
-import java.util.Set;
-import java.util.SortedSet;
-import java.util.SortedMap;
-import java.util.TreeSet;
-import java.util.TreeMap;
-
-import org.apache.commons.codec.DecoderException;
-import org.apache.commons.codec.binary.Hex;
-import org.apache.commons.codec.digest.DigestUtils;
-
-/* Store search data of a single relay that was running in the past seven
- * days. */
-public class Node {
- private String fingerprint;
- private String hashedFingerprint;
- private String nickname;
- private String address;
- private SortedSet<String> orAddresses;
- private SortedSet<String> orAddressesAndPorts;
- private SortedSet<String> exitAddresses;
- private String latitude;
- private String longitude;
- private String countryCode;
- private String countryName;
- private String regionName;
- private String cityName;
- private String aSName;
- private String aSNumber;
- private long firstSeenMillis;
- private long lastSeenMillis;
- private int orPort;
- private int dirPort;
- private SortedSet<String> relayFlags;
- private long consensusWeight;
- private boolean running;
- private String hostName;
- private long lastRdnsLookup = -1L;
- private double advertisedBandwidthFraction = -1.0;
- private double consensusWeightFraction = -1.0;
- private double guardProbability = -1.0;
- private double middleProbability = -1.0;
- private double exitProbability = -1.0;
- private String defaultPolicy;
- private String portList;
- private SortedMap<Long, Set<String>> lastAddresses;
- public Node(String nickname, String fingerprint, String address,
- SortedSet<String> orAddressesAndPorts,
- SortedSet<String> exitAddresses, long lastSeenMillis, int orPort,
- int dirPort, SortedSet<String> relayFlags, long consensusWeight,
- String countryCode, String hostName, long lastRdnsLookup,
- String defaultPolicy, String portList, long firstSeenMillis,
- SortedMap<Long, Set<String>> lastAddresses, String aSNumber) {
- this.nickname = nickname;
- this.fingerprint = fingerprint;
- try {
- this.hashedFingerprint = DigestUtils.shaHex(Hex.decodeHex(
- fingerprint.toCharArray())).toUpperCase();
- } catch (DecoderException e) {
- throw new IllegalArgumentException("Fingerprint '" + fingerprint
- + "' is not a valid fingerprint.");
- }
- this.address = address;
- this.exitAddresses = new TreeSet<String>();
- if (exitAddresses != null) {
- this.exitAddresses.addAll(exitAddresses);
- }
- this.exitAddresses.remove(this.address);
- this.orAddresses = new TreeSet<String>();
- this.orAddressesAndPorts = new TreeSet<String>();
- if (orAddressesAndPorts != null) {
- for (String orAddressAndPort : orAddressesAndPorts) {
- this.addOrAddressAndPort(orAddressAndPort);
- }
- }
- this.lastSeenMillis = lastSeenMillis;
- this.orPort = orPort;
- this.dirPort = dirPort;
- this.relayFlags = relayFlags;
- this.consensusWeight = consensusWeight;
- this.countryCode = countryCode;
- this.hostName = hostName;
- this.lastRdnsLookup = lastRdnsLookup;
- this.defaultPolicy = defaultPolicy;
- this.portList = portList;
- this.firstSeenMillis = firstSeenMillis;
- this.lastAddresses = lastAddresses;
- this.aSNumber = aSNumber;
- }
- public String getFingerprint() {
- return this.fingerprint;
- }
- public String getHashedFingerprint() {
- return this.hashedFingerprint;
- }
- public String getNickname() {
- return this.nickname;
- }
- public String getAddress() {
- return this.address;
- }
- public SortedSet<String> getOrAddresses() {
- return new TreeSet<String>(this.orAddresses);
- }
- public void addOrAddressAndPort(String orAddressAndPort) {
- if (!orAddressAndPort.contains(":")) {
- System.err.println("Illegal OR address:port '" + orAddressAndPort
- + "'. Exiting.");
- System.exit(1);
- } else if (orAddressAndPort.length() > 0) {
- String orAddress = orAddressAndPort.substring(0,
- orAddressAndPort.lastIndexOf(":"));
- if (this.exitAddresses.contains(orAddress)) {
- this.exitAddresses.remove(orAddress);
- }
- this.orAddresses.add(orAddress);
- this.orAddressesAndPorts.add(orAddressAndPort);
- }
- }
- public SortedSet<String> getOrAddressesAndPorts() {
- return new TreeSet<String>(this.orAddressesAndPorts);
- }
- public void addExitAddress(String exitAddress) {
- if (exitAddress.length() > 0 && !this.address.equals(exitAddress) &&
- !this.orAddresses.contains(exitAddress)) {
- this.exitAddresses.add(exitAddress);
- }
- }
- public SortedSet<String> getExitAddresses() {
- return new TreeSet<String>(this.exitAddresses);
- }
- public void setLatitude(String latitude) {
- this.latitude = latitude;
- }
- public String getLatitude() {
- return this.latitude;
- }
- public void setLongitude(String longitude) {
- this.longitude = longitude;
- }
- public String getLongitude() {
- return this.longitude;
- }
- public void setCountryCode(String countryCode) {
- this.countryCode = countryCode;
- }
- public String getCountryCode() {
- return this.countryCode;
- }
- public void setCountryName(String countryName) {
- this.countryName = countryName;
- }
- public String getCountryName() {
- return this.countryName;
- }
- public void setRegionName(String regionName) {
- this.regionName = regionName;
- }
- public String getRegionName() {
- return this.regionName;
- }
- public void setCityName(String cityName) {
- this.cityName = cityName;
- }
- public String getCityName() {
- return this.cityName;
- }
- public void setASNumber(String aSNumber) {
- this.aSNumber = aSNumber;
- }
- public String getASNumber() {
- return this.aSNumber;
- }
- public void setASName(String aSName) {
- this.aSName = aSName;
- }
- public String getASName() {
- return this.aSName;
- }
- public long getFirstSeenMillis() {
- return this.firstSeenMillis;
- }
- public long getLastSeenMillis() {
- return this.lastSeenMillis;
- }
- public int getOrPort() {
- return this.orPort;
- }
- public int getDirPort() {
- return this.dirPort;
- }
- public SortedSet<String> getRelayFlags() {
- return this.relayFlags;
- }
- public long getConsensusWeight() {
- return this.consensusWeight;
- }
- public void setRunning(boolean running) {
- this.running = running;
- }
- public boolean getRunning() {
- return this.running;
- }
- public void setHostName(String hostName) {
- this.hostName = hostName;
- }
- public String getHostName() {
- return this.hostName;
- }
- public void setLastRdnsLookup(long lastRdnsLookup) {
- this.lastRdnsLookup = lastRdnsLookup;
- }
- public long getLastRdnsLookup() {
- return this.lastRdnsLookup;
- }
- public void setAdvertisedBandwidthFraction(
- double advertisedBandwidthFraction) {
- this.advertisedBandwidthFraction = advertisedBandwidthFraction;
- }
- public double getAdvertisedBandwidthFraction() {
- return this.advertisedBandwidthFraction;
- }
- public void setConsensusWeightFraction(double consensusWeightFraction) {
- this.consensusWeightFraction = consensusWeightFraction;
- }
- public double getConsensusWeightFraction() {
- return this.consensusWeightFraction;
- }
- public void setGuardProbability(double guardProbability) {
- this.guardProbability = guardProbability;
- }
- public double getGuardProbability() {
- return this.guardProbability;
- }
- public void setMiddleProbability(double middleProbability) {
- this.middleProbability = middleProbability;
- }
- public double getMiddleProbability() {
- return this.middleProbability;
- }
- public void setExitProbability(double exitProbability) {
- this.exitProbability = exitProbability;
- }
- public double getExitProbability() {
- return this.exitProbability;
- }
- public String getDefaultPolicy() {
- return this.defaultPolicy;
- }
- public String getPortList() {
- return this.portList;
- }
- public SortedMap<Long, Set<String>> getLastAddresses() {
- return this.lastAddresses == null ? null :
- new TreeMap<Long, Set<String>>(this.lastAddresses);
- }
- public long getLastChangedOrAddress() {
- long lastChangedAddressesMillis = -1L;
- if (this.lastAddresses != null) {
- Set<String> lastAddresses = null;
- for (Map.Entry<Long, Set<String>> e : this.lastAddresses.entrySet()) {
- if (lastAddresses != null) {
- for (String address : e.getValue()) {
- if (!lastAddresses.contains(address)) {
- return lastChangedAddressesMillis;
- }
- }
- }
- lastChangedAddressesMillis = e.getKey();
- lastAddresses = e.getValue();
- }
- }
- return lastChangedAddressesMillis;
- }
-}
-
diff --git a/src/org/torproject/onionoo/NodeDataWriter.java b/src/org/torproject/onionoo/NodeDataWriter.java
new file mode 100644
index 0000000..9cfbb21
--- /dev/null
+++ b/src/org/torproject/onionoo/NodeDataWriter.java
@@ -0,0 +1,237 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.onionoo;
+
+import java.util.Map;
+import java.util.SortedMap;
+import java.util.SortedSet;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
+import org.torproject.descriptor.BridgeNetworkStatus;
+import org.torproject.descriptor.Descriptor;
+import org.torproject.descriptor.NetworkStatusEntry;
+import org.torproject.descriptor.RelayNetworkStatusConsensus;
+import org.torproject.onionoo.LookupService.LookupResult;
+
+/* Store relays and bridges that have been running in the past seven
+ * days. */
+public class NodeDataWriter {
+
+ private DescriptorSource descriptorSource;
+
+ private LookupService lookupService;
+
+ private DocumentStore documentStore;
+
+ private SortedMap<String, NodeStatus> knownNodes =
+ new TreeMap<String, NodeStatus>();
+
+ private long relaysLastValidAfterMillis = -1L;
+
+ private long bridgesLastPublishedMillis = -1L;
+
+ private SortedMap<String, Integer> lastBandwidthWeights = null;
+
+ public NodeDataWriter(DescriptorSource descriptorSource,
+ LookupService lookupService, DocumentStore documentStore) {
+ this.descriptorSource = descriptorSource;
+ this.lookupService = lookupService;
+ this.documentStore = documentStore;
+ }
+
+ public void readStatusSummary() {
+ SortedSet<String> fingerprints = this.documentStore.list(
+ NodeStatus.class, true);
+ for (String fingerprint : fingerprints) {
+ NodeStatus node = this.documentStore.retrieve(NodeStatus.class,
+ true, fingerprint);
+ if (node.isRelay()) {
+ this.relaysLastValidAfterMillis = Math.max(
+ this.relaysLastValidAfterMillis, node.getLastSeenMillis());
+ } else {
+ this.bridgesLastPublishedMillis = Math.max(
+ this.bridgesLastPublishedMillis, node.getLastSeenMillis());
+ }
+ this.knownNodes.put(fingerprint, node);
+ }
+ }
+
+ public void readRelayNetworkConsensuses() {
+ if (this.descriptorSource == null) {
+ System.err.println("Not configured to read relay network "
+ + "consensuses.");
+ return;
+ }
+ DescriptorQueue descriptorQueue =
+ this.descriptorSource.getDescriptorQueue(
+ DescriptorType.RELAY_CONSENSUSES,
+ DescriptorHistory.RELAY_CONSENSUS_HISTORY);
+ Descriptor descriptor;
+ while ((descriptor = descriptorQueue.nextDescriptor()) != null) {
+ if (descriptor instanceof RelayNetworkStatusConsensus) {
+ updateRelayNetworkStatusConsensus(
+ (RelayNetworkStatusConsensus) descriptor);
+ }
+ }
+ }
+
+ public void setRunningBits() {
+ for (NodeStatus node : this.knownNodes.values()) {
+ if (node.isRelay() &&
+ node.getLastSeenMillis() == this.relaysLastValidAfterMillis) {
+ node.setRunning(true);
+ }
+ if (!node.isRelay() &&
+ node.getLastSeenMillis() == this.bridgesLastPublishedMillis) {
+ node.setRunning(true);
+ }
+ }
+ }
+
+ private void updateRelayNetworkStatusConsensus(
+ RelayNetworkStatusConsensus consensus) {
+ long validAfterMillis = consensus.getValidAfterMillis();
+ if (validAfterMillis > this.relaysLastValidAfterMillis) {
+ this.relaysLastValidAfterMillis = validAfterMillis;
+ }
+ for (NetworkStatusEntry entry :
+ consensus.getStatusEntries().values()) {
+ String nickname = entry.getNickname();
+ String fingerprint = entry.getFingerprint();
+ String address = entry.getAddress();
+ SortedSet<String> orAddressesAndPorts = new TreeSet<String>(
+ entry.getOrAddresses());
+ int orPort = entry.getOrPort();
+ int dirPort = entry.getDirPort();
+ SortedSet<String> relayFlags = entry.getFlags();
+ long consensusWeight = entry.getBandwidth();
+ String defaultPolicy = entry.getDefaultPolicy();
+ String portList = entry.getPortList();
+ NodeStatus newNodeStatus = new NodeStatus(true, nickname,
+ fingerprint, address, orAddressesAndPorts, null,
+ validAfterMillis, orPort, dirPort, relayFlags, consensusWeight,
+ null, null, -1L, defaultPolicy, portList, validAfterMillis,
+ validAfterMillis, null);
+ if (this.knownNodes.containsKey(fingerprint)) {
+ this.knownNodes.get(fingerprint).update(newNodeStatus);
+ } else {
+ this.knownNodes.put(fingerprint, newNodeStatus);
+ }
+ }
+ if (this.relaysLastValidAfterMillis == validAfterMillis) {
+ this.lastBandwidthWeights = consensus.getBandwidthWeights();
+ }
+ }
+
+ public void lookUpCitiesAndASes() {
+ SortedSet<String> addressStrings = new TreeSet<String>();
+ for (NodeStatus node : this.knownNodes.values()) {
+ if (node.isRelay()) {
+ addressStrings.add(node.getAddress());
+ }
+ }
+ if (addressStrings.isEmpty()) {
+ System.err.println("No relay IP addresses to resolve to cities or "
+ + "ASN.");
+ return;
+ }
+ SortedMap<String, LookupResult> lookupResults =
+ this.lookupService.lookup(addressStrings);
+ for (NodeStatus node : knownNodes.values()) {
+ if (!node.isRelay()) {
+ continue;
+ }
+ String addressString = node.getAddress();
+ if (lookupResults.containsKey(addressString)) {
+ LookupResult lookupResult = lookupResults.get(addressString);
+ node.setCountryCode(lookupResult.countryCode);
+ node.setCountryName(lookupResult.countryName);
+ node.setRegionName(lookupResult.regionName);
+ node.setCityName(lookupResult.cityName);
+ node.setLatitude(lookupResult.latitude);
+ node.setLongitude(lookupResult.longitude);
+ node.setASNumber(lookupResult.aSNumber);
+ node.setASName(lookupResult.aSName);
+ }
+ }
+ }
+
+ public void readBridgeNetworkStatuses() {
+ if (this.descriptorSource == null) {
+ System.err.println("Not configured to read bridge network "
+ + "statuses.");
+ return;
+ }
+ DescriptorQueue descriptorQueue =
+ this.descriptorSource.getDescriptorQueue(
+ DescriptorType.BRIDGE_STATUSES,
+ DescriptorHistory.BRIDGE_STATUS_HISTORY);
+ Descriptor descriptor;
+ while ((descriptor = descriptorQueue.nextDescriptor()) != null) {
+ if (descriptor instanceof BridgeNetworkStatus) {
+ updateBridgeNetworkStatus((BridgeNetworkStatus) descriptor);
+ }
+ }
+ }
+
+ private void updateBridgeNetworkStatus(BridgeNetworkStatus status) {
+ long publishedMillis = status.getPublishedMillis();
+ if (publishedMillis > this.bridgesLastPublishedMillis) {
+ this.bridgesLastPublishedMillis = publishedMillis;
+ }
+ for (NetworkStatusEntry entry : status.getStatusEntries().values()) {
+ String nickname = entry.getNickname();
+ String fingerprint = entry.getFingerprint();
+ String address = entry.getAddress();
+ SortedSet<String> orAddressesAndPorts = new TreeSet<String>(
+ entry.getOrAddresses());
+ int orPort = entry.getOrPort();
+ int dirPort = entry.getDirPort();
+ SortedSet<String> relayFlags = entry.getFlags();
+ NodeStatus newNodeStatus = new NodeStatus(false, nickname,
+ fingerprint, address, orAddressesAndPorts, null,
+ publishedMillis, orPort, dirPort, relayFlags, -1L, "??", null,
+ -1L, null, null, publishedMillis, -1L, null);
+ if (this.knownNodes.containsKey(fingerprint)) {
+ this.knownNodes.get(fingerprint).update(newNodeStatus);
+ } else {
+ this.knownNodes.put(fingerprint, newNodeStatus);
+ }
+ }
+ }
+
+ public void writeStatusSummary() {
+ this.writeSummary(true);
+ }
+
+ public void writeOutSummary() {
+ this.writeSummary(false);
+ }
+
+ private void writeSummary(boolean includeArchive) {
+ SortedMap<String, NodeStatus> nodes = includeArchive
+ ? this.knownNodes : this.getCurrentNodes();
+ for (Map.Entry<String, NodeStatus> e : nodes.entrySet()) {
+ this.documentStore.store(e.getValue(), e.getKey());
+ }
+ }
+
+ public SortedMap<String, NodeStatus> getCurrentNodes() {
+ long cutoff = Math.max(this.relaysLastValidAfterMillis,
+ this.bridgesLastPublishedMillis) - 7L * 24L * 60L * 60L * 1000L;
+ SortedMap<String, NodeStatus> currentNodes =
+ new TreeMap<String, NodeStatus>();
+ for (Map.Entry<String, NodeStatus> e : this.knownNodes.entrySet()) {
+ if (e.getValue().getLastSeenMillis() >= cutoff) {
+ currentNodes.put(e.getKey(), e.getValue());
+ }
+ }
+ return currentNodes;
+ }
+
+ public SortedMap<String, Integer> getLastBandwidthWeights() {
+ return this.lastBandwidthWeights;
+ }
+}
+
diff --git a/src/org/torproject/onionoo/NodeStatus.java b/src/org/torproject/onionoo/NodeStatus.java
new file mode 100644
index 0000000..81e5c7f
--- /dev/null
+++ b/src/org/torproject/onionoo/NodeStatus.java
@@ -0,0 +1,482 @@
+/* Copyright 2011, 2012 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.onionoo;
+
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.SortedSet;
+import java.util.SortedMap;
+import java.util.TimeZone;
+import java.util.TreeSet;
+import java.util.TreeMap;
+
+import org.apache.commons.codec.DecoderException;
+import org.apache.commons.codec.binary.Hex;
+import org.apache.commons.codec.digest.DigestUtils;
+
+/* Store search data of a single relay that was running in the past seven
+ * days. */
+public class NodeStatus extends Document {
+ private boolean isRelay;
+ private String fingerprint;
+ private String hashedFingerprint;
+ private String nickname;
+ private String address;
+ private SortedSet<String> orAddresses;
+ private SortedSet<String> orAddressesAndPorts;
+ private SortedSet<String> exitAddresses;
+ private String latitude;
+ private String longitude;
+ private String countryCode;
+ private String countryName;
+ private String regionName;
+ private String cityName;
+ private String aSName;
+ private String aSNumber;
+ private long firstSeenMillis;
+ private long lastSeenMillis;
+ private int orPort;
+ private int dirPort;
+ private SortedSet<String> relayFlags;
+ private long consensusWeight;
+ private boolean running;
+ private String hostName;
+ private long lastRdnsLookup = -1L;
+ private double advertisedBandwidthFraction = -1.0;
+ private double consensusWeightFraction = -1.0;
+ private double guardProbability = -1.0;
+ private double middleProbability = -1.0;
+ private double exitProbability = -1.0;
+ private String defaultPolicy;
+ private String portList;
+ private SortedMap<Long, Set<String>> lastAddresses;
+ public NodeStatus(boolean isRelay, String nickname, String fingerprint,
+ String address, SortedSet<String> orAddressesAndPorts,
+ SortedSet<String> exitAddresses, long lastSeenMillis, int orPort,
+ int dirPort, SortedSet<String> relayFlags, long consensusWeight,
+ String countryCode, String hostName, long lastRdnsLookup,
+ String defaultPolicy, String portList, long firstSeenMillis,
+ long lastChangedAddresses, String aSNumber) {
+ this.isRelay = isRelay;
+ this.nickname = nickname;
+ this.fingerprint = fingerprint;
+ try {
+ this.hashedFingerprint = DigestUtils.shaHex(Hex.decodeHex(
+ this.fingerprint.toCharArray())).toUpperCase();
+ } catch (DecoderException e) {
+ throw new IllegalArgumentException("Fingerprint '" + fingerprint
+ + "' is not a valid fingerprint.");
+ }
+ this.address = address;
+ this.exitAddresses = new TreeSet<String>();
+ if (exitAddresses != null) {
+ this.exitAddresses.addAll(exitAddresses);
+ }
+ this.exitAddresses.remove(this.address);
+ this.orAddresses = new TreeSet<String>();
+ this.orAddressesAndPorts = new TreeSet<String>();
+ if (orAddressesAndPorts != null) {
+ for (String orAddressAndPort : orAddressesAndPorts) {
+ this.addOrAddressAndPort(orAddressAndPort);
+ }
+ }
+ this.lastSeenMillis = lastSeenMillis;
+ this.orPort = orPort;
+ this.dirPort = dirPort;
+ this.relayFlags = relayFlags;
+ this.consensusWeight = consensusWeight;
+ this.countryCode = countryCode;
+ this.hostName = hostName;
+ this.lastRdnsLookup = lastRdnsLookup;
+ this.defaultPolicy = defaultPolicy;
+ this.portList = portList;
+ this.firstSeenMillis = firstSeenMillis;
+ this.lastAddresses =
+ new TreeMap<Long, Set<String>>(Collections.reverseOrder());
+ Set<String> addresses = new HashSet<String>();
+ addresses.add(address + ":" + orPort);
+ if (dirPort > 0) {
+ addresses.add(address + ":" + dirPort);
+ }
+ addresses.addAll(orAddressesAndPorts);
+ this.lastAddresses.put(lastChangedAddresses, addresses);
+ this.aSNumber = aSNumber;
+ }
+
+ public static NodeStatus fromString(String documentString) {
+ boolean isRelay = false;
+ String nickname = null, fingerprint = null, address = null,
+ countryCode = null, hostName = null, defaultPolicy = null,
+ portList = null, aSNumber = null;
+ SortedSet<String> orAddressesAndPorts = null, exitAddresses = null,
+ relayFlags = null;
+ long lastSeenMillis = -1L, consensusWeight = -1L,
+ lastRdnsLookup = -1L, firstSeenMillis = -1L,
+ lastChangedAddresses = -1L;
+ int orPort = -1, dirPort = -1;
+ try {
+ SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
+ "yyyy-MM-dd HH:mm:ss");
+ dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ String[] parts = documentString.trim().split(" ");
+ isRelay = parts[0].equals("r");
+ if (parts.length < 9) {
+ System.err.println("Too few space-separated values in line '"
+ + documentString.trim() + "'. Skipping.");
+ return null;
+ }
+ nickname = parts[1];
+ fingerprint = parts[2];
+ orAddressesAndPorts = new TreeSet<String>();
+ exitAddresses = new TreeSet<String>();
+ String addresses = parts[3];
+ if (addresses.contains(";")) {
+ String[] addressParts = addresses.split(";", -1);
+ if (addressParts.length != 3) {
+ System.err.println("Invalid addresses entry in line '"
+ + documentString.trim() + "'. Skipping.");
+ return null;
+ }
+ address = addressParts[0];
+ if (addressParts[1].length() > 0) {
+ orAddressesAndPorts.addAll(Arrays.asList(
+ addressParts[1].split("\\+")));
+ }
+ if (addressParts[2].length() > 0) {
+ exitAddresses.addAll(Arrays.asList(
+ addressParts[2].split("\\+")));
+ }
+ } else {
+ address = addresses;
+ }
+ lastSeenMillis = dateTimeFormat.parse(parts[4] + " " + parts[5]).
+ getTime();
+ orPort = Integer.parseInt(parts[6]);
+ dirPort = Integer.parseInt(parts[7]);
+ relayFlags = new TreeSet<String>(
+ Arrays.asList(parts[8].split(",")));
+ if (parts.length > 9) {
+ consensusWeight = Long.parseLong(parts[9]);
+ }
+ if (parts.length > 10) {
+ countryCode = parts[10];
+ }
+ if (parts.length > 12) {
+ hostName = parts[11].equals("null") ? null : parts[11];
+ lastRdnsLookup = Long.parseLong(parts[12]);
+ }
+ if (parts.length > 14) {
+ if (!parts[13].equals("null")) {
+ defaultPolicy = parts[13];
+ }
+ if (!parts[14].equals("null")) {
+ portList = parts[14];
+ }
+ }
+ firstSeenMillis = lastSeenMillis;
+ if (parts.length > 16) {
+ firstSeenMillis = dateTimeFormat.parse(parts[15] + " "
+ + parts[16]).getTime();
+ }
+ lastChangedAddresses = lastSeenMillis;
+ if (parts.length > 18 && !parts[17].equals("null")) {
+ lastChangedAddresses = dateTimeFormat.parse(parts[17] + " "
+ + parts[18]).getTime();
+ }
+ if (parts.length > 19) {
+ aSNumber = parts[19];
+ }
+ } catch (NumberFormatException e) {
+ System.err.println("Number format exception while parsing node "
+ + "status line '" + documentString + "': " + e.getMessage()
+ + ". Skipping.");
+ return null;
+ } catch (ParseException e) {
+ System.err.println("Parse exception while parsing node status "
+ + "line '" + documentString + "': " + e.getMessage() + ". "
+ + "Skipping.");
+ return null;
+ } catch (Exception e) {
+ /* This catch block is only here to handle yet unknown errors. It
+ * should go away once we're sure what kind of errors can occur. */
+ System.err.println("Unknown exception while parsing node status "
+ + "line '" + documentString + "': " + e.getMessage() + ". "
+ + "Skipping.");
+ return null;
+ }
+ NodeStatus newNodeStatus = new NodeStatus(isRelay, nickname,
+ fingerprint, address, orAddressesAndPorts, exitAddresses,
+ lastSeenMillis, orPort, dirPort, relayFlags, consensusWeight,
+ countryCode, hostName, lastRdnsLookup, defaultPolicy, portList,
+ firstSeenMillis, lastChangedAddresses, aSNumber);
+ return newNodeStatus;
+ }
+
+ public void update(NodeStatus newNodeStatus) {
+ if (newNodeStatus.lastSeenMillis > this.lastSeenMillis) {
+ this.nickname = newNodeStatus.nickname;
+ this.address = newNodeStatus.address;
+ this.orAddressesAndPorts = newNodeStatus.orAddressesAndPorts;
+ this.exitAddresses = newNodeStatus.exitAddresses;
+ this.lastSeenMillis = newNodeStatus.lastSeenMillis;
+ this.orPort = newNodeStatus.orPort;
+ this.dirPort = newNodeStatus.dirPort;
+ this.relayFlags = newNodeStatus.relayFlags;
+ this.consensusWeight = newNodeStatus.consensusWeight;
+ this.countryCode = newNodeStatus.countryCode;
+ this.defaultPolicy = newNodeStatus.defaultPolicy;
+ this.portList = newNodeStatus.portList;
+ this.aSNumber = newNodeStatus.aSNumber;
+ }
+ if (this.isRelay && newNodeStatus.isRelay) {
+ this.lastAddresses.putAll(newNodeStatus.lastAddresses);
+ }
+ this.firstSeenMillis = Math.min(newNodeStatus.firstSeenMillis,
+ this.getFirstSeenMillis());
+ }
+
+ public String toString() {
+ SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
+ "yyyy-MM-dd HH:mm:ss");
+ dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ StringBuilder sb = new StringBuilder();
+ sb.append(this.isRelay ? "r" : "b");
+ sb.append(" " + this.nickname);
+ sb.append(" " + this.fingerprint);
+ sb.append(" " + this.address + ";");
+ int written = 0;
+ for (String orAddressAndPort : this.orAddressesAndPorts) {
+ sb.append((written++ > 0 ? "+" : "") + orAddressAndPort);
+ }
+ sb.append(";");
+ if (this.isRelay) {
+ written = 0;
+ for (String exitAddress : this.exitAddresses) {
+ sb.append((written++ > 0 ? "+" : "")
+ + exitAddress);
+ }
+ }
+ sb.append(" " + dateTimeFormat.format(this.lastSeenMillis));
+ sb.append(" " + this.orPort);
+ sb.append(" " + this.dirPort + " ");
+ written = 0;
+ for (String relayFlag : this.relayFlags) {
+ sb.append((written++ > 0 ? "," : "") + relayFlag);
+ }
+ if (this.isRelay) {
+ sb.append(" " + String.valueOf(this.consensusWeight));
+ sb.append(" " + (this.countryCode != null ? this.countryCode : "??"));
+ sb.append(" " + (this.hostName != null ? this.hostName : "null"));
+ sb.append(" " + String.valueOf(this.lastRdnsLookup));
+ sb.append(" " + (this.defaultPolicy != null ? this.defaultPolicy
+ : "null"));
+ sb.append(" " + (this.portList != null ? this.portList : "null"));
+ } else {
+ sb.append(" -1 ?? null -1 null null");
+ }
+ sb.append(" " + dateTimeFormat.format(this.firstSeenMillis));
+ if (this.isRelay) {
+ sb.append(" " + dateTimeFormat.format(
+ this.getLastChangedOrAddress()));
+ sb.append(" " + (this.aSNumber != null ? this.aSNumber : "null"));
+ } else {
+ sb.append(" null null null");
+ }
+ return sb.toString();
+ }
+
+ public boolean isRelay() {
+ return this.isRelay;
+ }
+ public String getFingerprint() {
+ return this.fingerprint;
+ }
+ public String getHashedFingerprint() {
+ return this.hashedFingerprint;
+ }
+ public String getNickname() {
+ return this.nickname;
+ }
+ public String getAddress() {
+ return this.address;
+ }
+ public SortedSet<String> getOrAddresses() {
+ return new TreeSet<String>(this.orAddresses);
+ }
+ public void addOrAddressAndPort(String orAddressAndPort) {
+ if (!orAddressAndPort.contains(":")) {
+ System.err.println("Illegal OR address:port '" + orAddressAndPort
+ + "'. Exiting.");
+ System.exit(1);
+ } else if (orAddressAndPort.length() > 0) {
+ String orAddress = orAddressAndPort.substring(0,
+ orAddressAndPort.lastIndexOf(":"));
+ if (this.exitAddresses.contains(orAddress)) {
+ this.exitAddresses.remove(orAddress);
+ }
+ this.orAddresses.add(orAddress);
+ this.orAddressesAndPorts.add(orAddressAndPort);
+ }
+ }
+ public SortedSet<String> getOrAddressesAndPorts() {
+ return new TreeSet<String>(this.orAddressesAndPorts);
+ }
+ public void addExitAddress(String exitAddress) {
+ if (exitAddress.length() > 0 && !this.address.equals(exitAddress) &&
+ !this.orAddresses.contains(exitAddress)) {
+ this.exitAddresses.add(exitAddress);
+ }
+ }
+ public SortedSet<String> getExitAddresses() {
+ return new TreeSet<String>(this.exitAddresses);
+ }
+ public void setLatitude(String latitude) {
+ this.latitude = latitude;
+ }
+ public String getLatitude() {
+ return this.latitude;
+ }
+ public void setLongitude(String longitude) {
+ this.longitude = longitude;
+ }
+ public String getLongitude() {
+ return this.longitude;
+ }
+ public void setCountryCode(String countryCode) {
+ this.countryCode = countryCode;
+ }
+ public String getCountryCode() {
+ return this.countryCode;
+ }
+ public void setCountryName(String countryName) {
+ this.countryName = countryName;
+ }
+ public String getCountryName() {
+ return this.countryName;
+ }
+ public void setRegionName(String regionName) {
+ this.regionName = regionName;
+ }
+ public String getRegionName() {
+ return this.regionName;
+ }
+ public void setCityName(String cityName) {
+ this.cityName = cityName;
+ }
+ public String getCityName() {
+ return this.cityName;
+ }
+ public void setASNumber(String aSNumber) {
+ this.aSNumber = aSNumber;
+ }
+ public String getASNumber() {
+ return this.aSNumber;
+ }
+ public void setASName(String aSName) {
+ this.aSName = aSName;
+ }
+ public String getASName() {
+ return this.aSName;
+ }
+ public long getFirstSeenMillis() {
+ return this.firstSeenMillis;
+ }
+ public long getLastSeenMillis() {
+ return this.lastSeenMillis;
+ }
+ public int getOrPort() {
+ return this.orPort;
+ }
+ public int getDirPort() {
+ return this.dirPort;
+ }
+ public SortedSet<String> getRelayFlags() {
+ return this.relayFlags;
+ }
+ public long getConsensusWeight() {
+ return this.consensusWeight;
+ }
+ public void setRunning(boolean running) {
+ this.running = running;
+ }
+ public boolean getRunning() {
+ return this.running;
+ }
+ public void setHostName(String hostName) {
+ this.hostName = hostName;
+ }
+ public String getHostName() {
+ return this.hostName;
+ }
+ public void setLastRdnsLookup(long lastRdnsLookup) {
+ this.lastRdnsLookup = lastRdnsLookup;
+ }
+ public long getLastRdnsLookup() {
+ return this.lastRdnsLookup;
+ }
+ public void setAdvertisedBandwidthFraction(
+ double advertisedBandwidthFraction) {
+ this.advertisedBandwidthFraction = advertisedBandwidthFraction;
+ }
+ public double getAdvertisedBandwidthFraction() {
+ return this.advertisedBandwidthFraction;
+ }
+ public void setConsensusWeightFraction(double consensusWeightFraction) {
+ this.consensusWeightFraction = consensusWeightFraction;
+ }
+ public double getConsensusWeightFraction() {
+ return this.consensusWeightFraction;
+ }
+ public void setGuardProbability(double guardProbability) {
+ this.guardProbability = guardProbability;
+ }
+ public double getGuardProbability() {
+ return this.guardProbability;
+ }
+ public void setMiddleProbability(double middleProbability) {
+ this.middleProbability = middleProbability;
+ }
+ public double getMiddleProbability() {
+ return this.middleProbability;
+ }
+ public void setExitProbability(double exitProbability) {
+ this.exitProbability = exitProbability;
+ }
+ public double getExitProbability() {
+ return this.exitProbability;
+ }
+ public String getDefaultPolicy() {
+ return this.defaultPolicy;
+ }
+ public String getPortList() {
+ return this.portList;
+ }
+ public SortedMap<Long, Set<String>> getLastAddresses() {
+ return this.lastAddresses == null ? null :
+ new TreeMap<Long, Set<String>>(this.lastAddresses);
+ }
+ public long getLastChangedOrAddress() {
+ long lastChangedAddressesMillis = -1L;
+ if (this.lastAddresses != null) {
+ Set<String> lastAddresses = null;
+ for (Map.Entry<Long, Set<String>> e : this.lastAddresses.entrySet()) {
+ if (lastAddresses != null) {
+ for (String address : e.getValue()) {
+ if (!lastAddresses.contains(address)) {
+ return lastChangedAddressesMillis;
+ }
+ }
+ }
+ lastChangedAddressesMillis = e.getKey();
+ lastAddresses = e.getValue();
+ }
+ }
+ return lastChangedAddressesMillis;
+ }
+}
+
diff --git a/src/org/torproject/onionoo/ResourceServlet.java b/src/org/torproject/onionoo/ResourceServlet.java
index 3ad33ae..996b870 100644
--- a/src/org/torproject/onionoo/ResourceServlet.java
+++ b/src/org/torproject/onionoo/ResourceServlet.java
@@ -16,6 +16,7 @@ import java.util.Map;
import java.util.Scanner;
import java.util.Set;
import java.util.SortedMap;
+import java.util.SortedSet;
import java.util.TimeZone;
import java.util.TreeMap;
import java.util.regex.Pattern;
@@ -64,9 +65,10 @@ public class ResourceServlet extends HttpServlet {
bridgesByLastSeenDays = null;
private void readSummaryFile() {
long summaryFileLastModified = -1L;
- String updateString = this.documentStore.retrieve(
- DocumentType.OUT_UPDATE);
- if (updateString != null) {
+ UpdateStatus updateStatus = this.documentStore.retrieve(
+ UpdateStatus.class, false);
+ if (updateStatus != null && updateStatus.documentString != null) {
+ String updateString = updateStatus.documentString;
try {
summaryFileLastModified = Long.parseLong(updateString.trim());
} catch (NumberFormatException e) {
@@ -94,25 +96,42 @@ public class ResourceServlet extends HttpServlet {
bridgesByFirstSeenDays = new TreeMap<Integer, Set<String>>(),
relaysByLastSeenDays = new TreeMap<Integer, Set<String>>(),
bridgesByLastSeenDays = new TreeMap<Integer, Set<String>>();
- CurrentNodes cn = new CurrentNodes(this.documentStore);
- cn.readOutSummary();
+ long relaysLastValidAfterMillis = -1L,
+ bridgesLastPublishedMillis = -1L;
+ Set<NodeStatus> currentRelays = new HashSet<NodeStatus>(),
+ currentBridges = new HashSet<NodeStatus>();
+ SortedSet<String> fingerprints = this.documentStore.list(
+ NodeStatus.class, false);
// TODO We should be able to learn if something goes wrong when
// reading the summary file, rather than silently having an empty
- // CurrentNodes instance.
- cn.setRelayRunningBits();
- cn.setBridgeRunningBits();
+ // list of fingerprints.
+ for (String fingerprint : fingerprints) {
+ NodeStatus node = this.documentStore.retrieve(NodeStatus.class,
+ true, fingerprint);
+ if (node.isRelay()) {
+ relaysLastValidAfterMillis = Math.max(
+ relaysLastValidAfterMillis, node.getLastSeenMillis());
+ currentRelays.add(node);
+ } else {
+ bridgesLastPublishedMillis = Math.max(
+ bridgesLastPublishedMillis, node.getLastSeenMillis());
+ currentBridges.add(node);
+ }
+ }
SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
"yyyy-MM-dd HH:mm:ss");
dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
this.relaysPublishedString = dateTimeFormat.format(
- cn.getLastValidAfterMillis());
+ relaysLastValidAfterMillis);
this.bridgesPublishedString = dateTimeFormat.format(
- cn.getLastPublishedMillis());
+ bridgesLastPublishedMillis);
List<String> orderRelaysByConsensusWeight = new ArrayList<String>();
- for (Node entry : cn.getCurrentRelays().values()) {
+ for (NodeStatus entry : currentRelays) {
String fingerprint = entry.getFingerprint().toUpperCase();
String hashedFingerprint = entry.getHashedFingerprint().
toUpperCase();
+ entry.setRunning(entry.getLastSeenMillis() ==
+ relaysLastValidAfterMillis);
String line = this.formatRelaySummaryLine(entry);
relayFingerprintSummaryLines.put(fingerprint, line);
relayFingerprintSummaryLines.put(hashedFingerprint, line);
@@ -169,10 +188,12 @@ public class ResourceServlet extends HttpServlet {
for (String relay : orderRelaysByConsensusWeight) {
relaysByConsensusWeight.add(relay.split(" ")[1]);
}
- for (Node entry : cn.getCurrentBridges().values()) {
+ for (NodeStatus entry : currentBridges) {
String hashedFingerprint = entry.getFingerprint().toUpperCase();
String hashedHashedFingerprint = entry.getHashedFingerprint().
toUpperCase();
+ entry.setRunning(entry.getRelayFlags().contains("Running") &&
+ entry.getLastSeenMillis() == bridgesLastPublishedMillis);
String line = this.formatBridgeSummaryLine(entry);
bridgeFingerprintSummaryLines.put(hashedFingerprint, line);
bridgeFingerprintSummaryLines.put(hashedHashedFingerprint, line);
@@ -212,7 +233,7 @@ public class ResourceServlet extends HttpServlet {
this.readSummaryFile = true;
}
- private String formatRelaySummaryLine(Node entry) {
+ private String formatRelaySummaryLine(NodeStatus entry) {
String nickname = !entry.getNickname().equals("Unnamed") ?
entry.getNickname() : null;
String fingerprint = entry.getFingerprint();
@@ -238,7 +259,7 @@ public class ResourceServlet extends HttpServlet {
fingerprint, addressesBuilder.toString(), running);
}
- private String formatBridgeSummaryLine(Node entry) {
+ private String formatBridgeSummaryLine(NodeStatus entry) {
String nickname = !entry.getNickname().equals("Unnamed") ?
entry.getNickname() : null;
String hashedFingerprint = entry.getFingerprint();
@@ -921,8 +942,9 @@ public class ResourceServlet extends HttpServlet {
return "";
}
fingerprint = fingerprint.substring(0, 40);
- String documentString = this.documentStore.retrieve(
- DocumentType.OUT_DETAILS, fingerprint);
+ DetailsDocument detailsDocument = this.documentStore.retrieve(
+ DetailsDocument.class, false, fingerprint);
+ String documentString = detailsDocument.documentString;
StringBuilder sb = new StringBuilder();
String detailsLines = null;
if (documentString != null) {
@@ -969,8 +991,9 @@ public class ResourceServlet extends HttpServlet {
return "";
}
fingerprint = fingerprint.substring(0, 40);
- String bandwidthLines = this.documentStore.retrieve(
- DocumentType.OUT_BANDWIDTH, fingerprint);
+ BandwidthDocument bandwidthDocument = this.documentStore.retrieve(
+ BandwidthDocument.class, false, fingerprint);
+ String bandwidthLines = bandwidthDocument.documentString;
if (bandwidthLines != null) {
bandwidthLines = bandwidthLines.substring(0,
bandwidthLines.length() - 1);
@@ -991,8 +1014,9 @@ public class ResourceServlet extends HttpServlet {
return "";
}
fingerprint = fingerprint.substring(0, 40);
- String weightsLines = this.documentStore.retrieve(
- DocumentType.OUT_WEIGHTS, fingerprint);
+ WeightsDocument weightsDocument = this.documentStore.retrieve(
+ WeightsDocument.class, false, fingerprint);
+ String weightsLines = weightsDocument.documentString;
if (weightsLines != null) {
weightsLines = weightsLines.substring(0, weightsLines.length() - 1);
return weightsLines;
diff --git a/src/org/torproject/onionoo/SummaryDocument.java b/src/org/torproject/onionoo/SummaryDocument.java
new file mode 100644
index 0000000..e0aadb5
--- /dev/null
+++ b/src/org/torproject/onionoo/SummaryDocument.java
@@ -0,0 +1,27 @@
+/* Copyright 2013 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.onionoo;
+
+import java.util.List;
+
+class SummaryDocument extends Document {
+
+ class RelaySummary {
+ String n;
+ String f;
+ String[] a;
+ Boolean r;
+ }
+
+ class BridgeSummary {
+ String n;
+ String h;
+ Boolean r;
+ }
+
+ String relays_published;
+ List<RelaySummary> relays;
+ String bridges_published;
+ List<BridgeSummary> bridges;
+}
+
diff --git a/src/org/torproject/onionoo/UpdateStatus.java b/src/org/torproject/onionoo/UpdateStatus.java
new file mode 100644
index 0000000..bd3648d
--- /dev/null
+++ b/src/org/torproject/onionoo/UpdateStatus.java
@@ -0,0 +1,7 @@
+/* Copyright 2013 The Tor Project
+ * See LICENSE for licensing information */
+package org.torproject.onionoo;
+
+class UpdateStatus extends Document {
+}
+
diff --git a/src/org/torproject/onionoo/WeightsDataWriter.java b/src/org/torproject/onionoo/WeightsDataWriter.java
index de9ad42..317b8e2 100644
--- a/src/org/torproject/onionoo/WeightsDataWriter.java
+++ b/src/org/torproject/onionoo/WeightsDataWriter.java
@@ -29,15 +29,17 @@ public class WeightsDataWriter {
private DocumentStore documentStore;
+ private SortedSet<String> currentFingerprints = new TreeSet<String>();
+
public WeightsDataWriter(DescriptorSource descriptorSource,
DocumentStore documentStore) {
this.descriptorSource = descriptorSource;
this.documentStore = documentStore;
}
- private SortedSet<String> currentFingerprints = new TreeSet<String>();
- public void setCurrentRelays(SortedMap<String, Node> currentRelays) {
- this.currentFingerprints.addAll(currentRelays.keySet());
+ public void setCurrentNodes(
+ SortedMap<String, NodeStatus> currentNodes) {
+ this.currentFingerprints.addAll(currentNodes.keySet());
}
/* Read advertised bandwidths of all server descriptors in
@@ -265,9 +267,10 @@ public class WeightsDataWriter {
return a[0] < b[0] ? -1 : a[0] > b[0] ? 1 : 0;
}
});
- String historyString = this.documentStore.retrieve(
- DocumentType.STATUS_WEIGHTS, fingerprint);
- if (historyString != null) {
+ WeightsStatus weightsStatus = this.documentStore.retrieve(
+ WeightsStatus.class, false, fingerprint);
+ if (weightsStatus != null) {
+ String historyString = weightsStatus.documentString;
SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
"yyyy-MM-dd HH:mm:ss");
dateTimeFormat.setLenient(false);
@@ -378,9 +381,9 @@ public class WeightsDataWriter {
}
sb.append("\n");
}
- String historyString = sb.toString();
- this.documentStore.store(historyString, DocumentType.STATUS_WEIGHTS,
- fingerprint);
+ WeightsStatus weightsStatus = new WeightsStatus();
+ weightsStatus.documentString = sb.toString();
+ this.documentStore.store(weightsStatus, fingerprint);
}
public void writeWeightsDataFiles() {
@@ -392,10 +395,10 @@ public class WeightsDataWriter {
/* Don't write weights data file to disk. */
continue;
}
- String historyString = this.formatHistoryString(fingerprint,
- history);
- this.documentStore.store(historyString, DocumentType.OUT_WEIGHTS,
- fingerprint);
+ WeightsDocument weightsDocument = new WeightsDocument();
+ weightsDocument.documentString = this.formatHistoryString(
+ fingerprint, history);
+ this.documentStore.store(weightsDocument, fingerprint);
}
}
@@ -550,15 +553,15 @@ public class WeightsDataWriter {
public void deleteObsoleteWeightsDataFiles() {
SortedSet<String> obsoleteWeightsFiles;
- obsoleteWeightsFiles = this.documentStore.list(
- DocumentType.OUT_WEIGHTS);
+ obsoleteWeightsFiles = this.documentStore.list(WeightsDocument.class,
+ false);
for (String fingerprint : this.currentFingerprints) {
if (obsoleteWeightsFiles.contains(fingerprint)) {
obsoleteWeightsFiles.remove(fingerprint);
}
}
for (String fingerprint : obsoleteWeightsFiles) {
- this.documentStore.remove(DocumentType.OUT_WEIGHTS, fingerprint);
+ this.documentStore.remove(WeightsDocument.class, fingerprint);
}
}
}
diff --git a/src/org/torproject/onionoo/WeightsDocument.java b/src/org/torproject/onionoo/WeightsDocument.java
new file mode 100644
index 0000000..6739333
--- /dev/null
+++ b/src/org/torproject/onionoo/WeightsDocument.java
@@ -0,0 +1,31 @@
+package org.torproject.onionoo;
+
+import java.util.List;
+import java.util.Map;
+
+class WeightsDocument extends Document {
+
+ class WeightsHistory {
+ String first;
+ String last;
+ Integer interval;
+ Double factor;
+ Integer count;
+ List<Integer> values;
+ }
+
+ class NodeWeights {
+ String fingerprint;
+ Map<String, WeightsHistory> advertised_bandwidth_fraction;
+ Map<String, WeightsHistory> consensus_weight_fraction;
+ Map<String, WeightsHistory> guard_probability;
+ Map<String, WeightsHistory> middle_probability;
+ Map<String, WeightsHistory> exit_probability;
+ }
+
+ String relays_published;
+ List<NodeWeights> relays;
+ String bridges_published;
+ List<NodeWeights> bridges;
+}
+
diff --git a/src/org/torproject/onionoo/WeightsStatus.java b/src/org/torproject/onionoo/WeightsStatus.java
new file mode 100644
index 0000000..f8f78ad
--- /dev/null
+++ b/src/org/torproject/onionoo/WeightsStatus.java
@@ -0,0 +1,5 @@
+package org.torproject.onionoo;
+
+class WeightsStatus extends Document {
+}
+
1
0

[translation/vidalia_alpha_completed] Update translations for vidalia_alpha_completed
by translation@torproject.org 28 Jun '13
by translation@torproject.org 28 Jun '13
28 Jun '13
commit 93646075e5ce9a3d00a2fe6947198db12c8e11a9
Author: Translation commit bot <translation(a)torproject.org>
Date: Fri Jun 28 04:15:32 2013 +0000
Update translations for vidalia_alpha_completed
---
zh_CN/vidalia_zh_CN.po | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/zh_CN/vidalia_zh_CN.po b/zh_CN/vidalia_zh_CN.po
index d1b81f6..cda6085 100644
--- a/zh_CN/vidalia_zh_CN.po
+++ b/zh_CN/vidalia_zh_CN.po
@@ -9,7 +9,7 @@ msgstr ""
"Project-Id-Version: The Tor Project\n"
"Report-Msgid-Bugs-To: https://trac.torproject.org/projects/tor\n"
"POT-Creation-Date: 2012-03-21 17:46+0000\n"
-"PO-Revision-Date: 2013-06-27 23:50+0000\n"
+"PO-Revision-Date: 2013-06-28 04:00+0000\n"
"Last-Translator: runasand <runa.sandvik(a)gmail.com>\n"
"Language-Team: Chinese (China) (http://www.transifex.com/projects/p/torproject/language/zh_CN/)\n"
"MIME-Version: 1.0\n"
1
0

[translation/vidalia_alpha] Update translations for vidalia_alpha
by translation@torproject.org 28 Jun '13
by translation@torproject.org 28 Jun '13
28 Jun '13
commit c14d7bb9f7a65ed33423e5b1a02b5cac863cbd0d
Author: Translation commit bot <translation(a)torproject.org>
Date: Fri Jun 28 04:15:29 2013 +0000
Update translations for vidalia_alpha
---
zh_CN/vidalia_zh_CN.po | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/zh_CN/vidalia_zh_CN.po b/zh_CN/vidalia_zh_CN.po
index d1b81f6..cda6085 100644
--- a/zh_CN/vidalia_zh_CN.po
+++ b/zh_CN/vidalia_zh_CN.po
@@ -9,7 +9,7 @@ msgstr ""
"Project-Id-Version: The Tor Project\n"
"Report-Msgid-Bugs-To: https://trac.torproject.org/projects/tor\n"
"POT-Creation-Date: 2012-03-21 17:46+0000\n"
-"PO-Revision-Date: 2013-06-27 23:50+0000\n"
+"PO-Revision-Date: 2013-06-28 04:00+0000\n"
"Last-Translator: runasand <runa.sandvik(a)gmail.com>\n"
"Language-Team: Chinese (China) (http://www.transifex.com/projects/p/torproject/language/zh_CN/)\n"
"MIME-Version: 1.0\n"
1
0

[translation/vidalia_completed] Update translations for vidalia_completed
by translation@torproject.org 28 Jun '13
by translation@torproject.org 28 Jun '13
28 Jun '13
commit a3a51d2404485e847341371e9d62f8a3b7e32d60
Author: Translation commit bot <translation(a)torproject.org>
Date: Fri Jun 28 04:15:25 2013 +0000
Update translations for vidalia_completed
---
zh_CN/qt_zh_CN.po | 22 +++++++++++-----------
zh_CN/vidalia_zh_CN.po | 2 +-
2 files changed, 12 insertions(+), 12 deletions(-)
diff --git a/zh_CN/qt_zh_CN.po b/zh_CN/qt_zh_CN.po
index 8439b01..50ef6f2 100644
--- a/zh_CN/qt_zh_CN.po
+++ b/zh_CN/qt_zh_CN.po
@@ -10,7 +10,7 @@ msgstr ""
"Project-Id-Version: The Tor Project\n"
"Report-Msgid-Bugs-To: https://trac.torproject.org/projects/tor\n"
"POT-Creation-Date: 2008-08-20 03:25+0000\n"
-"PO-Revision-Date: 2013-06-28 03:42+0000\n"
+"PO-Revision-Date: 2013-06-28 04:10+0000\n"
"Last-Translator: simabull tsai\n"
"Language-Team: Chinese (China) (http://www.transifex.com/projects/p/torproject/language/zh_CN/)\n"
"MIME-Version: 1.0\n"
@@ -93,7 +93,7 @@ msgstr "不保存"
#: qdialogbuttonbox.cpp:554
msgctxt "QDialogButtonBox"
msgid "Discard"
-msgstr "抛弃"
+msgstr "放弃"
#: qdialogbuttonbox.cpp:557
msgctxt "QDialogButtonBox"
@@ -103,7 +103,7 @@ msgstr "是(&Y)"
#: qdialogbuttonbox.cpp:560
msgctxt "QDialogButtonBox"
msgid "Yes to &All"
-msgstr "全部选是(&A)"
+msgstr "全是(&A)"
#: qdialogbuttonbox.cpp:563
msgctxt "QDialogButtonBox"
@@ -113,7 +113,7 @@ msgstr "否(&N)"
#: qdialogbuttonbox.cpp:566
msgctxt "QDialogButtonBox"
msgid "N&o to All"
-msgstr "全部选否(&O)"
+msgstr "全否(&O)"
#: qdialogbuttonbox.cpp:569
msgctxt "QDialogButtonBox"
@@ -138,12 +138,12 @@ msgstr "忽略"
#: qdialogbuttonbox.cpp:581
msgctxt "QDialogButtonBox"
msgid "Restore Defaults"
-msgstr "恢复默认"
+msgstr "恢复默认值"
#: qdialogbuttonbox.cpp:552
msgctxt "QDialogButtonBox"
msgid "Close without Saving"
-msgstr "不保存关闭"
+msgstr "关闭且不保存"
#: qdialogbuttonbox.cpp:525
msgctxt "QDialogButtonBox"
@@ -163,7 +163,7 @@ msgstr "大小"
#: qdirmodel.cpp:427
msgctxt "QDirModel"
msgid "Kind"
-msgstr "类型"
+msgstr "种类"
#: qdirmodel.cpp:429
msgctxt "QDirModel"
@@ -173,7 +173,7 @@ msgstr "类型"
#: qdirmodel.cpp:435
msgctxt "QDirModel"
msgid "Date Modified"
-msgstr "日期被修改"
+msgstr "已修改的日期"
#: qfiledialog_win.cpp:126
msgctxt "QFileDialog"
@@ -205,7 +205,7 @@ msgctxt "QFileDialog"
msgid ""
"%1 already exists.\n"
"Do you want to replace it?"
-msgstr "%1已经存在。你想要替换它么?"
+msgstr "%1 已存在。\n的否替换?"
#: qfiledialog.cpp:1690
msgctxt "QFileDialog"
@@ -213,12 +213,12 @@ msgid ""
"%1\n"
"File not found.\n"
"Please verify the correct file name was given."
-msgstr "文件 %1 没有找到。\n请核实已给定文件名的文件是否正确。"
+msgstr " %1 文件未找到。\n请确认指定文件名是否正确。"
#: qdirmodel.cpp:833
msgctxt "QFileDialog"
msgid "My Computer"
-msgstr "我的计算机"
+msgstr "我的电脑"
#: qfiledialog.cpp:462
msgctxt "QFileDialog"
diff --git a/zh_CN/vidalia_zh_CN.po b/zh_CN/vidalia_zh_CN.po
index 5c69e23..8e8c184 100644
--- a/zh_CN/vidalia_zh_CN.po
+++ b/zh_CN/vidalia_zh_CN.po
@@ -12,7 +12,7 @@ msgstr ""
"Project-Id-Version: The Tor Project\n"
"Report-Msgid-Bugs-To: https://trac.torproject.org/projects/tor\n"
"POT-Creation-Date: 2012-03-21 17:52+0000\n"
-"PO-Revision-Date: 2013-06-27 23:50+0000\n"
+"PO-Revision-Date: 2013-06-28 04:00+0000\n"
"Last-Translator: simabull tsai\n"
"Language-Team: Chinese (China) (http://www.transifex.com/projects/p/torproject/language/zh_CN/)\n"
"MIME-Version: 1.0\n"
1
0

28 Jun '13
commit 33818cc3d1bbd0bc1a7f325a27e9a57eda5afa3b
Author: Translation commit bot <translation(a)torproject.org>
Date: Fri Jun 28 04:15:19 2013 +0000
Update translations for vidalia
---
zh_CN/qt_zh_CN.po | 22 +++++++++++-----------
zh_CN/vidalia_zh_CN.po | 2 +-
2 files changed, 12 insertions(+), 12 deletions(-)
diff --git a/zh_CN/qt_zh_CN.po b/zh_CN/qt_zh_CN.po
index 8439b01..50ef6f2 100644
--- a/zh_CN/qt_zh_CN.po
+++ b/zh_CN/qt_zh_CN.po
@@ -10,7 +10,7 @@ msgstr ""
"Project-Id-Version: The Tor Project\n"
"Report-Msgid-Bugs-To: https://trac.torproject.org/projects/tor\n"
"POT-Creation-Date: 2008-08-20 03:25+0000\n"
-"PO-Revision-Date: 2013-06-28 03:42+0000\n"
+"PO-Revision-Date: 2013-06-28 04:10+0000\n"
"Last-Translator: simabull tsai\n"
"Language-Team: Chinese (China) (http://www.transifex.com/projects/p/torproject/language/zh_CN/)\n"
"MIME-Version: 1.0\n"
@@ -93,7 +93,7 @@ msgstr "不保存"
#: qdialogbuttonbox.cpp:554
msgctxt "QDialogButtonBox"
msgid "Discard"
-msgstr "抛弃"
+msgstr "放弃"
#: qdialogbuttonbox.cpp:557
msgctxt "QDialogButtonBox"
@@ -103,7 +103,7 @@ msgstr "是(&Y)"
#: qdialogbuttonbox.cpp:560
msgctxt "QDialogButtonBox"
msgid "Yes to &All"
-msgstr "全部选是(&A)"
+msgstr "全是(&A)"
#: qdialogbuttonbox.cpp:563
msgctxt "QDialogButtonBox"
@@ -113,7 +113,7 @@ msgstr "否(&N)"
#: qdialogbuttonbox.cpp:566
msgctxt "QDialogButtonBox"
msgid "N&o to All"
-msgstr "全部选否(&O)"
+msgstr "全否(&O)"
#: qdialogbuttonbox.cpp:569
msgctxt "QDialogButtonBox"
@@ -138,12 +138,12 @@ msgstr "忽略"
#: qdialogbuttonbox.cpp:581
msgctxt "QDialogButtonBox"
msgid "Restore Defaults"
-msgstr "恢复默认"
+msgstr "恢复默认值"
#: qdialogbuttonbox.cpp:552
msgctxt "QDialogButtonBox"
msgid "Close without Saving"
-msgstr "不保存关闭"
+msgstr "关闭且不保存"
#: qdialogbuttonbox.cpp:525
msgctxt "QDialogButtonBox"
@@ -163,7 +163,7 @@ msgstr "大小"
#: qdirmodel.cpp:427
msgctxt "QDirModel"
msgid "Kind"
-msgstr "类型"
+msgstr "种类"
#: qdirmodel.cpp:429
msgctxt "QDirModel"
@@ -173,7 +173,7 @@ msgstr "类型"
#: qdirmodel.cpp:435
msgctxt "QDirModel"
msgid "Date Modified"
-msgstr "日期被修改"
+msgstr "已修改的日期"
#: qfiledialog_win.cpp:126
msgctxt "QFileDialog"
@@ -205,7 +205,7 @@ msgctxt "QFileDialog"
msgid ""
"%1 already exists.\n"
"Do you want to replace it?"
-msgstr "%1已经存在。你想要替换它么?"
+msgstr "%1 已存在。\n的否替换?"
#: qfiledialog.cpp:1690
msgctxt "QFileDialog"
@@ -213,12 +213,12 @@ msgid ""
"%1\n"
"File not found.\n"
"Please verify the correct file name was given."
-msgstr "文件 %1 没有找到。\n请核实已给定文件名的文件是否正确。"
+msgstr " %1 文件未找到。\n请确认指定文件名是否正确。"
#: qdirmodel.cpp:833
msgctxt "QFileDialog"
msgid "My Computer"
-msgstr "我的计算机"
+msgstr "我的电脑"
#: qfiledialog.cpp:462
msgctxt "QFileDialog"
diff --git a/zh_CN/vidalia_zh_CN.po b/zh_CN/vidalia_zh_CN.po
index 5c69e23..8e8c184 100644
--- a/zh_CN/vidalia_zh_CN.po
+++ b/zh_CN/vidalia_zh_CN.po
@@ -12,7 +12,7 @@ msgstr ""
"Project-Id-Version: The Tor Project\n"
"Report-Msgid-Bugs-To: https://trac.torproject.org/projects/tor\n"
"POT-Creation-Date: 2012-03-21 17:52+0000\n"
-"PO-Revision-Date: 2013-06-27 23:50+0000\n"
+"PO-Revision-Date: 2013-06-28 04:00+0000\n"
"Last-Translator: simabull tsai\n"
"Language-Team: Chinese (China) (http://www.transifex.com/projects/p/torproject/language/zh_CN/)\n"
"MIME-Version: 1.0\n"
1
0

[translation/vidalia_completed] Update translations for vidalia_completed
by translation@torproject.org 28 Jun '13
by translation@torproject.org 28 Jun '13
28 Jun '13
commit cb7f919daea5a3b33d35a08872b1d4fa1db007e6
Author: Translation commit bot <translation(a)torproject.org>
Date: Fri Jun 28 03:45:26 2013 +0000
Update translations for vidalia_completed
---
zh_CN/qt_zh_CN.po | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/zh_CN/qt_zh_CN.po b/zh_CN/qt_zh_CN.po
index 97379ed..8439b01 100644
--- a/zh_CN/qt_zh_CN.po
+++ b/zh_CN/qt_zh_CN.po
@@ -10,7 +10,7 @@ msgstr ""
"Project-Id-Version: The Tor Project\n"
"Report-Msgid-Bugs-To: https://trac.torproject.org/projects/tor\n"
"POT-Creation-Date: 2008-08-20 03:25+0000\n"
-"PO-Revision-Date: 2013-06-28 02:15+0000\n"
+"PO-Revision-Date: 2013-06-28 03:42+0000\n"
"Last-Translator: simabull tsai\n"
"Language-Team: Chinese (China) (http://www.transifex.com/projects/p/torproject/language/zh_CN/)\n"
"MIME-Version: 1.0\n"
1
0