tor-commits
June 2013
- 19 participants
- 1571 discussions

17 Jun '13
commit 9b51f84e43d8e626c4c4368b4859e535abfebb0c
Author: Translation commit bot <translation@torproject.org>
Date: Mon Jun 17 11:45:20 2013 +0000
Update translations for vidalia
---
vi/vidalia_vi.po | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/vi/vidalia_vi.po b/vi/vidalia_vi.po
index 14ed338..24e72c6 100755
--- a/vi/vidalia_vi.po
+++ b/vi/vidalia_vi.po
@@ -6,7 +6,7 @@ msgstr ""
"Project-Id-Version: The Tor Project\n"
"Report-Msgid-Bugs-To: https://trac.torproject.org/projects/tor\n"
"POT-Creation-Date: 2012-03-21 17:52+0000\n"
-"PO-Revision-Date: 2013-05-30 23:40+0000\n"
+"PO-Revision-Date: 2013-06-17 11:30+0000\n"
"Last-Translator: runasand <runa.sandvik(a)gmail.com>\n"
"Language-Team: Vietnamese (http://www.transifex.com/projects/p/torproject/language/vi/)\n"
"MIME-Version: 1.0\n"

17 Jun '13
commit 162b4f2d0e77986239bcdcb09f33045caa4d57b7
Author: Translation commit bot <translation@torproject.org>
Date: Mon Jun 17 11:45:10 2013 +0000
Update translations for orbot
---
values-vi/strings.xml | 23 ++++++++++++++++++-----
1 file changed, 18 insertions(+), 5 deletions(-)
diff --git a/values-vi/strings.xml b/values-vi/strings.xml
index cde6ed6..ec50ff6 100644
--- a/values-vi/strings.xml
+++ b/values-vi/strings.xml
@@ -5,22 +5,35 @@
<string name="default_web_url">http://check.torproject.org</string>
<string name="secure_default_web_url">https://check.torproject.org</string>
<string name="tor_check_api_url">https://check.torproject.org/?TorButton=true</string>
- <string name="control_permission_label">bắt đầu và ngừng Tor</string>
- <string name="status_activated">Kết nối với mạng Tor</string>
- <string name="status_shutting_down">Orbot đang tắt</string>
+ <string name="control_permission_label">khởi động và ngừng Tor</string>
+ <string name="tor_proxy_service_process">dịch vụ tor proxy</string>
+ <string name="status_activated">Đã kết nối với mạng Tor</string>
+ <string name="status_shutting_down">Orbot đang được tắt</string>
<string name="tor_process_starting">Bắt đầu Tor</string>
- <string name="not_anonymous_yet">Chú ý: luồng thông tin của bạn không phải là ẩn danh! Hãy cấu hình các ứng dụng của bạn để sử dụng HTTP proxy 127.0.0.1:8118 hoặc SOCKS4A hoặc proxy SOCKS5 127.0.0.1:9050</string>
- <string name="menu_home">Trang chu</string>
+ <string name="not_anonymous_yet">Chú ý: luồng thông tin của bạn không phải là ẩn danh! Hãy cài đặt các ứng dụng của bạn để sử dụng HTTP proxy 127.0.0.1:8118 hoặc SOCKS4A hoặc proxy SOCKS5 127.0.0.1:9050</string>
+ <string name="menu_home">Trang chủ</string>
<string name="menu_browse">Trình duyệt</string>
<string name="menu_settings">Thiết lập</string>
+ <string name="menu_log">Đang nhập</string>
<string name="menu_info">Giúp đỡ</string>
+ <string name="menu_apps">Ứng dụng</string>
<string name="menu_start">Bắt đầu</string>
<string name="menu_stop">Ngừng</string>
<string name="menu_about">Về</string>
<string name="button_help">Giúp đỡ</string>
<string name="button_close">Đóng</string>
<string name="button_about">Giới thiệu</string>
+ <string name="menu_verify">Kiểm Tra</string>
<string name="menu_exit">Thoát</string>
+ <string name="press_to_start">- nhấn lâu để khởi động -</string>
+ <string name="pref_trans_proxy_summary">Tự động áp dụng Tor cho các ứng dụng</string>
+ <string name="pref_transparent_all_title">Áp dụng Tor cho tất cả</string>
+ <string name="pref_transparent_all_summary">Proxy luồng thông tin cho các ứng dụng qua Tor</string>
+ <string name="pref_transparent_port_fallback_title">Cổng Prox dự phòng</string>
+ <string name="pref_transparent_port_fallback_summary">Chú Ý: Vượt qua các cổng thông dụng (80, 443, vv). *CHỈ DÙNG* nếu chế độ \"Tất Cả\" hoặc \"Ứng Dụng\" không dùng được.</string>
+ <string name="pref_transparent_port_title">Danh sách cổng</string>
+ <string name="pref_transparent_port_summary">Liệt kê các cổng để proxy. *CHỈ DÙNG* nếu chế độ \"Tất Cả\" hoặc \"Ứng Dụng\" không dùng được.</string>
+ <string name="pref_transparent_port_dialog">Điền vào số cổng để proxy</string>
<string name="btn_back">Trở về</string>
<string name="btn_cancel">Hủy</string>
<!--Welcome Wizard strings (DJH)-->

16 Jun '13
commit 456bc64f2fbd41918dd2db711eaf259a90f200e3
Author: Damian Johnson <atagar@torproject.org>
Date: Sun Jun 16 14:44:30 2013 -0700
FAQ entry about how to reload your torrc
---
docs/faq.rst | 20 ++++++++++++++++++++
1 file changed, 20 insertions(+)
diff --git a/docs/faq.rst b/docs/faq.rst
index 905b889..999ec2a 100644
--- a/docs/faq.rst
+++ b/docs/faq.rst
@@ -13,6 +13,7 @@ Frequently Asked Questions
* :ref:`how_do_i_request_a_new_identity_from_tor`
* :ref:`how_do_i_get_information_about_my_exits`
+ * :ref:`how_do_i_reload_my_torrc`
* **Development**
@@ -160,6 +161,25 @@ To learn about the Tor relays you're presently using call :func:`~stem.control.C
nickname: chaoscomputerclub19
address: 31.172.30.2
+.. _how_do_i_reload_my_torrc:
+
+How do I reload my torrc?
+-------------------------
+
+Tor is configured through its `torrc <https://www.torproject.org/docs/faq.html.en#torrc>`_. When you edit this file you need to either restart Tor or issue a **SIGHUP** for the changes to be reflected. To issue a SIGHUP you can either...
+
+ * Run **pkill -sighup tor**.
+ * Send Tor a **SIGHUP** signal through its control port...
+
+::
+
+ from stem import Signal
+ from stem.control import Controller
+
+ with Controller.from_port(port = 9051) as controller:
+ controller.authenticate()
+ controller.signal(Signal.SIGHUP)
+
Development
===========

r26220: {website} Adding Kostas' GSoC application as an example Kostas copied (website/trunk/about/en)
by Damian Johnson 16 Jun '13
16 Jun '13
Author: atagar
Date: 2013-06-16 21:31:19 +0000 (Sun, 16 Jun 2013)
New Revision: 26220
Modified:
website/trunk/about/en/gsoc.wml
Log:
Adding Kostas' GSoC application as an example
Kostas copied his application to his site and sent me his ok.
Modified: website/trunk/about/en/gsoc.wml
===================================================================
--- website/trunk/about/en/gsoc.wml 2013-06-16 21:27:17 UTC (rev 26219)
+++ website/trunk/about/en/gsoc.wml 2013-06-16 21:31:19 UTC (rev 26220)
@@ -214,6 +214,7 @@
</p>
<ul>
+ <li><h4><a href="http://kostas.mkj.lt/gsoc2013/gsoc2013.html">Searchable Tor descriptor archive</a> by Kostas Jakeliunas</h4></li>
<li><h4><a href="https://www.google-melange.com/gsoc/proposal/review/google/gsoc2013/weltrau…">Create an Internet Censorship Virtual Machine Based Simulator</a> by Johannes Fürmann</h4></li>
<li><h4><a href="../about/gsocProposal/gsoc12-proposal-stemImprovements.html">Stem Improvements and Arm port</a> by Ravi Padmala</h4></li>
<li><h4><a href="http://feroze.in/gsoc12.html">Implementing Hidden Service Configuration and Bandwidth Scheduling Plugins</a> by Feroze Naina</h4></li>

r26219: {website} Adding Johannes' GSoC application as an example Johannes mad (website/trunk/about/en)
by Damian Johnson 16 Jun '13
16 Jun '13
Author: atagar
Date: 2013-06-16 21:27:17 +0000 (Sun, 16 Jun 2013)
New Revision: 26219
Modified:
website/trunk/about/en/gsoc.wml
Log:
Adding Johannes' GSoC application as an example
Johannes made his application public and sent me his ok.
Modified: website/trunk/about/en/gsoc.wml
===================================================================
--- website/trunk/about/en/gsoc.wml 2013-06-16 12:34:03 UTC (rev 26218)
+++ website/trunk/about/en/gsoc.wml 2013-06-16 21:27:17 UTC (rev 26219)
@@ -214,6 +214,7 @@
</p>
<ul>
+ <li><h4><a href="https://www.google-melange.com/gsoc/proposal/review/google/gsoc2013/weltrau…">Create an Internet Censorship Virtual Machine Based Simulator</a> by Johannes Fürmann</h4></li>
<li><h4><a href="../about/gsocProposal/gsoc12-proposal-stemImprovements.html">Stem Improvements and Arm port</a> by Ravi Padmala</h4></li>
<li><h4><a href="http://feroze.in/gsoc12.html">Implementing Hidden Service Configuration and Bandwidth Scheduling Plugins</a> by Feroze Naina</h4></li>
<li><h4><a href="../about/gsocProposal/gsoc10-proposal-soat.txt">SOAT Expansion</a> by John Schanck</h4></li>
commit d921dfdfe2f085cd7de986ed16956b2a7361e32a
Author: Damian Johnson <atagar@torproject.org>
Date: Sun Jun 16 12:50:50 2013 -0700
Renaming FAQ categories
Trivial change to rename 'Usage' to 'General Information', and 'Tasks' to
'Usage'.
---
docs/faq.rst | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/docs/faq.rst b/docs/faq.rst
index b10e01a..905b889 100644
--- a/docs/faq.rst
+++ b/docs/faq.rst
@@ -1,7 +1,7 @@
Frequently Asked Questions
==========================
-* **Usage**
+* **General Information**
* :ref:`what_is_stem`
* :ref:`does_stem_have_any_dependencies`
@@ -9,7 +9,7 @@ Frequently Asked Questions
* :ref:`what_license_is_stem_under`
* :ref:`where_can_i_get_help`
-* **Tasks**
+* **Usage**
* :ref:`how_do_i_request_a_new_identity_from_tor`
* :ref:`how_do_i_get_information_about_my_exits`
@@ -21,8 +21,8 @@ Frequently Asked Questions
* :ref:`how_do_i_build_the_site`
* :ref:`what_is_the_copyright_for_patches`
-Usage
-=====
+General Information
+===================
.. _what_is_stem:
@@ -63,7 +63,7 @@ Where can I get help?
Do you have a tor related question or project that you would like to discuss? If so then find us on the `tor-dev@ email list <https://lists.torproject.org/cgi-bin/mailman/listinfo/tor-dev>`_ and `IRC <https://www.torproject.org/about/contact.html.en#irc>`_.
-Tasks
+Usage
=====
.. _how_do_i_request_a_new_identity_from_tor:
@@ -112,7 +112,7 @@ For lower level control over Tor's circuits and path selection see the `client u
How do I get information about my exits?
----------------------------------------
-To learn about the Tor relays you're presently using call :func:`stem.control.Controller.get_circuits`. The last relay in the circuit's path is your exit...
+To learn about the Tor relays you're presently using call :func:`~stem.control.Controller.get_circuits`. The last relay in the circuit's path is your exit...
::
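The FAQ entry edited above, "How do I get information about my exits?", breaks off right after the :: marker, so the example itself is not shown in this excerpt. Purely as a hedged illustration of what such an example could look like, not a reconstruction of the missing text, here is a small stem session that prints the exit relay of each built circuit; the names used (CircStatus, get_circuits, path, get_network_status) are taken from stem's documented controller API and are an assumption here:

from stem import CircStatus
from stem.control import Controller

with Controller.from_port(port = 9051) as controller:
  controller.authenticate()

  for circ in controller.get_circuits():
    # Only fully built circuits have a complete path to inspect.
    if circ.status != CircStatus.BUILT:
      continue

    # The last hop in a circuit's path is the exit relay.
    exit_fingerprint, exit_nickname = circ.path[-1]
    exit_entry = controller.get_network_status(exit_fingerprint, None)

    print("circuit %s" % circ.id)
    print("  nickname: %s" % exit_nickname)
    print("  address: %s" % (exit_entry.address if exit_entry else "unknown"))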
commit 2613c98f574d26d8386b198d406a8d6f52283492
Author: Roger Dingledine <arma@torproject.org>
Date: Sun Jun 16 15:12:23 2013 -0400
forward-port 0.2.4.13-alpha changelog
---
ChangeLog | 94 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 94 insertions(+)
diff --git a/ChangeLog b/ChangeLog
index 6528f7a..6513d0c 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,97 @@
+Changes in version 0.2.4.13-alpha - 2013-06-14
+ Tor 0.2.4.13-alpha fixes a variety of potential remote crash
+ vulnerabilities, makes socks5 username/password circuit isolation
+ actually actually work (this time for sure!), and cleans up a bunch
+ of other issues in preparation for a release candidate.
+
+ o Major bugfixes (robustness):
+ - Close any circuit that has too many cells queued on it. Fixes
+ bug 9063; bugfix on the 54th commit of Tor. This bug is a further
+ fix beyond bug 6252, whose fix was merged into 0.2.3.21-rc.
+ - Prevent the get_freelists() function from running off the end of
+ the list of freelists if it somehow gets an unrecognized
+ allocation. Fixes bug 8844; bugfix on 0.2.0.16-alpha. Reported by
+ eugenis.
+ - Avoid an assertion failure on OpenBSD (and perhaps other BSDs)
+ when an exit connection with optimistic data succeeds immediately
+ rather than returning EINPROGRESS. Fixes bug 9017; bugfix on
+ 0.2.3.1-alpha.
+ - Fix a directory authority crash bug when building a consensus
+ using an older consensus as its basis. Fixes bug 8833. Bugfix
+ on 0.2.4.12-alpha.
+
+ o Major bugfixes:
+ - Avoid a memory leak where we would leak a consensus body when we
+ find that a consensus which we couldn't previously verify due to
+ missing certificates is now verifiable. Fixes bug 8719; bugfix
+ on 0.2.0.10-alpha.
+ - We used to always request authority certificates by identity digest,
+ meaning we'd get the newest one even when we wanted one with a
+ different signing key. Then we would complain about being given
+ a certificate we already had, and never get the one we really
+ wanted. Now we use the "fp-sk/" resource as well as the "fp/"
+ resource to request the one we want. Fixes bug 5595; bugfix on
+ 0.2.0.8-alpha.
+ - Follow the socks5 protocol when offering username/password
+ authentication. The fix for bug 8117 exposed this bug, and it
+ turns out real-world applications like Pidgin do care. Bugfix on
+ 0.2.3.2-alpha; fixes bug 8879.
+ - Prevent failures on Windows Vista and later when rebuilding the
+ microdescriptor cache. Diagnosed by Robert Ransom. Fixes bug 8822;
+ bugfix on 0.2.4.12-alpha.
+
+ o Minor bugfixes:
+ - Fix an impossible buffer overrun in the AES unit tests. Fixes
+ bug 8845; bugfix on 0.2.0.7-alpha. Found by eugenis.
+ - If for some reason we fail to write a microdescriptor while
+ rebuilding the cache, do not let the annotations from that
+ microdescriptor linger in the cache file, and do not let the
+ microdescriptor stay recorded as present in its old location.
+ Fixes bug 9047; bugfix on 0.2.2.6-alpha.
+ - Fix a memory leak that would occur whenever a configuration
+ option changed. Fixes bug 8718; bugfix on 0.2.3.3-alpha.
+ - Paste the description for PathBias parameters from the man
+ page into or.h, so the code documents them too. Fixes bug 7982;
+ bugfix on 0.2.3.17-beta and 0.2.4.8-alpha.
+ - Relays now treat a changed IPv6 ORPort as sufficient reason to
+ publish an updated descriptor. Fixes bug 6026; bugfix on
+ 0.2.4.1-alpha.
+ - When launching a resolve request on behalf of an AF_UNIX control
+ socket, omit the address field of the new entry connection, used in
+ subsequent controller events, rather than letting tor_dup_addr()
+ set it to "<unknown address type>". Fixes bug 8639; bugfix on
+ 0.2.4.12-alpha.
+
+ o Minor bugfixes (log messages):
+ - Fix a scaling issue in the path bias accounting code that
+ resulted in "Bug:" log messages from either
+ pathbias_scale_close_rates() or pathbias_count_build_success().
+ This represents a bugfix on a previous bugfix: the original fix
+ attempted in 0.2.4.10-alpha was incomplete. Fixes bug 8235; bugfix
+ on 0.2.4.1-alpha.
+ - Give a less useless error message when the user asks for an IPv4
+ address on an IPv6-only port, or vice versa. Fixes bug 8846; bugfix
+ on 0.2.4.7-alpha.
+
+ o Minor features:
+ - Downgrade "unexpected SENDME" warnings to protocol-warn for 0.2.4.x,
+ to tolerate bug 8093 for now.
+ - Add an "ignoring-advertised-bws" boolean to the flag-threshold lines
+ in directory authority votes to describe whether they have enough
+ measured bandwidths to ignore advertised (relay descriptor)
+ bandwidth claims. Resolves ticket 8711.
+ - Update to the June 5 2013 Maxmind GeoLite Country database.
+
+ o Removed documentation:
+ - Remove some of the older contents of doc/ as obsolete; move others
+ to torspec.git. Fixes bug 8965.
+
+ o Code simplification and refactoring:
+ - Avoid using character buffers when constructing most directory
+ objects: this approach was unwieldy and error-prone. Instead,
+ build smartlists of strings, and concatenate them when done.
+
+
Changes in version 0.2.4.12-alpha - 2013-04-18
Tor 0.2.4.12-alpha moves Tor forward on several fronts: it starts the
process for lengthening the guard rotation period, makes directory

[onionoo/master] Add abstractions for descriptor parsing and document storage.
by karsten@torproject.org 16 Jun '13
16 Jun '13
commit abd10ca8c755af2e8257b4232fb07ef076f858c2
Author: Karsten Loesing <karsten.loesing@gmx.net>
Date: Sat Jun 15 15:11:59 2013 +0200
Add abstractions for descriptor parsing and document storage.
- Add new DescriptorSource class that abstracts away details of
configuring metrics-lib's DescriptorReader and handling parse
histories. This moves descriptor file paths to a single place and
makes it easier to test classes that process descriptors.
- Add new DocumentStore class that abstracts away storing,
retrieving, listing, and deleting files produced as output of
processing descriptors. Allows for easier testing of classes that
store results from parsing descriptors, and prepares exchanging
file system storage with a database.
- Write parse history file to disk after all new descriptors are
processed, rather than have the descriptor reader do that after
providing the last descriptor. Avoids edge cases where processing
breaks and we don't re-process descriptors in the next execution
because we think we already processed them last time.
- When overwriting files, write contents to temporary files, delete
original files, and rename written temporary file to original file.
Minimizes effects of concurrent executions changing the same set of
files.
- Print out statistics of parsed descriptors and document store
activity. Can help figuring out performance bottlenecks in the
future.
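The overwrite behaviour described in the fourth item above, writing contents to a temporary file, deleting the original, and renaming the temporary file into place, is the same pattern the removed writeRelaySearchDataFile() code in the diff below used for the summary file. A minimal sketch of that pattern follows; it is written in Python only to keep the illustration short (Onionoo itself is Java), and the function name is made up for this example:

import os

def overwrite_document(path, contents):
  # Write the new contents to a temporary file next to the original ...
  temp_path = path + '.tmp'
  with open(temp_path, 'w') as temp_file:
    temp_file.write(contents)
  # ... then delete the original and move the finished temporary file into
  # place, so an interrupted or concurrent run never leaves a half-written
  # document behind.
  if os.path.exists(path):
    os.remove(path)
  os.rename(temp_path, path)

On POSIX systems a plain rename over the existing file would already be atomic; deleting first matches the behaviour the commit message describes and also works on platforms where rename refuses to overwrite an existing file.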
---
.../torproject/onionoo/BandwidthDataWriter.java | 205 ++++------
src/org/torproject/onionoo/CurrentNodes.java | 336 +++++++--------
src/org/torproject/onionoo/DescriptorSource.java | 304 ++++++++++++++
src/org/torproject/onionoo/DetailDataWriter.java | 431 ++++++++------------
src/org/torproject/onionoo/DocumentStore.java | 264 ++++++++++++
src/org/torproject/onionoo/Main.java | 62 ++-
src/org/torproject/onionoo/ResourceServlet.java | 127 +++---
src/org/torproject/onionoo/WeightsDataWriter.java | 203 ++++-----
.../torproject/onionoo/ResourceServletTest.java | 7 +-
9 files changed, 1183 insertions(+), 756 deletions(-)
diff --git a/src/org/torproject/onionoo/BandwidthDataWriter.java b/src/org/torproject/onionoo/BandwidthDataWriter.java
index af0fc61..664c050 100644
--- a/src/org/torproject/onionoo/BandwidthDataWriter.java
+++ b/src/org/torproject/onionoo/BandwidthDataWriter.java
@@ -2,18 +2,12 @@
* See LICENSE for licensing information */
package org.torproject.onionoo;
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
-import java.util.Iterator;
import java.util.List;
import java.util.Locale;
+import java.util.Scanner;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TimeZone;
@@ -21,9 +15,6 @@ import java.util.TreeMap;
import java.util.TreeSet;
import org.torproject.descriptor.Descriptor;
-import org.torproject.descriptor.DescriptorFile;
-import org.torproject.descriptor.DescriptorReader;
-import org.torproject.descriptor.DescriptorSourceFactory;
import org.torproject.descriptor.ExtraInfoDescriptor;
/* Write bandwidth data files to disk and delete bandwidth files of relays
@@ -47,6 +38,16 @@ import org.torproject.descriptor.ExtraInfoDescriptor;
* work around. */
public class BandwidthDataWriter {
+ private DescriptorSource descriptorSource;
+
+ private DocumentStore documentStore;
+
+ public BandwidthDataWriter(DescriptorSource descriptorSource,
+ DocumentStore documentStore) {
+ this.descriptorSource = descriptorSource;
+ this.documentStore = documentStore;
+ }
+
private SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
"yyyy-MM-dd HH:mm:ss");
public BandwidthDataWriter() {
@@ -63,27 +64,17 @@ public class BandwidthDataWriter {
}
public void readExtraInfoDescriptors() {
- DescriptorReader reader =
- DescriptorSourceFactory.createDescriptorReader();
- reader.addDirectory(new File("in/relay-descriptors/extra-infos"));
- reader.addDirectory(new File("in/bridge-descriptors/extra-infos"));
- reader.setExcludeFiles(new File("status/extrainfo-history"));
- Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
- while (descriptorFiles.hasNext()) {
- DescriptorFile descriptorFile = descriptorFiles.next();
- if (descriptorFile.getException() != null) {
- System.out.println("Could not parse "
- + descriptorFile.getFileName());
- descriptorFile.getException().printStackTrace();
- }
- if (descriptorFile.getDescriptors() != null) {
- for (Descriptor descriptor : descriptorFile.getDescriptors()) {
- if (descriptor instanceof ExtraInfoDescriptor) {
- ExtraInfoDescriptor extraInfoDescriptor =
- (ExtraInfoDescriptor) descriptor;
- this.parseDescriptor(extraInfoDescriptor);
- }
- }
+ DescriptorQueue descriptorQueue =
+ this.descriptorSource.getDescriptorQueue(
+ new DescriptorType[] { DescriptorType.RELAY_EXTRA_INFOS,
+ DescriptorType.BRIDGE_EXTRA_INFOS },
+ DescriptorHistory.EXTRAINFO_HISTORY);
+ Descriptor descriptor;
+ while ((descriptor = descriptorQueue.nextDescriptor()) != null) {
+ if (descriptor instanceof ExtraInfoDescriptor) {
+ ExtraInfoDescriptor extraInfoDescriptor =
+ (ExtraInfoDescriptor) descriptor;
+ this.parseDescriptor(extraInfoDescriptor);
}
}
}
@@ -140,49 +131,46 @@ public class BandwidthDataWriter {
private void readHistoryFromDisk(String fingerprint,
SortedMap<Long, long[]> writeHistory,
SortedMap<Long, long[]> readHistory) {
- File historyFile = new File(String.format("status/bandwidth/%s/%s/%s",
- fingerprint.substring(0, 1), fingerprint.substring(1, 2),
- fingerprint));
- if (historyFile.exists()) {
- try {
- BufferedReader br = new BufferedReader(new FileReader(
- historyFile));
- String line;
- while ((line = br.readLine()) != null) {
- String[] parts = line.split(" ");
- if (parts.length != 6) {
- System.err.println("Illegal line '" + line + "' in history "
- + "file '" + historyFile.getAbsolutePath()
- + "'. Skipping this line.");
- continue;
- }
- SortedMap<Long, long[]> history = parts[0].equals("r")
- ? readHistory : writeHistory;
- long startMillis = this.dateTimeFormat.parse(parts[1] + " "
- + parts[2]).getTime();
- long endMillis = this.dateTimeFormat.parse(parts[3] + " "
- + parts[4]).getTime();
- long bandwidth = Long.parseLong(parts[5]);
- long previousEndMillis = history.headMap(startMillis).isEmpty()
- ? startMillis
- : history.get(history.headMap(startMillis).lastKey())[1];
- long nextStartMillis = history.tailMap(startMillis).isEmpty()
- ? endMillis : history.tailMap(startMillis).firstKey();
- if (previousEndMillis <= startMillis &&
- nextStartMillis >= endMillis) {
- history.put(startMillis, new long[] { startMillis, endMillis,
- bandwidth });
- }
+ String historyString = this.documentStore.retrieve(
+ DocumentType.STATUS_BANDWIDTH, fingerprint);
+ if (historyString == null) {
+ return;
+ }
+ try {
+ Scanner s = new Scanner(historyString);
+ while (s.hasNextLine()) {
+ String line = s.nextLine();
+ String[] parts = line.split(" ");
+ if (parts.length != 6) {
+ System.err.println("Illegal line '" + line + "' in bandwidth "
+ + "history for fingerprint '" + fingerprint + "'. "
+ + "Skipping this line.");
+ continue;
+ }
+ SortedMap<Long, long[]> history = parts[0].equals("r")
+ ? readHistory : writeHistory;
+ long startMillis = this.dateTimeFormat.parse(parts[1] + " "
+ + parts[2]).getTime();
+ long endMillis = this.dateTimeFormat.parse(parts[3] + " "
+ + parts[4]).getTime();
+ long bandwidth = Long.parseLong(parts[5]);
+ long previousEndMillis = history.headMap(startMillis).isEmpty()
+ ? startMillis
+ : history.get(history.headMap(startMillis).lastKey())[1];
+ long nextStartMillis = history.tailMap(startMillis).isEmpty()
+ ? endMillis : history.tailMap(startMillis).firstKey();
+ if (previousEndMillis <= startMillis &&
+ nextStartMillis >= endMillis) {
+ history.put(startMillis, new long[] { startMillis, endMillis,
+ bandwidth });
}
- br.close();
- } catch (ParseException e) {
- System.err.println("Could not parse timestamp while reading "
- + "history file '" + historyFile.getAbsolutePath()
- + "'. Skipping.");
- } catch (IOException e) {
- System.err.println("Could not read history file '"
- + historyFile.getAbsolutePath() + "'. Skipping.");
}
+ s.close();
+ } catch (ParseException e) {
+ System.err.println("Could not parse timestamp while reading "
+ + "bandwidth history for fingerprint '" + fingerprint + "'. "
+ + "Skipping.");
+ e.printStackTrace();
}
}
@@ -233,30 +221,22 @@ public class BandwidthDataWriter {
private void writeHistoryToDisk(String fingerprint,
SortedMap<Long, long[]> writeHistory,
SortedMap<Long, long[]> readHistory) {
- File historyFile = new File(String.format("status/bandwidth/%s/%s/%s",
- fingerprint.substring(0, 1), fingerprint.substring(1, 2),
- fingerprint));
- try {
- historyFile.getParentFile().mkdirs();
- BufferedWriter bw = new BufferedWriter(new FileWriter(historyFile));
- for (long[] v : writeHistory.values()) {
- bw.write("w " + this.dateTimeFormat.format(v[0]) + " "
- + this.dateTimeFormat.format(v[1]) + " "
- + String.valueOf(v[2]) + "\n");
- }
- for (long[] v : readHistory.values()) {
- bw.write("r " + this.dateTimeFormat.format(v[0]) + " "
- + this.dateTimeFormat.format(v[1]) + " "
- + String.valueOf(v[2]) + "\n");
- }
- bw.close();
- } catch (IOException e) {
- System.err.println("Could not write history file '"
- + historyFile.getAbsolutePath() + "'. Skipping.");
+ StringBuilder sb = new StringBuilder();
+ for (long[] v : writeHistory.values()) {
+ sb.append("w " + this.dateTimeFormat.format(v[0]) + " "
+ + this.dateTimeFormat.format(v[1]) + " "
+ + String.valueOf(v[2]) + "\n");
+ }
+ for (long[] v : readHistory.values()) {
+ sb.append("r " + this.dateTimeFormat.format(v[0]) + " "
+ + this.dateTimeFormat.format(v[1]) + " "
+ + String.valueOf(v[2]) + "\n");
}
+ String historyString = sb.toString();
+ this.documentStore.store(historyString, DocumentType.STATUS_BANDWIDTH,
+ fingerprint);
}
- private File bandwidthFileDirectory = new File("out/bandwidth");
private void writeBandwidthDataFileToDisk(String fingerprint,
SortedMap<Long, long[]> writeHistory,
SortedMap<Long, long[]> readHistory) {
@@ -269,19 +249,13 @@ public class BandwidthDataWriter {
}
String writeHistoryString = formatHistoryString(writeHistory);
String readHistoryString = formatHistoryString(readHistory);
- File bandwidthFile = new File("out/bandwidth", fingerprint);
- try {
- bandwidthFile.getParentFile().mkdirs();
- BufferedWriter bw = new BufferedWriter(new FileWriter(
- bandwidthFile));
- bw.write("{\"fingerprint\":\"" + fingerprint + "\",\n"
- + "\"write_history\":{\n" + writeHistoryString + "},\n"
- + "\"read_history\":{\n" + readHistoryString + "}}\n");
- bw.close();
- } catch (IOException e) {
- System.err.println("Could not write bandwidth data file '"
- + bandwidthFile.getAbsolutePath() + "'. Skipping.");
- }
+ StringBuilder sb = new StringBuilder();
+ sb.append("{\"fingerprint\":\"" + fingerprint + "\",\n"
+ + "\"write_history\":{\n" + writeHistoryString + "},\n"
+ + "\"read_history\":{\n" + readHistoryString + "}}\n");
+ String historyString = sb.toString();
+ this.documentStore.store(historyString, DocumentType.OUT_BANDWIDTH,
+ fingerprint);
}
private String[] graphNames = new String[] {
@@ -402,23 +376,16 @@ public class BandwidthDataWriter {
}
public void deleteObsoleteBandwidthFiles() {
- SortedMap<String, File> obsoleteBandwidthFiles =
- new TreeMap<String, File>();
- if (bandwidthFileDirectory.exists() &&
- bandwidthFileDirectory.isDirectory()) {
- for (File file : bandwidthFileDirectory.listFiles()) {
- if (file.getName().length() == 40) {
- obsoleteBandwidthFiles.put(file.getName(), file);
- }
- }
- }
+ SortedSet<String> obsoleteBandwidthFiles = this.documentStore.list(
+ DocumentType.OUT_BANDWIDTH);
for (String fingerprint : this.currentFingerprints) {
- if (obsoleteBandwidthFiles.containsKey(fingerprint)) {
+ if (obsoleteBandwidthFiles.contains(fingerprint)) {
obsoleteBandwidthFiles.remove(fingerprint);
}
}
- for (File bandwidthFile : obsoleteBandwidthFiles.values()) {
- bandwidthFile.delete();
+ for (String fingerprint : obsoleteBandwidthFiles) {
+ this.documentStore.remove(DocumentType.OUT_BANDWIDTH,
+ fingerprint);
}
}
}
diff --git a/src/org/torproject/onionoo/CurrentNodes.java b/src/org/torproject/onionoo/CurrentNodes.java
index 32d6576..64db6a1 100644
--- a/src/org/torproject/onionoo/CurrentNodes.java
+++ b/src/org/torproject/onionoo/CurrentNodes.java
@@ -3,10 +3,8 @@
package org.torproject.onionoo;
import java.io.BufferedReader;
-import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
-import java.io.FileWriter;
import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
@@ -15,8 +13,8 @@ import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
-import java.util.Iterator;
import java.util.Map;
+import java.util.Scanner;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
@@ -27,9 +25,6 @@ import java.util.regex.Pattern;
import org.torproject.descriptor.BridgeNetworkStatus;
import org.torproject.descriptor.Descriptor;
-import org.torproject.descriptor.DescriptorFile;
-import org.torproject.descriptor.DescriptorReader;
-import org.torproject.descriptor.DescriptorSourceFactory;
import org.torproject.descriptor.NetworkStatusEntry;
import org.torproject.descriptor.RelayNetworkStatusConsensus;
@@ -37,23 +32,44 @@ import org.torproject.descriptor.RelayNetworkStatusConsensus;
* days. */
public class CurrentNodes {
- /* Read the internal relay search data file from disk. */
- public void readRelaySearchDataFile(File summaryFile) {
- if (summaryFile.exists() && !summaryFile.isDirectory()) {
- try {
- BufferedReader br = new BufferedReader(new FileReader(
- summaryFile));
- String line;
- while ((line = br.readLine()) != null) {
- this.parseSummaryFileLine(line);
- }
- br.close();
- } catch (IOException e) {
- System.err.println("I/O error while reading "
- + summaryFile.getAbsolutePath() + ": " + e.getMessage()
- + ". Ignoring.");
- }
+ private DescriptorSource descriptorSource;
+
+ private DocumentStore documentStore;
+
+ /* Initialize an instance for the back-end that is read-only and doesn't
+ * support parsing new descriptor contents. */
+ public CurrentNodes(DocumentStore documentStore) {
+ this(null, documentStore);
+ }
+
+ public CurrentNodes(DescriptorSource descriptorSource,
+ DocumentStore documentStore) {
+ this.descriptorSource = descriptorSource;
+ this.documentStore = documentStore;
+ }
+
+ public void readStatusSummary() {
+ String summaryString = this.documentStore.retrieve(
+ DocumentType.STATUS_SUMMARY);
+ this.initializeFromSummaryString(summaryString);
+ }
+
+ public void readOutSummary() {
+ String summaryString = this.documentStore.retrieve(
+ DocumentType.OUT_SUMMARY);
+ this.initializeFromSummaryString(summaryString);
+ }
+
+ private void initializeFromSummaryString(String summaryString) {
+ if (summaryString == null) {
+ return;
+ }
+ Scanner s = new Scanner(summaryString);
+ while (s.hasNextLine()) {
+ String line = s.nextLine();
+ this.parseSummaryFileLine(line);
}
+ s.close();
}
private void parseSummaryFileLine(String line) {
@@ -169,137 +185,127 @@ public class CurrentNodes {
}
}
- /* Write the internal relay search data file to disk. */
- public void writeRelaySearchDataFile(File summaryFile,
- boolean includeOldNodes) {
- try {
- summaryFile.getParentFile().mkdirs();
- File summaryTempFile = new File(
- summaryFile.getAbsolutePath() + ".tmp");
- BufferedWriter bw = new BufferedWriter(new FileWriter(
- summaryTempFile));
- SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
- "yyyy-MM-dd HH:mm:ss");
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- Collection<Node> relays = includeOldNodes
- ? this.knownRelays.values() : this.getCurrentRelays().values();
- for (Node entry : relays) {
- String nickname = entry.getNickname();
- String fingerprint = entry.getFingerprint();
- String address = entry.getAddress();
- StringBuilder addressesBuilder = new StringBuilder();
- addressesBuilder.append(address + ";");
- int written = 0;
- for (String orAddressAndPort : entry.getOrAddressesAndPorts()) {
- addressesBuilder.append((written++ > 0 ? "+" : "") +
- orAddressAndPort);
- }
- addressesBuilder.append(";");
- written = 0;
- for (String exitAddress : entry.getExitAddresses()) {
- addressesBuilder.append((written++ > 0 ? "+" : "")
- + exitAddress);
- }
- String lastSeen = dateTimeFormat.format(
- entry.getLastSeenMillis());
- String orPort = String.valueOf(entry.getOrPort());
- String dirPort = String.valueOf(entry.getDirPort());
- StringBuilder flagsBuilder = new StringBuilder();
- written = 0;
- for (String relayFlag : entry.getRelayFlags()) {
- flagsBuilder.append((written++ > 0 ? "," : "") + relayFlag);
- }
- String consensusWeight = String.valueOf(
- entry.getConsensusWeight());
- String countryCode = entry.getCountryCode() != null
- ? entry.getCountryCode() : "??";
- String hostName = entry.getHostName() != null
- ? entry.getHostName() : "null";
- long lastRdnsLookup = entry.getLastRdnsLookup();
- String defaultPolicy = entry.getDefaultPolicy() != null
- ? entry.getDefaultPolicy() : "null";
- String portList = entry.getPortList() != null
- ? entry.getPortList() : "null";
- String firstSeen = dateTimeFormat.format(
- entry.getFirstSeenMillis());
- String lastChangedAddresses = dateTimeFormat.format(
- entry.getLastChangedOrAddress());
- String aSNumber = entry.getASNumber() != null
- ? entry.getASNumber() : "null";
- bw.write("r " + nickname + " " + fingerprint + " "
- + addressesBuilder.toString() + " " + lastSeen + " "
- + orPort + " " + dirPort + " " + flagsBuilder.toString() + " "
- + consensusWeight + " " + countryCode + " " + hostName + " "
- + String.valueOf(lastRdnsLookup) + " " + defaultPolicy + " "
- + portList + " " + firstSeen + " " + lastChangedAddresses
- + " " + aSNumber + "\n");
- }
- Collection<Node> bridges = includeOldNodes
- ? this.knownBridges.values()
- : this.getCurrentBridges().values();
- for (Node entry : bridges) {
- String nickname = entry.getNickname();
- String fingerprint = entry.getFingerprint();
- String published = dateTimeFormat.format(
- entry.getLastSeenMillis());
- String address = entry.getAddress();
- StringBuilder addressesBuilder = new StringBuilder();
- addressesBuilder.append(address + ";");
- int written = 0;
- for (String orAddressAndPort : entry.getOrAddressesAndPorts()) {
- addressesBuilder.append((written++ > 0 ? "+" : "") +
- orAddressAndPort);
- }
- addressesBuilder.append(";");
- String orPort = String.valueOf(entry.getOrPort());
- String dirPort = String.valueOf(entry.getDirPort());
- StringBuilder flagsBuilder = new StringBuilder();
- written = 0;
- for (String relayFlag : entry.getRelayFlags()) {
- flagsBuilder.append((written++ > 0 ? "," : "") + relayFlag);
- }
- String firstSeen = dateTimeFormat.format(
- entry.getFirstSeenMillis());
- bw.write("b " + nickname + " " + fingerprint + " "
- + addressesBuilder.toString() + " " + published + " " + orPort
- + " " + dirPort + " " + flagsBuilder.toString()
- + " -1 ?? null -1 null null " + firstSeen + " null null "
- + "null\n");
- }
- bw.close();
- summaryFile.delete();
- summaryTempFile.renameTo(summaryFile);
- } catch (IOException e) {
- System.err.println("Could not write '"
- + summaryFile.getAbsolutePath() + "' to disk. Exiting.");
- e.printStackTrace();
- System.exit(1);
- }
+ public void writeStatusSummary() {
+ String summaryString = this.writeSummaryString(true);
+ this.documentStore.store(summaryString, DocumentType.STATUS_SUMMARY);
+ }
+
+ public void writeOutSummary() {
+ String summaryString = this.writeSummaryString(false);
+ this.documentStore.store(summaryString, DocumentType.OUT_SUMMARY);
+ this.documentStore.store(String.valueOf(System.currentTimeMillis()),
+ DocumentType.OUT_UPDATE);
+ }
+
+ /* Write internal relay search data to a string. */
+ private String writeSummaryString(boolean includeOldNodes) {
+ StringBuilder sb = new StringBuilder();
+ SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
+ "yyyy-MM-dd HH:mm:ss");
+ dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ Collection<Node> relays = includeOldNodes
+ ? this.knownRelays.values() : this.getCurrentRelays().values();
+ for (Node entry : relays) {
+ String nickname = entry.getNickname();
+ String fingerprint = entry.getFingerprint();
+ String address = entry.getAddress();
+ StringBuilder addressesBuilder = new StringBuilder();
+ addressesBuilder.append(address + ";");
+ int written = 0;
+ for (String orAddressAndPort : entry.getOrAddressesAndPorts()) {
+ addressesBuilder.append((written++ > 0 ? "+" : "") +
+ orAddressAndPort);
+ }
+ addressesBuilder.append(";");
+ written = 0;
+ for (String exitAddress : entry.getExitAddresses()) {
+ addressesBuilder.append((written++ > 0 ? "+" : "")
+ + exitAddress);
+ }
+ String lastSeen = dateTimeFormat.format(entry.getLastSeenMillis());
+ String orPort = String.valueOf(entry.getOrPort());
+ String dirPort = String.valueOf(entry.getDirPort());
+ StringBuilder flagsBuilder = new StringBuilder();
+ written = 0;
+ for (String relayFlag : entry.getRelayFlags()) {
+ flagsBuilder.append((written++ > 0 ? "," : "") + relayFlag);
+ }
+ String consensusWeight = String.valueOf(entry.getConsensusWeight());
+ String countryCode = entry.getCountryCode() != null
+ ? entry.getCountryCode() : "??";
+ String hostName = entry.getHostName() != null
+ ? entry.getHostName() : "null";
+ long lastRdnsLookup = entry.getLastRdnsLookup();
+ String defaultPolicy = entry.getDefaultPolicy() != null
+ ? entry.getDefaultPolicy() : "null";
+ String portList = entry.getPortList() != null
+ ? entry.getPortList() : "null";
+ String firstSeen = dateTimeFormat.format(
+ entry.getFirstSeenMillis());
+ String lastChangedAddresses = dateTimeFormat.format(
+ entry.getLastChangedOrAddress());
+ String aSNumber = entry.getASNumber() != null
+ ? entry.getASNumber() : "null";
+ sb.append("r " + nickname + " " + fingerprint + " "
+ + addressesBuilder.toString() + " " + lastSeen + " "
+ + orPort + " " + dirPort + " " + flagsBuilder.toString() + " "
+ + consensusWeight + " " + countryCode + " " + hostName + " "
+ + String.valueOf(lastRdnsLookup) + " " + defaultPolicy + " "
+ + portList + " " + firstSeen + " " + lastChangedAddresses
+ + " " + aSNumber + "\n");
+ }
+ Collection<Node> bridges = includeOldNodes
+ ? this.knownBridges.values() : this.getCurrentBridges().values();
+ for (Node entry : bridges) {
+ String nickname = entry.getNickname();
+ String fingerprint = entry.getFingerprint();
+ String published = dateTimeFormat.format(
+ entry.getLastSeenMillis());
+ String address = entry.getAddress();
+ StringBuilder addressesBuilder = new StringBuilder();
+ addressesBuilder.append(address + ";");
+ int written = 0;
+ for (String orAddressAndPort : entry.getOrAddressesAndPorts()) {
+ addressesBuilder.append((written++ > 0 ? "+" : "") +
+ orAddressAndPort);
+ }
+ addressesBuilder.append(";");
+ String orPort = String.valueOf(entry.getOrPort());
+ String dirPort = String.valueOf(entry.getDirPort());
+ StringBuilder flagsBuilder = new StringBuilder();
+ written = 0;
+ for (String relayFlag : entry.getRelayFlags()) {
+ flagsBuilder.append((written++ > 0 ? "," : "") + relayFlag);
+ }
+ String firstSeen = dateTimeFormat.format(
+ entry.getFirstSeenMillis());
+ sb.append("b " + nickname + " " + fingerprint + " "
+ + addressesBuilder.toString() + " " + published + " " + orPort
+ + " " + dirPort + " " + flagsBuilder.toString()
+ + " -1 ?? null -1 null null " + firstSeen + " null null "
+ + "null\n");
+ }
+ return sb.toString();
}
private long lastValidAfterMillis = 0L;
private long lastPublishedMillis = 0L;
public void readRelayNetworkConsensuses() {
- DescriptorReader reader =
- DescriptorSourceFactory.createDescriptorReader();
- reader.addDirectory(new File("in/relay-descriptors/consensuses"));
- reader.setExcludeFiles(new File("status/relay-consensus-history"));
- Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
- while (descriptorFiles.hasNext()) {
- DescriptorFile descriptorFile = descriptorFiles.next();
- if (descriptorFile.getException() != null) {
- System.out.println("Could not parse "
- + descriptorFile.getFileName());
- descriptorFile.getException().printStackTrace();
- }
- if (descriptorFile.getDescriptors() != null) {
- for (Descriptor descriptor : descriptorFile.getDescriptors()) {
- if (descriptor instanceof RelayNetworkStatusConsensus) {
- updateRelayNetworkStatusConsensus(
- (RelayNetworkStatusConsensus) descriptor);
- }
- }
+ if (this.descriptorSource == null) {
+ System.err.println("Not configured to read relay network "
+ + "consensuses.");
+ return;
+ }
+ DescriptorQueue descriptorQueue =
+ this.descriptorSource.getDescriptorQueue(
+ DescriptorType.RELAY_CONSENSUSES,
+ DescriptorHistory.RELAY_CONSENSUS_HISTORY);
+ Descriptor descriptor;
+ while ((descriptor = descriptorQueue.nextDescriptor()) != null) {
+ if (descriptor instanceof RelayNetworkStatusConsensus) {
+ updateRelayNetworkStatusConsensus(
+ (RelayNetworkStatusConsensus) descriptor);
}
}
}
@@ -407,6 +413,11 @@ public class CurrentNodes {
public void lookUpCitiesAndASes() {
/* Make sure we have all required .csv files. */
+ // TODO Make paths configurable or allow passing file contents as
+ // strings in order to facilitate testing.
+ // TODO Move look-up code to new LookupService class that is
+ // initialized with geoip files, receives a sorted set of addresses,
+ // performs lookups, and returns results to CurrentNodes.
File[] geoLiteCityBlocksCsvFiles = new File[] {
new File("geoip/Manual-GeoLiteCity-Blocks.csv"),
new File("geoip/Automatic-GeoLiteCity-Blocks.csv"),
@@ -744,24 +755,19 @@ public class CurrentNodes {
}
public void readBridgeNetworkStatuses() {
- DescriptorReader reader =
- DescriptorSourceFactory.createDescriptorReader();
- reader.addDirectory(new File("in/bridge-descriptors/statuses"));
- reader.setExcludeFiles(new File("status/bridge-status-history"));
- Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
- while (descriptorFiles.hasNext()) {
- DescriptorFile descriptorFile = descriptorFiles.next();
- if (descriptorFile.getException() != null) {
- System.out.println("Could not parse "
- + descriptorFile.getFileName());
- descriptorFile.getException().printStackTrace();
- }
- if (descriptorFile.getDescriptors() != null) {
- for (Descriptor descriptor : descriptorFile.getDescriptors()) {
- if (descriptor instanceof BridgeNetworkStatus) {
- updateBridgeNetworkStatus((BridgeNetworkStatus) descriptor);
- }
- }
+ if (this.descriptorSource == null) {
+ System.err.println("Not configured to read bridge network "
+ + "statuses.");
+ return;
+ }
+ DescriptorQueue descriptorQueue =
+ this.descriptorSource.getDescriptorQueue(
+ DescriptorType.BRIDGE_STATUSES,
+ DescriptorHistory.BRIDGE_STATUS_HISTORY);
+ Descriptor descriptor;
+ while ((descriptor = descriptorQueue.nextDescriptor()) != null) {
+ if (descriptor instanceof BridgeNetworkStatus) {
+ updateBridgeNetworkStatus((BridgeNetworkStatus) descriptor);
}
}
}
diff --git a/src/org/torproject/onionoo/DescriptorSource.java b/src/org/torproject/onionoo/DescriptorSource.java
new file mode 100644
index 0000000..5936a93
--- /dev/null
+++ b/src/org/torproject/onionoo/DescriptorSource.java
@@ -0,0 +1,304 @@
+package org.torproject.onionoo;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.SortedMap;
+import java.util.TreeMap;
+
+import org.torproject.descriptor.Descriptor;
+import org.torproject.descriptor.DescriptorFile;
+import org.torproject.descriptor.DescriptorReader;
+import org.torproject.descriptor.DescriptorSourceFactory;
+
+enum DescriptorType {
+ RELAY_CONSENSUSES,
+ RELAY_SERVER_DESCRIPTORS,
+ RELAY_EXTRA_INFOS,
+ BRIDGE_STATUSES,
+ BRIDGE_SERVER_DESCRIPTORS,
+ BRIDGE_EXTRA_INFOS,
+ BRIDGE_POOL_ASSIGNMENTS,
+ EXIT_LISTS,
+}
+
+enum DescriptorHistory {
+ EXTRAINFO_HISTORY,
+ EXIT_LIST_HISTORY,
+ BRIDGE_POOLASSIGN_HISTORY,
+ WEIGHTS_RELAY_CONSENSUS_HISTORY,
+ RELAY_CONSENSUS_HISTORY,
+ BRIDGE_STATUS_HISTORY,
+}
+
+class DescriptorQueue {
+
+ private File inDir;
+
+ private File statusDir;
+
+ private DescriptorReader descriptorReader;
+
+ private File historyFile;
+
+ private Iterator<DescriptorFile> descriptorFiles;
+
+ private List<Descriptor> descriptors;
+
+ int historySizeBefore;
+
+ int historySizeAfter;
+
+ long returnedDescriptors = 0L;
+
+ long returnedBytes = 0L;
+
+ public DescriptorQueue(File inDir, File statusDir) {
+ this.inDir = inDir;
+ this.statusDir = statusDir;
+ this.descriptorReader =
+ DescriptorSourceFactory.createDescriptorReader();
+ }
+
+ public void addDirectory(DescriptorType descriptorType) {
+ String directoryName = null;
+ switch (descriptorType) {
+ case RELAY_CONSENSUSES:
+ directoryName = "relay-descriptors/consensuses";
+ break;
+ case RELAY_SERVER_DESCRIPTORS:
+ directoryName = "relay-descriptors/server-descriptors";
+ break;
+ case RELAY_EXTRA_INFOS:
+ directoryName = "relay-descriptors/extra-infos";
+ break;
+ case BRIDGE_STATUSES:
+ directoryName = "bridge-descriptors/statuses";
+ break;
+ case BRIDGE_SERVER_DESCRIPTORS:
+ directoryName = "bridge-descriptors/server-descriptors";
+ break;
+ case BRIDGE_EXTRA_INFOS:
+ directoryName = "bridge-descriptors/extra-infos";
+ break;
+ case BRIDGE_POOL_ASSIGNMENTS:
+ directoryName = "bridge-pool-assignments";
+ break;
+ case EXIT_LISTS:
+ directoryName = "exit-lists";
+ break;
+ default:
+ System.err.println("Unknown descriptor type. Not adding directory "
+ + "to descriptor reader.");
+ return;
+ }
+ this.descriptorReader.addDirectory(new File(this.inDir,
+ directoryName));
+ }
+
+ public void readHistoryFile(DescriptorHistory descriptorHistory) {
+ String historyFileName = null;
+ switch (descriptorHistory) {
+ case EXTRAINFO_HISTORY:
+ historyFileName = "extrainfo-history";
+ break;
+ case EXIT_LIST_HISTORY:
+ historyFileName = "exit-list-history";
+ break;
+ case BRIDGE_POOLASSIGN_HISTORY:
+ historyFileName = "bridge-poolassign-history";
+ break;
+ case WEIGHTS_RELAY_CONSENSUS_HISTORY:
+ historyFileName = "weights-relay-consensus-history";
+ break;
+ case RELAY_CONSENSUS_HISTORY:
+ historyFileName = "relay-consensus-history";
+ break;
+ case BRIDGE_STATUS_HISTORY:
+ historyFileName = "bridge-status-history";
+ break;
+ default:
+ System.err.println("Unknown descriptor history. Not excluding "
+ + "files.");
+ return;
+ }
+ this.historyFile = new File(this.statusDir, historyFileName);
+ if (this.historyFile.exists()) {
+ SortedMap<String, Long> excludedFiles = new TreeMap<String, Long>();
+ try {
+ BufferedReader br = new BufferedReader(new FileReader(
+ this.historyFile));
+ String line;
+ while ((line = br.readLine()) != null) {
+ try {
+ String[] parts = line.split(" ", 2);
+ excludedFiles.put(parts[1], Long.parseLong(parts[0]));
+ } catch (NumberFormatException e) {
+ System.err.println("Illegal line '" + line + "' in parse "
+ + "history. Skipping line.");
+ }
+ }
+ br.close();
+ } catch (IOException e) {
+ System.err.println("Could not read history file '"
+ + this.historyFile.getAbsolutePath() + "'. Not excluding "
+ + "descriptors in this execution.");
+ e.printStackTrace();
+ return;
+ }
+ this.historySizeBefore = excludedFiles.size();
+ this.descriptorReader.setExcludedFiles(excludedFiles);
+ }
+ }
+
+ public void writeHistoryFile() {
+ if (this.historyFile == null) {
+ return;
+ }
+ SortedMap<String, Long> excludedAndParsedFiles =
+ new TreeMap<String, Long>();
+ excludedAndParsedFiles.putAll(
+ this.descriptorReader.getExcludedFiles());
+ excludedAndParsedFiles.putAll(this.descriptorReader.getParsedFiles());
+ this.historySizeAfter = excludedAndParsedFiles.size();
+ try {
+ this.historyFile.getParentFile().mkdirs();
+ BufferedWriter bw = new BufferedWriter(new FileWriter(
+ this.historyFile));
+ for (Map.Entry<String, Long> e : excludedAndParsedFiles.entrySet()) {
+ String absolutePath = e.getKey();
+ long lastModifiedMillis = e.getValue();
+ bw.write(String.valueOf(lastModifiedMillis) + " " + absolutePath
+ + "\n");
+ }
+ bw.close();
+ } catch (IOException e) {
+ System.err.println("Could not write history file '"
+ + this.historyFile.getAbsolutePath() + "'. Not excluding "
+ + "descriptors in next execution.");
+ return;
+ }
+ }
+
+ public Descriptor nextDescriptor() {
+ Descriptor nextDescriptor = null;
+ if (this.descriptorFiles == null) {
+ this.descriptorFiles = this.descriptorReader.readDescriptors();
+ }
+ while (this.descriptors == null && this.descriptorFiles.hasNext()) {
+ DescriptorFile descriptorFile = this.descriptorFiles.next();
+ if (descriptorFile.getException() != null) {
+ System.err.println("Could not parse "
+ + descriptorFile.getFileName());
+ descriptorFile.getException().printStackTrace();
+ }
+ if (descriptorFile.getDescriptors() != null &&
+ !descriptorFile.getDescriptors().isEmpty()) {
+ this.descriptors = descriptorFile.getDescriptors();
+ }
+ }
+ if (this.descriptors != null) {
+ nextDescriptor = this.descriptors.remove(0);
+ this.returnedDescriptors++;
+ this.returnedBytes += nextDescriptor.getRawDescriptorBytes().length;
+ if (this.descriptors.isEmpty()) {
+ this.descriptors = null;
+ }
+ }
+ return nextDescriptor;
+ }
+}
+
+public class DescriptorSource {
+
+ private File inDir;
+
+ private File statusDir;
+
+ private List<DescriptorQueue> descriptorQueues;
+
+ public DescriptorSource(File inDir, File statusDir) {
+ this.inDir = inDir;
+ this.statusDir = statusDir;
+ this.descriptorQueues = new ArrayList<DescriptorQueue>();
+ }
+
+ public DescriptorQueue getDescriptorQueue(
+ DescriptorType descriptorType) {
+ DescriptorQueue descriptorQueue = new DescriptorQueue(this.inDir,
+ this.statusDir);
+ descriptorQueue.addDirectory(descriptorType);
+ this.descriptorQueues.add(descriptorQueue);
+ return descriptorQueue;
+ }
+
+ public DescriptorQueue getDescriptorQueue(
+ DescriptorType[] descriptorTypes,
+ DescriptorHistory descriptorHistory) {
+ DescriptorQueue descriptorQueue = new DescriptorQueue(this.inDir,
+ this.statusDir);
+ for (DescriptorType descriptorType : descriptorTypes) {
+ descriptorQueue.addDirectory(descriptorType);
+ }
+ descriptorQueue.readHistoryFile(descriptorHistory);
+ this.descriptorQueues.add(descriptorQueue);
+ return descriptorQueue;
+ }
+
+ public DescriptorQueue getDescriptorQueue(DescriptorType descriptorType,
+ DescriptorHistory descriptorHistory) {
+ DescriptorQueue descriptorQueue = new DescriptorQueue(this.inDir,
+ this.statusDir);
+ descriptorQueue.addDirectory(descriptorType);
+ descriptorQueue.readHistoryFile(descriptorHistory);
+ this.descriptorQueues.add(descriptorQueue);
+ return descriptorQueue;
+ }
+
+ public void writeHistoryFiles() {
+ for (DescriptorQueue descriptorQueue : this.descriptorQueues) {
+ descriptorQueue.writeHistoryFile();
+ }
+ }
+
+ public String getStatsString() {
+ StringBuilder sb = new StringBuilder();
+ sb.append(" " + this.descriptorQueues.size() + " descriptor "
+ + "queues created\n");
+ int historySizeBefore = 0, historySizeAfter = 0;
+ long descriptors = 0L, bytes = 0L;
+ for (DescriptorQueue descriptorQueue : descriptorQueues) {
+ historySizeBefore += descriptorQueue.historySizeBefore;
+ historySizeAfter += descriptorQueue.historySizeAfter;
+ descriptors += descriptorQueue.returnedDescriptors;
+ bytes += descriptorQueue.returnedBytes;
+ }
+ sb.append(" " + String.format("%,d", historySizeBefore)
+ + " descriptors excluded from this execution\n");
+ sb.append(" " + String.format("%,d", descriptors)
+ + " descriptors provided\n");
+ sb.append(" " + formatBytes(bytes) + " provided\n");
+ sb.append(" " + String.format("%,d", historySizeAfter)
+ + " descriptors excluded from next execution\n");
+ return sb.toString();
+ }
+
+ // TODO This method should go into a utility class.
+ private static String formatBytes(long bytes) {
+ if (bytes < 1024) {
+ return bytes + " B";
+ } else {
+ int exp = (int) (Math.log(bytes) / Math.log(1024));
+ return String.format("%.1f %siB", bytes / Math.pow(1024, exp),
+ "KMGTPE".charAt(exp-1));
+ }
+ }
+}
+
diff --git a/src/org/torproject/onionoo/DetailDataWriter.java b/src/org/torproject/onionoo/DetailDataWriter.java
index 8239de9..e95a919 100644
--- a/src/org/torproject/onionoo/DetailDataWriter.java
+++ b/src/org/torproject/onionoo/DetailDataWriter.java
@@ -2,12 +2,6 @@
* See LICENSE for licensing information */
package org.torproject.onionoo;
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.text.ParseException;
@@ -16,9 +10,9 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
-import java.util.Iterator;
import java.util.List;
import java.util.Map;
+import java.util.Scanner;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
@@ -30,9 +24,6 @@ import org.apache.commons.lang.StringEscapeUtils;
import org.torproject.descriptor.BridgePoolAssignment;
import org.torproject.descriptor.Descriptor;
-import org.torproject.descriptor.DescriptorFile;
-import org.torproject.descriptor.DescriptorReader;
-import org.torproject.descriptor.DescriptorSourceFactory;
import org.torproject.descriptor.ExitList;
import org.torproject.descriptor.ExitListEntry;
import org.torproject.descriptor.ServerDescriptor;
@@ -45,6 +36,16 @@ import org.torproject.descriptor.ServerDescriptor;
* descriptor that was last referenced in a network status. */
public class DetailDataWriter {
+ private DescriptorSource descriptorSource;
+
+ private DocumentStore documentStore;
+
+ public DetailDataWriter(DescriptorSource descriptorSource,
+ DocumentStore documentStore) {
+ this.descriptorSource = descriptorSource;
+ this.documentStore = documentStore;
+ }
+
private SortedMap<String, Node> relays;
public void setCurrentRelays(SortedMap<String, Node> currentRelays) {
this.relays = currentRelays;
@@ -164,37 +165,25 @@ public class DetailDataWriter {
private Map<String, ServerDescriptor> relayServerDescriptors =
new HashMap<String, ServerDescriptor>();
public void readRelayServerDescriptors() {
- DescriptorReader reader =
- DescriptorSourceFactory.createDescriptorReader();
- reader.addDirectory(new File(
- "in/relay-descriptors/server-descriptors"));
/* Don't remember which server descriptors we already parsed. If we
* parse a server descriptor now and first learn about the relay in a
* later consensus, we'll never write the descriptor content anywhere.
* The result would be details files containing no descriptor parts
* until the relay publishes the next descriptor. */
- Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
- while (descriptorFiles.hasNext()) {
- DescriptorFile descriptorFile = descriptorFiles.next();
- if (descriptorFile.getException() != null) {
- System.out.println("Could not parse "
- + descriptorFile.getFileName());
- descriptorFile.getException().printStackTrace();
- }
- if (descriptorFile.getDescriptors() != null) {
- for (Descriptor descriptor : descriptorFile.getDescriptors()) {
- if (descriptor instanceof ServerDescriptor) {
- ServerDescriptor serverDescriptor =
- (ServerDescriptor) descriptor;
- String fingerprint = serverDescriptor.getFingerprint();
- if (!this.relayServerDescriptors.containsKey(fingerprint) ||
- this.relayServerDescriptors.get(fingerprint).
- getPublishedMillis()
- < serverDescriptor.getPublishedMillis()) {
- this.relayServerDescriptors.put(fingerprint,
- serverDescriptor);
- }
- }
+ DescriptorQueue descriptorQueue =
+ this.descriptorSource.getDescriptorQueue(
+ DescriptorType.RELAY_SERVER_DESCRIPTORS);
+ Descriptor descriptor;
+ while ((descriptor = descriptorQueue.nextDescriptor()) != null) {
+ if (descriptor instanceof ServerDescriptor) {
+ ServerDescriptor serverDescriptor = (ServerDescriptor) descriptor;
+ String fingerprint = serverDescriptor.getFingerprint();
+ if (!this.relayServerDescriptors.containsKey(fingerprint) ||
+ this.relayServerDescriptors.get(fingerprint).
+ getPublishedMillis()
+ < serverDescriptor.getPublishedMillis()) {
+ this.relayServerDescriptors.put(fingerprint,
+ serverDescriptor);
}
}
}
@@ -318,37 +307,25 @@ public class DetailDataWriter {
private Map<String, Set<ExitListEntry>> exitListEntries =
new HashMap<String, Set<ExitListEntry>>();
public void readExitLists() {
- DescriptorReader reader =
- DescriptorSourceFactory.createDescriptorReader();
- reader.addDirectory(new File(
- "in/exit-lists"));
- reader.setExcludeFiles(new File("status/exit-list-history"));
- Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
- while (descriptorFiles.hasNext()) {
- DescriptorFile descriptorFile = descriptorFiles.next();
- if (descriptorFile.getException() != null) {
- System.out.println("Could not parse "
- + descriptorFile.getFileName());
- descriptorFile.getException().printStackTrace();
- }
- if (descriptorFile.getDescriptors() != null) {
- for (Descriptor descriptor : descriptorFile.getDescriptors()) {
- if (descriptor instanceof ExitList) {
- ExitList exitList = (ExitList) descriptor;
- for (ExitListEntry exitListEntry :
- exitList.getExitListEntries()) {
- if (exitListEntry.getScanMillis() <
- this.now - 24L * 60L * 60L * 1000L) {
- continue;
- }
- String fingerprint = exitListEntry.getFingerprint();
- if (!this.exitListEntries.containsKey(fingerprint)) {
- this.exitListEntries.put(fingerprint,
- new HashSet<ExitListEntry>());
- }
- this.exitListEntries.get(fingerprint).add(exitListEntry);
- }
+ DescriptorQueue descriptorQueue =
+ this.descriptorSource.getDescriptorQueue(
+ DescriptorType.EXIT_LISTS, DescriptorHistory.EXIT_LIST_HISTORY);
+ Descriptor descriptor;
+ while ((descriptor = descriptorQueue.nextDescriptor()) != null) {
+ if (descriptor instanceof ExitList) {
+ ExitList exitList = (ExitList) descriptor;
+ for (ExitListEntry exitListEntry :
+ exitList.getExitListEntries()) {
+ if (exitListEntry.getScanMillis() <
+ this.now - 24L * 60L * 60L * 1000L) {
+ continue;
+ }
+ String fingerprint = exitListEntry.getFingerprint();
+ if (!this.exitListEntries.containsKey(fingerprint)) {
+ this.exitListEntries.put(fingerprint,
+ new HashSet<ExitListEntry>());
}
+ this.exitListEntries.get(fingerprint).add(exitListEntry);
}
}
}
@@ -357,37 +334,25 @@ public class DetailDataWriter {
private Map<String, ServerDescriptor> bridgeServerDescriptors =
new HashMap<String, ServerDescriptor>();
public void readBridgeServerDescriptors() {
- DescriptorReader reader =
- DescriptorSourceFactory.createDescriptorReader();
- reader.addDirectory(new File(
- "in/bridge-descriptors/server-descriptors"));
/* Don't remember which server descriptors we already parsed. If we
* parse a server descriptor now and first learn about the relay in a
* later status, we'll never write the descriptor content anywhere.
* The result would be details files containing no descriptor parts
* until the bridge publishes the next descriptor. */
- Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
- while (descriptorFiles.hasNext()) {
- DescriptorFile descriptorFile = descriptorFiles.next();
- if (descriptorFile.getException() != null) {
- System.out.println("Could not parse "
- + descriptorFile.getFileName());
- descriptorFile.getException().printStackTrace();
- }
- if (descriptorFile.getDescriptors() != null) {
- for (Descriptor descriptor : descriptorFile.getDescriptors()) {
- if (descriptor instanceof ServerDescriptor) {
- ServerDescriptor serverDescriptor =
- (ServerDescriptor) descriptor;
- String fingerprint = serverDescriptor.getFingerprint();
- if (!this.bridgeServerDescriptors.containsKey(fingerprint) ||
- this.bridgeServerDescriptors.get(fingerprint).
- getPublishedMillis()
- < serverDescriptor.getPublishedMillis()) {
- this.bridgeServerDescriptors.put(fingerprint,
- serverDescriptor);
- }
- }
+ DescriptorQueue descriptorQueue =
+ this.descriptorSource.getDescriptorQueue(
+ DescriptorType.BRIDGE_SERVER_DESCRIPTORS);
+ Descriptor descriptor;
+ while ((descriptor = descriptorQueue.nextDescriptor()) != null) {
+ if (descriptor instanceof ServerDescriptor) {
+ ServerDescriptor serverDescriptor = (ServerDescriptor) descriptor;
+ String fingerprint = serverDescriptor.getFingerprint();
+ if (!this.bridgeServerDescriptors.containsKey(fingerprint) ||
+ this.bridgeServerDescriptors.get(fingerprint).
+ getPublishedMillis()
+ < serverDescriptor.getPublishedMillis()) {
+ this.bridgeServerDescriptors.put(fingerprint,
+ serverDescriptor);
}
}
}
@@ -396,67 +361,40 @@ public class DetailDataWriter {
private Map<String, String> bridgePoolAssignments =
new HashMap<String, String>();
public void readBridgePoolAssignments() {
- DescriptorReader reader =
- DescriptorSourceFactory.createDescriptorReader();
- reader.addDirectory(new File("in/bridge-pool-assignments"));
- reader.setExcludeFiles(new File("status/bridge-poolassign-history"));
- Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
- while (descriptorFiles.hasNext()) {
- DescriptorFile descriptorFile = descriptorFiles.next();
- if (descriptorFile.getException() != null) {
- System.out.println("Could not parse "
- + descriptorFile.getFileName());
- descriptorFile.getException().printStackTrace();
- }
- if (descriptorFile.getDescriptors() != null) {
- for (Descriptor descriptor : descriptorFile.getDescriptors()) {
- if (descriptor instanceof BridgePoolAssignment) {
- BridgePoolAssignment bridgePoolAssignment =
- (BridgePoolAssignment) descriptor;
- for (Map.Entry<String, String> e :
- bridgePoolAssignment.getEntries().entrySet()) {
- String fingerprint = e.getKey();
- String details = e.getValue();
- this.bridgePoolAssignments.put(fingerprint, details);
- }
- }
+ DescriptorQueue descriptorQueue =
+ this.descriptorSource.getDescriptorQueue(
+ DescriptorType.BRIDGE_POOL_ASSIGNMENTS,
+ DescriptorHistory.BRIDGE_POOLASSIGN_HISTORY);
+ Descriptor descriptor;
+ while ((descriptor = descriptorQueue.nextDescriptor()) != null) {
+ if (descriptor instanceof BridgePoolAssignment) {
+ BridgePoolAssignment bridgePoolAssignment =
+ (BridgePoolAssignment) descriptor;
+ for (Map.Entry<String, String> e :
+ bridgePoolAssignment.getEntries().entrySet()) {
+ String fingerprint = e.getKey();
+ String details = e.getValue();
+ this.bridgePoolAssignments.put(fingerprint, details);
}
}
}
}
- public void writeDetailDataFiles() {
- SortedMap<String, File> remainingDetailsFiles =
- this.listAllDetailsFiles();
- remainingDetailsFiles = this.updateRelayDetailsFiles(
- remainingDetailsFiles);
- remainingDetailsFiles = this.updateBridgeDetailsFiles(
- remainingDetailsFiles);
+ public void writeOutDetails() {
+ SortedSet<String> remainingDetailsFiles = new TreeSet<String>();
+ remainingDetailsFiles.addAll(this.documentStore.list(
+ DocumentType.OUT_DETAILS));
+ this.updateRelayDetailsFiles(remainingDetailsFiles);
+ this.updateBridgeDetailsFiles(remainingDetailsFiles);
this.deleteDetailsFiles(remainingDetailsFiles);
}
- private File detailsFileDirectory = new File("out/details");
- private SortedMap<String, File> listAllDetailsFiles() {
- SortedMap<String, File> result = new TreeMap<String, File>();
- if (detailsFileDirectory.exists() &&
- detailsFileDirectory.isDirectory()) {
- for (File file : detailsFileDirectory.listFiles()) {
- if (file.getName().length() == 40) {
- result.put(file.getName(), file);
- }
- }
- }
- return result;
- }
-
private static String escapeJSON(String s) {
return StringEscapeUtils.escapeJavaScript(s).replaceAll("\\\\'", "'");
}
- private SortedMap<String, File> updateRelayDetailsFiles(
- SortedMap<String, File> remainingDetailsFiles) {
- SortedMap<String, File> result =
- new TreeMap<String, File>(remainingDetailsFiles);
+ private void updateRelayDetailsFiles(
+ SortedSet<String> remainingDetailsFiles) {
SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
"yyyy-MM-dd HH:mm:ss");
dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
@@ -466,42 +404,40 @@ public class DetailDataWriter {
/* Read details file for this relay if it exists. */
String descriptorParts = null;
long publishedMillis = -1L;
- if (result.containsKey(fingerprint)) {
- File detailsFile = result.remove(fingerprint);
- try {
- BufferedReader br = new BufferedReader(new FileReader(
- detailsFile));
- String line;
- boolean copyDescriptorParts = false;
- StringBuilder sb = new StringBuilder();
- while ((line = br.readLine()) != null) {
- if (line.startsWith("\"desc_published\":")) {
- String published = line.substring(
- "\"desc_published\":\"".length(),
- "\"desc_published\":\"1970-01-01 00:00:00".length());
- publishedMillis = dateTimeFormat.parse(published).getTime();
- copyDescriptorParts = true;
+ if (remainingDetailsFiles.contains(fingerprint)) {
+ remainingDetailsFiles.remove(fingerprint);
+ String documentString = this.documentStore.retrieve(
+ DocumentType.OUT_DETAILS, fingerprint);
+ if (documentString != null) {
+ try {
+ boolean copyDescriptorParts = false;
+ StringBuilder sb = new StringBuilder();
+ Scanner s = new Scanner(documentString);
+ while (s.hasNextLine()) {
+ String line = s.nextLine();
+ if (line.startsWith("\"desc_published\":")) {
+ String published = line.substring(
+ "\"desc_published\":\"".length(),
+ "\"desc_published\":\"1970-01-01 00:00:00".length());
+ publishedMillis = dateTimeFormat.parse(published).
+ getTime();
+ copyDescriptorParts = true;
+ }
+ if (copyDescriptorParts) {
+ sb.append(line + "\n");
+ }
}
- if (copyDescriptorParts) {
- sb.append(line + "\n");
+ s.close();
+ if (sb.length() > 0) {
+ descriptorParts = sb.toString();
}
+ } catch (ParseException e) {
+ System.err.println("Could not parse timestamp in details.json "
+ + "file for '" + fingerprint + "'. Ignoring.");
+ e.printStackTrace();
+ publishedMillis = -1L;
+ descriptorParts = null;
}
- br.close();
- if (sb.length() > 0) {
- descriptorParts = sb.toString();
- }
- } catch (IOException e) {
- System.err.println("Could not read '"
- + detailsFile.getAbsolutePath() + "'. Skipping");
- e.printStackTrace();
- publishedMillis = -1L;
- descriptorParts = null;
- } catch (ParseException e) {
- System.err.println("Could not read '"
- + detailsFile.getAbsolutePath() + "'. Skipping");
- e.printStackTrace();
- publishedMillis = -1L;
- descriptorParts = null;
}
}
@@ -708,37 +644,23 @@ public class DetailDataWriter {
}
sb.append("]");
}
- String statusParts = sb.toString();
- /* Write details file to disk. */
- File detailsFile = new File(detailsFileDirectory, fingerprint);
- try {
- detailsFile.getParentFile().mkdirs();
- BufferedWriter bw = new BufferedWriter(new FileWriter(
- detailsFile));
- bw.write(statusParts);
- if (descriptorParts != null) {
- bw.write(",\n" + descriptorParts);
- } else {
- bw.write("\n}\n");
- }
- bw.close();
- } catch (IOException e) {
- System.err.println("Could not write details file '"
- + detailsFile.getAbsolutePath() + "'. This file may now be "
- + "broken. Ignoring.");
- e.printStackTrace();
+ /* Add descriptor parts. */
+ if (descriptorParts != null) {
+ sb.append(",\n" + descriptorParts);
+ } else {
+ sb.append("\n}\n");
}
- }
- /* Return the files that we didn't update. */
- return result;
+ /* Write details file to disk. */
+ String detailsLines = sb.toString();
+ this.documentStore.store(detailsLines, DocumentType.OUT_DETAILS,
+ fingerprint);
+ }
}
- private SortedMap<String, File> updateBridgeDetailsFiles(
- SortedMap<String, File> remainingDetailsFiles) {
- SortedMap<String, File> result =
- new TreeMap<String, File>(remainingDetailsFiles);
+ private void updateBridgeDetailsFiles(
+ SortedSet<String> remainingDetailsFiles) {
SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
"yyyy-MM-dd HH:mm:ss");
dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
@@ -748,52 +670,51 @@ public class DetailDataWriter {
/* Read details file for this bridge if it exists. */
String descriptorParts = null, bridgePoolAssignment = null;
long publishedMillis = -1L;
- if (result.containsKey(fingerprint)) {
- File detailsFile = result.remove(fingerprint);
- try {
- BufferedReader br = new BufferedReader(new FileReader(
- detailsFile));
- String line;
+ if (remainingDetailsFiles.contains(fingerprint)) {
+ remainingDetailsFiles.remove(fingerprint);
+ String documentString = this.documentStore.retrieve(
+ DocumentType.OUT_DETAILS, fingerprint);
+ if (documentString != null) {
+ try {
boolean copyDescriptorParts = false;
- StringBuilder sb = new StringBuilder();
- while ((line = br.readLine()) != null) {
- if (line.startsWith("\"desc_published\":")) {
- String published = line.substring(
- "\"desc_published\":\"".length(),
- "\"desc_published\":\"1970-01-01 00:00:00".length());
- publishedMillis = dateTimeFormat.parse(published).getTime();
- copyDescriptorParts = true;
- } else if (line.startsWith("\"pool_assignment\":")) {
- bridgePoolAssignment = line;
- copyDescriptorParts = false;
- } else if (line.equals("}")) {
- copyDescriptorParts = false;
+ StringBuilder sb = new StringBuilder();
+ Scanner s = new Scanner(documentString);
+ while (s.hasNextLine()) {
+ String line = s.nextLine();
+ if (line.startsWith("\"desc_published\":")) {
+ String published = line.substring(
+ "\"desc_published\":\"".length(),
+ "\"desc_published\":\"1970-01-01 00:00:00".length());
+ publishedMillis = dateTimeFormat.parse(published).
+ getTime();
+ copyDescriptorParts = true;
+ } else if (line.startsWith("\"pool_assignment\":")) {
+ bridgePoolAssignment = line;
+ copyDescriptorParts = false;
+ } else if (line.equals("}")) {
+ copyDescriptorParts = false;
+ }
+ if (copyDescriptorParts) {
+ sb.append(line + "\n");
+ }
}
- if (copyDescriptorParts) {
- sb.append(line + "\n");
+ s.close();
+ descriptorParts = sb.toString();
+ if (descriptorParts.endsWith(",\n")) {
+ descriptorParts = descriptorParts.substring(0,
+ descriptorParts.length() - 2);
+ } else if (descriptorParts.endsWith("\n")) {
+ descriptorParts = descriptorParts.substring(0,
+ descriptorParts.length() - 1);
}
+ } catch (ParseException e) {
+ System.err.println("Could not parse timestamp in "
+ + "details.json file for '" + fingerprint + "'. "
+ + "Ignoring.");
+ e.printStackTrace();
+ publishedMillis = -1L;
+ descriptorParts = null;
}
- br.close();
- descriptorParts = sb.toString();
- if (descriptorParts.endsWith(",\n")) {
- descriptorParts = descriptorParts.substring(0,
- descriptorParts.length() - 2);
- } else if (descriptorParts.endsWith("\n")) {
- descriptorParts = descriptorParts.substring(0,
- descriptorParts.length() - 1);
- }
- } catch (IOException e) {
- System.err.println("Could not read '"
- + detailsFile.getAbsolutePath() + "'. Skipping");
- e.printStackTrace();
- publishedMillis = -1L;
- descriptorParts = null;
- } catch (ParseException e) {
- System.err.println("Could not read '"
- + detailsFile.getAbsolutePath() + "'. Skipping");
- e.printStackTrace();
- publishedMillis = -1L;
- descriptorParts = null;
}
}
@@ -871,32 +792,18 @@ public class DetailDataWriter {
sb.append(",\n" + bridgePoolAssignment);
}
sb.append("\n}\n");
- String detailsLines = sb.toString();
/* Write details file to disk. */
- File detailsFile = new File(detailsFileDirectory, fingerprint);
- try {
- detailsFile.getParentFile().mkdirs();
- BufferedWriter bw = new BufferedWriter(new FileWriter(
- detailsFile));
- bw.write(detailsLines);
- bw.close();
- } catch (IOException e) {
- System.err.println("Could not write details file '"
- + detailsFile.getAbsolutePath() + "'. This file may now be "
- + "broken. Ignoring.");
- e.printStackTrace();
- }
+ String detailsLines = sb.toString();
+ this.documentStore.store(detailsLines, DocumentType.OUT_DETAILS,
+ fingerprint);
}
-
- /* Return the files that we didn't update. */
- return result;
}
private void deleteDetailsFiles(
- SortedMap<String, File> remainingDetailsFiles) {
- for (File detailsFile : remainingDetailsFiles.values()) {
- detailsFile.delete();
+ SortedSet<String> remainingDetailsFiles) {
+ for (String fingerprint : remainingDetailsFiles) {
+ this.documentStore.remove(DocumentType.OUT_DETAILS, fingerprint);
}
}
}
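
All of the read*() methods above now follow one pattern: ask the shared DescriptorSource for a DescriptorQueue of a given DescriptorType (optionally paired with a DescriptorHistory file so already-parsed descriptors are skipped) and drain it with nextDescriptor(). A minimal sketch of that pattern follows; it assumes the DescriptorSource, DescriptorQueue and DescriptorType classes introduced elsewhere in this patch, so it only compiles inside the org.torproject.onionoo package and is illustrative rather than authoritative:

import org.torproject.descriptor.Descriptor;
import org.torproject.descriptor.ServerDescriptor;

public class QueuePatternSketch {
  /* Drain one descriptor queue and print relay fingerprints. */
  static void printServerDescriptorFingerprints(
      DescriptorSource descriptorSource) {
    DescriptorQueue descriptorQueue =
        descriptorSource.getDescriptorQueue(
        DescriptorType.RELAY_SERVER_DESCRIPTORS);
    Descriptor descriptor;
    while ((descriptor = descriptorQueue.nextDescriptor()) != null) {
      if (descriptor instanceof ServerDescriptor) {
        System.out.println(
            ((ServerDescriptor) descriptor).getFingerprint());
      }
    }
  }
}
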
diff --git a/src/org/torproject/onionoo/DocumentStore.java b/src/org/torproject/onionoo/DocumentStore.java
new file mode 100644
index 0000000..c1097a0
--- /dev/null
+++ b/src/org/torproject/onionoo/DocumentStore.java
@@ -0,0 +1,264 @@
+/* Copyright 2013 The Tor Project
+ * See LICENSE for licensing information */
+
+package org.torproject.onionoo;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.SortedSet;
+import java.util.Stack;
+import java.util.TreeSet;
+
+enum DocumentType {
+ STATUS_SUMMARY,
+ STATUS_BANDWIDTH,
+ STATUS_WEIGHTS,
+ OUT_UPDATE,
+ OUT_SUMMARY,
+ OUT_DETAILS,
+ OUT_BANDWIDTH,
+ OUT_WEIGHTS;
+}
+
+// TODO For later migration from disk to database, do the following:
+// - read from database and then from disk if not found
+// - write only to database, delete from disk once in database
+// - move entirely to database once disk is "empty"
+// TODO Also look into simple key-value stores instead of real databases.
+public class DocumentStore {
+
+ private File statusDir;
+
+ private File outDir;
+
+ long listOperations = 0L, listedFiles = 0L, storedFiles = 0L,
+ storedBytes = 0L, retrievedFiles = 0L, retrievedBytes = 0L,
+ removedFiles = 0L;
+
+ public DocumentStore(File outDir) {
+ this.outDir = outDir;
+ }
+
+ public DocumentStore(File statusDir, File outDir) {
+ this.statusDir = statusDir;
+ this.outDir = outDir;
+ }
+
+ public SortedSet<String> list(DocumentType documentType) {
+ SortedSet<String> fingerprints = new TreeSet<String>();
+ File directory = null;
+ String subdirectory = null;
+ switch (documentType) {
+ case STATUS_BANDWIDTH:
+ directory = this.statusDir;
+ subdirectory = "bandwidth";
+ break;
+ case STATUS_WEIGHTS:
+ directory = this.statusDir;
+ subdirectory = "weights";
+ break;
+ case OUT_DETAILS:
+ directory = this.outDir;
+ subdirectory = "details";
+ break;
+ case OUT_BANDWIDTH:
+ directory = this.outDir;
+ subdirectory = "bandwidth";
+ break;
+ case OUT_WEIGHTS:
+ directory = this.outDir;
+ break;
+ default:
+ break;
+ }
+ if (directory != null && subdirectory != null) {
+ Stack<File> files = new Stack<File>();
+ files.add(new File(directory, subdirectory));
+ while (!files.isEmpty()) {
+ File file = files.pop();
+ if (file.isDirectory()) {
+ files.addAll(Arrays.asList(file.listFiles()));
+ } else if (file.getName().length() == 40) {
+ fingerprints.add(file.getName());
+ }
+ }
+ }
+ this.listOperations++;
+ this.listedFiles += fingerprints.size();
+ return fingerprints;
+ }
+
+ public boolean store(String documentString, DocumentType documentType) {
+ return this.store(documentString, documentType, null);
+ }
+
+ public boolean store(String documentString, DocumentType documentType,
+ String fingerprint) {
+ File documentFile = this.getDocumentFile(documentType, fingerprint);
+ if (documentFile == null) {
+ return false;
+ }
+ try {
+ documentFile.getParentFile().mkdirs();
+ File documentTempFile = new File(
+ documentFile.getAbsolutePath() + ".tmp");
+ BufferedWriter bw = new BufferedWriter(new FileWriter(
+ documentTempFile));
+ bw.write(documentString);
+ bw.close();
+ documentFile.delete();
+ documentTempFile.renameTo(documentFile);
+ this.storedFiles++;
+ this.storedBytes += documentString.length();
+ } catch (IOException e) {
+ System.err.println("Could not write file '"
+ + documentFile.getAbsolutePath() + "'.");
+ e.printStackTrace();
+ return false;
+ }
+ return true;
+ }
+
+ public String retrieve(DocumentType documentType) {
+ return this.retrieve(documentType, null);
+ }
+
+ public String retrieve(DocumentType documentType, String fingerprint) {
+ File documentFile = this.getDocumentFile(documentType, fingerprint);
+ if (documentFile == null || !documentFile.exists()) {
+ return null;
+ } else if (documentFile.isDirectory()) {
+ System.err.println("Could not read file '"
+ + documentFile.getAbsolutePath() + "', because it is a "
+ + "directory.");
+ return null;
+ }
+ try {
+ BufferedReader br = new BufferedReader(new FileReader(
+ documentFile));
+ StringBuilder sb = new StringBuilder();
+ String line;
+ while ((line = br.readLine()) != null) {
+ sb.append(line + "\n");
+ }
+ br.close();
+ this.retrievedFiles++;
+ this.retrievedBytes += sb.length();
+ return sb.toString();
+ } catch (IOException e) {
+ System.err.println("Could not read file '"
+ + documentFile.getAbsolutePath() + "'.");
+ e.printStackTrace();
+ return null;
+ }
+ }
+
+ public boolean remove(DocumentType documentType) {
+ return this.remove(documentType, null);
+ }
+
+ public boolean remove(DocumentType documentType, String fingerprint) {
+ File documentFile = this.getDocumentFile(documentType, fingerprint);
+ if (documentFile == null || !documentFile.delete()) {
+ System.err.println("Could not delete file '"
+ + documentFile.getAbsolutePath() + "'.");
+ return false;
+ }
+ this.removedFiles++;
+ return true;
+ }
+
+ private File getDocumentFile(DocumentType documentType,
+ String fingerprint) {
+ File documentFile = null;
+ if (fingerprint == null && !(
+ documentType == DocumentType.STATUS_SUMMARY ||
+ documentType == DocumentType.OUT_UPDATE||
+ documentType == DocumentType.OUT_SUMMARY)) {
+ return null;
+ }
+ File directory = null;
+ String fileName = null;
+ switch (documentType) {
+ case STATUS_SUMMARY:
+ directory = this.statusDir;
+ fileName = "summary";
+ break;
+ case STATUS_BANDWIDTH:
+ directory = this.statusDir;
+ fileName = String.format("bandwidth/%s/%s/%s",
+ fingerprint.substring(0, 1), fingerprint.substring(1, 2),
+ fingerprint);
+ break;
+ case STATUS_WEIGHTS:
+ directory = this.statusDir;
+ fileName = String.format("weights/%s/%s/%s",
+ fingerprint.substring(0, 1), fingerprint.substring(1, 2),
+ fingerprint);
+ break;
+ case OUT_UPDATE:
+ directory = this.outDir;
+ fileName = "update";
+ break;
+ case OUT_SUMMARY:
+ directory = this.outDir;
+ fileName = "summary";
+ break;
+ case OUT_DETAILS:
+ directory = this.outDir;
+ fileName = String.format("details/%s", fingerprint);
+ break;
+ case OUT_BANDWIDTH:
+ directory = this.outDir;
+ fileName = String.format("bandwidth/%s", fingerprint);
+ break;
+ case OUT_WEIGHTS:
+ directory = this.outDir;
+ fileName = String.format("weights/%s", fingerprint);
+ break;
+ }
+ if (directory != null && fileName != null) {
+ documentFile = new File(directory, fileName);
+ }
+ return documentFile;
+ }
+
+ public String getStatsString() {
+ StringBuilder sb = new StringBuilder();
+ sb.append(" " + formatDecimalNumber(listOperations)
+ + " list operations performed\n");
+ sb.append(" " + formatDecimalNumber(listedFiles)
+ + " files listed\n");
+ sb.append(" " + formatDecimalNumber(storedFiles)
+ + " files stored\n");
+ sb.append(" " + formatBytes(storedBytes) + " stored\n");
+ sb.append(" " + formatDecimalNumber(retrievedFiles)
+ + " files retrieved\n");
+ sb.append(" " + formatBytes(retrievedBytes) + " retrieved\n");
+ sb.append(" " + formatDecimalNumber(removedFiles)
+ + " files removed\n");
+ return sb.toString();
+ }
+
+ //TODO This method should go into a utility class.
+ private static String formatDecimalNumber(long decimalNumber) {
+ return String.format("%,d", decimalNumber);
+ }
+
+ // TODO This method should go into a utility class.
+ private static String formatBytes(long bytes) {
+ if (bytes < 1024) {
+ return bytes + " B";
+ } else {
+ int exp = (int) (Math.log(bytes) / Math.log(1024));
+ return String.format("%.1f %siB", bytes / Math.pow(1024, exp),
+ "KMGTPE".charAt(exp-1));
+ }
+ }
+}
+
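For reference, the new DocumentStore boils down to four calls, list(), store(), retrieve() and remove(), keyed by a DocumentType and, for per-relay documents, a 40-character fingerprint. A minimal usage sketch under the same directory layout as Main (DocumentStoreDemo and the all-zero fingerprint are made up; the class lives in the org.torproject.onionoo package, so this is illustration only):

import java.io.File;
import java.util.SortedSet;

public class DocumentStoreDemo {
  public static void main(String[] args) {
    /* "status" holds internal state, "out" holds documents served by
     * the ResourceServlet, matching the directories used in Main. */
    DocumentStore documentStore = new DocumentStore(new File("status"),
        new File("out"));
    String fingerprint = "0000000000000000000000000000000000000000";
    documentStore.store("{\"fingerprint\":\"" + fingerprint + "\"}\n",
        DocumentType.OUT_DETAILS, fingerprint);
    String details = documentStore.retrieve(DocumentType.OUT_DETAILS,
        fingerprint);
    SortedSet<String> storedFingerprints = documentStore.list(
        DocumentType.OUT_DETAILS);
    documentStore.remove(DocumentType.OUT_DETAILS, fingerprint);
    System.out.println(storedFingerprints.size()
        + " details documents listed; retrieved:\n" + details);
  }
}
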
diff --git a/src/org/torproject/onionoo/Main.java b/src/org/torproject/onionoo/Main.java
index e83e4b5..f636b5e 100644
--- a/src/org/torproject/onionoo/Main.java
+++ b/src/org/torproject/onionoo/Main.java
@@ -9,10 +9,20 @@ import java.util.Date;
public class Main {
public static void main(String[] args) {
+ printStatus("Initializing descriptor source.");
+ DescriptorSource dso = new DescriptorSource(new File("in"),
+ new File("status"));
+ printStatusTime("Initialized descriptor source");
+
+ printStatus("Initializing document store.");
+ DocumentStore ds = new DocumentStore(new File("status"),
+ new File("out"));
+ printStatusTime("Initialized document store");
+
printStatus("Updating internal node list.");
- CurrentNodes cn = new CurrentNodes();
- cn.readRelaySearchDataFile(new File("status/summary"));
- printStatusTime("Read status/summary");
+ CurrentNodes cn = new CurrentNodes(dso, ds);
+ cn.readStatusSummary();
+ printStatusTime("Read status summary");
cn.readRelayNetworkConsensuses();
printStatusTime("Read network status consensuses");
cn.setRelayRunningBits();
@@ -23,11 +33,12 @@ public class Main {
printStatusTime("Read bridge network statuses");
cn.setBridgeRunningBits();
printStatusTime("Set bridge running bits");
- cn.writeRelaySearchDataFile(new File("status/summary"), true);
- printStatusTime("Wrote status/summary");
+ cn.writeStatusSummary();
+ printStatusTime("Wrote status summary");
+ // TODO Could write statistics here, too.
printStatus("Updating detail data.");
- DetailDataWriter ddw = new DetailDataWriter();
+ DetailDataWriter ddw = new DetailDataWriter(dso, ds);
ddw.setCurrentRelays(cn.getCurrentRelays());
printStatusTime("Set current relays");
ddw.setCurrentBridges(cn.getCurrentBridges());
@@ -46,11 +57,12 @@ public class Main {
printStatusTime("Read bridge-pool assignments");
ddw.finishReverseDomainNameLookups();
printStatusTime("Finished reverse domain name lookups");
- ddw.writeDetailDataFiles();
+ ddw.writeOutDetails();
printStatusTime("Wrote detail data files");
+ // TODO Could write statistics here, too.
printStatus("Updating bandwidth data.");
- BandwidthDataWriter bdw = new BandwidthDataWriter();
+ BandwidthDataWriter bdw = new BandwidthDataWriter(dso, ds);
bdw.setCurrentRelays(cn.getCurrentRelays());
printStatusTime("Set current relays");
bdw.setCurrentBridges(cn.getCurrentBridges());
@@ -59,9 +71,10 @@ public class Main {
printStatusTime("Read extra-info descriptors");
bdw.deleteObsoleteBandwidthFiles();
printStatusTime("Deleted obsolete bandwidth files");
+ // TODO Could write statistics here, too.
printStatus("Updating weights data.");
- WeightsDataWriter wdw = new WeightsDataWriter();
+ WeightsDataWriter wdw = new WeightsDataWriter(dso, ds);
wdw.setCurrentRelays(cn.getCurrentRelays());
printStatusTime("Set current relays");
wdw.readRelayServerDescriptors();
@@ -72,10 +85,22 @@ public class Main {
printStatusTime("Wrote weights data files");
wdw.deleteObsoleteWeightsDataFiles();
printStatusTime("Deleted obsolete weights files");
+ // TODO Could write statistics here, too.
printStatus("Updating summary data.");
- cn.writeRelaySearchDataFile(new File("out/summary"), false);
- printStatusTime("Wrote out/summary");
+ cn.writeOutSummary();
+ printStatusTime("Wrote out summary");
+ // TODO Could write statistics here, too.
+
+ printStatus("Shutting down descriptor source.");
+ dso.writeHistoryFiles();
+ printStatusTime("Wrote parse histories");
+ printStatistics(dso.getStatsString());
+ printStatusTime("Shut down descriptor source");
+
+ printStatus("Shutting down document store.");
+ printStatistics(ds.getStatsString());
+ printStatusTime("Shut down document store");
printStatus("Terminating.");
}
@@ -88,11 +113,22 @@ public class Main {
printedLastStatusMessage = System.currentTimeMillis();
}
+ private static void printStatistics(String message) {
+ System.out.print(" Statistics:\n" + message);
+ }
+
private static void printStatusTime(String message) {
long now = System.currentTimeMillis();
- System.out.println(" " + message + " ("
- + (now - printedLastStatusMessage) + " millis).");
+ long millis = now - printedLastStatusMessage;
+ System.out.println(" " + message + " (" + formatMillis(millis)
+ + ").");
printedLastStatusMessage = now;
}
+
+ // TODO This method should go into a utility class.
+ private static String formatMillis(long millis) {
+ return String.format("%02d:%02d.%03d minutes",
+ millis / (1000L * 60L), (millis / 1000L) % 60L, millis % 1000L);
+ }
}
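
The new formatMillis() helper turns elapsed milliseconds into a minutes:seconds.millis string. A quick sketch of the expected output, reusing the same format string (FormatMillisDemo is made up for illustration):

public class FormatMillisDemo {
  /* Same format string as formatMillis() in Main. */
  static String formatMillis(long millis) {
    return String.format("%02d:%02d.%03d minutes",
        millis / (1000L * 60L), (millis / 1000L) % 60L, millis % 1000L);
  }
  public static void main(String[] args) {
    System.out.println(formatMillis(823L));   /* 00:00.823 minutes */
    System.out.println(formatMillis(75250L)); /* 01:15.250 minutes */
  }
}
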
diff --git a/src/org/torproject/onionoo/ResourceServlet.java b/src/org/torproject/onionoo/ResourceServlet.java
index 75c2ac4..3ad33ae 100644
--- a/src/org/torproject/onionoo/ResourceServlet.java
+++ b/src/org/torproject/onionoo/ResourceServlet.java
@@ -2,9 +2,7 @@
* See LICENSE for licensing information */
package org.torproject.onionoo;
-import java.io.BufferedReader;
import java.io.File;
-import java.io.FileReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.text.SimpleDateFormat;
@@ -15,12 +13,11 @@ import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
+import java.util.Scanner;
import java.util.Set;
import java.util.SortedMap;
-import java.util.SortedSet;
import java.util.TimeZone;
import java.util.TreeMap;
-import java.util.TreeSet;
import java.util.regex.Pattern;
import javax.servlet.ServletConfig;
@@ -35,7 +32,7 @@ public class ResourceServlet extends HttpServlet {
private boolean maintenanceMode = false;
- private File outDir;
+ private DocumentStore documentStore;
public void init(ServletConfig config) throws ServletException {
super.init(config);
@@ -48,7 +45,7 @@ public class ResourceServlet extends HttpServlet {
protected void init(boolean maintenanceMode, File outDir) {
this.maintenanceMode = maintenanceMode;
- this.outDir = outDir;
+ this.documentStore = new DocumentStore(outDir);
if (!maintenanceMode) {
this.readSummaryFile();
}
@@ -66,13 +63,24 @@ public class ResourceServlet extends HttpServlet {
bridgesByFirstSeenDays = null, relaysByLastSeenDays = null,
bridgesByLastSeenDays = null;
private void readSummaryFile() {
- File summaryFile = new File(outDir, "summary");
- if (!summaryFile.exists()) {
- readSummaryFile = false;
+ long summaryFileLastModified = -1L;
+ String updateString = this.documentStore.retrieve(
+ DocumentType.OUT_UPDATE);
+ if (updateString != null) {
+ try {
+ summaryFileLastModified = Long.parseLong(updateString.trim());
+ } catch (NumberFormatException e) {
+ /* Handle below. */
+ }
+ }
+ if (summaryFileLastModified < 0L) {
+ // TODO Does this actually solve anything? Should we instead
+ // switch to a variant of the maintenance mode and re-check when
+ // the next request comes in that happens x seconds after this one?
+ this.readSummaryFile = false;
return;
}
- if (summaryFile.lastModified() > this.summaryFileLastModified) {
- long summaryFileLastModified = summaryFile.lastModified();
+ if (summaryFileLastModified > this.summaryFileLastModified) {
List<String> relaysByConsensusWeight = new ArrayList<String>();
Map<String, String>
relayFingerprintSummaryLines = new HashMap<String, String>(),
@@ -86,8 +94,11 @@ public class ResourceServlet extends HttpServlet {
bridgesByFirstSeenDays = new TreeMap<Integer, Set<String>>(),
relaysByLastSeenDays = new TreeMap<Integer, Set<String>>(),
bridgesByLastSeenDays = new TreeMap<Integer, Set<String>>();
- CurrentNodes cn = new CurrentNodes();
- cn.readRelaySearchDataFile(summaryFile);
+ CurrentNodes cn = new CurrentNodes(this.documentStore);
+ cn.readOutSummary();
+ // TODO We should be able to learn if something goes wrong when
+ // reading the summary file, rather than silently having an empty
+ // CurrentNodes instance.
cn.setRelayRunningBits();
cn.setBridgeRunningBits();
SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
@@ -197,7 +208,7 @@ public class ResourceServlet extends HttpServlet {
this.bridgesByFirstSeenDays = bridgesByFirstSeenDays;
this.bridgesByLastSeenDays = bridgesByLastSeenDays;
}
- this.summaryFileLastModified = summaryFile.lastModified();
+ this.summaryFileLastModified = summaryFileLastModified;
this.readSummaryFile = true;
}
@@ -910,37 +921,38 @@ public class ResourceServlet extends HttpServlet {
return "";
}
fingerprint = fingerprint.substring(0, 40);
- File detailsFile = new File(this.outDir, "details/" + fingerprint);
+ String documentString = this.documentStore.retrieve(
+ DocumentType.OUT_DETAILS, fingerprint);
StringBuilder sb = new StringBuilder();
String detailsLines = null;
- if (detailsFile.exists()) {
- try {
- BufferedReader br = new BufferedReader(new FileReader(
- detailsFile));
- String line = br.readLine();
- if (line != null) {
- sb.append("{");
- while ((line = br.readLine()) != null) {
- if (line.equals("}")) {
- sb.append("}\n");
- break;
- } else if (!line.startsWith("\"desc_published\":")) {
- sb.append(line + "\n");
- }
- }
- }
- br.close();
- detailsLines = sb.toString();
- if (detailsLines.length() > 1) {
- detailsLines = detailsLines.substring(0,
- detailsLines.length() - 1);
+ if (documentString != null) {
+ Scanner s = new Scanner(documentString);
+ sb.append("{");
+ if (s.hasNextLine()) {
+ /* Skip version line. */
+ s.nextLine();
+ }
+ while (s.hasNextLine()) {
+ String line = s.nextLine();
+ if (line.equals("}")) {
+ sb.append("}\n");
+ break;
+ } else if (!line.startsWith("\"desc_published\":")) {
+ sb.append(line + "\n");
}
- } catch (IOException e) {
+ }
+ s.close();
+ detailsLines = sb.toString();
+ if (detailsLines.length() > 1) {
+ detailsLines = detailsLines.substring(0,
+ detailsLines.length() - 1);
}
}
if (detailsLines != null) {
return detailsLines;
} else {
+ // TODO We should probably log that we didn't find a details
+ // document that we expected to exist.
return "";
}
}
@@ -957,28 +969,15 @@ public class ResourceServlet extends HttpServlet {
return "";
}
fingerprint = fingerprint.substring(0, 40);
- File bandwidthFile = new File(this.outDir, "bandwidth/"
- + fingerprint);
- StringBuilder sb = new StringBuilder();
- String bandwidthLines = null;
- if (bandwidthFile.exists()) {
- try {
- BufferedReader br = new BufferedReader(new FileReader(
- bandwidthFile));
- String line;
- while ((line = br.readLine()) != null) {
- sb.append(line + "\n");
- }
- br.close();
- bandwidthLines = sb.toString();
- } catch (IOException e) {
- }
- }
+ String bandwidthLines = this.documentStore.retrieve(
+ DocumentType.OUT_BANDWIDTH, fingerprint);
if (bandwidthLines != null) {
bandwidthLines = bandwidthLines.substring(0,
bandwidthLines.length() - 1);
return bandwidthLines;
} else {
+ // TODO We should probably log that we didn't find a bandwidth
+ // document that we expected to exist.
return "";
}
}
@@ -992,26 +991,14 @@ public class ResourceServlet extends HttpServlet {
return "";
}
fingerprint = fingerprint.substring(0, 40);
- File weightsFile = new File(this.outDir, "weights/" + fingerprint);
- StringBuilder sb = new StringBuilder();
- String weightsLines = null;
- if (weightsFile.exists()) {
- try {
- BufferedReader br = new BufferedReader(new FileReader(
- weightsFile));
- String line;
- while ((line = br.readLine()) != null) {
- sb.append(line + "\n");
- }
- br.close();
- weightsLines = sb.toString();
- } catch (IOException e) {
- }
- }
+ String weightsLines = this.documentStore.retrieve(
+ DocumentType.OUT_WEIGHTS, fingerprint);
if (weightsLines != null) {
weightsLines = weightsLines.substring(0, weightsLines.length() - 1);
return weightsLines;
} else {
+ // TODO We should probably log that we didn't find a weights
+ // document that we expected to exist.
return "";
}
}
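
The servlet no longer trusts the summary file's modification time; it reads the out/update document, which holds the last run time as epoch milliseconds in plain text (the test change further below writes it the same way). A minimal sketch of producing and re-reading that document, assuming a DocumentStore pointed at the out directory (UpdateDocumentDemo is made up for illustration):

import java.io.File;

public class UpdateDocumentDemo {
  public static void main(String[] args) {
    DocumentStore documentStore = new DocumentStore(new File("out"));
    /* Write the timestamp as plain text, then parse it back the way
     * readSummaryFile() does. */
    documentStore.store(String.valueOf(System.currentTimeMillis()) + "\n",
        DocumentType.OUT_UPDATE);
    String updateString = documentStore.retrieve(DocumentType.OUT_UPDATE);
    long summaryFileLastModified = Long.parseLong(updateString.trim());
    System.out.println("out/update says the last run finished at "
        + summaryFileLastModified + " ms since the epoch.");
  }
}
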
diff --git a/src/org/torproject/onionoo/WeightsDataWriter.java b/src/org/torproject/onionoo/WeightsDataWriter.java
index 97e37ed..de9ad42 100644
--- a/src/org/torproject/onionoo/WeightsDataWriter.java
+++ b/src/org/torproject/onionoo/WeightsDataWriter.java
@@ -2,22 +2,16 @@
* See LICENSE for licensing information */
package org.torproject.onionoo;
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
-import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
+import java.util.Scanner;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TimeZone;
@@ -25,15 +19,22 @@ import java.util.TreeMap;
import java.util.TreeSet;
import org.torproject.descriptor.Descriptor;
-import org.torproject.descriptor.DescriptorFile;
-import org.torproject.descriptor.DescriptorReader;
-import org.torproject.descriptor.DescriptorSourceFactory;
import org.torproject.descriptor.NetworkStatusEntry;
import org.torproject.descriptor.RelayNetworkStatusConsensus;
import org.torproject.descriptor.ServerDescriptor;
public class WeightsDataWriter {
+ private DescriptorSource descriptorSource;
+
+ private DocumentStore documentStore;
+
+ public WeightsDataWriter(DescriptorSource descriptorSource,
+ DocumentStore documentStore) {
+ this.descriptorSource = descriptorSource;
+ this.documentStore = documentStore;
+ }
+
private SortedSet<String> currentFingerprints = new TreeSet<String>();
public void setCurrentRelays(SortedMap<String, Node> currentRelays) {
this.currentFingerprints.addAll(currentRelays.keySet());
@@ -47,63 +48,41 @@ public class WeightsDataWriter {
private Map<String, Integer> advertisedBandwidths =
new HashMap<String, Integer>();
public void readRelayServerDescriptors() {
- DescriptorReader reader =
- DescriptorSourceFactory.createDescriptorReader();
- reader.addDirectory(new File(
- "in/relay-descriptors/server-descriptors"));
- Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
- while (descriptorFiles.hasNext()) {
- DescriptorFile descriptorFile = descriptorFiles.next();
- if (descriptorFile.getException() != null) {
- System.out.println("Could not parse "
- + descriptorFile.getFileName());
- descriptorFile.getException().printStackTrace();
- }
- if (descriptorFile.getDescriptors() != null) {
- for (Descriptor descriptor : descriptorFile.getDescriptors()) {
- if (descriptor instanceof ServerDescriptor) {
- ServerDescriptor serverDescriptor =
- (ServerDescriptor) descriptor;
- String digest = serverDescriptor.getServerDescriptorDigest().
- toUpperCase();
- int advertisedBandwidth = Math.min(Math.min(
- serverDescriptor.getBandwidthBurst(),
- serverDescriptor.getBandwidthObserved()),
- serverDescriptor.getBandwidthRate());
- this.advertisedBandwidths.put(digest, advertisedBandwidth);
- }
- }
+ DescriptorQueue descriptorQueue =
+ this.descriptorSource.getDescriptorQueue(
+ DescriptorType.RELAY_SERVER_DESCRIPTORS);
+ Descriptor descriptor;
+ while ((descriptor = descriptorQueue.nextDescriptor()) != null) {
+ if (descriptor instanceof ServerDescriptor) {
+ ServerDescriptor serverDescriptor =
+ (ServerDescriptor) descriptor;
+ String digest = serverDescriptor.getServerDescriptorDigest().
+ toUpperCase();
+ int advertisedBandwidth = Math.min(Math.min(
+ serverDescriptor.getBandwidthBurst(),
+ serverDescriptor.getBandwidthObserved()),
+ serverDescriptor.getBandwidthRate());
+ this.advertisedBandwidths.put(digest, advertisedBandwidth);
}
}
}
public void readRelayNetworkConsensuses() {
- DescriptorReader reader =
- DescriptorSourceFactory.createDescriptorReader();
- reader.addDirectory(new File("in/relay-descriptors/consensuses"));
- reader.setExcludeFiles(new File(
- "status/weights-relay-consensus-history"));
- Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
- while (descriptorFiles.hasNext()) {
- DescriptorFile descriptorFile = descriptorFiles.next();
- if (descriptorFile.getException() != null) {
- System.out.println("Could not parse "
- + descriptorFile.getFileName());
- descriptorFile.getException().printStackTrace();
- }
- if (descriptorFile.getDescriptors() != null) {
- for (Descriptor descriptor : descriptorFile.getDescriptors()) {
- if (descriptor instanceof RelayNetworkStatusConsensus) {
- RelayNetworkStatusConsensus consensus =
- (RelayNetworkStatusConsensus) descriptor;
- long validAfterMillis = consensus.getValidAfterMillis(),
- freshUntilMillis = consensus.getFreshUntilMillis();
- SortedMap<String, double[]> pathSelectionWeights =
- this.calculatePathSelectionProbabilities(consensus);
- this.updateWeightsHistory(validAfterMillis, freshUntilMillis,
- pathSelectionWeights);
- }
- }
+ DescriptorQueue descriptorQueue =
+ this.descriptorSource.getDescriptorQueue(
+ DescriptorType.RELAY_CONSENSUSES,
+ DescriptorHistory.WEIGHTS_RELAY_CONSENSUS_HISTORY);
+ Descriptor descriptor;
+ while ((descriptor = descriptorQueue.nextDescriptor()) != null) {
+ if (descriptor instanceof RelayNetworkStatusConsensus) {
+ RelayNetworkStatusConsensus consensus =
+ (RelayNetworkStatusConsensus) descriptor;
+ long validAfterMillis = consensus.getValidAfterMillis(),
+ freshUntilMillis = consensus.getFreshUntilMillis();
+ SortedMap<String, double[]> pathSelectionWeights =
+ this.calculatePathSelectionProbabilities(consensus);
+ this.updateWeightsHistory(validAfterMillis, freshUntilMillis,
+ pathSelectionWeights);
}
}
}
@@ -286,24 +265,22 @@ public class WeightsDataWriter {
return a[0] < b[0] ? -1 : a[0] > b[0] ? 1 : 0;
}
});
- File historyFile = new File(String.format("status/weights/%s/%s/%s",
- fingerprint.substring(0, 1), fingerprint.substring(1, 2),
- fingerprint));
- if (historyFile.exists()) {
+ String historyString = this.documentStore.retrieve(
+ DocumentType.STATUS_WEIGHTS, fingerprint);
+ if (historyString != null) {
SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
"yyyy-MM-dd HH:mm:ss");
dateTimeFormat.setLenient(false);
dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
try {
- BufferedReader br = new BufferedReader(new FileReader(
- historyFile));
- String line;
- while ((line = br.readLine()) != null) {
+ Scanner s = new Scanner(historyString);
+ while (s.hasNextLine()) {
+ String line = s.nextLine();
String[] parts = line.split(" ");
if (parts.length != 9) {
- System.err.println("Illegal line '" + line + "' in history "
- + "file '" + historyFile.getAbsolutePath()
- + "'. Skipping this line.");
+ System.err.println("Illegal line '" + line + "' in weights "
+ + "history for fingerprint '" + fingerprint + "'. "
+ + "Skipping this line.");
continue;
}
long validAfterMillis = dateTimeFormat.parse(parts[0]
@@ -320,14 +297,12 @@ public class WeightsDataWriter {
Double.parseDouble(parts[8]) };
history.put(interval, weights);
}
- br.close();
+ s.close();
} catch (ParseException e) {
System.err.println("Could not parse timestamp while reading "
- + "history file '" + historyFile.getAbsolutePath()
- + "'. Skipping.");
- } catch (IOException e) {
- System.err.println("Could not read history file '"
- + historyFile.getAbsolutePath() + "'. Skipping.");
+ + "weights history for fingerprint '" + fingerprint + "'. "
+ + "Skipping.");
+ e.printStackTrace();
}
}
return history;
@@ -389,33 +364,25 @@ public class WeightsDataWriter {
private void writeHistoryToDisk(String fingerprint,
SortedMap<long[], double[]> history) {
- File historyFile = new File(String.format("status/weights/%s/%s/%s",
- fingerprint.substring(0, 1), fingerprint.substring(1, 2),
- fingerprint));
- try {
- SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
- "yyyy-MM-dd HH:mm:ss");
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- historyFile.getParentFile().mkdirs();
- BufferedWriter bw = new BufferedWriter(new FileWriter(historyFile));
- for (Map.Entry<long[], double[]> e : history.entrySet()) {
- long[] fresh = e.getKey();
- double[] weights = e.getValue();
- bw.write(dateTimeFormat.format(fresh[0]) + " "
- + dateTimeFormat.format(fresh[1]));
- for (double weight : weights) {
- bw.write(String.format(" %.12f", weight));
- }
- bw.write("\n");
+ StringBuilder sb = new StringBuilder();
+ SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
+ "yyyy-MM-dd HH:mm:ss");
+ dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ for (Map.Entry<long[], double[]> e : history.entrySet()) {
+ long[] fresh = e.getKey();
+ double[] weights = e.getValue();
+ sb.append(dateTimeFormat.format(fresh[0]) + " "
+ + dateTimeFormat.format(fresh[1]));
+ for (double weight : weights) {
+ sb.append(String.format(" %.12f", weight));
}
- bw.close();
- } catch (IOException e) {
- System.err.println("Could not write weights file '"
- + historyFile.getAbsolutePath() + "'. Skipping.");
+ sb.append("\n");
}
+ String historyString = sb.toString();
+ this.documentStore.store(historyString, DocumentType.STATUS_WEIGHTS,
+ fingerprint);
}
- private File weightsFileDirectory = new File("out/weights");
public void writeWeightsDataFiles() {
for (String fingerprint : this.currentFingerprints) {
SortedMap<long[], double[]> history =
@@ -427,17 +394,8 @@ public class WeightsDataWriter {
}
String historyString = this.formatHistoryString(fingerprint,
history);
- File weightsFile = new File(weightsFileDirectory, fingerprint);
- try {
- weightsFile.getParentFile().mkdirs();
- BufferedWriter bw = new BufferedWriter(new FileWriter(
- weightsFile));
- bw.write(historyString);
- bw.close();
- } catch (IOException e) {
- System.err.println("Could not write weights data file '"
- + weightsFile.getAbsolutePath() + "'. Skipping.");
- }
+ this.documentStore.store(historyString, DocumentType.OUT_WEIGHTS,
+ fingerprint);
}
}
@@ -591,23 +549,16 @@ public class WeightsDataWriter {
}
public void deleteObsoleteWeightsDataFiles() {
- SortedMap<String, File> obsoleteWeightsFiles =
- new TreeMap<String, File>();
- if (weightsFileDirectory.exists() &&
- weightsFileDirectory.isDirectory()) {
- for (File file : weightsFileDirectory.listFiles()) {
- if (file.getName().length() == 40) {
- obsoleteWeightsFiles.put(file.getName(), file);
- }
- }
- }
+ SortedSet<String> obsoleteWeightsFiles;
+ obsoleteWeightsFiles = this.documentStore.list(
+ DocumentType.OUT_WEIGHTS);
for (String fingerprint : this.currentFingerprints) {
- if (obsoleteWeightsFiles.containsKey(fingerprint)) {
+ if (obsoleteWeightsFiles.contains(fingerprint)) {
obsoleteWeightsFiles.remove(fingerprint);
}
}
- for (File weightsFile : obsoleteWeightsFiles.values()) {
- weightsFile.delete();
+ for (String fingerprint : obsoleteWeightsFiles) {
+ this.documentStore.remove(DocumentType.OUT_WEIGHTS, fingerprint);
}
}
}
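
Each line of the status/weights history handled above is plain space-separated text: the interval's valid-after and fresh-until timestamps in UTC followed by the weight values at twelve decimal places, which is where the parts.length == 9 check in the reader comes from (four timestamp tokens plus five weights). A sketch producing one such line with made-up weight values (WeightsHistoryLineDemo is illustration only; output shown for an English default locale):

import java.text.SimpleDateFormat;
import java.util.TimeZone;

public class WeightsHistoryLineDemo {
  public static void main(String[] args) {
    SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
        "yyyy-MM-dd HH:mm:ss");
    dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
    long validAfterMillis = 1371384000000L;  /* 2013-06-16 12:00:00 UTC */
    long freshUntilMillis = validAfterMillis + 60L * 60L * 1000L;
    double[] weights = { 0.000123456789, 0.000123456789, 0.000123456789,
        0.000123456789, 0.000123456789 };    /* made-up values */
    StringBuilder sb = new StringBuilder();
    sb.append(dateTimeFormat.format(validAfterMillis) + " "
        + dateTimeFormat.format(freshUntilMillis));
    for (double weight : weights) {
      sb.append(String.format(" %.12f", weight));
    }
    /* 2013-06-16 12:00:00 2013-06-16 13:00:00 0.000123456789 ... */
    System.out.println(sb.toString());
  }
}
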
diff --git a/test/org/torproject/onionoo/ResourceServletTest.java b/test/org/torproject/onionoo/ResourceServletTest.java
index e3e831c..5be2fba 100644
--- a/test/org/torproject/onionoo/ResourceServletTest.java
+++ b/test/org/torproject/onionoo/ResourceServletTest.java
@@ -155,6 +155,8 @@ public class ResourceServletTest {
}
}
+ /* TODO Instead of writing out/summary and out/update to a temp
+ * directory, we could also write our own DocumentStore instance. */
private void writeSummaryFile() throws IOException {
File summaryFile = new File(this.tempOutDir, "summary");
BufferedWriter bw = new BufferedWriter(new FileWriter(summaryFile));
@@ -165,7 +167,10 @@ public class ResourceServletTest {
bw.write(bridge + "\n");
}
bw.close();
- summaryFile.setLastModified(this.lastModified);
+ File updateFile = new File(this.tempOutDir, "update");
+ bw = new BufferedWriter(new FileWriter(updateFile));
+ bw.write(String.valueOf(this.lastModified));
+ bw.close();
}
private void makeRequest() throws IOException {

[metrics-web/master] Put last consensus valid-after time on relay-search page.
by karsten@torproject.org 16 Jun '13
16 Jun '13
commit 30b1dd7344765ccda5c4043ad01d5420d707866a
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Sun Jun 16 19:16:04 2013 +0200
Put last consensus valid-after time on relay-search page.
Fixes #9073.
---
.../status/relaysearch/RelaySearchServlet.java | 27 ++++++++++++++++++++
1 file changed, 27 insertions(+)
diff --git a/src/org/torproject/ernie/status/relaysearch/RelaySearchServlet.java b/src/org/torproject/ernie/status/relaysearch/RelaySearchServlet.java
index 4e83bb5..cd5c4c1 100644
--- a/src/org/torproject/ernie/status/relaysearch/RelaySearchServlet.java
+++ b/src/org/torproject/ernie/status/relaysearch/RelaySearchServlet.java
@@ -276,6 +276,28 @@ public class RelaySearchServlet extends HttpServlet {
return;
}
+ /* Look up last consensus in the database. */
+ long maxValidAfterMillis = -1L;
+ try {
+ long requestedConnection = System.currentTimeMillis();
+ Connection conn = this.ds.getConnection();
+ String query = "SELECT MAX(validafter) AS last FROM consensus";
+ Statement statement = conn.createStatement();
+ ResultSet rs = statement.executeQuery(query);
+ if (rs.next()) {
+ maxValidAfterMillis = rs.getTimestamp(1).getTime();
+ }
+ rs.close();
+ statement.close();
+ conn.close();
+ this.logger.info("Returned a database connection to the pool "
+ + "after " + (System.currentTimeMillis()
+ - requestedConnection) + " millis.");
+ } catch (SQLException e) {
+ this.logger.log(Level.WARNING, "Could not look up last consensus "
+ + "valid-after time in the database.", e);
+ }
+
/* Prepare a string that says what we're searching for. */
List<String> recognizedSearchTerms = new ArrayList<String>();
if (searchNickname.length() > 0) {
@@ -297,6 +319,11 @@ public class RelaySearchServlet extends HttpServlet {
recognizedIntervals.add("on <b>" + searchTerm + "</b>");
}
StringBuilder searchNoticeBuilder = new StringBuilder();
+ if (maxValidAfterMillis > 0L) {
+ searchNoticeBuilder.append("Most recent consensus in database is "
+ + "from " + dateTimeFormat.format(maxValidAfterMillis)
+ + ".</p><p>");
+ }
searchNoticeBuilder.append("Searching for relays with ");
if (recognizedSearchTerms.size() == 1) {
searchNoticeBuilder.append(recognizedSearchTerms.get(0));
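
For context, the look-up added above is a plain JDBC round trip against the existing consensus table. A minimal sketch of the same query, assuming an already configured javax.sql.DataSource like the servlet's connection pool (LastValidAfterSketch is made up for illustration):

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import javax.sql.DataSource;

public class LastValidAfterSketch {
  /* Sketch only: ds would be the servlet's pooled DataSource. */
  static long lookUpLastValidAfter(DataSource ds) throws SQLException {
    long maxValidAfterMillis = -1L;
    Connection conn = ds.getConnection();
    Statement statement = conn.createStatement();
    ResultSet rs = statement.executeQuery(
        "SELECT MAX(validafter) AS last FROM consensus");
    if (rs.next()) {
      maxValidAfterMillis = rs.getTimestamp(1).getTime();
    }
    rs.close();
    statement.close();
    conn.close();
    return maxValidAfterMillis;
  }
}
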
commit 1c0022ce326261a5bcddaa8f3079e436faf84fef
Author: Anthony G. Basile <blueness(a)gentoo.org>
Date: Sun Jun 16 09:59:23 2013 -0400
Switch from sh to bash
---
build-arm.sh | 34 +++++++++++++++++-----------------
build-mips.sh | 40 ++++++++++++++++++++--------------------
build.sh | 50 +++++++++++++++++++++++++-------------------------
3 files changed, 62 insertions(+), 62 deletions(-)
diff --git a/build-arm.sh b/build-arm.sh
index 43aa9a7..20c37f6 100755
--- a/build-arm.sh
+++ b/build-arm.sh
@@ -1,4 +1,4 @@
-#!/bin/sh
+#!/bin/bash
RELEASE=ar7161.testing
@@ -11,8 +11,8 @@ OPENSSH=openssh-6.1p1
set_start()
{
- [ "x$CLEAN" = "xyes" ] && rm -rf release
- [ "x$DEBUG" = "x" ] && unset DEBUG
+ [[ "x$CLEAN" = "xyes" ]] && rm -rf release
+ [[ "x$DEBUG" = "x" ]] && unset DEBUG
}
################################################################################
@@ -32,12 +32,12 @@ get_configs()
mkdir -p configs
cd configs
- if [ "x$DEBUG" = "xyes" ] ; then
- [ ! -f $BUSYBOX.debug.config ] && echo "Missing busybox config" && exit
+ if [[ "x$DEBUG" = "xyes" ]] ; then
+ [[ ! -f $BUSYBOX.debug.config ]] && echo "Missing busybox config" && exit
else
- [ ! -f $BUSYBOX.config ] && echo "Missing busybox config" && exit
+ [[ ! -f $BUSYBOX.config ]] && echo "Missing busybox config" && exit
fi
- [ ! -f setup ] && echo "Missing setup script" && exit
+ [[ ! -f setup ]] && echo "Missing setup script" && exit
}
################################################################################
@@ -48,10 +48,10 @@ get_sources()
mkdir -p sources
cd sources
- [ ! -f $BUSYBOX.tar.bz2 ] && wget http://www.busybox.net/downloads/$BUSYBOX.tar.bz2
- [ ! -f $TOR.tar.gz ] && wget http://www.torproject.org/dist/$TOR.tar.gz
- [ ! -f $NTPD.tar.gz ] && wget ftp://ftp.openbsd.org/pub/OpenBSD/OpenNTPD/$NTPD.tar.gz
- [ ! -f $OPENSSH.tar.gz ] && wget ftp://ftp.openbsd.org/pub/OpenBSD/OpenSSH/portable/$OPENSSH.tar.gz
+ [[ ! -f $BUSYBOX.tar.bz2 ]] && wget http://www.busybox.net/downloads/$BUSYBOX.tar.bz2
+ [[ ! -f $TOR.tar.gz ]] && wget http://www.torproject.org/dist/$TOR.tar.gz
+ [[ ! -f $NTPD.tar.gz ]] && wget ftp://ftp.openbsd.org/pub/OpenBSD/OpenNTPD/$NTPD.tar.gz
+ [[ ! -f $OPENSSH.tar.gz ]] && wget ftp://ftp.openbsd.org/pub/OpenBSD/OpenSSH/portable/$OPENSSH.tar.gz
}
################################################################################
@@ -59,11 +59,11 @@ get_sources()
build_busybox()
{
cd $WORKING
- [ -f $BUSYBOX/busybox ] && return 0
+ [[ -f $BUSYBOX/busybox ]] && return 0
tar jxvf $WORKING/../sources/$BUSYBOX.tar.bz2
cd $BUSYBOX
for i in $WORKING/../configs/busybox-*.patch; do patch -p 1 < $i ; done
- if [ "x$DEBUG" = "xyes" ] ; then
+ if [[ "x$DEBUG" = "xyes" ]] ; then
cp $WORKING/../configs/$BUSYBOX.debug.config .config
else
cp $WORKING/../configs/$BUSYBOX.config .config
@@ -77,7 +77,7 @@ build_busybox()
build_tor()
{
cd $WORKING
- [ -f $TOR/src/or/tor ] && return 0
+ [[ -f $TOR/src/or/tor ]] && return 0
tar zxvf $WORKING/../sources/$TOR.tar.gz
cd $TOR
for i in $WORKING/../configs/tor-*.patch; do patch -p 1 < $i ; done
@@ -91,7 +91,7 @@ build_tor()
build_ntpd()
{
cd $WORKING
- [ -f $NTPD/ntpd ] && return 0
+ [[ -f $NTPD/ntpd ]] && return 0
tar zxvf $WORKING/../sources/$NTPD.tar.gz
cd $NTPD
sed -i '/NTPD_USER/s:_ntp:ntp:' ntpd.h
@@ -105,7 +105,7 @@ build_ntpd()
build_scp()
{
cd $WORKING
- [ -f $OPENSSH/ssh -a -f $OPENSSH/scp ] && return 0
+ [[ -f $OPENSSH/ssh && -f $OPENSSH/scp ]] && return 0
tar zxvf $WORKING/../sources/$OPENSSH.tar.gz
cd $OPENSSH
./configure --prefix=
@@ -238,7 +238,7 @@ EOF
cat << EOF > group
root:x:0:
tor:x:500:
-ntp:x:500:
+ntp:x:501:
EOF
cat << EOF > gshadow
diff --git a/build-mips.sh b/build-mips.sh
index b28344b..274dbe4 100755
--- a/build-mips.sh
+++ b/build-mips.sh
@@ -1,4 +1,4 @@
-#!/bin/sh
+#!/bin/bash
RELEASE=ar7161.testing
@@ -11,8 +11,8 @@ OPENSSH=openssh-6.1p1
set_start()
{
- [ "x$CLEAN" = "xyes" ] && rm -rf release
- [ "x$DEBUG" = "x" ] && unset DEBUG
+ [[ "x$CLEAN" = "xyes" ]] && rm -rf release
+ [[ "x$DEBUG" = "x" ]] && unset DEBUG
}
################################################################################
@@ -32,12 +32,12 @@ get_configs()
mkdir -p configs
cd configs
- if [ "x$DEBUG" = "xyes" ] ; then
- [ ! -f $BUSYBOX.debug.config ] && echo "Missing busybox config" && exit
+ if [[ "x$DEBUG" = "xyes" ]] ; then
+ [[ ! -f $BUSYBOX.debug.config ]] && echo "Missing busybox config" && exit
else
- [ ! -f $BUSYBOX.config ] && echo "Missing busybox config" && exit
+ [[ ! -f $BUSYBOX.config ]] && echo "Missing busybox config" && exit
fi
- [ ! -f setup ] && echo "Missing setup script" && exit
+ [[ ! -f setup ]] && echo "Missing setup script" && exit
}
################################################################################
@@ -48,10 +48,10 @@ get_sources()
mkdir -p sources
cd sources
- [ ! -f $BUSYBOX.tar.bz2 ] && wget http://www.busybox.net/downloads/$BUSYBOX.tar.bz2
- [ ! -f $TOR.tar.gz ] && wget http://www.torproject.org/dist/$TOR.tar.gz
- [ ! -f $NTPD.tar.gz ] && wget ftp://ftp.openbsd.org/pub/OpenBSD/OpenNTPD/$NTPD.tar.gz
- [ ! -f $OPENSSH.tar.gz ] && wget ftp://ftp.openbsd.org/pub/OpenBSD/OpenSSH/portable/$OPENSSH.tar.gz
+ [[ ! -f $BUSYBOX.tar.bz2 ]] && wget http://www.busybox.net/downloads/$BUSYBOX.tar.bz2
+ [[ ! -f $TOR.tar.gz ]] && wget http://www.torproject.org/dist/$TOR.tar.gz
+ [[ ! -f $NTPD.tar.gz ]] && wget ftp://ftp.openbsd.org/pub/OpenBSD/OpenNTPD/$NTPD.tar.gz
+ [[ ! -f $OPENSSH.tar.gz ]] && wget ftp://ftp.openbsd.org/pub/OpenBSD/OpenSSH/portable/$OPENSSH.tar.gz
}
################################################################################
@@ -59,11 +59,11 @@ get_sources()
build_busybox()
{
cd $WORKING
- [ -f $BUSYBOX/busybox ] && return 0
+ [[ -f $BUSYBOX/busybox ]] && return 0
tar jxvf $WORKING/../sources/$BUSYBOX.tar.bz2
cd $BUSYBOX
for i in $WORKING/../configs/busybox-*.patch; do patch -p 1 < $i ; done
- if [ "x$DEBUG" = "xyes" ] ; then
+ if [[ "x$DEBUG" = "xyes" ]] ; then
cp $WORKING/../configs/$BUSYBOX.debug.config .config
else
cp $WORKING/../configs/$BUSYBOX.config .config
@@ -77,7 +77,7 @@ build_busybox()
build_tor()
{
cd $WORKING
- [ -f $TOR/src/or/tor ] && return 0
+ [[ -f $TOR/src/or/tor ]] && return 0
tar zxvf $WORKING/../sources/$TOR.tar.gz
cd $TOR
for i in $WORKING/../configs/tor-*.patch; do patch -p 1 < $i ; done
@@ -91,7 +91,7 @@ build_tor()
build_ntpd()
{
cd $WORKING
- [ -f $NTPD/ntpd ] && return 0
+ [[ -f $NTPD/ntpd ]] && return 0
tar zxvf $WORKING/../sources/$NTPD.tar.gz
cd $NTPD
sed -i '/NTPD_USER/s:_ntp:ntp:' ntpd.h
@@ -105,7 +105,7 @@ build_ntpd()
build_scp()
{
cd $WORKING
- [ -f $OPENSSH/ssh -a -f $OPENSSH/scp ] && return 0
+ [[ -f $OPENSSH/ssh && -f $OPENSSH/scp ]] && return 0
tar zxvf $WORKING/../sources/$OPENSSH.tar.gz
cd $OPENSSH
./configure --prefix=
@@ -150,7 +150,7 @@ populate_bin()
get_needed()
{
- local A=$(readelf -a $1 | grep NEEDED | sed -e 's/^.*library://' -e 's/\[//' -e 's/\]//')
+ local A=$(readelf && $1 | grep NEEDED | sed -e 's/^.*library://' -e 's/\[[//' -e 's/\]]//')
echo $A
}
@@ -160,8 +160,8 @@ populate_lib()
for i in busybox ntpd ssh tor; do
A=$(get_needed ../bin/$i)
for j in $A ; do
- [ -e /lib/$j ] && cp -f /lib/$j .
- [ -e /usr/lib/$j ] && cp -f /usr/lib/$j .
+ [[ -e /lib/$j ]] && cp -f /lib/$j .
+ [[ -e /usr/lib/$j ]] && cp -f /usr/lib/$j .
done
done
@@ -247,7 +247,7 @@ EOF
cat << EOF > group
root:x:0:
tor:x:500:
-ntp:x:500:
+ntp:x:501:
EOF
cat << EOF > gshadow
diff --git a/build.sh b/build.sh
index c1128f5..7f2a2f3 100755
--- a/build.sh
+++ b/build.sh
@@ -1,4 +1,4 @@
-#!/bin/sh
+#!/bin/bash
BUSYBOX=busybox-1.20.2
TOR=tor-0.2.3.25
@@ -13,23 +13,23 @@ PATCHES=hardened-patches-${KVERSION}-1.extras
set_start()
{
- [ "x$CLEAN" = "xyes" ] && rm -rf release
- [ "x$DEBUG" = "x" ] && unset DEBUG
+ [[ "x$CLEAN" = "xyes" ]] && rm -rf release
+ [[ "x$DEBUG" = "x" ]] && unset DEBUG
}
################################################################################
set_target()
{
- [ "x$TARGET" = "x" ] && TARGET="x86"
- [ "x$TARGET" != "xx86" -a "x$TARGET" != "xx86_64" ] && echo "Unknown ARCH" && exit
+ [[ "x$TARGET" = "x" ]] && TARGET="x86"
+ [[ "x$TARGET" != "xx86" && "x$TARGET" != "xx86_64" ]] && echo "Unknown ARCH" && exit
}
################################################################################
set_release()
{
- [ "x$RELEASE" = "x" ] && RELEASE="testing"
+ [[ "x$RELEASE" = "x" ]] && RELEASE="testing"
}
################################################################################
@@ -49,13 +49,13 @@ get_configs()
mkdir -p configs
cd configs
- if [ "x$DEBUG" = "xyes" ] ; then
- [ ! -f $BUSYBOX.debug.config ] && echo "Missing busybox config" && exit
+ if [[ "x$DEBUG" = "xyes" ]] ; then
+ [[ ! -f $BUSYBOX.debug.config ]] && echo "Missing busybox config" && exit
else
- [ ! -f $BUSYBOX.config ] && echo "Missing busybox config" && exit
+ [[ ! -f $BUSYBOX.config ]] && echo "Missing busybox config" && exit
fi
- [ ! -f setup ] && echo "Missing setup script" && exit
- [ ! -f kernel-$KVERSION.$TARGET.config ] && echo "Missing kernel config" && exit
+ [[ ! -f setup ]] && echo "Missing setup script" && exit
+ [[ ! -f kernel-$KVERSION.$TARGET.config ]] && echo "Missing kernel config" && exit
}
################################################################################
@@ -66,12 +66,12 @@ get_sources()
mkdir -p sources
cd sources
- [ ! -f $BUSYBOX.tar.bz2 ] && wget http://www.busybox.net/downloads/$BUSYBOX.tar.bz2
- [ ! -f $TOR.tar.gz ] && wget http://www.torproject.org/dist/$TOR.tar.gz
- [ ! -f $NTPD.tar.gz ] && wget ftp://ftp.openbsd.org/pub/OpenBSD/OpenNTPD/$NTPD.tar.gz
- [ ! -f $LINUX.tar.bz2 ] && wget http://www.kernel.org/pub/linux/kernel/v3.x/$LINUX.tar.bz2
- [ ! -f $PATCHES.tar.bz2 ] && wget http://dev.gentoo.org/~blueness/hardened-sources/hardened-patches/$PATCHES.…
- [ ! -f $OPENSSH.tar.gz ] && wget ftp://ftp.openbsd.org/pub/OpenBSD/OpenSSH/portable/$OPENSSH.tar.gz
+ [[ ! -f $BUSYBOX.tar.bz2 ]] && wget http://www.busybox.net/downloads/$BUSYBOX.tar.bz2
+ [[ ! -f $TOR.tar.gz ]] && wget http://www.torproject.org/dist/$TOR.tar.gz
+ [[ ! -f $NTPD.tar.gz ]] && wget ftp://ftp.openbsd.org/pub/OpenBSD/OpenNTPD/$NTPD.tar.gz
+ [[ ! -f $LINUX.tar.bz2 ]] && wget http://www.kernel.org/pub/linux/kernel/v3.x/$LINUX.tar.bz2
+ [[ ! -f $PATCHES.tar.bz2 ]] && wget http://dev.gentoo.org/~blueness/hardened-sources/hardened-patches/$PATCHES.…
+ [[ ! -f $OPENSSH.tar.gz ]] && wget ftp://ftp.openbsd.org/pub/OpenBSD/OpenSSH/portable/$OPENSSH.tar.gz
}
################################################################################
@@ -79,11 +79,11 @@ get_sources()
build_busybox()
{
cd $WORKING
- [ -f $BUSYBOX/busybox ] && return 0
+ [[ -f $BUSYBOX/busybox ]] && return 0
tar jxvf $WORKING/../sources/$BUSYBOX.tar.bz2
cd $BUSYBOX
for i in $WORKING/../configs/busybox-*.patch; do patch -p 1 < $i ; done
- if [ "x$DEBUG" = "xyes" ] ; then
+ if [[ "x$DEBUG" = "xyes" ]] ; then
cp $WORKING/../configs/$BUSYBOX.debug.config .config
else
cp $WORKING/../configs/$BUSYBOX.config .config
@@ -96,7 +96,7 @@ build_busybox()
build_tor()
{
cd $WORKING
- [ -f $TOR/src/or/tor ] && return 0
+ [[ -f $TOR/src/or/tor ]] && return 0
tar zxvf $WORKING/../sources/$TOR.tar.gz
cd $TOR
for i in $WORKING/../configs/tor-*.patch; do patch -p 1 < $i ; done
@@ -110,7 +110,7 @@ build_tor()
build_ntpd()
{
cd $WORKING
- [ -f $NTPD/ntpd ] && return 0
+ [[ -f $NTPD/ntpd ]] && return 0
tar zxvf $WORKING/../sources/$NTPD.tar.gz
cd $NTPD
sed -i '/NTPD_USER/s:_ntp:ntp:' ntpd.h
@@ -124,7 +124,7 @@ build_ntpd()
build_scp()
{
cd $WORKING
- [ -f $OPENSSH/ssh -a -f $OPENSSH/scp ] && return 0
+ [[ -f $OPENSSH/ssh && -f $OPENSSH/scp ]] && return 0
tar zxvf $WORKING/../sources/$OPENSSH.tar.gz
cd $OPENSSH
./configure --prefix=
@@ -191,7 +191,7 @@ cat << EOF > fstab
none /proc proc defaults 0 0
EOF
-if [ "x$DEBUG" = "xyes" ] ; then
+if [[ "x$DEBUG" = "xyes" ]] ; then
cat << EOF > inittab
::sysinit:/etc/rcS
tty1::respawn:/bin/setup
@@ -338,7 +338,7 @@ finish_initramfs()
compile_kernel()
{
cd $WORKING
- [ -f $LINUX/arch/$TARGET/boot/bzImage ] && return 0
+ [[ -f $LINUX/arch/$TARGET/boot/bzImage ]] && return 0
tar jxvf $WORKING/../sources/$LINUX.tar.bz2
tar jxvf $WORKING/../sources/$PATCHES.tar.bz2
cd $LINUX
@@ -371,7 +371,7 @@ EOF
mkisofs -R -b boot/grub/stage2_eltorito -no-emul-boot -boot-load-size 4 -boot-info-table -o tor.iso iso.tor
- if [ "x$DEBUG" = "xyes" ] ; then
+ if [[ "x$DEBUG" = "xyes" ]] ; then
mv tor.iso tor.uclibc.$TARGET.debug.$RELEASE.iso
md5sum tor.uclibc.$TARGET.debug.$RELEASE.iso > tor.uclibc.$TARGET.debug.$RELEASE.iso.md5
else
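
One hunk above looks like collateral damage from the blanket "[ ... -a ... ]" to "[[ ... && ... ]]" conversion: in get_needed() the "-a" flag passed to readelf was rewritten to "&&", and the sed bracket escapes were doubled, so the new line no longer extracts the NEEDED entries. A minimal sketch of what the bash version of the function presumably ought to be, keeping the original readelf and sed invocations unchanged (the usage comment below is illustrative only, mirroring populate_lib() above):

# Print the DT_NEEDED shared libraries of an ELF binary.
# readelf -a dumps all headers; grep/sed pull out just the library names.
get_needed()
{
    local A=$(readelf -a "$1" | grep NEEDED | sed -e 's/^.*library://' -e 's/\[//' -e 's/\]//')
    echo $A
}

# Example use, as in populate_lib():
#   for j in $(get_needed ../bin/tor); do
#       [[ -e /lib/$j ]] && cp -f /lib/$j .
#       [[ -e /usr/lib/$j ]] && cp -f /usr/lib/$j .
#   done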