[tor-commits] [metrics-lib/master] Remove examples.

karsten at torproject.org karsten at torproject.org
Tue Jan 31 17:01:26 UTC 2012


commit e220928b10565450cb74a548ef4f2b8993b62dcb
Author: Karsten Loesing <karsten.loesing at gmx.net>
Date:   Tue Jan 31 11:22:07 2012 +0100

    Remove examples.
    
    Examples were useful when starting to design the API.  Now that there are
    actual applications using the library, there's no need to maintain the
    examples anymore.
---
 TODO                                               |   16 ---
 .../descriptor/example/ConsensusHealthChecker.java |   88 --------------
 .../example/MetricsRelayDescriptorAggregator.java  |  124 --------------------
 .../example/TorStatusDatabaseUpdater.java          |   57 ---------
 4 files changed, 0 insertions(+), 285 deletions(-)

diff --git a/TODO b/TODO
index eac8965..f468893 100644
--- a/TODO
+++ b/TODO
@@ -12,19 +12,3 @@
     which descriptors to include, and it may be useful to exclude
     descriptors by publication date.
 
-- New example applications
-  - The metrics-web data importer could make use of this API, too.  It
-    would read relay descriptors, bridge descriptors, Torperf data files,
-    and GetTor stats files from disk.
-  - Another example application could be a relay descriptor database
-    application to support searching for relays or looking up relay
-    descriptors in general.  This application might read daily rsync'ed
-    relay descriptors and import them into a database.
-  - Another example application might be the Java version of ExoneraTor.
-    This application would index locally extracted relay descriptor
-    tarballs and parse only the relevant files.  This version of
-    ExoneraTor could also read exit lists to provide more accurate
-    results.
-  - Another example application might be the Java version of VisiTor which
-    reads locally extracted exit list tarballs.
-
diff --git a/src/org/torproject/descriptor/example/ConsensusHealthChecker.java b/src/org/torproject/descriptor/example/ConsensusHealthChecker.java
deleted file mode 100644
index b8cc12b..0000000
--- a/src/org/torproject/descriptor/example/ConsensusHealthChecker.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.descriptor.example;
-
-import java.util.Iterator;
-import org.torproject.descriptor.Descriptor;
-import org.torproject.descriptor.DescriptorRequest;
-import org.torproject.descriptor.DescriptorSourceFactory;
-import org.torproject.descriptor.RelayDescriptorDownloader;
-import org.torproject.descriptor.RelayNetworkStatusConsensus;
-import org.torproject.descriptor.RelayNetworkStatusVote;
-
-/* This is a non-functional (though syntactically correct) example for how
- * the consensus-health checker could use the DescripTor API.  This class
- * will go away once the real consensus-health checker uses this API. */
-public class ConsensusHealthChecker {
-  public static void main(String[] args) {
-
-    /* Create an instance of the descriptor downloader that contains all
-     * the logic to download descriptors from the directory
-     * authorities. */
-    RelayDescriptorDownloader downloader =
-        DescriptorSourceFactory.createRelayDescriptorDownloader();
-
-    /* Make one example directory authority known to the downloader.  In
-     * the real consensus-health checker, all directory authorities would
-     * be added here.  (There is no list of current directory authorities
-     * in the DescripTor code, because it may change over time and not all
-     * DescripTor applications need to download descriptors from the
-     * directory authorities.) */
-    downloader.addDirectoryAuthority("gabelmoo", "212.112.245.170", 80);
-
-    /* Tell the descriptor that we're interested in downloading the
-     * current consensus from all directory authorities and all referenced
-     * votes.  With these two preferences set, the downloader will try to
-     * download the consensus from gabelmoo, parse it for referenced
-     * votes, and try to download all of them from gabelmoo, too. */
-    downloader.setIncludeCurrentConsensusFromAllDirectoryAuthorities();
-    downloader.setIncludeCurrentReferencedVotes();
-
-    /* Set connect and read timeouts of 1 minute each and a global timeout
-     * of 10 minutes to avoid being blocked forever by a slow download. */
-    downloader.setConnectTimeout(60L * 1000L);
-    downloader.setReadTimeout(60L * 1000L);
-    downloader.setGlobalTimeout(10L * 60L * 1000L);
-
-    /* Run the previously configured downloads and iterate over the
-     * received descriptors.  Don't process them right now, but add them
-     * to the checker class one by one and do the checking once all
-     * downloads are complete. */
-    Iterator<DescriptorRequest> descriptorRequests =
-        downloader.downloadDescriptors();
-    while (descriptorRequests.hasNext()) {
-      DescriptorRequest request = descriptorRequests.next();
-      String authority = request.getDirectoryNickname();
-      long fetchTime = request.getRequestEnd()
-          - request.getRequestStart();
-      if (request.globalTimeoutHasExpired()) {
-        System.err.println("The global timeout for downloading "
-            + "descriptors has expired.  That means we're missing one or "
-            + "more consensuses and/or votes and cannot make a good "
-            + "statement about the consensus health.  Exiting.");
-        return;
-      } else if (request.connectTimeoutHasExpired() ||
-          request.readTimeoutHasExpired()) {
-        System.out.println("The request to directory authority "
-            + request.getDirectoryNickname() + " to download the "
-            + "descriptor(s) at " + request.getRequestUrl() + " has "
-            + "timed out.");
-      } else {
-        for (Descriptor downloadedDescriptor : request.getDescriptors()) {
-          if (downloadedDescriptor instanceof
-              RelayNetworkStatusConsensus) {
-            /* Remember that we downloaded a consensus from authority and
-             * took fetchTime millis to do so. */
-          } else if (downloadedDescriptor instanceof
-              RelayNetworkStatusVote) {
-            /* Remember that we downloaded a vote. */
-          } else {
-            System.err.println("Did not expect a descriptor of type "
-                + downloadedDescriptor.getClass() + ".  Ignoring.");
-          }
-        }
-      }
-    }
-  }
-}
-
diff --git a/src/org/torproject/descriptor/example/MetricsRelayDescriptorAggregator.java b/src/org/torproject/descriptor/example/MetricsRelayDescriptorAggregator.java
deleted file mode 100644
index bfaaa22..0000000
--- a/src/org/torproject/descriptor/example/MetricsRelayDescriptorAggregator.java
+++ /dev/null
@@ -1,124 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.descriptor.example;
-
-import java.io.File;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-import org.torproject.descriptor.Descriptor;
-import org.torproject.descriptor.DescriptorFile;
-import org.torproject.descriptor.DescriptorRequest;
-import org.torproject.descriptor.DescriptorSourceFactory;
-import org.torproject.descriptor.RelayDescriptorDownloader;
-import org.torproject.descriptor.RelayDescriptorReader;
-
-/* This is a non-functional (though syntactically correct) example for how
- * metrics-db could use the DescripTor API to read relay descriptors from
- * two sources and download only missing descriptors from the directory
- * authorities.  metrics-db does more than aggregating relay descriptors,
- * but the other functions (sanitizing bridge descriptors, downloading
- * GetTor statistics) are too specific to metrics-db to add them to the
- * DescripTor API.  This class will go away once a real metrics-db uses
- * this API. */
-public class MetricsRelayDescriptorAggregator {
-  public static void main(String[] args) {
-
-    /* Start by reading lists of previously processed descriptors from
-     * disk.  We'll want to exclude these descriptors, plus any that we
-     * learn in this execution, from the descriptors we download from the
-     * directory authorities.  We should remove descriptors that were
-     * published more than one week ago from the list, because they
-     * wouldn't be referenced in a consensus anyway. */
-    long lastKnownConsensusValidAfterTime = 1234567890000L;
-    Map<String, Long> lastKnownVoteValidAfterTimes =
-        new HashMap<String, Long>();
-    lastKnownVoteValidAfterTimes.put(
-        "1234567890ABCDEF1234567890ABCDEF12345678", 1234567890000L);
-    Map<String, Long> knownServerDescriptorIdentifiers =
-        new HashMap<String, Long>();
-    Map<String, Long> knownExtraInfoDescriptorIdentifiers =
-        new HashMap<String, Long>();
-
-    /* Create a relay descriptor reader to read descriptors from cached
-     * descriptor files in a local Tor data directory. */
-    RelayDescriptorReader reader =
-        DescriptorSourceFactory.createRelayDescriptorReader();
-
-    /* Tell the reader where to find relay descriptor files to parse.  In
-     * this case it's a Tor data directory with cached descriptor
-     * files. */
-    reader.addDirectory(new File("tor-data-dir"));
-
-    /* Exclude cached descriptor files that haven't changed since we last
-     * ran this application. */
-    //reader.setExcludeFile(new File("tor-data-dir/cached-descriptors"),
-    //    1234567890000L);
-
-    /* Read descriptors and process them. */
-    Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
-    while (descriptorFiles.hasNext()) {
-      DescriptorFile descriptorFile = descriptorFiles.next();
-      /* Do something with the read descriptors. */
-    }
-
-    /* Remember which descriptors we just processed to exclude them from
-     * the download.  This code is independent of the API and therefore
-     * not shown here. */
-
-    /* Do the same operations as shown above for other local directories
-     * containing relay descriptors.  For example, metrics-db rsyncs the
-     * directory-archive script output from tor26 once per day and imports
-     * them, too.  The operations are very similar.  We should use a new
-     * RelayDescriptorReader for every directory. */
-
-    /* Download missing descriptors from the directory authorities.
-     * Create an instance of the descriptor downloader that contains the
-     * logic to download descriptors from the directory authorities. */
-    RelayDescriptorDownloader downloader =
-        DescriptorSourceFactory.createRelayDescriptorDownloader();
-
-    /* Make one or more directory authorities or directory mirrors known
-     * to the downloader. */
-    downloader.addDirectoryAuthority("gabelmoo", "212.112.245.170", 80);
-
-    /* Tell the descriptor that we're interested in downloading pretty
-     * much every descriptor type there is. */
-    downloader.setIncludeCurrentConsensus();
-    downloader.setIncludeCurrentReferencedVotes();
-    downloader.setIncludeReferencedServerDescriptors();
-    downloader.setIncludeReferencedExtraInfoDescriptors();
-
-    /* Exclude the descriptors that we already know.  This is vital to
-     * avoid putting too much load on the directories.  (Excluding the
-     * consensus and votes if they have been processed before is not shown
-     * here, because it requires some timestamp parsing; using the API for
-     * this should be trivial, though.) */
-    downloader.setExcludeServerDescriptors(
-        knownServerDescriptorIdentifiers.keySet());
-    downloader.setExcludeExtraInfoDescriptors(
-        knownExtraInfoDescriptorIdentifiers.keySet());
-
-    /* Set connect and read timeouts of 2 minutes each and a global
-     * timeout of 1 hour to avoid being blocked forever by a slow
-     * download, but also to avoid giving up too quickly. */
-    downloader.setConnectTimeout(2L * 60L * 1000L);
-    downloader.setReadTimeout(2L * 60L * 1000L);
-    downloader.setGlobalTimeout(60L * 60L * 1000L);
-
-    /* Download descriptors and process them. */
-    Iterator<DescriptorRequest> descriptorRequests =
-        downloader.downloadDescriptors();
-    while (descriptorRequests.hasNext()) {
-      DescriptorRequest descriptorRequest = descriptorRequests.next();
-      /* Do something with the requests. */
-    }
-
-    /* Write the list of processed descriptors to disk, so that we don't
-     * download them in the next execution.  This code is independent of
-     * the API and therefore not shown here. */
-
-    /* That's it.  We're done. */
-  }
-}
-
diff --git a/src/org/torproject/descriptor/example/TorStatusDatabaseUpdater.java b/src/org/torproject/descriptor/example/TorStatusDatabaseUpdater.java
deleted file mode 100644
index f3ee341..0000000
--- a/src/org/torproject/descriptor/example/TorStatusDatabaseUpdater.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/* Copyright 2011, 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.descriptor.example;
-
-import java.io.File;
-import java.util.Iterator;
-import org.torproject.descriptor.Descriptor;
-import org.torproject.descriptor.DescriptorFile;
-import org.torproject.descriptor.DescriptorSourceFactory;
-import org.torproject.descriptor.RelayDescriptorReader;
-import org.torproject.descriptor.RelayNetworkStatusConsensus;
-import org.torproject.descriptor.RelayNetworkStatusVote;
-
-/* This is a non-functional (though syntactically correct) example for how
- * a TorStatus application could use the DescripTor API to read the cached
- * descriptors from a local Tor data directory and update its database.
- * This class will go away once a real TorStatus application uses this
- * API. */
-public class TorStatusDatabaseUpdater {
-  public static void main(String[] args) {
-
-    /* Create an instance of the descriptor reader that implements the
-     * logic to index and parse descriptor files from a local directory,
-     * including the logic to ignore files that have been parsed in a
-     * previous run. */
-    RelayDescriptorReader reader =
-        DescriptorSourceFactory.createRelayDescriptorReader();
-
-    /* Tell the reader where to find relay descriptor files to parse.  In
-     * this case it's a Tor data directory with cached descriptor
-     * files. */
-    reader.addDirectory(new File("tor-data-dir"));
-
-    /* Exclude cached descriptor files that haven't changed since we last
-     * ran this application.  This may save some execution time.  The
-     * application needs to store the information when files were last
-     * modified, because the API is supposed to be stateless. */
-    //reader.setExcludeFile(new File("tor-data-dir/cached-descriptors"),
-    //    1234567890000L);
-
-    /* Read all descriptors in the given directory and import them into
-     * the database.  Also go through the list of parsed files and store
-     * their last modification times, so that we can exclude them the next
-     * time if they haven't changed. */
-    Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
-    while (descriptorFiles.hasNext()) {
-      DescriptorFile descriptorFile = descriptorFiles.next();
-      for (Descriptor readDescriptor : descriptorFile.getDescriptors()) {
-        /* Do something with the parsed descriptor. */
-      }
-      String fileName = descriptorFile.getFile().getName();
-      long lastModified = descriptorFile.getLastModified();
-      /* Do something with the file name and last modification time. */
-    }
-  }
-}
-





More information about the tor-commits mailing list