[tor-commits] [collector/master] Implements task-19015, switch from jul to slf4j and logback.

karsten at torproject.org karsten at torproject.org
Mon Jun 6 20:44:22 UTC 2016


commit e89f50ec0e0bbcddc295456ee9e83b4ec7c30db0
Author: iwakeh <iwakeh at torproject.org>
Date:   Fri Jun 3 15:10:38 2016 +0200

    Implements task-19015, switch from jul to slf4j and logback.
---
 build.xml                                          |  10 +-
 src/main/java/org/torproject/collector/Main.java   |  14 ++-
 .../bridgedescs/BridgeDescriptorParser.java        |  10 +-
 .../bridgedescs/BridgeSnapshotReader.java          |  26 ++---
 .../bridgedescs/SanitizedBridgesWriter.java        | 124 ++++++++++----------
 .../torproject/collector/conf/Configuration.java   |   2 -
 .../collector/exitlists/ExitListDownloader.java    |  28 ++---
 .../collector/index/CreateIndexJson.java           |   3 +
 .../org/torproject/collector/main/LockFile.java    |  14 ++-
 .../collector/relaydescs/ArchiveReader.java        |  27 ++---
 .../collector/relaydescs/ArchiveWriter.java        |  45 ++++----
 .../relaydescs/CachedRelayDescriptorReader.java    |  24 ++--
 .../collector/relaydescs/ReferenceChecker.java     |  15 +--
 .../relaydescs/RelayDescriptorDownloader.java      |  43 ++++---
 .../relaydescs/RelayDescriptorParser.java          |  17 +--
 .../collector/torperf/TorperfDownloader.java       |  65 +++++------
 src/main/resources/logback.xml                     | 126 +++++++++++++++++++++
 17 files changed, 370 insertions(+), 223 deletions(-)

diff --git a/build.xml b/build.xml
index 8e46584..ffb1fca 100644
--- a/build.xml
+++ b/build.xml
@@ -25,6 +25,9 @@
       <include name="gson-2.2.4.jar"/>
       <include name="xz-1.5.jar"/>
       <include name="descriptor-${descriptorversion}.jar"/>
+      <include name="logback-core-1.1.2.jar" />
+      <include name="logback-classic-1.1.2.jar" />
+      <include name="slf4j-api-1.7.7.jar" />
   </patternset>
   <path id="classpath">
     <pathelement path="${classes}"/>
@@ -134,7 +137,10 @@
     <jar destfile="${jarfile}"
          basedir="${classes}">
       <fileset dir="${classes}"/>
-      <fileset dir="${resources}" includes="collector.properties"/>
+      <fileset dir="${resources}" >
+        <include name="collector.properties"/>
+        <include name="logback.xml"/>
+      </fileset>
       <zipgroupfileset dir="${libs}" >
         <patternset refid="runtime" />
       </zipgroupfileset>
@@ -200,6 +206,7 @@
       <!-- The following jvmargs prevent test access to the network. -->
       <jvmarg value="-Djava.security.policy=${testresources}/junittest.policy"/>
       <jvmarg value="-Djava.security.manager"/>
+      <jvmarg value="-DLOGBASE=${generated}/testcoverage-logs"/>
       <classpath refid="cobertura.test.classpath" />
       <formatter type="xml" />
       <batchtest toDir="${testresult}" >
@@ -220,6 +227,7 @@
       <!-- The following jvmargs prevent test access to the network. -->
       <jvmarg value="-Djava.security.policy=${testresources}/junittest.policy"/>
       <jvmarg value="-Djava.security.manager"/>
+      <jvmarg value="-DLOGBASE=${generated}/test-logs"/>
       <classpath refid="test.classpath"/>
       <formatter type="plain" usefile="false"/>
       <batchtest>
diff --git a/src/main/java/org/torproject/collector/Main.java b/src/main/java/org/torproject/collector/Main.java
index d21cfb6..34bb95b 100644
--- a/src/main/java/org/torproject/collector/Main.java
+++ b/src/main/java/org/torproject/collector/Main.java
@@ -10,6 +10,9 @@ import org.torproject.collector.index.CreateIndexJson;
 import org.torproject.collector.relaydescs.ArchiveWriter;
 import org.torproject.collector.torperf.TorperfDownloader;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
@@ -19,7 +22,6 @@ import java.nio.file.Paths;
 import java.nio.file.StandardCopyOption;
 import java.util.HashMap;
 import java.util.Map;
-import java.util.logging.Logger;
 
 /**
  * Main class for starting a CollecTor instance.
@@ -30,7 +32,7 @@ import java.util.logging.Logger;
  */
 public class Main {
 
-  private static Logger log = Logger.getLogger(Main.class.getName());
+  private static Logger log = LoggerFactory.getLogger(Main.class);
   public static final String CONF_FILE = "collector.properties";
 
   /** All possible main classes.
@@ -91,7 +93,7 @@ public class Main {
           + ") and provide at least one data source and one data sink. "
           + "Refer to the manual for more information.");
     } catch (IOException e) {
-      log.severe("Cannot write default configuration. Reason: " + e);
+      log.error("Cannot write default configuration. Reason: " + e, e);
     }
   }
 
@@ -99,7 +101,7 @@ public class Main {
     try (FileInputStream fis = new FileInputStream(confFile)) {
       conf.load(fis);
     } catch (Exception e) { // catch all possible problems
-      log.severe("Cannot read configuration. Reason: " + e);
+      log.error("Cannot read configuration. Reason: " + e, e);
       throw e;
     }
   }
@@ -118,8 +120,8 @@ public class Main {
           .invoke(null, (Object) conf);
     } catch (NoSuchMethodException | IllegalAccessException
        | InvocationTargetException e) {
-      log.severe("Cannot invoke 'main' method on "
-          + clazz.getName() + ". " + e);
+      log.error("Cannot invoke 'main' method on "
+          + clazz.getName() + ". " + e, e);
     }
   }
 }
diff --git a/src/main/java/org/torproject/collector/bridgedescs/BridgeDescriptorParser.java b/src/main/java/org/torproject/collector/bridgedescs/BridgeDescriptorParser.java
index f683ea0..94d554f 100644
--- a/src/main/java/org/torproject/collector/bridgedescs/BridgeDescriptorParser.java
+++ b/src/main/java/org/torproject/collector/bridgedescs/BridgeDescriptorParser.java
@@ -3,11 +3,12 @@
 
 package org.torproject.collector.bridgedescs;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.StringReader;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 public class BridgeDescriptorParser {
 
@@ -18,7 +19,7 @@ public class BridgeDescriptorParser {
   public BridgeDescriptorParser(SanitizedBridgesWriter sbw) {
     this.sbw = sbw;
     this.logger =
-        Logger.getLogger(BridgeDescriptorParser.class.getName());
+        LoggerFactory.getLogger(BridgeDescriptorParser.class);
   }
 
   public void parse(byte[] allData, String dateTime) {
@@ -42,8 +43,7 @@ public class BridgeDescriptorParser {
         }
       }
     } catch (IOException e) {
-      this.logger.log(Level.WARNING, "Could not parse bridge descriptor.",
-          e);
+      this.logger.warn("Could not parse bridge descriptor.", e);
       return;
     }
   }
diff --git a/src/main/java/org/torproject/collector/bridgedescs/BridgeSnapshotReader.java b/src/main/java/org/torproject/collector/bridgedescs/BridgeSnapshotReader.java
index 2d41d18..b1aacec 100644
--- a/src/main/java/org/torproject/collector/bridgedescs/BridgeSnapshotReader.java
+++ b/src/main/java/org/torproject/collector/bridgedescs/BridgeSnapshotReader.java
@@ -8,6 +8,9 @@ import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
 import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.BufferedInputStream;
 import java.io.BufferedReader;
 import java.io.BufferedWriter;
@@ -23,8 +26,6 @@ import java.util.Set;
 import java.util.SortedSet;
 import java.util.Stack;
 import java.util.TreeSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 /**
  * Reads the half-hourly snapshots of bridge descriptors from Tonga.
@@ -38,15 +39,14 @@ public class BridgeSnapshotReader {
       throw new IllegalArgumentException();
     }
 
-    Logger logger =
-        Logger.getLogger(BridgeSnapshotReader.class.getName());
+    Logger logger = LoggerFactory.getLogger(BridgeSnapshotReader.class);
     SortedSet<String> parsed = new TreeSet<String>();
     File bdDir = bridgeDirectoriesDir;
     File pbdFile = new File(statsDirectory, "parsed-bridge-directories");
     boolean modified = false;
     if (bdDir.exists()) {
       if (pbdFile.exists()) {
-        logger.fine("Reading file " + pbdFile.getAbsolutePath() + "...");
+        logger.debug("Reading file " + pbdFile.getAbsolutePath() + "...");
         try {
           BufferedReader br = new BufferedReader(new FileReader(pbdFile));
           String line = null;
@@ -54,15 +54,15 @@ public class BridgeSnapshotReader {
             parsed.add(line);
           }
           br.close();
-          logger.fine("Finished reading file "
+          logger.debug("Finished reading file "
               + pbdFile.getAbsolutePath() + ".");
         } catch (IOException e) {
-          logger.log(Level.WARNING, "Failed reading file "
+          logger.warn("Failed reading file "
               + pbdFile.getAbsolutePath() + "!", e);
           return;
         }
       }
-      logger.fine("Importing files in directory " + bridgeDirectoriesDir
+      logger.debug("Importing files in directory " + bridgeDirectoriesDir
           + "/...");
       Set<String> descriptorImportHistory = new HashSet<String>();
       int parsedFiles = 0;
@@ -192,13 +192,13 @@ public class BridgeSnapshotReader {
             parsed.add(pop.getName());
             modified = true;
           } catch (IOException e) {
-            logger.log(Level.WARNING, "Could not parse bridge snapshot "
+            logger.warn("Could not parse bridge snapshot "
                 + pop.getName() + "!", e);
             continue;
           }
         }
       }
-      logger.fine("Finished importing files in directory "
+      logger.debug("Finished importing files in directory "
           + bridgeDirectoriesDir + "/.  In total, we parsed "
           + parsedFiles + " files (skipped " + skippedFiles
           + ") containing " + parsedStatuses + " statuses, "
@@ -207,7 +207,7 @@ public class BridgeSnapshotReader {
           + parsedExtraInfoDescriptors + " extra-info descriptors "
           + "(skipped " + skippedExtraInfoDescriptors + ").");
       if (!parsed.isEmpty() && modified) {
-        logger.fine("Writing file " + pbdFile.getAbsolutePath() + "...");
+        logger.debug("Writing file " + pbdFile.getAbsolutePath() + "...");
         try {
           pbdFile.getParentFile().mkdirs();
           BufferedWriter bw = new BufferedWriter(new FileWriter(pbdFile));
@@ -215,10 +215,10 @@ public class BridgeSnapshotReader {
             bw.append(f + "\n");
           }
           bw.close();
-          logger.fine("Finished writing file " + pbdFile.getAbsolutePath()
+          logger.debug("Finished writing file " + pbdFile.getAbsolutePath()
               + ".");
         } catch (IOException e) {
-          logger.log(Level.WARNING, "Failed writing file "
+          logger.warn("Failed writing file "
               + pbdFile.getAbsolutePath() + "!", e);
         }
       }
diff --git a/src/main/java/org/torproject/collector/bridgedescs/SanitizedBridgesWriter.java b/src/main/java/org/torproject/collector/bridgedescs/SanitizedBridgesWriter.java
index fa24a3d..e483353 100644
--- a/src/main/java/org/torproject/collector/bridgedescs/SanitizedBridgesWriter.java
+++ b/src/main/java/org/torproject/collector/bridgedescs/SanitizedBridgesWriter.java
@@ -13,6 +13,9 @@ import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.codec.binary.Hex;
 import org.apache.commons.codec.digest.DigestUtils;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.BufferedReader;
 import java.io.BufferedWriter;
 import java.io.File;
@@ -33,8 +36,6 @@ import java.util.SortedMap;
 import java.util.Stack;
 import java.util.TimeZone;
 import java.util.TreeMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 /**
  * <p>Sanitizes bridge descriptors, i.e., removes all possibly sensitive
@@ -51,11 +52,10 @@ import java.util.logging.Logger;
  */
 public class SanitizedBridgesWriter extends Thread {
 
-  private static Logger logger;
+  private static Logger logger = LoggerFactory.getLogger(SanitizedBridgesWriter.class);
 
   public static void main(Configuration config) throws ConfigurationException {
 
-    logger = Logger.getLogger(SanitizedBridgesWriter.class.getName());
     logger.info("Starting bridge-descriptors module of CollecTor.");
 
     // Use lock file to avoid overlapping runs
@@ -108,7 +108,7 @@ public class SanitizedBridgesWriter extends Thread {
     try {
       startProcessing();
     } catch (ConfigurationException ce) {
-      logger.severe("Configuration failed: " + ce);
+      logger.error("Configuration failed: " + ce, ce);
       throw new RuntimeException(ce);
     }
   }
@@ -135,10 +135,6 @@ public class SanitizedBridgesWriter extends Thread {
     this.sanitizedBridgesDirectory = sanitizedBridgesDirectory;
     this.replaceIPAddressesWithHashes = replaceIPAddressesWithHashes;
 
-    /* Initialize logger. */
-    this.logger = Logger.getLogger(
-        SanitizedBridgesWriter.class.getName());
-
     SimpleDateFormat rsyncCatFormat = new SimpleDateFormat(
         "yyyy-MM-dd-HH-mm-ss");
     rsyncCatFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
@@ -150,7 +146,7 @@ public class SanitizedBridgesWriter extends Thread {
       try {
         this.secureRandom = SecureRandom.getInstance("SHA1PRNG", "SUN");
       } catch (GeneralSecurityException e) {
-        this.logger.log(Level.WARNING, "Could not initialize secure "
+        this.logger.warn("Could not initialize secure "
             + "random number generator! Not calculating any IP address "
             + "hashes in this execution!", e);
         this.persistenceProblemWithSecrets = true;
@@ -172,7 +168,7 @@ public class SanitizedBridgesWriter extends Thread {
           if ((line.length() != ("yyyy-MM,".length() + 31 * 2)
               && line.length() != ("yyyy-MM,".length() + 50 * 2))
               || parts.length != 2) {
-            this.logger.warning("Invalid line in bridge-ip-secrets file "
+            this.logger.warn("Invalid line in bridge-ip-secrets file "
                 + "starting with '" + line.substring(0, 7) + "'! "
                 + "Not calculating any IP address hashes in this "
                 + "execution!");
@@ -185,17 +181,17 @@ public class SanitizedBridgesWriter extends Thread {
         }
         br.close();
         if (!this.persistenceProblemWithSecrets) {
-          this.logger.fine("Read "
+          this.logger.debug("Read "
               + this.secretsForHashingIPAddresses.size() + " secrets for "
               + "hashing bridge IP addresses.");
         }
       } catch (DecoderException e) {
-        this.logger.log(Level.WARNING, "Failed to decode hex string in "
+        this.logger.warn("Failed to decode hex string in "
             + this.bridgeIpSecretsFile + "! Not calculating any IP "
             + "address hashes in this execution!", e);
         this.persistenceProblemWithSecrets = true;
       } catch (IOException e) {
-        this.logger.log(Level.WARNING, "Failed to read "
+        this.logger.warn("Failed to read "
             + this.bridgeIpSecretsFile + "! Not calculating any IP "
             + "address hashes in this execution!", e);
         this.persistenceProblemWithSecrets = true;
@@ -374,7 +370,7 @@ public class SanitizedBridgesWriter extends Thread {
       }
       if (month.compareTo(
           this.bridgeSanitizingCutOffTimestamp) < 0) {
-        this.logger.warning("Generated a secret that we won't make "
+        this.logger.warn("Generated a secret that we won't make "
             + "persistent, because it's outside our bridge descriptor "
             + "sanitizing interval.");
       } else {
@@ -390,7 +386,7 @@ public class SanitizedBridgesWriter extends Thread {
           bw.write(month + "," + Hex.encodeHexString(secret) + "\n");
           bw.close();
         } catch (IOException e) {
-          this.logger.log(Level.WARNING, "Could not store new secret "
+          this.logger.warn("Could not store new secret "
               + "to disk! Not calculating any IP address hashes in "
               + "this execution!", e);
           this.persistenceProblemWithSecrets = true;
@@ -422,11 +418,15 @@ public class SanitizedBridgesWriter extends Thread {
 
     if (this.bridgeSanitizingCutOffTimestamp
         .compareTo(publicationTime) > 0) {
-      this.logger.log(!this.haveWarnedAboutInterval ? Level.WARNING
-          : Level.FINE, "Sanitizing and storing network status with "
+      String text =  "Sanitizing and storing network status with "
           + "publication time outside our descriptor sanitizing "
-          + "interval.");
-      this.haveWarnedAboutInterval = true;
+          + "interval.";
+      if (this.haveWarnedAboutInterval) {
+        this.logger.debug(text);
+      } else {
+        this.logger.warn(text);
+        this.haveWarnedAboutInterval = true;
+      }
     }
 
     /* Parse the given network status line by line. */
@@ -510,7 +510,7 @@ public class SanitizedBridgesWriter extends Thread {
           if (scrubbedOrAddress != null) {
             scrubbed.append("a " + scrubbedOrAddress + "\n");
           } else {
-            this.logger.warning("Invalid address in line '" + line
+            this.logger.warn("Invalid address in line '" + line
                 + "' in bridge network status.  Skipping line!");
           }
 
@@ -524,7 +524,7 @@ public class SanitizedBridgesWriter extends Thread {
          * network status.  If there is, we should probably learn before
          * writing anything to the sanitized descriptors. */
         } else {
-          this.logger.fine("Unknown line '" + line + "' in bridge "
+          this.logger.debug("Unknown line '" + line + "' in bridge "
               + "network status. Not writing to disk!");
           return;
         }
@@ -544,18 +544,18 @@ public class SanitizedBridgesWriter extends Thread {
       if (formatter.parse(publicationTime).getTime()
           - formatter.parse(mostRecentDescPublished).getTime()
           > 60L * 60L * 1000L) {
-        this.logger.warning("The most recent descriptor in the bridge "
+        this.logger.warn("The most recent descriptor in the bridge "
             + "network status published at " + publicationTime + " was "
             + "published at " + mostRecentDescPublished + " which is "
             + "more than 1 hour before the status. This is a sign for "
             + "the status being stale. Please check!");
       }
     } catch (ParseException e) {
-      this.logger.log(Level.WARNING, "Could not parse timestamp in "
+      this.logger.warn("Could not parse timestamp in "
           + "bridge network status.", e);
       return;
     } catch (IOException e) {
-      this.logger.log(Level.WARNING, "Could not parse bridge network "
+      this.logger.warn("Could not parse bridge network "
           + "status.", e);
       return;
     }
@@ -589,7 +589,7 @@ public class SanitizedBridgesWriter extends Thread {
         bw.close();
       }
     } catch (IOException e) {
-      this.logger.log(Level.WARNING, "Could not write sanitized bridge "
+      this.logger.warn("Could not write sanitized bridge "
           + "network status to disk.", e);
       return;
     }
@@ -656,11 +656,15 @@ public class SanitizedBridgesWriter extends Thread {
           }
           if (this.bridgeSanitizingCutOffTimestamp
               .compareTo(published) > 0) {
-            this.logger.log(!this.haveWarnedAboutInterval
-                ? Level.WARNING : Level.FINE, "Sanitizing and storing "
+            String text = "Sanitizing and storing "
                 + "server descriptor with publication time outside our "
-                + "descriptor sanitizing interval.");
-            this.haveWarnedAboutInterval = true;
+                + "descriptor sanitizing interval.";
+            if (this.haveWarnedAboutInterval) {
+              this.logger.debug(text);
+            } else {
+              this.logger.warn(text);
+              this.haveWarnedAboutInterval = true;
+            }
           }
           scrubbed.append(line + "\n");
 
@@ -686,7 +690,7 @@ public class SanitizedBridgesWriter extends Thread {
                 if (scrubbedOrAddress != null) {
                   scrubbedOrAddresses.add(scrubbedOrAddress);
                 } else {
-                  this.logger.warning("Invalid address in line "
+                  this.logger.warn("Invalid address in line "
                       + "'or-address " + orAddress + "' in bridge server "
                       + "descriptor.  Skipping line!");
                 }
@@ -776,7 +780,7 @@ public class SanitizedBridgesWriter extends Thread {
               + "\n");
           if (masterKeyEd25519 != null && !masterKeyEd25519.equals(
               masterKeyEd25519FromIdentityEd25519)) {
-            this.logger.warning("Mismatch between identity-ed25519 and "
+            this.logger.warn("Mismatch between identity-ed25519 and "
                 + "master-key-ed25519.  Skipping.");
             return;
           }
@@ -787,7 +791,7 @@ public class SanitizedBridgesWriter extends Thread {
           if (masterKeyEd25519FromIdentityEd25519 != null
               && !masterKeyEd25519FromIdentityEd25519.equals(
               masterKeyEd25519)) {
-            this.logger.warning("Mismatch between identity-ed25519 and "
+            this.logger.warn("Mismatch between identity-ed25519 and "
                 + "master-key-ed25519.  Skipping.");
             return;
           }
@@ -854,14 +858,14 @@ public class SanitizedBridgesWriter extends Thread {
          * that we need to remove or replace for the sanitized descriptor
          * version. */
         } else {
-          this.logger.warning("Unrecognized line '" + line
+          this.logger.warn("Unrecognized line '" + line
               + "'. Skipping.");
           return;
         }
       }
       br.close();
     } catch (Exception e) {
-      this.logger.log(Level.WARNING, "Could not parse server "
+      this.logger.warn("Could not parse server "
           + "descriptor.", e);
       return;
     }
@@ -883,7 +887,7 @@ public class SanitizedBridgesWriter extends Thread {
       /* Handle below. */
     }
     if (descriptorDigest == null) {
-      this.logger.log(Level.WARNING, "Could not calculate server "
+      this.logger.warn("Could not calculate server "
           + "descriptor digest.");
       return;
     }
@@ -906,7 +910,7 @@ public class SanitizedBridgesWriter extends Thread {
         /* Handle below. */
       }
       if (descriptorDigestSha256Base64 == null) {
-        this.logger.log(Level.WARNING, "Could not calculate server "
+        this.logger.warn("Could not calculate server "
             + "descriptor SHA256 digest.");
         return;
       }
@@ -947,7 +951,7 @@ public class SanitizedBridgesWriter extends Thread {
         bw.close();
       }
     } catch (IOException e) {
-      this.logger.log(Level.WARNING, "Could not write sanitized server "
+      this.logger.warn("Could not write sanitized server "
           + "descriptor to disk.", e);
       return;
     }
@@ -957,26 +961,26 @@ public class SanitizedBridgesWriter extends Thread {
       String identityEd25519Base64) {
     byte[] identityEd25519 = Base64.decodeBase64(identityEd25519Base64);
     if (identityEd25519.length < 40) {
-      this.logger.warning("Invalid length of identity-ed25519 (in "
+      this.logger.warn("Invalid length of identity-ed25519 (in "
           + "bytes): " + identityEd25519.length);
     } else if (identityEd25519[0] != 0x01) {
-      this.logger.warning("Unknown version in identity-ed25519: "
+      this.logger.warn("Unknown version in identity-ed25519: "
           + identityEd25519[0]);
     } else if (identityEd25519[1] != 0x04) {
-      this.logger.warning("Unknown cert type in identity-ed25519: "
+      this.logger.warn("Unknown cert type in identity-ed25519: "
           + identityEd25519[1]);
     } else if (identityEd25519[6] != 0x01) {
-      this.logger.warning("Unknown certified key type in "
+      this.logger.warn("Unknown certified key type in "
           + "identity-ed25519: " + identityEd25519[1]);
     } else if (identityEd25519[39] == 0x00) {
-      this.logger.warning("No extensions in identity-ed25519 (which "
+      this.logger.warn("No extensions in identity-ed25519 (which "
           + "would contain the encoded master-key-ed25519): "
           + identityEd25519[39]);
     } else {
       int extensionStart = 40;
       for (int i = 0; i < (int) identityEd25519[39]; i++) {
         if (identityEd25519.length < extensionStart + 4) {
-          this.logger.warning("Invalid extension with id " + i
+          this.logger.warn("Invalid extension with id " + i
               + " in identity-ed25519.");
           break;
         }
@@ -986,7 +990,7 @@ public class SanitizedBridgesWriter extends Thread {
         int extensionType = identityEd25519[extensionStart + 2];
         if (extensionLength == 32 && extensionType == 4) {
           if (identityEd25519.length < extensionStart + 4 + 32) {
-            this.logger.warning("Invalid extension with id " + i
+            this.logger.warn("Invalid extension with id " + i
                 + " in identity-ed25519.");
             break;
           }
@@ -1002,7 +1006,7 @@ public class SanitizedBridgesWriter extends Thread {
         extensionStart += 4 + extensionLength;
       }
     }
-    this.logger.warning("Unable to locate master-key-ed25519 in "
+    this.logger.warn("Unable to locate master-key-ed25519 in "
         + "identity-ed25519.");
     return null;
   }
@@ -1050,7 +1054,7 @@ public class SanitizedBridgesWriter extends Thread {
          * name. */
         } else if (line.startsWith("transport ")) {
           if (parts.length < 3) {
-            this.logger.fine("Illegal line in extra-info descriptor: '"
+            this.logger.debug("Illegal line in extra-info descriptor: '"
                 + line + "'.  Skipping descriptor.");
             return;
           }
@@ -1080,7 +1084,7 @@ public class SanitizedBridgesWriter extends Thread {
               + "\n");
           if (masterKeyEd25519 != null && !masterKeyEd25519.equals(
               masterKeyEd25519FromIdentityEd25519)) {
-            this.logger.warning("Mismatch between identity-ed25519 and "
+            this.logger.warn("Mismatch between identity-ed25519 and "
                 + "master-key-ed25519.  Skipping.");
             return;
           }
@@ -1091,7 +1095,7 @@ public class SanitizedBridgesWriter extends Thread {
           if (masterKeyEd25519FromIdentityEd25519 != null
               && !masterKeyEd25519FromIdentityEd25519.equals(
               masterKeyEd25519)) {
-            this.logger.warning("Mismatch between identity-ed25519 and "
+            this.logger.warn("Mismatch between identity-ed25519 and "
                 + "master-key-ed25519.  Skipping.");
             return;
           }
@@ -1128,18 +1132,18 @@ public class SanitizedBridgesWriter extends Thread {
          * that we need to remove or replace for the sanitized descriptor
          * version. */
         } else {
-          this.logger.warning("Unrecognized line '" + line
+          this.logger.warn("Unrecognized line '" + line
               + "'. Skipping.");
           return;
         }
       }
       br.close();
     } catch (IOException e) {
-      this.logger.log(Level.WARNING, "Could not parse extra-info "
+      this.logger.warn("Could not parse extra-info "
           + "descriptor.", e);
       return;
     } catch (DecoderException e) {
-      this.logger.log(Level.WARNING, "Could not parse extra-info "
+      this.logger.warn("Could not parse extra-info "
           + "descriptor.", e);
       return;
     }
@@ -1161,7 +1165,7 @@ public class SanitizedBridgesWriter extends Thread {
       /* Handle below. */
     }
     if (descriptorDigest == null) {
-      this.logger.log(Level.WARNING, "Could not calculate extra-info "
+      this.logger.warn("Could not calculate extra-info "
           + "descriptor digest.");
       return;
     }
@@ -1184,7 +1188,7 @@ public class SanitizedBridgesWriter extends Thread {
         /* Handle below. */
       }
       if (descriptorDigestSha256Base64 == null) {
-        this.logger.log(Level.WARNING, "Could not calculate extra-info "
+        this.logger.warn("Could not calculate extra-info "
             + "descriptor SHA256 digest.");
         return;
       }
@@ -1224,7 +1228,7 @@ public class SanitizedBridgesWriter extends Thread {
         bw.close();
       }
     } catch (Exception e) {
-      this.logger.log(Level.WARNING, "Could not write sanitized "
+      this.logger.warn("Could not write sanitized "
           + "extra-info descriptor to disk.", e);
     }
   }
@@ -1261,7 +1265,7 @@ public class SanitizedBridgesWriter extends Thread {
         this.logger.info("Deleted " + deleted + " secrets that we don't "
             + "need anymore and kept " + kept + ".");
       } catch (IOException e) {
-        this.logger.log(Level.WARNING, "Could not store reduced set of "
+        this.logger.warn("Could not store reduced set of "
             + "secrets to disk! This is a bad sign, better check what's "
             + "going on!", e);
       }
@@ -1278,7 +1282,7 @@ public class SanitizedBridgesWriter extends Thread {
           dateTimeFormat.parse(maxNetworkStatusPublishedTime).getTime();
       if (maxNetworkStatusPublishedMillis > 0L
           && maxNetworkStatusPublishedMillis < tooOldMillis) {
-        this.logger.warning("The last known bridge network status was "
+        this.logger.warn("The last known bridge network status was "
             + "published " + maxNetworkStatusPublishedTime + ", which is "
             + "more than 5:30 hours in the past.");
       }
@@ -1287,7 +1291,7 @@ public class SanitizedBridgesWriter extends Thread {
           .getTime();
       if (maxServerDescriptorPublishedMillis > 0L
           && maxServerDescriptorPublishedMillis < tooOldMillis) {
-        this.logger.warning("The last known bridge server descriptor was "
+        this.logger.warn("The last known bridge server descriptor was "
             + "published " + maxServerDescriptorPublishedTime + ", which "
             + "is more than 5:30 hours in the past.");
       }
@@ -1296,12 +1300,12 @@ public class SanitizedBridgesWriter extends Thread {
           .getTime();
       if (maxExtraInfoDescriptorPublishedMillis > 0L
           && maxExtraInfoDescriptorPublishedMillis < tooOldMillis) {
-        this.logger.warning("The last known bridge extra-info descriptor "
+        this.logger.warn("The last known bridge extra-info descriptor "
             + "was published " + maxExtraInfoDescriptorPublishedTime
             + ", which is more than 5:30 hours in the past.");
       }
     } catch (ParseException e) {
-      this.logger.log(Level.WARNING, "Unable to parse timestamp for "
+      this.logger.warn("Unable to parse timestamp for "
           + "stale check.", e);
     }
   }
diff --git a/src/main/java/org/torproject/collector/conf/Configuration.java b/src/main/java/org/torproject/collector/conf/Configuration.java
index 8b8cc12..2166402 100644
--- a/src/main/java/org/torproject/collector/conf/Configuration.java
+++ b/src/main/java/org/torproject/collector/conf/Configuration.java
@@ -11,8 +11,6 @@ import java.nio.file.Paths;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Properties;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 /**
  * Initialize configuration with defaults from collector.properties,
diff --git a/src/main/java/org/torproject/collector/exitlists/ExitListDownloader.java b/src/main/java/org/torproject/collector/exitlists/ExitListDownloader.java
index 53fc300..65d7b87 100644
--- a/src/main/java/org/torproject/collector/exitlists/ExitListDownloader.java
+++ b/src/main/java/org/torproject/collector/exitlists/ExitListDownloader.java
@@ -13,6 +13,9 @@ import org.torproject.descriptor.DescriptorParser;
 import org.torproject.descriptor.DescriptorSourceFactory;
 import org.torproject.descriptor.ExitList;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.BufferedInputStream;
 import java.io.BufferedWriter;
 import java.io.File;
@@ -28,13 +31,10 @@ import java.util.SortedSet;
 import java.util.Stack;
 import java.util.TimeZone;
 import java.util.TreeSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 public class ExitListDownloader extends Thread {
 
-  private static Logger logger =
-      Logger.getLogger(ExitListDownloader.class.getName());
+  private static Logger logger = LoggerFactory.getLogger(ExitListDownloader.class);
 
   public static void main(Configuration config) throws ConfigurationException {
     logger.info("Starting exit-lists module of CollecTor.");
@@ -58,7 +58,7 @@ public class ExitListDownloader extends Thread {
     try {
       startProcessing();
     } catch (ConfigurationException ce) {
-      logger.severe("Configuration failed: " + ce);
+      logger.error("Configuration failed: " + ce, ce);
       throw new RuntimeException(ce);
     }
   }
@@ -72,7 +72,7 @@ public class ExitListDownloader extends Thread {
     Date downloadedDate = new Date();
     String downloadedExitList = null;
     try {
-      logger.fine("Downloading exit list...");
+      logger.debug("Downloading exit list...");
       StringBuilder sb = new StringBuilder();
       sb.append("@type tordnsel 1.0\n");
       sb.append("Downloaded " + dateTimeFormat.format(downloadedDate)
@@ -85,7 +85,7 @@ public class ExitListDownloader extends Thread {
       huc.connect();
       int response = huc.getResponseCode();
       if (response != 200) {
-        logger.warning("Could not download exit list. Response code "
+        logger.warn("Could not download exit list. Response code "
             + response);
         return;
       }
@@ -98,13 +98,13 @@ public class ExitListDownloader extends Thread {
       }   
       in.close();
       downloadedExitList = sb.toString();
-      logger.fine("Finished downloading exit list.");
+      logger.debug("Finished downloading exit list.");
     } catch (IOException e) {
-      logger.log(Level.WARNING, "Failed downloading exit list", e);
+      logger.warn("Failed downloading exit list", e);
       return;
     }
     if (downloadedExitList == null) {
-      logger.warning("Failed downloading exit list");
+      logger.warn("Failed downloading exit list");
       return;
     }
 
@@ -123,7 +123,7 @@ public class ExitListDownloader extends Thread {
           tarballFile.getName());
       if (parsedDescriptors.size() != 1
           || !(parsedDescriptors.get(0) instanceof ExitList)) {
-        logger.warning("Could not parse downloaded exit list");
+        logger.warn("Could not parse downloaded exit list");
         return;
       }
       ExitList parsedExitList = (ExitList) parsedDescriptors.get(0);
@@ -133,12 +133,12 @@ public class ExitListDownloader extends Thread {
         }
       }
     } catch (DescriptorParseException e) {
-      logger.log(Level.WARNING, "Could not parse downloaded exit list",
+      logger.warn("Could not parse downloaded exit list",
           e);
     }
     if (maxScanMillis > 0L
         && maxScanMillis + 330L * 60L * 1000L < System.currentTimeMillis()) {
-      logger.warning("The last reported scan in the downloaded exit list "
+      logger.warn("The last reported scan in the downloaded exit list "
           + "took place at " + dateTimeFormat.format(maxScanMillis)
           + ", which is more than 5:30 hours in the past.");
     }
@@ -155,7 +155,7 @@ public class ExitListDownloader extends Thread {
         bw.write(downloadedExitList);
         bw.close();
       } catch (IOException e) {
-        logger.log(Level.WARNING, "Could not write downloaded exit list "
+        logger.warn("Could not write downloaded exit list "
             + "to " + outputFile.getAbsolutePath(), e);
       }
     }
diff --git a/src/main/java/org/torproject/collector/index/CreateIndexJson.java b/src/main/java/org/torproject/collector/index/CreateIndexJson.java
index de69488..639a4be 100644
--- a/src/main/java/org/torproject/collector/index/CreateIndexJson.java
+++ b/src/main/java/org/torproject/collector/index/CreateIndexJson.java
@@ -13,6 +13,9 @@ import com.google.gson.GsonBuilder;
 import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream;
 import org.apache.commons.compress.compressors.xz.XZCompressorOutputStream;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileOutputStream;
diff --git a/src/main/java/org/torproject/collector/main/LockFile.java b/src/main/java/org/torproject/collector/main/LockFile.java
index f168bc3..0931d1f 100644
--- a/src/main/java/org/torproject/collector/main/LockFile.java
+++ b/src/main/java/org/torproject/collector/main/LockFile.java
@@ -3,19 +3,21 @@
 
 package org.torproject.collector.main;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.BufferedReader;
 import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileReader;
 import java.io.FileWriter;
 import java.io.IOException;
-import java.util.logging.Logger;
 
 public class LockFile {
 
   private final File lockFile;
   private final String moduleName;
-  private final Logger logger = Logger.getLogger(LockFile.class.getName());
+  private final Logger logger = LoggerFactory.getLogger(LockFile.class);
 
   public LockFile(String moduleName) {
     this("lock", moduleName);
@@ -27,7 +29,7 @@ public class LockFile {
   }
 
   public boolean acquireLock() {
-    this.logger.fine("Trying to acquire lock...");
+    this.logger.debug("Trying to acquire lock...");
     try {
       if (this.lockFile.exists()) {
         BufferedReader br = new BufferedReader(new FileReader(
@@ -43,7 +45,7 @@ public class LockFile {
           this.lockFile));
       bw.append("" + System.currentTimeMillis() + "\n");
       bw.close();
-      this.logger.fine("Acquired lock.");
+      this.logger.debug("Acquired lock.");
       return true;
     } catch (IOException e) {
       throw new RuntimeException("Caught exception while trying to acquire "
@@ -52,9 +54,9 @@ public class LockFile {
   }
 
   public void releaseLock() {
-    this.logger.fine("Releasing lock...");
+    this.logger.debug("Releasing lock...");
     this.lockFile.delete();
-    this.logger.fine("Released lock.");
+    this.logger.debug("Released lock.");
   }
 }
 
diff --git a/src/main/java/org/torproject/collector/relaydescs/ArchiveReader.java b/src/main/java/org/torproject/collector/relaydescs/ArchiveReader.java
index 72f8231..c1981cc 100644
--- a/src/main/java/org/torproject/collector/relaydescs/ArchiveReader.java
+++ b/src/main/java/org/torproject/collector/relaydescs/ArchiveReader.java
@@ -7,6 +7,9 @@ import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.BufferedInputStream;
 import java.io.BufferedReader;
 import java.io.BufferedWriter;
@@ -30,8 +33,6 @@ import java.util.SortedSet;
 import java.util.Stack;
 import java.util.TimeZone;
 import java.util.TreeSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 /**
  * Read in all files in a given directory and pass buffered readers of
@@ -53,7 +54,7 @@ public class ArchiveReader {
     rdp.setArchiveReader(this);
     int parsedFiles = 0;
     int ignoredFiles = 0;
-    Logger logger = Logger.getLogger(ArchiveReader.class.getName());
+    Logger logger = LoggerFactory.getLogger(ArchiveReader.class);
     SortedSet<String> archivesImportHistory = new TreeSet<String>();
     File archivesImportHistoryFile = new File(statsDirectory,
         "archives-import-history");
@@ -67,12 +68,12 @@ public class ArchiveReader {
         }
         br.close();
       } catch (IOException e) {
-        logger.log(Level.WARNING, "Could not read in archives import "
-            + "history file. Skipping.");
+        logger.warn("Could not read in archives import "
+            + "history file. Skipping.", e);
       }
     }
     if (archivesDirectory.exists()) {
-      logger.fine("Importing files in directory " + archivesDirectory
+      logger.debug("Importing files in directory " + archivesDirectory
           + "/...");
       Stack<File> filesInInputDir = new Stack<File>();
       filesInInputDir.add(archivesDirectory);
@@ -93,7 +94,7 @@ public class ArchiveReader {
                 ignoredFiles++;
                 continue;
               } else if (pop.getName().endsWith(".tar.bz2")) {
-                logger.warning("Cannot parse compressed tarball "
+                logger.warn("Cannot parse compressed tarball "
                     + pop.getAbsolutePath() + ". Skipping.");
                 continue;
               } else if (pop.getName().endsWith(".bz2")) {
@@ -165,12 +166,12 @@ public class ArchiveReader {
             } while (line != null && line.startsWith("@"));
             br.close();
             if (line == null) {
-              logger.fine("We were given an empty descriptor for "
+              logger.debug("We were given an empty descriptor for "
                   + "parsing. Ignoring.");
               continue;
             }
             if (!line.equals("onion-key")) {
-              logger.fine("Skipping non-recognized descriptor.");
+              logger.debug("Skipping non-recognized descriptor.");
               continue;
             }
             SimpleDateFormat parseFormat =
@@ -204,7 +205,7 @@ public class ArchiveReader {
               String digest256Hex = DigestUtils.sha256Hex(descBytes);
               if (!this.microdescriptorValidAfterTimes.containsKey(
                   digest256Hex)) {
-                logger.fine("Could not store microdescriptor '"
+                logger.debug("Could not store microdescriptor '"
                     + digest256Hex + "', which was not contained in a "
                     + "microdesc consensus.");
                 continue;
@@ -217,7 +218,7 @@ public class ArchiveReader {
                   rdp.storeMicrodescriptor(descBytes, digest256Hex,
                       digest256Base64, validAfter);
                 } catch (ParseException e) {
-                  logger.log(Level.WARNING, "Could not parse "
+                  logger.warn("Could not parse "
                       + "valid-after time '" + validAfterTime + "'. Not "
                       + "storing microdescriptor.", e);
                 }
@@ -236,7 +237,7 @@ public class ArchiveReader {
         }
       }
       if (problems.isEmpty()) {
-        logger.fine("Finished importing files in directory "
+        logger.debug("Finished importing files in directory "
             + archivesDirectory + "/.");
       } else {
         StringBuilder sb = new StringBuilder("Failed importing files in "
@@ -261,7 +262,7 @@ public class ArchiveReader {
         }
         bw.close();
       } catch (IOException e) {
-        logger.log(Level.WARNING, "Could not write archives import "
+        logger.warn("Could not write archives import "
             + "history file.");
       }
     }
diff --git a/src/main/java/org/torproject/collector/relaydescs/ArchiveWriter.java b/src/main/java/org/torproject/collector/relaydescs/ArchiveWriter.java
index 43c7975..6495df6 100644
--- a/src/main/java/org/torproject/collector/relaydescs/ArchiveWriter.java
+++ b/src/main/java/org/torproject/collector/relaydescs/ArchiveWriter.java
@@ -11,6 +11,9 @@ import org.torproject.descriptor.DescriptorParseException;
 import org.torproject.descriptor.DescriptorParser;
 import org.torproject.descriptor.DescriptorSourceFactory;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.BufferedOutputStream;
 import java.io.BufferedReader;
 import java.io.BufferedWriter;
@@ -35,12 +38,10 @@ import java.util.SortedSet;
 import java.util.Stack;
 import java.util.TimeZone;
 import java.util.TreeMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 public class ArchiveWriter extends Thread {
 
-  private static Logger logger = Logger.getLogger(ArchiveWriter.class.getName());
+  private static Logger logger = LoggerFactory.getLogger(ArchiveWriter.class);
 
   private Configuration config;
 
@@ -145,7 +146,7 @@ public class ArchiveWriter extends Thread {
     try {
       startProcessing();
     } catch (ConfigurationException ce) {
-      logger.severe("Configuration failed: " + ce);
+      logger.error("Configuration failed: " + ce, ce);
       throw new RuntimeException(ce);
     }
   }
@@ -227,7 +228,7 @@ public class ArchiveWriter extends Thread {
         while ((line = br.readLine()) != null) {
           String[] parts = line.split(",");
           if (parts.length != 3) {
-            this.logger.warning("Could not load server descriptor "
+            this.logger.warn("Could not load server descriptor "
                 + "digests because of illegal line '" + line + "'.  We "
                 + "might not be able to correctly check descriptors for "
                 + "completeness.");
@@ -256,7 +257,7 @@ public class ArchiveWriter extends Thread {
         while ((line = br.readLine()) != null) {
           String[] parts = line.split(",");
           if (parts.length != 2) {
-            this.logger.warning("Could not load extra-info descriptor "
+            this.logger.warn("Could not load extra-info descriptor "
                 + "digests because of illegal line '" + line + "'.  We "
                 + "might not be able to correctly check descriptors for "
                 + "completeness.");
@@ -283,7 +284,7 @@ public class ArchiveWriter extends Thread {
         while ((line = br.readLine()) != null) {
           String[] parts = line.split(",");
           if (parts.length != 2) {
-            this.logger.warning("Could not load microdescriptor digests "
+            this.logger.warn("Could not load microdescriptor digests "
                 + "because of illegal line '" + line + "'.  We might not "
                 + "be able to correctly check descriptors for "
                 + "completeness.");
@@ -304,11 +305,11 @@ public class ArchiveWriter extends Thread {
         br.close();
       }
     } catch (ParseException e) {
-      this.logger.log(Level.WARNING, "Could not load descriptor "
+      this.logger.warn("Could not load descriptor "
           + "digests.  We might not be able to correctly check "
           + "descriptors for completeness.", e);
     } catch (IOException e) {
-      this.logger.log(Level.WARNING, "Could not load descriptor "
+      this.logger.warn("Could not load descriptor "
           + "digests.  We might not be able to correctly check "
           + "descriptors for completeness.", e);
     }
@@ -494,7 +495,7 @@ public class ArchiveWriter extends Thread {
     }
     this.logger.info(sb.toString());
     if (missingDescriptors) {
-      this.logger.fine("We are missing at least 0.5% of server or "
+      this.logger.debug("We are missing at least 0.5% of server or "
           + "extra-info descriptors referenced from a consensus or "
           + "vote or at least 0.5% of microdescriptors referenced from a "
           + "microdesc consensus.");
@@ -502,13 +503,13 @@ public class ArchiveWriter extends Thread {
     if (missingVotes) {
       /* TODO Shouldn't warn if we're not trying to archive votes at
        * all. */
-      this.logger.fine("We are missing at least one vote that was "
+      this.logger.debug("We are missing at least one vote that was "
           + "referenced from a consensus.");
     }
     if (missingMicrodescConsensus) {
       /* TODO Shouldn't warn if we're not trying to archive microdesc
        * consensuses at all. */
-      this.logger.fine("We are missing at least one microdesc "
+      this.logger.debug("We are missing at least one microdesc "
           + "consensus that was published together with a known "
           + "consensus.");
     }
@@ -521,14 +522,14 @@ public class ArchiveWriter extends Thread {
     long tooOldMillis = this.now - 330L * 60L * 1000L;
     if (!this.storedConsensuses.isEmpty()
         && this.storedConsensuses.lastKey() < tooOldMillis) {
-      this.logger.warning("The last known relay network status "
+      this.logger.warn("The last known relay network status "
           + "consensus was valid after "
           + dateTimeFormat.format(this.storedConsensuses.lastKey())
           + ", which is more than 5:30 hours in the past.");
     }
     if (!this.storedMicrodescConsensuses.isEmpty()
         && this.storedMicrodescConsensuses.lastKey() < tooOldMillis) {
-      this.logger.warning("The last known relay network status "
+      this.logger.warn("The last known relay network status "
           + "microdesc consensus was valid after "
           + dateTimeFormat.format(
           this.storedMicrodescConsensuses.lastKey())
@@ -536,28 +537,28 @@ public class ArchiveWriter extends Thread {
     }
     if (!this.storedVotes.isEmpty()
         && this.storedVotes.lastKey() < tooOldMillis) {
-      this.logger.warning("The last known relay network status vote "
+      this.logger.warn("The last known relay network status vote "
           + "was valid after " + dateTimeFormat.format(
           this.storedVotes.lastKey()) + ", which is more than 5:30 hours "
           + "in the past.");
     }
     if (!this.storedServerDescriptors.isEmpty()
         && this.storedServerDescriptors.lastKey() < tooOldMillis) {
-      this.logger.warning("The last known relay server descriptor was "
+      this.logger.warn("The last known relay server descriptor was "
           + "published at "
           + dateTimeFormat.format(this.storedServerDescriptors.lastKey())
           + ", which is more than 5:30 hours in the past.");
     }
     if (!this.storedExtraInfoDescriptors.isEmpty()
         && this.storedExtraInfoDescriptors.lastKey() < tooOldMillis) {
-      this.logger.warning("The last known relay extra-info descriptor "
+      this.logger.warn("The last known relay extra-info descriptor "
           + "was published at " + dateTimeFormat.format(
           this.storedExtraInfoDescriptors.lastKey())
           + ", which is more than 5:30 hours in the past.");
     }
     if (!this.storedMicrodescriptors.isEmpty()
         && this.storedMicrodescriptors.lastKey() < tooOldMillis) {
-      this.logger.warning("The last known relay microdescriptor was "
+      this.logger.warn("The last known relay microdescriptor was "
           + "contained in a microdesc consensus that was valid after "
           + dateTimeFormat.format(this.storedMicrodescriptors.lastKey())
           + ", which is more than 5:30 hours in the past.");
@@ -637,7 +638,7 @@ public class ArchiveWriter extends Thread {
       }
       bw.close();
     } catch (IOException e) {
-      this.logger.log(Level.WARNING, "Could not save descriptor "
+      this.logger.warn("Could not save descriptor "
           + "digests.  We might not be able to correctly check "
           + "descriptors for completeness in the next run.", e);
     }
@@ -825,7 +826,7 @@ public class ArchiveWriter extends Thread {
   private boolean store(byte[] typeAnnotation, byte[] data,
       File[] outputFiles, boolean[] append) {
     try {
-      this.logger.finer("Storing " + outputFiles[0]);
+      this.logger.trace("Storing " + outputFiles[0]);
       if (this.descriptorParser.parseDescriptors(data,
           outputFiles[0].getName()).size() != 1) {
         this.logger.info("Relay descriptor file " + outputFiles[0]
@@ -846,10 +847,10 @@ public class ArchiveWriter extends Thread {
       }
       return true;
     } catch (DescriptorParseException e) {
-      this.logger.log(Level.WARNING, "Could not parse relay descriptor "
+      this.logger.warn("Could not parse relay descriptor "
           + outputFiles[0] + " before storing it to disk.  Skipping.", e);
     } catch (IOException e) {
-      this.logger.log(Level.WARNING, "Could not store relay descriptor "
+      this.logger.warn("Could not store relay descriptor "
           + outputFiles[0], e);
     }
     return false;
diff --git a/src/main/java/org/torproject/collector/relaydescs/CachedRelayDescriptorReader.java b/src/main/java/org/torproject/collector/relaydescs/CachedRelayDescriptorReader.java
index 00eeab1..6bee6d6 100644
--- a/src/main/java/org/torproject/collector/relaydescs/CachedRelayDescriptorReader.java
+++ b/src/main/java/org/torproject/collector/relaydescs/CachedRelayDescriptorReader.java
@@ -6,6 +6,9 @@ package org.torproject.collector.relaydescs;
 import org.apache.commons.codec.binary.Hex;
 import org.apache.commons.codec.digest.DigestUtils;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.BufferedInputStream;
 import java.io.BufferedReader;
 import java.io.BufferedWriter;
@@ -26,8 +29,6 @@ import java.util.SortedSet;
 import java.util.Stack;
 import java.util.TimeZone;
 import java.util.TreeSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 /**
  * Parses all descriptors in local directory cacheddesc/ and sorts them
@@ -44,8 +45,7 @@ public class CachedRelayDescriptorReader {
 
     StringBuilder dumpStats = new StringBuilder("Finished importing "
         + "relay descriptors from local Tor data directories:");
-    Logger logger = Logger.getLogger(
-        CachedRelayDescriptorReader.class.getName());
+    Logger logger = LoggerFactory.getLogger(CachedRelayDescriptorReader.class);
 
     /* Read import history containing SHA-1 digests of previously parsed
      * statuses and descriptors, so that we can skip them in this run. */
@@ -63,7 +63,7 @@ public class CachedRelayDescriptorReader {
         }
         br.close();
       } catch (IOException e) {
-        logger.log(Level.WARNING, "Could not read import history from "
+        logger.warn("Could not read import history from "
             + importHistoryFile.getAbsolutePath() + ".", e);
       }
     }
@@ -72,11 +72,11 @@ public class CachedRelayDescriptorReader {
     for (String inputDirectory : inputDirectories) {
       File cachedDescDir = new File(inputDirectory);
       if (!cachedDescDir.exists()) {
-        logger.warning("Directory " + cachedDescDir.getAbsolutePath()
+        logger.warn("Directory " + cachedDescDir.getAbsolutePath()
             + " does not exist. Skipping.");
         continue;
       }
-      logger.fine("Reading " + cachedDescDir.getAbsolutePath()
+      logger.debug("Reading " + cachedDescDir.getAbsolutePath()
           + " directory.");
       SortedSet<File> cachedDescFiles = new TreeSet<File>();
       Stack<File> files = new Stack<File>();
@@ -118,7 +118,7 @@ public class CachedRelayDescriptorReader {
                 if (dateTimeFormat.parse(line.substring("valid-after "
                     .length())).getTime() < System.currentTimeMillis()
                     - 6L * 60L * 60L * 1000L) {
-                  logger.warning("Cached descriptor files in "
+                  logger.warn("Cached descriptor files in "
                       + cachedDescDir.getAbsolutePath() + " are stale. "
                       + "The valid-after line in cached-consensus is '"
                       + line + "'.");
@@ -224,14 +224,14 @@ public class CachedRelayDescriptorReader {
                 ? "server" : "extra-info") + " descriptors");
           }
         } catch (IOException e) {
-          logger.log(Level.WARNING, "Failed reading "
+          logger.warn("Failed reading "
               + cachedDescDir.getAbsolutePath() + " directory.", e);
         } catch (ParseException e) {
-          logger.log(Level.WARNING, "Failed reading "
+          logger.warn("Failed reading "
               + cachedDescDir.getAbsolutePath() + " directory.", e);
         }
       }
-      logger.fine("Finished reading "
+      logger.debug("Finished reading "
           + cachedDescDir.getAbsolutePath() + " directory.");
     }
 
@@ -245,7 +245,7 @@ public class CachedRelayDescriptorReader {
       }
       bw.close();
     } catch (IOException e) {
-      logger.log(Level.WARNING, "Could not write import history to "
+      logger.warn("Could not write import history to "
            + importHistoryFile.getAbsolutePath() + ".", e);
     }
 
diff --git a/src/main/java/org/torproject/collector/relaydescs/ReferenceChecker.java b/src/main/java/org/torproject/collector/relaydescs/ReferenceChecker.java
index 9f0f183..0255163 100644
--- a/src/main/java/org/torproject/collector/relaydescs/ReferenceChecker.java
+++ b/src/main/java/org/torproject/collector/relaydescs/ReferenceChecker.java
@@ -17,6 +17,9 @@ import org.torproject.descriptor.ServerDescriptor;
 
 import com.google.gson.Gson;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.File;
 import java.io.FileReader;
 import java.io.FileWriter;
@@ -31,12 +34,10 @@ import java.util.Set;
 import java.util.SortedSet;
 import java.util.TimeZone;
 import java.util.TreeSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 public class ReferenceChecker {
 
-  private Logger log = Logger.getLogger(ReferenceChecker.class.getName());
+  private Logger log = LoggerFactory.getLogger(ReferenceChecker.class);
 
   private File descriptorsDir;
 
@@ -141,7 +142,7 @@ public class ReferenceChecker {
           Reference[].class)));
       fr.close();
     } catch (IOException e) {
-      this.log.log(Level.WARNING, "Cannot read existing references file "
+      this.log.warn("Cannot read existing references file "
           + "from previous run.", e);
     }
   }
@@ -297,9 +298,9 @@ public class ReferenceChecker {
             totalMissingDescriptorsWeight));
       }
     }
-    this.log.log(Level.INFO, sb.toString());
+    this.log.info(sb.toString());
     if (totalMissingDescriptorsWeight > 0.999) {
-      this.log.log(Level.WARNING, "Missing too many referenced "
+      this.log.warn("Missing too many referenced "
           + "descriptors (" + totalMissingDescriptorsWeight + ").");
     }
   }
@@ -311,7 +312,7 @@ public class ReferenceChecker {
       gson.toJson(this.references, fw);
       fw.close();
     } catch (IOException e) {
-      this.log.log(Level.WARNING, "Cannot write references file for next "
+      this.log.warn("Cannot write references file for next "
           + "run.", e);
     }
   }
diff --git a/src/main/java/org/torproject/collector/relaydescs/RelayDescriptorDownloader.java b/src/main/java/org/torproject/collector/relaydescs/RelayDescriptorDownloader.java
index bd0a482..fe3d504 100644
--- a/src/main/java/org/torproject/collector/relaydescs/RelayDescriptorDownloader.java
+++ b/src/main/java/org/torproject/collector/relaydescs/RelayDescriptorDownloader.java
@@ -6,6 +6,9 @@ package org.torproject.collector.relaydescs;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.codec.digest.DigestUtils;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.BufferedInputStream;
 import java.io.BufferedReader;
 import java.io.BufferedWriter;
@@ -31,8 +34,6 @@ import java.util.SortedSet;
 import java.util.TimeZone;
 import java.util.TreeMap;
 import java.util.TreeSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 import java.util.zip.InflaterInputStream;
 
 /**
@@ -319,8 +320,7 @@ public class RelayDescriptorDownloader {
     Collections.shuffle(this.authorities);
 
     /* Initialize logger. */
-    this.logger = Logger.getLogger(
-        RelayDescriptorDownloader.class.getName());
+    this.logger = LoggerFactory.getLogger(RelayDescriptorDownloader.class);
 
     /* Prepare cut-off times and timestamp for the missing descriptors
      * list and the list of authorities to download all server and
@@ -345,7 +345,7 @@ public class RelayDescriptorDownloader {
         "stats/missing-relay-descriptors");
     if (this.missingDescriptorsFile.exists()) {
       try {
-        this.logger.fine("Reading file "
+        this.logger.debug("Reading file "
             + this.missingDescriptorsFile.getAbsolutePath() + "...");
         BufferedReader br = new BufferedReader(new FileReader(
             this.missingDescriptorsFile));
@@ -396,16 +396,16 @@ public class RelayDescriptorDownloader {
               }
             }
           } else {
-            this.logger.fine("Invalid line '" + line + "' in "
+            this.logger.debug("Invalid line '" + line + "' in "
                 + this.missingDescriptorsFile.getAbsolutePath()
                 + ". Ignoring.");
           }
         }
         br.close();
-        this.logger.fine("Finished reading file "
+        this.logger.debug("Finished reading file "
             + this.missingDescriptorsFile.getAbsolutePath() + ".");
       } catch (IOException e) {
-        this.logger.log(Level.WARNING, "Failed to read file "
+        this.logger.warn("Failed to read file "
             + this.missingDescriptorsFile.getAbsolutePath()
             + "! This means that we might forget to dowload relay "
             + "descriptors we are missing.", e);
@@ -419,7 +419,7 @@ public class RelayDescriptorDownloader {
         "stats/last-downloaded-all-descriptors");
     if (this.lastDownloadedAllDescriptorsFile.exists()) {
       try {
-        this.logger.fine("Reading file "
+        this.logger.debug("Reading file "
             + this.lastDownloadedAllDescriptorsFile.getAbsolutePath()
             + "...");
         BufferedReader br = new BufferedReader(new FileReader(
@@ -427,7 +427,7 @@ public class RelayDescriptorDownloader {
         String line;
         while ((line = br.readLine()) != null) {
           if (line.split(",").length != 2) {
-            this.logger.fine("Invalid line '" + line + "' in "
+            this.logger.debug("Invalid line '" + line + "' in "
                 + this.lastDownloadedAllDescriptorsFile.getAbsolutePath()
                 + ". Ignoring.");
           } else {
@@ -439,11 +439,11 @@ public class RelayDescriptorDownloader {
           }
         }
         br.close();
-        this.logger.fine("Finished reading file "
+        this.logger.debug("Finished reading file "
             + this.lastDownloadedAllDescriptorsFile.getAbsolutePath()
             + ".");
       } catch (IOException e) {
-        this.logger.log(Level.WARNING, "Failed to read file "
+        this.logger.warn("Failed to read file "
             + this.lastDownloadedAllDescriptorsFile.getAbsolutePath()
             + "! This means that we might download all server and "
             + "extra-info descriptors more often than we should.", e);
@@ -842,8 +842,7 @@ public class RelayDescriptorDownloader {
       /* If a download failed, stop requesting descriptors from this
        * authority and move on to the next. */
       } catch (IOException e) {
-        logger.log(Level.FINE, "Failed downloading from " + authority
-            + "!", e);
+        logger.debug("Failed downloading from " + authority + "!", e);
       }
     }
   }
@@ -886,7 +885,7 @@ public class RelayDescriptorDownloader {
       in.close();
       allData = baos.toByteArray();
     }
-    logger.fine("Downloaded " + fullUrl + " -> " + response + " ("
+    logger.debug("Downloaded " + fullUrl + " -> " + response + " ("
         + (allData == null ? 0 : allData.length) + " bytes)");
     int receivedDescriptors = 0;
     if (allData != null) {
@@ -980,7 +979,7 @@ public class RelayDescriptorDownloader {
               this.rdp.storeMicrodescriptor(descBytes, digest256Hex,
                   digest256Base64, validAfter);
             } catch (ParseException e) {
-              this.logger.log(Level.WARNING, "Could not parse "
+              this.logger.warn("Could not parse "
                   + "valid-after time '" + validAfterTime + "' in "
                   + "microdescriptor key. Not storing microdescriptor.",
                   e);
@@ -1006,7 +1005,7 @@ public class RelayDescriptorDownloader {
     int missingServerDescriptors = 0;
     int missingExtraInfoDescriptors = 0;
     try {
-      this.logger.fine("Writing file "
+      this.logger.debug("Writing file "
           + this.missingDescriptorsFile.getAbsolutePath() + "...");
       this.missingDescriptorsFile.getParentFile().mkdirs();
       BufferedWriter bw = new BufferedWriter(new FileWriter(
@@ -1033,10 +1032,10 @@ public class RelayDescriptorDownloader {
         bw.write(key + "," + value + "\n");
       }
       bw.close();
-      this.logger.fine("Finished writing file "
+      this.logger.debug("Finished writing file "
           + this.missingDescriptorsFile.getAbsolutePath() + ".");
     } catch (IOException e) {
-      this.logger.log(Level.WARNING, "Failed writing "
+      this.logger.warn("Failed writing "
           + this.missingDescriptorsFile.getAbsolutePath() + "!", e);
     }
     int missingMicrodescriptors = this.missingMicrodescriptors.size();
@@ -1045,7 +1044,7 @@ public class RelayDescriptorDownloader {
      * last downloaded all server and extra-info descriptors from them to
      * disk. */
     try {
-      this.logger.fine("Writing file "
+      this.logger.debug("Writing file "
           + this.lastDownloadedAllDescriptorsFile.getAbsolutePath()
           + "...");
       this.lastDownloadedAllDescriptorsFile.getParentFile().mkdirs();
@@ -1058,11 +1057,11 @@ public class RelayDescriptorDownloader {
         bw.write(authority + "," + lastDownloaded + "\n");
       }
       bw.close();
-      this.logger.fine("Finished writing file "
+      this.logger.debug("Finished writing file "
           + this.lastDownloadedAllDescriptorsFile.getAbsolutePath()
           + ".");
     } catch (IOException e) {
-      this.logger.log(Level.WARNING, "Failed writing "
+      this.logger.warn("Failed writing "
           + this.lastDownloadedAllDescriptorsFile.getAbsolutePath() + "!",
           e);
     }
diff --git a/src/main/java/org/torproject/collector/relaydescs/RelayDescriptorParser.java b/src/main/java/org/torproject/collector/relaydescs/RelayDescriptorParser.java
index 3f9b912..125b32a 100644
--- a/src/main/java/org/torproject/collector/relaydescs/RelayDescriptorParser.java
+++ b/src/main/java/org/torproject/collector/relaydescs/RelayDescriptorParser.java
@@ -7,6 +7,9 @@ import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.codec.binary.Hex;
 import org.apache.commons.codec.digest.DigestUtils;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.StringReader;
@@ -15,8 +18,6 @@ import java.text.SimpleDateFormat;
 import java.util.SortedSet;
 import java.util.TimeZone;
 import java.util.TreeSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 /**
  * Parses relay descriptors including network status consensuses and
@@ -54,7 +55,7 @@ public class RelayDescriptorParser {
     this.aw = aw;
 
     /* Initialize logger. */
-    this.logger = Logger.getLogger(RelayDescriptorParser.class.getName());
+    this.logger = LoggerFactory.getLogger(RelayDescriptorParser.class);
 
     this.dateTimeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
     this.dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
@@ -82,7 +83,7 @@ public class RelayDescriptorParser {
         line = br.readLine();
       } while (line != null && line.startsWith("@"));
       if (line == null) {
-        this.logger.fine("We were given an empty descriptor for "
+        this.logger.debug("We were given an empty descriptor for "
             + "parsing. Ignoring.");
         return false;
       }
@@ -150,7 +151,7 @@ public class RelayDescriptorParser {
                   + lastRelayIdentity + "," + serverDesc);
               serverDescriptorDigests.add(serverDesc);
             } else {
-              this.logger.log(Level.WARNING, "Could not parse r line '"
+              this.logger.warn("Could not parse r line '"
                   + line + "' in descriptor. Skipping.");
               break;
             }
@@ -167,7 +168,7 @@ public class RelayDescriptorParser {
             } else if (parts.length != 3
                 || !parts[2].startsWith("sha256=")
                 || parts[2].length() != 50) {
-              this.logger.log(Level.WARNING, "Could not parse m line '"
+              this.logger.warn("Could not parse m line '"
                   + line + "' in descriptor. Skipping.");
               break;
             }
@@ -315,10 +316,10 @@ public class RelayDescriptorParser {
       }
       br.close();
     } catch (IOException e) {
-      this.logger.log(Level.WARNING, "Could not parse descriptor. "
+      this.logger.warn("Could not parse descriptor. "
           + "Skipping.", e);
     } catch (ParseException e) {
-      this.logger.log(Level.WARNING, "Could not parse descriptor. "
+      this.logger.warn("Could not parse descriptor. "
           + "Skipping.", e);
     }
     return stored;
diff --git a/src/main/java/org/torproject/collector/torperf/TorperfDownloader.java b/src/main/java/org/torproject/collector/torperf/TorperfDownloader.java
index c80f99e..53b1523 100644
--- a/src/main/java/org/torproject/collector/torperf/TorperfDownloader.java
+++ b/src/main/java/org/torproject/collector/torperf/TorperfDownloader.java
@@ -8,6 +8,9 @@ import org.torproject.collector.conf.ConfigurationException;
 import org.torproject.collector.conf.Key;
 import org.torproject.collector.main.LockFile;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.BufferedReader;
 import java.io.BufferedWriter;
 import java.io.File;
@@ -26,14 +29,12 @@ import java.util.SortedMap;
 import java.util.Stack;
 import java.util.TimeZone;
 import java.util.TreeMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 /* Download possibly truncated Torperf .data and .extradata files from
  * configured sources, append them to the files we already have, and merge
  * the two files into the .tpf format. */
 public class TorperfDownloader extends Thread {
-  private static Logger logger = Logger.getLogger(TorperfDownloader.class.getName());
+  private static Logger logger = LoggerFactory.getLogger(TorperfDownloader.class);
 
   public static void main(Configuration config) throws ConfigurationException {
     logger.info("Starting torperf module of CollecTor.");
@@ -66,7 +67,7 @@ public class TorperfDownloader extends Thread {
     try {
       startProcessing();
     } catch (ConfigurationException ce) {
-      logger.severe("Configuration failed: " + ce);
+      logger.error("Configuration failed: " + ce, ce);
       throw new RuntimeException(ce);
     }
   }
@@ -120,7 +121,7 @@ public class TorperfDownloader extends Thread {
           }
         }
         if (fileName == null || timestamp == null) {
-          this.logger.log(Level.WARNING, "Invalid line '" + line + "' in "
+          this.logger.warn("Invalid line '" + line + "' in "
               + this.torperfLastMergedFile.getAbsolutePath() + ".  "
               + "Ignoring past history of merging .data and .extradata "
               + "files.");
@@ -131,7 +132,7 @@ public class TorperfDownloader extends Thread {
       }
       br.close();
     } catch (IOException e) {
-      this.logger.log(Level.WARNING, "Error while reading '"
+      this.logger.warn("Error while reading '"
           + this.torperfLastMergedFile.getAbsolutePath() + ".  Ignoring "
-          + "past history of merging .data and .extradata files.");
+          + "past history of merging .data and .extradata files.", e);
       this.lastMergedTimestamps.clear();
@@ -151,7 +152,7 @@ public class TorperfDownloader extends Thread {
       }
       bw.close();
     } catch (IOException e) {
-      this.logger.log(Level.WARNING, "Error while writing '"
+      this.logger.warn("Error while writing '"
           + this.torperfLastMergedFile.getAbsolutePath() + ".  This may "
           + "result in ignoring history of merging .data and .extradata "
           + "files in the next execution.", e);
@@ -165,9 +166,9 @@ public class TorperfDownloader extends Thread {
     try {
       fileSize = Integer.parseInt(parts[1]);
     } catch (NumberFormatException e) {
-      this.logger.log(Level.WARNING, "Could not parse file size in "
+      this.logger.warn("Could not parse file size in "
           + "TorperfFiles configuration line '" + torperfFilesLine
-          + "'.");
+          + "'.", e);
       return;
     }
 
@@ -202,7 +203,7 @@ public class TorperfDownloader extends Thread {
       skipUntil = this.mergeFiles(dataOutputFile, extradataOutputFile,
           sourceName, fileSize, skipUntil);
     } catch (IOException e) {
-      this.logger.log(Level.WARNING, "Failed merging " + dataOutputFile
+      this.logger.warn("Failed merging " + dataOutputFile
           + " and " + extradataOutputFile + ".", e);
     }
     if (skipUntil != null) {
@@ -232,14 +233,14 @@ public class TorperfDownloader extends Thread {
         }
         br.close();
       } catch (IOException e) {
-        this.logger.log(Level.WARNING, "Failed reading '"
+        this.logger.warn("Failed reading '"
             + outputFile.getAbsolutePath() + "' to determine the first "
             + "line to append to it.", e);
         return false;
       }
     }
     try {
-      this.logger.fine("Downloading " + (isDataFile ? ".data" :
+      this.logger.debug("Downloading " + (isDataFile ? ".data" :
           ".extradata") + " file from '" + url + "' and merging it into "
           + "'" + outputFile.getAbsolutePath() + "'.");
       URL u = new URL(url);
@@ -267,19 +268,19 @@ public class TorperfDownloader extends Thread {
       bw.close();
       br.close();
       if (!copyLines) {
-        this.logger.warning("The last timestamp line in '"
+        this.logger.warn("The last timestamp line in '"
             + outputFile.getAbsolutePath() + "' is not contained in the "
             + "new file downloaded from '" + url + "'.  Cannot append "
             + "new lines without possibly leaving a gap.  Skipping.");
         return false;
       }
     } catch (IOException e) {
-      this.logger.log(Level.WARNING, "Failed downloading and/or merging '"
+      this.logger.warn("Failed downloading and/or merging '"
           + url + "'.", e);
       return false;
     }
     if (lastTimestampLine == null) {
-      this.logger.warning("'" + outputFile.getAbsolutePath()
+      this.logger.warn("'" + outputFile.getAbsolutePath()
           + "' doesn't contain any timestamp lines.  Unable to check "
           + "whether that file is stale or not.");
     } else {
@@ -295,7 +296,7 @@ public class TorperfDownloader extends Thread {
       }
       if (lastTimestampMillis < System.currentTimeMillis()
           - 330L * 60L * 1000L) {
-        this.logger.warning("The last timestamp in '"
+        this.logger.warn("The last timestamp in '"
             + outputFile.getAbsolutePath() + "' is more than 5:30 hours "
             + "old: " + lastTimestampMillis);
       }
@@ -309,11 +310,11 @@ public class TorperfDownloader extends Thread {
     config.put("SOURCE", source);
     config.put("FILESIZE", String.valueOf(fileSize));
     if (!dataFile.exists() || !extradataFile.exists()) {
-      this.logger.warning("File " + dataFile.getAbsolutePath() + " or "
+      this.logger.warn("File " + dataFile.getAbsolutePath() + " or "
           + extradataFile.getAbsolutePath() + " is missing.");
       return null;
     }
-    this.logger.fine("Merging " + dataFile.getAbsolutePath() + " and "
+    this.logger.debug("Merging " + dataFile.getAbsolutePath() + " and "
           + extradataFile.getAbsolutePath() + " into .tpf format.");
     BufferedReader brD = new BufferedReader(new FileReader(dataFile));
     BufferedReader brE = new BufferedReader(new FileReader(extradataFile));
@@ -329,14 +330,14 @@ public class TorperfDownloader extends Thread {
        * format, either with additional information from the .extradata
        * file or without it. */
       if (lineD.isEmpty()) {
-        this.logger.finer("Skipping empty line " + dataFile.getName()
+        this.logger.trace("Skipping empty line " + dataFile.getName()
             + ":" + d++ + ".");
         lineD = brD.readLine();
         continue;
       }
       SortedMap<String, String> data = this.parseDataLine(lineD);
       if (data == null) {
-        this.logger.finer("Skipping illegal line " + dataFile.getName()
+        this.logger.trace("Skipping illegal line " + dataFile.getName()
             + ":" + d++ + " '" + lineD + "'.");
         lineD = brD.readLine();
         continue;
@@ -344,7 +345,7 @@ public class TorperfDownloader extends Thread {
       String dataComplete = data.get("DATACOMPLETE");
       double dataCompleteSeconds = Double.parseDouble(dataComplete);
       if (skipUntil != null && dataComplete.compareTo(skipUntil) < 0) {
-        this.logger.finer("Skipping " + dataFile.getName() + ":"
+        this.logger.trace("Skipping " + dataFile.getName() + ":"
             + d++ + " which we already processed before.");
         lineD = brD.readLine();
         continue;
@@ -356,33 +357,33 @@ public class TorperfDownloader extends Thread {
       SortedMap<String, String> extradata = null;
       while (lineE != null) {
         if (lineE.isEmpty()) {
-          this.logger.finer("Skipping " + extradataFile.getName() + ":"
+          this.logger.trace("Skipping " + extradataFile.getName() + ":"
               + e++ + " which is empty.");
           lineE = brE.readLine();
           continue;
         }
         if (lineE.startsWith("BUILDTIMEOUT_SET ")) {
-          this.logger.finer("Skipping " + extradataFile.getName() + ":"
+          this.logger.trace("Skipping " + extradataFile.getName() + ":"
               + e++ + " which is a BUILDTIMEOUT_SET line.");
           lineE = brE.readLine();
           continue;
         } else if (lineE.startsWith("ok ")
             || lineE.startsWith("error ")) {
-          this.logger.finer("Skipping " + extradataFile.getName() + ":"
+          this.logger.trace("Skipping " + extradataFile.getName() + ":"
               + e++ + " which is in the old format.");
           lineE = brE.readLine();
           continue;
         }
         extradata = this.parseExtradataLine(lineE);
         if (extradata == null) {
-          this.logger.finer("Skipping Illegal line "
+          this.logger.trace("Skipping Illegal line "
               + extradataFile.getName() + ":" + e++ + " '" + lineE
               + "'.");
           lineE = brE.readLine();
           continue;
         }
         if (!extradata.containsKey("USED_AT")) {
-          this.logger.finer("Skipping " + extradataFile.getName() + ":"
+          this.logger.trace("Skipping " + extradataFile.getName() + ":"
               + e++ + " which doesn't contain a USED_AT element.");
           lineE = brE.readLine();
           continue;
@@ -390,24 +391,24 @@ public class TorperfDownloader extends Thread {
         String usedAt = extradata.get("USED_AT");
         double usedAtSeconds = Double.parseDouble(usedAt);
         if (skipUntil != null && usedAt.compareTo(skipUntil) < 0) {
-          this.logger.finer("Skipping " + extradataFile.getName() + ":"
+          this.logger.trace("Skipping " + extradataFile.getName() + ":"
               + e++ + " which we already processed before.");
           lineE = brE.readLine();
           continue;
         }
         maxUsedAt = usedAt;
         if (Math.abs(usedAtSeconds - dataCompleteSeconds) <= 1.0) {
-          this.logger.fine("Merging " + extradataFile.getName() + ":"
+          this.logger.debug("Merging " + extradataFile.getName() + ":"
               + e++ + " into the current .data line.");
           lineE = brE.readLine();
           break;
         } else if (usedAtSeconds > dataCompleteSeconds) {
-          this.logger.finer("Comparing " + extradataFile.getName()
+          this.logger.trace("Comparing " + extradataFile.getName()
               + " to the next .data line.");
           extradata = null;
           break;
         } else {
-          this.logger.finer("Skipping " + extradataFile.getName() + ":"
+          this.logger.trace("Skipping " + extradataFile.getName() + ":"
               + e++ + " which is too old to be merged with "
               + dataFile.getName() + ":" + d + ".");
           lineE = brE.readLine();
@@ -423,12 +424,12 @@ public class TorperfDownloader extends Thread {
       }
       keysAndValues.putAll(data);
       keysAndValues.putAll(config);
-      this.logger.fine("Writing " + dataFile.getName() + ":" + d++ + ".");
+      this.logger.debug("Writing " + dataFile.getName() + ":" + d++ + ".");
       lineD = brD.readLine();
       try {
         this.writeTpfLine(source, fileSize, keysAndValues);
       } catch (IOException ex) {
-        this.logger.log(Level.WARNING, "Error writing output line.  "
+        this.logger.warn("Error writing output line.  "
             + "Aborting to merge " + dataFile.getName() + " and "
-            + extradataFile.getName() + ".", e);
+            + extradataFile.getName() + ".", ex);
         break;
diff --git a/src/main/resources/logback.xml b/src/main/resources/logback.xml
new file mode 100644
index 0000000..1b78d58
--- /dev/null
+++ b/src/main/resources/logback.xml
@@ -0,0 +1,126 @@
+<configuration debug="false">
+
+  <!-- a path and a prefix -->
+  <property name="logfile-base" value="${LOGBASE}/collector-" />
+
+  <!-- log file names -->
+  <property name="fileall-logname" value="${logfile-base}all" />
+  <property name="file-bridgedescs-logname" value="${logfile-base}bridgedescs" />
+  <property name="file-exitlists-logname" value="${logfile-base}exitlists" />
+  <property name="file-relaydescs-logname" value="${logfile-base}relaydescs" />
+  <property name="file-torperf-logname" value="${logfile-base}torperf" />
+  <property name="file-updateindex-logname" value="${logfile-base}updateindex" />
+
+  <!-- date pattern -->
+  <property name="utc-date-pattern" value="%date{ISO8601, UTC}" />
+
+  <!-- appender section -->
+  <appender name="FILEALL" class="ch.qos.logback.core.rolling.RollingFileAppender">
+    <file>${fileall-logname}.log</file>
+    <encoder>
+      <pattern>${utc-date-pattern} %level %logger{20}:%line %msg%n</pattern>
+    </encoder>
+    <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+      <!-- rollover daily -->
+      <FileNamePattern>${fileall-logname}.%d{yyyy-MM-dd}.%i.log</FileNamePattern>
+      <maxHistory>10</maxHistory>
+      <timeBasedFileNamingAndTriggeringPolicy
+            class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
+        <!-- or whenever the file size reaches 1MB -->
+        <maxFileSize>1MB</maxFileSize>
+      </timeBasedFileNamingAndTriggeringPolicy>
+    </rollingPolicy>
+  </appender>
+
+  <appender name="FILEBRIDGEDESCS" class="ch.qos.logback.core.FileAppender">
+    <file>${file-bridgedescs-logname}.log</file>
+    <encoder>
+      <pattern>${utc-date-pattern} %level %logger{20}:%line %msg%n</pattern>
+    </encoder>
+
+    <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+      <level>TRACE</level>
+    </filter>
+  </appender>
+
+  <appender name="FILEEXITLISTS" class="ch.qos.logback.core.FileAppender">
+    <file>${file-exitlists-logname}.log</file>
+    <encoder>
+      <pattern>${utc-date-pattern} %level %logger{20}:%line %msg%n</pattern>
+    </encoder>
+
+    <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+      <level>TRACE</level>
+    </filter>
+  </appender>
+
+  <appender name="FILERELAYDESCS" class="ch.qos.logback.core.FileAppender">
+    <file>${file-relaydescs-logname}.log</file>
+    <encoder>
+      <pattern>${utc-date-pattern} %level %logger{20}:%line %msg%n</pattern>
+    </encoder>
+
+    <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+      <level>TRACE</level>
+    </filter>
+  </appender>
+
+  <appender name="FILETORPERF" class="ch.qos.logback.core.FileAppender">
+    <file>${file-torperf-logname}.log</file>
+    <encoder>
+      <pattern>${utc-date-pattern} %level %logger{20}:%line %msg%n</pattern>
+    </encoder>
+
+    <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+      <level>TRACE</level>
+    </filter>
+  </appender>
+
+  <appender name="FILEUPDATEINDEX" class="ch.qos.logback.core.FileAppender">
+    <file>${file-updateindex-logname}.log</file>
+    <encoder>
+      <pattern>${utc-date-pattern} %level %logger{20}:%line %msg%n</pattern>
+    </encoder>
+
+    <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+      <level>TRACE</level>
+    </filter>
+  </appender>
+
+  <!-- logger section -->
+  <logger name="org.torproject.collector.bridgedescs" >
+    <appender-ref ref="FILEBRIDGEDESCS" />
+  </logger>
+
+  <logger name="org.torproject.collector.exitlists" >
+    <appender-ref ref="FILEEXITLISTS" />
+  </logger>
+
+  <logger name="org.torproject.collector.relaydescs" >
+    <appender-ref ref="FILERELAYDESCS" />
+  </logger>
+
+  <logger name="org.torproject.collector.torperf" >
+    <appender-ref ref="FILETORPERF" />
+  </logger>
+
+  <logger name="org.torproject.collector.index" >
+    <appender-ref ref="FILEUPDATEINDEX" />
+  </logger>
+
+  <logger name="org.torproject.collector.Main" >
+    <appender-ref ref="FILEBRIDGEDESCS" />
+    <appender-ref ref="FILEEXITLISTS" />
+    <appender-ref ref="FILERELAYDESCS" />
+    <appender-ref ref="FILETORPERF" />
+    <appender-ref ref="FILEUPDATEINDEX" />
+  </logger>
+
+  <logger name="sun" level="ERROR" />
+
+  <root level="ALL">
+    <appender-ref ref="FILEALL" />
+  </root>
+
+</configuration>
+





More information about the tor-commits mailing list