[tor-commits] [collector/master] Always access static loggers in static way.

karsten at torproject.org karsten at torproject.org
Thu Aug 11 08:44:43 UTC 2016


commit 80da4550ab77bb30608ad84a7e3836317836b7fb
Author: Karsten Loesing <karsten.loesing at gmx.net>
Date:   Wed Aug 10 11:30:02 2016 +0200

    Always access static loggers in static way.
    
    Also unify loggers by making them all `static final` and by calling
    them `logger`.
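    
    For illustration, the pattern applied throughout the diff below is the
    standard SLF4J idiom: declare one `static final` logger per class and
    call it without the `this.` prefix. A minimal sketch, using a
    hypothetical `Example` class that is not part of this commit:
    
        import org.slf4j.Logger;
        import org.slf4j.LoggerFactory;
    
        public class Example {
    
          /* One logger per class, declared static and final and named
           * `logger`. */
          private static final Logger logger =
              LoggerFactory.getLogger(Example.class);
    
          public void doWork() {
            /* Access the static field in a static way, i.e. without
             * `this.`. */
            logger.info("Doing work.");
          }
        }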
---
 src/main/java/org/torproject/collector/Main.java   |  3 +-
 .../bridgedescs/BridgeDescriptorParser.java        |  7 +-
 .../bridgedescs/BridgeSnapshotReader.java          |  4 +-
 .../bridgedescs/SanitizedBridgesWriter.java        | 99 +++++++++++-----------
 .../torproject/collector/conf/Configuration.java   |  9 +-
 .../torproject/collector/cron/CollecTorMain.java   | 17 ++--
 .../org/torproject/collector/cron/Scheduler.java   | 18 ++--
 .../collector/exitlists/ExitListDownloader.java    |  3 +-
 .../collector/index/CreateIndexJson.java           | 11 +--
 .../collector/relaydescs/ArchiveReader.java        |  4 +-
 .../collector/relaydescs/ArchiveWriter.java        | 43 +++++-----
 .../relaydescs/CachedRelayDescriptorReader.java    |  4 +-
 .../collector/relaydescs/ReferenceChecker.java     | 11 +--
 .../relaydescs/RelayDescriptorDownloader.java      | 50 ++++++-----
 .../relaydescs/RelayDescriptorParser.java          | 16 ++--
 .../collector/torperf/TorperfDownloader.java       | 60 ++++++-------
 16 files changed, 185 insertions(+), 174 deletions(-)

diff --git a/src/main/java/org/torproject/collector/Main.java b/src/main/java/org/torproject/collector/Main.java
index cefbe2d..96097b3 100644
--- a/src/main/java/org/torproject/collector/Main.java
+++ b/src/main/java/org/torproject/collector/Main.java
@@ -33,7 +33,8 @@ import java.util.Map;
  */
 public class Main {
 
-  private static Logger log = LoggerFactory.getLogger(Main.class);
+  private static final Logger log = LoggerFactory.getLogger(Main.class);
+
   public static final String CONF_FILE = "collector.properties";
 
   /** All possible main classes.
diff --git a/src/main/java/org/torproject/collector/bridgedescs/BridgeDescriptorParser.java b/src/main/java/org/torproject/collector/bridgedescs/BridgeDescriptorParser.java
index 8850598..511b171 100644
--- a/src/main/java/org/torproject/collector/bridgedescs/BridgeDescriptorParser.java
+++ b/src/main/java/org/torproject/collector/bridgedescs/BridgeDescriptorParser.java
@@ -16,14 +16,13 @@ public class BridgeDescriptorParser {
 
   private SanitizedBridgesWriter sbw;
 
-  private Logger logger;
+  private static final Logger logger = LoggerFactory.getLogger(
+      BridgeDescriptorParser.class);
 
   /** Initializes a new bridge descriptor parser and links it to a
    * sanitized bridges writer to sanitize and store bridge descriptors. */
   public BridgeDescriptorParser(SanitizedBridgesWriter sbw) {
     this.sbw = sbw;
-    this.logger =
-        LoggerFactory.getLogger(BridgeDescriptorParser.class);
   }
 
   /** Parses the first line of the given descriptor data to determine the
@@ -50,7 +49,7 @@ public class BridgeDescriptorParser {
         }
       }
     } catch (IOException e) {
-      this.logger.warn("Could not parse bridge descriptor.", e);
+      logger.warn("Could not parse bridge descriptor.", e);
       return;
     }
   }
diff --git a/src/main/java/org/torproject/collector/bridgedescs/BridgeSnapshotReader.java b/src/main/java/org/torproject/collector/bridgedescs/BridgeSnapshotReader.java
index 797d8cf..25bb2ea 100644
--- a/src/main/java/org/torproject/collector/bridgedescs/BridgeSnapshotReader.java
+++ b/src/main/java/org/torproject/collector/bridgedescs/BridgeSnapshotReader.java
@@ -30,6 +30,9 @@ import java.util.TreeSet;
 
 public class BridgeSnapshotReader {
 
+  private static final Logger logger = LoggerFactory.getLogger(
+      BridgeSnapshotReader.class);
+
   /**
    * Reads the half-hourly snapshots of bridge descriptors from Tonga.
    */
@@ -41,7 +44,6 @@ public class BridgeSnapshotReader {
       throw new IllegalArgumentException();
     }
 
-    Logger logger = LoggerFactory.getLogger(BridgeSnapshotReader.class);
     SortedSet<String> parsed = new TreeSet<String>();
     File bdDir = bridgeDirectoriesDir;
     File pbdFile = new File(statsDirectory, "parsed-bridge-directories");
diff --git a/src/main/java/org/torproject/collector/bridgedescs/SanitizedBridgesWriter.java b/src/main/java/org/torproject/collector/bridgedescs/SanitizedBridgesWriter.java
index 8147487..dcac024 100644
--- a/src/main/java/org/torproject/collector/bridgedescs/SanitizedBridgesWriter.java
+++ b/src/main/java/org/torproject/collector/bridgedescs/SanitizedBridgesWriter.java
@@ -51,7 +51,8 @@ import java.util.TreeMap;
  */
 public class SanitizedBridgesWriter extends CollecTorMain {
 
-  private static Logger logger = LoggerFactory.getLogger(SanitizedBridgesWriter.class);
+  private static final Logger logger = LoggerFactory.getLogger(
+      SanitizedBridgesWriter.class);
 
   public SanitizedBridgesWriter(Configuration config) {
     super(config);
@@ -117,7 +118,7 @@ public class SanitizedBridgesWriter extends CollecTorMain {
       try {
         this.secureRandom = SecureRandom.getInstance("SHA1PRNG", "SUN");
       } catch (GeneralSecurityException e) {
-        this.logger.warn("Could not initialize secure "
+        logger.warn("Could not initialize secure "
             + "random number generator! Not calculating any IP address "
             + "hashes in this execution!", e);
         this.persistenceProblemWithSecrets = true;
@@ -139,7 +140,7 @@ public class SanitizedBridgesWriter extends CollecTorMain {
           if ((line.length() != ("yyyy-MM,".length() + 31 * 2)
               && line.length() != ("yyyy-MM,".length() + 50 * 2))
               || parts.length != 2) {
-            this.logger.warn("Invalid line in bridge-ip-secrets file "
+            logger.warn("Invalid line in bridge-ip-secrets file "
                 + "starting with '" + line.substring(0, 7) + "'! "
                 + "Not calculating any IP address hashes in this "
                 + "execution!");
@@ -152,17 +153,17 @@ public class SanitizedBridgesWriter extends CollecTorMain {
         }
         br.close();
         if (!this.persistenceProblemWithSecrets) {
-          this.logger.debug("Read "
+          logger.debug("Read "
               + this.secretsForHashingIpAddresses.size() + " secrets for "
               + "hashing bridge IP addresses.");
         }
       } catch (DecoderException e) {
-        this.logger.warn("Failed to decode hex string in "
+        logger.warn("Failed to decode hex string in "
             + this.bridgeIpSecretsFile + "! Not calculating any IP "
             + "address hashes in this execution!", e);
         this.persistenceProblemWithSecrets = true;
       } catch (IOException e) {
-        this.logger.warn("Failed to read "
+        logger.warn("Failed to read "
             + this.bridgeIpSecretsFile + "! Not calculating any IP "
             + "address hashes in this execution!", e);
         this.persistenceProblemWithSecrets = true;
@@ -344,7 +345,7 @@ public class SanitizedBridgesWriter extends CollecTorMain {
       }
       if (month.compareTo(
           this.bridgeSanitizingCutOffTimestamp) < 0) {
-        this.logger.warn("Generated a secret that we won't make "
+        logger.warn("Generated a secret that we won't make "
             + "persistent, because it's outside our bridge descriptor "
             + "sanitizing interval.");
       } else {
@@ -360,7 +361,7 @@ public class SanitizedBridgesWriter extends CollecTorMain {
           bw.write(month + "," + Hex.encodeHexString(secret) + "\n");
           bw.close();
         } catch (IOException e) {
-          this.logger.warn("Could not store new secret "
+          logger.warn("Could not store new secret "
               + "to disk! Not calculating any IP address hashes in "
               + "this execution!", e);
           this.persistenceProblemWithSecrets = true;
@@ -396,9 +397,9 @@ public class SanitizedBridgesWriter extends CollecTorMain {
           + "publication time outside our descriptor sanitizing "
           + "interval.";
       if (this.haveWarnedAboutInterval) {
-        this.logger.debug(text);
+        logger.debug(text);
       } else {
-        this.logger.warn(text);
+        logger.warn(text);
         this.haveWarnedAboutInterval = true;
       }
     }
@@ -484,7 +485,7 @@ public class SanitizedBridgesWriter extends CollecTorMain {
           if (scrubbedOrAddress != null) {
             scrubbed.append("a " + scrubbedOrAddress + "\n");
           } else {
-            this.logger.warn("Invalid address in line '" + line
+            logger.warn("Invalid address in line '" + line
                 + "' in bridge network status.  Skipping line!");
           }
 
@@ -498,7 +499,7 @@ public class SanitizedBridgesWriter extends CollecTorMain {
          * network status.  If there is, we should probably learn before
          * writing anything to the sanitized descriptors. */
         } else {
-          this.logger.debug("Unknown line '" + line + "' in bridge "
+          logger.debug("Unknown line '" + line + "' in bridge "
               + "network status. Not writing to disk!");
           return;
         }
@@ -518,18 +519,18 @@ public class SanitizedBridgesWriter extends CollecTorMain {
       if (formatter.parse(publicationTime).getTime()
           - formatter.parse(mostRecentDescPublished).getTime()
           > 60L * 60L * 1000L) {
-        this.logger.warn("The most recent descriptor in the bridge "
+        logger.warn("The most recent descriptor in the bridge "
             + "network status published at " + publicationTime + " was "
             + "published at " + mostRecentDescPublished + " which is "
             + "more than 1 hour before the status. This is a sign for "
             + "the status being stale. Please check!");
       }
     } catch (ParseException e) {
-      this.logger.warn("Could not parse timestamp in "
+      logger.warn("Could not parse timestamp in "
           + "bridge network status.", e);
       return;
     } catch (IOException e) {
-      this.logger.warn("Could not parse bridge network "
+      logger.warn("Could not parse bridge network "
           + "status.", e);
       return;
     }
@@ -563,7 +564,7 @@ public class SanitizedBridgesWriter extends CollecTorMain {
         bw.close();
       }
     } catch (IOException e) {
-      this.logger.warn("Could not write sanitized bridge "
+      logger.warn("Could not write sanitized bridge "
           + "network status to disk.", e);
       return;
     }
@@ -634,9 +635,9 @@ public class SanitizedBridgesWriter extends CollecTorMain {
                 + "server descriptor with publication time outside our "
                 + "descriptor sanitizing interval.";
             if (this.haveWarnedAboutInterval) {
-              this.logger.debug(text);
+              logger.debug(text);
             } else {
-              this.logger.warn(text);
+              logger.warn(text);
               this.haveWarnedAboutInterval = true;
             }
           }
@@ -664,7 +665,7 @@ public class SanitizedBridgesWriter extends CollecTorMain {
                 if (scrubbedOrAddress != null) {
                   scrubbedOrAddresses.add(scrubbedOrAddress);
                 } else {
-                  this.logger.warn("Invalid address in line "
+                  logger.warn("Invalid address in line "
                       + "'or-address " + orAddress + "' in bridge server "
                       + "descriptor.  Skipping line!");
                 }
@@ -754,7 +755,7 @@ public class SanitizedBridgesWriter extends CollecTorMain {
               + "\n");
           if (masterKeyEd25519 != null && !masterKeyEd25519.equals(
               masterKeyEd25519FromIdentityEd25519)) {
-            this.logger.warn("Mismatch between identity-ed25519 and "
+            logger.warn("Mismatch between identity-ed25519 and "
                 + "master-key-ed25519.  Skipping.");
             return;
           }
@@ -765,7 +766,7 @@ public class SanitizedBridgesWriter extends CollecTorMain {
           if (masterKeyEd25519FromIdentityEd25519 != null
               && !masterKeyEd25519FromIdentityEd25519.equals(
               masterKeyEd25519)) {
-            this.logger.warn("Mismatch between identity-ed25519 and "
+            logger.warn("Mismatch between identity-ed25519 and "
                 + "master-key-ed25519.  Skipping.");
             return;
           }
@@ -832,14 +833,14 @@ public class SanitizedBridgesWriter extends CollecTorMain {
          * that we need to remove or replace for the sanitized descriptor
          * version. */
         } else {
-          this.logger.warn("Unrecognized line '" + line
+          logger.warn("Unrecognized line '" + line
               + "'. Skipping.");
           return;
         }
       }
       br.close();
     } catch (Exception e) {
-      this.logger.warn("Could not parse server "
+      logger.warn("Could not parse server "
           + "descriptor.", e);
       return;
     }
@@ -861,7 +862,7 @@ public class SanitizedBridgesWriter extends CollecTorMain {
       /* Handle below. */
     }
     if (descriptorDigest == null) {
-      this.logger.warn("Could not calculate server "
+      logger.warn("Could not calculate server "
           + "descriptor digest.");
       return;
     }
@@ -884,7 +885,7 @@ public class SanitizedBridgesWriter extends CollecTorMain {
         /* Handle below. */
       }
       if (descriptorDigestSha256Base64 == null) {
-        this.logger.warn("Could not calculate server "
+        logger.warn("Could not calculate server "
             + "descriptor SHA256 digest.");
         return;
       }
@@ -925,7 +926,7 @@ public class SanitizedBridgesWriter extends CollecTorMain {
         bw.close();
       }
     } catch (ConfigurationException | IOException e) {
-      this.logger.warn("Could not write sanitized server "
+      logger.warn("Could not write sanitized server "
           + "descriptor to disk.", e);
       return;
     }
@@ -935,26 +936,26 @@ public class SanitizedBridgesWriter extends CollecTorMain {
       String identityEd25519Base64) {
     byte[] identityEd25519 = Base64.decodeBase64(identityEd25519Base64);
     if (identityEd25519.length < 40) {
-      this.logger.warn("Invalid length of identity-ed25519 (in "
+      logger.warn("Invalid length of identity-ed25519 (in "
           + "bytes): " + identityEd25519.length);
     } else if (identityEd25519[0] != 0x01) {
-      this.logger.warn("Unknown version in identity-ed25519: "
+      logger.warn("Unknown version in identity-ed25519: "
           + identityEd25519[0]);
     } else if (identityEd25519[1] != 0x04) {
-      this.logger.warn("Unknown cert type in identity-ed25519: "
+      logger.warn("Unknown cert type in identity-ed25519: "
           + identityEd25519[1]);
     } else if (identityEd25519[6] != 0x01) {
-      this.logger.warn("Unknown certified key type in "
+      logger.warn("Unknown certified key type in "
           + "identity-ed25519: " + identityEd25519[1]);
     } else if (identityEd25519[39] == 0x00) {
-      this.logger.warn("No extensions in identity-ed25519 (which "
+      logger.warn("No extensions in identity-ed25519 (which "
           + "would contain the encoded master-key-ed25519): "
           + identityEd25519[39]);
     } else {
       int extensionStart = 40;
       for (int i = 0; i < (int) identityEd25519[39]; i++) {
         if (identityEd25519.length < extensionStart + 4) {
-          this.logger.warn("Invalid extension with id " + i
+          logger.warn("Invalid extension with id " + i
               + " in identity-ed25519.");
           break;
         }
@@ -964,7 +965,7 @@ public class SanitizedBridgesWriter extends CollecTorMain {
         int extensionType = identityEd25519[extensionStart + 2];
         if (extensionLength == 32 && extensionType == 4) {
           if (identityEd25519.length < extensionStart + 4 + 32) {
-            this.logger.warn("Invalid extension with id " + i
+            logger.warn("Invalid extension with id " + i
                 + " in identity-ed25519.");
             break;
           }
@@ -980,7 +981,7 @@ public class SanitizedBridgesWriter extends CollecTorMain {
         extensionStart += 4 + extensionLength;
       }
     }
-    this.logger.warn("Unable to locate master-key-ed25519 in "
+    logger.warn("Unable to locate master-key-ed25519 in "
         + "identity-ed25519.");
     return null;
   }
@@ -1028,7 +1029,7 @@ public class SanitizedBridgesWriter extends CollecTorMain {
          * name. */
         } else if (line.startsWith("transport ")) {
           if (parts.length < 3) {
-            this.logger.debug("Illegal line in extra-info descriptor: '"
+            logger.debug("Illegal line in extra-info descriptor: '"
                 + line + "'.  Skipping descriptor.");
             return;
           }
@@ -1058,7 +1059,7 @@ public class SanitizedBridgesWriter extends CollecTorMain {
               + "\n");
           if (masterKeyEd25519 != null && !masterKeyEd25519.equals(
               masterKeyEd25519FromIdentityEd25519)) {
-            this.logger.warn("Mismatch between identity-ed25519 and "
+            logger.warn("Mismatch between identity-ed25519 and "
                 + "master-key-ed25519.  Skipping.");
             return;
           }
@@ -1069,7 +1070,7 @@ public class SanitizedBridgesWriter extends CollecTorMain {
           if (masterKeyEd25519FromIdentityEd25519 != null
               && !masterKeyEd25519FromIdentityEd25519.equals(
               masterKeyEd25519)) {
-            this.logger.warn("Mismatch between identity-ed25519 and "
+            logger.warn("Mismatch between identity-ed25519 and "
                 + "master-key-ed25519.  Skipping.");
             return;
           }
@@ -1106,18 +1107,18 @@ public class SanitizedBridgesWriter extends CollecTorMain {
          * that we need to remove or replace for the sanitized descriptor
          * version. */
         } else {
-          this.logger.warn("Unrecognized line '" + line
+          logger.warn("Unrecognized line '" + line
               + "'. Skipping.");
           return;
         }
       }
       br.close();
     } catch (IOException e) {
-      this.logger.warn("Could not parse extra-info "
+      logger.warn("Could not parse extra-info "
           + "descriptor.", e);
       return;
     } catch (DecoderException e) {
-      this.logger.warn("Could not parse extra-info "
+      logger.warn("Could not parse extra-info "
           + "descriptor.", e);
       return;
     }
@@ -1139,7 +1140,7 @@ public class SanitizedBridgesWriter extends CollecTorMain {
       /* Handle below. */
     }
     if (descriptorDigest == null) {
-      this.logger.warn("Could not calculate extra-info "
+      logger.warn("Could not calculate extra-info "
           + "descriptor digest.");
       return;
     }
@@ -1162,7 +1163,7 @@ public class SanitizedBridgesWriter extends CollecTorMain {
         /* Handle below. */
       }
       if (descriptorDigestSha256Base64 == null) {
-        this.logger.warn("Could not calculate extra-info "
+        logger.warn("Could not calculate extra-info "
             + "descriptor SHA256 digest.");
         return;
       }
@@ -1203,7 +1204,7 @@ public class SanitizedBridgesWriter extends CollecTorMain {
         bw.close();
       }
     } catch (Exception e) {
-      this.logger.warn("Could not write sanitized "
+      logger.warn("Could not write sanitized "
           + "extra-info descriptor to disk.", e);
     }
   }
@@ -1237,10 +1238,10 @@ public class SanitizedBridgesWriter extends CollecTorMain {
           }
         }
         bw.close();
-        this.logger.info("Deleted " + deleted + " secrets that we don't "
+        logger.info("Deleted " + deleted + " secrets that we don't "
             + "need anymore and kept " + kept + ".");
       } catch (IOException e) {
-        this.logger.warn("Could not store reduced set of "
+        logger.warn("Could not store reduced set of "
             + "secrets to disk! This is a bad sign, better check what's "
             + "going on!", e);
       }
@@ -1257,7 +1258,7 @@ public class SanitizedBridgesWriter extends CollecTorMain {
           dateTimeFormat.parse(maxNetworkStatusPublishedTime).getTime();
       if (maxNetworkStatusPublishedMillis > 0L
           && maxNetworkStatusPublishedMillis < tooOldMillis) {
-        this.logger.warn("The last known bridge network status was "
+        logger.warn("The last known bridge network status was "
             + "published " + maxNetworkStatusPublishedTime + ", which is "
             + "more than 5:30 hours in the past.");
       }
@@ -1266,7 +1267,7 @@ public class SanitizedBridgesWriter extends CollecTorMain {
           .getTime();
       if (maxServerDescriptorPublishedMillis > 0L
           && maxServerDescriptorPublishedMillis < tooOldMillis) {
-        this.logger.warn("The last known bridge server descriptor was "
+        logger.warn("The last known bridge server descriptor was "
             + "published " + maxServerDescriptorPublishedTime + ", which "
             + "is more than 5:30 hours in the past.");
       }
@@ -1275,12 +1276,12 @@ public class SanitizedBridgesWriter extends CollecTorMain {
           .getTime();
       if (maxExtraInfoDescriptorPublishedMillis > 0L
           && maxExtraInfoDescriptorPublishedMillis < tooOldMillis) {
-        this.logger.warn("The last known bridge extra-info descriptor "
+        logger.warn("The last known bridge extra-info descriptor "
             + "was published " + maxExtraInfoDescriptorPublishedTime
             + ", which is more than 5:30 hours in the past.");
       }
     } catch (ParseException e) {
-      this.logger.warn("Unable to parse timestamp for "
+      logger.warn("Unable to parse timestamp for "
           + "stale check.", e);
     }
   }
diff --git a/src/main/java/org/torproject/collector/conf/Configuration.java b/src/main/java/org/torproject/collector/conf/Configuration.java
index 86527af..ee6552c 100644
--- a/src/main/java/org/torproject/collector/conf/Configuration.java
+++ b/src/main/java/org/torproject/collector/conf/Configuration.java
@@ -27,7 +27,8 @@ import java.util.concurrent.TimeUnit;
  */
 public class Configuration extends Observable implements Cloneable {
 
-  private static final Logger log = LoggerFactory.getLogger(Configuration.class);
+  private static final Logger logger = LoggerFactory.getLogger(
+      Configuration.class);
 
   private final ScheduledExecutorService scheduler =
       Executors.newScheduledThreadPool(1);
@@ -55,18 +56,18 @@ public class Configuration extends Observable implements Cloneable {
     }
     this.scheduler.scheduleAtFixedRate(new Runnable() {
         public void run() {
-          log.trace("Check configuration file.");
+          logger.trace("Check configuration file.");
             try {
               FileTime ftNow = Files.getLastModifiedTime(confPath);
               if (ft.compareTo(ftNow) < 0) {
-                log.info("Configuration file was changed.");
+                logger.info("Configuration file was changed.");
                 reload();
                 setChanged();
                 notifyObservers(null);
               }
               ft = ftNow;
             } catch (Throwable th) { // Catch all and keep running.
-              log.error("Cannot reload configuration file.", th);
+              logger.error("Cannot reload configuration file.", th);
             }
         }
       }, 5, 5, TimeUnit.SECONDS);
diff --git a/src/main/java/org/torproject/collector/cron/CollecTorMain.java b/src/main/java/org/torproject/collector/cron/CollecTorMain.java
index 021fc34..26c9671 100644
--- a/src/main/java/org/torproject/collector/cron/CollecTorMain.java
+++ b/src/main/java/org/torproject/collector/cron/CollecTorMain.java
@@ -18,7 +18,8 @@ import java.util.concurrent.atomic.AtomicBoolean;
 
 public abstract class CollecTorMain implements Observer, Runnable {
 
-  private static Logger log = LoggerFactory.getLogger(CollecTorMain.class);
+  private static final Logger logger = LoggerFactory.getLogger(
+      CollecTorMain.class);
 
   private static final long LIMIT_MB = 200;
 
@@ -40,7 +41,7 @@ public abstract class CollecTorMain implements Observer, Runnable {
   public final void run() {
     synchronized (this) {
       if (newConfigAvailable.get()) {
-        log.info("Module {} is using the new configuration.", module());
+        logger.info("Module {} is using the new configuration.", module());
         synchronized (newConfig) {
           config.clear();
           config.putAll(newConfig.getPropertiesCopy());
@@ -48,13 +49,13 @@ public abstract class CollecTorMain implements Observer, Runnable {
         }
       }
     }
-    log.info("Starting {} module of CollecTor.", module());
+    logger.info("Starting {} module of CollecTor.", module());
     try {
       startProcessing();
     } catch (Throwable th) { // Catching all to prevent #19771
-      log.error("The {} module failed: {}", module(), th.getMessage(), th);
+      logger.error("The {} module failed: {}", module(), th.getMessage(), th);
     }
-    log.info("Terminating {} module of CollecTor.", module());
+    logger.info("Terminating {} module of CollecTor.", module());
   }
 
   @Override
@@ -62,7 +63,7 @@ public abstract class CollecTorMain implements Observer, Runnable {
     newConfigAvailable.set(true);
     if (obs instanceof Configuration) {
       newConfig = (Configuration) obs;
-      log.info("Module {} just received a new configuration.", module());
+      logger.info("Module {} just received a new configuration.", module());
     }
   }
 
@@ -87,10 +88,10 @@ public abstract class CollecTorMain implements Observer, Runnable {
           .getAbsoluteFile().toPath().getRoot()).getUsableSpace()
               / 1024 / 1024);
       if (megaBytes < LIMIT_MB) {
-        log.warn("Available storage critical for {}; only {} MiB left.",
+        logger.warn("Available storage critical for {}; only {} MiB left.",
             location, megaBytes);
       } else {
-        log.trace("Available storage for {}: {} MiB", location, megaBytes);
+        logger.trace("Available storage for {}: {} MiB", location, megaBytes);
       }
     } catch (IOException ioe) {
       throw new RuntimeException("Cannot access " + location + " reason: "
diff --git a/src/main/java/org/torproject/collector/cron/Scheduler.java b/src/main/java/org/torproject/collector/cron/Scheduler.java
index 3677664..6c7b6dd 100644
--- a/src/main/java/org/torproject/collector/cron/Scheduler.java
+++ b/src/main/java/org/torproject/collector/cron/Scheduler.java
@@ -28,7 +28,7 @@ public final class Scheduler implements ThreadFactory {
   public static final String PERIODMIN = "PeriodMinutes";
   public static final String OFFSETMIN = "OffsetMinutes";
 
-  private static final Logger log = LoggerFactory.getLogger(Scheduler.class);
+  private static final Logger logger = LoggerFactory.getLogger(Scheduler.class);
 
   private final ThreadFactory threads = Executors.defaultThreadFactory();
 
@@ -66,7 +66,7 @@ public final class Scheduler implements ThreadFactory {
           | InstantiationException | InvocationTargetException
           | NoSuchMethodException | RejectedExecutionException
           | NullPointerException ex) {
-        log.error("Cannot schedule " + ctmEntry.getValue().getName()
+        logger.error("Cannot schedule " + ctmEntry.getValue().getName()
             + ". Reason: " + ex.getMessage(), ex);
       }
     }
@@ -77,17 +77,17 @@ public final class Scheduler implements ThreadFactory {
   private void scheduleExecutions(boolean runOnce, CollecTorMain ctm,
       int offset, int period) {
     if (runOnce) {
-      this.log.info("Single run for " + ctm.getClass().getName() + ".");
+      logger.info("Single run for " + ctm.getClass().getName() + ".");
       this.scheduler.execute(ctm);
     } else {
-      this.log.info("Periodic updater started for " + ctm.getClass().getName()
+      logger.info("Periodic updater started for " + ctm.getClass().getName()
           + "; offset=" + offset + ", period=" + period + ".");
       long periodMillis = period * MILLIS_IN_A_MINUTE;
       long initialDelayMillis = computeInitialDelayMillis(
           System.currentTimeMillis(), offset * MILLIS_IN_A_MINUTE, periodMillis);
 
       /* Run after initialDelay delay and then every period min. */
-      log.info("Periodic updater will first run in {} and then every {} "
+      logger.info("Periodic updater will first run in {} and then every {} "
           + "minutes.", initialDelayMillis < MILLIS_IN_A_MINUTE
           ? "under 1 minute"
           : (initialDelayMillis / MILLIS_IN_A_MINUTE) + " minute(s)", period);
@@ -109,12 +109,12 @@ public final class Scheduler implements ThreadFactory {
     try {
       scheduler.shutdown();
       scheduler.awaitTermination(20L, java.util.concurrent.TimeUnit.MINUTES);
-      log.info("Shutdown of all scheduled tasks completed successfully.");
+      logger.info("Shutdown of all scheduled tasks completed successfully.");
     } catch (InterruptedException ie) {
       List<Runnable> notTerminated = scheduler.shutdownNow();
-      log.error("Regular shutdown failed for: " + notTerminated);
+      logger.error("Regular shutdown failed for: " + notTerminated);
       if (!notTerminated.isEmpty()) {
-        log.error("Forced shutdown failed for: " + notTerminated);
+        logger.error("Forced shutdown failed for: " + notTerminated);
       }
     }
   }
@@ -126,7 +126,7 @@ public final class Scheduler implements ThreadFactory {
   public Thread newThread(Runnable runner) {
     Thread newThread = threads.newThread(runner);
     newThread.setName("CollecTor-Scheduled-Thread-" + ++currentThreadNo);
-    log.info("New Thread created: " + newThread.getName());
+    logger.info("New Thread created: " + newThread.getName());
     return newThread;
   }
 }
diff --git a/src/main/java/org/torproject/collector/exitlists/ExitListDownloader.java b/src/main/java/org/torproject/collector/exitlists/ExitListDownloader.java
index 5ba8ea3..2f71d2e 100644
--- a/src/main/java/org/torproject/collector/exitlists/ExitListDownloader.java
+++ b/src/main/java/org/torproject/collector/exitlists/ExitListDownloader.java
@@ -34,7 +34,8 @@ import java.util.TreeSet;
 
 public class ExitListDownloader extends CollecTorMain {
 
-  private static Logger logger = LoggerFactory.getLogger(ExitListDownloader.class);
+  private static final Logger logger = LoggerFactory.getLogger(
+      ExitListDownloader.class);
 
   /** Instanciate the exit-lists module using the given configuration. */
   public ExitListDownloader(Configuration config) {
diff --git a/src/main/java/org/torproject/collector/index/CreateIndexJson.java b/src/main/java/org/torproject/collector/index/CreateIndexJson.java
index 39069f1..5ec2014 100644
--- a/src/main/java/org/torproject/collector/index/CreateIndexJson.java
+++ b/src/main/java/org/torproject/collector/index/CreateIndexJson.java
@@ -40,7 +40,8 @@ import java.util.zip.GZIPOutputStream;
  * we'll likely have to do that. */
 public class CreateIndexJson extends CollecTorMain {
 
-  private static Logger log = LoggerFactory.getLogger(CreateIndexJson.class);
+  private static final Logger logger =
+      LoggerFactory.getLogger(CreateIndexJson.class);
 
   private static File indexJsonFile;
 
@@ -76,7 +77,7 @@ public class CreateIndexJson extends CollecTorMain {
           config.getPath(Key.RecentPath).toFile() };
       writeIndex(indexDirectories());
     } catch (Exception e) {
-      log.error("Cannot run index creation: " + e.getMessage(), e);
+      logger.error("Cannot run index creation: " + e.getMessage(), e);
       throw new RuntimeException(e);
     }
   }
@@ -144,7 +145,7 @@ public class CreateIndexJson extends CollecTorMain {
   private IndexNode indexDirectories() {
     SortedSet<DirectoryNode> directoryNodes =
         new TreeSet<DirectoryNode>();
-    log.trace("indexing: " + indexedDirectories[0] + " "
+    logger.trace("indexing: " + indexedDirectories[0] + " "
         + indexedDirectories[1]);
     for (File directory : indexedDirectories) {
       if (directory.exists() && directory.isDirectory()) {
@@ -162,10 +163,10 @@ public class CreateIndexJson extends CollecTorMain {
     SortedSet<FileNode> fileNodes = new TreeSet<FileNode>();
     SortedSet<DirectoryNode> directoryNodes =
         new TreeSet<DirectoryNode>();
-    log.trace("indexing: " + directory);
+    logger.trace("indexing: " + directory);
     File[] fileList = directory.listFiles();
     if (null == fileList) {
-      log.warn("Indexing dubious directory: " + directory);
+      logger.warn("Indexing dubious directory: " + directory);
       return null;
     }
     for (File fileOrDirectory : fileList) {
diff --git a/src/main/java/org/torproject/collector/relaydescs/ArchiveReader.java b/src/main/java/org/torproject/collector/relaydescs/ArchiveReader.java
index 9ceb2a0..3f15eec 100644
--- a/src/main/java/org/torproject/collector/relaydescs/ArchiveReader.java
+++ b/src/main/java/org/torproject/collector/relaydescs/ArchiveReader.java
@@ -39,6 +39,9 @@ import java.util.TreeSet;
  */
 public class ArchiveReader {
 
+  private static final Logger logger = LoggerFactory.getLogger(
+      ArchiveReader.class);
+
   private Map<String, Set<String>> microdescriptorValidAfterTimes =
       new HashMap<String, Set<String>>();
 
@@ -56,7 +59,6 @@ public class ArchiveReader {
     rdp.setArchiveReader(this);
     int parsedFiles = 0;
     int ignoredFiles = 0;
-    Logger logger = LoggerFactory.getLogger(ArchiveReader.class);
     SortedSet<String> archivesImportHistory = new TreeSet<String>();
     File archivesImportHistoryFile = new File(statsDirectory,
         "archives-import-history");
diff --git a/src/main/java/org/torproject/collector/relaydescs/ArchiveWriter.java b/src/main/java/org/torproject/collector/relaydescs/ArchiveWriter.java
index 9053372..f160a17 100644
--- a/src/main/java/org/torproject/collector/relaydescs/ArchiveWriter.java
+++ b/src/main/java/org/torproject/collector/relaydescs/ArchiveWriter.java
@@ -40,7 +40,8 @@ import java.util.TreeMap;
 
 public class ArchiveWriter extends CollecTorMain {
 
-  private static Logger logger = LoggerFactory.getLogger(ArchiveWriter.class);
+  private static final Logger logger = LoggerFactory.getLogger(
+      ArchiveWriter.class);
 
   private long now = System.currentTimeMillis();
   private String outputDirectory;
@@ -208,7 +209,7 @@ public class ArchiveWriter extends CollecTorMain {
         while ((line = br.readLine()) != null) {
           String[] parts = line.split(",");
           if (parts.length != 3) {
-            this.logger.warn("Could not load server descriptor "
+            logger.warn("Could not load server descriptor "
                 + "digests because of illegal line '" + line + "'.  We "
                 + "might not be able to correctly check descriptors for "
                 + "completeness.");
@@ -237,7 +238,7 @@ public class ArchiveWriter extends CollecTorMain {
         while ((line = br.readLine()) != null) {
           String[] parts = line.split(",");
           if (parts.length != 2) {
-            this.logger.warn("Could not load extra-info descriptor "
+            logger.warn("Could not load extra-info descriptor "
                 + "digests because of illegal line '" + line + "'.  We "
                 + "might not be able to correctly check descriptors for "
                 + "completeness.");
@@ -264,7 +265,7 @@ public class ArchiveWriter extends CollecTorMain {
         while ((line = br.readLine()) != null) {
           String[] parts = line.split(",");
           if (parts.length != 2) {
-            this.logger.warn("Could not load microdescriptor digests "
+            logger.warn("Could not load microdescriptor digests "
                 + "because of illegal line '" + line + "'.  We might not "
                 + "be able to correctly check descriptors for "
                 + "completeness.");
@@ -285,11 +286,11 @@ public class ArchiveWriter extends CollecTorMain {
         br.close();
       }
     } catch (ParseException e) {
-      this.logger.warn("Could not load descriptor "
+      logger.warn("Could not load descriptor "
           + "digests.  We might not be able to correctly check "
           + "descriptors for completeness.", e);
     } catch (IOException e) {
-      this.logger.warn("Could not load descriptor "
+      logger.warn("Could not load descriptor "
           + "digests.  We might not be able to correctly check "
           + "descriptors for completeness.", e);
     }
@@ -475,9 +476,9 @@ public class ArchiveWriter extends CollecTorMain {
         missingVotes = true;
       }
     }
-    this.logger.info(sb.toString());
+    logger.info(sb.toString());
     if (missingDescriptors) {
-      this.logger.debug("We are missing at least 0.5% of server or "
+      logger.debug("We are missing at least 0.5% of server or "
           + "extra-info descriptors referenced from a consensus or "
           + "vote or at least 0.5% of microdescriptors referenced from a "
           + "microdesc consensus.");
@@ -485,13 +486,13 @@ public class ArchiveWriter extends CollecTorMain {
     if (missingVotes) {
       /* TODO Shouldn't warn if we're not trying to archive votes at
        * all. */
-      this.logger.debug("We are missing at least one vote that was "
+      logger.debug("We are missing at least one vote that was "
           + "referenced from a consensus.");
     }
     if (missingMicrodescConsensus) {
       /* TODO Shouldn't warn if we're not trying to archive microdesc
        * consensuses at all. */
-      this.logger.debug("We are missing at least one microdesc "
+      logger.debug("We are missing at least one microdesc "
           + "consensus that was published together with a known "
           + "consensus.");
     }
@@ -504,14 +505,14 @@ public class ArchiveWriter extends CollecTorMain {
     long tooOldMillis = this.now - 330L * 60L * 1000L;
     if (!this.storedConsensuses.isEmpty()
         && this.storedConsensuses.lastKey() < tooOldMillis) {
-      this.logger.warn("The last known relay network status "
+      logger.warn("The last known relay network status "
           + "consensus was valid after "
           + dateTimeFormat.format(this.storedConsensuses.lastKey())
           + ", which is more than 5:30 hours in the past.");
     }
     if (!this.storedMicrodescConsensuses.isEmpty()
         && this.storedMicrodescConsensuses.lastKey() < tooOldMillis) {
-      this.logger.warn("The last known relay network status "
+      logger.warn("The last known relay network status "
           + "microdesc consensus was valid after "
           + dateTimeFormat.format(
           this.storedMicrodescConsensuses.lastKey())
@@ -519,28 +520,28 @@ public class ArchiveWriter extends CollecTorMain {
     }
     if (!this.storedVotes.isEmpty()
         && this.storedVotes.lastKey() < tooOldMillis) {
-      this.logger.warn("The last known relay network status vote "
+      logger.warn("The last known relay network status vote "
           + "was valid after " + dateTimeFormat.format(
           this.storedVotes.lastKey()) + ", which is more than 5:30 hours "
           + "in the past.");
     }
     if (!this.storedServerDescriptors.isEmpty()
         && this.storedServerDescriptors.lastKey() < tooOldMillis) {
-      this.logger.warn("The last known relay server descriptor was "
+      logger.warn("The last known relay server descriptor was "
           + "published at "
           + dateTimeFormat.format(this.storedServerDescriptors.lastKey())
           + ", which is more than 5:30 hours in the past.");
     }
     if (!this.storedExtraInfoDescriptors.isEmpty()
         && this.storedExtraInfoDescriptors.lastKey() < tooOldMillis) {
-      this.logger.warn("The last known relay extra-info descriptor "
+      logger.warn("The last known relay extra-info descriptor "
           + "was published at " + dateTimeFormat.format(
           this.storedExtraInfoDescriptors.lastKey())
           + ", which is more than 5:30 hours in the past.");
     }
     if (!this.storedMicrodescriptors.isEmpty()
         && this.storedMicrodescriptors.lastKey() < tooOldMillis) {
-      this.logger.warn("The last known relay microdescriptor was "
+      logger.warn("The last known relay microdescriptor was "
           + "contained in a microdesc consensus that was valid after "
           + dateTimeFormat.format(this.storedMicrodescriptors.lastKey())
           + ", which is more than 5:30 hours in the past.");
@@ -620,7 +621,7 @@ public class ArchiveWriter extends CollecTorMain {
       }
       bw.close();
     } catch (IOException e) {
-      this.logger.warn("Could not save descriptor "
+      logger.warn("Could not save descriptor "
           + "digests.  We might not be able to correctly check "
           + "descriptors for completeness in the next run.", e);
     }
@@ -819,10 +820,10 @@ public class ArchiveWriter extends CollecTorMain {
   private boolean store(byte[] typeAnnotation, byte[] data,
       File[] outputFiles, boolean[] append) {
     try {
-      this.logger.trace("Storing " + outputFiles[0]);
+      logger.trace("Storing " + outputFiles[0]);
       if (this.descriptorParser.parseDescriptors(data,
           outputFiles[0].getName()).size() != 1) {
-        this.logger.info("Relay descriptor file " + outputFiles[0]
+        logger.info("Relay descriptor file " + outputFiles[0]
             + " doesn't contain exactly one descriptor.  Not storing.");
         return false;
       }
@@ -840,10 +841,10 @@ public class ArchiveWriter extends CollecTorMain {
       }
       return true;
     } catch (DescriptorParseException e) {
-      this.logger.warn("Could not parse relay descriptor "
+      logger.warn("Could not parse relay descriptor "
           + outputFiles[0] + " before storing it to disk.  Skipping.", e);
     } catch (IOException e) {
-      this.logger.warn("Could not store relay descriptor "
+      logger.warn("Could not store relay descriptor "
           + outputFiles[0], e);
     }
     return false;
diff --git a/src/main/java/org/torproject/collector/relaydescs/CachedRelayDescriptorReader.java b/src/main/java/org/torproject/collector/relaydescs/CachedRelayDescriptorReader.java
index 05d4481..acfe0b1 100644
--- a/src/main/java/org/torproject/collector/relaydescs/CachedRelayDescriptorReader.java
+++ b/src/main/java/org/torproject/collector/relaydescs/CachedRelayDescriptorReader.java
@@ -34,6 +34,9 @@ import java.util.TreeSet;
  */
 public class CachedRelayDescriptorReader {
 
+  private static final Logger logger = LoggerFactory.getLogger(
+      CachedRelayDescriptorReader.class);
+
   /** Reads cached-descriptor files from one or more directories and
    * passes them to the given descriptor parser. */
   public CachedRelayDescriptorReader(RelayDescriptorParser rdp,
@@ -46,7 +49,6 @@ public class CachedRelayDescriptorReader {
 
     StringBuilder dumpStats = new StringBuilder("Finished importing "
         + "relay descriptors from local Tor data directories:");
-    Logger logger = LoggerFactory.getLogger(CachedRelayDescriptorReader.class);
 
     /* Read import history containing SHA-1 digests of previously parsed
      * statuses and descriptors, so that we can skip them in this run. */
diff --git a/src/main/java/org/torproject/collector/relaydescs/ReferenceChecker.java b/src/main/java/org/torproject/collector/relaydescs/ReferenceChecker.java
index a749ff5..9c76216 100644
--- a/src/main/java/org/torproject/collector/relaydescs/ReferenceChecker.java
+++ b/src/main/java/org/torproject/collector/relaydescs/ReferenceChecker.java
@@ -37,7 +37,8 @@ import java.util.TreeSet;
 
 public class ReferenceChecker {
 
-  private Logger log = LoggerFactory.getLogger(ReferenceChecker.class);
+  private static final Logger logger = LoggerFactory.getLogger(
+      ReferenceChecker.class);
 
   private File descriptorsDir;
 
@@ -145,7 +146,7 @@ public class ReferenceChecker {
           Reference[].class)));
       fr.close();
     } catch (IOException e) {
-      this.log.warn("Cannot read existing references file "
+      logger.warn("Cannot read existing references file "
           + "from previous run.", e);
     }
   }
@@ -301,9 +302,9 @@ public class ReferenceChecker {
             totalMissingDescriptorsWeight));
       }
     }
-    this.log.info(sb.toString());
+    logger.info(sb.toString());
     if (totalMissingDescriptorsWeight > 0.999) {
-      this.log.warn("Missing too many referenced "
+      logger.warn("Missing too many referenced "
           + "descriptors (" + totalMissingDescriptorsWeight + ").");
     }
   }
@@ -315,7 +316,7 @@ public class ReferenceChecker {
       gson.toJson(this.references, fw);
       fw.close();
     } catch (IOException e) {
-      this.log.warn("Cannot write references file for next "
+      logger.warn("Cannot write references file for next "
           + "run.", e);
     }
   }
diff --git a/src/main/java/org/torproject/collector/relaydescs/RelayDescriptorDownloader.java b/src/main/java/org/torproject/collector/relaydescs/RelayDescriptorDownloader.java
index f4e38f4..841a63b 100644
--- a/src/main/java/org/torproject/collector/relaydescs/RelayDescriptorDownloader.java
+++ b/src/main/java/org/torproject/collector/relaydescs/RelayDescriptorDownloader.java
@@ -212,7 +212,8 @@ public class RelayDescriptorDownloader {
   /**
    * Logger for this class.
    */
-  private Logger logger;
+  private static final Logger logger = LoggerFactory.getLogger(
+      RelayDescriptorDownloader.class);
 
   /**
    * Number of descriptors requested by directory authority to be included
@@ -319,9 +320,6 @@ public class RelayDescriptorDownloader {
     /* Shuffle list of authorities for better load balancing over time. */
     Collections.shuffle(this.authorities);
 
-    /* Initialize logger. */
-    this.logger = LoggerFactory.getLogger(RelayDescriptorDownloader.class);
-
     /* Prepare cut-off times and timestamp for the missing descriptors
      * list and the list of authorities to download all server and
      * extra-info descriptors from. */
@@ -345,7 +343,7 @@ public class RelayDescriptorDownloader {
         "stats/missing-relay-descriptors");
     if (this.missingDescriptorsFile.exists()) {
       try {
-        this.logger.debug("Reading file "
+        logger.debug("Reading file "
             + this.missingDescriptorsFile.getAbsolutePath() + "...");
         BufferedReader br = new BufferedReader(new FileReader(
             this.missingDescriptorsFile));
@@ -396,16 +394,16 @@ public class RelayDescriptorDownloader {
               }
             }
           } else {
-            this.logger.debug("Invalid line '" + line + "' in "
+            logger.debug("Invalid line '" + line + "' in "
                 + this.missingDescriptorsFile.getAbsolutePath()
                 + ". Ignoring.");
           }
         }
         br.close();
-        this.logger.debug("Finished reading file "
+        logger.debug("Finished reading file "
             + this.missingDescriptorsFile.getAbsolutePath() + ".");
       } catch (IOException e) {
-        this.logger.warn("Failed to read file "
+        logger.warn("Failed to read file "
             + this.missingDescriptorsFile.getAbsolutePath()
             + "! This means that we might forget to dowload relay "
             + "descriptors we are missing.", e);
@@ -419,7 +417,7 @@ public class RelayDescriptorDownloader {
         "stats/last-downloaded-all-descriptors");
     if (this.lastDownloadedAllDescriptorsFile.exists()) {
       try {
-        this.logger.debug("Reading file "
+        logger.debug("Reading file "
             + this.lastDownloadedAllDescriptorsFile.getAbsolutePath()
             + "...");
         BufferedReader br = new BufferedReader(new FileReader(
@@ -427,7 +425,7 @@ public class RelayDescriptorDownloader {
         String line;
         while ((line = br.readLine()) != null) {
           if (line.split(",").length != 2) {
-            this.logger.debug("Invalid line '" + line + "' in "
+            logger.debug("Invalid line '" + line + "' in "
                 + this.lastDownloadedAllDescriptorsFile.getAbsolutePath()
                 + ". Ignoring.");
           } else {
@@ -439,11 +437,11 @@ public class RelayDescriptorDownloader {
           }
         }
         br.close();
-        this.logger.debug("Finished reading file "
+        logger.debug("Finished reading file "
             + this.lastDownloadedAllDescriptorsFile.getAbsolutePath()
             + ".");
       } catch (IOException e) {
-        this.logger.warn("Failed to read file "
+        logger.warn("Failed to read file "
             + this.lastDownloadedAllDescriptorsFile.getAbsolutePath()
             + "! This means that we might download all server and "
             + "extra-info descriptors more often than we should.", e);
@@ -979,7 +977,7 @@ public class RelayDescriptorDownloader {
               this.rdp.storeMicrodescriptor(descBytes, digest256Hex,
                   digest256Base64, validAfter);
             } catch (ParseException e) {
-              this.logger.warn("Could not parse "
+              logger.warn("Could not parse "
                   + "valid-after time '" + validAfterTime + "' in "
                   + "microdescriptor key. Not storing microdescriptor.",
                   e);
@@ -1005,7 +1003,7 @@ public class RelayDescriptorDownloader {
     int missingServerDescriptors = 0;
     int missingExtraInfoDescriptors = 0;
     try {
-      this.logger.debug("Writing file "
+      logger.debug("Writing file "
           + this.missingDescriptorsFile.getAbsolutePath() + "...");
       this.missingDescriptorsFile.getParentFile().mkdirs();
       BufferedWriter bw = new BufferedWriter(new FileWriter(
@@ -1032,10 +1030,10 @@ public class RelayDescriptorDownloader {
         bw.write(key + "," + value + "\n");
       }
       bw.close();
-      this.logger.debug("Finished writing file "
+      logger.debug("Finished writing file "
           + this.missingDescriptorsFile.getAbsolutePath() + ".");
     } catch (IOException e) {
-      this.logger.warn("Failed writing "
+      logger.warn("Failed writing "
           + this.missingDescriptorsFile.getAbsolutePath() + "!", e);
     }
 
@@ -1043,7 +1041,7 @@ public class RelayDescriptorDownloader {
      * last downloaded all server and extra-info descriptors from them to
      * disk. */
     try {
-      this.logger.debug("Writing file "
+      logger.debug("Writing file "
           + this.lastDownloadedAllDescriptorsFile.getAbsolutePath()
           + "...");
       this.lastDownloadedAllDescriptorsFile.getParentFile().mkdirs();
@@ -1056,26 +1054,26 @@ public class RelayDescriptorDownloader {
         bw.write(authority + "," + lastDownloaded + "\n");
       }
       bw.close();
-      this.logger.debug("Finished writing file "
+      logger.debug("Finished writing file "
           + this.lastDownloadedAllDescriptorsFile.getAbsolutePath()
           + ".");
     } catch (IOException e) {
-      this.logger.warn("Failed writing "
+      logger.warn("Failed writing "
           + this.lastDownloadedAllDescriptorsFile.getAbsolutePath() + "!",
           e);
     }
 
     /* Log statistics about this execution. */
-    this.logger.info("Finished downloading relay descriptors from the "
+    logger.info("Finished downloading relay descriptors from the "
         + "directory authorities.");
-    this.logger.info("At the beginning of this execution, we were "
+    logger.info("At the beginning of this execution, we were "
         + "missing " + oldMissingConsensuses + " consensus(es), "
         + oldMissingMicrodescConsensuses + " microdesc consensus(es), "
         + oldMissingVotes + " vote(s), " + oldMissingServerDescriptors
         + " server descriptor(s), " + oldMissingExtraInfoDescriptors
         + " extra-info descriptor(s), and " + oldMissingMicrodescriptors
         + " microdescriptor(s).");
-    this.logger.info("During this execution, we added "
+    logger.info("During this execution, we added "
         + this.newMissingConsensuses + " consensus(es), "
         + this.newMissingMicrodescConsensuses
         + " microdesc consensus(es), " + this.newMissingVotes
@@ -1085,7 +1083,7 @@ public class RelayDescriptorDownloader {
         + this.newMissingMicrodescriptors + " microdescriptor(s) to the "
         + "missing list, some of which we also "
         + "requested and removed from the list again.");
-    this.logger.info("We requested " + this.requestedConsensuses
+    logger.info("We requested " + this.requestedConsensuses
         + " consensus(es), " + this.requestedMicrodescConsensuses
         + " microdesc consensus(es), " + this.requestedVotes
         + " vote(s), " + this.requestedMissingServerDescriptors
@@ -1102,9 +1100,9 @@ public class RelayDescriptorDownloader {
       sb.append(" " + authority + "="
           + this.requestsByAuthority.get(authority));
     }
-    this.logger.info("We sent these numbers of requests to the directory "
+    logger.info("We sent these numbers of requests to the directory "
         + "authorities:" + sb.toString());
-    this.logger.info("We successfully downloaded "
+    logger.info("We successfully downloaded "
         + this.downloadedConsensuses + " consensus(es), "
         + this.downloadedMicrodescConsensuses
         + " microdesc consensus(es), " + this.downloadedVotes
@@ -1118,7 +1116,7 @@ public class RelayDescriptorDownloader {
         + "descriptor(s) when downloading all descriptors, and "
         + this.downloadedMissingMicrodescriptors
         + " missing microdescriptor(s).");
-    this.logger.info("At the end of this execution, we are missing "
+    logger.info("At the end of this execution, we are missing "
         + missingConsensuses + " consensus(es), "
         + missingMicrodescConsensuses + " microdesc consensus(es), "
         + missingVotes + " vote(s), " + missingServerDescriptors
diff --git a/src/main/java/org/torproject/collector/relaydescs/RelayDescriptorParser.java b/src/main/java/org/torproject/collector/relaydescs/RelayDescriptorParser.java
index 8c44c38..fda87bf 100644
--- a/src/main/java/org/torproject/collector/relaydescs/RelayDescriptorParser.java
+++ b/src/main/java/org/torproject/collector/relaydescs/RelayDescriptorParser.java
@@ -43,7 +43,8 @@ public class RelayDescriptorParser {
   /**
    * Logger for this class.
    */
-  private Logger logger;
+  private static final Logger logger = LoggerFactory.getLogger(
+      RelayDescriptorParser.class);
 
   private SimpleDateFormat dateTimeFormat;
 
@@ -53,9 +54,6 @@ public class RelayDescriptorParser {
   public RelayDescriptorParser(ArchiveWriter aw) {
     this.aw = aw;
 
-    /* Initialize logger. */
-    this.logger = LoggerFactory.getLogger(RelayDescriptorParser.class);
-
     this.dateTimeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
     this.dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
   }
@@ -86,7 +84,7 @@ public class RelayDescriptorParser {
         line = br.readLine();
       } while (line != null && line.startsWith("@"));
       if (line == null) {
-        this.logger.debug("We were given an empty descriptor for "
+        logger.debug("We were given an empty descriptor for "
             + "parsing. Ignoring.");
         return false;
       }
@@ -154,7 +152,7 @@ public class RelayDescriptorParser {
                   + lastRelayIdentity + "," + serverDesc);
               serverDescriptorDigests.add(serverDesc);
             } else {
-              this.logger.warn("Could not parse r line '"
+              logger.warn("Could not parse r line '"
                   + line + "' in descriptor. Skipping.");
               break;
             }
@@ -171,7 +169,7 @@ public class RelayDescriptorParser {
             } else if (parts.length != 3
                 || !parts[2].startsWith("sha256=")
                 || parts[2].length() != 50) {
-              this.logger.warn("Could not parse m line '"
+              logger.warn("Could not parse m line '"
                   + line + "' in descriptor. Skipping.");
               break;
             }
@@ -319,10 +317,10 @@ public class RelayDescriptorParser {
       }
       br.close();
     } catch (IOException e) {
-      this.logger.warn("Could not parse descriptor. "
+      logger.warn("Could not parse descriptor. "
           + "Skipping.", e);
     } catch (ParseException e) {
-      this.logger.warn("Could not parse descriptor. "
+      logger.warn("Could not parse descriptor. "
           + "Skipping.", e);
     }
     return stored;
diff --git a/src/main/java/org/torproject/collector/torperf/TorperfDownloader.java b/src/main/java/org/torproject/collector/torperf/TorperfDownloader.java
index 6f8daf0..635c5a3 100644
--- a/src/main/java/org/torproject/collector/torperf/TorperfDownloader.java
+++ b/src/main/java/org/torproject/collector/torperf/TorperfDownloader.java
@@ -33,7 +33,9 @@ import java.util.TreeMap;
  * configured sources, append them to the files we already have, and merge
  * the two files into the .tpf format. */
 public class TorperfDownloader extends CollecTorMain {
-  private static Logger logger = LoggerFactory.getLogger(TorperfDownloader.class);
+
+  private static final Logger logger = LoggerFactory.getLogger(
+      TorperfDownloader.class);
 
   public TorperfDownloader(Configuration config) {
     super(config);
@@ -99,7 +101,7 @@ public class TorperfDownloader extends CollecTorMain {
           }
         }
         if (fileName == null || timestamp == null) {
-          this.logger.warn("Invalid line '" + line + "' in "
+          logger.warn("Invalid line '" + line + "' in "
               + this.torperfLastMergedFile.getAbsolutePath() + ".  "
               + "Ignoring past history of merging .data and .extradata "
               + "files.");
@@ -110,7 +112,7 @@ public class TorperfDownloader extends CollecTorMain {
       }
       br.close();
     } catch (IOException e) {
-      this.logger.warn("Error while reading '"
+      logger.warn("Error while reading '"
           + this.torperfLastMergedFile.getAbsolutePath() + ".  Ignoring "
           + "past history of merging .data and .extradata files.");
       this.lastMergedTimestamps.clear();
@@ -130,7 +132,7 @@ public class TorperfDownloader extends CollecTorMain {
       }
       bw.close();
     } catch (IOException e) {
-      this.logger.warn("Error while writing '"
+      logger.warn("Error while writing '"
           + this.torperfLastMergedFile.getAbsolutePath() + ".  This may "
           + "result in ignoring history of merging .data and .extradata "
           + "files in the next execution.", e);
@@ -145,7 +147,7 @@ public class TorperfDownloader extends CollecTorMain {
     try {
       fileSize = Integer.parseInt(parts[1]);
     } catch (NumberFormatException e) {
-      this.logger.warn("Could not parse file size in "
+      logger.warn("Could not parse file size in "
           + "TorperfFiles configuration line '" + torperfFilesLine
           + "'.", e);
       return;
@@ -182,7 +184,7 @@ public class TorperfDownloader extends CollecTorMain {
       skipUntil = this.mergeFiles(dataOutputFile, extradataOutputFile,
           sourceName, fileSize, skipUntil);
     } catch (IOException e) {
-      this.logger.warn("Failed merging " + dataOutputFile
+      logger.warn("Failed merging " + dataOutputFile
           + " and " + extradataOutputFile + ".", e);
     }
     if (skipUntil != null) {
@@ -212,14 +214,14 @@ public class TorperfDownloader extends CollecTorMain {
         }
         br.close();
       } catch (IOException e) {
-        this.logger.warn("Failed reading '"
+        logger.warn("Failed reading '"
             + outputFile.getAbsolutePath() + "' to determine the first "
             + "line to append to it.", e);
         return false;
       }
     }
     try {
-      this.logger.debug("Downloading " + (isDataFile ? ".data" :
+      logger.debug("Downloading " + (isDataFile ? ".data" :
           ".extradata") + " file from '" + urlString + "' and merging it "
           + "into '" + outputFile.getAbsolutePath() + "'.");
       URL url = new URL(urlString);
@@ -247,19 +249,19 @@ public class TorperfDownloader extends CollecTorMain {
       bw.close();
       br.close();
       if (!copyLines) {
-        this.logger.warn("The last timestamp line in '"
+        logger.warn("The last timestamp line in '"
             + outputFile.getAbsolutePath() + "' is not contained in the "
             + "new file downloaded from '" + url + "'.  Cannot append "
             + "new lines without possibly leaving a gap.  Skipping.");
         return false;
       }
     } catch (IOException e) {
-      this.logger.warn("Failed downloading and/or merging '"
+      logger.warn("Failed downloading and/or merging '"
           + urlString + "'.", e);
       return false;
     }
     if (lastTimestampLine == null) {
-      this.logger.warn("'" + outputFile.getAbsolutePath()
+      logger.warn("'" + outputFile.getAbsolutePath()
           + "' doesn't contain any timestamp lines.  Unable to check "
           + "whether that file is stale or not.");
     } else {
@@ -275,7 +277,7 @@ public class TorperfDownloader extends CollecTorMain {
       }
       if (lastTimestampMillis < System.currentTimeMillis()
           - 330L * 60L * 1000L) {
-        this.logger.warn("The last timestamp in '"
+        logger.warn("The last timestamp in '"
             + outputFile.getAbsolutePath() + "' is more than 5:30 hours "
             + "old: " + lastTimestampMillis);
       }
@@ -287,12 +289,12 @@ public class TorperfDownloader extends CollecTorMain {
       String source, int fileSize, String skipUntil) throws IOException,
       ConfigurationException {
     if (!dataFile.exists() || !extradataFile.exists()) {
-      this.logger.warn("File " + dataFile.getAbsolutePath() + " or "
+      logger.warn("File " + dataFile.getAbsolutePath() + " or "
           + extradataFile.getAbsolutePath() + " is missing.");
       return null;
     }
-    this.logger.debug("Merging " + dataFile.getAbsolutePath() + " and "
-          + extradataFile.getAbsolutePath() + " into .tpf format.");
+    logger.debug("Merging " + dataFile.getAbsolutePath() + " and "
+        + extradataFile.getAbsolutePath() + " into .tpf format.");
     BufferedReader brD = new BufferedReader(new FileReader(dataFile));
     BufferedReader brE = new BufferedReader(new FileReader(extradataFile));
     String lineD = brD.readLine();
@@ -307,14 +309,14 @@ public class TorperfDownloader extends CollecTorMain {
        * format, either with additional information from the .extradata
        * file or without it. */
       if (lineD.isEmpty()) {
-        this.logger.trace("Skipping empty line " + dataFile.getName()
+        logger.trace("Skipping empty line " + dataFile.getName()
             + ":" + skippedLineCount++ + ".");
         lineD = brD.readLine();
         continue;
       }
       SortedMap<String, String> data = this.parseDataLine(lineD);
       if (data == null) {
-        this.logger.trace("Skipping illegal line " + dataFile.getName()
+        logger.trace("Skipping illegal line " + dataFile.getName()
             + ":" + skippedLineCount++ + " '" + lineD + "'.");
         lineD = brD.readLine();
         continue;
@@ -322,7 +324,7 @@ public class TorperfDownloader extends CollecTorMain {
       String dataComplete = data.get("DATACOMPLETE");
       double dataCompleteSeconds = Double.parseDouble(dataComplete);
       if (skipUntil != null && dataComplete.compareTo(skipUntil) < 0) {
-        this.logger.trace("Skipping " + dataFile.getName() + ":"
+        logger.trace("Skipping " + dataFile.getName() + ":"
             + skippedLineCount++ + " which we already processed before.");
         lineD = brD.readLine();
         continue;
@@ -334,34 +336,34 @@ public class TorperfDownloader extends CollecTorMain {
       SortedMap<String, String> extradata = null;
       while (lineE != null) {
         if (lineE.isEmpty()) {
-          this.logger.trace("Skipping " + extradataFile.getName() + ":"
+          logger.trace("Skipping " + extradataFile.getName() + ":"
               + skippedExtraDataCount++ + " which is empty.");
           lineE = brE.readLine();
           continue;
         }
         if (lineE.startsWith("BUILDTIMEOUT_SET ")) {
-          this.logger.trace("Skipping " + extradataFile.getName() + ":"
+          logger.trace("Skipping " + extradataFile.getName() + ":"
               + skippedExtraDataCount++ + " which is a BUILDTIMEOUT_SET "
               + "line.");
           lineE = brE.readLine();
           continue;
         } else if (lineE.startsWith("ok ")
             || lineE.startsWith("error ")) {
-          this.logger.trace("Skipping " + extradataFile.getName() + ":"
+          logger.trace("Skipping " + extradataFile.getName() + ":"
               + skippedExtraDataCount++ + " which is in the old format.");
           lineE = brE.readLine();
           continue;
         }
         extradata = this.parseExtradataLine(lineE);
         if (extradata == null) {
-          this.logger.trace("Skipping Illegal line "
+          logger.trace("Skipping Illegal line "
               + extradataFile.getName() + ":" + skippedExtraDataCount++
               + " '" + lineE + "'.");
           lineE = brE.readLine();
           continue;
         }
         if (!extradata.containsKey("USED_AT")) {
-          this.logger.trace("Skipping " + extradataFile.getName() + ":"
+          logger.trace("Skipping " + extradataFile.getName() + ":"
               + skippedExtraDataCount++ + " which doesn't contain a "
               + "USED_AT element.");
           lineE = brE.readLine();
@@ -370,7 +372,7 @@ public class TorperfDownloader extends CollecTorMain {
         String usedAt = extradata.get("USED_AT");
         double usedAtSeconds = Double.parseDouble(usedAt);
         if (skipUntil != null && usedAt.compareTo(skipUntil) < 0) {
-          this.logger.trace("Skipping " + extradataFile.getName() + ":"
+          logger.trace("Skipping " + extradataFile.getName() + ":"
               + skippedExtraDataCount++ + " which we already processed "
               + "before.");
           lineE = brE.readLine();
@@ -378,17 +380,17 @@ public class TorperfDownloader extends CollecTorMain {
         }
         maxUsedAt = usedAt;
         if (Math.abs(usedAtSeconds - dataCompleteSeconds) <= 1.0) {
-          this.logger.debug("Merging " + extradataFile.getName() + ":"
+          logger.debug("Merging " + extradataFile.getName() + ":"
               + skippedExtraDataCount++ + " into the current .data line.");
           lineE = brE.readLine();
           break;
         } else if (usedAtSeconds > dataCompleteSeconds) {
-          this.logger.trace("Comparing " + extradataFile.getName()
+          logger.trace("Comparing " + extradataFile.getName()
               + " to the next .data line.");
           extradata = null;
           break;
         } else {
-          this.logger.trace("Skipping " + extradataFile.getName() + ":"
+          logger.trace("Skipping " + extradataFile.getName() + ":"
               + skippedExtraDataCount++ + " which is too old to be "
               + "merged with " + dataFile.getName() + ":"
               + skippedLineCount + ".");
@@ -406,13 +408,13 @@ public class TorperfDownloader extends CollecTorMain {
         keysAndValues.putAll(extradata);
       }
       keysAndValues.putAll(data);
-      this.logger.debug("Writing " + dataFile.getName() + ":"
+      logger.debug("Writing " + dataFile.getName() + ":"
           + skippedLineCount++ + ".");
       lineD = brD.readLine();
       try {
         this.writeTpfLine(source, fileSize, keysAndValues);
       } catch (IOException ex) {
-        this.logger.warn("Error writing output line.  "
+        logger.warn("Error writing output line.  "
             + "Aborting to merge " + dataFile.getName() + " and "
             + extradataFile.getName() + ".", skippedExtraDataCount);
         break;
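
For reference, here is a minimal sketch of the before/after logger pattern
visible in the hunks above. The class name and log message are illustrative
only and do not appear in the repository:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ExampleDownloader {

      /* Before (illustrative): an instance field initialized in the
       * constructor and accessed via `this.logger`. */
      // private Logger logger;
      //
      // public ExampleDownloader() {
      //   this.logger = LoggerFactory.getLogger(ExampleDownloader.class);
      // }

      /* After (illustrative): a `static final` field named `logger`,
       * accessed without the `this.` qualifier. */
      private static final Logger logger = LoggerFactory.getLogger(
          ExampleDownloader.class);

      public void run() {
        logger.info("Example log call through the static logger.");
      }
    }

The emitted log output is unchanged by this pattern; only the field
declaration and the way the field is referenced differ.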




