[tor-commits] [collector/master] Simplify logging configuration.

karsten at torproject.org
Tue Mar 31 07:28:44 UTC 2020


commit 77d9429797594113d2876ef5c3600d8fa37caf46
Author: Karsten Loesing <karsten.loesing at gmx.net>
Date:   Tue Mar 31 09:18:17 2020 +0200

    Simplify logging configuration.
    
    Implements #33549.
---
 CHANGELOG.md                                       |   3 +
 src/build                                          |   2 +-
 .../org/torproject/metrics/collector/Main.java     |   4 +-
 .../metrics/collector/cron/ShutdownHook.java       |   7 +-
 .../persist/BandwidthFilePersistence.java          |   7 +-
 .../collector/persist/DescriptorPersistence.java   |   6 -
 .../collector/persist/PersistenceUtils.java        |   8 +-
 .../metrics/collector/persist/VotePersistence.java |   7 +-
 .../metrics/collector/sync/SyncManager.java        |  21 +--
 .../metrics/collector/sync/SyncPersistence.java    |  11 +-
 .../metrics/collector/webstats/LogFileMap.java     |   7 +-
 .../metrics/collector/webstats/LogMetadata.java    |   6 +-
 .../collector/webstats/SanitizeWeblogs.java        |  33 ++--
 .../collector/webstats/WebServerAccessLogLine.java |   4 +-
 src/main/resources/logback.xml                     | 167 ---------------------
 15 files changed, 70 insertions(+), 223 deletions(-)
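
The diff below renames the SLF4J logger fields from "log" to "logger" and drops the shared protected logger in DescriptorPersistence, so every class now declares its own private logger. As an illustrative sketch (mirroring the "+" lines below, not additional code from the commit), the resulting pattern in a subclass such as VotePersistence looks like this:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class VotePersistence
        extends DescriptorPersistence<RelayNetworkStatusVote> {

      /* Each class owns its logger; nothing is inherited from the
       * DescriptorPersistence superclass anymore. */
      private static final Logger logger
          = LoggerFactory.getLogger(VotePersistence.class);

      /* ... */
    }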

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5606180..c284d47 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,8 @@
 # Changes in version 1.1?.? - 2020-0?-??
 
+ * Minor changes
+   - Simplify logging configuration.
+
 
 # Changes in version 1.14.1 - 2020-01-16
 
diff --git a/src/build b/src/build
index 264e498..fd85646 160000
--- a/src/build
+++ b/src/build
@@ -1 +1 @@
-Subproject commit 264e498f54a20f7d299daaf2533d043f880e6a8b
+Subproject commit fd856466bcb260f53ef69a24c102d0e49d171cc3
diff --git a/src/main/java/org/torproject/metrics/collector/Main.java b/src/main/java/org/torproject/metrics/collector/Main.java
index 3822353..3e8ec33 100644
--- a/src/main/java/org/torproject/metrics/collector/Main.java
+++ b/src/main/java/org/torproject/metrics/collector/Main.java
@@ -39,7 +39,7 @@ import java.util.Map;
  */
 public class Main {
 
-  private static final Logger log = LoggerFactory.getLogger(Main.class);
+  private static final Logger logger = LoggerFactory.getLogger(Main.class);
 
   public static final String CONF_FILE = "collector.properties";
 
@@ -116,7 +116,7 @@ public class Main {
           + ") and provide at least one data source and one data sink. "
           + "Refer to the manual for more information.");
     } catch (IOException e) {
-      log.error("Cannot write default configuration.", e);
+      logger.error("Cannot write default configuration.", e);
       throw new RuntimeException(e);
     }
   }
diff --git a/src/main/java/org/torproject/metrics/collector/cron/ShutdownHook.java b/src/main/java/org/torproject/metrics/collector/cron/ShutdownHook.java
index ec34a19..7e0d0be 100644
--- a/src/main/java/org/torproject/metrics/collector/cron/ShutdownHook.java
+++ b/src/main/java/org/torproject/metrics/collector/cron/ShutdownHook.java
@@ -11,7 +11,8 @@ import org.slf4j.LoggerFactory;
  */
 public final class ShutdownHook extends Thread {
 
-  private static final Logger log = LoggerFactory.getLogger(ShutdownHook.class);
+  private static final Logger logger
+      = LoggerFactory.getLogger(ShutdownHook.class);
 
   private boolean stayAlive = true;
 
@@ -37,13 +38,13 @@ public final class ShutdownHook extends Thread {
 
   @Override
   public void run() {
-    log.info("Shutdown in progress ... ");
+    logger.info("Shutdown in progress ... ");
     Scheduler.getInstance().shutdownScheduler();
     synchronized (this) {
       this.stayAlive = false;
       this.notify();
     }
-    log.info("Shutdown finished. Exiting.");
+    logger.info("Shutdown finished. Exiting.");
   }
 }
 
diff --git a/src/main/java/org/torproject/metrics/collector/persist/BandwidthFilePersistence.java b/src/main/java/org/torproject/metrics/collector/persist/BandwidthFilePersistence.java
index bbbfca5..8664ae8 100644
--- a/src/main/java/org/torproject/metrics/collector/persist/BandwidthFilePersistence.java
+++ b/src/main/java/org/torproject/metrics/collector/persist/BandwidthFilePersistence.java
@@ -7,6 +7,8 @@ import org.torproject.descriptor.BandwidthFile;
 import org.torproject.metrics.collector.conf.Annotation;
 
 import org.apache.commons.codec.digest.DigestUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.nio.file.Paths;
 import java.time.LocalDateTime;
@@ -16,6 +18,9 @@ import java.time.format.DateTimeFormatter;
 public class BandwidthFilePersistence
     extends DescriptorPersistence<BandwidthFile> {
 
+  private static final Logger logger
+      = LoggerFactory.getLogger(BandwidthFilePersistence.class);
+
   private static final String BANDWIDTH = "bandwidth";
   private static final String BANDWIDTHS = "bandwidths";
 
@@ -57,7 +62,7 @@ public class BandwidthFilePersistence
       System.arraycopy(bytes, start, forDigest, 0, forDigest.length);
       digest = DigestUtils.sha256Hex(forDigest).toUpperCase();
     } else {
-      log.error("No digest calculation possible.  Returning empty string.");
+      logger.error("No digest calculation possible.  Returning empty string.");
     }
     return digest;
   }
diff --git a/src/main/java/org/torproject/metrics/collector/persist/DescriptorPersistence.java b/src/main/java/org/torproject/metrics/collector/persist/DescriptorPersistence.java
index 7c648ef..a2c9bc4 100644
--- a/src/main/java/org/torproject/metrics/collector/persist/DescriptorPersistence.java
+++ b/src/main/java/org/torproject/metrics/collector/persist/DescriptorPersistence.java
@@ -5,18 +5,12 @@ package org.torproject.metrics.collector.persist;
 
 import org.torproject.descriptor.Descriptor;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import java.nio.file.Paths;
 import java.nio.file.StandardOpenOption;
 import java.util.List;
 
 public abstract class DescriptorPersistence<T extends Descriptor> {
 
-  protected static final Logger log = LoggerFactory.getLogger(
-      DescriptorPersistence.class);
-
   protected static final String BRIDGEDESCS = "bridge-descriptors";
   protected static final String BRIDGEPOOLASSIGNMENTS
       = "bridge-pool-assignments";
diff --git a/src/main/java/org/torproject/metrics/collector/persist/PersistenceUtils.java b/src/main/java/org/torproject/metrics/collector/persist/PersistenceUtils.java
index 72ad73a..da1403c 100644
--- a/src/main/java/org/torproject/metrics/collector/persist/PersistenceUtils.java
+++ b/src/main/java/org/torproject/metrics/collector/persist/PersistenceUtils.java
@@ -23,7 +23,7 @@ import java.util.TimeZone;
 
 public class PersistenceUtils {
 
-  private static final Logger log = LoggerFactory.getLogger(
+  private static final Logger logger = LoggerFactory.getLogger(
       PersistenceUtils.class);
 
   public static final String TEMPFIX = ".tmp";
@@ -55,14 +55,14 @@ public class PersistenceUtils {
       }
       return createOrAppend(typeAnnotation, data, tmpPath, option);
     } catch (FileAlreadyExistsException faee) {
-      log.debug("Already have descriptor(s) of type '{}': {}. Skipping.",
+      logger.debug("Already have descriptor(s) of type '{}': {}. Skipping.",
           new String(typeAnnotation), outputPath);
     } catch (IOException | SecurityException
           | UnsupportedOperationException e) {
-      log.warn("Could not store descriptor(s) {} of type '{}'",
+      logger.warn("Could not store descriptor(s) {} of type '{}'",
           outputPath, new String(typeAnnotation), e);
     } catch (Throwable th) {  // anything else
-      log.warn("Problem storing descriptor(s) {} of type '{}'",
+      logger.warn("Problem storing descriptor(s) {} of type '{}'",
           outputPath, new String(typeAnnotation), th);
     }
     return false;
diff --git a/src/main/java/org/torproject/metrics/collector/persist/VotePersistence.java b/src/main/java/org/torproject/metrics/collector/persist/VotePersistence.java
index 461ca40..5973795 100644
--- a/src/main/java/org/torproject/metrics/collector/persist/VotePersistence.java
+++ b/src/main/java/org/torproject/metrics/collector/persist/VotePersistence.java
@@ -7,6 +7,8 @@ import org.torproject.descriptor.RelayNetworkStatusVote;
 import org.torproject.metrics.collector.conf.Annotation;
 
 import org.apache.commons.codec.digest.DigestUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Paths;
@@ -14,6 +16,9 @@ import java.nio.file.Paths;
 public class VotePersistence
     extends DescriptorPersistence<RelayNetworkStatusVote> {
 
+  private static final Logger logger
+      = LoggerFactory.getLogger(VotePersistence.class);
+
   private static final String VOTE = "vote";
   private static final String VOTES = "votes";
 
@@ -56,7 +61,7 @@ public class VotePersistence
       System.arraycopy(bytes, start, forDigest, 0, sig - start);
       digest = DigestUtils.sha1Hex(forDigest).toUpperCase();
     } else {
-      log.error("No digest calculation possible.  Returning empty string.");
+      logger.error("No digest calculation possible.  Returning empty string.");
     }
     return digest;
   }
diff --git a/src/main/java/org/torproject/metrics/collector/sync/SyncManager.java b/src/main/java/org/torproject/metrics/collector/sync/SyncManager.java
index e42ae61..1fa1347 100644
--- a/src/main/java/org/torproject/metrics/collector/sync/SyncManager.java
+++ b/src/main/java/org/torproject/metrics/collector/sync/SyncManager.java
@@ -25,7 +25,8 @@ import java.util.Set;
 
 public class SyncManager {
 
-  private static final Logger log = LoggerFactory.getLogger(SyncManager.class);
+  private static final Logger logger
+      = LoggerFactory.getLogger(SyncManager.class);
   public static final String SYNCORIGINS = "SyncOrigins";
 
   private Date collectionDate;
@@ -53,12 +54,12 @@ public class SyncManager {
         File storage = new File(basePath.toFile(),
             marker + "-" + source.getHost());
         storage.mkdirs();
-        log.info("Collecting {} from {} ...", marker, source.getHost());
+        logger.info("Collecting {} from {} ...", marker, source.getHost());
         descriptorCollector.collectDescriptors(source.toString(),
             dirs.toArray(new String[dirs.size()]), 0L, storage, true);
-        log.info("Done collecting {} from {}.", marker, source.getHost());
+        logger.info("Done collecting {} from {}.", marker, source.getHost());
       } catch (Throwable th) { // catch all
-        log.warn("Cannot download {} from {}.", dirs, source, th);
+        logger.warn("Cannot download {} from {}.", dirs, source, th);
       }
     }
   }
@@ -72,7 +73,7 @@ public class SyncManager {
         = new ProcessCriterium(UnparseableDescriptor.class);
     for (URL source : sources) {
       File base = new File(basePath.toFile(), marker + "-" + source.getHost());
-      log.info("Merging {} from {} into storage ...", marker,
+      logger.info("Merging {} from {} into storage ...", marker,
           source.getHost());
       for (Map.Entry<String, Class<? extends Descriptor>> entry
           : mapPathDesc.entrySet()) {
@@ -86,21 +87,21 @@ public class SyncManager {
             "sync-history-" + source.getHost() + "-" + marker + "-"
             + histFileEnding);
         descriptorReader.setHistoryFile(historyFile);
-        log.info("Reading {} of type {} ... ", marker, histFileEnding);
+        logger.info("Reading {} of type {} ... ", marker, histFileEnding);
         Iterator<Descriptor> descriptors
             = descriptorReader.readDescriptors(descFile).iterator();
-        log.info("Done reading {} of type {}.", marker, histFileEnding);
+        logger.info("Done reading {} of type {}.", marker, histFileEnding);
         Criterium<Descriptor> crit = new ProcessCriterium(entry.getValue());
         while (descriptors.hasNext()) {
           Descriptor desc = descriptors.next();
           if (unparseable.applies(desc)) {
             Exception ex
                 = ((UnparseableDescriptor)desc).getDescriptorParseException();
-            log.warn("Parsing of {} caused Exception(s). Processing anyway.",
+            logger.warn("Parsing of {} caused Exception(s). Processing anyway.",
                 desc.getDescriptorFile(), ex);
           }
           if (!crit.applies(desc)) {
-            log.warn("Not processing {} in {}.", desc.getClass().getName(),
+            logger.warn("Not processing {} in {}.", desc.getClass().getName(),
                 desc.getDescriptorFile());
             continue;
           }
@@ -110,7 +111,7 @@ public class SyncManager {
         persist.cleanDirectory();
         descriptorReader.saveHistoryFile(historyFile);
       }
-      log.info("Done merging {} from {}.", marker, source.getHost());
+      logger.info("Done merging {} from {}.", marker, source.getHost());
     }
   }
 
diff --git a/src/main/java/org/torproject/metrics/collector/sync/SyncPersistence.java b/src/main/java/org/torproject/metrics/collector/sync/SyncPersistence.java
index f81e164..adffb93 100644
--- a/src/main/java/org/torproject/metrics/collector/sync/SyncPersistence.java
+++ b/src/main/java/org/torproject/metrics/collector/sync/SyncPersistence.java
@@ -48,7 +48,7 @@ import java.nio.file.Path;
 /** Provides persistence for descriptors based on the descriptor type. */
 public class SyncPersistence {
 
-  private static final Logger log
+  private static final Logger logger
       = LoggerFactory.getLogger(SyncPersistence.class);
 
   private final Path recentPath;
@@ -72,7 +72,7 @@ public class SyncPersistence {
     try {
       PersistenceUtils.cleanDirectory(recentPath);
     } catch (IOException ioe) {
-      log.error("Cleaning of {} failed.", recentPath.toString(), ioe);
+      logger.error("Cleaning of {} failed.", recentPath.toString(), ioe);
     }
   }
 
@@ -126,7 +126,8 @@ public class SyncPersistence {
         case "BridgeNetworkStatus": // need to infer authId from filename
           String[] filenameParts = filename.split(DASH);
           if (filenameParts.length < 3) {
-            log.error("Invalid BridgeNetworkStatus; skipping: {}.", filename);
+            logger.error("Invalid BridgeNetworkStatus; skipping: {}.",
+                filename);
             break;
           }
           descPersist = new StatusPersistence(
@@ -160,7 +161,7 @@ public class SyncPersistence {
           descPersist = new BridgedbMetricsPersistence((BridgedbMetrics) desc);
           break;
         default:
-          log.trace("Invalid descriptor type {} for sync-merge.",
+          logger.trace("Invalid descriptor type {} for sync-merge.",
               clazz.getName());
           continue;
       }
@@ -171,7 +172,7 @@ public class SyncPersistence {
       break;
     }
     if (!recognizedAndWritten) {
-      log.error("Unknown descriptor type {} implementing {}.",
+      logger.error("Unknown descriptor type {} implementing {}.",
           desc.getClass().getSimpleName(), desc.getClass().getInterfaces());
     }
   }
diff --git a/src/main/java/org/torproject/metrics/collector/webstats/LogFileMap.java b/src/main/java/org/torproject/metrics/collector/webstats/LogFileMap.java
index 5be6b50..fb39202 100644
--- a/src/main/java/org/torproject/metrics/collector/webstats/LogFileMap.java
+++ b/src/main/java/org/torproject/metrics/collector/webstats/LogFileMap.java
@@ -22,7 +22,8 @@ import java.util.TreeMap;
 public class LogFileMap
     extends TreeMap<String, TreeMap<String, TreeMap<LocalDate, LogMetadata>>> {
 
-  private static final Logger log = LoggerFactory.getLogger(LogFileMap.class);
+  private static final Logger logger
+      = LoggerFactory.getLogger(LogFileMap.class);
 
   /**
    * The map to keep track of the logfiles by virtual host,
@@ -54,13 +55,13 @@ public class LogFileMap
 
         private FileVisitResult logIfError(Path path, IOException ex) {
           if (null != ex) {
-            log.warn("Cannot process '{}'.", path, ex);
+            logger.warn("Cannot process '{}'.", path, ex);
           }
           return FileVisitResult.CONTINUE;
         }
       });
     } catch (IOException ex) {
-      log.error("Cannot read directory '{}'.", startDir, ex);
+      logger.error("Cannot read directory '{}'.", startDir, ex);
     }
   }
 
diff --git a/src/main/java/org/torproject/metrics/collector/webstats/LogMetadata.java b/src/main/java/org/torproject/metrics/collector/webstats/LogMetadata.java
index d3bf8fb..2cac619 100644
--- a/src/main/java/org/torproject/metrics/collector/webstats/LogMetadata.java
+++ b/src/main/java/org/torproject/metrics/collector/webstats/LogMetadata.java
@@ -17,7 +17,7 @@ import java.util.regex.Pattern;
 
 public class LogMetadata {
 
-  private static final Logger log
+  private static final Logger logger
       = LoggerFactory.getLogger(LogMetadata.class);
 
   /** The mandatory web server log descriptor file name pattern. */
@@ -67,7 +67,7 @@ public class LogMetadata {
               = LocalDate.parse(mat.group(2), DateTimeFormatter.BASIC_ISO_DATE);
           if (null == virtualHost || null == physicalHost || null == logDate
               || virtualHost.isEmpty() || physicalHost.isEmpty()) {
-            log.debug("Non-matching file encountered: '{}/{}'.",
+            logger.debug("Non-matching file encountered: '{}/{}'.",
                 parentPath, file);
           } else {
             metadata = new LogMetadata(logPath, physicalHost, virtualHost,
@@ -77,7 +77,7 @@ public class LogMetadata {
       }
     } catch (Throwable ex) {
       metadata = null;
-      log.debug("Problem parsing path '{}'.", logPath, ex);
+      logger.debug("Problem parsing path '{}'.", logPath, ex);
     }
     return Optional.ofNullable(metadata);
   }
diff --git a/src/main/java/org/torproject/metrics/collector/webstats/SanitizeWeblogs.java b/src/main/java/org/torproject/metrics/collector/webstats/SanitizeWeblogs.java
index 6c8a495..670f686 100644
--- a/src/main/java/org/torproject/metrics/collector/webstats/SanitizeWeblogs.java
+++ b/src/main/java/org/torproject/metrics/collector/webstats/SanitizeWeblogs.java
@@ -55,7 +55,7 @@ import java.util.stream.Stream;
  */
 public class SanitizeWeblogs extends CollecTorMain {
 
-  private static final Logger log =
+  private static final Logger logger =
       LoggerFactory.getLogger(SanitizeWeblogs.class);
   private static final int LIMIT = 2;
 
@@ -99,7 +99,7 @@ public class SanitizeWeblogs extends CollecTorMain {
       Set<SourceType> sources = this.config.getSourceTypeSet(
           Key.WebstatsSources);
       if (sources.contains(SourceType.Local)) {
-        log.info("Processing logs using batch value {}.", BATCH);
+        logger.info("Processing logs using batch value {}.", BATCH);
         Map<LogMetadata, Set<LocalDate>> previouslyProcessedWebstats
             = this.readProcessedWebstats();
         Map<LogMetadata, Set<LocalDate>> newlyProcessedWebstats
@@ -112,7 +112,7 @@ public class SanitizeWeblogs extends CollecTorMain {
             cutOffMillis);
       }
     } catch (Exception e) {
-      log.error("Cannot sanitize web-logs: {}", e.getMessage(), e);
+      logger.error("Cannot sanitize web-logs: {}", e.getMessage(), e);
       throw new RuntimeException(e);
     }
   }
@@ -132,9 +132,10 @@ public class SanitizeWeblogs extends CollecTorMain {
           }
         }
       } catch (IOException e) {
-        log.error("Cannot read state file {}.", this.processedWebstatsFile, e);
+        logger.error("Cannot read state file {}.", this.processedWebstatsFile,
+            e);
       }
-      log.debug("Read state file containing {} log files.",
+      logger.debug("Read state file containing {} log files.",
           processedWebstats.size());
     }
     return processedWebstats;
@@ -144,14 +145,14 @@ public class SanitizeWeblogs extends CollecTorMain {
       Map<LogMetadata, Set<LocalDate>> previouslyProcessedWebstats) {
     Map<LogMetadata, Set<LocalDate>> newlyProcessedWebstats = new HashMap<>();
     LogFileMap fileMapIn = new LogFileMap(dir);
-    log.info("Found log files for {} virtual hosts.", fileMapIn.size());
+    logger.info("Found log files for {} virtual hosts.", fileMapIn.size());
     for (Map.Entry<String,TreeMap<String,TreeMap<LocalDate,LogMetadata>>>
              virtualEntry : fileMapIn.entrySet()) {
       String virtualHost = virtualEntry.getKey();
       for (Map.Entry<String, TreeMap<LocalDate, LogMetadata>> physicalEntry
           : virtualEntry.getValue().entrySet()) {
         String physicalHost = physicalEntry.getKey();
-        log.info("Processing logs for {} on {}.", virtualHost, physicalHost);
+        logger.info("Processing logs for {} on {}.", virtualHost, physicalHost);
         /* Go through current input log files for given virtual and physical
          * host, and either look up contained log dates from the last execution,
          * or parse files to memory now. */
@@ -231,7 +232,7 @@ public class SanitizeWeblogs extends CollecTorMain {
         .add(WebServerAccessLogImpl.MARKER)
         .add(date.format(DateTimeFormatter.BASIC_ISO_DATE))
         .toString() + "." + FileType.XZ.name().toLowerCase();
-    log.debug("Storing {}.", name);
+    logger.debug("Storing {}.", name);
     Map<String, Long> retainedLines = new TreeMap<>(lineCounts);
     lineCounts.clear(); // not needed anymore
     try {
@@ -239,13 +240,14 @@ public class SanitizeWeblogs extends CollecTorMain {
           = new WebServerAccessLogPersistence(
           new WebServerAccessLogImpl(toCompressedBytes(retainedLines),
           new File(name), name));
-      log.debug("Storing {}.", name);
+      logger.debug("Storing {}.", name);
       walp.storeOut(this.outputDirectory.toString());
       walp.storeRecent(this.recentDirectory.toString());
     } catch (DescriptorParseException dpe) {
-      log.error("Cannot store log desriptor {}.", name, dpe);
+      logger.error("Cannot store log desriptor {}.", name, dpe);
     } catch (Throwable th) { // catch all else
-      log.error("Serious problem.  Cannot store log desriptor {}.", name, th);
+      logger.error("Serious problem.  Cannot store log desriptor {}.", name,
+          th);
     }
   }
 
@@ -327,7 +329,7 @@ public class SanitizeWeblogs extends CollecTorMain {
 
   private Map<LocalDate, Map<String, Long>>
       sanitzedLineStream(LogMetadata metadata) {
-    log.debug("Processing file {}.", metadata.path);
+    logger.debug("Processing file {}.", metadata.path);
     try (BufferedReader br
         = new BufferedReader(new InputStreamReader(
          metadata.fileType.decompress(Files.newInputStream(metadata.path))))) {
@@ -365,7 +367,7 @@ public class SanitizeWeblogs extends CollecTorMain {
                     .collect(groupingByConcurrent(Map.Entry::getKey,
                         summingLong(Map.Entry::getValue))))));
     } catch (Exception ex) {
-      log.debug("Skipping log-file {}.", metadata.path, ex);
+      logger.debug("Skipping log-file {}.", metadata.path, ex);
     }
     return Collections.emptyMap();
   }
@@ -385,9 +387,10 @@ public class SanitizeWeblogs extends CollecTorMain {
       }
       Files.write(this.processedWebstatsFile, lines);
     } catch (IOException e) {
-      log.error("Cannot write state file {}.", this.processedWebstatsFile, e);
+      logger.error("Cannot write state file {}.", this.processedWebstatsFile,
+          e);
     }
-    log.debug("Wrote state file containing {} log files.",
+    logger.debug("Wrote state file containing {} log files.",
         newlyProcessedWebstats.size());
   }
 }
diff --git a/src/main/java/org/torproject/metrics/collector/webstats/WebServerAccessLogLine.java b/src/main/java/org/torproject/metrics/collector/webstats/WebServerAccessLogLine.java
index 816064a..d187cf2 100644
--- a/src/main/java/org/torproject/metrics/collector/webstats/WebServerAccessLogLine.java
+++ b/src/main/java/org/torproject/metrics/collector/webstats/WebServerAccessLogLine.java
@@ -23,7 +23,7 @@ import java.util.regex.Pattern;
 
 public class WebServerAccessLogLine implements WebServerAccessLog.Line {
 
-  private static final Logger log = LoggerFactory
+  private static final Logger logger = LoggerFactory
       .getLogger(WebServerAccessLogLine.class);
 
   private static final String DATE_PATTERN = "dd/MMM/yyyy";
@@ -151,7 +151,7 @@ public class WebServerAccessLogLine implements WebServerAccessLog.Line {
         res.valid = true;
       }
     } catch (Throwable th) {
-      log.debug("Unmatchable line: '{}'.", line, th);
+      logger.debug("Unmatchable line: '{}'.", line, th);
       return new WebServerAccessLogLine();
     }
     return res;
diff --git a/src/main/resources/logback.xml b/src/main/resources/logback.xml
deleted file mode 100644
index 6cb5831..0000000
--- a/src/main/resources/logback.xml
+++ /dev/null
@@ -1,167 +0,0 @@
-<configuration debug="false">
-
-  <!-- a path and a prefix -->
-  <property name="logfile-base" value="${LOGBASE}/collector-" />
-
-  <!-- log file names -->
-  <property name="fileall-logname" value="${logfile-base}all" />
-  <property name="file-bridgedescs-logname" value="${logfile-base}bridgedescs" />
-  <property name="file-exitlists-logname" value="${logfile-base}exitlists" />
-  <property name="file-relaydescs-logname" value="${logfile-base}relaydescs" />
-  <property name="file-torperf-logname" value="${logfile-base}torperf" />
-  <property name="file-updateindex-logname" value="${logfile-base}updateindex" />
-
-  <!-- date pattern -->
-  <property name="utc-date-pattern" value="%date{ISO8601, UTC}" />
-
-  <!-- appender section -->
-  <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
-    <encoder>
-      <pattern>${utc-date-pattern} %level %logger{20}:%line %msg%n</pattern>
-    </encoder>
-
-    <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
-      <level>WARN</level>
-    </filter>
-  </appender>
-
-  <appender name="SHUTDOWN" class="ch.qos.logback.core.ConsoleAppender">
-    <encoder>
-      <pattern>${utc-date-pattern} %level %logger{20}:%line %msg%n</pattern>
-    </encoder>
-  </appender>
-
-  <appender name="FILEALL" class="ch.qos.logback.core.rolling.RollingFileAppender">
-    <file>${fileall-logname}.log</file>
-    <encoder>
-      <pattern>${utc-date-pattern} %level %logger{20}:%line %msg%n</pattern>
-    </encoder>
-    <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-      <!-- rollover daily -->
-      <FileNamePattern>${fileall-logname}.%d{yyyy-MM-dd}.%i.log</FileNamePattern>
-      <maxHistory>10</maxHistory>
-      <timeBasedFileNamingAndTriggeringPolicy
-            class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
-        <!-- or whenever the file size reaches 1MB -->
-        <maxFileSize>1MB</maxFileSize>
-      </timeBasedFileNamingAndTriggeringPolicy>
-    </rollingPolicy>
-    <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
-      <level>INFO</level>
-    </filter>
-  </appender>
-
-  <appender name="FILEBRIDGEDESCS" class="ch.qos.logback.core.FileAppender">
-    <file>${file-bridgedescs-logname}.log</file>
-    <encoder>
-      <pattern>${utc-date-pattern} %level %logger{20}:%line %msg%n</pattern>
-    </encoder>
-
-    <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
-      <level>INFO</level>
-    </filter>
-  </appender>
-
-  <appender name="FILEEXITLISTS" class="ch.qos.logback.core.FileAppender">
-    <file>${file-exitlists-logname}.log</file>
-    <encoder>
-      <pattern>${utc-date-pattern} %level %logger{20}:%line %msg%n</pattern>
-    </encoder>
-
-    <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
-      <level>INFO</level>
-    </filter>
-  </appender>
-
-  <appender name="FILERELAYDESCS" class="ch.qos.logback.core.FileAppender">
-    <file>${file-relaydescs-logname}.log</file>
-    <encoder>
-      <pattern>${utc-date-pattern} %level %logger{20}:%line %msg%n</pattern>
-    </encoder>
-
-    <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
-      <level>INFO</level>
-    </filter>
-  </appender>
-
-  <appender name="FILETORPERF" class="ch.qos.logback.core.FileAppender">
-    <file>${file-torperf-logname}.log</file>
-    <encoder>
-      <pattern>${utc-date-pattern} %level %logger{20}:%line %msg%n</pattern>
-    </encoder>
-
-    <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
-      <level>INFO</level>
-    </filter>
-  </appender>
-
-  <appender name="FILEUPDATEINDEX" class="ch.qos.logback.core.FileAppender">
-    <file>${file-updateindex-logname}.log</file>
-    <encoder>
-      <pattern>${utc-date-pattern} %level %logger{20}:%line %msg%n</pattern>
-    </encoder>
-
-    <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
-      <level>INFO</level>
-    </filter>
-  </appender>
-
-  <!-- logger section -->
-  <logger name="org.torproject.collector.bridgedescs" >
-    <appender-ref ref="FILEBRIDGEDESCS" />
-  </logger>
-
-  <logger name="org.torproject.collector.exitlists" >
-    <appender-ref ref="FILEEXITLISTS" />
-  </logger>
-
-  <logger name="org.torproject.collector.relaydescs" >
-    <appender-ref ref="FILERELAYDESCS" />
-  </logger>
-
-  <logger name="org.torproject.collector.torperf" >
-    <appender-ref ref="FILETORPERF" />
-  </logger>
-
-  <logger name="org.torproject.collector.index" level="INFO" >
-    <appender-ref ref="FILEUPDATEINDEX" />
-  </logger>
-
-  <logger name="org.torproject.collector.Main" >
-    <appender-ref ref="FILEBRIDGEDESCS" />
-    <appender-ref ref="FILEEXITLISTS" />
-    <appender-ref ref="FILERELAYDESCS" />
-    <appender-ref ref="FILETORPERF" />
-    <appender-ref ref="FILEUPDATEINDEX" />
-  </logger>
-
-  <logger name="org.torproject.collector.conf" >
-    <appender-ref ref="FILEBRIDGEDESCS" />
-    <appender-ref ref="FILEEXITLISTS" />
-    <appender-ref ref="FILERELAYDESCS" />
-    <appender-ref ref="FILETORPERF" />
-    <appender-ref ref="FILEUPDATEINDEX" />
-  </logger>
-
-  <logger name="org.torproject.collector.cron" >
-    <appender-ref ref="FILEBRIDGEDESCS" />
-    <appender-ref ref="FILEEXITLISTS" />
-    <appender-ref ref="FILERELAYDESCS" />
-    <appender-ref ref="FILETORPERF" />
-    <appender-ref ref="FILEUPDATEINDEX" />
-  </logger>
-
-  <logger name="org.torproject" >
-    <appender-ref ref="CONSOLE" />
-  </logger>
-
-  <logger name="org.torproject.collector.cron.ShutdownHook" >
-    <appender-ref ref="SHUTDOWN" />
-  </logger>
-
-  <root level="ALL">
-    <appender-ref ref="FILEALL" />
-  </root>
-
-</configuration>
-
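
With the bundled logback.xml above removed, logging falls back to logback's defaults unless a deployment supplies its own configuration (for example on the classpath or via -Dlogback.configurationFile=...). The following console-only sketch is only an assumption about what an operator might provide, not part of this commit; it reuses the UTC timestamp pattern from the removed file:

    <configuration>
      <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
          <!-- same UTC timestamp pattern the removed file used -->
          <pattern>%date{ISO8601, UTC} %level %logger{20}:%line %msg%n</pattern>
        </encoder>
      </appender>
      <root level="INFO">
        <appender-ref ref="CONSOLE" />
      </root>
    </configuration>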


