commit 556a0a5ff74548edced024a5a8b40cac392b8d3a
Author: Karsten Loesing <karsten.loesing@gmx.net>
Date:   Tue Aug 21 10:40:08 2018 +0200
    Use parameterized log statements.
---
 .../org/torproject/metrics/collector/Main.java      |   2 +-
 .../bridgedescs/BridgeSnapshotReader.java           |  43 +++--
 .../bridgedescs/SanitizedBridgesWriter.java         |   4 +-
 .../metrics/collector/cron/Scheduler.java           |  19 ++-
 .../collector/exitlists/ExitListDownloader.java     |  10 +-
 .../metrics/collector/indexer/CreateIndexJson.java  |  10 +-
 .../collector/onionperf/OnionPerfDownloader.java    |   6 +-
 .../collector/relaydescs/ArchiveReader.java         |  27 ++--
 .../collector/relaydescs/ArchiveWriter.java         |  74 ++++-----
 .../relaydescs/CachedRelayDescriptorReader.java     |  30 ++--
 .../collector/relaydescs/ReferenceChecker.java      |   4 +-
 .../relaydescs/RelayDescriptorDownloader.java       | 179 ++++++++++-----------
 .../relaydescs/RelayDescriptorParser.java           |   8 +-
 .../collector/webstats/SanitizeWeblogs.java         |   2 +-
 14 files changed, 189 insertions(+), 229 deletions(-)
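For context, the pattern this commit converts to is SLF4J parameterized logging: each "{}" placeholder in the message is filled from the trailing arguments, and a Throwable passed as the final argument without a matching placeholder is still logged together with its stack trace. The stand-alone sketch below is not part of the patch; the class name and file path are invented for illustration.

import java.io.File;
import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ParameterizedLoggingExample {

  private static final Logger logger =
      LoggerFactory.getLogger(ParameterizedLoggingExample.class);

  public static void main(String[] args) {
    File pbdFile = new File("stats/parsed-bridge-directories");

    /* Old style: the message is concatenated eagerly, even if debug
     * logging is disabled. */
    logger.debug("Reading file " + pbdFile.getAbsolutePath() + "...");

    /* New style: the {} placeholder is substituted only when the
     * statement is actually enabled. */
    logger.debug("Reading file {}...", pbdFile.getAbsolutePath());

    try {
      throw new IOException("simulated read failure");
    } catch (IOException e) {
      /* A trailing Throwable with no matching placeholder is treated as
       * the exception to log, so the stack trace is preserved. */
      logger.warn("Failed reading file {}!", pbdFile.getAbsolutePath(), e);
    }
  }
}

The practical difference is that the message string is only assembled when the statement's log level is enabled, so suppressed debug and trace statements no longer pay for string concatenation, while the arguments themselves are still evaluated.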
diff --git a/src/main/java/org/torproject/metrics/collector/Main.java b/src/main/java/org/torproject/metrics/collector/Main.java index 6230e36..46e93af 100644 --- a/src/main/java/org/torproject/metrics/collector/Main.java +++ b/src/main/java/org/torproject/metrics/collector/Main.java @@ -103,7 +103,7 @@ public class Main { + ") and provide at least one data source and one data sink. " + "Refer to the manual for more information."); } catch (IOException e) { - log.error("Cannot write default configuration. Reason: " + e, e); + log.error("Cannot write default configuration.", e); throw new RuntimeException(e); } } diff --git a/src/main/java/org/torproject/metrics/collector/bridgedescs/BridgeSnapshotReader.java b/src/main/java/org/torproject/metrics/collector/bridgedescs/BridgeSnapshotReader.java index fccee5e..4f72b5a 100644 --- a/src/main/java/org/torproject/metrics/collector/bridgedescs/BridgeSnapshotReader.java +++ b/src/main/java/org/torproject/metrics/collector/bridgedescs/BridgeSnapshotReader.java @@ -49,7 +49,7 @@ public class BridgeSnapshotReader { boolean modified = false; if (bdDir.exists()) { if (pbdFile.exists()) { - logger.debug("Reading file " + pbdFile.getAbsolutePath() + "..."); + logger.debug("Reading file {}...", pbdFile.getAbsolutePath()); try { BufferedReader br = new BufferedReader(new FileReader(pbdFile)); String line; @@ -57,16 +57,13 @@ public class BridgeSnapshotReader { parsed.add(line); } br.close(); - logger.debug("Finished reading file " - + pbdFile.getAbsolutePath() + "."); + logger.debug("Finished reading file {}.", pbdFile.getAbsolutePath()); } catch (IOException e) { - logger.warn("Failed reading file " - + pbdFile.getAbsolutePath() + "!", e); + logger.warn("Failed reading file {}!", pbdFile.getAbsolutePath(), e); return; } } - logger.debug("Importing files in directory " + bridgeDirectoriesDir - + "/..."); + logger.debug("Importing files in directory {}/...", bridgeDirectoriesDir); Set<String> descriptorImportHistory = new HashSet<>(); int parsedFiles = 0; int skippedFiles = 0; @@ -99,8 +96,8 @@ public class BridgeSnapshotReader { String fn = pop.getName(); String[] fnParts = fn.split("-"); if (fnParts.length != 5) { - logger.warn("Invalid bridge descriptor tarball file name: " - + fn + ". Skipping."); + logger.warn("Invalid bridge descriptor tarball file name: {}. " + + "Skipping.", fn); continue; } String authorityPart = String.format("%s-%s-", fnParts[0], @@ -123,7 +120,7 @@ public class BridgeSnapshotReader { break; default: logger.warn("Did not recognize the bridge authority that " - + "generated " + fn + ". Skipping."); + + "generated {}. Skipping.", fn); continue; } String dateTime = datePart.substring(0, 10) + " " @@ -226,32 +223,28 @@ public class BridgeSnapshotReader { parsed.add(pop.getName()); modified = true; } catch (IOException e) { - logger.warn("Could not parse bridge snapshot " - + pop.getName() + "!", e); + logger.warn("Could not parse bridge snapshot {}!", pop.getName(), + e); continue; } } } - logger.debug("Finished importing files in directory " - + bridgeDirectoriesDir + "/. In total, we parsed " - + parsedFiles + " files (skipped " + skippedFiles - + ") containing " + parsedStatuses + " statuses, " - + parsedServerDescriptors + " server descriptors (skipped " - + skippedServerDescriptors + "), and " - + parsedExtraInfoDescriptors + " extra-info descriptors " - + "(skipped " + skippedExtraInfoDescriptors + ")."); + logger.debug("Finished importing files in directory {}/. 
In total, we " + + "parsed {} files (skipped {}) containing {} statuses, {} server " + + "descriptors (skipped {}), and {} extra-info descriptors (skipped " + + "{}).", bridgeDirectoriesDir, parsedFiles, skippedFiles, + parsedStatuses, parsedServerDescriptors, skippedServerDescriptors, + parsedExtraInfoDescriptors, skippedExtraInfoDescriptors); if (!parsed.isEmpty() && modified) { - logger.debug("Writing file " + pbdFile.getAbsolutePath() + "..."); + logger.debug("Writing file {}...", pbdFile.getAbsolutePath()); pbdFile.getParentFile().mkdirs(); try (BufferedWriter bw = new BufferedWriter(new FileWriter(pbdFile))) { for (String f : parsed) { bw.append(f).append("\n"); } - logger.debug("Finished writing file " + pbdFile.getAbsolutePath() - + "."); + logger.debug("Finished writing file {}.", pbdFile.getAbsolutePath()); } catch (IOException e) { - logger.warn("Failed writing file " - + pbdFile.getAbsolutePath() + "!", e); + logger.warn("Failed writing file {}!", pbdFile.getAbsolutePath(), e); } } } diff --git a/src/main/java/org/torproject/metrics/collector/bridgedescs/SanitizedBridgesWriter.java b/src/main/java/org/torproject/metrics/collector/bridgedescs/SanitizedBridgesWriter.java index 2bd85ef..66a5685 100644 --- a/src/main/java/org/torproject/metrics/collector/bridgedescs/SanitizedBridgesWriter.java +++ b/src/main/java/org/torproject/metrics/collector/bridgedescs/SanitizedBridgesWriter.java @@ -1067,8 +1067,8 @@ public class SanitizedBridgesWriter extends CollecTorMain { logger.warn("Unknown cert type in identity-ed25519: {}", identityEd25519[1]); } else if (identityEd25519[6] != 0x01) { - logger.warn("Unknown certified key type in " - + "identity-ed25519: " + identityEd25519[1]); + logger.warn("Unknown certified key type in identity-ed25519: {}", + identityEd25519[1]); } else if (identityEd25519[39] == 0x00) { logger.warn("No extensions in identity-ed25519 (which " + "would contain the encoded master-key-ed25519): {}", diff --git a/src/main/java/org/torproject/metrics/collector/cron/Scheduler.java b/src/main/java/org/torproject/metrics/collector/cron/Scheduler.java index 1fc0039..db10205 100644 --- a/src/main/java/org/torproject/metrics/collector/cron/Scheduler.java +++ b/src/main/java/org/torproject/metrics/collector/cron/Scheduler.java @@ -70,8 +70,7 @@ public final class Scheduler implements ThreadFactory { CollecTorMain ctm = ctmEntry.getValue() .getConstructor(Configuration.class).newInstance(conf); if (conf.getBool(Key.RunOnce)) { - logger.info("Prepare single run for " + ctm.getClass().getName() - + "."); + logger.info("Prepare single run for {}.", ctm.getClass().getName()); runOnceMains.add(Executors.callable(ctm)); } else { scheduleExecutions(ctm, @@ -83,8 +82,8 @@ public final class Scheduler implements ThreadFactory { | InstantiationException | InvocationTargetException | NoSuchMethodException | RejectedExecutionException | NullPointerException ex) { - logger.error("Cannot schedule " + ctmEntry.getValue().getName() - + ". Reason: " + ex.getMessage(), ex); + logger.error("Cannot schedule {}. Reason: {}", + ctmEntry.getValue().getName(), ex.getMessage(), ex); } } try { @@ -93,13 +92,13 @@ public final class Scheduler implements ThreadFactory { } } catch (ConfigurationException | InterruptedException | RejectedExecutionException | NullPointerException ex) { - logger.error("Cannot schedule run-once: " + ex.getMessage(), ex); + logger.error("Cannot schedule run-once: {}", ex.getMessage(), ex); } }
private void scheduleExecutions(CollecTorMain ctm, int offset, int period) { - logger.info("Periodic updater started for " + ctm.getClass().getName() - + "; offset=" + offset + ", period=" + period + "."); + logger.info("Periodic updater started for {}; offset={}, period={}.", + ctm.getClass().getName(), offset, period); long periodMillis = period * MILLIS_IN_A_MINUTE; long initialDelayMillis = computeInitialDelayMillis( System.currentTimeMillis(), offset * MILLIS_IN_A_MINUTE, periodMillis); @@ -132,9 +131,9 @@ public final class Scheduler implements ThreadFactory { logger.info("Shutdown of all scheduled tasks completed successfully."); } catch (InterruptedException ie) { List<Runnable> notTerminated = scheduler.shutdownNow(); - logger.error("Regular shutdown failed for: " + notTerminated); + logger.error("Regular shutdown failed for: {}", notTerminated); if (!notTerminated.isEmpty()) { - logger.error("Forced shutdown failed for: " + notTerminated); + logger.error("Forced shutdown failed for: {}", notTerminated); } } } @@ -147,7 +146,7 @@ public final class Scheduler implements ThreadFactory { Thread newThread = threads.newThread(runner); newThread.setDaemon(true); newThread.setName("CollecTor-Scheduled-Thread-" + ++currentThreadNo); - logger.info("New Thread created: " + newThread.getName()); + logger.info("New Thread created: {}", newThread.getName()); return newThread; } } diff --git a/src/main/java/org/torproject/metrics/collector/exitlists/ExitListDownloader.java b/src/main/java/org/torproject/metrics/collector/exitlists/ExitListDownloader.java index 0b7dfad..5eab578 100644 --- a/src/main/java/org/torproject/metrics/collector/exitlists/ExitListDownloader.java +++ b/src/main/java/org/torproject/metrics/collector/exitlists/ExitListDownloader.java @@ -138,9 +138,9 @@ public class ExitListDownloader extends CollecTorMain { } if (maxScanMillis > 0L && maxScanMillis + 330L * 60L * 1000L < System.currentTimeMillis()) { - logger.warn("The last reported scan in the downloaded exit list " - + "took place at " + dateTimeFormat.format(maxScanMillis) - + ", which is more than 5:30 hours in the past."); + logger.warn("The last reported scan in the downloaded exit list took " + + "place at {}, which is more than 5:30 hours in the past.", + dateTimeFormat.format(maxScanMillis)); }
/* Write to disk. */ @@ -154,8 +154,8 @@ public class ExitListDownloader extends CollecTorMain { bw.write(downloadedExitList); bw.close(); } catch (IOException e) { - logger.warn("Could not write downloaded exit list " - + "to " + outputFile.getAbsolutePath(), e); + logger.warn("Could not write downloaded exit list to {}", + outputFile.getAbsolutePath(), e); } }
diff --git a/src/main/java/org/torproject/metrics/collector/indexer/CreateIndexJson.java b/src/main/java/org/torproject/metrics/collector/indexer/CreateIndexJson.java index 5f3a649..a018f8b 100644 --- a/src/main/java/org/torproject/metrics/collector/indexer/CreateIndexJson.java +++ b/src/main/java/org/torproject/metrics/collector/indexer/CreateIndexJson.java @@ -92,7 +92,7 @@ public class CreateIndexJson extends CollecTorMain { config.getPath(Key.RecentPath).toFile() }; writeIndex(indexDirectories()); } catch (Exception e) { - logger.error("Cannot run index creation: " + e.getMessage(), e); + logger.error("Cannot run index creation: {}", e.getMessage(), e); throw new RuntimeException(e); } } @@ -108,8 +108,8 @@ public class CreateIndexJson extends CollecTorMain {
private IndexNode indexDirectories() { SortedSet<DirectoryNode> directoryNodes = new TreeSet<>(); - logger.trace("indexing: " + indexedDirectories[0] + " " - + indexedDirectories[1]); + logger.trace("indexing: {} {}", indexedDirectories[0], + indexedDirectories[1]); for (File directory : indexedDirectories) { if (directory.exists() && directory.isDirectory()) { DirectoryNode dn = indexDirectory(directory); @@ -126,10 +126,10 @@ public class CreateIndexJson extends CollecTorMain { private DirectoryNode indexDirectory(File directory) { SortedSet<FileNode> fileNodes = new TreeSet<>(); SortedSet<DirectoryNode> directoryNodes = new TreeSet<>(); - logger.trace("indexing: " + directory); + logger.trace("indexing: {}", directory); File[] fileList = directory.listFiles(); if (null == fileList) { - logger.warn("Indexing dubious directory: " + directory); + logger.warn("Indexing dubious directory: {}", directory); return null; } for (File fileOrDirectory : fileList) { diff --git a/src/main/java/org/torproject/metrics/collector/onionperf/OnionPerfDownloader.java b/src/main/java/org/torproject/metrics/collector/onionperf/OnionPerfDownloader.java index f97a4f8..ca307a5 100644 --- a/src/main/java/org/torproject/metrics/collector/onionperf/OnionPerfDownloader.java +++ b/src/main/java/org/torproject/metrics/collector/onionperf/OnionPerfDownloader.java @@ -111,9 +111,9 @@ public class OnionPerfDownloader extends CollecTorMain { this.downloadedTpfFiles.add(line); } } catch (IOException e) { - logger.info("Unable to read download history file '" - + this.onionPerfDownloadedFile.getAbsolutePath() + "'. Ignoring " - + "download history and downloading all available .tpf files."); + logger.info("Unable to read download history file '{}'. Ignoring " + + "download history and downloading all available .tpf files.", + this.onionPerfDownloadedFile.getAbsolutePath()); this.downloadedTpfFiles.clear(); } } diff --git a/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveReader.java b/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveReader.java index 7c59054..d166bd7 100644 --- a/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveReader.java +++ b/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveReader.java @@ -103,8 +103,8 @@ public class ArchiveReader {
private void readDescriptorFiles() { if (this.archivesDirectory.exists()) { - logger.debug("Importing files in directory " + this.archivesDirectory - + "/..."); + logger.debug("Importing files in directory {}/...", + this.archivesDirectory); Stack<File> filesInInputDir = new Stack<>(); filesInInputDir.add(this.archivesDirectory); List<File> problems = new ArrayList<>(); @@ -121,8 +121,8 @@ public class ArchiveReader { this.ignoredFiles++; continue; } else if (pop.getName().endsWith(".tar.bz2")) { - logger.warn("Cannot parse compressed tarball " - + pop.getAbsolutePath() + ". Skipping."); + logger.warn("Cannot parse compressed tarball {}. Skipping.", + pop.getAbsolutePath()); continue; } else if (pop.getName().endsWith(".bz2")) { FileInputStream fis = new FileInputStream(pop); @@ -225,9 +225,8 @@ public class ArchiveReader { String digest256Hex = DigestUtils.sha256Hex(descBytes); if (!this.microdescriptorValidAfterTimes.containsKey( digest256Hex)) { - logger.debug("Could not store microdescriptor '" - + digest256Hex + "', which was not contained in a " - + "microdesc consensus."); + logger.debug("Could not store microdescriptor '{}', which was " + + "not contained in a microdesc consensus.", digest256Hex); continue; } for (String validAfterTime : @@ -238,9 +237,8 @@ public class ArchiveReader { rdp.storeMicrodescriptor(descBytes, digest256Hex, digest256Base64, validAfter); } catch (ParseException e) { - logger.warn("Could not parse " - + "valid-after time '" + validAfterTime + "'. Not " - + "storing microdescriptor.", e); + logger.warn("Could not parse valid-after time '{}'. Not " + + "storing microdescriptor.", validAfterTime, e); } } } @@ -256,8 +254,8 @@ public class ArchiveReader { } } if (problems.isEmpty()) { - logger.debug("Finished importing files in directory " - + this.archivesDirectory + "/."); + logger.debug("Finished importing files in directory {}/.", + this.archivesDirectory); } else { StringBuilder sb = new StringBuilder("Failed importing files in " + "directory " + this.archivesDirectory + "/:"); @@ -288,9 +286,8 @@ public class ArchiveReader { + "history file."); } } - logger.info("Finished importing relay descriptors from local " - + "directory:\nParsed " + this.parsedFiles + ", ignored " - + this.ignoredFiles + " files."); + logger.info("Finished importing relay descriptors from local directory:\n" + + "Parsed {}, ignored {} files.", this.parsedFiles, this.ignoredFiles); }
/** Stores the valid-after time and microdescriptor digests of a given diff --git a/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveWriter.java b/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveWriter.java index 3429a0a..edd03d7 100644 --- a/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveWriter.java +++ b/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveWriter.java @@ -221,10 +221,9 @@ public class ArchiveWriter extends CollecTorMain { while ((line = br.readLine()) != null) { String[] parts = line.split(","); if (parts.length != 3) { - logger.warn("Could not load server descriptor " - + "digests because of illegal line '" + line + "'. We " - + "might not be able to correctly check descriptors for " - + "completeness."); + logger.warn("Could not load server descriptor digests because of " + + "illegal line '{}'. We might not be able to correctly " + + "check descriptors for completeness.", line); break; } long published = dateTimeFormat.parse(parts[0]).getTime(); @@ -249,10 +248,9 @@ public class ArchiveWriter extends CollecTorMain { while ((line = br.readLine()) != null) { String[] parts = line.split(","); if (parts.length != 2) { - logger.warn("Could not load extra-info descriptor " - + "digests because of illegal line '" + line + "'. We " - + "might not be able to correctly check descriptors for " - + "completeness."); + logger.warn("Could not load extra-info descriptor digests because " + + "of illegal line '{}'. We might not be able to correctly " + + "check descriptors for completeness.", line); break; } long published = dateTimeFormat.parse(parts[0]).getTime(); @@ -275,10 +273,9 @@ public class ArchiveWriter extends CollecTorMain { while ((line = br.readLine()) != null) { String[] parts = line.split(","); if (parts.length != 2) { - logger.warn("Could not load microdescriptor digests " - + "because of illegal line '" + line + "'. We might not " - + "be able to correctly check descriptors for " - + "completeness."); + logger.warn("Could not load microdescriptor digests because of " + + "illegal line '{}'. 
We might not be able to correctly check " + + "descriptors for completeness.", line); break; } long validAfter = dateTimeFormat.parse(parts[0]).getTime(); @@ -510,46 +507,40 @@ public class ArchiveWriter extends CollecTorMain { long tooOldMillis = this.now - 330L * 60L * 1000L; if (!this.storedConsensuses.isEmpty() && this.storedConsensuses.lastKey() < tooOldMillis) { - logger.warn("The last known relay network status " - + "consensus was valid after " - + dateTimeFormat.format(this.storedConsensuses.lastKey()) - + ", which is more than 5:30 hours in the past."); + logger.warn("The last known relay network status consensus was valid " + + "after {}, which is more than 5:30 hours in the past.", + dateTimeFormat.format(this.storedConsensuses.lastKey())); } if (!this.storedMicrodescConsensuses.isEmpty() && this.storedMicrodescConsensuses.lastKey() < tooOldMillis) { - logger.warn("The last known relay network status " - + "microdesc consensus was valid after " - + dateTimeFormat.format( - this.storedMicrodescConsensuses.lastKey()) - + ", which is more than 5:30 hours in the past."); + logger.warn("The last known relay network status microdesc consensus " + + "was valid after {}, which is more than 5:30 hours in the past.", + dateTimeFormat.format(this.storedMicrodescConsensuses.lastKey())); } if (!this.storedVotes.isEmpty() && this.storedVotes.lastKey() < tooOldMillis) { - logger.warn("The last known relay network status vote " - + "was valid after " + dateTimeFormat.format( - this.storedVotes.lastKey()) + ", which is more than 5:30 hours " - + "in the past."); + logger.warn("The last known relay network status vote was valid after " + + "{}, which is more than 5:30 hours in the past.", + dateTimeFormat.format(this.storedVotes.lastKey())); } if (!this.storedServerDescriptors.isEmpty() && this.storedServerDescriptors.lastKey() < tooOldMillis) { - logger.warn("The last known relay server descriptor was " - + "published at " - + dateTimeFormat.format(this.storedServerDescriptors.lastKey()) - + ", which is more than 5:30 hours in the past."); + logger.warn("The last known relay server descriptor was published at " + + "{}, which is more than 5:30 hours in the past.", + dateTimeFormat.format(this.storedServerDescriptors.lastKey())); } if (!this.storedExtraInfoDescriptors.isEmpty() && this.storedExtraInfoDescriptors.lastKey() < tooOldMillis) { - logger.warn("The last known relay extra-info descriptor " - + "was published at " + dateTimeFormat.format( - this.storedExtraInfoDescriptors.lastKey()) - + ", which is more than 5:30 hours in the past."); + logger.warn("The last known relay extra-info descriptor was published " + + "at {}, which is more than 5:30 hours in the past.", + dateTimeFormat.format(this.storedExtraInfoDescriptors.lastKey())); } if (!this.storedMicrodescriptors.isEmpty() && this.storedMicrodescriptors.lastKey() < tooOldMillis) { - logger.warn("The last known relay microdescriptor was " - + "contained in a microdesc consensus that was valid after " - + dateTimeFormat.format(this.storedMicrodescriptors.lastKey()) - + ", which is more than 5:30 hours in the past."); + logger.warn("The last known relay microdescriptor was contained in a " + + "microdesc consensus that was valid after {}, which is more than " + + "5:30 hours in the past.", + dateTimeFormat.format(this.storedMicrodescriptors.lastKey())); } }
@@ -821,7 +812,7 @@ public class ArchiveWriter extends CollecTorMain {
private boolean store(byte[] typeAnnotation, byte[] data, File[] outputFiles, boolean[] append) { - logger.trace("Storing " + outputFiles[0]); + logger.trace("Storing {}", outputFiles[0]); int parseableDescriptors = 0; for (Descriptor descriptor : this.descriptorParser.parseDescriptors(data, null, outputFiles[0].getName())) { @@ -830,8 +821,8 @@ public class ArchiveWriter extends CollecTorMain { } } if (parseableDescriptors != 1) { - logger.info("Relay descriptor file " + outputFiles[0] - + " doesn't contain exactly one descriptor. Storing anyway."); + logger.info("Relay descriptor file {} doesn't contain exactly one " + + "descriptor. Storing anyway.", outputFiles[0]); } try { for (int i = 0; i < outputFiles.length; i++) { @@ -848,8 +839,7 @@ public class ArchiveWriter extends CollecTorMain { } return true; } catch (IOException e) { - logger.warn("Could not store relay descriptor " - + outputFiles[0], e); + logger.warn("Could not store relay descriptor {}", outputFiles[0], e); } return false; } diff --git a/src/main/java/org/torproject/metrics/collector/relaydescs/CachedRelayDescriptorReader.java b/src/main/java/org/torproject/metrics/collector/relaydescs/CachedRelayDescriptorReader.java index 4f0d012..3e1bba4 100644 --- a/src/main/java/org/torproject/metrics/collector/relaydescs/CachedRelayDescriptorReader.java +++ b/src/main/java/org/torproject/metrics/collector/relaydescs/CachedRelayDescriptorReader.java @@ -86,8 +86,8 @@ public class CachedRelayDescriptorReader { } br.close(); } catch (IOException e) { - logger.warn("Could not read import history from " - + importHistoryFile.getAbsolutePath() + ".", e); + logger.warn("Could not read import history from {}.", + importHistoryFile.getAbsolutePath(), e); } } } @@ -97,12 +97,11 @@ public class CachedRelayDescriptorReader { for (String inputDirectory : this.inputDirectories) { File cachedDescDir = new File(inputDirectory); if (!cachedDescDir.exists()) { - logger.warn("Directory " + cachedDescDir.getAbsolutePath() - + " does not exist. Skipping."); + logger.warn("Directory {} does not exist. Skipping.", + cachedDescDir.getAbsolutePath()); continue; } - logger.debug("Reading " + cachedDescDir.getAbsolutePath() - + " directory."); + logger.debug("Reading {} directory.", cachedDescDir.getAbsolutePath()); SortedSet<File> cachedDescFiles = new TreeSet<>(); Stack<File> files = new Stack<>(); files.add(cachedDescDir); @@ -143,10 +142,9 @@ public class CachedRelayDescriptorReader { if (dateTimeFormat.parse(line.substring("valid-after " .length())).getTime() < System.currentTimeMillis() - 6L * 60L * 60L * 1000L) { - logger.warn("Cached descriptor files in " - + cachedDescDir.getAbsolutePath() + " are stale. " - + "The valid-after line in cached-consensus is '" - + line + "'."); + logger.warn("Cached descriptor files in {} are stale. The " + + "valid-after line in cached-consensus is '{}'.", + cachedDescDir.getAbsolutePath(), line); this.dumpStats.append(" (stale!)"); } break; @@ -244,12 +242,12 @@ public class CachedRelayDescriptorReader { ? "server" : "extra-info").append(" descriptors"); } } catch (IOException | ParseException e) { - logger.warn("Failed reading " - + cachedDescDir.getAbsolutePath() + " directory.", e); + logger.warn("Failed reading {} directory.", + cachedDescDir.getAbsolutePath(), e); } } - logger.debug("Finished reading " - + cachedDescDir.getAbsolutePath() + " directory."); + logger.debug("Finished reading {} directory.", + cachedDescDir.getAbsolutePath()); } }
@@ -264,8 +262,8 @@ public class CachedRelayDescriptorReader { } bw.close(); } catch (IOException e) { - logger.warn("Could not write import history to " - + this.importHistoryFile.getAbsolutePath() + ".", e); + logger.warn("Could not write import history to {}.", + this.importHistoryFile.getAbsolutePath(), e); }
logger.info(dumpStats.toString()); diff --git a/src/main/java/org/torproject/metrics/collector/relaydescs/ReferenceChecker.java b/src/main/java/org/torproject/metrics/collector/relaydescs/ReferenceChecker.java index 0984de0..f3a21ba 100644 --- a/src/main/java/org/torproject/metrics/collector/relaydescs/ReferenceChecker.java +++ b/src/main/java/org/torproject/metrics/collector/relaydescs/ReferenceChecker.java @@ -315,8 +315,8 @@ public class ReferenceChecker { } logger.info(sb.toString()); if (totalMissingDescriptorsWeight > 0.999) { - logger.warn("Missing too many referenced " - + "descriptors (" + totalMissingDescriptorsWeight + ")."); + logger.warn("Missing too many referenced descriptors ({}).", + totalMissingDescriptorsWeight); } }
diff --git a/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorDownloader.java b/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorDownloader.java index 446b6a7..6e470d8 100644 --- a/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorDownloader.java +++ b/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorDownloader.java @@ -344,8 +344,8 @@ public class RelayDescriptorDownloader { "stats/missing-relay-descriptors"); if (this.missingDescriptorsFile.exists()) { try { - logger.debug("Reading file " - + this.missingDescriptorsFile.getAbsolutePath() + "..."); + logger.debug("Reading file {}...", + this.missingDescriptorsFile.getAbsolutePath()); BufferedReader br = new BufferedReader(new FileReader( this.missingDescriptorsFile)); String line; @@ -395,19 +395,17 @@ public class RelayDescriptorDownloader { } } } else { - logger.debug("Invalid line '" + line + "' in " - + this.missingDescriptorsFile.getAbsolutePath() - + ". Ignoring."); + logger.debug("Invalid line '{}' in {}. Ignoring.", line, + this.missingDescriptorsFile.getAbsolutePath()); } } br.close(); - logger.debug("Finished reading file " - + this.missingDescriptorsFile.getAbsolutePath() + "."); + logger.debug("Finished reading file {}.", + this.missingDescriptorsFile.getAbsolutePath()); } catch (IOException e) { - logger.warn("Failed to read file " - + this.missingDescriptorsFile.getAbsolutePath() - + "! This means that we might forget to dowload relay " - + "descriptors we are missing.", e); + logger.warn("Failed to read file {}! This means that we might forget " + + "to dowload relay descriptors we are missing.", + this.missingDescriptorsFile.getAbsolutePath(), e); } }
@@ -418,17 +416,15 @@ public class RelayDescriptorDownloader { "stats/last-downloaded-all-descriptors"); if (this.lastDownloadedAllDescriptorsFile.exists()) { try { - logger.debug("Reading file " - + this.lastDownloadedAllDescriptorsFile.getAbsolutePath() - + "..."); + logger.debug("Reading file {}...", + this.lastDownloadedAllDescriptorsFile.getAbsolutePath()); BufferedReader br = new BufferedReader(new FileReader( this.lastDownloadedAllDescriptorsFile)); String line; while ((line = br.readLine()) != null) { if (line.split(",").length != 2) { - logger.debug("Invalid line '" + line + "' in " - + this.lastDownloadedAllDescriptorsFile.getAbsolutePath() - + ". Ignoring."); + logger.debug("Invalid line '{}' in {}. Ignoring.", line, + this.lastDownloadedAllDescriptorsFile.getAbsolutePath()); } else { String[] parts = line.split(","); String authority = parts[0]; @@ -438,14 +434,13 @@ public class RelayDescriptorDownloader { } } br.close(); - logger.debug("Finished reading file " - + this.lastDownloadedAllDescriptorsFile.getAbsolutePath() - + "."); + logger.debug("Finished reading file {}.", + this.lastDownloadedAllDescriptorsFile.getAbsolutePath()); } catch (IOException e) { - logger.warn("Failed to read file " - + this.lastDownloadedAllDescriptorsFile.getAbsolutePath() - + "! This means that we might download all server and " - + "extra-info descriptors more often than we should.", e); + logger.warn("Failed to read file {}! This means that we might " + + "download all server and extra-info descriptors more often than " + + "we should.", + this.lastDownloadedAllDescriptorsFile.getAbsolutePath(), e); } }
@@ -839,7 +834,7 @@ public class RelayDescriptorDownloader { /* If a download failed, stop requesting descriptors from this * authority and move on to the next. */ } catch (IOException e) { - logger.debug("Failed downloading from " + authority + "!", e); + logger.debug("Failed downloading from {}!", authority, e); } } } @@ -883,8 +878,8 @@ public class RelayDescriptorDownloader { allData = baos.toByteArray(); } } - logger.debug("Downloaded " + fullUrl + " -> " + response + " (" - + (allData == null ? 0 : allData.length) + " bytes)"); + logger.debug("Downloaded {} -> {} ({} bytes)", fullUrl, response, + allData == null ? 0 : allData.length); int receivedDescriptors = 0; if (allData != null) { if (resource.startsWith("/tor/status-vote/current/")) { @@ -967,10 +962,9 @@ public class RelayDescriptorDownloader { this.rdp.storeMicrodescriptor(descBytes, digest256Hex, digest256Base64, validAfter); } catch (ParseException e) { - logger.warn("Could not parse " - + "valid-after time '" + validAfterTime + "' in " + logger.warn("Could not parse valid-after time '{}' in " + "microdescriptor key. Not storing microdescriptor.", - e); + validAfterTime, e); } } receivedDescriptors++; @@ -993,8 +987,8 @@ public class RelayDescriptorDownloader { int missingServerDescriptors = 0; int missingExtraInfoDescriptors = 0; try { - logger.debug("Writing file " - + this.missingDescriptorsFile.getAbsolutePath() + "..."); + logger.debug("Writing file {}...", + this.missingDescriptorsFile.getAbsolutePath()); this.missingDescriptorsFile.getParentFile().mkdirs(); BufferedWriter bw = new BufferedWriter(new FileWriter( this.missingDescriptorsFile)); @@ -1020,20 +1014,19 @@ public class RelayDescriptorDownloader { bw.write(key + "," + value + "\n"); } bw.close(); - logger.debug("Finished writing file " - + this.missingDescriptorsFile.getAbsolutePath() + "."); + logger.debug("Finished writing file {}.", + this.missingDescriptorsFile.getAbsolutePath()); } catch (IOException e) { - logger.warn("Failed writing " - + this.missingDescriptorsFile.getAbsolutePath() + "!", e); + logger.warn("Failed writing {}!", + this.missingDescriptorsFile.getAbsolutePath(), e); }
/* Write text file containing the directory authorities and when we * last downloaded all server and extra-info descriptors from them to * disk. */ try { - logger.debug("Writing file " - + this.lastDownloadedAllDescriptorsFile.getAbsolutePath() - + "..."); + logger.debug("Writing file {}...", + this.lastDownloadedAllDescriptorsFile.getAbsolutePath()); this.lastDownloadedAllDescriptorsFile.getParentFile().mkdirs(); BufferedWriter bw = new BufferedWriter(new FileWriter( this.lastDownloadedAllDescriptorsFile)); @@ -1044,77 +1037,67 @@ public class RelayDescriptorDownloader { bw.write(authority + "," + lastDownloaded + "\n"); } bw.close(); - logger.debug("Finished writing file " - + this.lastDownloadedAllDescriptorsFile.getAbsolutePath() - + "."); + logger.debug("Finished writing file {}.", + this.lastDownloadedAllDescriptorsFile.getAbsolutePath()); } catch (IOException e) { - logger.warn("Failed writing " - + this.lastDownloadedAllDescriptorsFile.getAbsolutePath() + "!", - e); + logger.warn("Failed writing {}!", + this.lastDownloadedAllDescriptorsFile.getAbsolutePath(), e); }
/* Log statistics about this execution. */ logger.info("Finished downloading relay descriptors from the " + "directory authorities."); - logger.info("At the beginning of this execution, we were " - + "missing " + oldMissingConsensuses + " consensus(es), " - + oldMissingMicrodescConsensuses + " microdesc consensus(es), " - + oldMissingVotes + " vote(s), " + oldMissingServerDescriptors - + " server descriptor(s), " + oldMissingExtraInfoDescriptors - + " extra-info descriptor(s), and " + oldMissingMicrodescriptors - + " microdescriptor(s)."); - logger.info("During this execution, we added " - + this.newMissingConsensuses + " consensus(es), " - + this.newMissingMicrodescConsensuses - + " microdesc consensus(es), " + this.newMissingVotes - + " vote(s), " + this.newMissingServerDescriptors - + " server descriptor(s), " + this.newMissingExtraInfoDescriptors - + " extra-info descriptor(s), and " - + this.newMissingMicrodescriptors + " microdescriptor(s) to the " - + "missing list, some of which we also " - + "requested and removed from the list again."); - logger.info("We requested " + this.requestedConsensuses - + " consensus(es), " + this.requestedMicrodescConsensuses - + " microdesc consensus(es), " + this.requestedVotes - + " vote(s), " + this.requestedMissingServerDescriptors - + " missing server descriptor(s), " - + this.requestedAllServerDescriptors - + " times all server descriptors, " - + this.requestedMissingExtraInfoDescriptors + " missing " - + "extra-info descriptor(s), " - + this.requestedAllExtraInfoDescriptors + " times all extra-info " - + "descriptors, and " + this.requestedMissingMicrodescriptors - + " missing microdescriptor(s) from the directory authorities."); + logger.info("At the beginning of this execution, we were missing {} " + + "consensus(es), {} microdesc consensus(es), {} vote(s), {} server " + + "descriptor(s), {} extra-info descriptor(s), and {} " + + "microdescriptor(s).", oldMissingConsensuses, + oldMissingMicrodescConsensuses, oldMissingVotes, + oldMissingServerDescriptors, oldMissingExtraInfoDescriptors, + oldMissingMicrodescriptors); + logger.info("During this execution, we added {} consensus(es), {} " + + "microdesc consensus(es), {} vote(s), {} server descriptor(s), {} " + + "extra-info descriptor(s), and {} microdescriptor(s) to the missing " + + "list, some of which we also requested and removed from the list " + + "again.", this.newMissingConsensuses, + this.newMissingMicrodescConsensuses, this.newMissingVotes, + this.newMissingServerDescriptors, this.newMissingExtraInfoDescriptors, + this.newMissingMicrodescriptors); + logger.info("We requested {} consensus(es), {} microdesc consensus(es), " + + "{} vote(s), {} missing server descriptor(s), {} times all server " + + "descriptors, {} missing extra-info descriptor(s), {} times all " + + "extra-info descriptors, and {} missing microdescriptor(s) from the " + + "directory authorities.", this.requestedConsensuses, + this.requestedMicrodescConsensuses, this.requestedVotes, + this.requestedMissingServerDescriptors, + this.requestedAllServerDescriptors, + this.requestedMissingExtraInfoDescriptors, + this.requestedAllExtraInfoDescriptors, + this.requestedMissingMicrodescriptors); StringBuilder sb = new StringBuilder(); for (String authority : this.authorities) { sb.append(" ").append(authority).append("=").append( this.requestsByAuthority.get(authority)); } logger.info("We sent these numbers of requests to the directory " - + "authorities:" + sb.toString()); - logger.info("We successfully downloaded " - + 
this.downloadedConsensuses + " consensus(es), " - + this.downloadedMicrodescConsensuses - + " microdesc consensus(es), " + this.downloadedVotes - + " vote(s), " + this.downloadedMissingServerDescriptors - + " missing server descriptor(s), " - + this.downloadedAllServerDescriptors - + " server descriptor(s) when downloading all descriptors, " - + this.downloadedMissingExtraInfoDescriptors + " missing " - + "extra-info descriptor(s), " - + this.downloadedAllExtraInfoDescriptors + " extra-info " - + "descriptor(s) when downloading all descriptors, and " - + this.downloadedMissingMicrodescriptors - + " missing microdescriptor(s)."); - logger.info("At the end of this execution, we are missing " - + missingConsensuses + " consensus(es), " - + missingMicrodescConsensuses + " microdesc consensus(es), " - + missingVotes + " vote(s), " + missingServerDescriptors - + " server descriptor(s), " + missingExtraInfoDescriptors - + " extra-info descriptor(s), and " - + this.missingMicrodescriptors.size() - + " microdescriptor(s), some of which we may try in the next " - + "execution."); + + "authorities:{}", sb.toString()); + logger.info("We successfully downloaded {} consensus(es), {} microdesc " + + "consensus(es), {} vote(s), {} missing server descriptor(s), {} " + + "server descriptor(s) when downloading all descriptors, {} missing " + + "extra-info descriptor(s), {} extra-info descriptor(s) when " + + "downloading all descriptors, and {} missing microdescriptor(s).", + this.downloadedConsensuses, this.downloadedMicrodescConsensuses, + this.downloadedVotes, this.downloadedMissingServerDescriptors, + this.downloadedAllServerDescriptors, + this.downloadedMissingExtraInfoDescriptors, + this.downloadedAllExtraInfoDescriptors, + this.downloadedMissingMicrodescriptors); + logger.info("At the end of this execution, we are missing {} " + + "consensus(es), {} microdesc consensus(es), {} vote(s), {} server " + + "descriptor(s), {} extra-info descriptor(s), and {} " + + "microdescriptor(s), some of which we may try in the next execution.", + missingConsensuses, missingMicrodescConsensuses, missingVotes, + missingServerDescriptors, missingExtraInfoDescriptors, + this.missingMicrodescriptors.size()); } }
diff --git a/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorParser.java b/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorParser.java index 664b566..59b9fb0 100644 --- a/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorParser.java +++ b/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorParser.java @@ -152,8 +152,8 @@ public class RelayDescriptorParser { + lastRelayIdentity + "," + serverDesc); serverDescriptorDigests.add(serverDesc); } else { - logger.warn("Could not parse r line '" - + line + "' in descriptor. Skipping."); + logger.warn("Could not parse r line '{}' in descriptor. " + + "Skipping.", line); break; } } else if (line.startsWith("m ")) { @@ -169,8 +169,8 @@ public class RelayDescriptorParser { } else if (parts.length != 3 || !parts[2].startsWith("sha256=") || parts[2].length() != 50) { - logger.warn("Could not parse m line '" - + line + "' in descriptor. Skipping."); + logger.warn("Could not parse m line '{}' in descriptor. " + + "Skipping.", line); break; } } diff --git a/src/main/java/org/torproject/metrics/collector/webstats/SanitizeWeblogs.java b/src/main/java/org/torproject/metrics/collector/webstats/SanitizeWeblogs.java index 84f4f9e..0e83598 100644 --- a/src/main/java/org/torproject/metrics/collector/webstats/SanitizeWeblogs.java +++ b/src/main/java/org/torproject/metrics/collector/webstats/SanitizeWeblogs.java @@ -100,7 +100,7 @@ public class SanitizeWeblogs extends CollecTorMain { PersistenceUtils.cleanDirectory(this.config.getPath(Key.RecentPath)); } } catch (Exception e) { - log.error("Cannot sanitize web-logs: " + e.getMessage(), e); + log.error("Cannot sanitize web-logs: {}", e.getMessage(), e); throw new RuntimeException(e); } }
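As a side note on how the multi-argument statements in this patch are rendered: SLF4J substitutes each "{}" from the argument array in order. The following sketch uses the MessageFormatter helper shipped in slf4j-api to show that substitution outside of a logger; the argument values are invented for illustration.

import org.slf4j.helpers.MessageFormatter;

public class PlaceholderRenderingDemo {

  public static void main(String[] args) {
    /* Message pattern adapted from the BridgeSnapshotReader change above;
     * the argument values are made up. */
    String pattern = "Finished importing files in directory {}/. In total, we "
        + "parsed {} files (skipped {}) containing {} statuses, {} server "
        + "descriptors (skipped {}), and {} extra-info descriptors (skipped "
        + "{}).";
    Object[] arguments = {"in/bridge-descriptors", 12, 3, 4, 250, 0, 240, 1};

    /* MessageFormatter.arrayFormat performs the same placeholder
     * substitution that the logging backend applies when the statement is
     * enabled. */
    String rendered = MessageFormatter.arrayFormat(pattern, arguments)
        .getMessage();
    System.out.println(rendered);
  }
}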