tor-commits
October 2018
commit 1c376afd3b5120e0dda4ca1b140fb9b3a9f4b902
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Mon Aug 20 16:20:13 2018 +0200
Use Arrays.asList with varargs.
---
.../collector/bridgedescs/SanitizedBridgesWriterTest.java | 13 +++++--------
1 file changed, 5 insertions(+), 8 deletions(-)
diff --git a/src/test/java/org/torproject/metrics/collector/bridgedescs/SanitizedBridgesWriterTest.java b/src/test/java/org/torproject/metrics/collector/bridgedescs/SanitizedBridgesWriterTest.java
index b791f88..819c830 100644
--- a/src/test/java/org/torproject/metrics/collector/bridgedescs/SanitizedBridgesWriterTest.java
+++ b/src/test/java/org/torproject/metrics/collector/bridgedescs/SanitizedBridgesWriterTest.java
@@ -112,16 +112,14 @@ public class SanitizedBridgesWriterTest {
this.defaultTarballTestBuilder = new TarballTestBuilder(
"from-tonga-2016-07-01T000702Z.tar.gz", 1467331624000L);
this.defaultTarballTestBuilder.add("bridge-descriptors", 1467331622000L,
- Arrays.asList(new TestDescriptorBuilder[] {
- this.defaultServerTestDescriptorBuilder }));
+ Arrays.asList(this.defaultServerTestDescriptorBuilder));
this.defaultTarballTestBuilder.add("cached-extrainfo", 1467327972000L,
- Arrays.asList(new TestDescriptorBuilder[] {
- this.defaultExtraInfoTestDescriptorBuilder }));
+ Arrays.asList(this.defaultExtraInfoTestDescriptorBuilder));
this.defaultTarballTestBuilder.add("cached-extrainfo.new", 1467331623000L,
Arrays.asList(new TestDescriptorBuilder[] { }));
this.defaultTarballTestBuilder.add("networkstatus-bridges",
- 1467330028000L, Arrays.asList(new TestDescriptorBuilder[] {
- this.defaultNetworkStatusTestDescriptorBuilder }));
+ 1467330028000L,
+ Arrays.asList(this.defaultNetworkStatusTestDescriptorBuilder));
this.tarballBuilders = new ArrayList<>(
Arrays.asList(this.defaultTarballTestBuilder));
}
@@ -713,8 +711,7 @@ public class SanitizedBridgesWriterTest {
@Test
public void testTarballContainsSameFileTwice() throws Exception {
this.defaultTarballTestBuilder.add("cached-extrainfo.new", 1467331623000L,
- Arrays.asList(new TestDescriptorBuilder[] {
- this.defaultExtraInfoTestDescriptorBuilder }));
+ Arrays.asList(this.defaultExtraInfoTestDescriptorBuilder));
this.runTest();
assertEquals("There should only be one.",
1, this.parsedExtraInfoDescriptors.size());
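For context on this change: Arrays.asList is declared with a varargs parameter, so wrapping the arguments in an explicit array is redundant. Below is a minimal, standalone sketch of the before/after shape, using a hypothetical Builder class in place of the test descriptor builders above.

import java.util.Arrays;
import java.util.List;

public class VarargsExample {

  static class Builder {}

  public static void main(String[] args) {
    Builder b = new Builder();

    // Verbose form: an explicit single-element array passed to Arrays.asList.
    List<Builder> explicit = Arrays.asList(new Builder[] { b });

    // Equivalent varargs form, as used in the commit above.
    List<Builder> concise = Arrays.asList(b);

    System.out.println(explicit.size() == concise.size()); // prints: true
  }
}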
commit 18848615322dc38324975573308019aeb36fa39e
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Tue Aug 21 11:47:59 2018 +0200
Remove unnecessary return statements.
---
.../metrics/collector/bridgedescs/BridgeDescriptorParser.java | 1 -
.../metrics/collector/bridgedescs/SanitizedBridgesWriter.java | 2 --
2 files changed, 3 deletions(-)
diff --git a/src/main/java/org/torproject/metrics/collector/bridgedescs/BridgeDescriptorParser.java b/src/main/java/org/torproject/metrics/collector/bridgedescs/BridgeDescriptorParser.java
index c993d89..04460c8 100644
--- a/src/main/java/org/torproject/metrics/collector/bridgedescs/BridgeDescriptorParser.java
+++ b/src/main/java/org/torproject/metrics/collector/bridgedescs/BridgeDescriptorParser.java
@@ -48,7 +48,6 @@ public class BridgeDescriptorParser {
}
} catch (IOException e) {
logger.warn("Could not parse or write bridge descriptor.", e);
- return;
}
}
}
diff --git a/src/main/java/org/torproject/metrics/collector/bridgedescs/SanitizedBridgesWriter.java b/src/main/java/org/torproject/metrics/collector/bridgedescs/SanitizedBridgesWriter.java
index 66a5685..1cab69f 100644
--- a/src/main/java/org/torproject/metrics/collector/bridgedescs/SanitizedBridgesWriter.java
+++ b/src/main/java/org/torproject/metrics/collector/bridgedescs/SanitizedBridgesWriter.java
@@ -659,7 +659,6 @@ public class SanitizedBridgesWriter extends CollecTorMain {
} catch (IOException e) {
logger.warn("Could not write sanitized bridge "
+ "network status to disk.", e);
- return;
}
}
@@ -1050,7 +1049,6 @@ public class SanitizedBridgesWriter extends CollecTorMain {
}
} catch (ConfigurationException | IOException e) {
logger.warn("Could not write sanitized server descriptor to disk.", e);
- return;
}
}
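The pattern removed here is a bare return as the last statement of a catch block in a void method, where control falls off the end anyway. A small sketch of the general shape, with hypothetical names:

import java.io.IOException;

public class ReturnExample {

  // Before: the trailing return adds nothing, because the method ends here anyway.
  void parseBefore(String input) {
    try {
      process(input);
    } catch (IOException e) {
      System.err.println("Could not parse: " + e.getMessage());
      return; // unnecessary: last statement of a void method
    }
  }

  // After: identical behavior without the redundant statement.
  void parseAfter(String input) {
    try {
      process(input);
    } catch (IOException e) {
      System.err.println("Could not parse: " + e.getMessage());
    }
  }

  private void process(String input) throws IOException {
    if (input == null) {
      throw new IOException("no input");
    }
  }
}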
commit 698d15d94f52fe1192fd2afb82439abace27066c
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Mon Aug 20 21:17:03 2018 +0200
Refactor CachedRelayDescriptorReader.
---
.../collector/relaydescs/ArchiveWriter.java | 3 +-
.../relaydescs/CachedRelayDescriptorReader.java | 96 ++++++++++++++--------
2 files changed, 62 insertions(+), 37 deletions(-)
diff --git a/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveWriter.java b/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveWriter.java
index 8679439..3429a0a 100644
--- a/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveWriter.java
+++ b/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveWriter.java
@@ -161,7 +161,8 @@ public class ArchiveWriter extends CollecTorMain {
}
if (sources.contains(SourceType.Cache)) {
new CachedRelayDescriptorReader(rdp,
- config.getStringArray(Key.RelayCacheOrigins), statsDirectory);
+ config.getStringArray(Key.RelayCacheOrigins), statsDirectory)
+ .readDescriptors();
this.intermediateStats("importing relay descriptors from local "
+ "Tor data directories");
}
diff --git a/src/main/java/org/torproject/metrics/collector/relaydescs/CachedRelayDescriptorReader.java b/src/main/java/org/torproject/metrics/collector/relaydescs/CachedRelayDescriptorReader.java
index f64d019..4f0d012 100644
--- a/src/main/java/org/torproject/metrics/collector/relaydescs/CachedRelayDescriptorReader.java
+++ b/src/main/java/org/torproject/metrics/collector/relaydescs/CachedRelayDescriptorReader.java
@@ -37,32 +37,52 @@ public class CachedRelayDescriptorReader {
private static final Logger logger = LoggerFactory.getLogger(
CachedRelayDescriptorReader.class);
- /** Reads cached-descriptor files from one or more directories and
- * passes them to the given descriptor parser. */
- public CachedRelayDescriptorReader(RelayDescriptorParser rdp,
- String[] inputDirectories, File statsDirectory) {
+ private RelayDescriptorParser rdp;
+
+ private String[] inputDirectories;
+
+ private File importHistoryFile;
+ private StringBuilder dumpStats;
+
+ private Set<String> lastImportHistory = new HashSet<>();
+
+ private Set<String> currentImportHistory = new HashSet<>();
+
+ /** Initializes this reader but without starting to read yet. */
+ CachedRelayDescriptorReader(RelayDescriptorParser rdp,
+ String[] inputDirectories, File statsDirectory) {
if (rdp == null || inputDirectories == null
|| inputDirectories.length == 0 || statsDirectory == null) {
throw new IllegalArgumentException();
}
+ this.rdp = rdp;
+ this.inputDirectories = inputDirectories;
+ this.importHistoryFile = new File(statsDirectory,
+ "cacheddesc-import-history");
- StringBuilder dumpStats = new StringBuilder("Finished importing "
+ this.dumpStats = new StringBuilder("Finished importing "
+ "relay descriptors from local Tor data directories:");
+ }
- /* Read import history containing SHA-1 digests of previously parsed
- * statuses and descriptors, so that we can skip them in this run. */
- Set<String> lastImportHistory = new HashSet<>();
- Set<String> currentImportHistory = new HashSet<>();
- File importHistoryFile = new File(statsDirectory,
- "cacheddesc-import-history");
+ /** Reads cached-descriptor files from one or more directories and
+ * passes them to the given descriptor parser. */
+ public void readDescriptors() {
+ this.readHistoryFile();
+ this.readDescriptorFiles();
+ this.writeHistoryFile();
+ }
+
+ /** Read import history containing SHA-1 digests of previously parsed
+ * statuses and descriptors, so that we can skip them in this run. */
+ private void readHistoryFile() {
if (importHistoryFile.exists()) {
try {
BufferedReader br = new BufferedReader(new FileReader(
importHistoryFile));
String line;
while ((line = br.readLine()) != null) {
- lastImportHistory.add(line);
+ this.lastImportHistory.add(line);
}
br.close();
} catch (IOException e) {
@@ -70,9 +90,11 @@ public class CachedRelayDescriptorReader {
+ importHistoryFile.getAbsolutePath() + ".", e);
}
}
+ }
- /* Read cached descriptors directories. */
- for (String inputDirectory : inputDirectories) {
+ /** Read cached descriptors directories. */
+ private void readDescriptorFiles() {
+ for (String inputDirectory : this.inputDirectories) {
File cachedDescDir = new File(inputDirectory);
if (!cachedDescDir.exists()) {
logger.warn("Directory " + cachedDescDir.getAbsolutePath()
@@ -113,7 +135,7 @@ public class CachedRelayDescriptorReader {
String line;
while ((line = br.readLine()) != null) {
if (line.startsWith("valid-after ")) {
- dumpStats.append("\n").append(f.getName()).append(": ")
+ this.dumpStats.append("\n").append(f.getName()).append(": ")
.append(line.substring("valid-after ".length()));
SimpleDateFormat dateTimeFormat =
new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
@@ -125,7 +147,7 @@ public class CachedRelayDescriptorReader {
+ cachedDescDir.getAbsolutePath() + " are stale. "
+ "The valid-after line in cached-consensus is '"
+ line + "'.");
- dumpStats.append(" (stale!)");
+ this.dumpStats.append(" (stale!)");
}
break;
}
@@ -136,13 +158,13 @@ public class CachedRelayDescriptorReader {
* (but regardless of whether it's stale or not). */
String digest = Hex.encodeHexString(DigestUtils.sha1(
allData));
- if (!lastImportHistory.contains(digest)
- && !currentImportHistory.contains(digest)) {
- rdp.parse(allData);
+ if (!this.lastImportHistory.contains(digest)
+ && !this.currentImportHistory.contains(digest)) {
+ this.rdp.parse(allData);
} else {
- dumpStats.append(" (skipped)");
+ this.dumpStats.append(" (skipped)");
}
- currentImportHistory.add(digest);
+ this.currentImportHistory.add(digest);
} else if (f.getName().equals("v3-status-votes")) {
int parsedNum = 0;
int skippedNum = 0;
@@ -161,18 +183,18 @@ public class CachedRelayDescriptorReader {
next - start);
String digest = Hex.encodeHexString(DigestUtils.sha1(
rawNetworkStatusBytes));
- if (!lastImportHistory.contains(digest)
- && !currentImportHistory.contains(digest)) {
- rdp.parse(rawNetworkStatusBytes);
+ if (!this.lastImportHistory.contains(digest)
+ && !this.currentImportHistory.contains(digest)) {
+ this.rdp.parse(rawNetworkStatusBytes);
parsedNum++;
} else {
skippedNum++;
}
- currentImportHistory.add(digest);
+ this.currentImportHistory.add(digest);
}
start = next;
}
- dumpStats.append("\n").append(f.getName()).append(": parsed ")
+ this.dumpStats.append("\n").append(f.getName()).append(": parsed ")
.append(parsedNum).append(", skipped ").append(skippedNum)
.append(" votes");
} else if (f.getName().startsWith("cached-descriptors")
@@ -183,7 +205,7 @@ public class CachedRelayDescriptorReader {
int end = -1;
String startToken =
f.getName().startsWith("cached-descriptors")
- ? "router " : "extra-info ";
+ ? "router " : "extra-info ";
String sigToken = "\nrouter-signature\n";
String endToken = "\n-----END SIGNATURE-----\n";
int parsedNum = 0;
@@ -207,16 +229,16 @@ public class CachedRelayDescriptorReader {
System.arraycopy(allData, start, descBytes, 0, end - start);
String digest = Hex.encodeHexString(DigestUtils.sha1(
descBytes));
- if (!lastImportHistory.contains(digest)
- && !currentImportHistory.contains(digest)) {
- rdp.parse(descBytes);
+ if (!this.lastImportHistory.contains(digest)
+ && !this.currentImportHistory.contains(digest)) {
+ this.rdp.parse(descBytes);
parsedNum++;
} else {
skippedNum++;
}
- currentImportHistory.add(digest);
+ this.currentImportHistory.add(digest);
}
- dumpStats.append("\n").append(f.getName()).append(": parsed ")
+ this.dumpStats.append("\n").append(f.getName()).append(": parsed ")
.append(parsedNum).append(", skipped ").append(skippedNum)
.append(" ").append(f.getName().startsWith("cached-descriptors")
? "server" : "extra-info").append(" descriptors");
@@ -229,19 +251,21 @@ public class CachedRelayDescriptorReader {
logger.debug("Finished reading "
+ cachedDescDir.getAbsolutePath() + " directory.");
}
+ }
- /* Write import history containing SHA-1 digests to disk. */
+ /** Write import history containing SHA-1 digests to disk. */
+ private void writeHistoryFile() {
try {
- importHistoryFile.getParentFile().mkdirs();
+ this.importHistoryFile.getParentFile().mkdirs();
BufferedWriter bw = new BufferedWriter(new FileWriter(
- importHistoryFile));
+ this.importHistoryFile));
for (String digest : currentImportHistory) {
bw.write(digest + "\n");
}
bw.close();
} catch (IOException e) {
logger.warn("Could not write import history to "
- + importHistoryFile.getAbsolutePath() + ".", e);
+ + this.importHistoryFile.getAbsolutePath() + ".", e);
}
logger.info(dumpStats.toString());
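The refactoring above turns a constructor that did all the work into a constructor that only validates and stores state, plus a public readDescriptors() entry point that delegates to three private steps; the caller in ArchiveWriter then constructs the reader and calls readDescriptors() explicitly. A minimal sketch of that shape, with hypothetical names standing in for the real reader:

import java.util.HashSet;
import java.util.Set;

class ReaderSketch {

  private final String[] inputDirectories;

  private final Set<String> importHistory = new HashSet<>();

  // Constructor only validates arguments and stores state; no I/O happens yet.
  ReaderSketch(String[] inputDirectories) {
    if (inputDirectories == null || inputDirectories.length == 0) {
      throw new IllegalArgumentException();
    }
    this.inputDirectories = inputDirectories;
  }

  // Public entry point: the former constructor body, split into small steps.
  public void readDescriptors() {
    this.readHistoryFile();
    this.readDescriptorFiles();
    this.writeHistoryFile();
  }

  private void readHistoryFile() {
    // Load digests of previously parsed descriptors into importHistory.
  }

  private void readDescriptorFiles() {
    // Walk inputDirectories and parse files whose digests are not yet known.
  }

  private void writeHistoryFile() {
    // Persist importHistory so the next run can skip already-parsed input.
  }
}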
[collector/release] Remove unnecessary modifiers and semicolons.
by karsten@torproject.org 15 Oct '18
commit 941a18353ae98b1b4eefe6c3d621e81978e85ae9
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Tue Aug 21 09:23:45 2018 +0200
Remove unnecessary modifiers and semicolons.
---
src/main/java/org/torproject/metrics/collector/conf/Annotation.java | 2 +-
src/main/java/org/torproject/metrics/collector/conf/SourceType.java | 2 +-
src/main/java/org/torproject/metrics/collector/sync/Criterium.java | 2 +-
3 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/src/main/java/org/torproject/metrics/collector/conf/Annotation.java b/src/main/java/org/torproject/metrics/collector/conf/Annotation.java
index a796f4d..f90516b 100644
--- a/src/main/java/org/torproject/metrics/collector/conf/Annotation.java
+++ b/src/main/java/org/torproject/metrics/collector/conf/Annotation.java
@@ -22,7 +22,7 @@ public enum Annotation {
private final String annotation;
private final byte[] bytes;
- private Annotation(String annotation) {
+ Annotation(String annotation) {
this.annotation = annotation;
this.bytes = annotation.getBytes();
}
diff --git a/src/main/java/org/torproject/metrics/collector/conf/SourceType.java b/src/main/java/org/torproject/metrics/collector/conf/SourceType.java
index 984ff21..ed5df4d 100644
--- a/src/main/java/org/torproject/metrics/collector/conf/SourceType.java
+++ b/src/main/java/org/torproject/metrics/collector/conf/SourceType.java
@@ -7,5 +7,5 @@ public enum SourceType {
Cache,
Local,
Remote,
- Sync;
+ Sync
}
diff --git a/src/main/java/org/torproject/metrics/collector/sync/Criterium.java b/src/main/java/org/torproject/metrics/collector/sync/Criterium.java
index c9d6212..a90de8e 100644
--- a/src/main/java/org/torproject/metrics/collector/sync/Criterium.java
+++ b/src/main/java/org/torproject/metrics/collector/sync/Criterium.java
@@ -7,7 +7,7 @@ package org.torproject.metrics.collector.sync;
public interface Criterium<T> {
/** Determine, if the given object of type T fulfills the Criterium. */
- public boolean applies(T object);
+ boolean applies(T object);
}
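These removals rely on Java defaults: enum constructors are implicitly private, interface methods are implicitly public and abstract, and the semicolon after the last enum constant is only required when a body follows. A compact sketch with hypothetical names:

enum Color {
  RED("r"),
  GREEN("g");          // semicolon needed here only because a body follows

  private final String code;

  Color(String code) { // "private" would be redundant: enum constructors already are
    this.code = code;
  }
}

enum Direction {
  NORTH,
  SOUTH                // no body follows, so no trailing semicolon is required
}

interface Check<T> {
  boolean applies(T object); // "public" would be redundant on an interface method
}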
commit 569e172c828daead3bde0f7774675efbb944f5d3
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Tue Aug 21 11:57:33 2018 +0200
Add missing log statement.
---
.../java/org/torproject/metrics/collector/relaydescs/ArchiveReader.java | 1 +
1 file changed, 1 insertion(+)
diff --git a/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveReader.java b/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveReader.java
index d166bd7..4e0e28d 100644
--- a/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveReader.java
+++ b/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveReader.java
@@ -267,6 +267,7 @@ public class ArchiveReader {
break;
}
}
+ logger.warn(sb.toString());
}
}
}
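The fix is simply to emit the report that was being built up: accumulating problems in a StringBuilder is only useful if the result is eventually logged. A trivial sketch of the pattern, with hypothetical names:

import java.util.List;

class ProblemReport {

  void report(List<String> problems) {
    if (!problems.isEmpty()) {
      StringBuilder sb = new StringBuilder("Failed importing files:");
      for (String problem : problems) {
        sb.append("\n  ").append(problem);
      }
      // Without this final statement the assembled report would be silently dropped.
      System.err.println(sb.toString());
    }
  }
}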
[collector/release] Use parameterized constructor for bulk add.
by karsten@torproject.org 15 Oct '18
commit 1e61e8228988b2494b98da58192e780d46bd2ea1
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Tue Aug 21 11:55:44 2018 +0200
Use parameterized constructor for bulk add.
---
.../torproject/metrics/collector/exitlists/ExitListDownloader.java | 6 ++----
1 file changed, 2 insertions(+), 4 deletions(-)
diff --git a/src/main/java/org/torproject/metrics/collector/exitlists/ExitListDownloader.java b/src/main/java/org/torproject/metrics/collector/exitlists/ExitListDownloader.java
index 5eab578..66fc1a7 100644
--- a/src/main/java/org/torproject/metrics/collector/exitlists/ExitListDownloader.java
+++ b/src/main/java/org/torproject/metrics/collector/exitlists/ExitListDownloader.java
@@ -168,10 +168,8 @@ public class ExitListDownloader extends CollecTorMain {
while (!filesInInputDir.isEmpty()) {
File pop = filesInInputDir.pop();
if (pop.isDirectory()) {
- SortedSet<File> lastThreeElements = new TreeSet<>();
- for (File f : pop.listFiles()) {
- lastThreeElements.add(f);
- }
+ SortedSet<File> lastThreeElements
+ = new TreeSet<>(Arrays.asList(pop.listFiles()));
while (lastThreeElements.size() > 3) {
lastThreeElements.remove(lastThreeElements.first());
}
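The change replaces an element-by-element loop with the collection constructor that accepts another collection. A minimal sketch, assuming a plain directory listing rather than the downloader's real fields:

import java.io.File;
import java.util.Arrays;
import java.util.SortedSet;
import java.util.TreeSet;

class BulkAddExample {

  static SortedSet<File> lastThree(File directory) {
    File[] listing = directory.listFiles();
    if (listing == null) {
      return new TreeSet<>();
    }
    // Bulk add via the parameterized constructor instead of a for loop.
    SortedSet<File> files = new TreeSet<>(Arrays.asList(listing));
    // Keep only the last three elements in sort order, as in the diff above.
    while (files.size() > 3) {
      files.remove(files.first());
    }
    return files;
  }
}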
commit 556a0a5ff74548edced024a5a8b40cac392b8d3a
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Tue Aug 21 10:40:08 2018 +0200
Use parameterized log statements.
---
.../org/torproject/metrics/collector/Main.java | 2 +-
.../bridgedescs/BridgeSnapshotReader.java | 43 +++--
.../bridgedescs/SanitizedBridgesWriter.java | 4 +-
.../metrics/collector/cron/Scheduler.java | 19 ++-
.../collector/exitlists/ExitListDownloader.java | 10 +-
.../metrics/collector/indexer/CreateIndexJson.java | 10 +-
.../collector/onionperf/OnionPerfDownloader.java | 6 +-
.../collector/relaydescs/ArchiveReader.java | 27 ++--
.../collector/relaydescs/ArchiveWriter.java | 74 ++++-----
.../relaydescs/CachedRelayDescriptorReader.java | 30 ++--
.../collector/relaydescs/ReferenceChecker.java | 4 +-
.../relaydescs/RelayDescriptorDownloader.java | 179 ++++++++++-----------
.../relaydescs/RelayDescriptorParser.java | 8 +-
.../collector/webstats/SanitizeWeblogs.java | 2 +-
14 files changed, 189 insertions(+), 229 deletions(-)
diff --git a/src/main/java/org/torproject/metrics/collector/Main.java b/src/main/java/org/torproject/metrics/collector/Main.java
index 6230e36..46e93af 100644
--- a/src/main/java/org/torproject/metrics/collector/Main.java
+++ b/src/main/java/org/torproject/metrics/collector/Main.java
@@ -103,7 +103,7 @@ public class Main {
+ ") and provide at least one data source and one data sink. "
+ "Refer to the manual for more information.");
} catch (IOException e) {
- log.error("Cannot write default configuration. Reason: " + e, e);
+ log.error("Cannot write default configuration.", e);
throw new RuntimeException(e);
}
}
diff --git a/src/main/java/org/torproject/metrics/collector/bridgedescs/BridgeSnapshotReader.java b/src/main/java/org/torproject/metrics/collector/bridgedescs/BridgeSnapshotReader.java
index fccee5e..4f72b5a 100644
--- a/src/main/java/org/torproject/metrics/collector/bridgedescs/BridgeSnapshotReader.java
+++ b/src/main/java/org/torproject/metrics/collector/bridgedescs/BridgeSnapshotReader.java
@@ -49,7 +49,7 @@ public class BridgeSnapshotReader {
boolean modified = false;
if (bdDir.exists()) {
if (pbdFile.exists()) {
- logger.debug("Reading file " + pbdFile.getAbsolutePath() + "...");
+ logger.debug("Reading file {}...", pbdFile.getAbsolutePath());
try {
BufferedReader br = new BufferedReader(new FileReader(pbdFile));
String line;
@@ -57,16 +57,13 @@ public class BridgeSnapshotReader {
parsed.add(line);
}
br.close();
- logger.debug("Finished reading file "
- + pbdFile.getAbsolutePath() + ".");
+ logger.debug("Finished reading file {}.", pbdFile.getAbsolutePath());
} catch (IOException e) {
- logger.warn("Failed reading file "
- + pbdFile.getAbsolutePath() + "!", e);
+ logger.warn("Failed reading file {}!", pbdFile.getAbsolutePath(), e);
return;
}
}
- logger.debug("Importing files in directory " + bridgeDirectoriesDir
- + "/...");
+ logger.debug("Importing files in directory {}/...", bridgeDirectoriesDir);
Set<String> descriptorImportHistory = new HashSet<>();
int parsedFiles = 0;
int skippedFiles = 0;
@@ -99,8 +96,8 @@ public class BridgeSnapshotReader {
String fn = pop.getName();
String[] fnParts = fn.split("-");
if (fnParts.length != 5) {
- logger.warn("Invalid bridge descriptor tarball file name: "
- + fn + ". Skipping.");
+ logger.warn("Invalid bridge descriptor tarball file name: {}. "
+ + "Skipping.", fn);
continue;
}
String authorityPart = String.format("%s-%s-", fnParts[0],
@@ -123,7 +120,7 @@ public class BridgeSnapshotReader {
break;
default:
logger.warn("Did not recognize the bridge authority that "
- + "generated " + fn + ". Skipping.");
+ + "generated {}. Skipping.", fn);
continue;
}
String dateTime = datePart.substring(0, 10) + " "
@@ -226,32 +223,28 @@ public class BridgeSnapshotReader {
parsed.add(pop.getName());
modified = true;
} catch (IOException e) {
- logger.warn("Could not parse bridge snapshot "
- + pop.getName() + "!", e);
+ logger.warn("Could not parse bridge snapshot {}!", pop.getName(),
+ e);
continue;
}
}
}
- logger.debug("Finished importing files in directory "
- + bridgeDirectoriesDir + "/. In total, we parsed "
- + parsedFiles + " files (skipped " + skippedFiles
- + ") containing " + parsedStatuses + " statuses, "
- + parsedServerDescriptors + " server descriptors (skipped "
- + skippedServerDescriptors + "), and "
- + parsedExtraInfoDescriptors + " extra-info descriptors "
- + "(skipped " + skippedExtraInfoDescriptors + ").");
+ logger.debug("Finished importing files in directory {}/. In total, we "
+ + "parsed {} files (skipped {}) containing {} statuses, {} server "
+ + "descriptors (skipped {}), and {} extra-info descriptors (skipped "
+ + "{}).", bridgeDirectoriesDir, parsedFiles, skippedFiles,
+ parsedStatuses, parsedServerDescriptors, skippedServerDescriptors,
+ parsedExtraInfoDescriptors, skippedExtraInfoDescriptors);
if (!parsed.isEmpty() && modified) {
- logger.debug("Writing file " + pbdFile.getAbsolutePath() + "...");
+ logger.debug("Writing file {}...", pbdFile.getAbsolutePath());
pbdFile.getParentFile().mkdirs();
try (BufferedWriter bw = new BufferedWriter(new FileWriter(pbdFile))) {
for (String f : parsed) {
bw.append(f).append("\n");
}
- logger.debug("Finished writing file " + pbdFile.getAbsolutePath()
- + ".");
+ logger.debug("Finished writing file {}.", pbdFile.getAbsolutePath());
} catch (IOException e) {
- logger.warn("Failed writing file "
- + pbdFile.getAbsolutePath() + "!", e);
+ logger.warn("Failed writing file {}!", pbdFile.getAbsolutePath(), e);
}
}
}
diff --git a/src/main/java/org/torproject/metrics/collector/bridgedescs/SanitizedBridgesWriter.java b/src/main/java/org/torproject/metrics/collector/bridgedescs/SanitizedBridgesWriter.java
index 2bd85ef..66a5685 100644
--- a/src/main/java/org/torproject/metrics/collector/bridgedescs/SanitizedBridgesWriter.java
+++ b/src/main/java/org/torproject/metrics/collector/bridgedescs/SanitizedBridgesWriter.java
@@ -1067,8 +1067,8 @@ public class SanitizedBridgesWriter extends CollecTorMain {
logger.warn("Unknown cert type in identity-ed25519: {}",
identityEd25519[1]);
} else if (identityEd25519[6] != 0x01) {
- logger.warn("Unknown certified key type in "
- + "identity-ed25519: " + identityEd25519[1]);
+ logger.warn("Unknown certified key type in identity-ed25519: {}",
+ identityEd25519[1]);
} else if (identityEd25519[39] == 0x00) {
logger.warn("No extensions in identity-ed25519 (which "
+ "would contain the encoded master-key-ed25519): {}",
diff --git a/src/main/java/org/torproject/metrics/collector/cron/Scheduler.java b/src/main/java/org/torproject/metrics/collector/cron/Scheduler.java
index 1fc0039..db10205 100644
--- a/src/main/java/org/torproject/metrics/collector/cron/Scheduler.java
+++ b/src/main/java/org/torproject/metrics/collector/cron/Scheduler.java
@@ -70,8 +70,7 @@ public final class Scheduler implements ThreadFactory {
CollecTorMain ctm = ctmEntry.getValue()
.getConstructor(Configuration.class).newInstance(conf);
if (conf.getBool(Key.RunOnce)) {
- logger.info("Prepare single run for " + ctm.getClass().getName()
- + ".");
+ logger.info("Prepare single run for {}.", ctm.getClass().getName());
runOnceMains.add(Executors.callable(ctm));
} else {
scheduleExecutions(ctm,
@@ -83,8 +82,8 @@ public final class Scheduler implements ThreadFactory {
| InstantiationException | InvocationTargetException
| NoSuchMethodException | RejectedExecutionException
| NullPointerException ex) {
- logger.error("Cannot schedule " + ctmEntry.getValue().getName()
- + ". Reason: " + ex.getMessage(), ex);
+ logger.error("Cannot schedule {}. Reason: {}",
+ ctmEntry.getValue().getName(), ex.getMessage(), ex);
}
}
try {
@@ -93,13 +92,13 @@ public final class Scheduler implements ThreadFactory {
}
} catch (ConfigurationException | InterruptedException
| RejectedExecutionException | NullPointerException ex) {
- logger.error("Cannot schedule run-once: " + ex.getMessage(), ex);
+ logger.error("Cannot schedule run-once: {}", ex.getMessage(), ex);
}
}
private void scheduleExecutions(CollecTorMain ctm, int offset, int period) {
- logger.info("Periodic updater started for " + ctm.getClass().getName()
- + "; offset=" + offset + ", period=" + period + ".");
+ logger.info("Periodic updater started for {}; offset={}, period={}.",
+ ctm.getClass().getName(), offset, period);
long periodMillis = period * MILLIS_IN_A_MINUTE;
long initialDelayMillis = computeInitialDelayMillis(
System.currentTimeMillis(), offset * MILLIS_IN_A_MINUTE, periodMillis);
@@ -132,9 +131,9 @@ public final class Scheduler implements ThreadFactory {
logger.info("Shutdown of all scheduled tasks completed successfully.");
} catch (InterruptedException ie) {
List<Runnable> notTerminated = scheduler.shutdownNow();
- logger.error("Regular shutdown failed for: " + notTerminated);
+ logger.error("Regular shutdown failed for: {}", notTerminated);
if (!notTerminated.isEmpty()) {
- logger.error("Forced shutdown failed for: " + notTerminated);
+ logger.error("Forced shutdown failed for: {}", notTerminated);
}
}
}
@@ -147,7 +146,7 @@ public final class Scheduler implements ThreadFactory {
Thread newThread = threads.newThread(runner);
newThread.setDaemon(true);
newThread.setName("CollecTor-Scheduled-Thread-" + ++currentThreadNo);
- logger.info("New Thread created: " + newThread.getName());
+ logger.info("New Thread created: {}", newThread.getName());
return newThread;
}
}
diff --git a/src/main/java/org/torproject/metrics/collector/exitlists/ExitListDownloader.java b/src/main/java/org/torproject/metrics/collector/exitlists/ExitListDownloader.java
index 0b7dfad..5eab578 100644
--- a/src/main/java/org/torproject/metrics/collector/exitlists/ExitListDownloader.java
+++ b/src/main/java/org/torproject/metrics/collector/exitlists/ExitListDownloader.java
@@ -138,9 +138,9 @@ public class ExitListDownloader extends CollecTorMain {
}
if (maxScanMillis > 0L
&& maxScanMillis + 330L * 60L * 1000L < System.currentTimeMillis()) {
- logger.warn("The last reported scan in the downloaded exit list "
- + "took place at " + dateTimeFormat.format(maxScanMillis)
- + ", which is more than 5:30 hours in the past.");
+ logger.warn("The last reported scan in the downloaded exit list took "
+ + "place at {}, which is more than 5:30 hours in the past.",
+ dateTimeFormat.format(maxScanMillis));
}
/* Write to disk. */
@@ -154,8 +154,8 @@ public class ExitListDownloader extends CollecTorMain {
bw.write(downloadedExitList);
bw.close();
} catch (IOException e) {
- logger.warn("Could not write downloaded exit list "
- + "to " + outputFile.getAbsolutePath(), e);
+ logger.warn("Could not write downloaded exit list to {}",
+ outputFile.getAbsolutePath(), e);
}
}
diff --git a/src/main/java/org/torproject/metrics/collector/indexer/CreateIndexJson.java b/src/main/java/org/torproject/metrics/collector/indexer/CreateIndexJson.java
index 5f3a649..a018f8b 100644
--- a/src/main/java/org/torproject/metrics/collector/indexer/CreateIndexJson.java
+++ b/src/main/java/org/torproject/metrics/collector/indexer/CreateIndexJson.java
@@ -92,7 +92,7 @@ public class CreateIndexJson extends CollecTorMain {
config.getPath(Key.RecentPath).toFile() };
writeIndex(indexDirectories());
} catch (Exception e) {
- logger.error("Cannot run index creation: " + e.getMessage(), e);
+ logger.error("Cannot run index creation: {}", e.getMessage(), e);
throw new RuntimeException(e);
}
}
@@ -108,8 +108,8 @@ public class CreateIndexJson extends CollecTorMain {
private IndexNode indexDirectories() {
SortedSet<DirectoryNode> directoryNodes = new TreeSet<>();
- logger.trace("indexing: " + indexedDirectories[0] + " "
- + indexedDirectories[1]);
+ logger.trace("indexing: {} {}", indexedDirectories[0],
+ indexedDirectories[1]);
for (File directory : indexedDirectories) {
if (directory.exists() && directory.isDirectory()) {
DirectoryNode dn = indexDirectory(directory);
@@ -126,10 +126,10 @@ public class CreateIndexJson extends CollecTorMain {
private DirectoryNode indexDirectory(File directory) {
SortedSet<FileNode> fileNodes = new TreeSet<>();
SortedSet<DirectoryNode> directoryNodes = new TreeSet<>();
- logger.trace("indexing: " + directory);
+ logger.trace("indexing: {}", directory);
File[] fileList = directory.listFiles();
if (null == fileList) {
- logger.warn("Indexing dubious directory: " + directory);
+ logger.warn("Indexing dubious directory: {}", directory);
return null;
}
for (File fileOrDirectory : fileList) {
diff --git a/src/main/java/org/torproject/metrics/collector/onionperf/OnionPerfDownloader.java b/src/main/java/org/torproject/metrics/collector/onionperf/OnionPerfDownloader.java
index f97a4f8..ca307a5 100644
--- a/src/main/java/org/torproject/metrics/collector/onionperf/OnionPerfDownloader.java
+++ b/src/main/java/org/torproject/metrics/collector/onionperf/OnionPerfDownloader.java
@@ -111,9 +111,9 @@ public class OnionPerfDownloader extends CollecTorMain {
this.downloadedTpfFiles.add(line);
}
} catch (IOException e) {
- logger.info("Unable to read download history file '"
- + this.onionPerfDownloadedFile.getAbsolutePath() + "'. Ignoring "
- + "download history and downloading all available .tpf files.");
+ logger.info("Unable to read download history file '{}'. Ignoring "
+ + "download history and downloading all available .tpf files.",
+ this.onionPerfDownloadedFile.getAbsolutePath());
this.downloadedTpfFiles.clear();
}
}
diff --git a/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveReader.java b/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveReader.java
index 7c59054..d166bd7 100644
--- a/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveReader.java
+++ b/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveReader.java
@@ -103,8 +103,8 @@ public class ArchiveReader {
private void readDescriptorFiles() {
if (this.archivesDirectory.exists()) {
- logger.debug("Importing files in directory " + this.archivesDirectory
- + "/...");
+ logger.debug("Importing files in directory {}/...",
+ this.archivesDirectory);
Stack<File> filesInInputDir = new Stack<>();
filesInInputDir.add(this.archivesDirectory);
List<File> problems = new ArrayList<>();
@@ -121,8 +121,8 @@ public class ArchiveReader {
this.ignoredFiles++;
continue;
} else if (pop.getName().endsWith(".tar.bz2")) {
- logger.warn("Cannot parse compressed tarball "
- + pop.getAbsolutePath() + ". Skipping.");
+ logger.warn("Cannot parse compressed tarball {}. Skipping.",
+ pop.getAbsolutePath());
continue;
} else if (pop.getName().endsWith(".bz2")) {
FileInputStream fis = new FileInputStream(pop);
@@ -225,9 +225,8 @@ public class ArchiveReader {
String digest256Hex = DigestUtils.sha256Hex(descBytes);
if (!this.microdescriptorValidAfterTimes.containsKey(
digest256Hex)) {
- logger.debug("Could not store microdescriptor '"
- + digest256Hex + "', which was not contained in a "
- + "microdesc consensus.");
+ logger.debug("Could not store microdescriptor '{}', which was "
+ + "not contained in a microdesc consensus.", digest256Hex);
continue;
}
for (String validAfterTime :
@@ -238,9 +237,8 @@ public class ArchiveReader {
rdp.storeMicrodescriptor(descBytes, digest256Hex,
digest256Base64, validAfter);
} catch (ParseException e) {
- logger.warn("Could not parse "
- + "valid-after time '" + validAfterTime + "'. Not "
- + "storing microdescriptor.", e);
+ logger.warn("Could not parse valid-after time '{}'. Not "
+ + "storing microdescriptor.", validAfterTime, e);
}
}
}
@@ -256,8 +254,8 @@ public class ArchiveReader {
}
}
if (problems.isEmpty()) {
- logger.debug("Finished importing files in directory "
- + this.archivesDirectory + "/.");
+ logger.debug("Finished importing files in directory {}/.",
+ this.archivesDirectory);
} else {
StringBuilder sb = new StringBuilder("Failed importing files in "
+ "directory " + this.archivesDirectory + "/:");
@@ -288,9 +286,8 @@ public class ArchiveReader {
+ "history file.");
}
}
- logger.info("Finished importing relay descriptors from local "
- + "directory:\nParsed " + this.parsedFiles + ", ignored "
- + this.ignoredFiles + " files.");
+ logger.info("Finished importing relay descriptors from local directory:\n"
+ + "Parsed {}, ignored {} files.", this.parsedFiles, this.ignoredFiles);
}
/** Stores the valid-after time and microdescriptor digests of a given
diff --git a/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveWriter.java b/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveWriter.java
index 3429a0a..edd03d7 100644
--- a/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveWriter.java
+++ b/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveWriter.java
@@ -221,10 +221,9 @@ public class ArchiveWriter extends CollecTorMain {
while ((line = br.readLine()) != null) {
String[] parts = line.split(",");
if (parts.length != 3) {
- logger.warn("Could not load server descriptor "
- + "digests because of illegal line '" + line + "'. We "
- + "might not be able to correctly check descriptors for "
- + "completeness.");
+ logger.warn("Could not load server descriptor digests because of "
+ + "illegal line '{}'. We might not be able to correctly "
+ + "check descriptors for completeness.", line);
break;
}
long published = dateTimeFormat.parse(parts[0]).getTime();
@@ -249,10 +248,9 @@ public class ArchiveWriter extends CollecTorMain {
while ((line = br.readLine()) != null) {
String[] parts = line.split(",");
if (parts.length != 2) {
- logger.warn("Could not load extra-info descriptor "
- + "digests because of illegal line '" + line + "'. We "
- + "might not be able to correctly check descriptors for "
- + "completeness.");
+ logger.warn("Could not load extra-info descriptor digests because "
+ + "of illegal line '{}'. We might not be able to correctly "
+ + "check descriptors for completeness.", line);
break;
}
long published = dateTimeFormat.parse(parts[0]).getTime();
@@ -275,10 +273,9 @@ public class ArchiveWriter extends CollecTorMain {
while ((line = br.readLine()) != null) {
String[] parts = line.split(",");
if (parts.length != 2) {
- logger.warn("Could not load microdescriptor digests "
- + "because of illegal line '" + line + "'. We might not "
- + "be able to correctly check descriptors for "
- + "completeness.");
+ logger.warn("Could not load microdescriptor digests because of "
+ + "illegal line '{}'. We might not be able to correctly check "
+ + "descriptors for completeness.", line);
break;
}
long validAfter = dateTimeFormat.parse(parts[0]).getTime();
@@ -510,46 +507,40 @@ public class ArchiveWriter extends CollecTorMain {
long tooOldMillis = this.now - 330L * 60L * 1000L;
if (!this.storedConsensuses.isEmpty()
&& this.storedConsensuses.lastKey() < tooOldMillis) {
- logger.warn("The last known relay network status "
- + "consensus was valid after "
- + dateTimeFormat.format(this.storedConsensuses.lastKey())
- + ", which is more than 5:30 hours in the past.");
+ logger.warn("The last known relay network status consensus was valid "
+ + "after {}, which is more than 5:30 hours in the past.",
+ dateTimeFormat.format(this.storedConsensuses.lastKey()));
}
if (!this.storedMicrodescConsensuses.isEmpty()
&& this.storedMicrodescConsensuses.lastKey() < tooOldMillis) {
- logger.warn("The last known relay network status "
- + "microdesc consensus was valid after "
- + dateTimeFormat.format(
- this.storedMicrodescConsensuses.lastKey())
- + ", which is more than 5:30 hours in the past.");
+ logger.warn("The last known relay network status microdesc consensus "
+ + "was valid after {}, which is more than 5:30 hours in the past.",
+ dateTimeFormat.format(this.storedMicrodescConsensuses.lastKey()));
}
if (!this.storedVotes.isEmpty()
&& this.storedVotes.lastKey() < tooOldMillis) {
- logger.warn("The last known relay network status vote "
- + "was valid after " + dateTimeFormat.format(
- this.storedVotes.lastKey()) + ", which is more than 5:30 hours "
- + "in the past.");
+ logger.warn("The last known relay network status vote was valid after "
+ + "{}, which is more than 5:30 hours in the past.",
+ dateTimeFormat.format(this.storedVotes.lastKey()));
}
if (!this.storedServerDescriptors.isEmpty()
&& this.storedServerDescriptors.lastKey() < tooOldMillis) {
- logger.warn("The last known relay server descriptor was "
- + "published at "
- + dateTimeFormat.format(this.storedServerDescriptors.lastKey())
- + ", which is more than 5:30 hours in the past.");
+ logger.warn("The last known relay server descriptor was published at "
+ + "{}, which is more than 5:30 hours in the past.",
+ dateTimeFormat.format(this.storedServerDescriptors.lastKey()));
}
if (!this.storedExtraInfoDescriptors.isEmpty()
&& this.storedExtraInfoDescriptors.lastKey() < tooOldMillis) {
- logger.warn("The last known relay extra-info descriptor "
- + "was published at " + dateTimeFormat.format(
- this.storedExtraInfoDescriptors.lastKey())
- + ", which is more than 5:30 hours in the past.");
+ logger.warn("The last known relay extra-info descriptor was published "
+ + "at {}, which is more than 5:30 hours in the past.",
+ dateTimeFormat.format(this.storedExtraInfoDescriptors.lastKey()));
}
if (!this.storedMicrodescriptors.isEmpty()
&& this.storedMicrodescriptors.lastKey() < tooOldMillis) {
- logger.warn("The last known relay microdescriptor was "
- + "contained in a microdesc consensus that was valid after "
- + dateTimeFormat.format(this.storedMicrodescriptors.lastKey())
- + ", which is more than 5:30 hours in the past.");
+ logger.warn("The last known relay microdescriptor was contained in a "
+ + "microdesc consensus that was valid after {}, which is more than "
+ + "5:30 hours in the past.",
+ dateTimeFormat.format(this.storedMicrodescriptors.lastKey()));
}
}
@@ -821,7 +812,7 @@ public class ArchiveWriter extends CollecTorMain {
private boolean store(byte[] typeAnnotation, byte[] data,
File[] outputFiles, boolean[] append) {
- logger.trace("Storing " + outputFiles[0]);
+ logger.trace("Storing {}", outputFiles[0]);
int parseableDescriptors = 0;
for (Descriptor descriptor : this.descriptorParser.parseDescriptors(data,
null, outputFiles[0].getName())) {
@@ -830,8 +821,8 @@ public class ArchiveWriter extends CollecTorMain {
}
}
if (parseableDescriptors != 1) {
- logger.info("Relay descriptor file " + outputFiles[0]
- + " doesn't contain exactly one descriptor. Storing anyway.");
+ logger.info("Relay descriptor file {} doesn't contain exactly one "
+ + "descriptor. Storing anyway.", outputFiles[0]);
}
try {
for (int i = 0; i < outputFiles.length; i++) {
@@ -848,8 +839,7 @@ public class ArchiveWriter extends CollecTorMain {
}
return true;
} catch (IOException e) {
- logger.warn("Could not store relay descriptor "
- + outputFiles[0], e);
+ logger.warn("Could not store relay descriptor {}", outputFiles[0], e);
}
return false;
}
diff --git a/src/main/java/org/torproject/metrics/collector/relaydescs/CachedRelayDescriptorReader.java b/src/main/java/org/torproject/metrics/collector/relaydescs/CachedRelayDescriptorReader.java
index 4f0d012..3e1bba4 100644
--- a/src/main/java/org/torproject/metrics/collector/relaydescs/CachedRelayDescriptorReader.java
+++ b/src/main/java/org/torproject/metrics/collector/relaydescs/CachedRelayDescriptorReader.java
@@ -86,8 +86,8 @@ public class CachedRelayDescriptorReader {
}
br.close();
} catch (IOException e) {
- logger.warn("Could not read import history from "
- + importHistoryFile.getAbsolutePath() + ".", e);
+ logger.warn("Could not read import history from {}.",
+ importHistoryFile.getAbsolutePath(), e);
}
}
}
@@ -97,12 +97,11 @@ public class CachedRelayDescriptorReader {
for (String inputDirectory : this.inputDirectories) {
File cachedDescDir = new File(inputDirectory);
if (!cachedDescDir.exists()) {
- logger.warn("Directory " + cachedDescDir.getAbsolutePath()
- + " does not exist. Skipping.");
+ logger.warn("Directory {} does not exist. Skipping.",
+ cachedDescDir.getAbsolutePath());
continue;
}
- logger.debug("Reading " + cachedDescDir.getAbsolutePath()
- + " directory.");
+ logger.debug("Reading {} directory.", cachedDescDir.getAbsolutePath());
SortedSet<File> cachedDescFiles = new TreeSet<>();
Stack<File> files = new Stack<>();
files.add(cachedDescDir);
@@ -143,10 +142,9 @@ public class CachedRelayDescriptorReader {
if (dateTimeFormat.parse(line.substring("valid-after "
.length())).getTime() < System.currentTimeMillis()
- 6L * 60L * 60L * 1000L) {
- logger.warn("Cached descriptor files in "
- + cachedDescDir.getAbsolutePath() + " are stale. "
- + "The valid-after line in cached-consensus is '"
- + line + "'.");
+ logger.warn("Cached descriptor files in {} are stale. The "
+ + "valid-after line in cached-consensus is '{}'.",
+ cachedDescDir.getAbsolutePath(), line);
this.dumpStats.append(" (stale!)");
}
break;
@@ -244,12 +242,12 @@ public class CachedRelayDescriptorReader {
? "server" : "extra-info").append(" descriptors");
}
} catch (IOException | ParseException e) {
- logger.warn("Failed reading "
- + cachedDescDir.getAbsolutePath() + " directory.", e);
+ logger.warn("Failed reading {} directory.",
+ cachedDescDir.getAbsolutePath(), e);
}
}
- logger.debug("Finished reading "
- + cachedDescDir.getAbsolutePath() + " directory.");
+ logger.debug("Finished reading {} directory.",
+ cachedDescDir.getAbsolutePath());
}
}
@@ -264,8 +262,8 @@ public class CachedRelayDescriptorReader {
}
bw.close();
} catch (IOException e) {
- logger.warn("Could not write import history to "
- + this.importHistoryFile.getAbsolutePath() + ".", e);
+ logger.warn("Could not write import history to {}.",
+ this.importHistoryFile.getAbsolutePath(), e);
}
logger.info(dumpStats.toString());
diff --git a/src/main/java/org/torproject/metrics/collector/relaydescs/ReferenceChecker.java b/src/main/java/org/torproject/metrics/collector/relaydescs/ReferenceChecker.java
index 0984de0..f3a21ba 100644
--- a/src/main/java/org/torproject/metrics/collector/relaydescs/ReferenceChecker.java
+++ b/src/main/java/org/torproject/metrics/collector/relaydescs/ReferenceChecker.java
@@ -315,8 +315,8 @@ public class ReferenceChecker {
}
logger.info(sb.toString());
if (totalMissingDescriptorsWeight > 0.999) {
- logger.warn("Missing too many referenced "
- + "descriptors (" + totalMissingDescriptorsWeight + ").");
+ logger.warn("Missing too many referenced descriptors ({}).",
+ totalMissingDescriptorsWeight);
}
}
diff --git a/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorDownloader.java b/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorDownloader.java
index 446b6a7..6e470d8 100644
--- a/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorDownloader.java
+++ b/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorDownloader.java
@@ -344,8 +344,8 @@ public class RelayDescriptorDownloader {
"stats/missing-relay-descriptors");
if (this.missingDescriptorsFile.exists()) {
try {
- logger.debug("Reading file "
- + this.missingDescriptorsFile.getAbsolutePath() + "...");
+ logger.debug("Reading file {}...",
+ this.missingDescriptorsFile.getAbsolutePath());
BufferedReader br = new BufferedReader(new FileReader(
this.missingDescriptorsFile));
String line;
@@ -395,19 +395,17 @@ public class RelayDescriptorDownloader {
}
}
} else {
- logger.debug("Invalid line '" + line + "' in "
- + this.missingDescriptorsFile.getAbsolutePath()
- + ". Ignoring.");
+ logger.debug("Invalid line '{}' in {}. Ignoring.", line,
+ this.missingDescriptorsFile.getAbsolutePath());
}
}
br.close();
- logger.debug("Finished reading file "
- + this.missingDescriptorsFile.getAbsolutePath() + ".");
+ logger.debug("Finished reading file {}.",
+ this.missingDescriptorsFile.getAbsolutePath());
} catch (IOException e) {
- logger.warn("Failed to read file "
- + this.missingDescriptorsFile.getAbsolutePath()
- + "! This means that we might forget to dowload relay "
- + "descriptors we are missing.", e);
+ logger.warn("Failed to read file {}! This means that we might forget "
+ + "to dowload relay descriptors we are missing.",
+ this.missingDescriptorsFile.getAbsolutePath(), e);
}
}
@@ -418,17 +416,15 @@ public class RelayDescriptorDownloader {
"stats/last-downloaded-all-descriptors");
if (this.lastDownloadedAllDescriptorsFile.exists()) {
try {
- logger.debug("Reading file "
- + this.lastDownloadedAllDescriptorsFile.getAbsolutePath()
- + "...");
+ logger.debug("Reading file {}...",
+ this.lastDownloadedAllDescriptorsFile.getAbsolutePath());
BufferedReader br = new BufferedReader(new FileReader(
this.lastDownloadedAllDescriptorsFile));
String line;
while ((line = br.readLine()) != null) {
if (line.split(",").length != 2) {
- logger.debug("Invalid line '" + line + "' in "
- + this.lastDownloadedAllDescriptorsFile.getAbsolutePath()
- + ". Ignoring.");
+ logger.debug("Invalid line '{}' in {}. Ignoring.", line,
+ this.lastDownloadedAllDescriptorsFile.getAbsolutePath());
} else {
String[] parts = line.split(",");
String authority = parts[0];
@@ -438,14 +434,13 @@ public class RelayDescriptorDownloader {
}
}
br.close();
- logger.debug("Finished reading file "
- + this.lastDownloadedAllDescriptorsFile.getAbsolutePath()
- + ".");
+ logger.debug("Finished reading file {}.",
+ this.lastDownloadedAllDescriptorsFile.getAbsolutePath());
} catch (IOException e) {
- logger.warn("Failed to read file "
- + this.lastDownloadedAllDescriptorsFile.getAbsolutePath()
- + "! This means that we might download all server and "
- + "extra-info descriptors more often than we should.", e);
+ logger.warn("Failed to read file {}! This means that we might "
+ + "download all server and extra-info descriptors more often than "
+ + "we should.",
+ this.lastDownloadedAllDescriptorsFile.getAbsolutePath(), e);
}
}
@@ -839,7 +834,7 @@ public class RelayDescriptorDownloader {
/* If a download failed, stop requesting descriptors from this
* authority and move on to the next. */
} catch (IOException e) {
- logger.debug("Failed downloading from " + authority + "!", e);
+ logger.debug("Failed downloading from {}!", authority, e);
}
}
}
@@ -883,8 +878,8 @@ public class RelayDescriptorDownloader {
allData = baos.toByteArray();
}
}
- logger.debug("Downloaded " + fullUrl + " -> " + response + " ("
- + (allData == null ? 0 : allData.length) + " bytes)");
+ logger.debug("Downloaded {} -> {} ({} bytes)", fullUrl, response,
+ allData == null ? 0 : allData.length);
int receivedDescriptors = 0;
if (allData != null) {
if (resource.startsWith("/tor/status-vote/current/")) {
@@ -967,10 +962,9 @@ public class RelayDescriptorDownloader {
this.rdp.storeMicrodescriptor(descBytes, digest256Hex,
digest256Base64, validAfter);
} catch (ParseException e) {
- logger.warn("Could not parse "
- + "valid-after time '" + validAfterTime + "' in "
+ logger.warn("Could not parse valid-after time '{}' in "
+ "microdescriptor key. Not storing microdescriptor.",
- e);
+ validAfterTime, e);
}
}
receivedDescriptors++;
@@ -993,8 +987,8 @@ public class RelayDescriptorDownloader {
int missingServerDescriptors = 0;
int missingExtraInfoDescriptors = 0;
try {
- logger.debug("Writing file "
- + this.missingDescriptorsFile.getAbsolutePath() + "...");
+ logger.debug("Writing file {}...",
+ this.missingDescriptorsFile.getAbsolutePath());
this.missingDescriptorsFile.getParentFile().mkdirs();
BufferedWriter bw = new BufferedWriter(new FileWriter(
this.missingDescriptorsFile));
@@ -1020,20 +1014,19 @@ public class RelayDescriptorDownloader {
bw.write(key + "," + value + "\n");
}
bw.close();
- logger.debug("Finished writing file "
- + this.missingDescriptorsFile.getAbsolutePath() + ".");
+ logger.debug("Finished writing file {}.",
+ this.missingDescriptorsFile.getAbsolutePath());
} catch (IOException e) {
- logger.warn("Failed writing "
- + this.missingDescriptorsFile.getAbsolutePath() + "!", e);
+ logger.warn("Failed writing {}!",
+ this.missingDescriptorsFile.getAbsolutePath(), e);
}
/* Write text file containing the directory authorities and when we
* last downloaded all server and extra-info descriptors from them to
* disk. */
try {
- logger.debug("Writing file "
- + this.lastDownloadedAllDescriptorsFile.getAbsolutePath()
- + "...");
+ logger.debug("Writing file {}...",
+ this.lastDownloadedAllDescriptorsFile.getAbsolutePath());
this.lastDownloadedAllDescriptorsFile.getParentFile().mkdirs();
BufferedWriter bw = new BufferedWriter(new FileWriter(
this.lastDownloadedAllDescriptorsFile));
@@ -1044,77 +1037,67 @@ public class RelayDescriptorDownloader {
bw.write(authority + "," + lastDownloaded + "\n");
}
bw.close();
- logger.debug("Finished writing file "
- + this.lastDownloadedAllDescriptorsFile.getAbsolutePath()
- + ".");
+ logger.debug("Finished writing file {}.",
+ this.lastDownloadedAllDescriptorsFile.getAbsolutePath());
} catch (IOException e) {
- logger.warn("Failed writing "
- + this.lastDownloadedAllDescriptorsFile.getAbsolutePath() + "!",
- e);
+ logger.warn("Failed writing {}!",
+ this.lastDownloadedAllDescriptorsFile.getAbsolutePath(), e);
}
/* Log statistics about this execution. */
logger.info("Finished downloading relay descriptors from the "
+ "directory authorities.");
- logger.info("At the beginning of this execution, we were "
- + "missing " + oldMissingConsensuses + " consensus(es), "
- + oldMissingMicrodescConsensuses + " microdesc consensus(es), "
- + oldMissingVotes + " vote(s), " + oldMissingServerDescriptors
- + " server descriptor(s), " + oldMissingExtraInfoDescriptors
- + " extra-info descriptor(s), and " + oldMissingMicrodescriptors
- + " microdescriptor(s).");
- logger.info("During this execution, we added "
- + this.newMissingConsensuses + " consensus(es), "
- + this.newMissingMicrodescConsensuses
- + " microdesc consensus(es), " + this.newMissingVotes
- + " vote(s), " + this.newMissingServerDescriptors
- + " server descriptor(s), " + this.newMissingExtraInfoDescriptors
- + " extra-info descriptor(s), and "
- + this.newMissingMicrodescriptors + " microdescriptor(s) to the "
- + "missing list, some of which we also "
- + "requested and removed from the list again.");
- logger.info("We requested " + this.requestedConsensuses
- + " consensus(es), " + this.requestedMicrodescConsensuses
- + " microdesc consensus(es), " + this.requestedVotes
- + " vote(s), " + this.requestedMissingServerDescriptors
- + " missing server descriptor(s), "
- + this.requestedAllServerDescriptors
- + " times all server descriptors, "
- + this.requestedMissingExtraInfoDescriptors + " missing "
- + "extra-info descriptor(s), "
- + this.requestedAllExtraInfoDescriptors + " times all extra-info "
- + "descriptors, and " + this.requestedMissingMicrodescriptors
- + " missing microdescriptor(s) from the directory authorities.");
+ logger.info("At the beginning of this execution, we were missing {} "
+ + "consensus(es), {} microdesc consensus(es), {} vote(s), {} server "
+ + "descriptor(s), {} extra-info descriptor(s), and {} "
+ + "microdescriptor(s).", oldMissingConsensuses,
+ oldMissingMicrodescConsensuses, oldMissingVotes,
+ oldMissingServerDescriptors, oldMissingExtraInfoDescriptors,
+ oldMissingMicrodescriptors);
+ logger.info("During this execution, we added {} consensus(es), {} "
+ + "microdesc consensus(es), {} vote(s), {} server descriptor(s), {} "
+ + "extra-info descriptor(s), and {} microdescriptor(s) to the missing "
+ + "list, some of which we also requested and removed from the list "
+ + "again.", this.newMissingConsensuses,
+ this.newMissingMicrodescConsensuses, this.newMissingVotes,
+ this.newMissingServerDescriptors, this.newMissingExtraInfoDescriptors,
+ this.newMissingMicrodescriptors);
+ logger.info("We requested {} consensus(es), {} microdesc consensus(es), "
+ + "{} vote(s), {} missing server descriptor(s), {} times all server "
+ + "descriptors, {} missing extra-info descriptor(s), {} times all "
+ + "extra-info descriptors, and {} missing microdescriptor(s) from the "
+ + "directory authorities.", this.requestedConsensuses,
+ this.requestedMicrodescConsensuses, this.requestedVotes,
+ this.requestedMissingServerDescriptors,
+ this.requestedAllServerDescriptors,
+ this.requestedMissingExtraInfoDescriptors,
+ this.requestedAllExtraInfoDescriptors,
+ this.requestedMissingMicrodescriptors);
StringBuilder sb = new StringBuilder();
for (String authority : this.authorities) {
sb.append(" ").append(authority).append("=").append(
this.requestsByAuthority.get(authority));
}
logger.info("We sent these numbers of requests to the directory "
- + "authorities:" + sb.toString());
- logger.info("We successfully downloaded "
- + this.downloadedConsensuses + " consensus(es), "
- + this.downloadedMicrodescConsensuses
- + " microdesc consensus(es), " + this.downloadedVotes
- + " vote(s), " + this.downloadedMissingServerDescriptors
- + " missing server descriptor(s), "
- + this.downloadedAllServerDescriptors
- + " server descriptor(s) when downloading all descriptors, "
- + this.downloadedMissingExtraInfoDescriptors + " missing "
- + "extra-info descriptor(s), "
- + this.downloadedAllExtraInfoDescriptors + " extra-info "
- + "descriptor(s) when downloading all descriptors, and "
- + this.downloadedMissingMicrodescriptors
- + " missing microdescriptor(s).");
- logger.info("At the end of this execution, we are missing "
- + missingConsensuses + " consensus(es), "
- + missingMicrodescConsensuses + " microdesc consensus(es), "
- + missingVotes + " vote(s), " + missingServerDescriptors
- + " server descriptor(s), " + missingExtraInfoDescriptors
- + " extra-info descriptor(s), and "
- + this.missingMicrodescriptors.size()
- + " microdescriptor(s), some of which we may try in the next "
- + "execution.");
+ + "authorities:{}", sb.toString());
+ logger.info("We successfully downloaded {} consensus(es), {} microdesc "
+ + "consensus(es), {} vote(s), {} missing server descriptor(s), {} "
+ + "server descriptor(s) when downloading all descriptors, {} missing "
+ + "extra-info descriptor(s), {} extra-info descriptor(s) when "
+ + "downloading all descriptors, and {} missing microdescriptor(s).",
+ this.downloadedConsensuses, this.downloadedMicrodescConsensuses,
+ this.downloadedVotes, this.downloadedMissingServerDescriptors,
+ this.downloadedAllServerDescriptors,
+ this.downloadedMissingExtraInfoDescriptors,
+ this.downloadedAllExtraInfoDescriptors,
+ this.downloadedMissingMicrodescriptors);
+ logger.info("At the end of this execution, we are missing {} "
+ + "consensus(es), {} microdesc consensus(es), {} vote(s), {} server "
+ + "descriptor(s), {} extra-info descriptor(s), and {} "
+ + "microdescriptor(s), some of which we may try in the next execution.",
+ missingConsensuses, missingMicrodescConsensuses, missingVotes,
+ missingServerDescriptors, missingExtraInfoDescriptors,
+ this.missingMicrodescriptors.size());
}
}
diff --git a/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorParser.java b/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorParser.java
index 664b566..59b9fb0 100644
--- a/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorParser.java
+++ b/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorParser.java
@@ -152,8 +152,8 @@ public class RelayDescriptorParser {
+ lastRelayIdentity + "," + serverDesc);
serverDescriptorDigests.add(serverDesc);
} else {
- logger.warn("Could not parse r line '"
- + line + "' in descriptor. Skipping.");
+ logger.warn("Could not parse r line '{}' in descriptor. "
+ + "Skipping.", line);
break;
}
} else if (line.startsWith("m ")) {
@@ -169,8 +169,8 @@ public class RelayDescriptorParser {
} else if (parts.length != 3
|| !parts[2].startsWith("sha256=")
|| parts[2].length() != 50) {
- logger.warn("Could not parse m line '"
- + line + "' in descriptor. Skipping.");
+ logger.warn("Could not parse m line '{}' in descriptor. "
+ + "Skipping.", line);
break;
}
}
diff --git a/src/main/java/org/torproject/metrics/collector/webstats/SanitizeWeblogs.java b/src/main/java/org/torproject/metrics/collector/webstats/SanitizeWeblogs.java
index 84f4f9e..0e83598 100644
--- a/src/main/java/org/torproject/metrics/collector/webstats/SanitizeWeblogs.java
+++ b/src/main/java/org/torproject/metrics/collector/webstats/SanitizeWeblogs.java
@@ -100,7 +100,7 @@ public class SanitizeWeblogs extends CollecTorMain {
PersistenceUtils.cleanDirectory(this.config.getPath(Key.RecentPath));
}
} catch (Exception e) {
- log.error("Cannot sanitize web-logs: " + e.getMessage(), e);
+ log.error("Cannot sanitize web-logs: {}", e.getMessage(), e);
throw new RuntimeException(e);
}
}
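A minimal sketch of the parameterized logging style applied throughout this commit, assuming an SLF4J logger as the {} placeholders suggest (the class and messages below are made up for illustration): each {} is substituted with the matching argument, and a trailing Throwable is logged with its stack trace instead of being treated as a placeholder value, so string concatenation in log calls becomes unnecessary.
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ParameterizedLoggingSketch {
  private static final Logger logger =
      LoggerFactory.getLogger(ParameterizedLoggingSketch.class);
  public static void main(String[] args) {
    int missingConsensuses = 2;
    int missingVotes = 9;
    // Placeholders defer message formatting until the level is enabled.
    logger.info("We are missing {} consensus(es) and {} vote(s).",
        missingConsensuses, missingVotes);
    try {
      throw new IllegalStateException("cannot sanitize web-logs");
    } catch (Exception e) {
      // One placeholder, two extra arguments: the first fills the {},
      // and the trailing Throwable is logged with its stack trace.
      logger.error("Cannot sanitize web-logs: {}", e.getMessage(), e);
    }
  }
}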
commit 4f120ba1c330da455cf0d0b12be4e6231fe196cd
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Tue Aug 21 09:52:58 2018 +0200
Make a couple JavaDoc fixes.
---
.../java/org/torproject/metrics/collector/Main.java | 2 +-
.../metrics/collector/conf/Configuration.java | 2 +-
.../metrics/collector/cron/CollecTorMain.java | 4 +++-
.../collector/persist/DescriptorPersistence.java | 14 ++++++++------
.../metrics/collector/persist/package-info.java | 2 +-
.../relaydescs/RelayDescriptorDownloader.java | 20 ++++++++++----------
.../metrics/collector/sync/SyncPersistence.java | 10 +++++-----
.../metrics/collector/sync/package-info.java | 4 ++--
.../metrics/collector/webstats/SanitizeWeblogs.java | 2 +-
9 files changed, 32 insertions(+), 28 deletions(-)
diff --git a/src/main/java/org/torproject/metrics/collector/Main.java b/src/main/java/org/torproject/metrics/collector/Main.java
index 1e186d4..6230e36 100644
--- a/src/main/java/org/torproject/metrics/collector/Main.java
+++ b/src/main/java/org/torproject/metrics/collector/Main.java
@@ -32,7 +32,7 @@ import java.util.Map;
* <br>
* Run without arguments in order to read the usage information, i.e.
* <br>
- * <code>java -jar collector.jar</code>
+ * {@code java -jar collector.jar}
*/
public class Main {
diff --git a/src/main/java/org/torproject/metrics/collector/conf/Configuration.java b/src/main/java/org/torproject/metrics/collector/conf/Configuration.java
index f797947..69d3bcd 100644
--- a/src/main/java/org/torproject/metrics/collector/conf/Configuration.java
+++ b/src/main/java/org/torproject/metrics/collector/conf/Configuration.java
@@ -204,7 +204,7 @@ public class Configuration extends Observable implements Cloneable {
/**
* Parse an integer property and translate the String
- * <code>"inf"</code> into Integer.MAX_VALUE.
+ * {@code "inf"} into Integer.MAX_VALUE.
* Verifies that this enum is a Key for an integer value.
*/
public int getInt(Key key) throws ConfigurationException {
diff --git a/src/main/java/org/torproject/metrics/collector/cron/CollecTorMain.java b/src/main/java/org/torproject/metrics/collector/cron/CollecTorMain.java
index 87cb304..cd8e0ee 100644
--- a/src/main/java/org/torproject/metrics/collector/cron/CollecTorMain.java
+++ b/src/main/java/org/torproject/metrics/collector/cron/CollecTorMain.java
@@ -95,7 +95,9 @@ public abstract class CollecTorMain extends SyncManager
&& config.getSourceTypeSet(Key.valueOf(key)).size() == 1;
}
- /** Wrapper for <code>run</code>. */
+ /**
+ * Wrapper for {@code run}.
+ */
@Override
public final Object call() {
run();
diff --git a/src/main/java/org/torproject/metrics/collector/persist/DescriptorPersistence.java b/src/main/java/org/torproject/metrics/collector/persist/DescriptorPersistence.java
index ea3d67d..bd24c81 100644
--- a/src/main/java/org/torproject/metrics/collector/persist/DescriptorPersistence.java
+++ b/src/main/java/org/torproject/metrics/collector/persist/DescriptorPersistence.java
@@ -34,7 +34,9 @@ public abstract class DescriptorPersistence<T extends Descriptor> {
protected String storagePath;
protected String recentPath;
- /** Initializes the paths for storing descriptors of type <code>T</code>. */
+ /**
+ * Initializes the paths for storing descriptors of type {@code T}.
+ */
protected DescriptorPersistence(T desc, byte[] defaultAnnotation) {
this.desc = desc;
List<String> annotations = desc.getAnnotations();
@@ -51,7 +53,7 @@ public abstract class DescriptorPersistence<T extends Descriptor> {
/** Stores the descriptor to all locations.
* First attempt to store the 'out' path, if that works store to 'recent'.
- * Returns <code>true</code>, if both were written. */
+ * Returns {@code true}, if both were written. */
public boolean storeAll(String recentRoot, String outRoot) {
return storeAll(recentRoot, outRoot, StandardOpenOption.APPEND,
StandardOpenOption.CREATE_NEW);
@@ -59,7 +61,7 @@ public abstract class DescriptorPersistence<T extends Descriptor> {
/** Stores the descriptor to all locations.
* First attempt to store the 'out' path, if that works store to 'recent'.
- * Returns <code>true</code>, if both were written. */
+ * Returns {@code true}, if both were written. */
public boolean storeAll(String recentRoot, String outRoot,
StandardOpenOption optionRecent, StandardOpenOption optionOut) {
if (storeOut(outRoot, optionOut)) {
@@ -76,7 +78,7 @@ public abstract class DescriptorPersistence<T extends Descriptor> {
/** Stores the descriptor in recent.
* Creates, replaces, or appends according to the given option.
- * Returns <code>true</code>, if the file was written. */
+ * Returns {@code true}, if the file was written. */
public boolean storeRecent(String recentRoot, StandardOpenOption option) {
return PersistenceUtils.storeToFileSystem(annotation,
desc.getRawDescriptorBytes(), Paths.get(recentRoot, getRecentPath()),
@@ -85,14 +87,14 @@ public abstract class DescriptorPersistence<T extends Descriptor> {
/** Stores the descriptor in out (i.e. internal storage).
* Only writes, if the file doesn't exist yet.
- * Returns <code>true</code>, if the file was written. */
+ * Returns {@code true}, if the file was written. */
public boolean storeOut(String outRoot) {
return storeOut(outRoot, StandardOpenOption.CREATE_NEW);
}
/** Stores the descriptor in out (i.e. internal storage).
* Creates, replaces, or appends according to the given option.
- * Returns <code>true</code>, if the file was written. */
+ * Returns {@code true}, if the file was written. */
public boolean storeOut(String outRoot, StandardOpenOption option) {
return PersistenceUtils.storeToFileSystem(annotation,
desc.getRawDescriptorBytes(), Paths.get(outRoot, getStoragePath()),
diff --git a/src/main/java/org/torproject/metrics/collector/persist/package-info.java b/src/main/java/org/torproject/metrics/collector/persist/package-info.java
index d3f6d2f..11d3939 100644
--- a/src/main/java/org/torproject/metrics/collector/persist/package-info.java
+++ b/src/main/java/org/torproject/metrics/collector/persist/package-info.java
@@ -7,5 +7,5 @@ package org.torproject.metrics.collector.persist;
* simply determine the two storage paths based on the descriptor
* and further parameters like acquisition time.
* <p>All special persistence classes extend
- * <code>DescriptorPersistence</code>.</p>
+ * {@code DescriptorPersistence}.</p>
*/
diff --git a/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorDownloader.java b/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorDownloader.java
index f876319..446b6a7 100644
--- a/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorDownloader.java
+++ b/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorDownloader.java
@@ -38,7 +38,7 @@ import java.util.zip.InflaterInputStream;
/**
* Downloads relay descriptors from the directory authorities via HTTP.
* Keeps a list of missing descriptors that gets updated by parse results
- * from <code>RelayDescriptorParser</code> and downloads all missing
+ * from {@code RelayDescriptorParser} and downloads all missing
* descriptors that have been published in the last 24 hours. Also
* downloads all server and extra-info descriptors known to a directory
* authority at most once a day.
@@ -105,7 +105,7 @@ public class RelayDescriptorDownloader {
private Map<String, String> lastDownloadedAllDescriptors;
/**
- * <code>RelayDescriptorParser</code> that we will hand over the
+ * {@code RelayDescriptorParser} that we will hand over the
* downloaded descriptors for parsing.
*/
private RelayDescriptorParser rdp;
@@ -286,9 +286,9 @@ public class RelayDescriptorDownloader {
/**
* Initializes this class, including reading in missing descriptors from
- * <code>stats/missing-relay-descriptors</code> and the times when we
+ * {@code stats/missing-relay-descriptors} and the times when we
* last downloaded all server and extra-info descriptors from
- * <code>stats/last-downloaded-all-descriptors</code>.
+ * {@code stats/last-downloaded-all-descriptors}.
*/
public RelayDescriptorDownloader(RelayDescriptorParser rdp,
String[] authorities, String[] authorityFingerprints,
@@ -472,8 +472,8 @@ public class RelayDescriptorDownloader {
/**
* We have parsed a consensus. Take this consensus off the missing list
- * and add the votes created by the given <code>authorities</code> and
- * the <code>serverDescriptors</code> which are in the format
+ * and add the votes created by the given {@code authorities} and
+ * the {@code serverDescriptors} which are in the format
* "<published>,<relayid>,<descid>" to that list.
*/
public void haveParsedConsensus(String validAfter,
@@ -510,7 +510,7 @@ public class RelayDescriptorDownloader {
/**
* We have parsed a microdesc consensus. Take this microdesc consensus off the
- * missing list and add the <code>microdescriptors</code> which are in the
+ * missing list and add the {@code microdescriptors} which are in the
* format "<validafter>,<relayid>,<descid>" to that list.
*/
public void haveParsedMicrodescConsensus(String validAfter,
@@ -568,7 +568,7 @@ public class RelayDescriptorDownloader {
/**
* We have parsed a vote. Take this vote off the missing list and add
- * the <code>serverDescriptors</code> which are in the format
+ * the {@code serverDescriptors} which are in the format
* "<published>,<relayid>,<descid>" to that list.
*/
public void haveParsedVote(String validAfter, String fingerprint,
@@ -847,9 +847,9 @@ public class RelayDescriptorDownloader {
/**
* Attempts to download one or more descriptors identified by a resource
* string from a directory authority and passes the returned
- * descriptor(s) to the <code>RelayDescriptorParser</code> upon success.
+ * descriptor(s) to the {@code RelayDescriptorParser} upon success.
* Returns the number of descriptors contained in the reply. Throws an
- * <code>IOException</code> if something goes wrong while downloading.
+ * {@code IOException} if something goes wrong while downloading.
*/
private int downloadResourceFromAuthority(String authority,
String resource) throws IOException {
diff --git a/src/main/java/org/torproject/metrics/collector/sync/SyncPersistence.java b/src/main/java/org/torproject/metrics/collector/sync/SyncPersistence.java
index bbd3103..f2d3e55 100644
--- a/src/main/java/org/torproject/metrics/collector/sync/SyncPersistence.java
+++ b/src/main/java/org/torproject/metrics/collector/sync/SyncPersistence.java
@@ -58,7 +58,7 @@ public class SyncPersistence {
}
/**
- * Cleans the directory in <code>RecentPath</code> after storing descriptors.
+ * Cleans the directory in {@code RecentPath} after storing descriptors.
*/
public void cleanDirectory() {
try {
@@ -70,8 +70,8 @@ public class SyncPersistence {
/**
* Stores descriptors in main storage and recent.
- * The storage locations are taken from <code>collector.properties</code>'
- * options <code>OutputPath</code> and <code>RecentPath</code>.
+ * The storage locations are taken from {@code collector.properties}'
+ * options {@code OutputPath} and {@code RecentPath}.
*/
public void storeDescs(Iterable<Descriptor> descs, long received) {
for (Descriptor desc : descs) {
@@ -81,8 +81,8 @@ public class SyncPersistence {
/**
* Stores a descriptor in main storage and recent.
- * The storage locations are taken from <code>collector.properties</code>'
- * options <code>OutputPath</code> and <code>RecentPath</code>.
+ * The storage locations are taken from {@code collector.properties}'
+ * options {@code OutputPath} and {@code RecentPath}.
*/
public void storeDesc(Descriptor desc, long received) {
String filename = desc.getDescriptorFile().getName();
diff --git a/src/main/java/org/torproject/metrics/collector/sync/package-info.java b/src/main/java/org/torproject/metrics/collector/sync/package-info.java
index 401fb84..08a6cdd 100644
--- a/src/main/java/org/torproject/metrics/collector/sync/package-info.java
+++ b/src/main/java/org/torproject/metrics/collector/sync/package-info.java
@@ -4,10 +4,10 @@
package org.torproject.metrics.collector.sync;
/** This package coordinates syncing and merging the fetched data.
- * <p>The central class for this process is <code>SyncManager</code>, which
+ * <p>The central class for this process is {@code SyncManager}, which
* coordinates download from other instances and merging the new data
* to the local directories.</p>
* <p>Storing data to the file system is facilitated by
- * <code>SyncPersistence</code>.</p>
+ * {@code SyncPersistence}.</p>
*/
diff --git a/src/main/java/org/torproject/metrics/collector/webstats/SanitizeWeblogs.java b/src/main/java/org/torproject/metrics/collector/webstats/SanitizeWeblogs.java
index 7096832..84f4f9e 100644
--- a/src/main/java/org/torproject/metrics/collector/webstats/SanitizeWeblogs.java
+++ b/src/main/java/org/torproject/metrics/collector/webstats/SanitizeWeblogs.java
@@ -52,7 +52,7 @@ import java.util.stream.Stream;
/**
* This module processes web-logs for CollecTor according to the weblog
- * sanitation specification published on metrics.torproject.org</p>
+ * sanitation specification published on metrics.torproject.org.
*/
public class SanitizeWeblogs extends CollecTorMain {
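The {@code ...} tag substituted here is the standard Javadoc inline tag: it renders its argument in code font and does not interpret HTML, so angle brackets and quotes need no escaping, whereas the contents of <code>...</code> remain subject to HTML parsing. A small sketch with a made-up class, only to show the two forms side by side:
/**
 * Translates the string {@code "inf"} into {@code Integer.MAX_VALUE}.
 *
 * <p>With the inline tag, a generic type such as {@code List<String>}
 * needs no escaping; the HTML element form would have to be written as
 * <code>List&lt;String&gt;</code> to render the same way.</p>
 */
public final class JavadocTagSketch {
  private JavadocTagSketch() {
  }
  /** Returns {@code true} if the given value means "unbounded". */
  public static boolean isUnbounded(String value) {
    return "inf".equals(value);
  }
  public static int parse(String value) {
    return isUnbounded(value) ? Integer.MAX_VALUE : Integer.parseInt(value);
  }
}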
commit bb8c53a64dc6ac774f6ba24b8d75dc66ccbd844c
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Mon Aug 20 20:30:30 2018 +0200
Remove some more unthrown exceptions.
---
.../metrics/collector/bridgedescs/BridgeSnapshotReader.java | 5 +----
.../metrics/collector/bridgedescs/BridgeDescriptorParserTest.java | 6 +++---
2 files changed, 4 insertions(+), 7 deletions(-)
diff --git a/src/main/java/org/torproject/metrics/collector/bridgedescs/BridgeSnapshotReader.java b/src/main/java/org/torproject/metrics/collector/bridgedescs/BridgeSnapshotReader.java
index abd30ab..fccee5e 100644
--- a/src/main/java/org/torproject/metrics/collector/bridgedescs/BridgeSnapshotReader.java
+++ b/src/main/java/org/torproject/metrics/collector/bridgedescs/BridgeSnapshotReader.java
@@ -3,8 +3,6 @@
package org.torproject.metrics.collector.bridgedescs;
-import org.torproject.metrics.collector.conf.ConfigurationException;
-
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
@@ -38,8 +36,7 @@ public class BridgeSnapshotReader {
* Reads the half-hourly snapshots of bridge descriptors from Bifroest.
*/
public BridgeSnapshotReader(BridgeDescriptorParser bdp,
- File bridgeDirectoriesDir, File statsDirectory)
- throws ConfigurationException {
+ File bridgeDirectoriesDir, File statsDirectory) {
if (bdp == null || bridgeDirectoriesDir == null
|| statsDirectory == null) {
diff --git a/src/test/java/org/torproject/metrics/collector/bridgedescs/BridgeDescriptorParserTest.java b/src/test/java/org/torproject/metrics/collector/bridgedescs/BridgeDescriptorParserTest.java
index 43fad5e..4ea2e97 100644
--- a/src/test/java/org/torproject/metrics/collector/bridgedescs/BridgeDescriptorParserTest.java
+++ b/src/test/java/org/torproject/metrics/collector/bridgedescs/BridgeDescriptorParserTest.java
@@ -15,7 +15,7 @@ public class BridgeDescriptorParserTest {
}
@Test(expected = NullPointerException.class)
- public void testNullData() throws Exception {
+ public void testNullData() {
BridgeDescriptorParser bdp = new BridgeDescriptorParser(
new SanitizedBridgesWriter(new Configuration()));
bdp.parse(null, "", "");
@@ -24,7 +24,7 @@ public class BridgeDescriptorParserTest {
@Test
/* Empty data is not passed down to the sanitized writer.
* This test passes when there is no exception. */
- public void testDataEmpty() throws Exception {
+ public void testDataEmpty() {
BridgeDescriptorParser bdp = new BridgeDescriptorParser(
new SanitizedBridgesWriter(new Configuration()));
bdp.parse(new byte[]{}, null, null);
@@ -34,7 +34,7 @@ public class BridgeDescriptorParserTest {
/* The SanitizedBridgesWriter wasn't initialized sufficiently.
* Actually that should be corrected in SanitizedBridgesWriter
* at some point, but that's a bigger rewrite. */
- public void testMinimalData() throws Exception {
+ public void testMinimalData() {
BridgeDescriptorParser bdp = new BridgeDescriptorParser(
new SanitizedBridgesWriter(new Configuration()));
bdp.parse(new byte[]{0}, "2010-10-10 10:10:10", null);
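The point of dropping these declarations: a checked exception in a throws clause that the body can never throw forces every caller (and, in tests, every reader) to account for a case that cannot happen. A reduced sketch with hypothetical names, not the project's actual code:
public class UnthrownExceptionSketch {
  /* Before the cleanup the signature would have read
   *   static int parsePort(String value) throws ConfigurationException
   * even though nothing in the body throws that checked exception,
   * forcing callers into needless try/catch or throws clauses. */
  static int parsePort(String value) {
    // NumberFormatException is unchecked and needs no declaration.
    return Integer.parseInt(value);
  }
  public static void main(String[] args) {
    System.out.println(parsePort("9001"));
  }
}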
commit 1caca7c1f4786ef31207b42ed8298998c989487b
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Mon Aug 20 21:08:19 2018 +0200
Refactor ArchiveReader.
---
.../collector/relaydescs/ArchiveReader.java | 100 +++++++++++++--------
.../collector/relaydescs/ArchiveWriter.java | 3 +-
2 files changed, 66 insertions(+), 37 deletions(-)
diff --git a/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveReader.java b/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveReader.java
index 74700f7..7c59054 100644
--- a/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveReader.java
+++ b/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveReader.java
@@ -46,30 +46,52 @@ public class ArchiveReader {
private Map<String, Set<String>> microdescriptorValidAfterTimes =
new HashMap<>();
- /** Reads all descriptors from the given directory, possibly using a
- * parse history file, and passes them to the given descriptor
- * parser. */
- public ArchiveReader(RelayDescriptorParser rdp, File archivesDirectory,
- File statsDirectory, boolean keepImportHistory) {
+ private RelayDescriptorParser rdp;
+
+ private File archivesDirectory;
+
+ private boolean keepImportHistory;
+ private int parsedFiles = 0;
+
+ private int ignoredFiles = 0;
+
+ private SortedSet<String> archivesImportHistory = new TreeSet<>();
+
+ private File archivesImportHistoryFile;
+
+ /** Initializes an archive reader but without reading any descriptors yet. */
+ ArchiveReader(RelayDescriptorParser rdp, File archivesDirectory,
+ File statsDirectory, boolean keepImportHistory) {
if (rdp == null || archivesDirectory == null
|| statsDirectory == null) {
throw new IllegalArgumentException();
}
-
- rdp.setArchiveReader(this);
- int parsedFiles = 0;
- int ignoredFiles = 0;
- SortedSet<String> archivesImportHistory = new TreeSet<>();
- File archivesImportHistoryFile = new File(statsDirectory,
+ this.rdp = rdp;
+ this.rdp.setArchiveReader(this);
+ this.archivesDirectory = archivesDirectory;
+ this.keepImportHistory = keepImportHistory;
+ this.archivesImportHistoryFile = new File(statsDirectory,
"archives-import-history");
- if (keepImportHistory && archivesImportHistoryFile.exists()) {
+ }
+
+ /** Reads all descriptors from the given directory, possibly using a
+ * parse history file, and passes them to the given descriptor
+ * parser. */
+ public void readDescriptors() {
+ this.readHistoryFile();
+ this.readDescriptorFiles();
+ this.writeHistoryFile();
+ }
+
+ private void readHistoryFile() {
+ if (this.keepImportHistory && this.archivesImportHistoryFile.exists()) {
try {
BufferedReader br = new BufferedReader(new FileReader(
- archivesImportHistoryFile));
+ this.archivesImportHistoryFile));
String line;
while ((line = br.readLine()) != null) {
- archivesImportHistory.add(line);
+ this.archivesImportHistory.add(line);
}
br.close();
} catch (IOException e) {
@@ -77,11 +99,14 @@ public class ArchiveReader {
+ "history file. Skipping.", e);
}
}
- if (archivesDirectory.exists()) {
- logger.debug("Importing files in directory " + archivesDirectory
+ }
+
+ private void readDescriptorFiles() {
+ if (this.archivesDirectory.exists()) {
+ logger.debug("Importing files in directory " + this.archivesDirectory
+ "/...");
Stack<File> filesInInputDir = new Stack<>();
- filesInInputDir.add(archivesDirectory);
+ filesInInputDir.add(this.archivesDirectory);
List<File> problems = new ArrayList<>();
Set<File> filesToRetry = new HashSet<>();
while (!filesInInputDir.isEmpty()) {
@@ -91,9 +116,9 @@ public class ArchiveReader {
} else {
try {
BufferedInputStream bis;
- if (keepImportHistory
- && archivesImportHistory.contains(pop.getName())) {
- ignoredFiles++;
+ if (this.keepImportHistory
+ && this.archivesImportHistory.contains(pop.getName())) {
+ this.ignoredFiles++;
continue;
} else if (pop.getName().endsWith(".tar.bz2")) {
logger.warn("Cannot parse compressed tarball "
@@ -116,15 +141,15 @@ public class ArchiveReader {
}
bis.close();
byte[] allData = baos.toByteArray();
- boolean stored = rdp.parse(allData);
+ boolean stored = this.rdp.parse(allData);
if (!stored) {
filesToRetry.add(pop);
continue;
}
- if (keepImportHistory) {
- archivesImportHistory.add(pop.getName());
+ if (this.keepImportHistory) {
+ this.archivesImportHistory.add(pop.getName());
}
- parsedFiles++;
+ this.parsedFiles++;
} catch (IOException e) {
problems.add(pop);
if (problems.size() > 3) {
@@ -219,10 +244,10 @@ public class ArchiveReader {
}
}
}
- if (keepImportHistory) {
- archivesImportHistory.add(pop.getName());
+ if (this.keepImportHistory) {
+ this.archivesImportHistory.add(pop.getName());
}
- parsedFiles++;
+ this.parsedFiles++;
} catch (IOException e) {
problems.add(pop);
if (problems.size() > 3) {
@@ -232,10 +257,10 @@ public class ArchiveReader {
}
if (problems.isEmpty()) {
logger.debug("Finished importing files in directory "
- + archivesDirectory + "/.");
+ + this.archivesDirectory + "/.");
} else {
StringBuilder sb = new StringBuilder("Failed importing files in "
- + "directory " + archivesDirectory + "/:");
+ + "directory " + this.archivesDirectory + "/:");
int printed = 0;
for (File f : problems) {
sb.append("\n ").append(f.getAbsolutePath());
@@ -246,12 +271,15 @@ public class ArchiveReader {
}
}
}
- if (keepImportHistory) {
+ }
+
+ private void writeHistoryFile() {
+ if (this.keepImportHistory) {
try {
- archivesImportHistoryFile.getParentFile().mkdirs();
+ this.archivesImportHistoryFile.getParentFile().mkdirs();
BufferedWriter bw = new BufferedWriter(new FileWriter(
- archivesImportHistoryFile));
- for (String line : archivesImportHistory) {
+ this.archivesImportHistoryFile));
+ for (String line : this.archivesImportHistory) {
bw.write(line + "\n");
}
bw.close();
@@ -261,15 +289,15 @@ public class ArchiveReader {
}
}
logger.info("Finished importing relay descriptors from local "
- + "directory:\nParsed " + parsedFiles + ", ignored "
- + ignoredFiles + " files.");
+ + "directory:\nParsed " + this.parsedFiles + ", ignored "
+ + this.ignoredFiles + " files.");
}
/** Stores the valid-after time and microdescriptor digests of a given
* microdesc consensus, so that microdescriptors (which don't contain a
* publication time) can later be sorted into the correct month
* folders. */
- public void haveParsedMicrodescConsensus(String validAfterTime,
+ void haveParsedMicrodescConsensus(String validAfterTime,
SortedSet<String> microdescriptorDigests) {
for (String microdescriptor : microdescriptorDigests) {
if (!this.microdescriptorValidAfterTimes.containsKey(
diff --git a/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveWriter.java b/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveWriter.java
index ac3f5e3..8679439 100644
--- a/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveWriter.java
+++ b/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveWriter.java
@@ -169,7 +169,8 @@ public class ArchiveWriter extends CollecTorMain {
new ArchiveReader(rdp,
config.getPath(Key.RelayLocalOrigins).toFile(),
statsDirectory,
- config.getBool(Key.KeepDirectoryArchiveImportHistory));
+ config.getBool(Key.KeepDirectoryArchiveImportHistory))
+ .readDescriptors();
this.intermediateStats("importing relay descriptors from local "
+ "directory");
}
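Reduced to a sketch, this refactoring moves the work out of the constructor: construction only validates and stores collaborators, and an explicit readDescriptors() call runs the phases as private helpers, which the caller chains onto the constructor the way ArchiveWriter now does. Names and bodies below are simplified placeholders under that assumption, not the real implementation:
import java.io.File;
class ArchiveReaderSketch {
  private final File archivesDirectory;
  private final boolean keepImportHistory;
  private int parsedFiles = 0;
  /* The constructor no longer reads anything; it only checks and keeps
   * what it is given. */
  ArchiveReaderSketch(File archivesDirectory, boolean keepImportHistory) {
    if (archivesDirectory == null) {
      throw new IllegalArgumentException();
    }
    this.archivesDirectory = archivesDirectory;
    this.keepImportHistory = keepImportHistory;
  }
  /* The former constructor body, now an explicit call with one private
   * helper per phase. */
  void readDescriptors() {
    this.readHistoryFile();
    this.readDescriptorFiles();
    this.writeHistoryFile();
  }
  private void readHistoryFile() {
    if (!this.keepImportHistory) {
      return;
    }
    // Load previously imported file names from the history file.
  }
  private void readDescriptorFiles() {
    // Walk archivesDirectory and hand each file to the parser.
    this.parsedFiles++;
  }
  private void writeHistoryFile() {
    // Persist the updated import history, if history keeping is enabled.
  }
  public static void main(String[] args) {
    // Call site mirrors the ArchiveWriter change: construct, then read.
    new ArchiveReaderSketch(new File("archives"), true).readDescriptors();
  }
}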