tor-commits
April 2020: 25 participants, 2157 discussions
commit 77d9429797594113d2876ef5c3600d8fa37caf46
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Tue Mar 31 09:18:17 2020 +0200
Simplify logging configuration.
Implements #33549.
---
CHANGELOG.md | 3 +
src/build | 2 +-
.../org/torproject/metrics/collector/Main.java | 4 +-
.../metrics/collector/cron/ShutdownHook.java | 7 +-
.../persist/BandwidthFilePersistence.java | 7 +-
.../collector/persist/DescriptorPersistence.java | 6 -
.../collector/persist/PersistenceUtils.java | 8 +-
.../metrics/collector/persist/VotePersistence.java | 7 +-
.../metrics/collector/sync/SyncManager.java | 21 +--
.../metrics/collector/sync/SyncPersistence.java | 11 +-
.../metrics/collector/webstats/LogFileMap.java | 7 +-
.../metrics/collector/webstats/LogMetadata.java | 6 +-
.../collector/webstats/SanitizeWeblogs.java | 33 ++--
.../collector/webstats/WebServerAccessLogLine.java | 4 +-
src/main/resources/logback.xml | 167 ---------------------
15 files changed, 70 insertions(+), 223 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5606180..c284d47 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,8 @@
# Changes in version 1.1?.? - 2020-0?-??
+ * Minor changes
+ - Simplify logging configuration.
+
# Changes in version 1.14.1 - 2020-01-16
diff --git a/src/build b/src/build
index 264e498..fd85646 160000
--- a/src/build
+++ b/src/build
@@ -1 +1 @@
-Subproject commit 264e498f54a20f7d299daaf2533d043f880e6a8b
+Subproject commit fd856466bcb260f53ef69a24c102d0e49d171cc3
diff --git a/src/main/java/org/torproject/metrics/collector/Main.java b/src/main/java/org/torproject/metrics/collector/Main.java
index 3822353..3e8ec33 100644
--- a/src/main/java/org/torproject/metrics/collector/Main.java
+++ b/src/main/java/org/torproject/metrics/collector/Main.java
@@ -39,7 +39,7 @@ import java.util.Map;
*/
public class Main {
- private static final Logger log = LoggerFactory.getLogger(Main.class);
+ private static final Logger logger = LoggerFactory.getLogger(Main.class);
public static final String CONF_FILE = "collector.properties";
@@ -116,7 +116,7 @@ public class Main {
+ ") and provide at least one data source and one data sink. "
+ "Refer to the manual for more information.");
} catch (IOException e) {
- log.error("Cannot write default configuration.", e);
+ logger.error("Cannot write default configuration.", e);
throw new RuntimeException(e);
}
}
diff --git a/src/main/java/org/torproject/metrics/collector/cron/ShutdownHook.java b/src/main/java/org/torproject/metrics/collector/cron/ShutdownHook.java
index ec34a19..7e0d0be 100644
--- a/src/main/java/org/torproject/metrics/collector/cron/ShutdownHook.java
+++ b/src/main/java/org/torproject/metrics/collector/cron/ShutdownHook.java
@@ -11,7 +11,8 @@ import org.slf4j.LoggerFactory;
*/
public final class ShutdownHook extends Thread {
- private static final Logger log = LoggerFactory.getLogger(ShutdownHook.class);
+ private static final Logger logger
+ = LoggerFactory.getLogger(ShutdownHook.class);
private boolean stayAlive = true;
@@ -37,13 +38,13 @@ public final class ShutdownHook extends Thread {
@Override
public void run() {
- log.info("Shutdown in progress ... ");
+ logger.info("Shutdown in progress ... ");
Scheduler.getInstance().shutdownScheduler();
synchronized (this) {
this.stayAlive = false;
this.notify();
}
- log.info("Shutdown finished. Exiting.");
+ logger.info("Shutdown finished. Exiting.");
}
}
diff --git a/src/main/java/org/torproject/metrics/collector/persist/BandwidthFilePersistence.java b/src/main/java/org/torproject/metrics/collector/persist/BandwidthFilePersistence.java
index bbbfca5..8664ae8 100644
--- a/src/main/java/org/torproject/metrics/collector/persist/BandwidthFilePersistence.java
+++ b/src/main/java/org/torproject/metrics/collector/persist/BandwidthFilePersistence.java
@@ -7,6 +7,8 @@ import org.torproject.descriptor.BandwidthFile;
import org.torproject.metrics.collector.conf.Annotation;
import org.apache.commons.codec.digest.DigestUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.nio.file.Paths;
import java.time.LocalDateTime;
@@ -16,6 +18,9 @@ import java.time.format.DateTimeFormatter;
public class BandwidthFilePersistence
extends DescriptorPersistence<BandwidthFile> {
+ private static final Logger logger
+ = LoggerFactory.getLogger(BandwidthFilePersistence.class);
+
private static final String BANDWIDTH = "bandwidth";
private static final String BANDWIDTHS = "bandwidths";
@@ -57,7 +62,7 @@ public class BandwidthFilePersistence
System.arraycopy(bytes, start, forDigest, 0, forDigest.length);
digest = DigestUtils.sha256Hex(forDigest).toUpperCase();
} else {
- log.error("No digest calculation possible. Returning empty string.");
+ logger.error("No digest calculation possible. Returning empty string.");
}
return digest;
}
diff --git a/src/main/java/org/torproject/metrics/collector/persist/DescriptorPersistence.java b/src/main/java/org/torproject/metrics/collector/persist/DescriptorPersistence.java
index 7c648ef..a2c9bc4 100644
--- a/src/main/java/org/torproject/metrics/collector/persist/DescriptorPersistence.java
+++ b/src/main/java/org/torproject/metrics/collector/persist/DescriptorPersistence.java
@@ -5,18 +5,12 @@ package org.torproject.metrics.collector.persist;
import org.torproject.descriptor.Descriptor;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.List;
public abstract class DescriptorPersistence<T extends Descriptor> {
- protected static final Logger log = LoggerFactory.getLogger(
- DescriptorPersistence.class);
-
protected static final String BRIDGEDESCS = "bridge-descriptors";
protected static final String BRIDGEPOOLASSIGNMENTS
= "bridge-pool-assignments";
diff --git a/src/main/java/org/torproject/metrics/collector/persist/PersistenceUtils.java b/src/main/java/org/torproject/metrics/collector/persist/PersistenceUtils.java
index 72ad73a..da1403c 100644
--- a/src/main/java/org/torproject/metrics/collector/persist/PersistenceUtils.java
+++ b/src/main/java/org/torproject/metrics/collector/persist/PersistenceUtils.java
@@ -23,7 +23,7 @@ import java.util.TimeZone;
public class PersistenceUtils {
- private static final Logger log = LoggerFactory.getLogger(
+ private static final Logger logger = LoggerFactory.getLogger(
PersistenceUtils.class);
public static final String TEMPFIX = ".tmp";
@@ -55,14 +55,14 @@ public class PersistenceUtils {
}
return createOrAppend(typeAnnotation, data, tmpPath, option);
} catch (FileAlreadyExistsException faee) {
- log.debug("Already have descriptor(s) of type '{}': {}. Skipping.",
+ logger.debug("Already have descriptor(s) of type '{}': {}. Skipping.",
new String(typeAnnotation), outputPath);
} catch (IOException | SecurityException
| UnsupportedOperationException e) {
- log.warn("Could not store descriptor(s) {} of type '{}'",
+ logger.warn("Could not store descriptor(s) {} of type '{}'",
outputPath, new String(typeAnnotation), e);
} catch (Throwable th) { // anything else
- log.warn("Problem storing descriptor(s) {} of type '{}'",
+ logger.warn("Problem storing descriptor(s) {} of type '{}'",
outputPath, new String(typeAnnotation), th);
}
return false;
diff --git a/src/main/java/org/torproject/metrics/collector/persist/VotePersistence.java b/src/main/java/org/torproject/metrics/collector/persist/VotePersistence.java
index 461ca40..5973795 100644
--- a/src/main/java/org/torproject/metrics/collector/persist/VotePersistence.java
+++ b/src/main/java/org/torproject/metrics/collector/persist/VotePersistence.java
@@ -7,6 +7,8 @@ import org.torproject.descriptor.RelayNetworkStatusVote;
import org.torproject.metrics.collector.conf.Annotation;
import org.apache.commons.codec.digest.DigestUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.nio.charset.StandardCharsets;
import java.nio.file.Paths;
@@ -14,6 +16,9 @@ import java.nio.file.Paths;
public class VotePersistence
extends DescriptorPersistence<RelayNetworkStatusVote> {
+ private static final Logger logger
+ = LoggerFactory.getLogger(VotePersistence.class);
+
private static final String VOTE = "vote";
private static final String VOTES = "votes";
@@ -56,7 +61,7 @@ public class VotePersistence
System.arraycopy(bytes, start, forDigest, 0, sig - start);
digest = DigestUtils.sha1Hex(forDigest).toUpperCase();
} else {
- log.error("No digest calculation possible. Returning empty string.");
+ logger.error("No digest calculation possible. Returning empty string.");
}
return digest;
}
diff --git a/src/main/java/org/torproject/metrics/collector/sync/SyncManager.java b/src/main/java/org/torproject/metrics/collector/sync/SyncManager.java
index e42ae61..1fa1347 100644
--- a/src/main/java/org/torproject/metrics/collector/sync/SyncManager.java
+++ b/src/main/java/org/torproject/metrics/collector/sync/SyncManager.java
@@ -25,7 +25,8 @@ import java.util.Set;
public class SyncManager {
- private static final Logger log = LoggerFactory.getLogger(SyncManager.class);
+ private static final Logger logger
+ = LoggerFactory.getLogger(SyncManager.class);
public static final String SYNCORIGINS = "SyncOrigins";
private Date collectionDate;
@@ -53,12 +54,12 @@ public class SyncManager {
File storage = new File(basePath.toFile(),
marker + "-" + source.getHost());
storage.mkdirs();
- log.info("Collecting {} from {} ...", marker, source.getHost());
+ logger.info("Collecting {} from {} ...", marker, source.getHost());
descriptorCollector.collectDescriptors(source.toString(),
dirs.toArray(new String[dirs.size()]), 0L, storage, true);
- log.info("Done collecting {} from {}.", marker, source.getHost());
+ logger.info("Done collecting {} from {}.", marker, source.getHost());
} catch (Throwable th) { // catch all
- log.warn("Cannot download {} from {}.", dirs, source, th);
+ logger.warn("Cannot download {} from {}.", dirs, source, th);
}
}
}
@@ -72,7 +73,7 @@ public class SyncManager {
= new ProcessCriterium(UnparseableDescriptor.class);
for (URL source : sources) {
File base = new File(basePath.toFile(), marker + "-" + source.getHost());
- log.info("Merging {} from {} into storage ...", marker,
+ logger.info("Merging {} from {} into storage ...", marker,
source.getHost());
for (Map.Entry<String, Class<? extends Descriptor>> entry
: mapPathDesc.entrySet()) {
@@ -86,21 +87,21 @@ public class SyncManager {
"sync-history-" + source.getHost() + "-" + marker + "-"
+ histFileEnding);
descriptorReader.setHistoryFile(historyFile);
- log.info("Reading {} of type {} ... ", marker, histFileEnding);
+ logger.info("Reading {} of type {} ... ", marker, histFileEnding);
Iterator<Descriptor> descriptors
= descriptorReader.readDescriptors(descFile).iterator();
- log.info("Done reading {} of type {}.", marker, histFileEnding);
+ logger.info("Done reading {} of type {}.", marker, histFileEnding);
Criterium<Descriptor> crit = new ProcessCriterium(entry.getValue());
while (descriptors.hasNext()) {
Descriptor desc = descriptors.next();
if (unparseable.applies(desc)) {
Exception ex
= ((UnparseableDescriptor)desc).getDescriptorParseException();
- log.warn("Parsing of {} caused Exception(s). Processing anyway.",
+ logger.warn("Parsing of {} caused Exception(s). Processing anyway.",
desc.getDescriptorFile(), ex);
}
if (!crit.applies(desc)) {
- log.warn("Not processing {} in {}.", desc.getClass().getName(),
+ logger.warn("Not processing {} in {}.", desc.getClass().getName(),
desc.getDescriptorFile());
continue;
}
@@ -110,7 +111,7 @@ public class SyncManager {
persist.cleanDirectory();
descriptorReader.saveHistoryFile(historyFile);
}
- log.info("Done merging {} from {}.", marker, source.getHost());
+ logger.info("Done merging {} from {}.", marker, source.getHost());
}
}
diff --git a/src/main/java/org/torproject/metrics/collector/sync/SyncPersistence.java b/src/main/java/org/torproject/metrics/collector/sync/SyncPersistence.java
index f81e164..adffb93 100644
--- a/src/main/java/org/torproject/metrics/collector/sync/SyncPersistence.java
+++ b/src/main/java/org/torproject/metrics/collector/sync/SyncPersistence.java
@@ -48,7 +48,7 @@ import java.nio.file.Path;
/** Provides persistence for descriptors based on the descriptor type. */
public class SyncPersistence {
- private static final Logger log
+ private static final Logger logger
= LoggerFactory.getLogger(SyncPersistence.class);
private final Path recentPath;
@@ -72,7 +72,7 @@ public class SyncPersistence {
try {
PersistenceUtils.cleanDirectory(recentPath);
} catch (IOException ioe) {
- log.error("Cleaning of {} failed.", recentPath.toString(), ioe);
+ logger.error("Cleaning of {} failed.", recentPath.toString(), ioe);
}
}
@@ -126,7 +126,8 @@ public class SyncPersistence {
case "BridgeNetworkStatus": // need to infer authId from filename
String[] filenameParts = filename.split(DASH);
if (filenameParts.length < 3) {
- log.error("Invalid BridgeNetworkStatus; skipping: {}.", filename);
+ logger.error("Invalid BridgeNetworkStatus; skipping: {}.",
+ filename);
break;
}
descPersist = new StatusPersistence(
@@ -160,7 +161,7 @@ public class SyncPersistence {
descPersist = new BridgedbMetricsPersistence((BridgedbMetrics) desc);
break;
default:
- log.trace("Invalid descriptor type {} for sync-merge.",
+ logger.trace("Invalid descriptor type {} for sync-merge.",
clazz.getName());
continue;
}
@@ -171,7 +172,7 @@ public class SyncPersistence {
break;
}
if (!recognizedAndWritten) {
- log.error("Unknown descriptor type {} implementing {}.",
+ logger.error("Unknown descriptor type {} implementing {}.",
desc.getClass().getSimpleName(), desc.getClass().getInterfaces());
}
}
diff --git a/src/main/java/org/torproject/metrics/collector/webstats/LogFileMap.java b/src/main/java/org/torproject/metrics/collector/webstats/LogFileMap.java
index 5be6b50..fb39202 100644
--- a/src/main/java/org/torproject/metrics/collector/webstats/LogFileMap.java
+++ b/src/main/java/org/torproject/metrics/collector/webstats/LogFileMap.java
@@ -22,7 +22,8 @@ import java.util.TreeMap;
public class LogFileMap
extends TreeMap<String, TreeMap<String, TreeMap<LocalDate, LogMetadata>>> {
- private static final Logger log = LoggerFactory.getLogger(LogFileMap.class);
+ private static final Logger logger
+ = LoggerFactory.getLogger(LogFileMap.class);
/**
* The map to keep track of the logfiles by virtual host,
@@ -54,13 +55,13 @@ public class LogFileMap
private FileVisitResult logIfError(Path path, IOException ex) {
if (null != ex) {
- log.warn("Cannot process '{}'.", path, ex);
+ logger.warn("Cannot process '{}'.", path, ex);
}
return FileVisitResult.CONTINUE;
}
});
} catch (IOException ex) {
- log.error("Cannot read directory '{}'.", startDir, ex);
+ logger.error("Cannot read directory '{}'.", startDir, ex);
}
}
diff --git a/src/main/java/org/torproject/metrics/collector/webstats/LogMetadata.java b/src/main/java/org/torproject/metrics/collector/webstats/LogMetadata.java
index d3bf8fb..2cac619 100644
--- a/src/main/java/org/torproject/metrics/collector/webstats/LogMetadata.java
+++ b/src/main/java/org/torproject/metrics/collector/webstats/LogMetadata.java
@@ -17,7 +17,7 @@ import java.util.regex.Pattern;
public class LogMetadata {
- private static final Logger log
+ private static final Logger logger
= LoggerFactory.getLogger(LogMetadata.class);
/** The mandatory web server log descriptor file name pattern. */
@@ -67,7 +67,7 @@ public class LogMetadata {
= LocalDate.parse(mat.group(2), DateTimeFormatter.BASIC_ISO_DATE);
if (null == virtualHost || null == physicalHost || null == logDate
|| virtualHost.isEmpty() || physicalHost.isEmpty()) {
- log.debug("Non-matching file encountered: '{}/{}'.",
+ logger.debug("Non-matching file encountered: '{}/{}'.",
parentPath, file);
} else {
metadata = new LogMetadata(logPath, physicalHost, virtualHost,
@@ -77,7 +77,7 @@ public class LogMetadata {
}
} catch (Throwable ex) {
metadata = null;
- log.debug("Problem parsing path '{}'.", logPath, ex);
+ logger.debug("Problem parsing path '{}'.", logPath, ex);
}
return Optional.ofNullable(metadata);
}
diff --git a/src/main/java/org/torproject/metrics/collector/webstats/SanitizeWeblogs.java b/src/main/java/org/torproject/metrics/collector/webstats/SanitizeWeblogs.java
index 6c8a495..670f686 100644
--- a/src/main/java/org/torproject/metrics/collector/webstats/SanitizeWeblogs.java
+++ b/src/main/java/org/torproject/metrics/collector/webstats/SanitizeWeblogs.java
@@ -55,7 +55,7 @@ import java.util.stream.Stream;
*/
public class SanitizeWeblogs extends CollecTorMain {
- private static final Logger log =
+ private static final Logger logger =
LoggerFactory.getLogger(SanitizeWeblogs.class);
private static final int LIMIT = 2;
@@ -99,7 +99,7 @@ public class SanitizeWeblogs extends CollecTorMain {
Set<SourceType> sources = this.config.getSourceTypeSet(
Key.WebstatsSources);
if (sources.contains(SourceType.Local)) {
- log.info("Processing logs using batch value {}.", BATCH);
+ logger.info("Processing logs using batch value {}.", BATCH);
Map<LogMetadata, Set<LocalDate>> previouslyProcessedWebstats
= this.readProcessedWebstats();
Map<LogMetadata, Set<LocalDate>> newlyProcessedWebstats
@@ -112,7 +112,7 @@ public class SanitizeWeblogs extends CollecTorMain {
cutOffMillis);
}
} catch (Exception e) {
- log.error("Cannot sanitize web-logs: {}", e.getMessage(), e);
+ logger.error("Cannot sanitize web-logs: {}", e.getMessage(), e);
throw new RuntimeException(e);
}
}
@@ -132,9 +132,10 @@ public class SanitizeWeblogs extends CollecTorMain {
}
}
} catch (IOException e) {
- log.error("Cannot read state file {}.", this.processedWebstatsFile, e);
+ logger.error("Cannot read state file {}.", this.processedWebstatsFile,
+ e);
}
- log.debug("Read state file containing {} log files.",
+ logger.debug("Read state file containing {} log files.",
processedWebstats.size());
}
return processedWebstats;
@@ -144,14 +145,14 @@ public class SanitizeWeblogs extends CollecTorMain {
Map<LogMetadata, Set<LocalDate>> previouslyProcessedWebstats) {
Map<LogMetadata, Set<LocalDate>> newlyProcessedWebstats = new HashMap<>();
LogFileMap fileMapIn = new LogFileMap(dir);
- log.info("Found log files for {} virtual hosts.", fileMapIn.size());
+ logger.info("Found log files for {} virtual hosts.", fileMapIn.size());
for (Map.Entry<String,TreeMap<String,TreeMap<LocalDate,LogMetadata>>>
virtualEntry : fileMapIn.entrySet()) {
String virtualHost = virtualEntry.getKey();
for (Map.Entry<String, TreeMap<LocalDate, LogMetadata>> physicalEntry
: virtualEntry.getValue().entrySet()) {
String physicalHost = physicalEntry.getKey();
- log.info("Processing logs for {} on {}.", virtualHost, physicalHost);
+ logger.info("Processing logs for {} on {}.", virtualHost, physicalHost);
/* Go through current input log files for given virtual and physical
* host, and either look up contained log dates from the last execution,
* or parse files to memory now. */
@@ -231,7 +232,7 @@ public class SanitizeWeblogs extends CollecTorMain {
.add(WebServerAccessLogImpl.MARKER)
.add(date.format(DateTimeFormatter.BASIC_ISO_DATE))
.toString() + "." + FileType.XZ.name().toLowerCase();
- log.debug("Storing {}.", name);
+ logger.debug("Storing {}.", name);
Map<String, Long> retainedLines = new TreeMap<>(lineCounts);
lineCounts.clear(); // not needed anymore
try {
@@ -239,13 +240,14 @@ public class SanitizeWeblogs extends CollecTorMain {
= new WebServerAccessLogPersistence(
new WebServerAccessLogImpl(toCompressedBytes(retainedLines),
new File(name), name));
- log.debug("Storing {}.", name);
+ logger.debug("Storing {}.", name);
walp.storeOut(this.outputDirectory.toString());
walp.storeRecent(this.recentDirectory.toString());
} catch (DescriptorParseException dpe) {
- log.error("Cannot store log desriptor {}.", name, dpe);
+ logger.error("Cannot store log desriptor {}.", name, dpe);
} catch (Throwable th) { // catch all else
- log.error("Serious problem. Cannot store log desriptor {}.", name, th);
+ logger.error("Serious problem. Cannot store log desriptor {}.", name,
+ th);
}
}
@@ -327,7 +329,7 @@ public class SanitizeWeblogs extends CollecTorMain {
private Map<LocalDate, Map<String, Long>>
sanitzedLineStream(LogMetadata metadata) {
- log.debug("Processing file {}.", metadata.path);
+ logger.debug("Processing file {}.", metadata.path);
try (BufferedReader br
= new BufferedReader(new InputStreamReader(
metadata.fileType.decompress(Files.newInputStream(metadata.path))))) {
@@ -365,7 +367,7 @@ public class SanitizeWeblogs extends CollecTorMain {
.collect(groupingByConcurrent(Map.Entry::getKey,
summingLong(Map.Entry::getValue))))));
} catch (Exception ex) {
- log.debug("Skipping log-file {}.", metadata.path, ex);
+ logger.debug("Skipping log-file {}.", metadata.path, ex);
}
return Collections.emptyMap();
}
@@ -385,9 +387,10 @@ public class SanitizeWeblogs extends CollecTorMain {
}
Files.write(this.processedWebstatsFile, lines);
} catch (IOException e) {
- log.error("Cannot write state file {}.", this.processedWebstatsFile, e);
+ logger.error("Cannot write state file {}.", this.processedWebstatsFile,
+ e);
}
- log.debug("Wrote state file containing {} log files.",
+ logger.debug("Wrote state file containing {} log files.",
newlyProcessedWebstats.size());
}
}
diff --git a/src/main/java/org/torproject/metrics/collector/webstats/WebServerAccessLogLine.java b/src/main/java/org/torproject/metrics/collector/webstats/WebServerAccessLogLine.java
index 816064a..d187cf2 100644
--- a/src/main/java/org/torproject/metrics/collector/webstats/WebServerAccessLogLine.java
+++ b/src/main/java/org/torproject/metrics/collector/webstats/WebServerAccessLogLine.java
@@ -23,7 +23,7 @@ import java.util.regex.Pattern;
public class WebServerAccessLogLine implements WebServerAccessLog.Line {
- private static final Logger log = LoggerFactory
+ private static final Logger logger = LoggerFactory
.getLogger(WebServerAccessLogLine.class);
private static final String DATE_PATTERN = "dd/MMM/yyyy";
@@ -151,7 +151,7 @@ public class WebServerAccessLogLine implements WebServerAccessLog.Line {
res.valid = true;
}
} catch (Throwable th) {
- log.debug("Unmatchable line: '{}'.", line, th);
+ logger.debug("Unmatchable line: '{}'.", line, th);
return new WebServerAccessLogLine();
}
return res;
diff --git a/src/main/resources/logback.xml b/src/main/resources/logback.xml
deleted file mode 100644
index 6cb5831..0000000
--- a/src/main/resources/logback.xml
+++ /dev/null
@@ -1,167 +0,0 @@
-<configuration debug="false">
-
- <!-- a path and a prefix -->
- <property name="logfile-base" value="${LOGBASE}/collector-" />
-
- <!-- log file names -->
- <property name="fileall-logname" value="${logfile-base}all" />
- <property name="file-bridgedescs-logname" value="${logfile-base}bridgedescs" />
- <property name="file-exitlists-logname" value="${logfile-base}exitlists" />
- <property name="file-relaydescs-logname" value="${logfile-base}relaydescs" />
- <property name="file-torperf-logname" value="${logfile-base}torperf" />
- <property name="file-updateindex-logname" value="${logfile-base}updateindex" />
-
- <!-- date pattern -->
- <property name="utc-date-pattern" value="%date{ISO8601, UTC}" />
-
- <!-- appender section -->
- <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
- <encoder>
- <pattern>${utc-date-pattern} %level %logger{20}:%line %msg%n</pattern>
- </encoder>
-
- <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
- <level>WARN</level>
- </filter>
- </appender>
-
- <appender name="SHUTDOWN" class="ch.qos.logback.core.ConsoleAppender">
- <encoder>
- <pattern>${utc-date-pattern} %level %logger{20}:%line %msg%n</pattern>
- </encoder>
- </appender>
-
- <appender name="FILEALL" class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${fileall-logname}.log</file>
- <encoder>
- <pattern>${utc-date-pattern} %level %logger{20}:%line %msg%n</pattern>
- </encoder>
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <!-- rollover daily -->
- <FileNamePattern>${fileall-logname}.%d{yyyy-MM-dd}.%i.log</FileNamePattern>
- <maxHistory>10</maxHistory>
- <timeBasedFileNamingAndTriggeringPolicy
- class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
- <!-- or whenever the file size reaches 1MB -->
- <maxFileSize>1MB</maxFileSize>
- </timeBasedFileNamingAndTriggeringPolicy>
- </rollingPolicy>
- <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
- <level>INFO</level>
- </filter>
- </appender>
-
- <appender name="FILEBRIDGEDESCS" class="ch.qos.logback.core.FileAppender">
- <file>${file-bridgedescs-logname}.log</file>
- <encoder>
- <pattern>${utc-date-pattern} %level %logger{20}:%line %msg%n</pattern>
- </encoder>
-
- <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
- <level>INFO</level>
- </filter>
- </appender>
-
- <appender name="FILEEXITLISTS" class="ch.qos.logback.core.FileAppender">
- <file>${file-exitlists-logname}.log</file>
- <encoder>
- <pattern>${utc-date-pattern} %level %logger{20}:%line %msg%n</pattern>
- </encoder>
-
- <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
- <level>INFO</level>
- </filter>
- </appender>
-
- <appender name="FILERELAYDESCS" class="ch.qos.logback.core.FileAppender">
- <file>${file-relaydescs-logname}.log</file>
- <encoder>
- <pattern>${utc-date-pattern} %level %logger{20}:%line %msg%n</pattern>
- </encoder>
-
- <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
- <level>INFO</level>
- </filter>
- </appender>
-
- <appender name="FILETORPERF" class="ch.qos.logback.core.FileAppender">
- <file>${file-torperf-logname}.log</file>
- <encoder>
- <pattern>${utc-date-pattern} %level %logger{20}:%line %msg%n</pattern>
- </encoder>
-
- <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
- <level>INFO</level>
- </filter>
- </appender>
-
- <appender name="FILEUPDATEINDEX" class="ch.qos.logback.core.FileAppender">
- <file>${file-updateindex-logname}.log</file>
- <encoder>
- <pattern>${utc-date-pattern} %level %logger{20}:%line %msg%n</pattern>
- </encoder>
-
- <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
- <level>INFO</level>
- </filter>
- </appender>
-
- <!-- logger section -->
- <logger name="org.torproject.collector.bridgedescs" >
- <appender-ref ref="FILEBRIDGEDESCS" />
- </logger>
-
- <logger name="org.torproject.collector.exitlists" >
- <appender-ref ref="FILEEXITLISTS" />
- </logger>
-
- <logger name="org.torproject.collector.relaydescs" >
- <appender-ref ref="FILERELAYDESCS" />
- </logger>
-
- <logger name="org.torproject.collector.torperf" >
- <appender-ref ref="FILETORPERF" />
- </logger>
-
- <logger name="org.torproject.collector.index" level="INFO" >
- <appender-ref ref="FILEUPDATEINDEX" />
- </logger>
-
- <logger name="org.torproject.collector.Main" >
- <appender-ref ref="FILEBRIDGEDESCS" />
- <appender-ref ref="FILEEXITLISTS" />
- <appender-ref ref="FILERELAYDESCS" />
- <appender-ref ref="FILETORPERF" />
- <appender-ref ref="FILEUPDATEINDEX" />
- </logger>
-
- <logger name="org.torproject.collector.conf" >
- <appender-ref ref="FILEBRIDGEDESCS" />
- <appender-ref ref="FILEEXITLISTS" />
- <appender-ref ref="FILERELAYDESCS" />
- <appender-ref ref="FILETORPERF" />
- <appender-ref ref="FILEUPDATEINDEX" />
- </logger>
-
- <logger name="org.torproject.collector.cron" >
- <appender-ref ref="FILEBRIDGEDESCS" />
- <appender-ref ref="FILEEXITLISTS" />
- <appender-ref ref="FILERELAYDESCS" />
- <appender-ref ref="FILETORPERF" />
- <appender-ref ref="FILEUPDATEINDEX" />
- </logger>
-
- <logger name="org.torproject" >
- <appender-ref ref="CONSOLE" />
- </logger>
-
- <logger name="org.torproject.collector.cron.ShutdownHook" >
- <appender-ref ref="SHUTDOWN" />
- </logger>
-
- <root level="ALL">
- <appender-ref ref="FILEALL" />
- </root>
-
-</configuration>
-
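For context, here is a minimal sketch of the per-class SLF4J logger pattern this commit converges on; the sketch is not part of the patch, and the class name ExamplePersistence is made up. Each class now declares its own private static final field named logger, and the bundled logback.xml is gone, so deployments provide their own logging configuration instead.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/* Illustrative class (not from the patch) showing the logger pattern
 * used after this commit: one private static final logger per class. */
public class ExamplePersistence {

  private static final Logger logger
      = LoggerFactory.getLogger(ExamplePersistence.class);

  public void store(String path) {
    /* Parameterized messages avoid string concatenation when the level
     * is disabled. */
    logger.debug("Storing descriptor(s) to {}.", path);
  }
}

With the shared protected logger removed from DescriptorPersistence, messages are attributed to the concrete class that emits them rather than to the base class.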
commit 145045478f3c7aa9286b1244e28a5ab4e5728460
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Thu Jan 16 12:16:30 2020 +0100
Bump version to 1.14.1-dev.
---
CHANGELOG.md | 3 +++
build.xml | 2 +-
2 files changed, 4 insertions(+), 1 deletion(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7161100..5606180 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,6 @@
+# Changes in version 1.1?.? - 2020-0?-??
+
+
# Changes in version 1.14.1 - 2020-01-16
* Medium changes
diff --git a/build.xml b/build.xml
index 2e9e597..748351e 100644
--- a/build.xml
+++ b/build.xml
@@ -9,7 +9,7 @@
<property name="javadoc-title" value="CollecTor API Documentation"/>
<property name="implementation-title" value="CollecTor" />
- <property name="release.version" value="1.14.1" />
+ <property name="release.version" value="1.14.1-dev" />
<property name="project-main-class" value="org.torproject.metrics.collector.Main" />
<property name="name" value="collector"/>
<property name="metricslibversion" value="2.10.0" />
commit a87ce0d02f17603dad109b78dce0d1dba4ef1d7d
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Mon Mar 9 12:17:39 2020 +0100
Extend descriptorCutOff by 6 hours.
Fixes #19828.
---
CHANGELOG.md | 4 ++++
.../metrics/collector/relaydescs/RelayDescriptorDownloader.java | 6 ++++--
2 files changed, 8 insertions(+), 2 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index fa9bb16..9bbedc4 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,10 @@
- Simplify logging configuration.
- Set default locale `US` and default time zone `UTC` at the
beginning of the execution.
+ - Download missing server and extra-info descriptors that have been
+ published up to 30 hours ago: 24 hours for the maximum age of
+ descriptors to be referenced plus 6 hours for the time between
+ generating votes and processing a consensus.
# Changes in version 1.14.1 - 2020-01-16
diff --git a/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorDownloader.java b/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorDownloader.java
index 7155caa..4e6e9f6 100644
--- a/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorDownloader.java
+++ b/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorDownloader.java
@@ -183,7 +183,9 @@ public class RelayDescriptorDownloader {
/**
* Cut-off time for missing server and extra-info descriptors, formatted
* "yyyy-MM-dd HH:mm:ss". This time is initialized as the current system
- * time minus 24 hours.
+ * time minus 30 hours (24 hours for the maximum age of descriptors to be
+ * referenced plus 6 hours for the time between generating votes and
+ * processing a consensus).
*/
private String descriptorCutOff;
@@ -329,7 +331,7 @@ public class RelayDescriptorDownloader {
long now = System.currentTimeMillis();
this.currentValidAfter = format.format((now / (60L * 60L * 1000L))
* (60L * 60L * 1000L));
- this.descriptorCutOff = format.format(now - 24L * 60L * 60L * 1000L);
+ this.descriptorCutOff = format.format(now - 30L * 60L * 60L * 1000L);
this.currentTimestamp = format.format(now);
this.downloadAllDescriptorsCutOff = format.format(now
- 23L * 60L * 60L * 1000L - 30L * 60L * 1000L);
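The arithmetic behind the new cut-off is sketched below; the sketch is not part of the patch and the constant names are illustrative. Twenty-four hours for the maximum age of descriptors to be referenced plus six hours between generating votes and processing a consensus gives the 30-hour look-back window.

import java.text.SimpleDateFormat;
import java.util.Date;

/* Illustrative sketch (not part of the patch) of the 30-hour cut-off
 * computation: 24 hours of maximum reference age plus 6 hours between
 * vote generation and consensus processing. */
public class CutOffExample {

  private static final long HOUR_MILLIS = 60L * 60L * 1000L;
  private static final long MAX_REFERENCE_AGE_HOURS = 24L;
  private static final long VOTE_TO_CONSENSUS_HOURS = 6L;

  public static void main(String[] args) {
    long now = System.currentTimeMillis();
    long cutOffMillis = now
        - (MAX_REFERENCE_AGE_HOURS + VOTE_TO_CONSENSUS_HOURS) * HOUR_MILLIS;
    SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    System.out.println("descriptorCutOff = "
        + format.format(new Date(cutOffMillis)));
  }
}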
[collector/release] Set default locale US and default time zone UTC.
by karsten@torproject.org 30 Apr '20
commit 2b90d656d1b6c27f1e9f1d05a4cf747afec26353
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Wed Apr 1 12:45:07 2020 +0200
Set default locale US and default time zone UTC.
Part of #33655.
---
CHANGELOG.md | 2 ++
src/build | 2 +-
src/main/java/org/torproject/metrics/collector/Main.java | 4 ++++
.../collector/bridgedescs/SanitizedBridgesWriter.java | 4 ----
.../metrics/collector/exitlists/ExitListDownloader.java | 7 ++-----
.../metrics/collector/onionperf/OnionPerfDownloader.java | 2 --
.../metrics/collector/persist/PersistenceUtils.java | 3 ---
.../metrics/collector/relaydescs/ArchiveReader.java | 2 --
.../metrics/collector/relaydescs/ArchiveWriter.java | 14 --------------
.../collector/relaydescs/CachedRelayDescriptorReader.java | 2 --
.../metrics/collector/relaydescs/ReferenceChecker.java | 6 +-----
.../collector/relaydescs/RelayDescriptorDownloader.java | 3 ---
.../collector/relaydescs/RelayDescriptorParser.java | 3 ---
13 files changed, 10 insertions(+), 44 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index c284d47..fa9bb16 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,8 @@
* Minor changes
- Simplify logging configuration.
+ - Set default locale `US` and default time zone `UTC` at the
+ beginning of the execution.
# Changes in version 1.14.1 - 2020-01-16
diff --git a/src/build b/src/build
index fd85646..b5e1a2d 160000
--- a/src/build
+++ b/src/build
@@ -1 +1 @@
-Subproject commit fd856466bcb260f53ef69a24c102d0e49d171cc3
+Subproject commit b5e1a2d7b29e58cc0645f068a1ebf4377bf9d8b8
diff --git a/src/main/java/org/torproject/metrics/collector/Main.java b/src/main/java/org/torproject/metrics/collector/Main.java
index 3e8ec33..9a04c6e 100644
--- a/src/main/java/org/torproject/metrics/collector/Main.java
+++ b/src/main/java/org/torproject/metrics/collector/Main.java
@@ -28,7 +28,9 @@ import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.HashMap;
+import java.util.Locale;
import java.util.Map;
+import java.util.TimeZone;
/**
* Main class for starting a CollecTor instance.
@@ -71,6 +73,8 @@ public class Main {
* See class description {@link Main}.
*/
public static void main(String[] args) {
+ Locale.setDefault(Locale.US);
+ TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
try {
Path confPath;
if (args == null || args.length == 0) {
diff --git a/src/main/java/org/torproject/metrics/collector/bridgedescs/SanitizedBridgesWriter.java b/src/main/java/org/torproject/metrics/collector/bridgedescs/SanitizedBridgesWriter.java
index 6aee057..c4f783a 100644
--- a/src/main/java/org/torproject/metrics/collector/bridgedescs/SanitizedBridgesWriter.java
+++ b/src/main/java/org/torproject/metrics/collector/bridgedescs/SanitizedBridgesWriter.java
@@ -43,7 +43,6 @@ import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.Stack;
-import java.util.TimeZone;
import java.util.TreeMap;
/**
@@ -139,7 +138,6 @@ public class SanitizedBridgesWriter extends CollecTorMain {
config.getBool(Key.ReplaceIpAddressesWithHashes);
SimpleDateFormat rsyncCatFormat = new SimpleDateFormat(
"yyyy-MM-dd-HH-mm-ss");
- rsyncCatFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
this.rsyncCatString = rsyncCatFormat.format(
System.currentTimeMillis());
@@ -604,7 +602,6 @@ public class SanitizedBridgesWriter extends CollecTorMain {
* whether this status is possibly stale. */
SimpleDateFormat formatter = new SimpleDateFormat(
"yyyy-MM-dd HH:mm:ss");
- formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
if (null == mostRecentDescPublished) {
logger.warn("The bridge network status published at {}"
+ " does not contain a single entry. Please ask the bridge "
@@ -1355,7 +1352,6 @@ public class SanitizedBridgesWriter extends CollecTorMain {
private void checkStaleDescriptors() {
SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
"yyyy-MM-dd HH:mm:ss");
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
long tooOldMillis = System.currentTimeMillis() - 330L * 60L * 1000L;
try {
long maxNetworkStatusPublishedMillis =
diff --git a/src/main/java/org/torproject/metrics/collector/exitlists/ExitListDownloader.java b/src/main/java/org/torproject/metrics/collector/exitlists/ExitListDownloader.java
index 49e176b..c6b45da 100644
--- a/src/main/java/org/torproject/metrics/collector/exitlists/ExitListDownloader.java
+++ b/src/main/java/org/torproject/metrics/collector/exitlists/ExitListDownloader.java
@@ -28,7 +28,6 @@ import java.util.Arrays;
import java.util.Date;
import java.util.SortedSet;
import java.util.Stack;
-import java.util.TimeZone;
import java.util.TreeSet;
public class ExitListDownloader extends CollecTorMain {
@@ -61,9 +60,6 @@ public class ExitListDownloader extends CollecTorMain {
@Override
protected void startProcessing() throws ConfigurationException {
- SimpleDateFormat dateTimeFormat =
- new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
outputPathName = Paths.get(config.getPath(Key.OutputPath).toString(),
EXITLISTS).toString();
recentPathName = Paths.get(config.getPath(Key.RecentPath).toString(),
@@ -73,6 +69,8 @@ public class ExitListDownloader extends CollecTorMain {
logger.debug("Downloading exit list...");
StringBuilder sb = new StringBuilder();
sb.append(Annotation.ExitList.toString());
+ SimpleDateFormat dateTimeFormat =
+ new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
sb.append("Downloaded ").append(dateTimeFormat.format(downloadedDate))
.append("\n");
URL url = config.getUrl(Key.ExitlistUrl);
@@ -94,7 +92,6 @@ public class ExitListDownloader extends CollecTorMain {
SimpleDateFormat tarballFormat =
new SimpleDateFormat("yyyy/MM/dd/yyyy-MM-dd-HH-mm-ss");
- tarballFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
File tarballFile = Paths.get(outputPathName,
tarballFormat.format(downloadedDate)).toFile();
diff --git a/src/main/java/org/torproject/metrics/collector/onionperf/OnionPerfDownloader.java b/src/main/java/org/torproject/metrics/collector/onionperf/OnionPerfDownloader.java
index dc1a675..b651620 100644
--- a/src/main/java/org/torproject/metrics/collector/onionperf/OnionPerfDownloader.java
+++ b/src/main/java/org/torproject/metrics/collector/onionperf/OnionPerfDownloader.java
@@ -35,7 +35,6 @@ import java.util.Arrays;
import java.util.List;
import java.util.SortedSet;
import java.util.Stack;
-import java.util.TimeZone;
import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -156,7 +155,6 @@ public class OnionPerfDownloader extends CollecTorMain {
static {
DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd");
DATE_FORMAT.setLenient(false);
- DATE_FORMAT.setTimeZone(TimeZone.getTimeZone("UTC"));
}
private void downloadAndParseOnionPerfTpfFile(URL baseUrl, String source,
diff --git a/src/main/java/org/torproject/metrics/collector/persist/PersistenceUtils.java b/src/main/java/org/torproject/metrics/collector/persist/PersistenceUtils.java
index da1403c..c958aec 100644
--- a/src/main/java/org/torproject/metrics/collector/persist/PersistenceUtils.java
+++ b/src/main/java/org/torproject/metrics/collector/persist/PersistenceUtils.java
@@ -19,7 +19,6 @@ import java.nio.file.StandardOpenOption;
import java.nio.file.attribute.BasicFileAttributes;
import java.text.SimpleDateFormat;
import java.util.Date;
-import java.util.TimeZone;
public class PersistenceUtils {
@@ -118,7 +117,6 @@ public class PersistenceUtils {
/** Return all date-time parts as array. */
public static String[] dateTimeParts(Date dateTime) {
SimpleDateFormat printFormat = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss");
- printFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
return printFormat.format(dateTime).split("-");
}
@@ -130,7 +128,6 @@ public class PersistenceUtils {
/** Return all date-time as string. */
public static String dateTime(Date dateTime) {
SimpleDateFormat printFormat = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss");
- printFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
return printFormat.format(dateTime);
}
diff --git a/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveReader.java b/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveReader.java
index 5a80058..83ef2ea 100644
--- a/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveReader.java
+++ b/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveReader.java
@@ -31,7 +31,6 @@ import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.Stack;
-import java.util.TimeZone;
import java.util.TreeSet;
/**
@@ -201,7 +200,6 @@ public class ArchiveReader {
}
SimpleDateFormat parseFormat =
new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
- parseFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
String ascii = new String(allData, StandardCharsets.US_ASCII);
int start;
int end = -1;
diff --git a/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveWriter.java b/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveWriter.java
index e0e1623..8addd5e 100644
--- a/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveWriter.java
+++ b/src/main/java/org/torproject/metrics/collector/relaydescs/ArchiveWriter.java
@@ -47,7 +47,6 @@ import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.Stack;
-import java.util.TimeZone;
import java.util.TreeMap;
import java.util.TreeSet;
@@ -146,7 +145,6 @@ public class ArchiveWriter extends CollecTorMain {
RELAY_DESCRIPTORS).toString();
SimpleDateFormat rsyncCatFormat = new SimpleDateFormat(
"yyyy-MM-dd-HH-mm-ss");
- rsyncCatFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
this.rsyncCatString = rsyncCatFormat.format(
System.currentTimeMillis());
this.descriptorParser =
@@ -225,7 +223,6 @@ public class ArchiveWriter extends CollecTorMain {
private void loadDescriptorDigests() {
SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
"yyyy-MM-dd HH:mm:ss");
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
try {
if (this.storedServerDescriptorsFile.exists()) {
BufferedReader br = new BufferedReader(new FileReader(
@@ -339,7 +336,6 @@ public class ArchiveWriter extends CollecTorMain {
+ "descriptors:");
SimpleDateFormat dateTimeFormat =
new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
Map<String, String> knownServerDescriptors = new HashMap<>();
for (Map<String, String> descriptors :
this.storedServerDescriptors.values()) {
@@ -513,7 +509,6 @@ public class ArchiveWriter extends CollecTorMain {
private void checkStaledescriptors() {
SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
"yyyy-MM-dd HH:mm:ss");
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
long tooOldMillis = this.now - 330L * 60L * 1000L;
if (!this.storedConsensuses.isEmpty()
&& this.storedConsensuses.lastKey() < tooOldMillis) {
@@ -604,7 +599,6 @@ public class ArchiveWriter extends CollecTorMain {
private void saveDescriptorDigests() {
SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
"yyyy-MM-dd HH:mm:ss");
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
try {
if (!this.storedServerDescriptorsFile.getParentFile().exists()
&& !this.storedServerDescriptorsFile.getParentFile().mkdirs()) {
@@ -671,7 +665,6 @@ public class ArchiveWriter extends CollecTorMain {
SortedSet<String> serverDescriptorDigests) {
SimpleDateFormat printFormat = new SimpleDateFormat(
"yyyy/MM/dd/yyyy-MM-dd-HH-mm-ss");
- printFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
File tarballFile = Paths.get(this.outputDirectory, "consensus",
printFormat.format(new Date(validAfter)) + "-consensus").toFile();
boolean tarballFileExistedBefore = tarballFile.exists();
@@ -694,10 +687,8 @@ public class ArchiveWriter extends CollecTorMain {
SortedSet<String> microdescriptorDigests) {
SimpleDateFormat yearMonthDirectoryFormat = new SimpleDateFormat(
"yyyy/MM");
- yearMonthDirectoryFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
SimpleDateFormat dayDirectoryFileFormat = new SimpleDateFormat(
"dd/yyyy-MM-dd-HH-mm-ss");
- dayDirectoryFileFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
File tarballFile = Paths.get(this.outputDirectory, MICRODESC,
yearMonthDirectoryFormat.format(validAfter), CONSENSUS_MICRODESC,
dayDirectoryFileFormat.format(validAfter)
@@ -724,7 +715,6 @@ public class ArchiveWriter extends CollecTorMain {
SortedSet<String> serverDescriptorDigests) {
SimpleDateFormat printFormat = new SimpleDateFormat(
"yyyy/MM/dd/yyyy-MM-dd-HH-mm-ss");
- printFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
File tarballFile = Paths.get(this.outputDirectory, "vote",
printFormat.format(new Date(validAfter)) + "-vote-"
+ fingerprint + "-" + digest).toFile();
@@ -778,7 +768,6 @@ public class ArchiveWriter extends CollecTorMain {
long published) {
SimpleDateFormat printFormat = new SimpleDateFormat(
"yyyy-MM-dd-HH-mm-ss");
- printFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
File tarballFile = Paths.get(this.outputDirectory, "certs",
fingerprint + "-" + printFormat.format(new Date(published))).toFile();
File[] outputFiles = new File[] { tarballFile };
@@ -792,7 +781,6 @@ public class ArchiveWriter extends CollecTorMain {
public void storeServerDescriptor(byte[] data, String digest,
long published, String extraInfoDigest) {
SimpleDateFormat printFormat = new SimpleDateFormat("yyyy/MM/");
- printFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
File tarballFile = Paths.get(this.outputDirectory,
"server-descriptor", printFormat.format(new Date(published)),
digest.substring(0, 1), digest.substring(1, 2), digest).toFile();
@@ -818,7 +806,6 @@ public class ArchiveWriter extends CollecTorMain {
public void storeExtraInfoDescriptor(byte[] data,
String extraInfoDigest, long published) {
SimpleDateFormat descriptorFormat = new SimpleDateFormat("yyyy/MM/");
- descriptorFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
File tarballFile = Paths.get(this.outputDirectory, "extra-info",
descriptorFormat.format(new Date(published)),
extraInfoDigest.substring(0, 1),
@@ -850,7 +837,6 @@ public class ArchiveWriter extends CollecTorMain {
* called twice to store the same microdescriptor in two different
* valid-after months. */
SimpleDateFormat descriptorFormat = new SimpleDateFormat("yyyy/MM/");
- descriptorFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
File tarballFile = Paths.get(this.outputDirectory, MICRODESC,
descriptorFormat.format(validAfter), MICRO,
microdescriptorDigest.substring(0, 1),
diff --git a/src/main/java/org/torproject/metrics/collector/relaydescs/CachedRelayDescriptorReader.java b/src/main/java/org/torproject/metrics/collector/relaydescs/CachedRelayDescriptorReader.java
index 21a3041..0c0ccab 100644
--- a/src/main/java/org/torproject/metrics/collector/relaydescs/CachedRelayDescriptorReader.java
+++ b/src/main/java/org/torproject/metrics/collector/relaydescs/CachedRelayDescriptorReader.java
@@ -26,7 +26,6 @@ import java.util.HashSet;
import java.util.Set;
import java.util.SortedSet;
import java.util.Stack;
-import java.util.TimeZone;
import java.util.TreeSet;
/**
@@ -139,7 +138,6 @@ public class CachedRelayDescriptorReader {
.append(line.substring("valid-after ".length()));
SimpleDateFormat dateTimeFormat =
new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
if (dateTimeFormat.parse(line.substring("valid-after "
.length())).getTime() < System.currentTimeMillis()
- 6L * 60L * 60L * 1000L) {
diff --git a/src/main/java/org/torproject/metrics/collector/relaydescs/ReferenceChecker.java b/src/main/java/org/torproject/metrics/collector/relaydescs/ReferenceChecker.java
index 5ff77e7..4cc03eb 100644
--- a/src/main/java/org/torproject/metrics/collector/relaydescs/ReferenceChecker.java
+++ b/src/main/java/org/torproject/metrics/collector/relaydescs/ReferenceChecker.java
@@ -28,10 +28,8 @@ import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.HashSet;
-import java.util.Locale;
import java.util.Set;
import java.util.SortedSet;
-import java.util.TimeZone;
import java.util.TreeSet;
public class ReferenceChecker {
@@ -56,10 +54,8 @@ public class ReferenceChecker {
private static DateFormat dateTimeFormat;
static {
- dateTimeFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'",
- Locale.US);
+ dateTimeFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
dateTimeFormat.setLenient(false);
- dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
}
private static final long ONE_HOUR = 60L * 60L * 1000L;
diff --git a/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorDownloader.java b/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorDownloader.java
index b0fcebb..7155caa 100644
--- a/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorDownloader.java
+++ b/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorDownloader.java
@@ -29,7 +29,6 @@ import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
-import java.util.TimeZone;
import java.util.TreeMap;
import java.util.TreeSet;
@@ -327,7 +326,6 @@ public class RelayDescriptorDownloader {
* list and the list of authorities to download all server and
* extra-info descriptors from. */
SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
- format.setTimeZone(TimeZone.getTimeZone("UTC"));
long now = System.currentTimeMillis();
this.currentValidAfter = format.format((now / (60L * 60L * 1000L))
* (60L * 60L * 1000L));
@@ -924,7 +922,6 @@ public class RelayDescriptorDownloader {
* pretty badly. */
SimpleDateFormat parseFormat =
new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
- parseFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
String ascii = new String(allData, StandardCharsets.US_ASCII);
int start;
int end = -1;
diff --git a/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorParser.java b/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorParser.java
index 3905458..53f205a 100644
--- a/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorParser.java
+++ b/src/main/java/org/torproject/metrics/collector/relaydescs/RelayDescriptorParser.java
@@ -21,7 +21,6 @@ import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeParseException;
import java.util.SortedSet;
-import java.util.TimeZone;
import java.util.TreeSet;
/**
@@ -61,7 +60,6 @@ public class RelayDescriptorParser {
this.aw = aw;
this.dateTimeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
- this.dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
}
public void setRelayDescriptorDownloader(
@@ -96,7 +94,6 @@ public class RelayDescriptorParser {
}
SimpleDateFormat parseFormat =
new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
- parseFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
if (line.startsWith("network-status-version 3")) {
String statusType = "consensus";
if (line.equals("network-status-version 3 microdesc")) {
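A minimal sketch of the effect of this change follows; it is not part of the patch, and the class name is made up. Once the JVM-wide defaults are set at the top of main(), every SimpleDateFormat constructed afterwards uses UTC and the US locale, which is why the per-formatter setTimeZone() calls could be dropped throughout.

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;

/* Illustrative sketch (not from the patch): with the JVM defaults set
 * once at startup, new SimpleDateFormat instances format in UTC without
 * an explicit setTimeZone() call. */
public class DefaultsExample {

  public static void main(String[] args) {
    Locale.setDefault(Locale.US);
    TimeZone.setDefault(TimeZone.getTimeZone("UTC"));

    SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    /* No format.setTimeZone(...) needed: the default time zone is UTC. */
    System.out.println(format.format(new Date()));
  }
}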
commit 0f5536ed68c79be50a9b1e326356008f7ffaefff
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Thu Apr 30 17:29:35 2020 +0200
Archive OnionPerf analysis .json files.
Implements #34072.
---
CHANGELOG.md | 5 +
build.xml | 2 +-
.../collector/onionperf/OnionPerfDownloader.java | 218 +++++++++++++++++----
src/main/resources/collector.properties | 2 +-
src/main/resources/create-tarballs.sh | 7 +
5 files changed, 195 insertions(+), 39 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9bbedc4..a55a0fc 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,10 @@
# Changes in version 1.1?.? - 2020-0?-??
+ * Medium changes
+ - Update to metrics-lib 2.12.1.
+ - Download OnionPerf analysis .json files in addition to .tpf
+ files.
+
* Minor changes
- Simplify logging configuration.
- Set default locale `US` and default time zone `UTC` at the
diff --git a/build.xml b/build.xml
index 748351e..a9988f5 100644
--- a/build.xml
+++ b/build.xml
@@ -12,7 +12,7 @@
<property name="release.version" value="1.14.1-dev" />
<property name="project-main-class" value="org.torproject.metrics.collector.Main" />
<property name="name" value="collector"/>
- <property name="metricslibversion" value="2.10.0" />
+ <property name="metricslibversion" value="2.12.1" />
<property name="jarincludes" value="collector.properties logback.xml" />
<patternset id="runtime" >
diff --git a/src/main/java/org/torproject/metrics/collector/onionperf/OnionPerfDownloader.java b/src/main/java/org/torproject/metrics/collector/onionperf/OnionPerfDownloader.java
index b651620..d22ac0b 100644
--- a/src/main/java/org/torproject/metrics/collector/onionperf/OnionPerfDownloader.java
+++ b/src/main/java/org/torproject/metrics/collector/onionperf/OnionPerfDownloader.java
@@ -13,6 +13,7 @@ import org.torproject.metrics.collector.conf.Key;
import org.torproject.metrics.collector.cron.CollecTorMain;
import org.torproject.metrics.collector.downloader.Downloader;
+import org.apache.commons.compress.utils.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -32,14 +33,16 @@ import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import java.util.SortedSet;
import java.util.Stack;
import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-/** Download download .tpf files from OnionPerf hosts. */
+/** Download OnionPerf files from OnionPerf hosts. */
public class OnionPerfDownloader extends CollecTorMain {
private static final Logger logger = LoggerFactory.getLogger(
@@ -47,6 +50,8 @@ public class OnionPerfDownloader extends CollecTorMain {
private static final String TORPERF = "torperf";
+ private static final String ONIONPERF = "onionperf";
+
/** Instantiate the OnionPerf module using the given configuration. */
public OnionPerfDownloader(Configuration config) {
super(config);
@@ -54,21 +59,25 @@ public class OnionPerfDownloader extends CollecTorMain {
}
/** File containing the download history, which is necessary, because
- * OnionPerf does not delete older .tpf files, but which enables us to do
- * so. */
+ * OnionPerf does not delete older files, but which enables us to do so. */
private File onionPerfDownloadedFile;
- /** Full URLs of .tpf files downloaded in the current or in past
- * executions. */
- private SortedSet<String> downloadedTpfFiles = new TreeSet<>();
+ /** Full URLs of files downloaded in the current or in past executions. */
+ private SortedSet<String> downloadedFiles = new TreeSet<>();
/** Base URLs of configured OnionPerf hosts. */
private URL[] onionPerfHosts = null;
- /** Directory for storing archived .tpf files. */
+ /** Relative URLs of available .tpf files by base URL. */
+ private Map<URL, List<String>> tpfFileUrls = new HashMap<>();
+
+ /** Relative URLs of available OnionPerf analysis files by base URL. */
+ private Map<URL, List<String>> onionPerfAnalysisFileUrls = new HashMap<>();
+
+ /** Directory for storing archived files. */
private File archiveDirectory = null;
- /** Directory for storing recent .tpf files. */
+ /** Directory for storing recent files. */
private File recentDirectory = null;
@Override
@@ -87,19 +96,17 @@ public class OnionPerfDownloader extends CollecTorMain {
new File(config.getPath(Key.StatsPath).toFile(),
"onionperf-downloaded");
this.onionPerfHosts = config.getUrlArray(Key.OnionPerfHosts);
- this.readDownloadedOnionPerfTpfFiles();
- this.archiveDirectory = new File(config.getPath(Key.OutputPath).toFile(),
- TORPERF);
- this.recentDirectory = new File(config.getPath(Key.RecentPath).toFile(),
- TORPERF);
+ this.readDownloadedOnionPerfFiles();
+ this.archiveDirectory = config.getPath(Key.OutputPath).toFile();
+ this.recentDirectory = config.getPath(Key.RecentPath).toFile();
for (URL baseUrl : this.onionPerfHosts) {
this.downloadFromOnionPerfHost(baseUrl);
}
- this.writeDownloadedOnionPerfTpfFiles();
+ this.writeDownloadedOnionPerfFiles();
this.cleanUpRsyncDirectory();
}
- private void readDownloadedOnionPerfTpfFiles() {
+ private void readDownloadedOnionPerfFiles() {
if (!this.onionPerfDownloadedFile.exists()) {
return;
}
@@ -107,47 +114,69 @@ public class OnionPerfDownloader extends CollecTorMain {
this.onionPerfDownloadedFile))) {
String line;
while ((line = br.readLine()) != null) {
- this.downloadedTpfFiles.add(line);
+ this.downloadedFiles.add(line);
}
} catch (IOException e) {
logger.info("Unable to read download history file '{}'. Ignoring "
- + "download history and downloading all available .tpf files.",
+ + "download history and downloading all available files.",
this.onionPerfDownloadedFile.getAbsolutePath());
- this.downloadedTpfFiles.clear();
+ this.downloadedFiles.clear();
}
}
private void downloadFromOnionPerfHost(URL baseUrl) {
logger.info("Downloading from OnionPerf host {}", baseUrl);
- List<String> tpfFileNames =
- this.downloadOnionPerfDirectoryListing(baseUrl);
+ this.downloadOnionPerfDirectoryListing(baseUrl);
String source = baseUrl.getHost().split("\\.")[0];
- for (String tpfFileName : tpfFileNames) {
- this.downloadAndParseOnionPerfTpfFile(baseUrl, source, tpfFileName);
+ if (this.tpfFileUrls.containsKey(baseUrl)) {
+ for (String tpfFileName : this.tpfFileUrls.get(baseUrl)) {
+ this.downloadAndParseOnionPerfTpfFile(baseUrl, source, tpfFileName);
+ }
+ }
+ if (this.onionPerfAnalysisFileUrls.containsKey(baseUrl)) {
+ for (String onionPerfAnalysisFileName
+ : this.onionPerfAnalysisFileUrls.get(baseUrl)) {
+ this.downloadAndParseOnionPerfAnalysisFile(baseUrl, source,
+ onionPerfAnalysisFileName);
+ }
}
}
- /** Pattern for links contained in directory listings. */
+ /** Patterns for links contained in directory listings. */
private static final Pattern TPF_FILE_URL_PATTERN =
Pattern.compile(".*<a href=\"([^\"]+\\.tpf)\">.*");
- private List<String> downloadOnionPerfDirectoryListing(URL baseUrl) {
- List<String> tpfFileUrls = new ArrayList<>();
+ private static final Pattern ONIONPERF_ANALYSIS_FILE_URL_PATTERN =
+ Pattern.compile(
+ ".*<a href=\"([0-9-]{10}\\.onionperf\\.analysis\\.json\\.xz)\">.*");
+
+ private void downloadOnionPerfDirectoryListing(URL baseUrl) {
try (BufferedReader br = new BufferedReader(new InputStreamReader(
baseUrl.openStream()))) {
String line;
while ((line = br.readLine()) != null) {
- Matcher matcher = TPF_FILE_URL_PATTERN.matcher(line);
- if (matcher.matches() && !matcher.group(1).startsWith("/")) {
- tpfFileUrls.add(matcher.group(1));
+ Matcher tpfFileMatcher = TPF_FILE_URL_PATTERN.matcher(line);
+ if (tpfFileMatcher.matches()
+ && !tpfFileMatcher.group(1).startsWith("/")) {
+ this.tpfFileUrls.putIfAbsent(baseUrl, new ArrayList<>());
+ this.tpfFileUrls.get(baseUrl).add(tpfFileMatcher.group(1));
+ }
+ Matcher onionPerfAnalysisFileMatcher
+ = ONIONPERF_ANALYSIS_FILE_URL_PATTERN.matcher(line);
+ if (onionPerfAnalysisFileMatcher.matches()
+ && !onionPerfAnalysisFileMatcher.group(1).startsWith("/")) {
+ this.onionPerfAnalysisFileUrls.putIfAbsent(baseUrl,
+ new ArrayList<>());
+ this.onionPerfAnalysisFileUrls.get(baseUrl)
+ .add(onionPerfAnalysisFileMatcher.group(1));
}
}
} catch (IOException e) {
logger.warn("Unable to download directory listing from '{}'. Skipping "
+ "this OnionPerf host.", baseUrl);
- tpfFileUrls.clear();
+ this.tpfFileUrls.remove(baseUrl);
+ this.onionPerfAnalysisFileUrls.remove(baseUrl);
}
- return tpfFileUrls;
}
private static final DateFormat DATE_FORMAT;
@@ -169,7 +198,7 @@ public class OnionPerfDownloader extends CollecTorMain {
}
/* Skip if we successfully downloaded this file before. */
- if (this.downloadedTpfFiles.contains(tpfFileUrl.toString())) {
+ if (this.downloadedFiles.contains(tpfFileUrl.toString())) {
return;
}
@@ -197,7 +226,8 @@ public class OnionPerfDownloader extends CollecTorMain {
}
/* Download file contents to temporary file. */
- File tempFile = new File(this.recentDirectory, "." + tpfFileName);
+ File tempFile = new File(this.recentDirectory,
+ TORPERF + "/." + tpfFileName);
byte[] downloadedBytes;
try {
downloadedBytes = Downloader.downloadFromHttpServer(
@@ -263,7 +293,7 @@ public class OnionPerfDownloader extends CollecTorMain {
/* Copy/move files in place. */
File archiveFile = new File(this.archiveDirectory,
- date.replaceAll("-", "/") + "/" + tpfFileName);
+ TORPERF + "/" + date.replaceAll("-", "/") + "/" + tpfFileName);
archiveFile.getParentFile().mkdirs();
try {
Files.copy(tempFile.toPath(), archiveFile.toPath(),
@@ -274,18 +304,132 @@ public class OnionPerfDownloader extends CollecTorMain {
tempFile.delete();
return;
}
- File recentFile = new File(this.recentDirectory, tpfFileName);
+ File recentFile = new File(this.recentDirectory,
+ TORPERF + "/" + tpfFileName);
+ tempFile.renameTo(recentFile);
+
+ /* Add to download history to avoid downloading it again. */
+ this.downloadedFiles.add(baseUrl + tpfFileName);
+ }
+
+
+ private void downloadAndParseOnionPerfAnalysisFile(URL baseUrl, String source,
+ String onionPerfAnalysisFileName) {
+ URL onionPerfAnalysisFileUrl;
+ try {
+ onionPerfAnalysisFileUrl = new URL(baseUrl, onionPerfAnalysisFileName);
+ } catch (MalformedURLException e1) {
+ logger.warn("Unable to put together base URL '{}' and file path '{}' to "
+ + "a URL. Skipping.", baseUrl, onionPerfAnalysisFileName);
+ return;
+ }
+
+ /* Skip if we successfully downloaded this file before. */
+ if (this.downloadedFiles.contains(onionPerfAnalysisFileUrl.toString())) {
+ return;
+ }
+
+ /* Parse date from file name: yyyy-MM-dd.onionperf.analysis.json.xz */
+ String date;
+ try {
+ date = onionPerfAnalysisFileName.substring(0, 10);
+ DATE_FORMAT.parse(date);
+ } catch (NumberFormatException | ParseException e) {
+ logger.warn("Invalid file name '{}{}'. Skipping.", baseUrl,
+ onionPerfAnalysisFileName, e);
+ return;
+ }
+
+ /* Download file contents to temporary file. */
+ File tempFile = new File(this.recentDirectory,
+ ONIONPERF + "/." + onionPerfAnalysisFileName);
+ byte[] downloadedBytes;
+ try {
+ downloadedBytes = Downloader.downloadFromHttpServer(
+ new URL(baseUrl + onionPerfAnalysisFileName));
+ } catch (IOException e) {
+ logger.warn("Unable to download '{}{}'. Skipping.", baseUrl,
+ onionPerfAnalysisFileName, e);
+ return;
+ }
+ if (null == downloadedBytes) {
+ logger.warn("Unable to download '{}{}'. Skipping.", baseUrl,
+ onionPerfAnalysisFileName);
+ return;
+ }
+ tempFile.getParentFile().mkdirs();
+ try {
+ Files.write(tempFile.toPath(), downloadedBytes);
+ } catch (IOException e) {
+ logger.warn("Unable to write previously downloaded '{}{}' to temporary "
+ + "file '{}'. Skipping.", baseUrl, onionPerfAnalysisFileName,
+ tempFile, e);
+ return;
+ }
+
+ /* Validate contained descriptors. */
+ DescriptorParser descriptorParser =
+ DescriptorSourceFactory.createDescriptorParser();
+ byte[] rawDescriptorBytes;
+ try {
+ rawDescriptorBytes = IOUtils.toByteArray(
+ Files.newInputStream(tempFile.toPath()));
+ } catch (IOException e) {
+ logger.warn("OnionPerf file '{}{}' could not be read. Skipping.", baseUrl,
+ onionPerfAnalysisFileName, e);
+ tempFile.delete();
+ return;
+ }
+ Iterable<Descriptor> descriptors = descriptorParser.parseDescriptors(
+ rawDescriptorBytes, null, onionPerfAnalysisFileName);
+ String message = null;
+ for (Descriptor descriptor : descriptors) {
+ if (!(descriptor instanceof TorperfResult)) {
+ message = "File contains descriptors other than an OnionPerf analysis "
+ + "document: " + descriptor.getClass();
+ break;
+ }
+ TorperfResult torperf = (TorperfResult) descriptor;
+ if (!source.equals(torperf.getSource())) {
+ message = "File contains transfer from another source: "
+ + torperf.getSource();
+ break;
+ }
+ }
+ if (null != message) {
+ logger.warn("OnionPerf file '{}{}' was found to be invalid: {}. "
+ + "Skipping.", baseUrl, onionPerfAnalysisFileName, message);
+ tempFile.delete();
+ return;
+ }
+
+ /* Copy/move files in place. */
+ File archiveFile = new File(this.archiveDirectory,
+ ONIONPERF + "/" + date.replaceAll("-", "/") + "/" + date + "." + source
+ + ".onionperf.analysis.json.xz");
+ archiveFile.getParentFile().mkdirs();
+ try {
+ Files.copy(tempFile.toPath(), archiveFile.toPath(),
+ StandardCopyOption.REPLACE_EXISTING);
+ } catch (IOException e) {
+ logger.warn("Unable to copy OnionPerf file {} to {}. Skipping.",
+ tempFile, archiveFile, e);
+ tempFile.delete();
+ return;
+ }
+ File recentFile = new File(this.recentDirectory,
+ ONIONPERF + "/" + date + "." + source + ".onionperf.analysis.json.xz");
tempFile.renameTo(recentFile);
/* Add to download history to avoid downloading it again. */
- this.downloadedTpfFiles.add(baseUrl + tpfFileName);
+ this.downloadedFiles.add(baseUrl + onionPerfAnalysisFileName);
}
- private void writeDownloadedOnionPerfTpfFiles() {
+ private void writeDownloadedOnionPerfFiles() {
this.onionPerfDownloadedFile.getParentFile().mkdirs();
try (BufferedWriter bw = new BufferedWriter(new FileWriter(
this.onionPerfDownloadedFile))) {
- for (String line : this.downloadedTpfFiles) {
+ for (String line : this.downloadedFiles) {
bw.write(line);
bw.newLine();
}
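
A minimal standalone sketch of how the two link patterns above behave. Only the two Pattern constants are taken from the patch; the class name, sample directory-listing lines, and output are illustrative assumptions:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ListingPatternDemo {

  /* Patterns copied from the patch above. */
  private static final Pattern TPF_FILE_URL_PATTERN =
      Pattern.compile(".*<a href=\"([^\"]+\\.tpf)\">.*");

  private static final Pattern ONIONPERF_ANALYSIS_FILE_URL_PATTERN =
      Pattern.compile(
      ".*<a href=\"([0-9-]{10}\\.onionperf\\.analysis\\.json\\.xz)\">.*");

  public static void main(String[] args) {
    /* Hypothetical lines as they might appear in an OnionPerf host's
     * directory listing; absolute links are skipped, mirroring the
     * startsWith("/") guard in the patch. */
    String[] listingLines = {
        "<a href=\"op-ab-1048576-2020-04-29.tpf\">op-ab .tpf</a>",
        "<a href=\"2020-04-29.onionperf.analysis.json.xz\">analysis</a>",
        "<a href=\"/absolute/path.tpf\">skipped</a>" };
    for (String line : listingLines) {
      Matcher tpfFileMatcher = TPF_FILE_URL_PATTERN.matcher(line);
      if (tpfFileMatcher.matches()
          && !tpfFileMatcher.group(1).startsWith("/")) {
        System.out.println(".tpf link: " + tpfFileMatcher.group(1));
      }
      Matcher analysisFileMatcher =
          ONIONPERF_ANALYSIS_FILE_URL_PATTERN.matcher(line);
      if (analysisFileMatcher.matches()
          && !analysisFileMatcher.group(1).startsWith("/")) {
        System.out.println("analysis link: " + analysisFileMatcher.group(1));
      }
    }
  }
}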
diff --git a/src/main/resources/collector.properties b/src/main/resources/collector.properties
index 61baed5..2347021 100644
--- a/src/main/resources/collector.properties
+++ b/src/main/resources/collector.properties
@@ -175,7 +175,7 @@ ExitlistUrl = https://check.torproject.org/exit-addresses
######## OnionPerf downloader ########
#
## Define descriptor sources
-# possible values: Remote,Sync
+# possible values: Remote,Sync (.tpf files only!)
OnionPerfSources = Remote
# Retrieve files from the following CollecTor instances.
# List of URLs separated by comma.
diff --git a/src/main/resources/create-tarballs.sh b/src/main/resources/create-tarballs.sh
index 07952c7..fcac2f3 100755
--- a/src/main/resources/create-tarballs.sh
+++ b/src/main/resources/create-tarballs.sh
@@ -40,6 +40,8 @@ TARBALLS=(
exit-list-$YEARTWO-$MONTHTWO
torperf-$YEARONE-$MONTHONE
torperf-$YEARTWO-$MONTHTWO
+ onionperf-$YEARONE-$MONTHONE
+ onionperf-$YEARTWO-$MONTHTWO
certs
microdescs-$YEARONE-$MONTHONE
microdescs-$YEARTWO-$MONTHTWO
@@ -73,6 +75,8 @@ DIRECTORIES=(
$OUTDIR/exit-lists/$YEARTWO/$MONTHTWO/
$OUTDIR/torperf/$YEARONE/$MONTHONE/
$OUTDIR/torperf/$YEARTWO/$MONTHTWO/
+ $OUTDIR/onionperf/$YEARONE/$MONTHONE/
+ $OUTDIR/onionperf/$YEARTWO/$MONTHTWO/
$OUTDIR/relay-descriptors/certs/
$OUTDIR/relay-descriptors/microdesc/$YEARONE/$MONTHONE
$OUTDIR/relay-descriptors/microdesc/$YEARTWO/$MONTHTWO
@@ -178,6 +182,9 @@ ln -f -s -t $ARCHIVEDIR/relay-descriptors/bandwidths/ $TARBALLTARGETDIR/bandwidt
mkdir -p $ARCHIVEDIR/torperf/
ln -f -s -t $ARCHIVEDIR/torperf/ $TARBALLTARGETDIR/torperf-20??-??.tar.xz
+mkdir -p $ARCHIVEDIR/onionperf/
+ln -f -s -t $ARCHIVEDIR/onionperf/ $TARBALLTARGETDIR/onionperf-20??-??.tar.xz
+
mkdir -p $ARCHIVEDIR/webstats/
ln -f -s -t $ARCHIVEDIR/webstats/ $TARBALLTARGETDIR/webstats-20??-??.tar
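
For orientation, the downloader above now appends per-module subdirectories to the configured output and recent paths. A minimal sketch of the resulting path strings, using made-up date and source values; the concatenations mirror downloadAndParseOnionPerfAnalysisFile(), and "onionperf" stands in for the ONIONPERF constant:

public class OnionPerfPathDemo {

  public static void main(String[] args) {
    /* Made-up example values; the real code derives the date from the
     * file name and the source from the OnionPerf host name. */
    String date = "2020-04-29";
    String source = "op-ab";

    /* Same concatenations as in the patch above. */
    String archivePath = "onionperf" + "/" + date.replaceAll("-", "/") + "/"
        + date + "." + source + ".onionperf.analysis.json.xz";
    String recentPath = "onionperf" + "/" + date + "." + source
        + ".onionperf.analysis.json.xz";

    /* Prints:
     * onionperf/2020/04/29/2020-04-29.op-ab.onionperf.analysis.json.xz
     * onionperf/2020-04-29.op-ab.onionperf.analysis.json.xz */
    System.out.println(archivePath);
    System.out.println(recentPath);
  }
}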
commit 2a0c40f54b51bd734f8db1be1c1026bac9b661cd
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Thu Apr 30 17:42:00 2020 +0200
Prepare for 1.15.0 release.
---
CHANGELOG.md | 2 +-
build.xml | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a55a0fc..8e58e7c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,4 +1,4 @@
-# Changes in version 1.1?.? - 2020-0?-??
+# Changes in version 1.15.0 - 2020-04-30
* Medium changes
- Update to metrics-lib 2.12.1.
diff --git a/build.xml b/build.xml
index a9988f5..1e22fef 100644
--- a/build.xml
+++ b/build.xml
@@ -9,7 +9,7 @@
<property name="javadoc-title" value="CollecTor API Documentation"/>
<property name="implementation-title" value="CollecTor" />
- <property name="release.version" value="1.14.1-dev" />
+ <property name="release.version" value="1.15.0" />
<property name="project-main-class" value="org.torproject.metrics.collector.Main" />
<property name="name" value="collector"/>
<property name="metricslibversion" value="2.12.1" />
commit 0f5536ed68c79be50a9b1e326356008f7ffaefff
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Thu Apr 30 17:29:35 2020 +0200
Archive OnionPerf analysis .json files.
Implements #34072.
---
CHANGELOG.md | 5 +
build.xml | 2 +-
.../collector/onionperf/OnionPerfDownloader.java | 218 +++++++++++++++++----
src/main/resources/collector.properties | 2 +-
src/main/resources/create-tarballs.sh | 7 +
5 files changed, 195 insertions(+), 39 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9bbedc4..a55a0fc 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,10 @@
# Changes in version 1.1?.? - 2020-0?-??
+ * Medium changes
+ - Update to metrics-lib 2.12.1.
+ - Download OnionPerf analysis .json files in addition to .tpf
+ files.
+
* Minor changes
- Simplify logging configuration.
- Set default locale `US` and default time zone `UTC` at the
diff --git a/build.xml b/build.xml
index 748351e..a9988f5 100644
--- a/build.xml
+++ b/build.xml
@@ -12,7 +12,7 @@
<property name="release.version" value="1.14.1-dev" />
<property name="project-main-class" value="org.torproject.metrics.collector.Main" />
<property name="name" value="collector"/>
- <property name="metricslibversion" value="2.10.0" />
+ <property name="metricslibversion" value="2.12.1" />
<property name="jarincludes" value="collector.properties logback.xml" />
<patternset id="runtime" >
diff --git a/src/main/java/org/torproject/metrics/collector/onionperf/OnionPerfDownloader.java b/src/main/java/org/torproject/metrics/collector/onionperf/OnionPerfDownloader.java
index b651620..d22ac0b 100644
--- a/src/main/java/org/torproject/metrics/collector/onionperf/OnionPerfDownloader.java
+++ b/src/main/java/org/torproject/metrics/collector/onionperf/OnionPerfDownloader.java
@@ -13,6 +13,7 @@ import org.torproject.metrics.collector.conf.Key;
import org.torproject.metrics.collector.cron.CollecTorMain;
import org.torproject.metrics.collector.downloader.Downloader;
+import org.apache.commons.compress.utils.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -32,14 +33,16 @@ import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import java.util.SortedSet;
import java.util.Stack;
import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-/** Download download .tpf files from OnionPerf hosts. */
+/** Download OnionPerf files from OnionPerf hosts. */
public class OnionPerfDownloader extends CollecTorMain {
private static final Logger logger = LoggerFactory.getLogger(
@@ -47,6 +50,8 @@ public class OnionPerfDownloader extends CollecTorMain {
private static final String TORPERF = "torperf";
+ private static final String ONIONPERF = "onionperf";
+
/** Instantiate the OnionPerf module using the given configuration. */
public OnionPerfDownloader(Configuration config) {
super(config);
@@ -54,21 +59,25 @@ public class OnionPerfDownloader extends CollecTorMain {
}
/** File containing the download history, which is necessary, because
- * OnionPerf does not delete older .tpf files, but which enables us to do
- * so. */
+ * OnionPerf does not delete older files, but which enables us to do so. */
private File onionPerfDownloadedFile;
- /** Full URLs of .tpf files downloaded in the current or in past
- * executions. */
- private SortedSet<String> downloadedTpfFiles = new TreeSet<>();
+ /** Full URLs of files downloaded in the current or in past executions. */
+ private SortedSet<String> downloadedFiles = new TreeSet<>();
/** Base URLs of configured OnionPerf hosts. */
private URL[] onionPerfHosts = null;
- /** Directory for storing archived .tpf files. */
+ /** Relative URLs of available .tpf files by base URL. */
+ private Map<URL, List<String>> tpfFileUrls = new HashMap<>();
+
+ /** Relative URLs of available OnionPerf analysis files by base URL. */
+ private Map<URL, List<String>> onionPerfAnalysisFileUrls = new HashMap<>();
+
+ /** Directory for storing archived files. */
private File archiveDirectory = null;
- /** Directory for storing recent .tpf files. */
+ /** Directory for storing recent files. */
private File recentDirectory = null;
@Override
@@ -87,19 +96,17 @@ public class OnionPerfDownloader extends CollecTorMain {
new File(config.getPath(Key.StatsPath).toFile(),
"onionperf-downloaded");
this.onionPerfHosts = config.getUrlArray(Key.OnionPerfHosts);
- this.readDownloadedOnionPerfTpfFiles();
- this.archiveDirectory = new File(config.getPath(Key.OutputPath).toFile(),
- TORPERF);
- this.recentDirectory = new File(config.getPath(Key.RecentPath).toFile(),
- TORPERF);
+ this.readDownloadedOnionPerfFiles();
+ this.archiveDirectory = config.getPath(Key.OutputPath).toFile();
+ this.recentDirectory = config.getPath(Key.RecentPath).toFile();
for (URL baseUrl : this.onionPerfHosts) {
this.downloadFromOnionPerfHost(baseUrl);
}
- this.writeDownloadedOnionPerfTpfFiles();
+ this.writeDownloadedOnionPerfFiles();
this.cleanUpRsyncDirectory();
}
- private void readDownloadedOnionPerfTpfFiles() {
+ private void readDownloadedOnionPerfFiles() {
if (!this.onionPerfDownloadedFile.exists()) {
return;
}
@@ -107,47 +114,69 @@ public class OnionPerfDownloader extends CollecTorMain {
this.onionPerfDownloadedFile))) {
String line;
while ((line = br.readLine()) != null) {
- this.downloadedTpfFiles.add(line);
+ this.downloadedFiles.add(line);
}
} catch (IOException e) {
logger.info("Unable to read download history file '{}'. Ignoring "
- + "download history and downloading all available .tpf files.",
+ + "download history and downloading all available files.",
this.onionPerfDownloadedFile.getAbsolutePath());
- this.downloadedTpfFiles.clear();
+ this.downloadedFiles.clear();
}
}
private void downloadFromOnionPerfHost(URL baseUrl) {
logger.info("Downloading from OnionPerf host {}", baseUrl);
- List<String> tpfFileNames =
- this.downloadOnionPerfDirectoryListing(baseUrl);
+ this.downloadOnionPerfDirectoryListing(baseUrl);
String source = baseUrl.getHost().split("\\.")[0];
- for (String tpfFileName : tpfFileNames) {
- this.downloadAndParseOnionPerfTpfFile(baseUrl, source, tpfFileName);
+ if (this.tpfFileUrls.containsKey(baseUrl)) {
+ for (String tpfFileName : this.tpfFileUrls.get(baseUrl)) {
+ this.downloadAndParseOnionPerfTpfFile(baseUrl, source, tpfFileName);
+ }
+ }
+ if (this.onionPerfAnalysisFileUrls.containsKey(baseUrl)) {
+ for (String onionPerfAnalysisFileName
+ : this.onionPerfAnalysisFileUrls.get(baseUrl)) {
+ this.downloadAndParseOnionPerfAnalysisFile(baseUrl, source,
+ onionPerfAnalysisFileName);
+ }
}
}
- /** Pattern for links contained in directory listings. */
+ /** Patterns for links contained in directory listings. */
private static final Pattern TPF_FILE_URL_PATTERN =
Pattern.compile(".*<a href=\"([^\"]+\\.tpf)\">.*");
- private List<String> downloadOnionPerfDirectoryListing(URL baseUrl) {
- List<String> tpfFileUrls = new ArrayList<>();
+ private static final Pattern ONIONPERF_ANALYSIS_FILE_URL_PATTERN =
+ Pattern.compile(
+ ".*<a href=\"([0-9-]{10}\\.onionperf\\.analysis\\.json\\.xz)\">.*");
+
+ private void downloadOnionPerfDirectoryListing(URL baseUrl) {
try (BufferedReader br = new BufferedReader(new InputStreamReader(
baseUrl.openStream()))) {
String line;
while ((line = br.readLine()) != null) {
- Matcher matcher = TPF_FILE_URL_PATTERN.matcher(line);
- if (matcher.matches() && !matcher.group(1).startsWith("/")) {
- tpfFileUrls.add(matcher.group(1));
+ Matcher tpfFileMatcher = TPF_FILE_URL_PATTERN.matcher(line);
+ if (tpfFileMatcher.matches()
+ && !tpfFileMatcher.group(1).startsWith("/")) {
+ this.tpfFileUrls.putIfAbsent(baseUrl, new ArrayList<>());
+ this.tpfFileUrls.get(baseUrl).add(tpfFileMatcher.group(1));
+ }
+ Matcher onionPerfAnalysisFileMatcher
+ = ONIONPERF_ANALYSIS_FILE_URL_PATTERN.matcher(line);
+ if (onionPerfAnalysisFileMatcher.matches()
+ && !onionPerfAnalysisFileMatcher.group(1).startsWith("/")) {
+ this.onionPerfAnalysisFileUrls.putIfAbsent(baseUrl,
+ new ArrayList<>());
+ this.onionPerfAnalysisFileUrls.get(baseUrl)
+ .add(onionPerfAnalysisFileMatcher.group(1));
}
}
} catch (IOException e) {
logger.warn("Unable to download directory listing from '{}'. Skipping "
+ "this OnionPerf host.", baseUrl);
- tpfFileUrls.clear();
+ this.tpfFileUrls.remove(baseUrl);
+ this.onionPerfAnalysisFileUrls.remove(baseUrl);
}
- return tpfFileUrls;
}
private static final DateFormat DATE_FORMAT;
@@ -169,7 +198,7 @@ public class OnionPerfDownloader extends CollecTorMain {
}
/* Skip if we successfully downloaded this file before. */
- if (this.downloadedTpfFiles.contains(tpfFileUrl.toString())) {
+ if (this.downloadedFiles.contains(tpfFileUrl.toString())) {
return;
}
@@ -197,7 +226,8 @@ public class OnionPerfDownloader extends CollecTorMain {
}
/* Download file contents to temporary file. */
- File tempFile = new File(this.recentDirectory, "." + tpfFileName);
+ File tempFile = new File(this.recentDirectory,
+ TORPERF + "/." + tpfFileName);
byte[] downloadedBytes;
try {
downloadedBytes = Downloader.downloadFromHttpServer(
@@ -263,7 +293,7 @@ public class OnionPerfDownloader extends CollecTorMain {
/* Copy/move files in place. */
File archiveFile = new File(this.archiveDirectory,
- date.replaceAll("-", "/") + "/" + tpfFileName);
+ TORPERF + "/" + date.replaceAll("-", "/") + "/" + tpfFileName);
archiveFile.getParentFile().mkdirs();
try {
Files.copy(tempFile.toPath(), archiveFile.toPath(),
@@ -274,18 +304,132 @@ public class OnionPerfDownloader extends CollecTorMain {
tempFile.delete();
return;
}
- File recentFile = new File(this.recentDirectory, tpfFileName);
+ File recentFile = new File(this.recentDirectory,
+ TORPERF + "/" + tpfFileName);
+ tempFile.renameTo(recentFile);
+
+ /* Add to download history to avoid downloading it again. */
+ this.downloadedFiles.add(baseUrl + tpfFileName);
+ }
+
+
+ private void downloadAndParseOnionPerfAnalysisFile(URL baseUrl, String source,
+ String onionPerfAnalysisFileName) {
+ URL onionPerfAnalysisFileUrl;
+ try {
+ onionPerfAnalysisFileUrl = new URL(baseUrl, onionPerfAnalysisFileName);
+ } catch (MalformedURLException e1) {
+ logger.warn("Unable to put together base URL '{}' and file path '{}' to "
+ + "a URL. Skipping.", baseUrl, onionPerfAnalysisFileName);
+ return;
+ }
+
+ /* Skip if we successfully downloaded this file before. */
+ if (this.downloadedFiles.contains(onionPerfAnalysisFileUrl.toString())) {
+ return;
+ }
+
+ /* Parse date from file name: yyyy-MM-dd.onionperf.analysis.json.xz */
+ String date;
+ try {
+ date = onionPerfAnalysisFileName.substring(0, 10);
+ DATE_FORMAT.parse(date);
+ } catch (NumberFormatException | ParseException e) {
+ logger.warn("Invalid file name '{}{}'. Skipping.", baseUrl,
+ onionPerfAnalysisFileName, e);
+ return;
+ }
+
+ /* Download file contents to temporary file. */
+ File tempFile = new File(this.recentDirectory,
+ ONIONPERF + "/." + onionPerfAnalysisFileName);
+ byte[] downloadedBytes;
+ try {
+ downloadedBytes = Downloader.downloadFromHttpServer(
+ new URL(baseUrl + onionPerfAnalysisFileName));
+ } catch (IOException e) {
+ logger.warn("Unable to download '{}{}'. Skipping.", baseUrl,
+ onionPerfAnalysisFileName, e);
+ return;
+ }
+ if (null == downloadedBytes) {
+ logger.warn("Unable to download '{}{}'. Skipping.", baseUrl,
+ onionPerfAnalysisFileName);
+ return;
+ }
+ tempFile.getParentFile().mkdirs();
+ try {
+ Files.write(tempFile.toPath(), downloadedBytes);
+ } catch (IOException e) {
+ logger.warn("Unable to write previously downloaded '{}{}' to temporary "
+ + "file '{}'. Skipping.", baseUrl, onionPerfAnalysisFileName,
+ tempFile, e);
+ return;
+ }
+
+ /* Validate contained descriptors. */
+ DescriptorParser descriptorParser =
+ DescriptorSourceFactory.createDescriptorParser();
+ byte[] rawDescriptorBytes;
+ try {
+ rawDescriptorBytes = IOUtils.toByteArray(
+ Files.newInputStream(tempFile.toPath()));
+ } catch (IOException e) {
+ logger.warn("OnionPerf file '{}{}' could not be read. Skipping.", baseUrl,
+ onionPerfAnalysisFileName, e);
+ tempFile.delete();
+ return;
+ }
+ Iterable<Descriptor> descriptors = descriptorParser.parseDescriptors(
+ rawDescriptorBytes, null, onionPerfAnalysisFileName);
+ String message = null;
+ for (Descriptor descriptor : descriptors) {
+ if (!(descriptor instanceof TorperfResult)) {
+ message = "File contains descriptors other than an OnionPerf analysis "
+ + "document: " + descriptor.getClass();
+ break;
+ }
+ TorperfResult torperf = (TorperfResult) descriptor;
+ if (!source.equals(torperf.getSource())) {
+ message = "File contains transfer from another source: "
+ + torperf.getSource();
+ break;
+ }
+ }
+ if (null != message) {
+ logger.warn("OnionPerf file '{}{}' was found to be invalid: {}. "
+ + "Skipping.", baseUrl, onionPerfAnalysisFileName, message);
+ tempFile.delete();
+ return;
+ }
+
+ /* Copy/move files in place. */
+ File archiveFile = new File(this.archiveDirectory,
+ ONIONPERF + "/" + date.replaceAll("-", "/") + "/" + date + "." + source
+ + ".onionperf.analysis.json.xz");
+ archiveFile.getParentFile().mkdirs();
+ try {
+ Files.copy(tempFile.toPath(), archiveFile.toPath(),
+ StandardCopyOption.REPLACE_EXISTING);
+ } catch (IOException e) {
+ logger.warn("Unable to copy OnionPerf file {} to {}. Skipping.",
+ tempFile, archiveFile, e);
+ tempFile.delete();
+ return;
+ }
+ File recentFile = new File(this.recentDirectory,
+ ONIONPERF + "/" + date + "." + source + ".onionperf.analysis.json.xz");
tempFile.renameTo(recentFile);
/* Add to download history to avoid downloading it again. */
- this.downloadedTpfFiles.add(baseUrl + tpfFileName);
+ this.downloadedFiles.add(baseUrl + onionPerfAnalysisFileName);
}
- private void writeDownloadedOnionPerfTpfFiles() {
+ private void writeDownloadedOnionPerfFiles() {
this.onionPerfDownloadedFile.getParentFile().mkdirs();
try (BufferedWriter bw = new BufferedWriter(new FileWriter(
this.onionPerfDownloadedFile))) {
- for (String line : this.downloadedTpfFiles) {
+ for (String line : this.downloadedFiles) {
bw.write(line);
bw.newLine();
}
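
The validation step above hands the downloaded bytes to metrics-lib and rejects anything that is not a TorperfResult from the expected source. A stripped-down sketch of that check, assuming the standard org.torproject.descriptor package for the imports and a caller that already holds the file bytes, file name, and source host name:

import org.torproject.descriptor.Descriptor;
import org.torproject.descriptor.DescriptorParser;
import org.torproject.descriptor.DescriptorSourceFactory;
import org.torproject.descriptor.TorperfResult;

public class AnalysisFileCheckDemo {

  /** Returns null if all descriptors are TorperfResults from the expected
   * source, or a short message describing the first problem found. */
  static String checkAnalysisFile(byte[] fileBytes, String fileName,
      String source) {
    DescriptorParser descriptorParser =
        DescriptorSourceFactory.createDescriptorParser();
    for (Descriptor descriptor
        : descriptorParser.parseDescriptors(fileBytes, null, fileName)) {
      if (!(descriptor instanceof TorperfResult)) {
        return "unexpected descriptor type: " + descriptor.getClass();
      }
      TorperfResult torperf = (TorperfResult) descriptor;
      if (!source.equals(torperf.getSource())) {
        return "transfer from another source: " + torperf.getSource();
      }
    }
    return null;
  }
}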
diff --git a/src/main/resources/collector.properties b/src/main/resources/collector.properties
index 61baed5..2347021 100644
--- a/src/main/resources/collector.properties
+++ b/src/main/resources/collector.properties
@@ -175,7 +175,7 @@ ExitlistUrl = https://check.torproject.org/exit-addresses
######## OnionPerf downloader ########
#
## Define descriptor sources
-# possible values: Remote,Sync
+# possible values: Remote,Sync (.tpf files only!)
OnionPerfSources = Remote
# Retrieve files from the following CollecTor instances.
# List of URLs separated by comma.
diff --git a/src/main/resources/create-tarballs.sh b/src/main/resources/create-tarballs.sh
index 07952c7..fcac2f3 100755
--- a/src/main/resources/create-tarballs.sh
+++ b/src/main/resources/create-tarballs.sh
@@ -40,6 +40,8 @@ TARBALLS=(
exit-list-$YEARTWO-$MONTHTWO
torperf-$YEARONE-$MONTHONE
torperf-$YEARTWO-$MONTHTWO
+ onionperf-$YEARONE-$MONTHONE
+ onionperf-$YEARTWO-$MONTHTWO
certs
microdescs-$YEARONE-$MONTHONE
microdescs-$YEARTWO-$MONTHTWO
@@ -73,6 +75,8 @@ DIRECTORIES=(
$OUTDIR/exit-lists/$YEARTWO/$MONTHTWO/
$OUTDIR/torperf/$YEARONE/$MONTHONE/
$OUTDIR/torperf/$YEARTWO/$MONTHTWO/
+ $OUTDIR/onionperf/$YEARONE/$MONTHONE/
+ $OUTDIR/onionperf/$YEARTWO/$MONTHTWO/
$OUTDIR/relay-descriptors/certs/
$OUTDIR/relay-descriptors/microdesc/$YEARONE/$MONTHONE
$OUTDIR/relay-descriptors/microdesc/$YEARTWO/$MONTHTWO
@@ -178,6 +182,9 @@ ln -f -s -t $ARCHIVEDIR/relay-descriptors/bandwidths/ $TARBALLTARGETDIR/bandwidt
mkdir -p $ARCHIVEDIR/torperf/
ln -f -s -t $ARCHIVEDIR/torperf/ $TARBALLTARGETDIR/torperf-20??-??.tar.xz
+mkdir -p $ARCHIVEDIR/onionperf/
+ln -f -s -t $ARCHIVEDIR/onionperf/ $TARBALLTARGETDIR/onionperf-20??-??.tar.xz
+
mkdir -p $ARCHIVEDIR/webstats/
ln -f -s -t $ARCHIVEDIR/webstats/ $TARBALLTARGETDIR/webstats-20??-??.tar
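
Analysis file names are expected to start with an ISO date, as in 2020-04-29.onionperf.analysis.json.xz. A small sketch of the file-name check, assuming DATE_FORMAT is a non-lenient "yyyy-MM-dd" SimpleDateFormat; the pattern is suggested by the comment in the patch, and the initializer itself is outside the shown hunks:

import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;

public class AnalysisFileNameDemo {

  /* Assumption: the downloader's DATE_FORMAT uses the "yyyy-MM-dd"
   * pattern named in the patch comment. */
  private static final DateFormat DATE_FORMAT =
      new SimpleDateFormat("yyyy-MM-dd");

  static {
    DATE_FORMAT.setLenient(false);
  }

  static boolean hasValidDatePrefix(String fileName) {
    try {
      /* Mirrors the patch: take the first ten characters and parse them. */
      DATE_FORMAT.parse(fileName.substring(0, 10));
      return true;
    } catch (ParseException e) {
      return false;
    }
  }

  public static void main(String[] args) {
    /* Hypothetical file names. */
    System.out.println(
        hasValidDatePrefix("2020-04-29.onionperf.analysis.json.xz")); // true
    System.out.println(
        hasValidDatePrefix("not-a-date.onionperf.analysis.json.xz")); // false
  }
}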
commit 037f463761efeb9d7cfb50442c9c3a68a25120f3
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Thu Apr 30 17:16:47 2020 +0200
Bump version to 2.12.1-dev.
---
CHANGELOG.md | 3 +++
build.xml | 2 +-
2 files changed, 4 insertions(+), 1 deletion(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9870962..bb8430d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,6 @@
+# Changes in version 2.??.? - 2020-??-??
+
+
# Changes in version 2.12.1 - 2020-04-30
* Minor changes
diff --git a/build.xml b/build.xml
index ab62a88..ac4a3ed 100644
--- a/build.xml
+++ b/build.xml
@@ -7,7 +7,7 @@
<project default="usage" name="metrics-lib" basedir="."
xmlns:ivy="antlib:org.apache.ivy.ant">
- <property name="release.version" value="2.12.1" />
+ <property name="release.version" value="2.12.1-dev" />
<property name="javadoc-title" value="Tor Metrics Library API Documentation"/>
<property name="javadoc-excludes" value="**/impl/** **/index/** **/internal/** **/log/**" />
<property name="implementation-title" value="Tor Metrics Library" />
commit aa55fd05f01f71b113cbf72b60d6aba77aa04bfd
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Thu Apr 30 17:11:37 2020 +0200
Prepare for 2.12.1 release.
---
CHANGELOG.md | 2 +-
build.xml | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8fbc4bd..9870962 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,4 +1,4 @@
-# Changes in version 2.??.? - 2020-??-??
+# Changes in version 2.12.1 - 2020-04-30
* Minor changes
- Change back how we treat xz-compressed files by leaving
diff --git a/build.xml b/build.xml
index e3ed6e6..ab62a88 100644
--- a/build.xml
+++ b/build.xml
@@ -7,7 +7,7 @@
<project default="usage" name="metrics-lib" basedir="."
xmlns:ivy="antlib:org.apache.ivy.ant">
- <property name="release.version" value="2.12.0-dev" />
+ <property name="release.version" value="2.12.1" />
<property name="javadoc-title" value="Tor Metrics Library API Documentation"/>
<property name="javadoc-excludes" value="**/impl/** **/index/** **/internal/** **/log/**" />
<property name="implementation-title" value="Tor Metrics Library" />