commit 2db5c12fe9d8efa63a324f4b0680e3fd33715753
Author: Karsten Loesing <karsten.loesing@gmx.net>
Date:   Mon May 21 15:32:36 2012 +0200
Make some major changes to the bridge descriptor sanitizer.
- Bridge network statuses now contain a "published" line with the publication timestamp, so that parsers no longer have to derive that timestamp from the file name.
- Bridge network status entries are ordered by hex-encoded fingerprint, not by base64-encoded fingerprint, which is mostly a cosmetic change.
- Server descriptors and extra-info descriptors are stored under the SHA-1 hashes of the descriptor identifiers of their non-scrubbed forms (see the digest sketch below). Previously, descriptors were (supposed to be; see #5607) stored under the digests of their scrubbed forms. Hashing the digests prevents looking up an existing descriptor from the bridge authority by its non-scrubbed descriptor digest. With this change, we no longer have to repair references between statuses, server descriptors, and extra-info descriptors, which turned out to be error-prone (#5608). Server descriptors and extra-info descriptors contain a new "router-digest" line with the hex-formatted descriptor identifier. These lines are necessary because we can no longer calculate the identifier from the sanitized contents and because we don't want to rely on the file name.
- Stop sanitizing bridge nicknames (#5684).
- Stop sanitizing *-stats lines (#5807).
- All sanitized bridge descriptors contain @type annotations (#5651).
---
 .../ernie/db/BridgeDescriptorParser.java          |  20 +-
 .../torproject/ernie/db/BridgeSnapshotReader.java |   4 +-
 .../ernie/db/SanitizedBridgesWriter.java          | 908 ++++----------------
 3 files changed, 151 insertions(+), 781 deletions(-)
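(For reference, the hashed digest mentioned above is the SHA-1 of the descriptor identifier of the non-scrubbed descriptor, which is itself the SHA-1 over the bytes from the "router " or "extra-info " keyword up to and including the "router-signature" line. Below is a minimal Java sketch of that computation, modeled on the code added in this patch; it assumes the commons-codec 1.x DigestUtils helpers, and the class and method names are illustrative only.)

import org.apache.commons.codec.digest.DigestUtils;

public class RouterDigestSketch {

  /* Returns the hex-encoded SHA-1 hash of the descriptor identifier,
   * i.e. the value used for the sanitized file name and the
   * "router-digest" line, or null if the tokens cannot be found.
   * startToken is "router " for server descriptors and "extra-info "
   * for extra-info descriptors. */
  static String hashedDescriptorDigest(byte[] data, String startToken)
      throws java.io.UnsupportedEncodingException {
    String ascii = new String(data, "US-ASCII");
    String sigToken = "\nrouter-signature\n";
    int start = ascii.indexOf(startToken);
    int sigStart = ascii.indexOf(sigToken);
    if (start < 0 || sigStart < 0 || sigStart <= start) {
      return null;
    }
    int end = sigStart + sigToken.length();
    /* The descriptor identifier covers the keyword line up to and
     * including the "router-signature" line of the non-scrubbed input. */
    byte[] forDigest = new byte[end - start];
    System.arraycopy(data, start, forDigest, 0, end - start);
    /* The first SHA-1 yields the original descriptor identifier; hashing
     * it again prevents look-ups by the non-scrubbed digest. */
    return DigestUtils.shaHex(DigestUtils.sha(forDigest));
  }
}

(The upper-cased result is what ends up on the "router-digest" line, and its first two hex characters determine the storage subdirectories.)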
diff --git a/src/org/torproject/ernie/db/BridgeDescriptorParser.java b/src/org/torproject/ernie/db/BridgeDescriptorParser.java index 0be62f7..7773525 100644 --- a/src/org/torproject/ernie/db/BridgeDescriptorParser.java +++ b/src/org/torproject/ernie/db/BridgeDescriptorParser.java @@ -16,7 +16,7 @@ public class BridgeDescriptorParser { this.logger = Logger.getLogger(BridgeDescriptorParser.class.getName()); } - public void parse(byte[] allData, String dateTime, boolean sanitized) { + public void parse(byte[] allData, String dateTime) { try { BufferedReader br = new BufferedReader(new StringReader( new String(allData, "US-ASCII"))); @@ -25,27 +25,15 @@ public class BridgeDescriptorParser { return; } else if (line.startsWith("r ")) { if (this.sbw != null) { - if (sanitized) { - this.sbw.storeSanitizedNetworkStatus(allData, dateTime); - } else { - this.sbw.sanitizeAndStoreNetworkStatus(allData, dateTime); - } + this.sbw.sanitizeAndStoreNetworkStatus(allData, dateTime); } } else if (line.startsWith("router ")) { if (this.sbw != null) { - if (sanitized) { - this.sbw.storeSanitizedServerDescriptor(allData); - } else { - this.sbw.sanitizeAndStoreServerDescriptor(allData); - } + this.sbw.sanitizeAndStoreServerDescriptor(allData); } } else if (line.startsWith("extra-info ")) { if (this.sbw != null) { - if (sanitized) { - this.sbw.storeSanitizedExtraInfoDescriptor(allData); - } else { - this.sbw.sanitizeAndStoreExtraInfoDescriptor(allData); - } + this.sbw.sanitizeAndStoreExtraInfoDescriptor(allData); } } } catch (IOException e) { diff --git a/src/org/torproject/ernie/db/BridgeSnapshotReader.java b/src/org/torproject/ernie/db/BridgeSnapshotReader.java index 1683b58..0de9d83 100644 --- a/src/org/torproject/ernie/db/BridgeSnapshotReader.java +++ b/src/org/torproject/ernie/db/BridgeSnapshotReader.java @@ -119,7 +119,7 @@ public class BridgeSnapshotReader { } } if (firstLine.startsWith("r ")) { - bdp.parse(allData, dateTime, false); + bdp.parse(allData, dateTime); parsedStatuses++; } else if (descriptorImportHistory.contains(fileDigest)) { /* Skip server descriptors or extra-info descriptors if @@ -155,7 +155,7 @@ public class BridgeSnapshotReader { DigestUtils.sha(descBytes)); if (!descriptorImportHistory.contains( descriptorDigest)) { - bdp.parse(descBytes, dateTime, false); + bdp.parse(descBytes, dateTime); descriptorImportHistory.add(descriptorDigest); if (firstLine.startsWith("router ")) { parsedServerDescriptors++; diff --git a/src/org/torproject/ernie/db/SanitizedBridgesWriter.java b/src/org/torproject/ernie/db/SanitizedBridgesWriter.java index ff53cf0..ba33026 100644 --- a/src/org/torproject/ernie/db/SanitizedBridgesWriter.java +++ b/src/org/torproject/ernie/db/SanitizedBridgesWriter.java @@ -25,141 +25,15 @@ import org.apache.commons.codec.binary.*; * (lists of all bridges at a given time), server descriptors (published * by the bridge to advertise their capabilities), and extra-info * descriptors (published by the bridge, mainly for statistical analysis). - * - * Network statuses, server descriptors, and extra-info descriptors are - * linked via descriptor digests: extra-info descriptors are referenced - * from server descriptors, and server descriptors are referenced from - * network statuses. These references need to be changed during the - * sanitizing process, because descriptor contents change and so do the - * descriptor digests. - * - * No assumptions are made about the order in which bridge descriptors are - * parsed. 
The approach taken here is to sanitize bridge descriptors even - * with incomplete knowledge about references and to update them as soon - * as these information get known. We are keeping a persistent data - * structure, the bridge descriptor mapping, to hold information about - * every single descriptor. The idea is that every descriptor is (a) - * referenced from a network status and consists of (b) a server - * descriptor and (c) an extra-info descriptor, both of which are - * published at the same time. Using this data structure, we can repair - * references as soon as we learn more about the descriptor and regardless - * of the order of incoming bridge descriptors. - * - * The process of sanitizing a bridge descriptor is as follows, depending - * on the type of descriptor: - * - * Network statuses are processed by sanitizing every r line separately - * and looking up whether the descriptor mapping contains a bridge with - * given identity hash and descriptor publication time. If so, the new - * server descriptor identifier can be added. If not, we're adding all - * 0's. - * - * While sanitizing a server descriptor, its identity hash and publication - * time are looked up in order to put in the extra-info descriptor - * identifier in case the corresponding extra-info descriptor was - * sanitized before. Further, its publication time is noted down, so that - * all network statuses that might be referencing this server descriptor - * can be re-written at the end of the sanitizing procedure. - * - * Extra-info descriptors are processed by looking up their identity hash - * and publication time in the descriptor mapping. If the corresponding - * server descriptor was sanitized before, the server descriptor is - * re-written to include the new extra-info descriptor digest, and the - * publication time is noted down in order to re-write the network - * statuses possibly referencing this extra-info descriptor and its - * corresponding server descriptor at the end of the sanitizing process. - * - * After sanitizing all bridge descriptors, the network statuses that - * might be referencing server descriptors which have been (re-)written - * during this execution are re-written, too. This may be necessary in - * order to update previously broken references to server descriptors. */ public class SanitizedBridgesWriter {
/** - * Hex representation of null reference that is written to bridge - * descriptors if we don't have the real reference, yet. - */ - private static final String NULL_REFERENCE = - "0000000000000000000000000000000000000000"; - - /** - * Mapping between a descriptor as referenced from a network status to - * the digests of server descriptor and extra-info descriptor. - */ - private static class DescriptorMapping { - - /** - * Creates a new mapping from comma-separated values as read from the - * persistent mapping file. - */ - private DescriptorMapping(String commaSeparatedValues) { - String[] parts = commaSeparatedValues.split(","); - this.hashedBridgeIdentity = parts[0]; - this.published = parts[1]; - this.serverDescriptorIdentifier = parts[2]; - this.extraInfoDescriptorIdentifier = parts[3]; - } - - /** - * Creates a new mapping for a given identity hash and descriptor - * publication time that has all 0's as descriptor digests. - */ - private DescriptorMapping(String hashedBridgeIdentity, - String published) { - this.hashedBridgeIdentity = hashedBridgeIdentity; - this.published = published; - this.serverDescriptorIdentifier = NULL_REFERENCE; - this.extraInfoDescriptorIdentifier = NULL_REFERENCE; - } - private String hashedBridgeIdentity; - private String published; - private String serverDescriptorIdentifier; - private String extraInfoDescriptorIdentifier; - - /** - * Returns a string representation of this descriptor mapping that can - * be written to the persistent mapping file. - */ - public String toString() { - return this.hashedBridgeIdentity + "," + this.published + "," - + this.serverDescriptorIdentifier + "," - + this.extraInfoDescriptorIdentifier; - } - } - - /** - * File containing the mapping between network status entries, server - * descriptors, and extra-info descriptors. - */ - private File bridgeDescriptorMappingsFile; - - /** - * Mapping between status entries, server descriptors, and extra-info - * descriptors. This mapping is required to re-establish the references - * from status entries to server descriptors and from server descriptors - * to extra-info descriptors. The original references are broken when - * sanitizing, because descriptor contents change and so do the - * descriptor digests that are used for referencing. Map key contains - * hashed bridge identity and descriptor publication time, map value - * contains map key plus new server descriptor identifier and new - * extra-info descriptor identifier. - */ - private SortedMap<String, DescriptorMapping> bridgeDescriptorMappings; - - /** * Logger for this class. */ private Logger logger;
/** - * Publication times of server descriptors and extra-info descriptors - * parsed in the current execution. These times are used to determine - * which statuses need to be rewritten at the end of the execution. - */ - private SortedSet<String> descriptorPublicationTimes; - - /** * Output directory for writing sanitized bridge descriptors. */ private File sanitizedBridgesDirectory; @@ -170,21 +44,20 @@ public class SanitizedBridgesWriter {
private SortedMap<String, byte[]> secretsForHashingIPAddresses;
-  private String bridgeDescriptorMappingsCutOffTimestamp;
+  private String bridgeSanitizingCutOffTimestamp;
-  private boolean haveWarnedAboutLimitedMapping;
+  private boolean haveWarnedAboutInterval;
private File bridgeIpSecretsFile;
private SecureRandom secureRandom;
   /**
-   * Initializes this class, including reading in the known descriptor
-   * mapping.
+   * Initializes this class.
    */
   public SanitizedBridgesWriter(File sanitizedBridgesDirectory,
       File statsDirectory, boolean replaceIPAddressesWithHashes,
-      long limitBridgeDescriptorMappings) {
+      long limitBridgeSanitizingInterval) {
     if (sanitizedBridgesDirectory == null || statsDirectory == null) {
       throw new IllegalArgumentException();
@@ -198,11 +71,6 @@ public class SanitizedBridgesWriter {
     this.logger = Logger.getLogger(
         SanitizedBridgesWriter.class.getName());
- /* Initialize data structure. */ - this.bridgeDescriptorMappings = new TreeMap<String, - DescriptorMapping>(); - this.descriptorPublicationTimes = new TreeSet<String>(); - /* Initialize secure random number generator if we need it. */ if (this.replaceIPAddressesWithHashes) { try { @@ -259,57 +127,17 @@ public class SanitizedBridgesWriter { } }
- /* If we're configured to keep descriptor mappings only for a limited - * time, define the cut-off day and time. */ - if (limitBridgeDescriptorMappings >= 0L) { + /* If we're configured to keep secrets only for a limited time, define + * the cut-off day and time. */ + if (limitBridgeSanitizingInterval >= 0L) { SimpleDateFormat formatter = new SimpleDateFormat( "yyyy-MM-dd HH:mm:ss"); formatter.setTimeZone(TimeZone.getTimeZone("UTC")); - this.bridgeDescriptorMappingsCutOffTimestamp = formatter.format( + this.bridgeSanitizingCutOffTimestamp = formatter.format( System.currentTimeMillis() - 24L * 60L * 60L * 1000L - * limitBridgeDescriptorMappings); + * limitBridgeSanitizingInterval); } else { - this.bridgeDescriptorMappingsCutOffTimestamp = - "1999-12-31 23:59:59"; - } - - /* Read known descriptor mappings from disk. */ - this.bridgeDescriptorMappingsFile = new File( - "stats/bridge-descriptor-mappings"); - if (this.bridgeDescriptorMappingsFile.exists()) { - try { - BufferedReader br = new BufferedReader(new FileReader( - this.bridgeDescriptorMappingsFile)); - String line = null; - int read = 0, skipped = 0; - while ((line = br.readLine()) != null) { - if (line.split(",").length == 4) { - String[] parts = line.split(","); - if (this.bridgeDescriptorMappingsCutOffTimestamp. - compareTo(parts[1]) > 0) { - skipped++; - continue; - } - read++; - DescriptorMapping dm = new DescriptorMapping(line); - this.bridgeDescriptorMappings.put(parts[0] + "," + parts[1], - dm); - } else { - this.logger.warning("Corrupt line '" + line + "' in " - + this.bridgeDescriptorMappingsFile.getAbsolutePath() - + ". Skipping."); - continue; - } - } - br.close(); - this.logger.fine("Finished reading " + read + " descriptor " - + "mappings from disk, skipped " + skipped + "."); - } catch (IOException e) { - this.logger.log(Level.WARNING, "Could not read in " - + this.bridgeDescriptorMappingsFile.getAbsolutePath() - + "."); - return; - } + this.bridgeSanitizingCutOffTimestamp = "1999-12-31 23:59:59"; } }
@@ -446,10 +274,10 @@ public class SanitizedBridgesWriter { secret, 0, 31); } if (month.compareTo( - this.bridgeDescriptorMappingsCutOffTimestamp) < 0) { + this.bridgeSanitizingCutOffTimestamp) < 0) { this.logger.warning("Generated a secret that we won't make " - + "persistent, because it's outside our bridge descriptors " - + "mapping interval."); + + "persistent, because it's outside our bridge descriptor " + + "sanitizing interval."); } else { /* Append secret to file on disk immediately before using it, or * we might end with inconsistently sanitized bridges. */ @@ -476,9 +304,8 @@ public class SanitizedBridgesWriter { }
   /**
-   * Sanitizes a network status and writes it to disk. Processes every r
-   * line separately and looks up whether the descriptor mapping contains
-   * a bridge with given identity hash and descriptor publication time. */
+   * Sanitizes a network status and writes it to disk.
+   */
   public void sanitizeAndStoreNetworkStatus(byte[] data,
       String publicationTime) {
@@ -488,13 +315,13 @@ public class SanitizedBridgesWriter {
       return;
     }
- if (this.bridgeDescriptorMappingsCutOffTimestamp. + if (this.bridgeSanitizingCutOffTimestamp. compareTo(publicationTime) > 0) { - this.logger.log(!this.haveWarnedAboutLimitedMapping ? Level.WARNING + this.logger.log(!this.haveWarnedAboutInterval ? Level.WARNING : Level.FINE, "Sanitizing and storing network status with " - + "publication time outside our descriptor mapping interval. " - + "We might not be able to repair references."); - this.haveWarnedAboutLimitedMapping = true; + + "publication time outside our descriptor sanitizing " + + "interval."); + this.haveWarnedAboutInterval = true; }
/* Parse the given network status line by line. */ @@ -508,64 +335,55 @@ public class SanitizedBridgesWriter { String mostRecentDescPublished = null; byte[] fingerprintBytes = null; String descPublicationTime = null; + String hashedBridgeIdentityHex = null; while ((line = br.readLine()) != null) {
/* r lines contain sensitive information that needs to be removed * or replaced. */ if (line.startsWith("r ")) {
+ /* Clear buffer from previously scrubbed lines. */ + if (scrubbed.length() > 0) { + String scrubbedLine = scrubbed.toString(); + scrubbedLines.put(hashedBridgeIdentityHex, scrubbedLine); + scrubbed = new StringBuilder(); + } + /* Parse the relevant parts of this r line. */ String[] parts = line.split(" "); + String nickname = parts[1]; fingerprintBytes = Base64.decodeBase64(parts[2] + "=="); + String descriptorIdentifier = parts[3]; descPublicationTime = parts[4] + " " + parts[5]; String address = parts[6]; String orPort = parts[7]; String dirPort = parts[8];
- /* Look up the descriptor in the descriptor mapping, or add a - * new mapping entry if there is none. */ - String hashedBridgeIdentityHex = Hex.encodeHexString( - DigestUtils.sha(fingerprintBytes)).toLowerCase(); - String mappingKey = hashedBridgeIdentityHex + "," - + descPublicationTime; - DescriptorMapping mapping = null; - if (this.bridgeDescriptorMappings.containsKey(mappingKey)) { - mapping = this.bridgeDescriptorMappings.get(mappingKey); - } else { - mapping = new DescriptorMapping(hashedBridgeIdentityHex. - toLowerCase(), descPublicationTime); - this.bridgeDescriptorMappings.put(mappingKey, mapping); - } - /* Determine most recent descriptor publication time. */ if (descPublicationTime.compareTo(publicationTime) <= 0 && (mostRecentDescPublished == null || - descPublicationTime.compareTo(mostRecentDescPublished) > 0)) { + descPublicationTime.compareTo( + mostRecentDescPublished) > 0)) { mostRecentDescPublished = descPublicationTime; }
/* Write scrubbed r line to buffer. */ + byte[] hashedBridgeIdentity = DigestUtils.sha(fingerprintBytes); String hashedBridgeIdentityBase64 = Base64.encodeBase64String( - DigestUtils.sha(fingerprintBytes)).substring(0, 27); - String sdi = Base64.encodeBase64String(Hex.decodeHex( - mapping.serverDescriptorIdentifier.toCharArray())). - substring(0, 27); - String scrubbedAddress = null; - try { - scrubbedAddress = scrubIpv4Address(address, fingerprintBytes, - descPublicationTime); - } catch (IOException e) { - return; - } - if (scrubbed.length() > 0) { - String scrubbedLine = scrubbed.toString(); - scrubbedLines.put(scrubbedLine.split(" ")[2], scrubbedLine); - scrubbed = new StringBuilder(); - } - scrubbed.append("r Unnamed " - + hashedBridgeIdentityBase64 + " " + sdi + " " - + descPublicationTime + " " + scrubbedAddress + " " - + orPort + " " + dirPort + "\n"); + hashedBridgeIdentity).substring(0, 27); + hashedBridgeIdentityHex = Hex.encodeHexString( + hashedBridgeIdentity); + String hashedDescriptorIdentifier = Base64.encodeBase64String( + DigestUtils.sha(Base64.decodeBase64(descriptorIdentifier + + "=="))).substring(0, 27); + String scrubbedAddress = scrubIpv4Address(address, + fingerprintBytes, + descPublicationTime); + scrubbed.append("r " + nickname + " " + + hashedBridgeIdentityBase64 + " " + + hashedDescriptorIdentifier + " " + descPublicationTime + + " " + scrubbedAddress + " " + orPort + " " + dirPort + + "\n");
/* Sanitize any addresses in a lines using the fingerprint and * descriptor publication time from the previous r line. */ @@ -598,7 +416,7 @@ public class SanitizedBridgesWriter { br.close(); if (scrubbed.length() > 0) { String scrubbedLine = scrubbed.toString(); - scrubbedLines.put(scrubbedLine.split(" ")[2], scrubbedLine); + scrubbedLines.put(hashedBridgeIdentityHex, scrubbedLine); scrubbed = new StringBuilder(); }
@@ -619,11 +437,8 @@ public class SanitizedBridgesWriter { } catch (ParseException e) { this.logger.log(Level.WARNING, "Could not parse timestamp in " + "bridge network status.", e); - } catch (IOException e) { - this.logger.log(Level.WARNING, "Could not parse bridge network " - + "status.", e); return; - } catch (DecoderException e) { + } catch (IOException e) { this.logger.log(Level.WARNING, "Could not parse bridge network " + "status.", e); return; @@ -650,6 +465,8 @@ public class SanitizedBridgesWriter {
/* Write sanitized network status to disk. */ BufferedWriter bw = new BufferedWriter(new FileWriter(statusFile)); + bw.write("@type bridge-network-status 1.0\n"); + bw.write("published " + publicationTime + "\n"); for (String scrubbed : scrubbedLines.values()) { bw.write(scrubbed); } @@ -663,12 +480,7 @@ public class SanitizedBridgesWriter { }
   /**
-   * Sanitizes a bridge server descriptor and writes it to disk. Looks up
-   * up bridge identity hash and publication time in the descriptor
-   * mapping. After sanitizing a server descriptor, its publication time
-   * is noted down, so that all network statuses that might be referencing
-   * this server descriptor can be re-written at the end of the sanitizing
-   * procedure.
+   * Sanitizes a bridge server descriptor and writes it to disk.
    */
   public void sanitizeAndStoreServerDescriptor(byte[] data) {
@@ -678,35 +490,18 @@ public class SanitizedBridgesWriter { return; }
- /* Parse descriptor to generate a sanitized version and to look it up - * in the descriptor mapping. */ - String scrubbedDesc = null; - DescriptorMapping mapping = null; + /* Parse descriptor to generate a sanitized version. */ + String scrubbedDesc = null, published = null; try { BufferedReader br = new BufferedReader(new StringReader( new String(data, "US-ASCII"))); StringBuilder scrubbed = new StringBuilder(); String line = null, hashedBridgeIdentity = null, address = null, - published = null, routerLine = null, scrubbedAddress = null; + routerLine = null, scrubbedAddress = null; List<String> orAddresses = null, scrubbedOrAddresses = null; boolean skipCrypto = false; while ((line = br.readLine()) != null) {
- /* When we have parsed both published and fingerprint line, look - * up descriptor in the descriptor mapping or create a new one if - * there is none. */ - if (mapping == null && published != null && - hashedBridgeIdentity != null) { - String mappingKey = hashedBridgeIdentity + "," + published; - if (this.bridgeDescriptorMappings.containsKey(mappingKey)) { - mapping = this.bridgeDescriptorMappings.get(mappingKey); - } else { - mapping = new DescriptorMapping(hashedBridgeIdentity, - published); - this.bridgeDescriptorMappings.put(mappingKey, mapping); - } - } - /* Skip all crypto parts that might be used to derive the bridge's * identity fingerprint. */ if (skipCrypto && !line.startsWith("-----END ")) { @@ -727,21 +522,18 @@ public class SanitizedBridgesWriter { } orAddresses.add(line.substring("or-address ".length()));
- /* Parse the publication time and add it to the list of descriptor - * publication times to re-write network statuses at the end of - * the sanitizing procedure. */ + /* Parse the publication time to see if we're still inside the + * sanitizing interval. */ } else if (line.startsWith("published ")) { published = line.substring("published ".length()); - if (this.bridgeDescriptorMappingsCutOffTimestamp. + if (this.bridgeSanitizingCutOffTimestamp. compareTo(published) > 0) { - this.logger.log(!this.haveWarnedAboutLimitedMapping + this.logger.log(!this.haveWarnedAboutInterval ? Level.WARNING : Level.FINE, "Sanitizing and storing " + "server descriptor with publication time outside our " - + "descriptor mapping interval. We might not be able to " - + "repair references."); - this.haveWarnedAboutLimitedMapping = true; + + "descriptor sanitizing interval."); + this.haveWarnedAboutInterval = true; } - this.descriptorPublicationTimes.add(published); scrubbed.append(line + "\n");
/* Parse the fingerprint to determine the hashed bridge @@ -775,6 +567,7 @@ public class SanitizedBridgesWriter { } catch (IOException e) { /* There's a persistence problem, so we shouldn't scrub more * IP addresses in this execution. */ + this.persistenceProblemWithSecrets = true; return; } scrubbed.append((line.startsWith("opt ") ? "opt " : "") @@ -792,9 +585,9 @@ public class SanitizedBridgesWriter { * descriptor to disk below. */ } else if (line.startsWith("router-signature")) { String[] routerLineParts = routerLine.split(" "); - scrubbedDesc = "router Unnamed " + scrubbedAddress + " " - + routerLineParts[3] + " " + routerLineParts[4] + " " - + routerLineParts[5] + "\n"; + scrubbedDesc = "router " + routerLineParts[1] + " " + + scrubbedAddress + " " + routerLineParts[3] + " " + + routerLineParts[4] + " " + routerLineParts[5] + "\n"; if (scrubbedOrAddresses != null) { for (String scrubbedOrAddress : scrubbedOrAddresses) { scrubbedDesc = scrubbedDesc += "or-address " @@ -804,14 +597,18 @@ public class SanitizedBridgesWriter { scrubbedDesc += scrubbed.toString(); break;
- /* Replace extra-info digest with the one we know from our - * descriptor mapping (which might be all 0's if we didn't parse - * the extra-info descriptor before). */ + /* Replace extra-info digest with the hashed digest of the + * non-scrubbed descriptor. */ } else if (line.startsWith("opt extra-info-digest ") || line.startsWith("extra-info-digest ")) { + String extraInfoDescriptorIdentifier = line.substring( + line.indexOf("extra-info-digest ") + + "extra-info-digest ".length()); + String hashedExtraInfoDescriptorIdentifier = + DigestUtils.shaHex(Hex.decodeHex( + extraInfoDescriptorIdentifier.toCharArray())).toUpperCase(); scrubbed.append((line.startsWith("opt ") ? "opt " : "") - + "extra-info-digest " - + mapping.extraInfoDescriptorIdentifier.toUpperCase() + + "extra-info-digest " + hashedExtraInfoDescriptorIdentifier + "\n");
/* Possibly sanitize reject lines if they contain the bridge's own @@ -844,7 +641,7 @@ public class SanitizedBridgesWriter { scrubbed.append(line + "\n");
/* Replace node fingerprints in the family line with their hashes - * and nicknames with Unnamed. */ + * and leave nicknames unchanged. */ } else if (line.startsWith("family ")) { StringBuilder familyLine = new StringBuilder("family"); for (String s : line.substring(7).split(" ")) { @@ -852,7 +649,7 @@ public class SanitizedBridgesWriter { familyLine.append(" $" + DigestUtils.shaHex(Hex.decodeHex( s.substring(1).toCharArray())).toUpperCase()); } else { - familyLine.append(" Unnamed"); + familyLine.append(" " + s); } } scrubbed.append(familyLine.toString() + "\n"); @@ -888,27 +685,44 @@ public class SanitizedBridgesWriter { return; }
- /* Determine new descriptor digest and write it to descriptor - * mapping. */ - String scrubbedHash = DigestUtils.shaHex(scrubbedDesc); - mapping.serverDescriptorIdentifier = scrubbedHash; - /* Determine filename of sanitized server descriptor. */ - String dyear = mapping.published.substring(0, 4); - String dmonth = mapping.published.substring(5, 7); + String descriptorDigest = null; + try { + String ascii = new String(data, "US-ASCII"); + String startToken = "router "; + String sigToken = "\nrouter-signature\n"; + int start = ascii.indexOf(startToken); + int sig = ascii.indexOf(sigToken) + sigToken.length(); + if (start >= 0 && sig >= 0 && sig > start) { + byte[] forDigest = new byte[sig - start]; + System.arraycopy(data, start, forDigest, 0, sig - start); + descriptorDigest = DigestUtils.shaHex(DigestUtils.sha(forDigest)); + } + } catch (UnsupportedEncodingException e) { + /* Handle below. */ + } + if (descriptorDigest == null) { + this.logger.log(Level.WARNING, "Could not calculate server " + + "descriptor digest."); + return; + } + String dyear = published.substring(0, 4); + String dmonth = published.substring(5, 7); File newFile = new File( this.sanitizedBridgesDirectory.getAbsolutePath() + "/" + dyear + "/" + dmonth + "/server-descriptors/" - + "/" + scrubbedHash.charAt(0) + "/" - + scrubbedHash.charAt(1) + "/" - + scrubbedHash); + + "/" + descriptorDigest.charAt(0) + "/" + + descriptorDigest.charAt(1) + "/" + + descriptorDigest);
/* Write sanitized server descriptor to disk, including all its parent * directories. */ try { newFile.getParentFile().mkdirs(); BufferedWriter bw = new BufferedWriter(new FileWriter(newFile)); + bw.write("@type bridge-server-descriptor 1.0\n"); bw.write(scrubbedDesc); + bw.write("router-digest " + descriptorDigest.toUpperCase() + "\n"); bw.close(); } catch (IOException e) { this.logger.log(Level.WARNING, "Could not write sanitized server " @@ -918,83 +732,33 @@ public class SanitizedBridgesWriter { }
   /**
-   * Sanitizes an extra-info descriptor and writes it to disk. Looks up
-   * the bridge identity hash and publication time in the descriptor
-   * mapping. If the corresponding server descriptor was sanitized before,
-   * it is re-written to include the new extra-info descriptor digest and
-   * the publication time is noted down, too, so that all network statuses
-   * possibly referencing this extra-info descriptor and its corresponding
-   * server descriptor can be re-written at the end of the sanitizing
-   * procedure.
+   * Sanitizes an extra-info descriptor and writes it to disk.
    */
   public void sanitizeAndStoreExtraInfoDescriptor(byte[] data) {
- /* Parse descriptor to generate a sanitized version and to look it up - * in the descriptor mapping. */ + /* Parse descriptor to generate a sanitized version. */ String scrubbedDesc = null, published = null; - DescriptorMapping mapping = null; try { BufferedReader br = new BufferedReader(new StringReader(new String( data, "US-ASCII"))); String line = null; StringBuilder scrubbed = null; String hashedBridgeIdentity = null; - boolean hasParsedBridgeStatsEndLine = false; while ((line = br.readLine()) != null) {
- /* When we have parsed both published and fingerprint line, look - * up descriptor in the descriptor mapping or create a new one if - * there is none. */ - if (mapping == null && published != null && - hashedBridgeIdentity != null) { - String mappingKey = hashedBridgeIdentity + "," + published; - if (this.bridgeDescriptorMappings.containsKey(mappingKey)) { - mapping = this.bridgeDescriptorMappings.get(mappingKey); - } else { - mapping = new DescriptorMapping(hashedBridgeIdentity, - published); - this.bridgeDescriptorMappings.put(mappingKey, mapping); - } - } - /* Parse bridge identity from extra-info line and replace it with * its hash in the sanitized descriptor. */ + String[] parts = line.split(" "); if (line.startsWith("extra-info ")) { hashedBridgeIdentity = DigestUtils.shaHex(Hex.decodeHex( - line.split(" ")[2].toCharArray())).toLowerCase(); - scrubbed = new StringBuilder("extra-info Unnamed " + parts[2].toCharArray())).toLowerCase(); + scrubbed = new StringBuilder("extra-info " + parts[1] + " " + hashedBridgeIdentity.toUpperCase() + "\n");
- /* Parse the publication time and add it to the list of descriptor - * publication times to re-write network statuses at the end of - * the sanitizing procedure. */ + /* Parse the publication time to determine the file name. */ } else if (line.startsWith("published ")) { scrubbed.append(line + "\n"); published = line.substring("published ".length()); - if (this.bridgeDescriptorMappingsCutOffTimestamp. - compareTo(published) > 0) { - this.logger.log(!this.haveWarnedAboutLimitedMapping - ? Level.WARNING : Level.FINE, "Sanitizing and storing " - + "extra-info descriptor with publication time outside " - + "our descriptor mapping interval. We might not be able " - + "to repair references."); - this.haveWarnedAboutLimitedMapping = true; - } - - /* Write bridge-stats lines unmodified to the sanitized - * descriptor and make sure that there's always a bridge-stats-end - * line preceding the bridge-ips line. */ - } else if (line.startsWith("bridge-stats-end ")) { - scrubbed.append(line + "\n"); - hasParsedBridgeStatsEndLine = true; - } else if (line.startsWith("bridge-ips ")) { - if (!hasParsedBridgeStatsEndLine) { - this.logger.fine("bridge-ips line without preceding " - + "bridge-stats-end line in bridge descriptor. " - + "Skipping."); - return; - } - scrubbed.append(line + "\n");
/* Write the following lines unmodified to the sanitized * descriptor. */ @@ -1003,7 +767,11 @@ public class SanitizedBridgesWriter { || line.startsWith("geoip-start-time ") || line.startsWith("geoip-client-origins ") || line.startsWith("geoip-db-digest ") - || line.startsWith("conn-bi-direct ")) { + || line.startsWith("conn-bi-direct ") + || line.startsWith("bridge-") + || line.startsWith("dirreq-") + || line.startsWith("cell-") + || line.startsWith("exit-")) { scrubbed.append(line + "\n");
/* When we reach the signature, we're done. Write the sanitized @@ -1011,11 +779,6 @@ public class SanitizedBridgesWriter { } else if (line.startsWith("router-signature")) { scrubbedDesc = scrubbed.toString(); break; - /* Don't include statistics that should only be contained in relay - * extra-info descriptors. */ - } else if (line.startsWith("dirreq-") || line.startsWith("cell-") - || line.startsWith("exit-")) { - continue;
/* If we encounter an unrecognized line, stop parsing and print * out a warning. We might have overlooked sensitive information @@ -1037,34 +800,44 @@ public class SanitizedBridgesWriter { return; }
- /* Determine new descriptor digest and check if write it to descriptor - * mapping. */ - String scrubbedDescHash = DigestUtils.shaHex(scrubbedDesc); - boolean extraInfoDescriptorIdentifierHasChanged = - !scrubbedDescHash.equals(mapping.extraInfoDescriptorIdentifier); - mapping.extraInfoDescriptorIdentifier = scrubbedDescHash; - if (extraInfoDescriptorIdentifierHasChanged && - !mapping.serverDescriptorIdentifier.equals(NULL_REFERENCE)) { - this.rewriteServerDescriptor(mapping); - this.descriptorPublicationTimes.add(published); + /* Determine filename of sanitized extra-info descriptor. */ + String descriptorDigest = null; + try { + String ascii = new String(data, "US-ASCII"); + String startToken = "extra-info "; + String sigToken = "\nrouter-signature\n"; + int start = ascii.indexOf(startToken); + int sig = ascii.indexOf(sigToken) + sigToken.length(); + if (start >= 0 && sig >= 0 && sig > start) { + byte[] forDigest = new byte[sig - start]; + System.arraycopy(data, start, forDigest, 0, sig - start); + descriptorDigest = DigestUtils.shaHex(DigestUtils.sha(forDigest)); + } + } catch (UnsupportedEncodingException e) { + /* Handle below. */ } - - /* Determine filename of sanitized server descriptor. */ - String dyear = mapping.published.substring(0, 4); - String dmonth = mapping.published.substring(5, 7); + if (descriptorDigest == null) { + this.logger.log(Level.WARNING, "Could not calculate extra-info " + + "descriptor digest."); + return; + } + String dyear = published.substring(0, 4); + String dmonth = published.substring(5, 7); File newFile = new File( this.sanitizedBridgesDirectory.getAbsolutePath() + "/" + dyear + "/" + dmonth + "/extra-infos/" - + scrubbedDescHash.charAt(0) + "/" - + scrubbedDescHash.charAt(1) + "/" - + scrubbedDescHash); + + descriptorDigest.charAt(0) + "/" + + descriptorDigest.charAt(1) + "/" + + descriptorDigest);
- /* Write sanitized server descriptor to disk, including all its parent - * directories. */ + /* Write sanitized extra-info descriptor to disk, including all its + * parent directories. */ try { newFile.getParentFile().mkdirs(); BufferedWriter bw = new BufferedWriter(new FileWriter(newFile)); + bw.write("@type bridge-extra-info 1.0\n"); bw.write(scrubbedDesc); + bw.write("router-digest " + descriptorDigest.toUpperCase() + "\n"); bw.close(); } catch (Exception e) { this.logger.log(Level.WARNING, "Could not write sanitized " @@ -1072,300 +845,6 @@ public class SanitizedBridgesWriter { } }
- public void storeSanitizedNetworkStatus(byte[] data, String published) { - if (this.bridgeDescriptorMappingsCutOffTimestamp. - compareTo(published) > 0) { - this.logger.log(!this.haveWarnedAboutLimitedMapping ? Level.WARNING - : Level.FINE, "Storing sanitized network status with " - + "publication time outside our descriptor mapping interval. " - + "We might not be able to repair references."); - this.haveWarnedAboutLimitedMapping = true; - } - String scrubbed = null; - try { - String ascii = new String(data, "US-ASCII"); - BufferedReader br2 = new BufferedReader(new StringReader(ascii)); - StringBuilder sb = new StringBuilder(); - String line = null; - while ((line = br2.readLine()) != null) { - if (line.startsWith("r ")) { - String hashedBridgeIdentity = Hex.encodeHexString( - Base64.decodeBase64(line.split(" ")[2] + "==")). - toLowerCase(); - String hashedBridgeIdentityBase64 = line.split(" ")[2]; - String readServerDescId = Hex.encodeHexString( - Base64.decodeBase64(line.split(" ")[3] + "==")). - toLowerCase(); - String descPublished = line.split(" ")[4] + " " - + line.split(" ")[5]; - String address = line.split(" ")[6]; - String mappingKey = (hashedBridgeIdentity + "," - + descPublished).toLowerCase(); - DescriptorMapping mapping = null; - if (this.bridgeDescriptorMappings.containsKey(mappingKey)) { - mapping = this.bridgeDescriptorMappings.get(mappingKey); - } else { - mapping = new DescriptorMapping(hashedBridgeIdentity. - toLowerCase(), descPublished); - mapping.serverDescriptorIdentifier = readServerDescId; - this.bridgeDescriptorMappings.put(mappingKey, mapping); - } - String sdi = Base64.encodeBase64String(Hex.decodeHex( - mapping.serverDescriptorIdentifier.toCharArray())). - substring(0, 27); - String orPort = line.split(" ")[7]; - String dirPort = line.split(" ")[8]; - sb.append("r Unnamed " - + hashedBridgeIdentityBase64 + " " + sdi + " " - + descPublished + " " + address + " " + orPort + " " - + dirPort + "\n"); - } else { - sb.append(line + "\n"); - } - } - scrubbed = sb.toString(); - br2.close(); - } catch (DecoderException e) { - this.logger.log(Level.WARNING, "Could not parse server descriptor " - + "identifier. This must be a bug.", e); - return; - } catch (IOException e) { - this.logger.log(Level.WARNING, "Could not parse previously " - + "sanitized network status.", e); - return; - } - - /* Check if we need to overwrite the status file on disk. */ - if (new String(data).equals(scrubbed)) { - this.logger.finer("The bridge network status published " + published - + " has not changed, so we're not attempting to rewrite it."); - return; - } - - try { - /* Determine file name. */ - String syear = published.substring(0, 4); - String smonth = published.substring(5, 7); - String sday = published.substring(8, 10); - String stime = published.substring(11, 13) - + published.substring(14, 16) - + published.substring(17, 19); - File statusFile = new File( - this.sanitizedBridgesDirectory.getAbsolutePath() + "/" + syear - + "/" + smonth + "/statuses/" + sday + "/" + syear + smonth - + sday + "-" + stime + "-" - + "4A0CCD2DDC7995083D73F5D667100C8A5831F16D"); - - /* Create all parent directories to write this network status. */ - statusFile.getParentFile().mkdirs(); - - /* Write sanitized network status to disk. 
*/ - BufferedWriter bw = new BufferedWriter(new FileWriter(statusFile)); - bw.write(scrubbed); - bw.close(); - } catch (IOException e) { - this.logger.log(Level.WARNING, "Could not write previously " - + "sanitized network status.", e); - return; - } - } - - public void storeSanitizedServerDescriptor(byte[] data) { - try { - String ascii = new String(data, "US-ASCII"); - BufferedReader br2 = new BufferedReader(new StringReader(ascii)); - StringBuilder sb = new StringBuilder(); - String line2 = null, published = null; - String hashedBridgeIdentity = null; - DescriptorMapping mapping = null; - while ((line2 = br2.readLine()) != null) { - if (mapping == null && published != null && - hashedBridgeIdentity != null) { - String mappingKey = (hashedBridgeIdentity + "," + published). - toLowerCase(); - if (this.bridgeDescriptorMappings.containsKey(mappingKey)) { - mapping = this.bridgeDescriptorMappings.get(mappingKey); - } else { - mapping = new DescriptorMapping(hashedBridgeIdentity. - toLowerCase(), published); - this.bridgeDescriptorMappings.put(mappingKey, mapping); - } - } - if (line2.startsWith("router ")) { - sb.append("router Unnamed " + line2.split(" ")[2] + " " - + line2.split(" ")[3] + " " + line2.split(" ")[4] + " " - + line2.split(" ")[5] + "\n"); - } else if (line2.startsWith("published ")) { - published = line2.substring("published ".length()); - if (this.bridgeDescriptorMappingsCutOffTimestamp. - compareTo(published) > 0) { - this.logger.log(!this.haveWarnedAboutLimitedMapping - ? Level.WARNING : Level.FINE, "Storing sanitized " - + "server descriptor with publication time outside our " - + "descriptor mapping interval. We might not be able to " - + "repair references."); - this.haveWarnedAboutLimitedMapping = true; - } - sb.append(line2 + "\n"); - this.descriptorPublicationTimes.add(published); - } else if (line2.startsWith("opt fingerprint ") || - line2.startsWith("fingerprint ")) { - hashedBridgeIdentity = line2.substring( - line2.indexOf("fingerprint") + "fingerprint".length()). - replaceAll(" ", "").toLowerCase(); - sb.append(line2 + "\n"); - } else if (line2.startsWith("opt extra-info-digest ") || - line2.startsWith("extra-info-digest")) { - sb.append((line2.startsWith("opt ") ? 
"opt " : "") - + "extra-info-digest " - + mapping.extraInfoDescriptorIdentifier.toUpperCase() - + "\n"); - } else { - sb.append(line2 + "\n"); - } - } - br2.close(); - String scrubbedDesc = sb.toString(); - String scrubbedHash = DigestUtils.shaHex(scrubbedDesc); - - mapping.serverDescriptorIdentifier = scrubbedHash; - String dyear = published.substring(0, 4); - String dmonth = published.substring(5, 7); - File newFile = new File( - this.sanitizedBridgesDirectory.getAbsolutePath() + "/" - + dyear + "/" + dmonth + "/server-descriptors/" - + scrubbedHash.substring(0, 1) + "/" - + scrubbedHash.substring(1, 2) + "/" - + scrubbedHash); - this.logger.finer("Storing server descriptor " - + newFile.getAbsolutePath()); - newFile.getParentFile().mkdirs(); - BufferedWriter bw = new BufferedWriter(new FileWriter( - newFile)); - bw.write(scrubbedDesc); - bw.close(); - } catch (IOException e) { - this.logger.log(Level.WARNING, "Could not store unsanitized server " - + "descriptor.", e); - } - } - - public void storeSanitizedExtraInfoDescriptor(byte[] data) { - try { - String ascii = new String(data, "US-ASCII"); - BufferedReader br2 = new BufferedReader(new StringReader(ascii)); - StringBuilder sb = new StringBuilder(); - String line2 = null, published = null; - String hashedBridgeIdentity = null; - DescriptorMapping mapping = null; - while ((line2 = br2.readLine()) != null) { - if (mapping == null && published != null && - hashedBridgeIdentity != null) { - String mappingKey = (hashedBridgeIdentity + "," + published). - toLowerCase(); - if (this.bridgeDescriptorMappings.containsKey(mappingKey)) { - mapping = this.bridgeDescriptorMappings.get(mappingKey); - } else { - mapping = new DescriptorMapping(hashedBridgeIdentity. - toLowerCase(), published); - this.bridgeDescriptorMappings.put(mappingKey, mapping); - } - } - if (line2.startsWith("extra-info ")) { - hashedBridgeIdentity = line2.split(" ")[2]; - sb.append("extra-info Unnamed " + hashedBridgeIdentity - + "\n"); - } else if (line2.startsWith("published ")) { - sb.append(line2 + "\n"); - published = line2.substring("published ".length()); - if (this.bridgeDescriptorMappingsCutOffTimestamp. - compareTo(published) > 0) { - this.logger.log(!this.haveWarnedAboutLimitedMapping - ? Level.WARNING : Level.FINE, "Storing sanitized " - + "extra-info descriptor with publication time outside " - + "our descriptor mapping interval. 
We might not be able " - + "to repair references."); - this.haveWarnedAboutLimitedMapping = true; - } - this.descriptorPublicationTimes.add(published); - } else { - sb.append(line2 + "\n"); - } - } - br2.close(); - String scrubbedDesc = sb.toString(); - String scrubbedHash = DigestUtils.shaHex(scrubbedDesc); - mapping.extraInfoDescriptorIdentifier = scrubbedHash; - String dyear = published.substring(0, 4); - String dmonth = published.substring(5, 7); - File newFile = new File( - this.sanitizedBridgesDirectory.getAbsolutePath() + "/" - + dyear + "/" + dmonth + "/extra-infos/" - + scrubbedHash.substring(0, 1) + "/" - + scrubbedHash.substring(1, 2) + "/" - + scrubbedHash); - this.logger.finer("Storing extra-info descriptor " - + newFile.getAbsolutePath()); - newFile.getParentFile().mkdirs(); - BufferedWriter bw = new BufferedWriter(new FileWriter( - newFile)); - bw.write(scrubbedDesc); - bw.close(); - } catch (IOException e) { - this.logger.log(Level.WARNING, "Could not store sanitized " - + "extra-info descriptor.", e); - } - } - - private void rewriteNetworkStatus(File status, String published) { - try { - FileInputStream fis = new FileInputStream(status); - BufferedInputStream bis = new BufferedInputStream(fis); - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - int len; - byte[] data2 = new byte[1024]; - while ((len = bis.read(data2, 0, 1024)) >= 0) { - baos.write(data2, 0, len); - } - fis.close(); - byte[] allData = baos.toByteArray(); - this.storeSanitizedNetworkStatus(allData, published); - } catch (IOException e) { - this.logger.log(Level.WARNING, "Could not rewrite network " - + "status.", e); - } - } - - private void rewriteServerDescriptor(DescriptorMapping mapping) { - try { - String dyear = mapping.published.substring(0, 4); - String dmonth = mapping.published.substring(5, 7); - File serverDescriptorFile = new File( - this.sanitizedBridgesDirectory.getAbsolutePath() + "/" - + dyear + "/" + dmonth + "/server-descriptors/" - + mapping.serverDescriptorIdentifier.substring(0, 1) + "/" - + mapping.serverDescriptorIdentifier.substring(1, 2) + "/" - + mapping.serverDescriptorIdentifier); - FileInputStream fis = new FileInputStream(serverDescriptorFile); - BufferedInputStream bis = new BufferedInputStream(fis); - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - int len; - byte[] data2 = new byte[1024]; - while ((len = bis.read(data2, 0, 1024)) >= 0) { - baos.write(data2, 0, len); - } - fis.close(); - byte[] allData = baos.toByteArray(); - this.storeSanitizedServerDescriptor(allData); - serverDescriptorFile.delete(); - this.logger.finer("Deleting server descriptor " - + serverDescriptorFile.getAbsolutePath()); - } catch (IOException e) { - this.logger.log(Level.WARNING, "Could not rewrite server " - + "descriptor.", e); - } - } - /** * Rewrite all network statuses that might contain references to server * descriptors we added or updated in this execution. This applies to @@ -1374,107 +853,10 @@ public class SanitizedBridgesWriter { */ public void finishWriting() {
- /* Prepare parsing and formatting timestamps. */ - SimpleDateFormat dateTimeFormat = - new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC")); - SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd"); - dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); - SimpleDateFormat statusFileFormat = - new SimpleDateFormat("yyyyMMdd-HHmmss"); - statusFileFormat.setTimeZone(TimeZone.getTimeZone("UTC")); - - /* Iterate over publication timestamps of previously sanitized - * descriptors. For every publication timestamp, we want to re-write - * the network statuses that we published up to 24 hours after that - * descriptor. We keep the timestamp of the last re-written network - * status in order to make sure we re-writing any network status at - * most once. */ - this.logger.fine("Rewriting network statuses that might have " - + "changed."); - String lastDescriptorPublishedPlus24Hours = "1970-01-01 00:00:00"; - for (String published : this.descriptorPublicationTimes) { - if (published.compareTo(lastDescriptorPublishedPlus24Hours) <= 0) { - continue; - } - // find statuses 24 hours after published - SortedSet<File> statusesToRewrite = new TreeSet<File>(); - long publishedTime; - try { - publishedTime = dateTimeFormat.parse(published).getTime(); - } catch (ParseException e) { - this.logger.log(Level.WARNING, "Could not parse publication " - + "timestamp '" + published + "'. Skipping.", e); - continue; - } - String[] dayOne = dateFormat.format(publishedTime).split("-"); - - File publishedDayOne = new File( - this.sanitizedBridgesDirectory.getAbsolutePath() + "/" - + dayOne[0] + "/" + dayOne[1] + "/statuses/" + dayOne[2]); - if (publishedDayOne.exists()) { - statusesToRewrite.addAll(Arrays.asList(publishedDayOne. - listFiles())); - } - long plus24Hours = publishedTime + 24L * 60L * 60L * 1000L; - lastDescriptorPublishedPlus24Hours = dateFormat.format(plus24Hours); - String[] dayTwo = dateFormat.format(plus24Hours).split("-"); - File publishedDayTwo = new File( - this.sanitizedBridgesDirectory.getAbsolutePath() + "/" - + dayTwo[0] + "/" + dayTwo[1] + "/statuses/" + dayTwo[2]); - if (publishedDayTwo.exists()) { - statusesToRewrite.addAll(Arrays.asList(publishedDayTwo. - listFiles())); - } - for (File status : statusesToRewrite) { - String statusPublished = status.getName().substring(0, 15); - long statusTime; - try { - statusTime = statusFileFormat.parse(statusPublished).getTime(); - } catch (ParseException e) { - this.logger.log(Level.WARNING, "Could not parse network " - + "status publication timestamp '" + published - + "'. Skipping.", e); - continue; - } - if (statusTime < publishedTime || statusTime > plus24Hours) { - continue; - } - this.rewriteNetworkStatus(status, - dateTimeFormat.format(statusTime)); - } - } - this.logger.fine("Finished rewriting network statuses."); - - /* Write descriptor mappings to disk. */ - try { - this.logger.fine("Writing descriptor mappings to disk."); - BufferedWriter bw = new BufferedWriter(new FileWriter( - this.bridgeDescriptorMappingsFile)); - int wrote = 0, skipped = 0; - for (DescriptorMapping mapping : - this.bridgeDescriptorMappings.values()) { - String mappingString = mapping.toString(); - if (this.bridgeDescriptorMappingsCutOffTimestamp. 
- compareTo(mappingString.split(",")[1]) > 0) { - skipped++; - } else { - wrote++; - bw.write(mapping.toString() + "\n"); - } - } - bw.close(); - this.logger.fine("Finished writing " + wrote + " descriptor " - + "mappings to disk, skipped " + skipped + "."); - } catch (IOException e) { - this.logger.log(Level.WARNING, "Could not write descriptor " - + "mappings to disk.", e); - } - /* Delete secrets that we don't need anymore. */ if (!this.secretsForHashingIPAddresses.isEmpty() && this.secretsForHashingIPAddresses.firstKey().compareTo( - this.bridgeDescriptorMappingsCutOffTimestamp) < 0) { + this.bridgeSanitizingCutOffTimestamp) < 0) { try { int kept = 0, deleted = 0; BufferedWriter bw = new BufferedWriter(new FileWriter( @@ -1482,7 +864,7 @@ public class SanitizedBridgesWriter { for (Map.Entry<String, byte[]> e : this.secretsForHashingIPAddresses.entrySet()) { if (e.getKey().compareTo( - this.bridgeDescriptorMappingsCutOffTimestamp) < 0) { + this.bridgeSanitizingCutOffTimestamp) < 0) { deleted++; } else { bw.write(e.getKey() + "," + Hex.encodeHexString(e.getValue())