commit ff9ec67b046ce37a1d06834edb3bd980bce98a9c
Author: Karsten Loesing <karsten.loesing@gmx.net>
Date:   Mon Jan 11 09:27:57 2016 +0100

    Stop updating deprecated connbidirect.csv.

    This removes another psql dependency.
---
 modules/legacy/db/tordir.sql                       |  19 --
 .../legacy/src/org/torproject/ernie/cron/Main.java |  13 -
 .../cron/performance/PerformanceStatsImporter.java | 271 --------------------
 shared/bin/50-run-legacy-stats.sh                  |   1 -
 website/web/WEB-INF/connbidirect-data.jsp          |   1 +
 5 files changed, 1 insertion(+), 304 deletions(-)
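
For context, connbidirect.csv was the CSV exported from the stats_connbidirect view by the line removed from shared/bin/50-run-legacy-stats.sh below; the view itself is dropped from tordir.sql in the same change. A minimal sketch of that now-removed export step, assuming the legacy tordir database and the view still exist:

    # Removed export step; assumes the legacy tordir database and the
    # stats_connbidirect view are still present.
    psql -c 'COPY (SELECT * FROM stats_connbidirect) TO STDOUT WITH CSV HEADER;' tordir > stats/connbidirect.csv
    # Resulting columns, per the dropped view definition:
    # date,source,below,read,write,both

The replacement data file is documented on connbidirect2-data.html, as noted in the website change at the end of this diff.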
diff --git a/modules/legacy/db/tordir.sql b/modules/legacy/db/tordir.sql
index 2c97829..04d31c0 100644
--- a/modules/legacy/db/tordir.sql
+++ b/modules/legacy/db/tordir.sql
@@ -104,19 +104,6 @@ CREATE TABLE consensus (
   CONSTRAINT consensus_pkey PRIMARY KEY (validafter)
 );
 
--- TABLE connbidirect
--- Contain conn-bi-direct stats strings
-CREATE TABLE connbidirect (
-  source CHARACTER(40) NOT NULL,
-  statsend TIMESTAMP WITHOUT TIME ZONE NOT NULL,
-  seconds INTEGER NOT NULL,
-  belownum BIGINT NOT NULL,
-  readnum BIGINT NOT NULL,
-  writenum BIGINT NOT NULL,
-  bothnum BIGINT NOT NULL,
-  CONSTRAINT connbidirect_pkey PRIMARY KEY (source, statsend)
-);
-
 -- TABLE network_size
 CREATE TABLE network_size (
   date DATE NOT NULL,
@@ -952,9 +939,3 @@ UNION ALL
     current_date - 3)
   ORDER BY 1, 2, 3;
 
--- View for exporting connbidirect statistics.
-CREATE VIEW stats_connbidirect AS
-SELECT DATE(statsend) AS date, source, belownum AS below, readnum AS read,
-  writenum AS write, bothnum AS "both" FROM connbidirect
-  WHERE DATE(statsend) < current_date - 1 ORDER BY 1, 2;
-
diff --git a/modules/legacy/src/org/torproject/ernie/cron/Main.java b/modules/legacy/src/org/torproject/ernie/cron/Main.java
index 9bd2d34..fb0697c 100644
--- a/modules/legacy/src/org/torproject/ernie/cron/Main.java
+++ b/modules/legacy/src/org/torproject/ernie/cron/Main.java
@@ -6,7 +6,6 @@ import java.io.File;
 import java.util.logging.Logger;
 
 import org.torproject.ernie.cron.network.ConsensusStatsFileHandler;
-import org.torproject.ernie.cron.performance.PerformanceStatsImporter;
 import org.torproject.ernie.cron.performance.TorperfProcessor;
 
 /**
@@ -53,18 +52,6 @@ public class Main {
         rddi.importRelayDescriptors();
       }
       rddi.closeConnection();
-
-      // Import conn-bi-direct statistics.
-      PerformanceStatsImporter psi = new PerformanceStatsImporter(
-          config.getWriteRelayDescriptorDatabase() ?
-          config.getRelayDescriptorDatabaseJDBC() : null,
-          config.getWriteRelayDescriptorsRawFiles() ?
-          config.getRelayDescriptorRawFilesDirectory() : null,
-          new File(config.getDirectoryArchivesDirectory()),
-          statsDirectory,
-          config.getKeepDirectoryArchiveImportHistory());
-      psi.importRelayDescriptors();
-      psi.closeConnection();
     }
 
     // Prepare consensus stats file handler (used for stats on running
diff --git a/modules/legacy/src/org/torproject/ernie/cron/performance/PerformanceStatsImporter.java b/modules/legacy/src/org/torproject/ernie/cron/performance/PerformanceStatsImporter.java
deleted file mode 100644
index 815b37f..0000000
--- a/modules/legacy/src/org/torproject/ernie/cron/performance/PerformanceStatsImporter.java
+++ /dev/null
@@ -1,271 +0,0 @@
-/* Copyright 2012 The Tor Project
- * See LICENSE for licensing information */
-package org.torproject.ernie.cron.performance;
-
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Timestamp;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Iterator;
-import java.util.TimeZone;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.torproject.descriptor.Descriptor;
-import org.torproject.descriptor.DescriptorFile;
-import org.torproject.descriptor.DescriptorReader;
-import org.torproject.descriptor.DescriptorSourceFactory;
-import org.torproject.descriptor.ExtraInfoDescriptor;
-
-public class PerformanceStatsImporter {
-
-  /**
-   * How many records to commit with each database transaction.
-   */
-  private final long autoCommitCount = 500;
-
-  /**
-   * Keep track of the number of records committed before each transaction
-   */
-  private int rbsCount = 0;
-
-  /**
-   * Relay descriptor database connection.
-   */
-  private Connection conn;
-
-  /**
-   * Prepared statement to check whether a given conn-bi-direct stats
-   * string has been imported into the database before.
-   */
-  private PreparedStatement psBs;
-
-  /**
-   * Prepared statement to insert a conn-bi-direct stats string into the
-   * database.
-   */
-  private PreparedStatement psB;
-
-  /**
-   * Logger for this class.
-   */
-  private Logger logger;
-
-  /**
-   * Directory for writing raw import files.
-   */
-  private String rawFilesDirectory;
-
-  /**
-   * Raw import file containing conn-bi-direct stats strings.
-   */
-  private BufferedWriter connBiDirectOut;
-
-  /**
-   * Date format to parse timestamps.
-   */
-  private SimpleDateFormat dateTimeFormat;
-
-  private boolean importIntoDatabase;
-  private boolean writeRawImportFiles;
-
-  private File archivesDirectory;
-  private File statsDirectory;
-  private boolean keepImportHistory;
-
-  /**
-   * Initialize database importer by connecting to the database and
-   * preparing statements.
-   */
-  public PerformanceStatsImporter(String connectionURL,
-      String rawFilesDirectory, File archivesDirectory,
-      File statsDirectory, boolean keepImportHistory) {
-
-    if (archivesDirectory == null ||
-        statsDirectory == null) {
-      throw new IllegalArgumentException();
-    }
-    this.archivesDirectory = archivesDirectory;
-    this.statsDirectory = statsDirectory;
-    this.keepImportHistory = keepImportHistory;
-
-    /* Initialize logger. */
-    this.logger = Logger.getLogger(
-        PerformanceStatsImporter.class.getName());
-
-    if (connectionURL != null) {
-      try {
-        /* Connect to database. */
-        this.conn = DriverManager.getConnection(connectionURL);
-
-        /* Turn autocommit off */
-        this.conn.setAutoCommit(false);
-
-        /* Prepare statements. */
-        this.psBs = conn.prepareStatement("SELECT COUNT(*) "
-            + "FROM connbidirect WHERE source = ? AND statsend = ?");
-        this.psB = conn.prepareStatement("INSERT INTO connbidirect "
-            + "(source, statsend, seconds, belownum, readnum, writenum, "
-            + "bothnum) VALUES (?, ?, ?, ?, ?, ?, ?)");
-        this.importIntoDatabase = true;
-      } catch (SQLException e) {
-        this.logger.log(Level.WARNING, "Could not connect to database or "
-            + "prepare statements.", e);
-      }
-    }
-
-    /* Remember where we want to write raw import files. */
-    if (rawFilesDirectory != null) {
-      this.rawFilesDirectory = rawFilesDirectory;
-      this.writeRawImportFiles = true;
-    }
-
-    /* Initialize date format, so that we can format timestamps. */
-    this.dateTimeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
-    this.dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
-  }
-
-  /**
-   * Insert a conn-bi-direct stats string into the database.
-   */
-  private void addConnBiDirect(String source, long statsEndMillis,
-      long seconds, long below, long read, long write, long both) {
-    String statsEnd = this.dateTimeFormat.format(statsEndMillis);
-    if (this.importIntoDatabase) {
-      try {
-        Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
-        Timestamp statsEndTimestamp = new Timestamp(statsEndMillis);
-        this.psBs.setString(1, source);
-        this.psBs.setTimestamp(2, statsEndTimestamp, cal);
-        ResultSet rs = psBs.executeQuery();
-        rs.next();
-        if (rs.getInt(1) == 0) {
-          this.psB.clearParameters();
-          this.psB.setString(1, source);
-          this.psB.setTimestamp(2, statsEndTimestamp, cal);
-          this.psB.setLong(3, seconds);
-          this.psB.setLong(4, below);
-          this.psB.setLong(5, read);
-          this.psB.setLong(6, write);
-          this.psB.setLong(7, both);
-          this.psB.executeUpdate();
-          rbsCount++;
-          if (rbsCount % autoCommitCount == 0) {
-            this.conn.commit();
-          }
-        }
-      } catch (SQLException e) {
-        this.logger.log(Level.WARNING, "Could not add conn-bi-direct "
-            + "stats string. We won't make any further SQL requests in "
-            + "this execution.", e);
-        this.importIntoDatabase = false;
-      }
-    }
-    if (this.writeRawImportFiles) {
-      try {
-        if (this.connBiDirectOut == null) {
-          new File(rawFilesDirectory).mkdirs();
-          this.connBiDirectOut = new BufferedWriter(new FileWriter(
-              rawFilesDirectory + "/connbidirect.sql"));
-          this.connBiDirectOut.write(" COPY connbidirect (source, "
-              + "statsend, seconds, belownum, readnum, writenum, "
-              + "bothnum) FROM stdin;\n");
-        }
-        this.connBiDirectOut.write(source + "\t" + statsEnd + "\t"
-            + seconds + "\t" + below + "\t" + read + "\t" + write + "\t"
-            + both + "\n");
-      } catch (IOException e) {
-        this.logger.log(Level.WARNING, "Could not write conn-bi-direct "
-            + "stats string to raw database import file. We won't make "
-            + "any further attempts to write raw import files in this "
-            + "execution.", e);
-        this.writeRawImportFiles = false;
-      }
-    }
-  }
-
-  public void importRelayDescriptors() {
-    if (archivesDirectory.exists()) {
-      logger.fine("Importing files in directory " + archivesDirectory
-          + "/...");
-      DescriptorReader reader =
-          DescriptorSourceFactory.createDescriptorReader();
-      reader.addDirectory(archivesDirectory);
-      if (keepImportHistory) {
-        reader.setExcludeFiles(new File(statsDirectory,
-            "performance-stats-relay-descriptor-history"));
-      }
-      Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
-      while (descriptorFiles.hasNext()) {
-        DescriptorFile descriptorFile = descriptorFiles.next();
-        if (descriptorFile.getDescriptors() != null) {
-          for (Descriptor descriptor : descriptorFile.getDescriptors()) {
-            if (descriptor instanceof ExtraInfoDescriptor) {
-              this.addExtraInfoDescriptor(
-                  (ExtraInfoDescriptor) descriptor);
-            }
-          }
-        }
-      }
-    }
-
-    logger.info("Finished importing relay descriptors.");
-  }
-
-  private void addExtraInfoDescriptor(ExtraInfoDescriptor descriptor) {
-    if (descriptor.getConnBiDirectStatsEndMillis() >= 0L) {
-      this.addConnBiDirect(descriptor.getFingerprint(),
-          descriptor.getConnBiDirectStatsEndMillis(),
-          descriptor.getConnBiDirectStatsIntervalLength(),
-          descriptor.getConnBiDirectBelow(),
-          descriptor.getConnBiDirectRead(),
-          descriptor.getConnBiDirectWrite(),
-          descriptor.getConnBiDirectBoth());
-    }
-  }
-
-  /**
-   * Close the relay descriptor database connection.
-   */
-  public void closeConnection() {
-
-    /* Log stats about imported descriptors. */
-    this.logger.info(String.format("Finished importing relay "
-        + "descriptors: %d conn-bi-direct stats lines", rbsCount));
-
-    /* Commit any stragglers before closing. */
-    if (this.conn != null) {
-      try {
-        this.conn.commit();
-      } catch (SQLException e) {
-        this.logger.log(Level.WARNING, "Could not commit final records "
-            + "to database", e);
-      }
-      try {
-        this.conn.close();
-      } catch (SQLException e) {
-        this.logger.log(Level.WARNING, "Could not close database "
-            + "connection.", e);
-      }
-    }
-
-    /* Close raw import files. */
-    try {
-      if (this.connBiDirectOut != null) {
-        this.connBiDirectOut.write("\\.\n");
-        this.connBiDirectOut.close();
-      }
-    } catch (IOException e) {
-      this.logger.log(Level.WARNING, "Could not close one or more raw "
-          + "database import files.", e);
-    }
-  }
-}
diff --git a/shared/bin/50-run-legacy-stats.sh b/shared/bin/50-run-legacy-stats.sh
index 75fe66c..0fd5699 100755
--- a/shared/bin/50-run-legacy-stats.sh
+++ b/shared/bin/50-run-legacy-stats.sh
@@ -5,6 +5,5 @@
 psql -U metrics tordir -c 'SELECT * FROM refresh_all();'
 mkdir -p stats
 psql -c 'COPY (SELECT * FROM stats_servers) TO STDOUT WITH CSV HEADER;' tordir > stats/servers.csv
 psql -c 'COPY (SELECT * FROM stats_bandwidth) TO STDOUT WITH CSV HEADER;' tordir > stats/bandwidth.csv
-psql -c 'COPY (SELECT * FROM stats_connbidirect) TO STDOUT WITH CSV HEADER;' tordir > stats/connbidirect.csv
 cd ../../
diff --git a/website/web/WEB-INF/connbidirect-data.jsp b/website/web/WEB-INF/connbidirect-data.jsp
index 8c32fc5..1be8831 100644
--- a/website/web/WEB-INF/connbidirect-data.jsp
+++ b/website/web/WEB-INF/connbidirect-data.jsp
@@ -18,6 +18,7 @@
 <p><font color="red">As of August 25, 2015, this page and the linked
 data file have been replaced by
 <a href="connbidirect2-data.html">this page and the data file linked
 from there</a>.
+Starting on January 11, 2016, the linked data file is not updated anymore.
 This page and the linked data file will be removed in the
 future.</font></p>
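
For reference, the PerformanceStatsImporter removed above could also write a raw import file, stats/connbidirect.sql, instead of inserting rows directly. Based on the strings in the deleted class, that file consisted of a COPY header, tab-separated rows, and a \. terminator, roughly as sketched below; the fingerprint, timestamp, and counts are made-up placeholders, and the final psql load command is an assumption rather than a step any of these scripts ran:

    COPY connbidirect (source, statsend, seconds, belownum, readnum, writenum, bothnum) FROM stdin;
    0000000000000000000000000000000000000000	2016-01-10 12:00:00	86400	0	0	0	0
    \.

    # Hypothetical load step for such a raw import file:
    psql -U metrics -f stats/connbidirect.sql tordir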