tor-commits
May 2018
- 17 participants
- 1514 discussions

[metrics-web/master] Avoid underscores in write_* function parameters.
by karsten@torproject.org 29 May '18
commit 3f1cafdb5c4dffecbeab2a6688218d91f35c79ce
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Thu May 10 21:30:54 2018 +0200
Avoid underscores in write_* function parameters.
We added underscores to parameters in write_* functions when they
would otherwise conflict with columns in the processed data. For
example, if a graph supports a `country` parameter and the data also
contains a `country` column, dplyr/tidyr won't know which `country` we
mean. That's why we renamed the parameter to `country_`.
However, we're soon going to make parameters optional, and if R
receives a couple of parameters of which one has the name `country`,
it can't match that to its `country_` parameter. We need to change the
parameter back to `country` for this to work, which conflicts with the
issue we were fixing earlier.
Turns out there's a way to use the same name for parameter and data
column: whenever we want to use the parameter, we use the
quasiquotation operator `!!` which evaluates its argument early and
inlines the result; and whenever we want to refer to the data column,
we just refer to it by name, without that operator.
Prepares #25383.
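To illustrate the idea outside of graphs.R, here is a minimal sketch with a
made-up data frame and a hypothetical write_example() function (all names
here are invented for illustration): the bare `country` always refers to the
data column, while `!!country` injects the value of the function parameter
of the same name, which dplyr's tidy evaluation supports inside filter().

    library(dplyr)

    d <- data.frame(country = c("", "de", "us"), users = c(30, 10, 20))

    write_example <- function(country) {
      d %>%
        # `!!country` is evaluated early and inlined, so it can only mean
        # the function parameter; the bare `country` is the data column.
        filter(country == ifelse(!!country == "all", "", !!country))
    }

    write_example("de")   # keeps only the "de" row
    write_example("all")  # keeps only the row with the empty country code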
---
src/main/R/rserver/graphs.R | 26 +++++++++++++-------------
1 file changed, 13 insertions(+), 13 deletions(-)
diff --git a/src/main/R/rserver/graphs.R b/src/main/R/rserver/graphs.R
index 2ac2756..ebb8c80 100644
--- a/src/main/R/rserver/graphs.R
+++ b/src/main/R/rserver/graphs.R
@@ -629,14 +629,14 @@ plot_torperf <- function(start, end, source, server, filesize, path) {
# harder than for other functions, because plot_torperf uses different
# colours based on which sources exist, unrelated to which source is
# plotted. Left as future work.
-write_torperf <- function(start, end, source_, server_, filesize_, path) {
+write_torperf <- function(start, end, source, server, filesize, path) {
read.csv(paste(stats_dir, "torperf-1.1.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
filter(date >= as.Date(start), date <= as.Date(end),
- filesize == ifelse(filesize_ == "50kb", 50 * 1024,
- ifelse(filesize_ == "1mb", 1024 * 1024, 5 * 1024 * 1024)),
- source == ifelse(source_ == "all", "", source_),
- server == server_) %>%
+ filesize == ifelse(!!filesize == "50kb", 50 * 1024,
+ ifelse(!!filesize == "1mb", 1024 * 1024, 5 * 1024 * 1024)),
+ source == ifelse(!!source == "all", "", !!source),
+ server == !!server) %>%
transmute(date, q1 = q1 / 1e3, md = md / 1e3, q3 = q3 / 1e3) %>%
write.csv(path, quote = FALSE, row.names = FALSE)
}
@@ -921,18 +921,18 @@ plot_userstats_bridge_version <- function(start, end, version, path) {
plot_userstats(start, end, "bridge", "version", version, "off", path)
}
-write_userstats_relay_country <- function(start, end, country_, events,
+write_userstats_relay_country <- function(start, end, country, events,
path) {
load(paste(rdata_dir, "clients-relay.RData", sep = ""))
u <- data %>%
filter(date >= as.Date(start), date <= as.Date(end),
- country == ifelse(country_ == "all", "", country_), transport == "",
+ country == ifelse(!!country == "all", "", !!country), transport == "",
version == "")
- if (country_ != "all" && events == "on") {
+ if (country != "all" && events == "on") {
u <- u %>%
mutate(downturns = clients < u$lower, upturns = clients > upper) %>%
select(date, clients, downturns, upturns, lower, upper)
- } else if (country_ != "all" && events != "off") {
+ } else if (country != "all" && events != "off") {
u <- u %>%
mutate(downturns = clients < u$lower, upturns = clients > upper) %>%
select(date, clients, downturns, upturns)
@@ -945,11 +945,11 @@ write_userstats_relay_country <- function(start, end, country_, events,
write.csv(path, quote = FALSE, row.names = FALSE)
}
-write_userstats_bridge_country <- function(start, end, country_, path) {
+write_userstats_bridge_country <- function(start, end, country, path) {
load(paste(rdata_dir, "clients-bridge.RData", sep = ""))
data %>%
filter(date >= as.Date(start), date <= as.Date(end),
- country == ifelse(country_ == "all", "", country_), transport == "",
+ country == ifelse(!!country == "all", "", !!country), transport == "",
version == "") %>%
select(date, clients) %>%
rename(users = clients) %>%
@@ -982,11 +982,11 @@ write_userstats_bridge_transport <- function(start, end, transports, path) {
write.csv(path, quote = FALSE, row.names = FALSE)
}
-write_userstats_bridge_version <- function(start, end, version_, path) {
+write_userstats_bridge_version <- function(start, end, version, path) {
load(paste(rdata_dir, "clients-bridge.RData", sep = ""))
data %>%
filter(date >= as.Date(start), date <= as.Date(end),
- country == "", transport == "", version == version_) %>%
+ country == "", transport == "", version == !!version) %>%
select(date, clients) %>%
rename(users = clients) %>%
write.csv(path, quote = FALSE, row.names = FALSE)

[metrics-web/master] Make all parameters in write_* functions optional.
by karsten@torproject.org 29 May '18
commit 167e72b5a06cec3753f7f952fb3e3247bae943a5
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Fri May 11 11:54:45 2018 +0200
Make all parameters in write_* functions optional.
We now permit parameters in write_* functions to be omitted. The
effect is that we're not filtering if a parameter is missing, thus
producing a CSV file with more rows.
At the same time we're adding columns for data that was previously
pre-determined by parameter values. For example, if a user specified a
given country in a parameter, we didn't have to include a country
column containing only that country. Now we need to put that column
back.
Implements #25383.
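As a minimal sketch of the pattern (toy data, hypothetical names): every
filter() call evaluates to TRUE when its parameter is NULL, so an omitted
parameter simply stops restricting the output instead of causing an error.

    library(dplyr)

    d <- data.frame(date = as.Date(c("2018-05-01", "2018-05-02", "2018-05-03")),
                    country = c("de", "us", "de"), users = c(10, 20, 30))

    write_example <- function(start = NULL, end = NULL, country = NULL) {
      d %>%
        # Each filter() falls back to TRUE for every row when its parameter
        # was omitted, so missing parameters widen the CSV rather than fail.
        filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
        filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
        # `country` is both a parameter and a column, hence the `!!`.
        filter(if (!is.null(!!country)) country == !!country else TRUE)
    }

    write_example()                                       # all three rows
    write_example(start = "2018-05-02", country = "de")   # only 2018-05-03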
---
src/main/R/rserver/graphs.R | 369 ++++++++++++---------
.../torproject/metrics/web/RObjectGenerator.java | 4 +
2 files changed, 222 insertions(+), 151 deletions(-)
diff --git a/src/main/R/rserver/graphs.R b/src/main/R/rserver/graphs.R
index ebb8c80..a9b7fc7 100644
--- a/src/main/R/rserver/graphs.R
+++ b/src/main/R/rserver/graphs.R
@@ -351,8 +351,13 @@ robust_call <- function(wrappee, filename) {
prepare_networksize <- function(start, end) {
read.csv(paste(stats_dir, "servers.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(date >= as.Date(start), date <= as.Date(end), flag == "",
- country == "", version == "", platform == "", ec2bridge == "") %>%
+ filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(flag == "") %>%
+ filter(country == "") %>%
+ filter(version == "") %>%
+ filter(platform == "") %>%
+ filter(ec2bridge == "") %>%
select(date, relays, bridges)
}
@@ -373,16 +378,21 @@ plot_networksize <- function(start, end, path) {
ggsave(filename = path, width = 8, height = 5, dpi = 150)
}
-write_networksize <- function(start, end, path) {
+write_networksize <- function(start = NULL, end = NULL, path) {
prepare_networksize(start, end) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
prepare_versions <- function(start, end) {
read.csv(paste(stats_dir, "servers.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(date >= as.Date(start), date <= as.Date(end), flag == "",
- country == "", version != "", platform == "", ec2bridge == "") %>%
+ filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(flag == "") %>%
+ filter(country == "") %>%
+ filter(version != "") %>%
+ filter(platform == "") %>%
+ filter(ec2bridge == "") %>%
select(date, version, relays)
}
@@ -411,17 +421,22 @@ plot_versions <- function(start, end, path) {
ggsave(filename = path, width = 8, height = 5, dpi = 150)
}
-write_versions <- function(start, end, path) {
+write_versions <- function(start = NULL, end = NULL, path) {
prepare_versions(start, end) %>%
spread(key = "version", value = "relays", fill = 0) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
prepare_platforms <- function(start, end) {
read.csv(paste(stats_dir, "servers.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(date >= as.Date(start), date <= as.Date(end), flag == "",
- country == "", version == "", platform != "", ec2bridge == "") %>%
+ filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(flag == "") %>%
+ filter(country == "") %>%
+ filter(version == "") %>%
+ filter(platform != "") %>%
+ filter(ec2bridge == "") %>%
select(date, platform, relays) %>%
mutate(platform = ifelse(platform == "Darwin", "macOS",
as.character(platform)))
@@ -442,17 +457,19 @@ plot_platforms <- function(start, end, path) {
ggsave(filename = path, width = 8, height = 5, dpi = 150)
}
-write_platforms <- function(start, end, path) {
+write_platforms <- function(start = NULL, end = NULL, path) {
prepare_platforms(start, end) %>%
spread(platform, relays) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
prepare_bandwidth <- function(start, end) {
read.csv(paste(stats_dir, "bandwidth.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(date >= as.Date(start), date <= as.Date(end), isexit != "",
- isguard != "") %>%
+ filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(isexit != "") %>%
+ filter(isguard != "") %>%
group_by(date) %>%
summarize(advbw = sum(advbw) * 8 / 1e9,
bwhist = sum(bwread + bwwrite) * 8 / 2e9) %>%
@@ -477,16 +494,18 @@ plot_bandwidth <- function(start, end, path) {
ggsave(filename = path, width = 8, height = 5, dpi = 150)
}
-write_bandwidth <- function(start, end, path) {
+write_bandwidth <- function(start = NULL, end = NULL, path) {
prepare_bandwidth(start, end) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
prepare_bwhist_flags <- function(start, end) {
read.csv(paste(stats_dir, "bandwidth.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(date >= as.Date(start), date <= as.Date(end), isexit != "",
- isguard != "") %>%
+ filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(isexit != "") %>%
+ filter(isguard != "") %>%
mutate(variable = ifelse(isexit == "t",
ifelse(isguard == "t", "guard_and_exit", "exit_only"),
ifelse(isguard == "t", "guard_only", "middle_only")),
@@ -514,17 +533,19 @@ plot_bwhist_flags <- function(start, end, path) {
ggsave(filename = path, width = 8, height = 5, dpi = 150)
}
-write_bwhist_flags <- function(start, end, path) {
+write_bwhist_flags <- function(start = NULL, end = NULL, path) {
prepare_bwhist_flags(start, end) %>%
spread(variable, value) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
prepare_dirbytes <- function(start, end, path) {
read.csv(paste(stats_dir, "bandwidth.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(date >= as.Date(start), date <= as.Date(end), isexit == "",
- isguard == "") %>%
+ filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(isexit == "") %>%
+ filter(isguard == "") %>%
mutate(dirread = dirread * 8 / 1e9,
dirwrite = dirwrite * 8 / 1e9) %>%
select(date, dirread, dirwrite)
@@ -548,18 +569,22 @@ plot_dirbytes <- function(start, end, path) {
ggsave(filename = path, width = 8, height = 5, dpi = 150)
}
-write_dirbytes <- function(start, end, path) {
+write_dirbytes <- function(start = NULL, end = NULL, path) {
prepare_dirbytes(start, end) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
prepare_relayflags <- function(start, end, flags) {
read.csv(paste(stats_dir, "servers.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(date >= as.Date(start), date <= as.Date(end), country == "",
- version == "", platform == "", ec2bridge == "") %>%
+ filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(country == "") %>%
+ filter(version == "") %>%
+ filter(platform == "") %>%
+ filter(ec2bridge == "") %>%
mutate(flag = ifelse(flag == "", "Running", as.character(flag))) %>%
- filter(flag %in% flags) %>%
+ filter(if (!is.null(flags)) flag %in% flags else TRUE) %>%
select(date, flag, relays)
}
@@ -579,11 +604,11 @@ plot_relayflags <- function(start, end, flags, path) {
ggsave(filename = path, width = 8, height = 5, dpi = 150)
}
-write_relayflags <- function(start, end, flags, path) {
+write_relayflags <- function(start = NULL, end = NULL, flags = NULL, path) {
prepare_relayflags(start, end, flags) %>%
mutate(flag = tolower(flag)) %>%
spread(flag, relays) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
plot_torperf <- function(start, end, source, server, filesize, path) {
@@ -629,28 +654,39 @@ plot_torperf <- function(start, end, source, server, filesize, path) {
# harder than for other functions, because plot_torperf uses different
# colours based on which sources exist, unrelated to which source is
# plotted. Left as future work.
-write_torperf <- function(start, end, source, server, filesize, path) {
+write_torperf <- function(start = NULL, end = NULL, source = NULL,
+ server = NULL, filesize = NULL, path) {
read.csv(paste(stats_dir, "torperf-1.1.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(date >= as.Date(start), date <= as.Date(end),
- filesize == ifelse(!!filesize == "50kb", 50 * 1024,
- ifelse(!!filesize == "1mb", 1024 * 1024, 5 * 1024 * 1024)),
- source == ifelse(!!source == "all", "", !!source),
- server == !!server) %>%
- transmute(date, q1 = q1 / 1e3, md = md / 1e3, q3 = q3 / 1e3) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(!!source))
+ source == ifelse(!!source == "all", "", !!source) else TRUE) %>%
+ filter(if (!is.null(!!server)) server == !!server else TRUE) %>%
+ filter(if (!is.null(!!filesize))
+ filesize == ifelse(!!filesize == "50kb", 50 * 1024,
+ ifelse(!!filesize == "1mb", 1024 * 1024, 5 * 1024 * 1024)) else
+ TRUE) %>%
+ transmute(date, filesize, source, server, q1 = q1 / 1e3, md = md / 1e3,
+ q3 = q3 / 1e3) %>%
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
prepare_torperf_failures <- function(start, end, source, server, filesize) {
- filesize_val <- ifelse(filesize == "50kb", 50 * 1024,
- ifelse(filesize == "1mb", 1024 * 1024, 5 * 1024 * 1024))
- t <- read.csv(paste(stats_dir, "torperf-1.1.csv", sep = ""),
- colClasses = c("date" = "Date"))
- t[t$date >= start & t$date <= end & t$filesize == filesize_val &
- t$source == ifelse(source == "all", "", source) &
- t$server == server & t$requests > 0, ] %>%
- transmute(date, timeouts = timeouts / requests,
- failures = failures / requests)
+ read.csv(paste(stats_dir, "torperf-1.1.csv", sep = ""),
+ colClasses = c("date" = "Date")) %>%
+ filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(!!filesize))
+ filesize == ifelse(!!filesize == "50kb", 50 * 1024,
+ ifelse(!!filesize == "1mb", 1024 * 1024, 5 * 1024 * 1024)) else
+ TRUE) %>%
+ filter(if (!is.null(!!source))
+ source == ifelse(!!source == "all", "", !!source) else TRUE) %>%
+ filter(if (!is.null(!!server)) server == !!server else TRUE) %>%
+ filter(requests > 0) %>%
+ transmute(date, filesize, source, server, timeouts = timeouts / requests,
+ failures = failures / requests)
}
plot_torperf_failures <- function(start, end, source, server, filesize, path) {
@@ -675,15 +711,17 @@ plot_torperf_failures <- function(start, end, source, server, filesize, path) {
ggsave(filename = path, width = 8, height = 5, dpi = 150)
}
-write_torperf_failures <- function(start, end, source, server, filesize, path) {
+write_torperf_failures <- function(start = NULL, end = NULL, source = NULL,
+ server = NULL, filesize = NULL, path) {
prepare_torperf_failures(start, end, source, server, filesize) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
prepare_connbidirect <- function(start, end) {
read.csv(paste(stats_dir, "connbidirect2.csv", sep = ""),
colClasses = c("date" = "Date", "direction" = "factor")) %>%
- filter(date >= as.Date(start), date <= as.Date(end)) %>%
+ filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
mutate(quantile = paste("X", quantile, sep = ""),
fraction = fraction / 100) %>%
spread(quantile, fraction)
@@ -712,20 +750,23 @@ plot_connbidirect <- function(start, end, path) {
ggsave(filename = path, width = 8, height = 5, dpi = 150)
}
-write_connbidirect <- function(start, end, path) {
+write_connbidirect <- function(start = NULL, end = NULL, path) {
prepare_connbidirect(start, end) %>%
rename(q1 = X0.25, md = X0.5, q3 = X0.75) %>%
gather(variable, value, -(date:direction)) %>%
unite(temp, direction, variable) %>%
spread(temp, value) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
prepare_bandwidth_flags <- function(start, end) {
b <- read.csv(paste(stats_dir, "bandwidth.csv", sep = ""),
colClasses = c("date" = "Date"))
- b <- b[b$date >= start & b$date <= end & b$isexit != "" &
- b$isguard != "", ]
+ b <- b %>%
+ filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(isexit != "") %>%
+ filter(isguard != "")
b <- data.frame(date = b$date,
isexit = b$isexit == "t", isguard = b$isguard == "t",
advbw = b$advbw * 8 / 1e9,
@@ -770,10 +811,10 @@ plot_bandwidth_flags <- function(start, end, path) {
ggsave(filename = path, width = 8, height = 5, dpi = 150)
}
-write_bandwidth_flags <- function(start, end, path) {
+write_bandwidth_flags <- function(start = NULL, end = NULL, path) {
prepare_bandwidth_flags(start, end) %>%
spread(variable, value) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
plot_userstats <- function(start, end, node, variable, value, events,
@@ -921,48 +962,48 @@ plot_userstats_bridge_version <- function(start, end, version, path) {
plot_userstats(start, end, "bridge", "version", version, "off", path)
}
-write_userstats_relay_country <- function(start, end, country, events,
- path) {
+write_userstats_relay_country <- function(start = NULL, end = NULL,
+ country = NULL, events = NULL, path) {
load(paste(rdata_dir, "clients-relay.RData", sep = ""))
u <- data %>%
- filter(date >= as.Date(start), date <= as.Date(end),
- country == ifelse(!!country == "all", "", !!country), transport == "",
- version == "")
- if (country != "all" && events == "on") {
- u <- u %>%
- mutate(downturns = clients < u$lower, upturns = clients > upper) %>%
- select(date, clients, downturns, upturns, lower, upper)
- } else if (country != "all" && events != "off") {
- u <- u %>%
- mutate(downturns = clients < u$lower, upturns = clients > upper) %>%
- select(date, clients, downturns, upturns)
- } else {
- u <- u %>%
- select(date, clients)
- }
- u %>%
+ filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(!!country))
+ country == ifelse(!!country == "all", "", !!country) else TRUE) %>%
+ filter(transport == "") %>%
+ filter(version == "") %>%
+ mutate(downturns = clients < lower, upturns = clients > upper) %>%
+ select(date, country, clients, downturns, upturns, lower, upper) %>%
rename(users = clients) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
-write_userstats_bridge_country <- function(start, end, country, path) {
+write_userstats_bridge_country <- function(start = NULL, end = NULL,
+ country = NULL, path) {
load(paste(rdata_dir, "clients-bridge.RData", sep = ""))
data %>%
- filter(date >= as.Date(start), date <= as.Date(end),
- country == ifelse(!!country == "all", "", !!country), transport == "",
- version == "") %>%
- select(date, clients) %>%
+ filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(!!country))
+ country == ifelse(!!country == "all", "", !!country) else TRUE) %>%
+ filter(transport == "") %>%
+ filter(version == "") %>%
+ select(date, country, clients) %>%
rename(users = clients) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
-write_userstats_bridge_transport <- function(start, end, transports, path) {
+write_userstats_bridge_transport <- function(start = NULL, end = NULL,
+ transports = NULL, path) {
load(paste(rdata_dir, "clients-bridge.RData", sep = ""))
u <- data %>%
- filter(date >= as.Date(start), date <= as.Date(end),
- country == "", version == "", transport != "") %>%
+ filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(country == "") %>%
+ filter(version == "") %>%
+ filter(transport != "") %>%
select(date, transport, clients)
- if ("!<OR>" %in% transports) {
+ if (is.null(transports) || "!<OR>" %in% transports) {
n <- u %>%
filter(transport != "<OR>") %>%
group_by(date) %>%
@@ -971,7 +1012,7 @@ write_userstats_bridge_transport <- function(start, end, transports, path) {
clients = n$clients))
}
u %>%
- filter(transport %in% transports) %>%
+ filter(if (!is.null(transports)) transport %in% transports else TRUE) %>%
mutate(transport = ifelse(transport == "<OR>", "default_or_protocol",
ifelse(transport == "!<OR>", "any_pt",
ifelse(transport == "<??>", "unknown_pluggable_transports",
@@ -979,38 +1020,41 @@ write_userstats_bridge_transport <- function(start, end, transports, path) {
group_by(date, transport) %>%
select(date, transport, clients) %>%
spread(transport, clients) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
-write_userstats_bridge_version <- function(start, end, version, path) {
+write_userstats_bridge_version <- function(start = NULL, end = NULL,
+ version = NULL, path) {
load(paste(rdata_dir, "clients-bridge.RData", sep = ""))
data %>%
- filter(date >= as.Date(start), date <= as.Date(end),
- country == "", transport == "", version == !!version) %>%
- select(date, clients) %>%
+ filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(country == "") %>%
+ filter(transport == "") %>%
+ filter(if (!is.null(!!version)) version == !!version else TRUE) %>%
+ select(date, version, clients) %>%
rename(users = clients) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
prepare_userstats_bridge_combined <- function(start, end, country) {
- top <- 3
- country <- ifelse(country == "all", NA, country)
load(paste(rdata_dir, "userstats-bridge-combined.RData", sep = ""))
- u <- data
- u <- u[u$date >= start & u$date <= end
- & (is.na(country) | u$country == country), ]
- a <- aggregate(list(mid = (u$high + u$low) / 2),
- by = list(transport = u$transport), FUN = sum)
- a <- a[order(a$mid, decreasing = TRUE)[1:top], ]
- u <- u[u$transport %in% a$transport, ]
- u
+ data %>%
+ filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(!!country)) country == !!country else TRUE)
}
plot_userstats_bridge_combined <- function(start, end, country, path) {
if (country == "all") {
plot_userstats_bridge_country(start, end, country, path)
} else {
+ top <- 3
u <- prepare_userstats_bridge_combined(start, end, country)
+ a <- aggregate(list(mid = (u$high + u$low) / 2),
+ by = list(transport = u$transport), FUN = sum)
+ a <- a[order(a$mid, decreasing = TRUE)[1:top], ]
+ u <- u[u$transport %in% a$transport, ]
title <- paste("Bridge users by transport from ",
countryname(country), sep = "")
ggplot(u, aes(x = as.Date(date), ymin = low, ymax = high,
@@ -1028,26 +1072,29 @@ plot_userstats_bridge_combined <- function(start, end, country, path) {
}
}
-write_userstats_bridge_combined <- function(start, end, country, path) {
- if (country == "all") {
+write_userstats_bridge_combined <- function(start = NULL, end = NULL,
+ country = NULL, path) {
+ if (!is.null(country) && country == "all") {
write_userstats_bridge_country(start, end, country, path)
} else {
prepare_userstats_bridge_combined(start, end, country) %>%
- select(date, transport, low, high) %>%
- mutate(transport = ifelse(transport == "<OR>",
- "default_or_protocol", transport)) %>%
+ select(date, country, transport, low, high) %>%
+ mutate(transport = ifelse(transport == "<OR>", "default_or_protocol",
+ ifelse(transport == "<??>", "unknown_transport", transport))) %>%
gather(variable, value, -(date:transport)) %>%
unite(temp, transport, variable) %>%
spread(temp, value) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
}
prepare_advbwdist_perc <- function(start, end, p) {
read.csv(paste(stats_dir, "advbwdist.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(date >= as.Date(start), date <= as.Date(end),
- percentile %in% as.numeric(p)) %>%
+ filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(p)) percentile %in% as.numeric(p) else
+ percentile != "") %>%
transmute(date, percentile = as.factor(percentile),
variable = ifelse(isexit != "t", "all", "exits"),
advbw = advbw * 8 / 1e9)
@@ -1070,18 +1117,20 @@ plot_advbwdist_perc <- function(start, end, p, path) {
ggsave(filename = path, width = 8, height = 5, dpi = 150)
}
-write_advbwdist_perc <- function(start, end, p, path) {
+write_advbwdist_perc <- function(start = NULL, end = NULL, p = NULL, path) {
prepare_advbwdist_perc(start, end, p) %>%
unite(temp, variable, percentile) %>%
spread(temp, advbw) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
prepare_advbwdist_relay <- function(start, end, n) {
read.csv(paste(stats_dir, "advbwdist.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(date >= as.Date(start), date <= as.Date(end),
- relay %in% as.numeric(n)) %>%
+ filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(n)) relay %in% as.numeric(n) else
+ relay != "") %>%
transmute(date, relay = as.factor(relay),
variable = ifelse(isexit != "t", "all", "exits"),
advbw = advbw * 8 / 1e9)
@@ -1104,18 +1153,19 @@ plot_advbwdist_relay <- function(start, end, n, path) {
ggsave(filename = path, width = 8, height = 5, dpi = 150)
}
-write_advbwdist_relay <- function(start, end, n, path) {
+write_advbwdist_relay <- function(start = NULL, end = NULL, n = NULL, path) {
prepare_advbwdist_relay(start, end, n) %>%
unite(temp, variable, relay) %>%
spread(temp, advbw) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
prepare_hidserv_dir_onions_seen <- function(start, end) {
read.csv(paste(stats_dir, "hidserv.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(date >= as.Date(start), date <= as.Date(end),
- type == "dir-onions-seen") %>%
+ filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(type == "dir-onions-seen") %>%
transmute(date = date, onions = ifelse(frac >= 0.01, wiqm, NA))
}
@@ -1131,16 +1181,17 @@ plot_hidserv_dir_onions_seen <- function(start, end, path) {
ggsave(filename = path, width = 8, height = 5, dpi = 150)
}
-write_hidserv_dir_onions_seen <- function(start, end, path) {
+write_hidserv_dir_onions_seen <- function(start = NULL, end = NULL, path) {
prepare_hidserv_dir_onions_seen(start, end) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
prepare_hidserv_rend_relayed_cells <- function(start, end) {
read.csv(paste(stats_dir, "hidserv.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(date >= as.Date(start), date <= as.Date(end),
- type == "rend-relayed-cells") %>%
+ filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(type == "rend-relayed-cells") %>%
transmute(date,
relayed = ifelse(frac >= 0.01, wiqm * 8 * 512 / (86400 * 1e9), NA))
}
@@ -1158,15 +1209,16 @@ plot_hidserv_rend_relayed_cells <- function(start, end, path) {
ggsave(filename = path, width = 8, height = 5, dpi = 150)
}
-write_hidserv_rend_relayed_cells <- function(start, end, path) {
+write_hidserv_rend_relayed_cells <- function(start = NULL, end = NULL, path) {
prepare_hidserv_rend_relayed_cells(start, end) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
prepare_hidserv_frac_reporting <- function(start, end) {
read.csv(paste(stats_dir, "hidserv.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(date >= as.Date(start), date <= as.Date(end)) %>%
+ filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
select(date, frac, type)
}
@@ -1189,17 +1241,18 @@ plot_hidserv_frac_reporting <- function(start, end, path) {
ggsave(filename = path, width = 8, height = 5, dpi = 150)
}
-write_hidserv_frac_reporting <- function(start, end, path) {
+write_hidserv_frac_reporting <- function(start = NULL, end = NULL, path) {
prepare_hidserv_frac_reporting(start, end) %>%
mutate(type = ifelse(type == "dir-onions-seen", "onions", "relayed")) %>%
spread(type, frac) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
prepare_webstats_tb <- function(start, end) {
load(paste(rdata_dir, "webstats-tb.RData", sep = ""))
data %>%
- filter(log_date >= as.Date(start), log_date <= as.Date(end)) %>%
+ filter(if (!is.null(start)) log_date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end)) log_date <= as.Date(end) else TRUE) %>%
mutate(request_type = factor(request_type))
}
@@ -1224,20 +1277,21 @@ plot_webstats_tb <- function(start, end, path) {
ggsave(filename = path, width = 8, height = 5, dpi = 150)
}
-write_webstats_tb <- function(start, end, path) {
+write_webstats_tb <- function(start = NULL, end = NULL, path) {
prepare_webstats_tb(start, end) %>%
rename(date = log_date) %>%
spread(request_type, count) %>%
rename(initial_downloads = tbid, signature_downloads = tbsd,
update_pings = tbup, update_requests = tbur) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
prepare_webstats_tb_platform <- function(start, end) {
read.csv(paste(stats_dir, "webstats.csv", sep = ""),
colClasses = c("log_date" = "Date")) %>%
- filter(log_date >= as.Date(start), log_date <= as.Date(end),
- request_type == "tbid") %>%
+ filter(if (!is.null(start)) log_date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end)) log_date <= as.Date(end) else TRUE) %>%
+ filter(request_type == "tbid") %>%
group_by(log_date, platform) %>%
summarize(count = sum(count))
}
@@ -1260,12 +1314,12 @@ plot_webstats_tb_platform <- function(start, end, path) {
ggsave(filename = path, width = 8, height = 5, dpi = 150)
}
-write_webstats_tb_platform <- function(start, end, path) {
+write_webstats_tb_platform <- function(start = NULL, end = NULL, path) {
prepare_webstats_tb_platform(start, end) %>%
rename(date = log_date) %>%
spread(platform, count) %>%
rename(linux = l, macos = m, windows = w) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
plot_webstats_tb_locale <- function(start, end, path) {
@@ -1299,10 +1353,13 @@ plot_webstats_tb_locale <- function(start, end, path) {
# turned out to be a bit harder than for other functions, because
# plot_webstats_tb_locale needs the preliminary data frame e for its
# breaks and labels. Left as future work.
-write_webstats_tb_locale <- function(start, end, path) {
+write_webstats_tb_locale <- function(start = NULL, end = NULL, path) {
d <- read.csv(paste(stats_dir, "webstats.csv", sep = ""),
colClasses = c("log_date" = "Date", "locale" = "character"))
- d <- d[d$log_date >= start & d$log_date <= end & d$request_type == "tbid", ]
+ d <- d %>%
+ filter(if (!is.null(start)) log_date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end)) log_date <= as.Date(end) else TRUE) %>%
+ filter(request_type == "tbid")
e <- d
e <- aggregate(list(count = e$count), by = list(locale = e$locale), FUN = sum)
e <- e[order(e$count, decreasing = TRUE), ]
@@ -1313,13 +1370,14 @@ write_webstats_tb_locale <- function(start, end, path) {
mutate(locale = tolower(locale)) %>%
rename(date = log_date) %>%
spread(locale, count) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
prepare_webstats_tm <- function(start, end) {
load(paste(rdata_dir, "webstats-tm.RData", sep = ""))
data %>%
- filter(log_date >= as.Date(start), log_date <= as.Date(end)) %>%
+ filter(if (!is.null(start)) log_date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end)) log_date <= as.Date(end) else TRUE) %>%
mutate(request_type = factor(request_type))
}
@@ -1342,19 +1400,22 @@ plot_webstats_tm <- function(start, end, path) {
ggsave(filename = path, width = 8, height = 5, dpi = 150)
}
-write_webstats_tm <- function(start, end, path) {
+write_webstats_tm <- function(start = NULL, end = NULL, path) {
prepare_webstats_tm(start, end) %>%
rename(date = log_date) %>%
spread(request_type, count) %>%
rename(initial_downloads = tmid, update_pings = tmup) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
prepare_relays_ipv6 <- function(start, end) {
read.csv(paste(stats_dir, "ipv6servers.csv", sep = ""),
colClasses = c("valid_after_date" = "Date")) %>%
- filter(valid_after_date >= as.Date(start),
- valid_after_date <= as.Date(end), server == "relay") %>%
+ filter(if (!is.null(start))
+ valid_after_date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end))
+ valid_after_date <= as.Date(end) else TRUE) %>%
+ filter(server == "relay") %>%
group_by(valid_after_date) %>%
summarize(total = sum(server_count_sum_avg),
announced = sum(server_count_sum_avg[announced_ipv6 == "t"]),
@@ -1382,18 +1443,21 @@ plot_relays_ipv6 <- function(start, end, path) {
ggsave(filename = path, width = 8, height = 5, dpi = 150)
}
-write_relays_ipv6 <- function(start, end, path) {
+write_relays_ipv6 <- function(start = NULL, end = NULL, path) {
prepare_relays_ipv6(start, end) %>%
rename(date = valid_after_date) %>%
spread(category, count) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
prepare_bridges_ipv6 <- function(start, end) {
read.csv(paste(stats_dir, "ipv6servers.csv", sep = ""),
colClasses = c("valid_after_date" = "Date")) %>%
- filter(valid_after_date >= as.Date(start),
- valid_after_date <= as.Date(end), server == "bridge") %>%
+ filter(if (!is.null(start))
+ valid_after_date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end))
+ valid_after_date <= as.Date(end) else TRUE) %>%
+ filter(server == "bridge") %>%
group_by(valid_after_date) %>%
summarize(total = sum(server_count_sum_avg),
announced = sum(server_count_sum_avg[announced_ipv6 == "t"])) %>%
@@ -1417,18 +1481,21 @@ plot_bridges_ipv6 <- function(start, end, path) {
ggsave(filename = path, width = 8, height = 5, dpi = 150)
}
-write_bridges_ipv6 <- function(start, end, path) {
+write_bridges_ipv6 <- function(start = NULL, end = NULL, path) {
prepare_bridges_ipv6(start, end) %>%
rename(date = valid_after_date) %>%
spread(category, count) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
prepare_advbw_ipv6 <- function(start, end) {
read.csv(paste(stats_dir, "ipv6servers.csv", sep = ""),
colClasses = c("valid_after_date" = "Date")) %>%
- filter(valid_after_date >= as.Date(start),
- valid_after_date <= as.Date(end), server == "relay") %>%
+ filter(if (!is.null(start))
+ valid_after_date >= as.Date(start) else TRUE) %>%
+ filter(if (!is.null(end))
+ valid_after_date <= as.Date(end) else TRUE) %>%
+ filter(server == "relay") %>%
group_by(valid_after_date) %>%
summarize(total = sum(advertised_bandwidth_bytes_sum_avg),
total_guard = sum(advertised_bandwidth_bytes_sum_avg[guard_relay != "f"]),
@@ -1465,10 +1532,10 @@ plot_advbw_ipv6 <- function(start, end, path) {
ggsave(filename = path, width = 8, height = 5, dpi = 150)
}
-write_advbw_ipv6 <- function(start, end, path) {
+write_advbw_ipv6 <- function(start = NULL, end = NULL, path) {
prepare_advbw_ipv6(start, end) %>%
rename(date = valid_after_date) %>%
spread(category, advbw) %>%
- write.csv(path, quote = FALSE, row.names = FALSE)
+ write.csv(path, quote = FALSE, row.names = FALSE, na = "")
}
diff --git a/src/main/java/org/torproject/metrics/web/RObjectGenerator.java b/src/main/java/org/torproject/metrics/web/RObjectGenerator.java
index aea6db7..00fcc81 100644
--- a/src/main/java/org/torproject/metrics/web/RObjectGenerator.java
+++ b/src/main/java/org/torproject/metrics/web/RObjectGenerator.java
@@ -126,6 +126,10 @@ public class RObjectGenerator implements ServletContextListener {
queryBuilder.append("robust_call(as.call(list(");
if ("csv".equalsIgnoreCase(fileType)) {
queryBuilder.append("write_");
+ /* When we checked parameters above we also put in defaults for missing
+ * parameters. This is okay for graphs, but we want to support CSV files
+ * with empty parameters. Using the parameters we got here. */
+ checkedParameters = parameterMap;
} else {
queryBuilder.append("plot_");
}

[metrics-web/master] Append _p to all plot_* and write_* parameters.
by karsten@torproject.org 29 May '18
commit 7366398fdaf3caf6e02664d070e7c35dea29cc4b
Author: Karsten Loesing <karsten.loesing(a)gmx.net>
Date: Thu May 17 11:37:36 2018 +0200
Append _p to all plot_* and write_* parameters.
This change makes our R code a little more readable in cases where
parameter names match data column names.
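A minimal sketch of the effect (toy data, hypothetical names): with the _p
suffix the parameter can no longer collide with a column of the same name,
so the `!!` injections introduced in the previous commit become unnecessary,
as the write_torperf hunk further below also shows.

    library(dplyr)

    d <- data.frame(country = c("", "de", "us"), users = c(30, 10, 20))

    write_example <- function(country_p = NULL) {
      d %>%
        # country_p can only be the function parameter and the bare
        # `country` can only be the data column, so plain references work.
        filter(if (!is.null(country_p))
          country == ifelse(country_p == "all", "", country_p) else TRUE)
    }

    write_example("de")  # keeps only the "de" row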
---
src/main/R/rserver/graphs.R | 685 +++++++++++----------
.../torproject/metrics/web/RObjectGenerator.java | 6 +-
2 files changed, 351 insertions(+), 340 deletions(-)
diff --git a/src/main/R/rserver/graphs.R b/src/main/R/rserver/graphs.R
index a9b7fc7..9a2b939 100644
--- a/src/main/R/rserver/graphs.R
+++ b/src/main/R/rserver/graphs.R
@@ -348,11 +348,11 @@ robust_call <- function(wrappee, filename) {
})
}
-prepare_networksize <- function(start, end) {
+prepare_networksize <- function(start_p, end_p) {
read.csv(paste(stats_dir, "servers.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(start_p)) date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p)) date <= as.Date(end_p) else TRUE) %>%
filter(flag == "") %>%
filter(country == "") %>%
filter(version == "") %>%
@@ -361,8 +361,8 @@ prepare_networksize <- function(start, end) {
select(date, relays, bridges)
}
-plot_networksize <- function(start, end, path) {
- prepare_networksize(start, end) %>%
+plot_networksize <- function(start_p, end_p, path_p) {
+ prepare_networksize(start_p, end_p) %>%
gather(variable, value, -date) %>%
complete(date = full_seq(date, period = 1),
variable = c("relays", "bridges")) %>%
@@ -375,19 +375,19 @@ plot_networksize <- function(start, end, path) {
labels = c("Relays", "Bridges")) +
ggtitle("Number of relays") +
labs(caption = copyright_notice)
- ggsave(filename = path, width = 8, height = 5, dpi = 150)
+ ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
}
-write_networksize <- function(start = NULL, end = NULL, path) {
- prepare_networksize(start, end) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+write_networksize <- function(start_p = NULL, end_p = NULL, path_p) {
+ prepare_networksize(start_p, end_p) %>%
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
-prepare_versions <- function(start, end) {
+prepare_versions <- function(start_p, end_p) {
read.csv(paste(stats_dir, "servers.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(start_p)) date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p)) date <= as.Date(end_p) else TRUE) %>%
filter(flag == "") %>%
filter(country == "") %>%
filter(version != "") %>%
@@ -396,8 +396,8 @@ prepare_versions <- function(start, end) {
select(date, version, relays)
}
-plot_versions <- function(start, end, path) {
- s <- prepare_versions(start, end)
+plot_versions <- function(start_p, end_p, path_p) {
+ s <- prepare_versions(start_p, end_p)
known_versions <- c("Other", "0.1.0", "0.1.1", "0.1.2", "0.2.0",
"0.2.1", "0.2.2", "0.2.3", "0.2.4", "0.2.5", "0.2.6", "0.2.7",
"0.2.8", "0.2.9", "0.3.0", "0.3.1", "0.3.2", "0.3.3", "0.3.4")
@@ -418,20 +418,20 @@ plot_versions <- function(start, end, path) {
breaks = visible_versions) +
ggtitle("Relay versions") +
labs(caption = copyright_notice)
- ggsave(filename = path, width = 8, height = 5, dpi = 150)
+ ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
}
-write_versions <- function(start = NULL, end = NULL, path) {
- prepare_versions(start, end) %>%
+write_versions <- function(start_p = NULL, end_p = NULL, path_p) {
+ prepare_versions(start_p, end_p) %>%
spread(key = "version", value = "relays", fill = 0) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
-prepare_platforms <- function(start, end) {
+prepare_platforms <- function(start_p, end_p) {
read.csv(paste(stats_dir, "servers.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(start_p)) date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p)) date <= as.Date(end_p) else TRUE) %>%
filter(flag == "") %>%
filter(country == "") %>%
filter(version == "") %>%
@@ -442,8 +442,8 @@ prepare_platforms <- function(start, end) {
as.character(platform)))
}
-plot_platforms <- function(start, end, path) {
- prepare_platforms(start, end) %>%
+plot_platforms <- function(start_p, end_p, path_p) {
+ prepare_platforms(start_p, end_p) %>%
ggplot(aes(x = date, y = relays, colour = platform)) +
geom_line() +
scale_x_date(name = "", breaks = custom_breaks,
@@ -454,20 +454,20 @@ plot_platforms <- function(start, end, path) {
values = c("#E69F00", "#56B4E9", "#009E73", "#0072B2", "#333333")) +
ggtitle("Relay platforms") +
labs(caption = copyright_notice)
- ggsave(filename = path, width = 8, height = 5, dpi = 150)
+ ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
}
-write_platforms <- function(start = NULL, end = NULL, path) {
- prepare_platforms(start, end) %>%
+write_platforms <- function(start_p = NULL, end_p = NULL, path_p) {
+ prepare_platforms(start_p, end_p) %>%
spread(platform, relays) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
-prepare_bandwidth <- function(start, end) {
+prepare_bandwidth <- function(start_p, end_p) {
read.csv(paste(stats_dir, "bandwidth.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(start_p)) date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p)) date <= as.Date(end_p) else TRUE) %>%
filter(isexit != "") %>%
filter(isguard != "") %>%
group_by(date) %>%
@@ -476,8 +476,8 @@ prepare_bandwidth <- function(start, end) {
select(date, advbw, bwhist)
}
-plot_bandwidth <- function(start, end, path) {
- prepare_bandwidth(start, end) %>%
+plot_bandwidth <- function(start_p, end_p, path_p) {
+ prepare_bandwidth(start_p, end_p) %>%
gather(variable, value, -date) %>%
ggplot(aes(x = date, y = value, colour = variable)) +
geom_line() +
@@ -491,19 +491,19 @@ plot_bandwidth <- function(start, end, path) {
ggtitle("Total relay bandwidth") +
labs(caption = copyright_notice) +
theme(legend.position = "top")
- ggsave(filename = path, width = 8, height = 5, dpi = 150)
+ ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
}
-write_bandwidth <- function(start = NULL, end = NULL, path) {
- prepare_bandwidth(start, end) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+write_bandwidth <- function(start_p = NULL, end_p = NULL, path_p) {
+ prepare_bandwidth(start_p, end_p) %>%
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
-prepare_bwhist_flags <- function(start, end) {
+prepare_bwhist_flags <- function(start_p, end_p) {
read.csv(paste(stats_dir, "bandwidth.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(start_p)) date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p)) date <= as.Date(end_p) else TRUE) %>%
filter(isexit != "") %>%
filter(isguard != "") %>%
mutate(variable = ifelse(isexit == "t",
@@ -513,8 +513,8 @@ prepare_bwhist_flags <- function(start, end) {
select(date, variable, value)
}
-plot_bwhist_flags <- function(start, end, path) {
- prepare_bwhist_flags(start, end) %>%
+plot_bwhist_flags <- function(start_p, end_p, path_p) {
+ prepare_bwhist_flags(start_p, end_p) %>%
complete(date = full_seq(date, period = 1),
variable = unique(variable)) %>%
ggplot(aes(x = date, y = value, colour = variable)) +
@@ -530,20 +530,20 @@ plot_bwhist_flags <- function(start, end, path) {
ggtitle("Bandwidth history by relay flags") +
labs(caption = copyright_notice) +
theme(legend.position = "top")
- ggsave(filename = path, width = 8, height = 5, dpi = 150)
+ ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
}
-write_bwhist_flags <- function(start = NULL, end = NULL, path) {
- prepare_bwhist_flags(start, end) %>%
+write_bwhist_flags <- function(start_p = NULL, end_p = NULL, path_p) {
+ prepare_bwhist_flags(start_p, end_p) %>%
spread(variable, value) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
-prepare_dirbytes <- function(start, end, path) {
+prepare_dirbytes <- function(start_p, end_p, path_p) {
read.csv(paste(stats_dir, "bandwidth.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(start_p)) date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p)) date <= as.Date(end_p) else TRUE) %>%
filter(isexit == "") %>%
filter(isguard == "") %>%
mutate(dirread = dirread * 8 / 1e9,
@@ -551,8 +551,8 @@ prepare_dirbytes <- function(start, end, path) {
select(date, dirread, dirwrite)
}
-plot_dirbytes <- function(start, end, path) {
- prepare_dirbytes(start, end) %>%
+plot_dirbytes <- function(start_p, end_p, path_p) {
+ prepare_dirbytes(start_p, end_p) %>%
gather(variable, value, -date) %>%
ggplot(aes(x = date, y = value, colour = variable)) +
geom_line() +
@@ -566,30 +566,30 @@ plot_dirbytes <- function(start, end, path) {
ggtitle("Number of bytes spent on answering directory requests") +
labs(caption = copyright_notice) +
theme(legend.position = "top")
- ggsave(filename = path, width = 8, height = 5, dpi = 150)
+ ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
}
-write_dirbytes <- function(start = NULL, end = NULL, path) {
- prepare_dirbytes(start, end) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+write_dirbytes <- function(start_p = NULL, end_p = NULL, path_p) {
+ prepare_dirbytes(start_p, end_p) %>%
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
-prepare_relayflags <- function(start, end, flags) {
+prepare_relayflags <- function(start_p, end_p, flags_p) {
read.csv(paste(stats_dir, "servers.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(start_p)) date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p)) date <= as.Date(end_p) else TRUE) %>%
filter(country == "") %>%
filter(version == "") %>%
filter(platform == "") %>%
filter(ec2bridge == "") %>%
mutate(flag = ifelse(flag == "", "Running", as.character(flag))) %>%
- filter(if (!is.null(flags)) flag %in% flags else TRUE) %>%
+ filter(if (!is.null(flags_p)) flag %in% flags_p else TRUE) %>%
select(date, flag, relays)
}
-plot_relayflags <- function(start, end, flags, path) {
- prepare_relayflags(start, end, flags) %>%
+plot_relayflags <- function(start_p, end_p, flags_p, path_p) {
+ prepare_relayflags(start_p, end_p, flags_p) %>%
complete(date = full_seq(date, period = 1), flag = unique(flag)) %>%
ggplot(aes(x = date, y = relays, colour = as.factor(flag))) +
geom_line() +
@@ -598,36 +598,38 @@ plot_relayflags <- function(start, end, flags, path) {
scale_y_continuous(name = "", labels = formatter, limits = c(0, NA)) +
scale_colour_manual(name = "Relay flags", values = c("#E69F00",
"#56B4E9", "#009E73", "#EE6A50", "#000000", "#0072B2"),
- breaks = flags, labels = flags) +
+ breaks = flags_p, labels = flags_p) +
ggtitle("Number of relays with relay flags assigned") +
labs(caption = copyright_notice)
- ggsave(filename = path, width = 8, height = 5, dpi = 150)
+ ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
}
-write_relayflags <- function(start = NULL, end = NULL, flags = NULL, path) {
- prepare_relayflags(start, end, flags) %>%
+write_relayflags <- function(start_p = NULL, end_p = NULL, flags_p = NULL,
+ path_p) {
+ prepare_relayflags(start_p, end_p, flags_p) %>%
mutate(flag = tolower(flag)) %>%
spread(flag, relays) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
-plot_torperf <- function(start, end, source, server, filesize, path) {
- filesize_val <- ifelse(filesize == "50kb", 50 * 1024,
- ifelse(filesize == "1mb", 1024 * 1024, 5 * 1024 * 1024))
+plot_torperf <- function(start_p, end_p, source_p, server_p, filesize_p,
+ path_p) {
+ filesize_val <- ifelse(filesize_p == "50kb", 50 * 1024,
+ ifelse(filesize_p == "1mb", 1024 * 1024, 5 * 1024 * 1024))
t <- read.csv(paste(stats_dir, "torperf-1.1.csv", sep = ""),
colClasses = c("date" = "Date", "source" = "character"))
known_sources <- c("all", unique(t[t$source != "", "source"]))
colours <- data.frame(source = known_sources,
colour = brewer.pal(length(known_sources), "Paired"),
stringsAsFactors = FALSE)
- colour <- colours[colours$source == source, "colour"]
+ colour <- colours[colours$source == source_p, "colour"]
filesizes <- data.frame(filesizes = c("5mb", "1mb", "50kb"),
label = c("5 MiB", "1 MiB", "50 KiB"), stringsAsFactors = FALSE)
- filesize_str <- filesizes[filesizes$filesize == filesize, "label"]
- t[t$date >= as.Date(start) & t$date <= as.Date(end) &
+ filesize_str <- filesizes[filesizes$filesize == filesize_p, "label"]
+ t[t$date >= as.Date(start_p) & t$date <= as.Date(end_p) &
t$filesize == filesize_val &
- t$source == ifelse(source == "all", "", source) &
- t$server == server, ] %>%
+ t$source == ifelse(source_p == "all", "", source_p) &
+ t$server == server_p, ] %>%
transmute(date, q1 = q1 / 1e3, md = md / 1e3, q3 = q3 / 1e3) %>%
complete(date = full_seq(date, period = 1)) %>%
ggplot(aes(x = date, y = md, fill = "line")) +
@@ -638,15 +640,15 @@ plot_torperf <- function(start, end, source, server, filesize, path) {
scale_y_continuous(name = "", labels = unit_format(unit = "s"),
limits = c(0, NA)) +
scale_fill_manual(name = paste("Measured times on",
- ifelse(source == "all", "all sources", source), "per day"),
+ ifelse(source_p == "all", "all sources", source_p), "per day"),
breaks = c("line", "ribbon"),
labels = c("Median", "1st to 3rd quartile"),
values = paste(colour, c("", "66"), sep = "")) +
ggtitle(paste("Time to complete", filesize_str,
- "request to", server, "server")) +
+ "request to", server_p, "server")) +
labs(caption = copyright_notice) +
theme(legend.position = "top")
- ggsave(filename = path, width = 8, height = 5, dpi = 150)
+ ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
}
# Ideally, this function would share code with plot_torperf by using a
@@ -654,81 +656,83 @@ plot_torperf <- function(start, end, source, server, filesize, path) {
# harder than for other functions, because plot_torperf uses different
# colours based on which sources exist, unrelated to which source is
# plotted. Left as future work.
-write_torperf <- function(start = NULL, end = NULL, source = NULL,
- server = NULL, filesize = NULL, path) {
+write_torperf <- function(start_p = NULL, end_p = NULL, source_p = NULL,
+ server_p = NULL, filesize_p = NULL, path_p) {
read.csv(paste(stats_dir, "torperf-1.1.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
- filter(if (!is.null(!!source))
- source == ifelse(!!source == "all", "", !!source) else TRUE) %>%
- filter(if (!is.null(!!server)) server == !!server else TRUE) %>%
- filter(if (!is.null(!!filesize))
- filesize == ifelse(!!filesize == "50kb", 50 * 1024,
- ifelse(!!filesize == "1mb", 1024 * 1024, 5 * 1024 * 1024)) else
+ filter(if (!is.null(start_p)) date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p)) date <= as.Date(end_p) else TRUE) %>%
+ filter(if (!is.null(source_p))
+ source == ifelse(source_p == "all", "", source_p) else TRUE) %>%
+ filter(if (!is.null(server_p)) server == server_p else TRUE) %>%
+ filter(if (!is.null(filesize_p))
+ filesize == ifelse(filesize_p == "50kb", 50 * 1024,
+ ifelse(filesize_p == "1mb", 1024 * 1024, 5 * 1024 * 1024)) else
TRUE) %>%
transmute(date, filesize, source, server, q1 = q1 / 1e3, md = md / 1e3,
q3 = q3 / 1e3) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
-prepare_torperf_failures <- function(start, end, source, server, filesize) {
+prepare_torperf_failures <- function(start_p, end_p, source_p, server_p,
+ filesize_p) {
read.csv(paste(stats_dir, "torperf-1.1.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
- filter(if (!is.null(!!filesize))
- filesize == ifelse(!!filesize == "50kb", 50 * 1024,
- ifelse(!!filesize == "1mb", 1024 * 1024, 5 * 1024 * 1024)) else
+ filter(if (!is.null(start_p)) date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p)) date <= as.Date(end_p) else TRUE) %>%
+ filter(if (!is.null(filesize_p))
+ filesize == ifelse(filesize_p == "50kb", 50 * 1024,
+ ifelse(filesize_p == "1mb", 1024 * 1024, 5 * 1024 * 1024)) else
TRUE) %>%
- filter(if (!is.null(!!source))
- source == ifelse(!!source == "all", "", !!source) else TRUE) %>%
- filter(if (!is.null(!!server)) server == !!server else TRUE) %>%
+ filter(if (!is.null(source_p))
+ source == ifelse(source_p == "all", "", source_p) else TRUE) %>%
+ filter(if (!is.null(server_p)) server == server_p else TRUE) %>%
filter(requests > 0) %>%
transmute(date, filesize, source, server, timeouts = timeouts / requests,
failures = failures / requests)
}
-plot_torperf_failures <- function(start, end, source, server, filesize, path) {
+plot_torperf_failures <- function(start_p, end_p, source_p, server_p,
+ filesize_p, path_p) {
filesizes <- data.frame(filesizes = c("5mb", "1mb", "50kb"),
label = c("5 MiB", "1 MiB", "50 KiB"), stringsAsFactors = FALSE)
- filesize_str <- filesizes[filesizes$filesize == filesize, "label"]
- prepare_torperf_failures(start, end, source, server, filesize) %>%
- gather(variable, value, -date) %>%
+ filesize_str <- filesizes[filesizes$filesize == filesize_p, "label"]
+ prepare_torperf_failures(start_p, end_p, source_p, server_p, filesize_p) %>%
+ gather(variable, value, -c(date, filesize, source, server)) %>%
ggplot(aes(x = date, y = value, colour = variable)) +
geom_point(size = 2) +
scale_x_date(name = "", breaks = custom_breaks,
labels = custom_labels, minor_breaks = custom_minor_breaks) +
scale_y_continuous(name = "", labels = percent, limits = c(0, NA)) +
scale_colour_hue(name = paste("Problems encountered on",
- ifelse(source == "all", "all sources", source)),
+ ifelse(source_p == "all", "all sources", source_p)),
h.start = 45, breaks = c("timeouts", "failures"),
labels = c("Timeouts", "Failures")) +
ggtitle(paste("Timeouts and failures of", filesize_str,
- "requests to", server, "server")) +
+ "requests to", server_p, "server")) +
labs(caption = copyright_notice) +
theme(legend.position = "top")
- ggsave(filename = path, width = 8, height = 5, dpi = 150)
+ ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
}
-write_torperf_failures <- function(start = NULL, end = NULL, source = NULL,
- server = NULL, filesize = NULL, path) {
- prepare_torperf_failures(start, end, source, server, filesize) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+write_torperf_failures <- function(start_p = NULL, end_p = NULL,
+ source_p = NULL, server_p = NULL, filesize_p = NULL, path_p) {
+ prepare_torperf_failures(start_p, end_p, source_p, server_p, filesize_p) %>%
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
-prepare_connbidirect <- function(start, end) {
+prepare_connbidirect <- function(start_p, end_p) {
read.csv(paste(stats_dir, "connbidirect2.csv", sep = ""),
colClasses = c("date" = "Date", "direction" = "factor")) %>%
- filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(start_p)) date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p)) date <= as.Date(end_p) else TRUE) %>%
mutate(quantile = paste("X", quantile, sep = ""),
fraction = fraction / 100) %>%
spread(quantile, fraction)
}
-plot_connbidirect <- function(start, end, path) {
- prepare_connbidirect(start, end) %>%
+plot_connbidirect <- function(start_p, end_p, path_p) {
+ prepare_connbidirect(start_p, end_p) %>%
ggplot(aes(x = date, y = X0.5, colour = direction)) +
geom_line(size = 0.75) +
geom_ribbon(aes(x = date, ymin = X0.25, ymax = X0.75,
@@ -747,24 +751,24 @@ plot_connbidirect <- function(start, end, path) {
ggtitle("Fraction of connections used uni-/bidirectionally") +
labs(caption = copyright_notice) +
theme(legend.position = "top")
- ggsave(filename = path, width = 8, height = 5, dpi = 150)
+ ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
}
-write_connbidirect <- function(start = NULL, end = NULL, path) {
- prepare_connbidirect(start, end) %>%
+write_connbidirect <- function(start_p = NULL, end_p = NULL, path_p) {
+ prepare_connbidirect(start_p, end_p) %>%
rename(q1 = X0.25, md = X0.5, q3 = X0.75) %>%
gather(variable, value, -(date:direction)) %>%
unite(temp, direction, variable) %>%
spread(temp, value) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
-prepare_bandwidth_flags <- function(start, end) {
+prepare_bandwidth_flags <- function(start_p, end_p) {
b <- read.csv(paste(stats_dir, "bandwidth.csv", sep = ""),
colClasses = c("date" = "Date"))
b <- b %>%
- filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(start_p)) date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p)) date <= as.Date(end_p) else TRUE) %>%
filter(isexit != "") %>%
filter(isguard != "")
b <- data.frame(date = b$date,
@@ -789,8 +793,8 @@ prepare_bandwidth_flags <- function(start, end) {
bandwidth
}
-plot_bandwidth_flags <- function(start, end, path) {
- prepare_bandwidth_flags(start, end) %>%
+plot_bandwidth_flags <- function(start_p, end_p, path_p) {
+ prepare_bandwidth_flags(start_p, end_p) %>%
complete(date = full_seq(date, period = 1),
variable = unique(variable)) %>%
ggplot(aes(x = date, y = value, colour = variable)) +
@@ -808,29 +812,29 @@ plot_bandwidth_flags <- function(start, end, path) {
"relay flags")) +
labs(caption = copyright_notice) +
theme(legend.position = "top")
- ggsave(filename = path, width = 8, height = 5, dpi = 150)
+ ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
}
-write_bandwidth_flags <- function(start = NULL, end = NULL, path) {
- prepare_bandwidth_flags(start, end) %>%
+write_bandwidth_flags <- function(start_p = NULL, end_p = NULL, path_p) {
+ prepare_bandwidth_flags(start_p, end_p) %>%
spread(variable, value) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
-plot_userstats <- function(start, end, node, variable, value, events,
- path) {
- load(paste(rdata_dir, "clients-", node, ".RData", sep = ""))
+plot_userstats <- function(start_p, end_p, node_p, variable_p, value_p,
+ events_p, path_p) {
+ load(paste(rdata_dir, "clients-", node_p, ".RData", sep = ""))
c <- data
- u <- c[c$date >= start & c$date <= end, ]
- u <- rbind(u, data.frame(date = start,
- country = ifelse(variable == "country" & value != "all", value, ""),
- transport = ifelse(variable == "transport", value, ""),
- version = ifelse(variable == "version", value, ""),
+ u <- c[c$date >= start_p & c$date <= end_p, ]
+ u <- rbind(u, data.frame(date = start_p,
+ country = ifelse(variable_p == "country" & value_p != "all", value_p, ""),
+ transport = ifelse(variable_p == "transport", value_p, ""),
+ version = ifelse(variable_p == "version", value_p, ""),
lower = 0, upper = 0, clients = 0))
- if (node == "relay") {
- if (value != "all") {
- u <- u[u$country == value, ]
- title <- paste("Directly connecting users from", countryname(value))
+ if (node_p == "relay") {
+ if (value_p != "all") {
+ u <- u[u$country == value_p, ]
+ title <- paste("Directly connecting users from", countryname(value_p))
} else {
u <- u[u$country == "", ]
title <- "Directly connecting users"
@@ -840,8 +844,8 @@ plot_userstats <- function(start, end, node, variable, value, events,
by = list(date = as.Date(u$date, "%Y-%m-%d"),
value = u$country),
FUN = sum)
- } else if (variable == "transport") {
- if ("!<OR>" %in% value) {
+ } else if (variable_p == "transport") {
+ if ("!<OR>" %in% value_p) {
n <- u[u$transport != "" & u$transport != "<OR>", ]
n <- aggregate(list(lower = n$lower, upper = n$upper,
clients = n$clients),
@@ -852,8 +856,8 @@ plot_userstats <- function(start, end, node, variable, value, events,
version = "", lower = n$lower,
upper = n$upper, clients = n$clients))
}
- if (length(value) > 1) {
- u <- u[u$transport %in% value, ]
+ if (length(value_p) > 1) {
+ u <- u[u$transport %in% value_p, ]
u <- aggregate(list(lower = u$lower, upper = u$upper,
users = u$clients),
by = list(date = as.Date(u$date, "%Y-%m-%d"),
@@ -861,32 +865,32 @@ plot_userstats <- function(start, end, node, variable, value, events,
FUN = sum)
title <- paste("Bridge users by transport")
} else {
- u <- u[u$transport == value, ]
+ u <- u[u$transport == value_p, ]
u <- aggregate(list(lower = u$lower, upper = u$upper,
users = u$clients),
by = list(date = as.Date(u$date, "%Y-%m-%d"),
value = u$transport),
FUN = sum)
title <- paste("Bridge users using",
- ifelse(value == "<??>", "unknown pluggable transport(s)",
- ifelse(value == "<OR>", "default OR protocol",
- ifelse(value == "!<OR>", "any pluggable transport",
- ifelse(value == "fte", "FTE",
- ifelse(value == "websocket", "Flash proxy/websocket",
- paste("transport", value)))))))
+ ifelse(value_p == "<??>", "unknown pluggable transport(s)",
+ ifelse(value_p == "<OR>", "default OR protocol",
+ ifelse(value_p == "!<OR>", "any pluggable transport",
+ ifelse(value_p == "fte", "FTE",
+ ifelse(value_p == "websocket", "Flash proxy/websocket",
+ paste("transport", value_p)))))))
}
- } else if (variable == "version") {
- u <- u[u$version == value, ]
- title <- paste("Bridge users using IP", value, sep = "")
+ } else if (variable_p == "version") {
+ u <- u[u$version == value_p, ]
+ title <- paste("Bridge users using IP", value_p, sep = "")
u <- aggregate(list(lower = u$lower, upper = u$upper,
users = u$clients),
by = list(date = as.Date(u$date, "%Y-%m-%d"),
value = u$version),
FUN = sum)
} else {
- if (value != "all") {
- u <- u[u$country == value, ]
- title <- paste("Bridge users from", countryname(value))
+ if (value_p != "all") {
+ u <- u[u$country == value_p, ]
+ title <- paste("Bridge users from", countryname(value_p))
} else {
u <- u[u$country == "" & u$transport == "" & u$version == "", ]
title <- "Bridge users"
@@ -898,19 +902,19 @@ plot_userstats <- function(start, end, node, variable, value, events,
FUN = sum)
}
u <- merge(x = u, all.y = TRUE, y = data.frame(expand.grid(
- date = seq(from = as.Date(start, "%Y-%m-%d"),
- to = as.Date(end, "%Y-%m-%d"), by = "1 day"),
- value = ifelse(value == "all", "", value))))
- if (length(value) > 1) {
+ date = seq(from = as.Date(start_p, "%Y-%m-%d"),
+ to = as.Date(end_p, "%Y-%m-%d"), by = "1 day"),
+ value = ifelse(value_p == "all", "", value_p))))
+ if (length(value_p) > 1) {
plot <- ggplot(u, aes(x = date, y = users, colour = value))
} else {
plot <- ggplot(u, aes(x = date, y = users))
}
- if (length(na.omit(u$users)) > 0 & events != "off" &
- variable == "country" & length(value) == 1 && value != "all") {
+ if (length(na.omit(u$users)) > 0 & events_p != "off" &
+ variable_p == "country" & length(value_p) == 1 && value_p != "all") {
upturns <- u[u$users > u$upper, c("date", "users")]
downturns <- u[u$users < u$lower, c("date", "users")]
- if (events == "on") {
+ if (events_p == "on") {
u[!is.na(u$lower) & u$lower < 0, "lower"] <- 0
plot <- plot +
geom_ribbon(data = u, aes(ymin = lower, ymax = upper), fill = "gray")
@@ -931,79 +935,81 @@ plot_userstats <- function(start, end, node, variable, value, events,
scale_y_continuous(name = "", labels = formatter, limits = c(0, NA)) +
ggtitle(title) +
labs(caption = copyright_notice)
- if (length(value) > 1) {
+ if (length(value_p) > 1) {
plot <- plot +
- scale_colour_hue(name = "", breaks = value,
- labels = ifelse(value == "<??>", "Unknown PT",
- ifelse(value == "<OR>", "Default OR protocol",
- ifelse(value == "!<OR>", "Any PT",
- ifelse(value == "fte", "FTE",
- ifelse(value == "websocket", "Flash proxy/websocket",
- value))))))
+ scale_colour_hue(name = "", breaks = value_p,
+ labels = ifelse(value_p == "<??>", "Unknown PT",
+ ifelse(value_p == "<OR>", "Default OR protocol",
+ ifelse(value_p == "!<OR>", "Any PT",
+ ifelse(value_p == "fte", "FTE",
+ ifelse(value_p == "websocket", "Flash proxy/websocket",
+ value_p))))))
}
- ggsave(filename = path, width = 8, height = 5, dpi = 150)
+ ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
}
-plot_userstats_relay_country <- function(start, end, country, events,
- path) {
- plot_userstats(start, end, "relay", "country", country, events, path)
+plot_userstats_relay_country <- function(start_p, end_p, country_p, events_p,
+ path_p) {
+ plot_userstats(start_p, end_p, "relay", "country", country_p, events_p,
+ path_p)
}
-plot_userstats_bridge_country <- function(start, end, country, path) {
- plot_userstats(start, end, "bridge", "country", country, "off", path)
+plot_userstats_bridge_country <- function(start_p, end_p, country_p, path_p) {
+ plot_userstats(start_p, end_p, "bridge", "country", country_p, "off", path_p)
}
-plot_userstats_bridge_transport <- function(start, end, transport, path) {
- plot_userstats(start, end, "bridge", "transport", transport, "off",
- path)
+plot_userstats_bridge_transport <- function(start_p, end_p, transport_p,
+ path_p) {
+ plot_userstats(start_p, end_p, "bridge", "transport", transport_p, "off",
+ path_p)
}
-plot_userstats_bridge_version <- function(start, end, version, path) {
- plot_userstats(start, end, "bridge", "version", version, "off", path)
+plot_userstats_bridge_version <- function(start_p, end_p, version_p, path_p) {
+ plot_userstats(start_p, end_p, "bridge", "version", version_p, "off", path_p)
}
-write_userstats_relay_country <- function(start = NULL, end = NULL,
- country = NULL, events = NULL, path) {
+write_userstats_relay_country <- function(start_p = NULL, end_p = NULL,
+ country_p = NULL, events_p = NULL, path_p) {
load(paste(rdata_dir, "clients-relay.RData", sep = ""))
u <- data %>%
- filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
- filter(if (!is.null(!!country))
- country == ifelse(!!country == "all", "", !!country) else TRUE) %>%
+ filter(if (!is.null(start_p)) date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p)) date <= as.Date(end_p) else TRUE) %>%
+ filter(if (!is.null(country_p))
+ country == ifelse(country_p == "all", "", country_p) else TRUE) %>%
filter(transport == "") %>%
filter(version == "") %>%
mutate(downturns = clients < lower, upturns = clients > upper) %>%
select(date, country, clients, downturns, upturns, lower, upper) %>%
rename(users = clients) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
-write_userstats_bridge_country <- function(start = NULL, end = NULL,
- country = NULL, path) {
+write_userstats_bridge_country <- function(start_p = NULL, end_p = NULL,
+ country_p = NULL, path_p) {
load(paste(rdata_dir, "clients-bridge.RData", sep = ""))
data %>%
- filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
- filter(if (!is.null(!!country))
- country == ifelse(!!country == "all", "", !!country) else TRUE) %>%
+ filter(if (!is.null(start_p)) date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p)) date <= as.Date(end_p) else TRUE) %>%
+ filter(if (!is.null(country_p))
+ country == ifelse(country_p == "all", "", country_p) else TRUE) %>%
filter(transport == "") %>%
filter(version == "") %>%
select(date, country, clients) %>%
rename(users = clients) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
-write_userstats_bridge_transport <- function(start = NULL, end = NULL,
- transports = NULL, path) {
+write_userstats_bridge_transport <- function(start_p = NULL, end_p = NULL,
+ transport_p = NULL, path_p) {
load(paste(rdata_dir, "clients-bridge.RData", sep = ""))
u <- data %>%
- filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(start_p)) date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p)) date <= as.Date(end_p) else TRUE) %>%
filter(country == "") %>%
filter(version == "") %>%
filter(transport != "") %>%
select(date, transport, clients)
- if (is.null(transports) || "!<OR>" %in% transports) {
+ if (is.null(transport_p) || "!<OR>" %in% transport_p) {
n <- u %>%
filter(transport != "<OR>") %>%
group_by(date) %>%
@@ -1012,7 +1018,7 @@ write_userstats_bridge_transport <- function(start = NULL, end = NULL,
clients = n$clients))
}
u %>%
- filter(if (!is.null(transports)) transport %in% transports else TRUE) %>%
+ filter(if (!is.null(transport_p)) transport %in% transport_p else TRUE) %>%
mutate(transport = ifelse(transport == "<OR>", "default_or_protocol",
ifelse(transport == "!<OR>", "any_pt",
ifelse(transport == "<??>", "unknown_pluggable_transports",
@@ -1020,43 +1026,43 @@ write_userstats_bridge_transport <- function(start = NULL, end = NULL,
group_by(date, transport) %>%
select(date, transport, clients) %>%
spread(transport, clients) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
-write_userstats_bridge_version <- function(start = NULL, end = NULL,
- version = NULL, path) {
+write_userstats_bridge_version <- function(start_p = NULL, end_p = NULL,
+ version_p = NULL, path_p) {
load(paste(rdata_dir, "clients-bridge.RData", sep = ""))
data %>%
- filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(start_p)) date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p)) date <= as.Date(end_p) else TRUE) %>%
filter(country == "") %>%
filter(transport == "") %>%
- filter(if (!is.null(!!version)) version == !!version else TRUE) %>%
+ filter(if (!is.null(version_p)) version == version_p else TRUE) %>%
select(date, version, clients) %>%
rename(users = clients) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
-prepare_userstats_bridge_combined <- function(start, end, country) {
+prepare_userstats_bridge_combined <- function(start_p, end_p, country_p) {
load(paste(rdata_dir, "userstats-bridge-combined.RData", sep = ""))
data %>%
- filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
- filter(if (!is.null(!!country)) country == !!country else TRUE)
+ filter(if (!is.null(start_p)) date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p)) date <= as.Date(end_p) else TRUE) %>%
+ filter(if (!is.null(country_p)) country == country_p else TRUE)
}
-plot_userstats_bridge_combined <- function(start, end, country, path) {
- if (country == "all") {
- plot_userstats_bridge_country(start, end, country, path)
+plot_userstats_bridge_combined <- function(start_p, end_p, country_p, path_p) {
+ if (country_p == "all") {
+ plot_userstats_bridge_country(start_p, end_p, country_p, path_p)
} else {
top <- 3
- u <- prepare_userstats_bridge_combined(start, end, country)
+ u <- prepare_userstats_bridge_combined(start_p, end_p, country_p)
a <- aggregate(list(mid = (u$high + u$low) / 2),
by = list(transport = u$transport), FUN = sum)
a <- a[order(a$mid, decreasing = TRUE)[1:top], ]
u <- u[u$transport %in% a$transport, ]
title <- paste("Bridge users by transport from ",
- countryname(country), sep = "")
+ countryname(country_p), sep = "")
ggplot(u, aes(x = as.Date(date), ymin = low, ymax = high,
colour = transport, fill = transport)) +
geom_ribbon(alpha = 0.5, size = 0.5) +
@@ -1068,40 +1074,40 @@ plot_userstats_bridge_combined <- function(start, end, country, path) {
ggtitle(title) +
labs(caption = copyright_notice) +
theme(legend.position = "top")
- ggsave(filename = path, width = 8, height = 5, dpi = 150)
+ ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
}
}
-write_userstats_bridge_combined <- function(start = NULL, end = NULL,
- country = NULL, path) {
- if (!is.null(country) && country == "all") {
- write_userstats_bridge_country(start, end, country, path)
+write_userstats_bridge_combined <- function(start_p = NULL, end_p = NULL,
+ country_p = NULL, path_p) {
+ if (!is.null(country_p) && country_p == "all") {
+ write_userstats_bridge_country(start_p, end_p, country_p, path_p)
} else {
- prepare_userstats_bridge_combined(start, end, country) %>%
+ prepare_userstats_bridge_combined(start_p, end_p, country_p) %>%
select(date, country, transport, low, high) %>%
mutate(transport = ifelse(transport == "<OR>", "default_or_protocol",
ifelse(transport == "<??>", "unknown_transport", transport))) %>%
gather(variable, value, -(date:transport)) %>%
unite(temp, transport, variable) %>%
spread(temp, value) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
}
-prepare_advbwdist_perc <- function(start, end, p) {
+prepare_advbwdist_perc <- function(start_p, end_p, p_p) {
read.csv(paste(stats_dir, "advbwdist.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
- filter(if (!is.null(p)) percentile %in% as.numeric(p) else
+ filter(if (!is.null(start_p)) date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p)) date <= as.Date(end_p) else TRUE) %>%
+ filter(if (!is.null(p_p)) percentile %in% as.numeric(p_p) else
percentile != "") %>%
transmute(date, percentile = as.factor(percentile),
variable = ifelse(isexit != "t", "all", "exits"),
advbw = advbw * 8 / 1e9)
}
-plot_advbwdist_perc <- function(start, end, p, path) {
- prepare_advbwdist_perc(start, end, p) %>%
+plot_advbwdist_perc <- function(start_p, end_p, p_p, path_p) {
+ prepare_advbwdist_perc(start_p, end_p, p_p) %>%
mutate(variable = ifelse(variable == "all", "All relays",
"Exits only")) %>%
ggplot(aes(x = date, y = advbw, colour = percentile)) +
@@ -1114,30 +1120,31 @@ plot_advbwdist_perc <- function(start, end, p, path) {
scale_colour_hue(name = "Percentile") +
ggtitle("Advertised bandwidth distribution") +
labs(caption = copyright_notice)
- ggsave(filename = path, width = 8, height = 5, dpi = 150)
+ ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
}
-write_advbwdist_perc <- function(start = NULL, end = NULL, p = NULL, path) {
- prepare_advbwdist_perc(start, end, p) %>%
+write_advbwdist_perc <- function(start_p = NULL, end_p = NULL, p_p = NULL,
+ path_p) {
+ prepare_advbwdist_perc(start_p, end_p, p_p) %>%
unite(temp, variable, percentile) %>%
spread(temp, advbw) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
-prepare_advbwdist_relay <- function(start, end, n) {
+prepare_advbwdist_relay <- function(start_p, end_p, n_p) {
read.csv(paste(stats_dir, "advbwdist.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
- filter(if (!is.null(n)) relay %in% as.numeric(n) else
+ filter(if (!is.null(start_p)) date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p)) date <= as.Date(end_p) else TRUE) %>%
+ filter(if (!is.null(n_p)) relay %in% as.numeric(n_p) else
relay != "") %>%
transmute(date, relay = as.factor(relay),
variable = ifelse(isexit != "t", "all", "exits"),
advbw = advbw * 8 / 1e9)
}
-plot_advbwdist_relay <- function(start, end, n, path) {
- prepare_advbwdist_relay(start, end, n) %>%
+plot_advbwdist_relay <- function(start_p, end_p, n_p, path_p) {
+ prepare_advbwdist_relay(start_p, end_p, n_p) %>%
mutate(variable = ifelse(variable == "all", "All relays",
"Exits only")) %>%
ggplot(aes(x = date, y = advbw, colour = relay)) +
@@ -1150,27 +1157,28 @@ plot_advbwdist_relay <- function(start, end, n, path) {
scale_colour_hue(name = "n") +
ggtitle("Advertised bandwidth of n-th fastest relays") +
labs(caption = copyright_notice)
- ggsave(filename = path, width = 8, height = 5, dpi = 150)
+ ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
}
-write_advbwdist_relay <- function(start = NULL, end = NULL, n = NULL, path) {
- prepare_advbwdist_relay(start, end, n) %>%
+write_advbwdist_relay <- function(start_p = NULL, end_p = NULL, n_p = NULL,
+ path_p) {
+ prepare_advbwdist_relay(start_p, end_p, n_p) %>%
unite(temp, variable, relay) %>%
spread(temp, advbw) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
-prepare_hidserv_dir_onions_seen <- function(start, end) {
+prepare_hidserv_dir_onions_seen <- function(start_p, end_p) {
read.csv(paste(stats_dir, "hidserv.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(start_p)) date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p)) date <= as.Date(end_p) else TRUE) %>%
filter(type == "dir-onions-seen") %>%
transmute(date = date, onions = ifelse(frac >= 0.01, wiqm, NA))
}
-plot_hidserv_dir_onions_seen <- function(start, end, path) {
- prepare_hidserv_dir_onions_seen(start, end) %>%
+plot_hidserv_dir_onions_seen <- function(start_p, end_p, path_p) {
+ prepare_hidserv_dir_onions_seen(start_p, end_p) %>%
ggplot(aes(x = date, y = onions)) +
geom_line() +
scale_x_date(name = "", breaks = custom_breaks,
@@ -1178,26 +1186,27 @@ plot_hidserv_dir_onions_seen <- function(start, end, path) {
scale_y_continuous(name = "", limits = c(0, NA), labels = formatter) +
ggtitle("Unique .onion addresses") +
labs(caption = copyright_notice)
- ggsave(filename = path, width = 8, height = 5, dpi = 150)
+ ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
}
-write_hidserv_dir_onions_seen <- function(start = NULL, end = NULL, path) {
- prepare_hidserv_dir_onions_seen(start, end) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+write_hidserv_dir_onions_seen <- function(start_p = NULL, end_p = NULL,
+ path_p) {
+ prepare_hidserv_dir_onions_seen(start_p, end_p) %>%
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
-prepare_hidserv_rend_relayed_cells <- function(start, end) {
+prepare_hidserv_rend_relayed_cells <- function(start_p, end_p) {
read.csv(paste(stats_dir, "hidserv.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(start_p)) date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p)) date <= as.Date(end_p) else TRUE) %>%
filter(type == "rend-relayed-cells") %>%
transmute(date,
relayed = ifelse(frac >= 0.01, wiqm * 8 * 512 / (86400 * 1e9), NA))
}
-plot_hidserv_rend_relayed_cells <- function(start, end, path) {
- prepare_hidserv_rend_relayed_cells(start, end) %>%
+plot_hidserv_rend_relayed_cells <- function(start_p, end_p, path_p) {
+ prepare_hidserv_rend_relayed_cells(start_p, end_p) %>%
ggplot(aes(x = date, y = relayed)) +
geom_line() +
scale_x_date(name = "", breaks = custom_breaks,
@@ -1206,24 +1215,25 @@ plot_hidserv_rend_relayed_cells <- function(start, end, path) {
limits = c(0, NA)) +
ggtitle("Onion-service traffic") +
labs(caption = copyright_notice)
- ggsave(filename = path, width = 8, height = 5, dpi = 150)
+ ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
}
-write_hidserv_rend_relayed_cells <- function(start = NULL, end = NULL, path) {
- prepare_hidserv_rend_relayed_cells(start, end) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+write_hidserv_rend_relayed_cells <- function(start_p = NULL, end_p = NULL,
+ path_p) {
+ prepare_hidserv_rend_relayed_cells(start_p, end_p) %>%
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
-prepare_hidserv_frac_reporting <- function(start, end) {
+prepare_hidserv_frac_reporting <- function(start_p, end_p) {
read.csv(paste(stats_dir, "hidserv.csv", sep = ""),
colClasses = c("date" = "Date")) %>%
- filter(if (!is.null(start)) date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end)) date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(start_p)) date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p)) date <= as.Date(end_p) else TRUE) %>%
select(date, frac, type)
}
-plot_hidserv_frac_reporting <- function(start, end, path) {
- prepare_hidserv_frac_reporting(start, end) %>%
+plot_hidserv_frac_reporting <- function(start_p, end_p, path_p) {
+ prepare_hidserv_frac_reporting(start_p, end_p) %>%
ggplot(aes(x = date, y = frac, colour = type)) +
geom_line() +
geom_hline(yintercept = 0.01, linetype = 2) +
@@ -1238,26 +1248,26 @@ plot_hidserv_frac_reporting <- function(start, end, path) {
"statistics")) +
labs(caption = copyright_notice) +
theme(legend.position = "top")
- ggsave(filename = path, width = 8, height = 5, dpi = 150)
+ ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
}
-write_hidserv_frac_reporting <- function(start = NULL, end = NULL, path) {
- prepare_hidserv_frac_reporting(start, end) %>%
+write_hidserv_frac_reporting <- function(start_p = NULL, end_p = NULL, path_p) {
+ prepare_hidserv_frac_reporting(start_p, end_p) %>%
mutate(type = ifelse(type == "dir-onions-seen", "onions", "relayed")) %>%
spread(type, frac) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
-prepare_webstats_tb <- function(start, end) {
+prepare_webstats_tb <- function(start_p, end_p) {
load(paste(rdata_dir, "webstats-tb.RData", sep = ""))
data %>%
- filter(if (!is.null(start)) log_date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end)) log_date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(start_p)) log_date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p)) log_date <= as.Date(end_p) else TRUE) %>%
mutate(request_type = factor(request_type))
}
-plot_webstats_tb <- function(start, end, path) {
- d <- prepare_webstats_tb(start, end)
+plot_webstats_tb <- function(start_p, end_p, path_p) {
+ d <- prepare_webstats_tb(start_p, end_p)
levels(d$request_type) <- list(
"Initial downloads" = "tbid",
"Signature downloads" = "tbsd",
@@ -1274,30 +1284,30 @@ plot_webstats_tb <- function(start, end, path) {
strip.background = element_rect(fill = NA)) +
ggtitle("Tor Browser downloads and updates") +
labs(caption = copyright_notice)
- ggsave(filename = path, width = 8, height = 5, dpi = 150)
+ ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
}
-write_webstats_tb <- function(start = NULL, end = NULL, path) {
- prepare_webstats_tb(start, end) %>%
+write_webstats_tb <- function(start_p = NULL, end_p = NULL, path_p) {
+ prepare_webstats_tb(start_p, end_p) %>%
rename(date = log_date) %>%
spread(request_type, count) %>%
rename(initial_downloads = tbid, signature_downloads = tbsd,
update_pings = tbup, update_requests = tbur) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
-prepare_webstats_tb_platform <- function(start, end) {
+prepare_webstats_tb_platform <- function(start_p, end_p) {
read.csv(paste(stats_dir, "webstats.csv", sep = ""),
colClasses = c("log_date" = "Date")) %>%
- filter(if (!is.null(start)) log_date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end)) log_date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(start_p)) log_date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p)) log_date <= as.Date(end_p) else TRUE) %>%
filter(request_type == "tbid") %>%
group_by(log_date, platform) %>%
summarize(count = sum(count))
}
-plot_webstats_tb_platform <- function(start, end, path) {
- prepare_webstats_tb_platform(start, end) %>%
+plot_webstats_tb_platform <- function(start_p, end_p, path_p) {
+ prepare_webstats_tb_platform(start_p, end_p) %>%
ggplot(aes(x = log_date, y = count, colour = platform)) +
geom_point() +
geom_line() +
@@ -1311,21 +1321,22 @@ plot_webstats_tb_platform <- function(start, end, path) {
strip.background = element_rect(fill = NA)) +
ggtitle("Tor Browser downloads by platform") +
labs(caption = copyright_notice)
- ggsave(filename = path, width = 8, height = 5, dpi = 150)
+ ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
}
-write_webstats_tb_platform <- function(start = NULL, end = NULL, path) {
- prepare_webstats_tb_platform(start, end) %>%
+write_webstats_tb_platform <- function(start_p = NULL, end_p = NULL, path_p) {
+ prepare_webstats_tb_platform(start_p, end_p) %>%
rename(date = log_date) %>%
spread(platform, count) %>%
rename(linux = l, macos = m, windows = w) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
-plot_webstats_tb_locale <- function(start, end, path) {
+plot_webstats_tb_locale <- function(start_p, end_p, path_p) {
d <- read.csv(paste(stats_dir, "webstats.csv", sep = ""),
colClasses = c("log_date" = "Date", "locale" = "character"))
- d <- d[d$log_date >= start & d$log_date <= end & d$request_type == "tbid", ]
+ d <- d[d$log_date >= start_p & d$log_date <= end_p &
+ d$request_type == "tbid", ]
e <- d
e <- aggregate(list(count = e$count), by = list(locale = e$locale), FUN = sum)
e <- e[order(e$count, decreasing = TRUE), ]
@@ -1345,7 +1356,7 @@ plot_webstats_tb_locale <- function(start, end, path) {
strip.background = element_rect(fill = NA)) +
ggtitle("Tor Browser downloads by locale") +
labs(caption = copyright_notice)
- ggsave(filename = path, width = 8, height = 5, dpi = 150)
+ ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
}
# Ideally, this function would share code with plot_webstats_tb_locale
@@ -1353,12 +1364,12 @@ plot_webstats_tb_locale <- function(start, end, path) {
# turned out to be a bit harder than for other functions, because
# plot_webstats_tb_locale needs the preliminary data frame e for its
# breaks and labels. Left as future work.
-write_webstats_tb_locale <- function(start = NULL, end = NULL, path) {
+write_webstats_tb_locale <- function(start_p = NULL, end_p = NULL, path_p) {
d <- read.csv(paste(stats_dir, "webstats.csv", sep = ""),
colClasses = c("log_date" = "Date", "locale" = "character"))
d <- d %>%
- filter(if (!is.null(start)) log_date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end)) log_date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(start_p)) log_date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p)) log_date <= as.Date(end_p) else TRUE) %>%
filter(request_type == "tbid")
e <- d
e <- aggregate(list(count = e$count), by = list(locale = e$locale), FUN = sum)
@@ -1370,19 +1381,19 @@ write_webstats_tb_locale <- function(start = NULL, end = NULL, path) {
mutate(locale = tolower(locale)) %>%
rename(date = log_date) %>%
spread(locale, count) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
-prepare_webstats_tm <- function(start, end) {
+prepare_webstats_tm <- function(start_p, end_p) {
load(paste(rdata_dir, "webstats-tm.RData", sep = ""))
data %>%
- filter(if (!is.null(start)) log_date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end)) log_date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(start_p)) log_date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p)) log_date <= as.Date(end_p) else TRUE) %>%
mutate(request_type = factor(request_type))
}
-plot_webstats_tm <- function(start, end, path) {
- d <- prepare_webstats_tm(start, end)
+plot_webstats_tm <- function(start_p, end_p, path_p) {
+ d <- prepare_webstats_tm(start_p, end_p)
levels(d$request_type) <- list(
"Initial downloads" = "tmid",
"Update pings" = "tmup")
@@ -1397,24 +1408,24 @@ plot_webstats_tm <- function(start, end, path) {
strip.background = element_rect(fill = NA)) +
ggtitle("Tor Messenger downloads and updates") +
labs(caption = copyright_notice)
- ggsave(filename = path, width = 8, height = 5, dpi = 150)
+ ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
}
-write_webstats_tm <- function(start = NULL, end = NULL, path) {
- prepare_webstats_tm(start, end) %>%
+write_webstats_tm <- function(start_p = NULL, end_p = NULL, path_p) {
+ prepare_webstats_tm(start_p, end_p) %>%
rename(date = log_date) %>%
spread(request_type, count) %>%
rename(initial_downloads = tmid, update_pings = tmup) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
-prepare_relays_ipv6 <- function(start, end) {
+prepare_relays_ipv6 <- function(start_p, end_p) {
read.csv(paste(stats_dir, "ipv6servers.csv", sep = ""),
colClasses = c("valid_after_date" = "Date")) %>%
- filter(if (!is.null(start))
- valid_after_date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end))
- valid_after_date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(start_p))
+ valid_after_date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p))
+ valid_after_date <= as.Date(end_p) else TRUE) %>%
filter(server == "relay") %>%
group_by(valid_after_date) %>%
summarize(total = sum(server_count_sum_avg),
@@ -1426,8 +1437,8 @@ prepare_relays_ipv6 <- function(start, end) {
value = "count")
}
-plot_relays_ipv6 <- function(start, end, path) {
- prepare_relays_ipv6(start, end) %>%
+plot_relays_ipv6 <- function(start_p, end_p, path_p) {
+ prepare_relays_ipv6(start_p, end_p) %>%
ggplot(aes(x = valid_after_date, y = count, colour = category)) +
geom_line() +
scale_x_date(name = "", breaks = custom_breaks,
@@ -1440,23 +1451,23 @@ plot_relays_ipv6 <- function(start, end, path) {
ggtitle("Relays by IP version") +
labs(caption = copyright_notice) +
theme(legend.position = "top")
- ggsave(filename = path, width = 8, height = 5, dpi = 150)
+ ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
}
-write_relays_ipv6 <- function(start = NULL, end = NULL, path) {
- prepare_relays_ipv6(start, end) %>%
+write_relays_ipv6 <- function(start_p = NULL, end_p = NULL, path_p) {
+ prepare_relays_ipv6(start_p, end_p) %>%
rename(date = valid_after_date) %>%
spread(category, count) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
-prepare_bridges_ipv6 <- function(start, end) {
+prepare_bridges_ipv6 <- function(start_p, end_p) {
read.csv(paste(stats_dir, "ipv6servers.csv", sep = ""),
colClasses = c("valid_after_date" = "Date")) %>%
- filter(if (!is.null(start))
- valid_after_date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end))
- valid_after_date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(start_p))
+ valid_after_date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p))
+ valid_after_date <= as.Date(end_p) else TRUE) %>%
filter(server == "bridge") %>%
group_by(valid_after_date) %>%
summarize(total = sum(server_count_sum_avg),
@@ -1465,8 +1476,8 @@ prepare_bridges_ipv6 <- function(start, end) {
gather(total, announced, key = "category", value = "count")
}
-plot_bridges_ipv6 <- function(start, end, path) {
- prepare_bridges_ipv6(start, end) %>%
+plot_bridges_ipv6 <- function(start_p, end_p, path_p) {
+ prepare_bridges_ipv6(start_p, end_p) %>%
ggplot(aes(x = valid_after_date, y = count, colour = category)) +
geom_line() +
scale_x_date(name = "", breaks = custom_breaks,
@@ -1478,23 +1489,23 @@ plot_bridges_ipv6 <- function(start, end, path) {
ggtitle("Bridges by IP version") +
labs(caption = copyright_notice) +
theme(legend.position = "top")
- ggsave(filename = path, width = 8, height = 5, dpi = 150)
+ ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
}
-write_bridges_ipv6 <- function(start = NULL, end = NULL, path) {
- prepare_bridges_ipv6(start, end) %>%
+write_bridges_ipv6 <- function(start_p = NULL, end_p = NULL, path_p) {
+ prepare_bridges_ipv6(start_p, end_p) %>%
rename(date = valid_after_date) %>%
spread(category, count) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
-prepare_advbw_ipv6 <- function(start, end) {
+prepare_advbw_ipv6 <- function(start_p, end_p) {
read.csv(paste(stats_dir, "ipv6servers.csv", sep = ""),
colClasses = c("valid_after_date" = "Date")) %>%
- filter(if (!is.null(start))
- valid_after_date >= as.Date(start) else TRUE) %>%
- filter(if (!is.null(end))
- valid_after_date <= as.Date(end) else TRUE) %>%
+ filter(if (!is.null(start_p))
+ valid_after_date >= as.Date(start_p) else TRUE) %>%
+ filter(if (!is.null(end_p))
+ valid_after_date <= as.Date(end_p) else TRUE) %>%
filter(server == "relay") %>%
group_by(valid_after_date) %>%
summarize(total = sum(advertised_bandwidth_bytes_sum_avg),
@@ -1512,8 +1523,8 @@ prepare_advbw_ipv6 <- function(start, end) {
mutate(advbw = advbw * 8 / 1e9)
}
-plot_advbw_ipv6 <- function(start, end, path) {
- prepare_advbw_ipv6(start, end) %>%
+plot_advbw_ipv6 <- function(start_p, end_p, path_p) {
+ prepare_advbw_ipv6(start_p, end_p) %>%
ggplot(aes(x = valid_after_date, y = advbw, colour = category)) +
geom_line() +
scale_x_date(name = "", breaks = custom_breaks,
@@ -1529,13 +1540,13 @@ plot_advbw_ipv6 <- function(start, end, path) {
labs(caption = copyright_notice) +
theme(legend.position = "top") +
guides(colour = guide_legend(nrow = 2, byrow = TRUE))
- ggsave(filename = path, width = 8, height = 5, dpi = 150)
+ ggsave(filename = path_p, width = 8, height = 5, dpi = 150)
}
-write_advbw_ipv6 <- function(start = NULL, end = NULL, path) {
- prepare_advbw_ipv6(start, end) %>%
+write_advbw_ipv6 <- function(start_p = NULL, end_p = NULL, path_p) {
+ prepare_advbw_ipv6(start_p, end_p) %>%
rename(date = valid_after_date) %>%
spread(category, advbw) %>%
- write.csv(path, quote = FALSE, row.names = FALSE, na = "")
+ write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
diff --git a/src/main/java/org/torproject/metrics/web/RObjectGenerator.java b/src/main/java/org/torproject/metrics/web/RObjectGenerator.java
index 00fcc81..49e24ec 100644
--- a/src/main/java/org/torproject/metrics/web/RObjectGenerator.java
+++ b/src/main/java/org/torproject/metrics/web/RObjectGenerator.java
@@ -146,10 +146,10 @@ public class RObjectGenerator implements ServletContextListener {
imageFilenameBuilder.append("-").append(param);
}
if (parameterValues.length < 2) {
- queryBuilder.append(parameterName).append(" = '")
+ queryBuilder.append(parameterName).append("_p = '")
.append(parameterValues[0]).append("', ");
} else {
- queryBuilder.append(parameterName).append(" = c(");
+ queryBuilder.append(parameterName).append("_p = c(");
for (int i = 0; i < parameterValues.length - 1; i++) {
queryBuilder.append("'").append(parameterValues[i]).append("', ");
}
@@ -159,7 +159,7 @@ public class RObjectGenerator implements ServletContextListener {
}
imageFilenameBuilder.append(".").append(fileType);
String imageFilename = imageFilenameBuilder.toString();
- queryBuilder.append("path = '%1$s')), '%1$s')");
+ queryBuilder.append("path_p = '%1$s')), '%1$s')");
String query = queryBuilder.toString();
File imageFile = new File(this.cachedGraphsDirectory + "/"
+ imageFilename);
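For readers skimming the patch, here is a minimal, hypothetical sketch (not part of the commit; the data frame, column names, and file name are made up) of the convention the R changes above establish: each user-facing parameter carries a "_p" suffix so it can never collide with a data column, and a NULL default combined with a guarded filter() step makes it optional. On the Java side, RObjectGenerator simply appends "_p" to each parameter name when it builds the query, so a generated call contains fragments like country_p = 'de', path_p = '...' rather than country = 'de'.
library(dplyr)
write_example <- function(start_p = NULL, end_p = NULL, country_p = NULL,
    path_p) {
  data.frame(date = as.Date(c("2018-05-01", "2018-05-02", "2018-05-03")),
      country = c("de", "us", "de"), users = c(10, 20, 30)) %>%
    filter(if (!is.null(start_p)) date >= as.Date(start_p) else TRUE) %>%
    filter(if (!is.null(end_p)) date <= as.Date(end_p) else TRUE) %>%
    # country_p is the parameter; country (unsuffixed) is the data column.
    filter(if (!is.null(country_p)) country == country_p else TRUE) %>%
    write.csv(path_p, quote = FALSE, row.names = FALSE, na = "")
}
# Omitted parameters simply skip their filter step:
write_example(country_p = "de", path_p = "example.csv")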
[tor-browser-build/master] Bug 26165: make it possible to use gcc:var/setup without hardening wrapper
by gk@torproject.org 29 May '18
by gk@torproject.org 29 May '18
29 May '18
commit 2b3a35530ece4f61f86786bc524d18448bc009eb
Author: Nicolas Vigier <boklm(a)torproject.org>
Date: Tue May 22 18:21:28 2018 +0200
Bug 26165: make it possible to use gcc:var/setup without hardening wrapper
This allows us to use it to set up the gcc host compiler for the Windows
build in projects/firefox/build.
---
projects/firefox/build | 10 ++--------
projects/gcc/config | 36 ++++++++++++++++++++----------------
2 files changed, 22 insertions(+), 24 deletions(-)
diff --git a/projects/firefox/build b/projects/firefox/build
index a7441bd..aa5fd73 100644
--- a/projects/firefox/build
+++ b/projects/firefox/build
@@ -2,16 +2,10 @@
[% c("var/setarch") -%]
[% c("var/set_default_env") -%]
[% IF c("var/windows") -%]
- mkdir -p /var/tmp/dist
- tar -C /var/tmp/dist -xf $rootdir/[% c('input_files_by_name/gcc') %]
- hgccdir=/var/tmp/dist/gcc
+ [% pc('gcc', 'var/setup', { compiler_tarfile => c('input_files_by_name/gcc') }) %]
# We need a link to our GCC, otherwise the system cc gets used which points to
# /usr/bin/gcc.
- ln -s gcc $hgccdir/bin/cc
- # Make sure our GCC (as host compiler) get used. We do that before
- # compiler setup so that mingw is first in the PATH.
- export LD_LIBRARY_PATH=$hgccdir/lib64
- export PATH=$hgccdir/bin:$PATH
+ ln -s gcc /var/tmp/dist/gcc/bin/cc
[% END -%]
[% pc(c('var/compiler'), 'var/setup', { compiler_tarfile => c('input_files_by_name/' _ c('var/compiler')) }) %]
distdir=/var/tmp/dist/[% project %]
diff --git a/projects/gcc/config b/projects/gcc/config
index 1d74a0a..b13a738 100644
--- a/projects/gcc/config
+++ b/projects/gcc/config
@@ -18,23 +18,25 @@ var:
export LD_LIBRARY_PATH=/var/tmp/dist/gcc/lib64
[% END -%]
- # Config options for hardening-wrapper
- export DEB_BUILD_HARDENING=1
- export DEB_BUILD_HARDENING_STACKPROTECTOR=1
- export DEB_BUILD_HARDENING_FORTIFY=1
- export DEB_BUILD_HARDENING_FORMAT=1
- export DEB_BUILD_HARDENING_PIE=1
+ [% IF c("hardened_gcc") -%]
+ # Config options for hardening-wrapper
+ export DEB_BUILD_HARDENING=1
+ export DEB_BUILD_HARDENING_STACKPROTECTOR=1
+ export DEB_BUILD_HARDENING_FORTIFY=1
+ export DEB_BUILD_HARDENING_FORMAT=1
+ export DEB_BUILD_HARDENING_PIE=1
- # Make sure we use the hardening wrapper
- pushd /var/tmp/dist/gcc/bin
- cp /usr/bin/hardened-cc ./
- mv gcc gcc.real
- mv c++ c++.real
- mv g++ g++.real
- ln -sf hardened-cc gcc
- ln -sf hardened-cc c++
- ln -sf hardened-cc g++
- popd
+ # Make sure we use the hardening wrapper
+ pushd /var/tmp/dist/gcc/bin
+ cp /usr/bin/hardened-cc ./
+ mv gcc gcc.real
+ mv c++ c++.real
+ mv g++ g++.real
+ ln -sf hardened-cc gcc
+ ln -sf hardened-cc c++
+ ln -sf hardened-cc g++
+ popd
+ [% END -%]
targets:
windows:
@@ -43,6 +45,8 @@ targets:
arch_deps:
- libc6-dev
- zlib1g-dev
+ linux:
+ hardened_gcc: 1
linux-i686:
var:
configure_opt: --enable-multilib --enable-languages=c,c++ --with-system-zlib
[tor-browser-build/master] Merge remote-tracking branch 'boklm/bug_26165_v2'
by gk@torproject.org 29 May '18
by gk@torproject.org 29 May '18
29 May '18
commit b1e6dd922f17555ad153cd9e05997ca41e50417a
Merge: 8fd3d1d 2b3a355
Author: Georg Koppen <gk(a)torproject.org>
Date: Tue May 29 07:48:51 2018 +0000
Merge remote-tracking branch 'boklm/bug_26165_v2'
projects/firefox/build | 10 ++--------
projects/gcc/config | 36 ++++++++++++++++++++----------------
2 files changed, 22 insertions(+), 24 deletions(-)
28 May '18
commit 07bdf93e5a4545d33e9204192eab60fd508a02da
Author: Georg Koppen <gk(a)torproject.org>
Date: Mon May 28 09:24:37 2018 +0000
Bug 26129: Show our about:tor page on startup
---
src/chrome/content/locale/non-localized.properties | 6 ++++++
src/defaults/preferences/preferences.js | 2 +-
2 files changed, 7 insertions(+), 1 deletion(-)
diff --git a/src/chrome/content/locale/non-localized.properties b/src/chrome/content/locale/non-localized.properties
new file mode 100644
index 0000000..dca7863
--- /dev/null
+++ b/src/chrome/content/locale/non-localized.properties
@@ -0,0 +1,6 @@
+# This file must be located in a subdirectory named "locale" so that
+# about:config will display the correct value for browser.startup.homepage.
+# See fetchPref() inside toolkit/components/viewconfig/content/config.js
+
+# Default home page
+browser.startup.homepage=about:tor
diff --git a/src/defaults/preferences/preferences.js b/src/defaults/preferences/preferences.js
index f51c7db..f61cb89 100644
--- a/src/defaults/preferences/preferences.js
+++ b/src/defaults/preferences/preferences.js
@@ -53,7 +53,7 @@ pref("extensions.torbutton.confirm_plugins", true);
pref("extensions.torbutton.confirm_newnym", true);
// Browser home page:
-pref("browser.startup.homepage", "about:tor");
+pref("browser.startup.homepage", "chrome://torbutton/content/locale/non-localized.properties");
// This pref specifies an ad-hoc "version" for various pref update hacks we need to do
pref("extensions.torbutton.pref_fixup_version", 0);
[translation/torbutton-torbuttondtd_completed] Update translations for torbutton-torbuttondtd_completed
by translation@torproject.org 28 May '18
by translation@torproject.org 28 May '18
28 May '18
commit f2695cc79f15d821354f0bcddfd8bdde7dd7cd69
Author: Translation commit bot <translation(a)torproject.org>
Date: Mon May 28 21:17:26 2018 +0000
Update translations for torbutton-torbuttondtd_completed
---
da/torbutton.dtd | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/da/torbutton.dtd b/da/torbutton.dtd
index 5a82f184d..7936505c0 100644
--- a/da/torbutton.dtd
+++ b/da/torbutton.dtd
@@ -47,4 +47,5 @@
<!ENTITY torbutton.prefs.sec_limit_typography "Nogle skrifttyper og matematiksymboler er deaktiverede.">
<!ENTITY torbutton.prefs.sec_limit_graphics_and_typography "Nogle skrifttyper, ikoner, matematiksymboler og billeder er deaktiveret.">
<!ENTITY torbutton.prefs.sec_click_to_play_media "Lyd og video (HTML5-medier) er klik-for-at-afspille.">
-<!ENTITY torbutton.circuit_display.title "Tor-kredsløb for dette websted">
+<!ENTITY torbutton.circuit_display.title "Tor-kredsløb">
+<!ENTITY torbutton.circuit_display.new_circuit "Nyt kredsløb til dette sted">
[translation/torbutton-torbuttondtd] Update translations for torbutton-torbuttondtd
by translation@torproject.org 28 May '18
by translation@torproject.org 28 May '18
28 May '18
commit 7cf31b4b946d05ef97ceab38ebaad84f808b483b
Author: Translation commit bot <translation(a)torproject.org>
Date: Mon May 28 21:17:22 2018 +0000
Update translations for torbutton-torbuttondtd
---
da/torbutton.dtd | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/da/torbutton.dtd b/da/torbutton.dtd
index d5af6c14f..7936505c0 100644
--- a/da/torbutton.dtd
+++ b/da/torbutton.dtd
@@ -47,5 +47,5 @@
<!ENTITY torbutton.prefs.sec_limit_typography "Nogle skrifttyper og matematiksymboler er deaktiverede.">
<!ENTITY torbutton.prefs.sec_limit_graphics_and_typography "Nogle skrifttyper, ikoner, matematiksymboler og billeder er deaktiveret.">
<!ENTITY torbutton.prefs.sec_click_to_play_media "Lyd og video (HTML5-medier) er klik-for-at-afspille.">
-<!ENTITY torbutton.circuit_display.title "Tor Circuit">
-<!ENTITY torbutton.circuit_display.new_circuit "New Circuit for this Site">
+<!ENTITY torbutton.circuit_display.title "Tor-kredsløb">
+<!ENTITY torbutton.circuit_display.new_circuit "Nyt kredsløb til dette sted">
[translation/torbutton-torbuttonproperties_completed] Update translations for torbutton-torbuttonproperties_completed
by translation@torproject.org 28 May '18
by translation@torproject.org 28 May '18
28 May '18
commit 54e3ce0a71a311ecfa2cf3c8b3cc910b56257c70
Author: Translation commit bot <translation(a)torproject.org>
Date: Mon May 28 21:17:16 2018 +0000
Update translations for torbutton-torbuttonproperties_completed
---
da/torbutton.properties | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/da/torbutton.properties b/da/torbutton.properties
index 54df12c38..d3be2ccfd 100644
--- a/da/torbutton.properties
+++ b/da/torbutton.properties
@@ -2,9 +2,12 @@ torbutton.circuit_display.internet = Internet
torbutton.circuit_display.ip_unknown = Ukendt IP-adresse
torbutton.circuit_display.onion_site = Løg side
torbutton.circuit_display.this_browser = Denne browser
-torbutton.circuit_display.relay = relæ
+torbutton.circuit_display.relay = Relæ
torbutton.circuit_display.tor_bridge = Bro
torbutton.circuit_display.unknown_country = Ukendt land
+torbutton.circuit_display.guard = Vagt
+torbutton.circuit_display.guard_note = Dit [Vagt]-knudepunkt må ikke ændres.
+torbutton.circuit_display.learn_more = Lær mere
torbutton.content_sizer.margin_tooltip = Tor Browser tilføjer denne margin for at gøre dit vindues højde og bredde mindre genkendeligt, hvilket reducerer muligheden for at andre spore dig online.
torbutton.panel.tooltip.disabled = Klik for at aktivere Tor
torbutton.panel.tooltip.enabled = Klik for at deaktivere Tor
[translation/torbutton-torbuttonproperties] Update translations for torbutton-torbuttonproperties
by translation@torproject.org 28 May '18
by translation@torproject.org 28 May '18
28 May '18
commit 99bcd2f4e0a5902bb995e652b8827abae6d5fe16
Author: Translation commit bot <translation(a)torproject.org>
Date: Mon May 28 21:17:12 2018 +0000
Update translations for torbutton-torbuttonproperties
---
da/torbutton.properties | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/da/torbutton.properties b/da/torbutton.properties
index e24a2cea9..d3be2ccfd 100644
--- a/da/torbutton.properties
+++ b/da/torbutton.properties
@@ -5,8 +5,8 @@ torbutton.circuit_display.this_browser = Denne browser
torbutton.circuit_display.relay = Relæ
torbutton.circuit_display.tor_bridge = Bro
torbutton.circuit_display.unknown_country = Ukendt land
-torbutton.circuit_display.guard = Guard
-torbutton.circuit_display.guard_note = Your [Guard] node may not change.
+torbutton.circuit_display.guard = Vagt
+torbutton.circuit_display.guard_note = Dit [Vagt]-knudepunkt må ikke ændres.
torbutton.circuit_display.learn_more = Lær mere
torbutton.content_sizer.margin_tooltip = Tor Browser tilføjer denne margin for at gøre dit vindues højde og bredde mindre genkendeligt, hvilket reducerer muligheden for at andre spore dig online.
torbutton.panel.tooltip.disabled = Klik for at aktivere Tor