tor-commits

26 Jun '13
commit af5862bf7443eaba008b1de134c17991bf0c4373
Author: Arturo Filastò <art@fuffa.org>
Date: Tue Apr 23 16:58:36 2013 +0200
Move nettests into data/ subdirectory
---
data/nettests/blocking/__init__.py | 1 +
data/nettests/blocking/dnsconsistency.py | 173 ++++++
data/nettests/blocking/http_requests.py | 130 ++++
data/nettests/blocking/tcpconnect.py | 45 ++
data/nettests/examples/example_dns_http.py | 11 +
data/nettests/examples/example_dnst.py | 13 +
data/nettests/examples/example_http_checksum.py | 27 +
data/nettests/examples/example_httpt.py | 36 ++
data/nettests/examples/example_myip.py | 21 +
data/nettests/examples/example_scapyt.py | 29 +
data/nettests/examples/example_scapyt_yield.py | 25 +
data/nettests/examples/example_simple.py | 8 +
data/nettests/examples/example_tcpt.py | 21 +
.../experimental/bridge_reachability/bridget.py | 462 ++++++++++++++
.../experimental/bridge_reachability/echo.py | 132 ++++
data/nettests/experimental/chinatrigger.py | 108 ++++
data/nettests/experimental/dns_injection.py | 63 ++
data/nettests/experimental/domclass_collector.py | 33 +
.../experimental/http_filtering_bypassing.py | 84 +++
.../experimental/http_keyword_filtering.py | 45 ++
data/nettests/experimental/http_trix.py | 47 ++
.../experimental/http_uk_mobile_networks.py | 85 +++
data/nettests/experimental/keyword_filtering.py | 52 ++
data/nettests/experimental/parasitictraceroute.py | 129 ++++
data/nettests/experimental/squid.py | 117 ++++
data/nettests/manipulation/captiveportal.py | 650 ++++++++++++++++++++
data/nettests/manipulation/daphne.py | 119 ++++
data/nettests/manipulation/dnsspoof.py | 69 +++
.../manipulation/http_header_field_manipulation.py | 189 ++++++
data/nettests/manipulation/http_host.py | 141 +++++
.../manipulation/http_invalid_request_line.py | 106 ++++
data/nettests/manipulation/traceroute.py | 143 +++++
data/nettests/scanning/http_url_list.py | 98 +++
data/nettests/third_party/Makefile | 3 +
data/nettests/third_party/README | 14 +
data/nettests/third_party/netalyzr.py | 58 ++
nettests/blocking/__init__.py | 1 -
nettests/blocking/dnsconsistency.py | 173 ------
nettests/blocking/http_requests.py | 130 ----
nettests/blocking/tcpconnect.py | 45 --
nettests/examples/example_dns_http.py | 11 -
nettests/examples/example_dnst.py | 13 -
nettests/examples/example_http_checksum.py | 27 -
nettests/examples/example_httpt.py | 36 --
nettests/examples/example_myip.py | 21 -
nettests/examples/example_scapyt.py | 29 -
nettests/examples/example_scapyt_yield.py | 25 -
nettests/examples/example_simple.py | 8 -
nettests/examples/example_tcpt.py | 21 -
.../experimental/bridge_reachability/bridget.py | 462 --------------
nettests/experimental/bridge_reachability/echo.py | 132 ----
nettests/experimental/chinatrigger.py | 108 ----
nettests/experimental/dns_injection.py | 63 --
nettests/experimental/domclass_collector.py | 33 -
nettests/experimental/http_filtering_bypassing.py | 84 ---
nettests/experimental/http_keyword_filtering.py | 45 --
nettests/experimental/http_trix.py | 47 --
nettests/experimental/http_uk_mobile_networks.py | 85 ---
nettests/experimental/keyword_filtering.py | 52 --
nettests/experimental/parasitictraceroute.py | 129 ----
nettests/experimental/squid.py | 117 ----
nettests/manipulation/captiveportal.py | 650 --------------------
nettests/manipulation/daphne.py | 119 ----
nettests/manipulation/dnsspoof.py | 69 ---
.../manipulation/http_header_field_manipulation.py | 189 ------
nettests/manipulation/http_host.py | 141 -----
nettests/manipulation/http_invalid_request_line.py | 106 ----
nettests/manipulation/traceroute.py | 143 -----
nettests/scanning/http_url_list.py | 98 ---
nettests/third_party/Makefile | 3 -
nettests/third_party/README | 14 -
nettests/third_party/netalyzr.py | 58 --
72 files changed, 3487 insertions(+), 3487 deletions(-)
diff --git a/data/nettests/__init__.py b/data/nettests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/data/nettests/blocking/__init__.py b/data/nettests/blocking/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/data/nettests/blocking/__init__.py
@@ -0,0 +1 @@
+
diff --git a/data/nettests/blocking/dnsconsistency.py b/data/nettests/blocking/dnsconsistency.py
new file mode 100644
index 0000000..7b6e7b9
--- /dev/null
+++ b/data/nettests/blocking/dnsconsistency.py
@@ -0,0 +1,173 @@
+# -*- encoding: utf-8 -*-
+#
+# dnsconsistency
+# **************
+#
+# The test reports censorship if the cardinality of the intersection of
+# the query result set from the control server and the query result set
+# from the experimental server is zero, which is to say, if the two sets
+# have no matching results whatsoever.
+#
+# NOTE: This test frequently results in false positives due to GeoIP-based
+# load balancing on major global sites such as Google, Facebook, and
+# YouTube.
+#
+# :authors: Arturo Filastò, Isis Lovecruft
+# :licence: see LICENSE
+
+import pdb
+
+from twisted.python import usage
+from twisted.internet import defer
+
+from ooni.templates import dnst
+
+from ooni import nettest
+from ooni.utils import log
+
+class UsageOptions(usage.Options):
+ optParameters = [['backend', 'b', '8.8.8.8:53',
+ 'The OONI backend that runs the DNS resolver'],
+ ['testresolvers', 'T', None,
+ 'File containing list of DNS resolvers to test against'],
+ ['testresolver', 't', None,
+ 'Specify a single test resolver to use for testing']
+ ]
+
+class DNSConsistencyTest(dnst.DNSTest):
+
+ name = "DNS Consistency"
+ description = "DNS censorship detection test"
+ version = "0.5"
+ authors = "Arturo Filastò, Isis Lovecruft"
+ requirements = None
+
+ inputFile = ['file', 'f', None,
+ 'Input file of list of hostnames to attempt to resolve']
+
+ usageOptions = UsageOptions
+ requiredOptions = ['backend', 'file']
+
+ def setUp(self):
+ if (not self.localOptions['testresolvers'] and \
+ not self.localOptions['testresolver']):
+ raise usage.UsageError("You did not specify a testresolver")
+
+ elif self.localOptions['testresolvers']:
+ test_resolvers_file = self.localOptions['testresolvers']
+
+ elif self.localOptions['testresolver']:
+ self.test_resolvers = [self.localOptions['testresolver']]
+
+ try:
+ with open(test_resolvers_file) as f:
+ self.test_resolvers = [x.split('#')[0].strip() for x in f.readlines()]
+ self.report['test_resolvers'] = self.test_resolvers
+ f.close()
+
+ except IOError, e:
+ log.exception(e)
+ raise usage.UsageError("Invalid test resolvers file")
+
+ except NameError:
+ log.debug("No test resolver file configured")
+
+ dns_ip, dns_port = self.localOptions['backend'].split(':')
+ self.control_dns_server = (dns_ip, int(dns_port))
+
+ self.report['control_resolver'] = self.control_dns_server
+
+ @defer.inlineCallbacks
+ def test_a_lookup(self):
+ """
+ We perform an A lookup on the DNS test servers for the domains to be
+ tested and an A lookup on the known good DNS server.
+
+ We then compare the results from test_resolvers and those from
+ control_resolver and see if they match up.
+ If they match up then no censorship is happening (tampering: false).
+
+ If they do not, we do a reverse lookup (PTR) on the test_resolvers and
+ the control resolver for every IP address we got back and check to see
+ if any of them matches the control ones.
+
+ If they do, then we take note of the fact that censorship is probably not
+ happening (tampering: reverse-match).
+
+ If they do not match then censorship is probably going on (tampering:
+ true).
+ """
+ log.msg("Doing the test lookups on %s" % self.input)
+ list_of_ds = []
+ hostname = self.input
+
+ self.report['tampering'] = {}
+
+ control_answers = yield self.performALookup(hostname, self.control_dns_server)
+ if not control_answers:
+ log.err("Got no response from control DNS server %s," \
+ " perhaps the DNS resolver is down?" % self.control_dns_server[0])
+ self.report['tampering'][self.control_dns_server] = 'no_answer'
+ return
+
+ for test_resolver in self.test_resolvers:
+ log.msg("Testing resolver: %s" % test_resolver)
+ test_dns_server = (test_resolver, 53)
+
+ try:
+ experiment_answers = yield self.performALookup(hostname, test_dns_server)
+ except Exception, e:
+ log.err("Problem performing the DNS lookup")
+ log.exception(e)
+ self.report['tampering'][test_resolver] = 'dns_lookup_error'
+ continue
+
+ if not experiment_answers:
+ log.err("Got no response, perhaps the DNS resolver is down?")
+ self.report['tampering'][test_resolver] = 'no_answer'
+ continue
+ else:
+ log.debug("Got the following A lookup answers %s from %s" % (experiment_answers, test_resolver))
+
+ def lookup_details():
+ """
+ A closure useful for printing test details.
+ """
+ log.msg("test resolver: %s" % test_resolver)
+ log.msg("experiment answers: %s" % experiment_answers)
+ log.msg("control answers: %s" % control_answers)
+
+ log.debug("Comparing %s with %s" % (experiment_answers, control_answers))
+ if set(experiment_answers) & set(control_answers):
+ lookup_details()
+ log.msg("tampering: false")
+ self.report['tampering'][test_resolver] = False
+ else:
+ log.msg("Trying to do reverse lookup")
+
+ experiment_reverse = yield self.performPTRLookup(experiment_answers[0], test_dns_server)
+ control_reverse = yield self.performPTRLookup(control_answers[0], self.control_dns_server)
+
+ if experiment_reverse == control_reverse:
+ log.msg("Further testing has eliminated false positives")
+ lookup_details()
+ log.msg("tampering: reverse_match")
+ self.report['tampering'][test_resolver] = 'reverse_match'
+ else:
+ log.msg("Reverse lookups do not match")
+ lookup_details()
+ log.msg("tampering: true")
+ self.report['tampering'][test_resolver] = True
+
+ def inputProcessor(self, filename=None):
+ """
+ This inputProcessor extracts domain names from urls
+ """
+ log.debug("Running dnsconsistency default processor")
+ if filename:
+ fp = open(filename)
+ for x in fp.readlines():
+ yield x.strip().split('//')[-1].split('/')[0]
+ fp.close()
+ else:
+ pass
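
For readers skimming the patch, the tampering decision in test_a_lookup above reduces to a set intersection between the experiment and control A-record answers, with a PTR lookup as a fallback for GeoIP false positives, and inputProcessor simply strips a URL down to its hostname. A minimal standalone sketch of both, outside the patch and with hypothetical helper names and answer values:

    def classify_tampering(experiment_answers, control_answers,
                           experiment_reverse=None, control_reverse=None):
        # Any overlap between the A-record sets means no tampering.
        if set(experiment_answers) & set(control_answers):
            return False
        # No overlap: a matching PTR record suggests a GeoIP false positive.
        if experiment_reverse is not None and experiment_reverse == control_reverse:
            return 'reverse_match'
        return True

    def url_to_hostname(url):
        # Mirrors inputProcessor: "http://example.com/path" -> "example.com"
        return url.strip().split('//')[-1].split('/')[0]

    # classify_tampering(['10.10.34.36'], ['93.184.216.34'])  -> True
    # url_to_hostname('http://example.com/index.html')        -> 'example.com'
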
diff --git a/data/nettests/blocking/http_requests.py b/data/nettests/blocking/http_requests.py
new file mode 100644
index 0000000..9208739
--- /dev/null
+++ b/data/nettests/blocking/http_requests.py
@@ -0,0 +1,130 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+import random
+from twisted.internet import defer
+from twisted.python import usage
+
+from ooni.utils import log
+from ooni.utils.net import userAgents
+from ooni.templates import httpt
+from ooni.errors import failureToString, handleAllFailures
+
+class UsageOptions(usage.Options):
+ optParameters = [
+ ['url', 'u', None, 'Specify a single URL to test.'],
+ ['factor', 'f', 0.8, 'What factor should be used for triggering censorship (0.8 == 80%)']
+ ]
+
+class HTTPRequestsTest(httpt.HTTPTest):
+ """
+ Performs two GET requests to the set of sites to be tested for
+ censorship, one over a known good control channel (Tor), the other over the
+ test network.
+
+ We check to see if the response headers match and if the response body
+ lengths match.
+ """
+ name = "HTTP Requests Test"
+ author = "Arturo Filastò"
+ version = "0.2.3"
+
+ usageOptions = UsageOptions
+
+ inputFile = ['file', 'f', None,
+ 'List of URLS to perform GET and POST requests to']
+
+ # These values are used for determining censorship based on response body
+ # lengths
+ control_body_length = None
+ experiment_body_length = None
+
+ def setUp(self):
+ """
+ Check for inputs.
+ """
+ if self.input:
+ self.url = self.input
+ elif self.localOptions['url']:
+ self.url = self.localOptions['url']
+ else:
+ raise Exception("No input specified")
+
+ self.factor = self.localOptions['factor']
+ self.report['control_failure'] = None
+ self.report['experiment_failure'] = None
+
+ def compare_body_lengths(self, body_length_a, body_length_b):
+
+ if body_length_b == 0 and body_length_a != 0:
+ rel = float(body_length_b)/float(body_length_a)
+ elif body_length_b == 0 and body_length_a == 0:
+ rel = float(1)
+ else:
+ rel = float(body_length_a)/float(body_length_b)
+
+ if rel > 1:
+ rel = 1/rel
+
+ self.report['body_proportion'] = rel
+ self.report['factor'] = self.factor
+ if rel > self.factor:
+ log.msg("The two body lengths appear to match")
+ log.msg("censorship is probably not happening")
+ self.report['body_length_match'] = True
+ else:
+ log.msg("The two body lengths appear to not match")
+ log.msg("censorship could be happening")
+ self.report['body_length_match'] = False
+
+ def compare_headers(self, headers_a, headers_b):
+ diff = headers_a.getDiff(headers_b)
+ if diff:
+ log.msg("Headers appear to *not* match")
+ self.report['headers_diff'] = diff
+ self.report['headers_match'] = False
+ else:
+ log.msg("Headers appear to match")
+ self.report['headers_diff'] = diff
+ self.report['headers_match'] = True
+
+ def test_get(self):
+ def callback(res):
+ experiment, control = res
+ experiment_succeeded, experiment_result = experiment
+ control_succeeded, control_result = control
+
+ if control_succeeded and experiment_succeeded:
+ self.compare_body_lengths(len(experiment_result.body),
+ len(control_result.body))
+
+ self.compare_headers(control_result.headers,
+ experiment_result.headers)
+
+ if not control_succeeded:
+ self.report['control_failure'] = failureToString(control_result)
+
+ if not experiment_succeeded:
+ self.report['experiment_failure'] = failureToString(experiment_result)
+
+ headers = {'User-Agent': [random.choice(userAgents)]}
+
+ l = []
+ log.msg("Performing GET request to %s" % self.url)
+ experiment_request = self.doRequest(self.url, method="GET",
+ headers=headers)
+
+ log.msg("Performing GET request to %s via Tor" % self.url)
+ control_request = self.doRequest(self.url, method="GET",
+ use_tor=True, headers=headers)
+
+ l.append(experiment_request)
+ l.append(control_request)
+
+ dl = defer.DeferredList(l, consumeErrors=True)
+ dl.addCallback(callback)
+
+ return dl
+
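
As a side note, the compare_body_lengths logic above amounts to taking the ratio of the smaller body length to the larger one and comparing it against the configurable factor (default 0.8). An equivalent standalone sketch, with a hypothetical helper name, not part of the patch:

    def body_lengths_match(len_a, len_b, factor=0.8):
        # Two empty bodies are treated as matching; exactly one empty body is not.
        if len_a == 0 and len_b == 0:
            return True
        if len_a == 0 or len_b == 0:
            return False
        # Proportion of the smaller length to the larger one, always in (0, 1].
        rel = min(len_a, len_b) / float(max(len_a, len_b))
        return rel > factor

    # body_lengths_match(10240, 9800) -> True  (lengths are close, no block page suspected)
    # body_lengths_match(10240, 1200) -> False (large difference, possible block page)
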
diff --git a/data/nettests/blocking/tcpconnect.py b/data/nettests/blocking/tcpconnect.py
new file mode 100644
index 0000000..3b22427
--- /dev/null
+++ b/data/nettests/blocking/tcpconnect.py
@@ -0,0 +1,45 @@
+# -*- encoding: utf-8 -*-
+from twisted.internet.protocol import Factory, Protocol
+from twisted.internet.endpoints import TCP4ClientEndpoint
+
+from twisted.internet.error import ConnectionRefusedError
+from twisted.internet.error import TCPTimedOutError, TimeoutError
+
+from ooni import nettest
+from ooni.errors import handleAllFailures
+from ooni.utils import log
+
+class TCPFactory(Factory):
+ def buildProtocol(self, addr):
+ return Protocol()
+
+class TCPConnectTest(nettest.NetTestCase):
+ name = "TCP Connect"
+ author = "Arturo Filastò"
+ version = "0.1"
+ inputFile = ['file', 'f', None,
+ 'File containing the IP:PORT combinations to be tested, one per line']
+
+ requiredOptions = ['file']
+ def test_connect(self):
+ """
+ This test performs a TCP connection to the remote host on the specified port.
+ The report will contain the string 'success' if the test has
+ succeeded, or the reason for the failure if it has failed.
+ """
+ host, port = self.input.split(":")
+ def connectionSuccess(protocol):
+ protocol.transport.loseConnection()
+ log.debug("Got a connection to %s" % self.input)
+ self.report["connection"] = 'success'
+
+ def connectionFailed(failure):
+ self.report['connection'] = handleAllFailures(failure)
+
+ from twisted.internet import reactor
+ point = TCP4ClientEndpoint(reactor, host, int(port))
+ d = point.connect(TCPFactory())
+ d.addCallback(connectionSuccess)
+ d.addErrback(connectionFailed)
+ return d
+
diff --git a/data/nettests/examples/__init__.py b/data/nettests/examples/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/data/nettests/examples/example_dns_http.py b/data/nettests/examples/example_dns_http.py
new file mode 100644
index 0000000..9b76775
--- /dev/null
+++ b/data/nettests/examples/example_dns_http.py
@@ -0,0 +1,11 @@
+from twisted.internet import defer
+from ooni.templates import httpt, dnst
+
+class TestDNSandHTTP(httpt.HTTPTest, dnst.DNSTest):
+
+ @defer.inlineCallbacks
+ def test_http_and_dns(self):
+ yield self.doRequest('http://torproject.org')
+ yield self.performALookup('torproject.org', ('8.8.8.8', 53))
+
+
diff --git a/data/nettests/examples/example_dnst.py b/data/nettests/examples/example_dnst.py
new file mode 100644
index 0000000..6905637
--- /dev/null
+++ b/data/nettests/examples/example_dnst.py
@@ -0,0 +1,13 @@
+from ooni.templates import dnst
+
+class ExampleDNSTest(dnst.DNSTest):
+ inputFile = ['file', 'f', None, 'foobar']
+
+ def test_a_lookup(self):
+ def gotResult(result):
+ # Result is an array containing all the A record lookup results
+ print result
+
+ d = self.performALookup('torproject.org', ('8.8.8.8', 53))
+ d.addCallback(gotResult)
+ return d
diff --git a/data/nettests/examples/example_http_checksum.py b/data/nettests/examples/example_http_checksum.py
new file mode 100644
index 0000000..9226b52
--- /dev/null
+++ b/data/nettests/examples/example_http_checksum.py
@@ -0,0 +1,27 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Aaron Gibson
+# :licence: see LICENSE
+
+from ooni.utils import log
+from ooni.templates import httpt
+from hashlib import sha256
+
+class SHA256HTTPBodyTest(httpt.HTTPTest):
+ name = "ChecksumHTTPBodyTest"
+ author = "Aaron Gibson"
+ version = 0.1
+
+ inputFile = ['file', 'f', None,
+ 'List of URLS to perform GET requests to']
+
+ def test_http(self):
+ if self.input:
+ url = self.input
+ return self.doRequest(url)
+ else:
+ raise Exception("No input specified")
+
+ def processResponseBody(self, body):
+ body_sha256sum = sha256(body).digest()
+ self.report['checksum'] = body_sha256sum
diff --git a/data/nettests/examples/example_httpt.py b/data/nettests/examples/example_httpt.py
new file mode 100644
index 0000000..e76aed4
--- /dev/null
+++ b/data/nettests/examples/example_httpt.py
@@ -0,0 +1,36 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from ooni.utils import log
+from ooni.templates import httpt
+
+class ExampleHTTP(httpt.HTTPTest):
+ name = "Example HTTP Test"
+ author = "Arturo Filastò"
+ version = 0.1
+
+ inputs = ['http://google.com/', 'http://wikileaks.org/',
+ 'http://torproject.org/']
+
+ def test_http(self):
+ if self.input:
+ url = self.input
+ return self.doRequest(url)
+ else:
+ raise Exception("No input specified")
+
+ def processResponseBody(self, body):
+ # XXX here shall go your logic
+ # for processing the body
+ if 'blocked' in body:
+ self.report['censored'] = True
+ else:
+ self.report['censored'] = False
+
+ def processResponseHeaders(self, headers):
+ # XXX place in here all the logic for handling the processing of HTTP
+ # Headers.
+ pass
+
diff --git a/data/nettests/examples/example_myip.py b/data/nettests/examples/example_myip.py
new file mode 100644
index 0000000..70cf773
--- /dev/null
+++ b/data/nettests/examples/example_myip.py
@@ -0,0 +1,21 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from ooni.templates import httpt
+class MyIP(httpt.HTTPTest):
+ inputs = ['https://check.torproject.org']
+
+ def test_lookup(self):
+ return self.doRequest(self.input)
+
+ def processResponseBody(self, body):
+ import re
+ regexp = "Your IP address appears to be: <b>(.+?)<\/b>"
+ match = re.search(regexp, body)
+ try:
+ self.report['myip'] = match.group(1)
+ except:
+ self.report['myip'] = None
+
diff --git a/data/nettests/examples/example_scapyt.py b/data/nettests/examples/example_scapyt.py
new file mode 100644
index 0000000..ba04072
--- /dev/null
+++ b/data/nettests/examples/example_scapyt.py
@@ -0,0 +1,29 @@
+# -*- encoding: utf-8 -*-
+#
+# :licence: see LICENSE
+
+from twisted.python import usage
+
+from scapy.all import IP, ICMP
+
+from ooni.templates import scapyt
+
+class UsageOptions(usage.Options):
+ optParameters = [['target', 't', '8.8.8.8', "Specify the target to ping"]]
+
+class ExampleICMPPingScapy(scapyt.BaseScapyTest):
+ name = "Example ICMP Ping Test"
+
+ usageOptions = UsageOptions
+
+ def test_icmp_ping(self):
+ def finished(packets):
+ print packets
+ answered, unanswered = packets
+ for snd, rcv in answered:
+ rcv.show()
+
+ packets = IP(dst=self.localOptions['target'])/ICMP()
+ d = self.sr(packets)
+ d.addCallback(finished)
+ return d
diff --git a/data/nettests/examples/example_scapyt_yield.py b/data/nettests/examples/example_scapyt_yield.py
new file mode 100644
index 0000000..311b5aa
--- /dev/null
+++ b/data/nettests/examples/example_scapyt_yield.py
@@ -0,0 +1,25 @@
+# -*- encoding: utf-8 -*-
+#
+# :licence: see LICENSE
+
+from twisted.python import usage
+from twisted.internet import defer
+
+from scapy.all import IP, ICMP
+
+from ooni.templates import scapyt
+
+class UsageOptions(usage.Options):
+ optParameters = [['target', 't', '8.8.8.8', "Specify the target to ping"]]
+
+class ExampleICMPPingScapyYield(scapyt.BaseScapyTest):
+ name = "Example ICMP Ping Test"
+
+ usageOptions = UsageOptions
+
+ @defer.inlineCallbacks
+ def test_icmp_ping(self):
+ packets = IP(dst=self.localOptions['target'])/ICMP()
+ answered, unanswered = yield self.sr(packets)
+ for snd, rcv in answered:
+ rcv.show()
diff --git a/data/nettests/examples/example_simple.py b/data/nettests/examples/example_simple.py
new file mode 100644
index 0000000..24de5a6
--- /dev/null
+++ b/data/nettests/examples/example_simple.py
@@ -0,0 +1,8 @@
+from twisted.internet import defer
+from ooni import nettest
+
+class MyIP(nettest.NetTestCase):
+ def test_simple(self):
+ self.report['foobar'] = 'antani'
+ return defer.succeed(42)
+
diff --git a/data/nettests/examples/example_tcpt.py b/data/nettests/examples/example_tcpt.py
new file mode 100644
index 0000000..613160b
--- /dev/null
+++ b/data/nettests/examples/example_tcpt.py
@@ -0,0 +1,21 @@
+
+from twisted.internet.error import ConnectionRefusedError
+from ooni.utils import log
+from ooni.templates import tcpt
+
+class ExampleTCPT(tcpt.TCPTest):
+ def test_hello_world(self):
+ def got_response(response):
+ print "Got this data %s" % response
+
+ def connection_failed(failure):
+ failure.trap(ConnectionRefusedError)
+ print "Connection Refused"
+
+ self.address = "127.0.0.1"
+ self.port = 57002
+ payload = "Hello World!\n\r"
+ d = self.sendPayload(payload)
+ d.addErrback(connection_failed)
+ d.addCallback(got_response)
+ return d
diff --git a/data/nettests/experimental/__init__.py b/data/nettests/experimental/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/data/nettests/experimental/bridge_reachability/bridget.py b/data/nettests/experimental/bridge_reachability/bridget.py
new file mode 100644
index 0000000..acf3dff
--- /dev/null
+++ b/data/nettests/experimental/bridge_reachability/bridget.py
@@ -0,0 +1,462 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+#
+# +-----------+
+# | BRIDGET |
+# | +--------------------------------------------+
+# +--------| Use a Tor process to test making a Tor |
+# | connection to a list of bridges or relays. |
+# +--------------------------------------------+
+#
+# :authors: Isis Lovecruft, Arturo Filasto
+# :licence: see included LICENSE
+# :version: 0.1.0-alpha
+
+from __future__ import with_statement
+from functools import partial
+from random import randint
+
+import os
+import sys
+
+from twisted.python import usage
+from twisted.internet import defer, error, reactor
+
+from ooni import nettest
+
+from ooni.utils import log, date
+from ooni.utils.config import ValueChecker
+
+from ooni.utils.onion import TxtorconImportError
+from ooni.utils.onion import PTNoBridgesException, PTNotFoundException
+
+
+try:
+ from ooni.utils.onion import parse_data_dir
+except:
+ log.msg("Please go to /ooni/lib and do 'make txtorcon' to run this test!")
+
+class MissingAssetException(Exception):
+ pass
+
+class RandomPortException(Exception):
+ """Raised when using a random port conflicts with configured ports."""
+ def __init__(self):
+ log.msg("Unable to use random and specific ports simultaneously")
+ return sys.exit()
+
+class BridgetArgs(usage.Options):
+ """Commandline options."""
+ allowed = "Port to use for Tor's %s, must be between 1024 and 65535."
+ sock_check = ValueChecker(allowed % "SocksPort").port_check
+ ctrl_check = ValueChecker(allowed % "ControlPort").port_check
+
+ optParameters = [
+ ['bridges', 'b', None,
+ 'File listing bridge IP:ORPorts to test'],
+ ['relays', 'f', None,
+ 'File listing relay IPs to test'],
+ ['socks', 's', 9049, None, sock_check],
+ ['control', 'c', 9052, None, ctrl_check],
+ ['torpath', 'p', None,
+ 'Path to the Tor binary to use'],
+ ['datadir', 'd', None,
+ 'Tor DataDirectory to use'],
+ ['transport', 't', None,
+ 'Tor ClientTransportPlugin'],
+ ['resume', 'r', 0,
+ 'Resume at this index']]
+ optFlags = [['random', 'x', 'Use random ControlPort and SocksPort']]
+
+ def postOptions(self):
+ if not self['bridges'] and not self['relays']:
+ raise MissingAssetException(
+ "Bridget can't run without bridges or relays to test!")
+ if self['transport']:
+ ValueChecker.uid_check(
+ "Can't run bridget as root with pluggable transports!")
+ if not self['bridges']:
+ raise PTNoBridgesException
+ if self['socks'] or self['control']:
+ if self['random']:
+ raise RandomPortException
+ if self['datadir']:
+ ValueChecker.dir_check(self['datadir'])
+ if self['torpath']:
+ ValueChecker.file_check(self['torpath'])
+
+class BridgetTest(nettest.NetTestCase):
+ """
+ XXX fill me in
+
+ :ivar config:
+ An :class:`ooni.lib.txtorcon.TorConfig` instance.
+ :ivar relays:
+ A list of all provided relays to test.
+ :ivar bridges:
+ A list of all provided bridges to test.
+ :ivar socks_port:
+ Integer for Tor's SocksPort.
+ :ivar control_port:
+ Integer for Tor's ControlPort.
+ :ivar transport:
+ String defining the Tor's ClientTransportPlugin, for testing
+ a bridge's pluggable transport functionality.
+ :ivar tor_binary:
+ Path to the Tor binary to use, e.g. \'/usr/sbin/tor\'
+ """
+ name = "bridget"
+ author = "Isis Lovecruft <isis(a)torproject.org>"
+ version = "0.1"
+ description = "Use a Tor process to test connecting to bridges or relays"
+ usageOptions = BridgetArgs
+
+ def setUp(self):
+ """
+ Extra initialization steps. We only want one child Tor process
+ running, so we need to deal with most of the TorConfig() only once,
+ before the experiment runs.
+ """
+ self.socks_port = 9049
+ self.control_port = 9052
+ self.circuit_timeout = 90
+ self.tor_binary = '/usr/sbin/tor'
+ self.data_directory = None
+
+ def read_from_file(filename):
+ log.msg("Loading information from %s ..." % opt)
+ with open(filename) as fp:
+ lst = []
+ for line in fp.readlines():
+ if line.startswith('#'):
+ continue
+ else:
+ lst.append(line.replace('\n',''))
+ return lst
+
+ def __count_remaining__(which):
+ total, reach, unreach = map(lambda x: which[x],
+ ['all', 'reachable', 'unreachable'])
+ count = len(total) - reach() - unreach()
+ return count
+
+ ## XXX should we do report['bridges_up'].append(self.bridges['current'])
+ self.bridges = {}
+ self.bridges['all'], self.bridges['up'], self.bridges['down'] = \
+ ([] for i in range(3))
+ self.bridges['reachable'] = lambda: len(self.bridges['up'])
+ self.bridges['unreachable'] = lambda: len(self.bridges['down'])
+ self.bridges['remaining'] = lambda: __count_remaining__(self.bridges)
+ self.bridges['current'] = None
+ self.bridges['pt_type'] = None
+ self.bridges['use_pt'] = False
+
+ self.relays = {}
+ self.relays['all'], self.relays['up'], self.relays['down'] = \
+ ([] for i in range(3))
+ self.relays['reachable'] = lambda: len(self.relays['up'])
+ self.relays['unreachable'] = lambda: len(self.relays['down'])
+ self.relays['remaining'] = lambda: __count_remaining__(self.relays)
+ self.relays['current'] = None
+
+ if self.localOptions:
+ try:
+ from txtorcon import TorConfig
+ except ImportError:
+ raise TxtorconImportError
+ else:
+ self.config = TorConfig()
+ finally:
+ options = self.localOptions
+
+ if options['bridges']:
+ self.config.UseBridges = 1
+ self.bridges['all'] = read_from_file(options['bridges'])
+ if options['relays']:
+ ## first hop must be in TorState().guards
+ # XXX where is this defined?
+ self.config.EntryNodes = ','.join(relay_list)
+ self.relays['all'] = read_from_file(options['relays'])
+ if options['socks']:
+ self.socks_port = options['socks']
+ if options['control']:
+ self.control_port = options['control']
+ if options['random']:
+ log.msg("Using randomized ControlPort and SocksPort ...")
+ self.socks_port = randint(1024, 2**16)
+ self.control_port = randint(1024, 2**16)
+ if options['torpath']:
+ self.tor_binary = options['torpath']
+ if options['datadir']:
+ self.data_directory = parse_data_dir(options['datadir'])
+ if options['transport']:
+ ## ClientTransportPlugin transport exec pathtobinary [options]
+ ## XXX we need a better way to deal with all PTs
+ log.msg("Using ClientTransportPlugin %s" % options['transport'])
+ self.bridges['use_pt'] = True
+ [self.bridges['pt_type'], pt_exec] = \
+ options['transport'].split(' ', 1)
+
+ if self.bridges['pt_type'] == "obfs2":
+ self.config.ClientTransportPlugin = \
+ self.bridges['pt_type'] + " " + pt_exec
+ else:
+ raise PTNotFoundException
+
+ self.config.SocksPort = self.socks_port
+ self.config.ControlPort = self.control_port
+ self.config.CookieAuthentication = 1
+
+ def test_bridget(self):
+ """
+ if bridges:
+ 1. configure first bridge line
+ 2a. configure data_dir, if it doesn't exist
+ 2b. write torrc to a tempfile in data_dir
+ 3. start tor } if any of these
+ 4. remove bridges which are public relays } fail, add current
+ 5. SIGHUP for each bridge } bridge to unreach-
+ } able bridges.
+ if relays:
+ 1a. configure the data_dir, if it doesn't exist
+ 1b. write torrc to a tempfile in data_dir
+ 2. start tor
+ 3. remove any of our relays which are already part of current
+ circuits
+ 4a. attach CustomCircuit() to self.state
+ 4b. RELAY_EXTEND for each relay } if this fails, add
+ } current relay to list
+ } of unreachable relays
+ 5.
+ if bridges and relays:
+ 1. configure first bridge line
+ 2a. configure data_dir if it doesn't exist
+ 2b. write torrc to a tempfile in data_dir
+ 3. start tor
+ 4. remove bridges which are public relays
+ 5. remove any of our relays which are already part of current
+ circuits
+ 6a. attach CustomCircuit() to self.state
+ 6b. for each bridge, build three circuits, with three
+ relays each
+ 6c. RELAY_EXTEND for each relay } if this fails, add
+ } current relay to list
+ } of unreachable relays
+
+ :param args:
+ The :class:`BridgetAsset` line currently being used. Except that in
+ Bridget it isn't used, so it should be ignored and avoided.
+ """
+ try:
+ from ooni.utils import process
+ from ooni.utils.onion import remove_public_relays, start_tor
+ from ooni.utils.onion import start_tor_filter_nodes
+ from ooni.utils.onion import setup_fail, setup_done
+ from ooni.utils.onion import CustomCircuit
+ from ooni.utils.timer import deferred_timeout, TimeoutError
+ from ooni.lib.txtorcon import TorConfig, TorState
+ except ImportError:
+ raise TxtorconImportError
+ except TxtorconImportError, tie:
+ log.err(tie)
+ sys.exit()
+
+ def reconfigure_done(state, bridges):
+ """
+ Append :ivar:`bridges['current']` to the list
+ :ivar:`bridges['up'].
+ """
+ log.msg("Reconfiguring with 'Bridge %s' successful"
+ % bridges['current'])
+ bridges['up'].append(bridges['current'])
+ return state
+
+ def reconfigure_fail(state, bridges):
+ """
+ Append :ivar:`bridges['current']` to the list
+ :ivar:`bridges['down'].
+ """
+ log.msg("Reconfiguring TorConfig with parameters %s failed"
+ % state)
+ bridges['down'].append(bridges['current'])
+ return state
+
+ @defer.inlineCallbacks
+ def reconfigure_bridge(state, bridges):
+ """
+ Rewrite the Bridge line in our torrc. If use of pluggable
+ transports was specified, rewrite the line as:
+ Bridge <transport_type> <IP>:<ORPort>
+ Otherwise, rewrite in the standard form:
+ Bridge <IP>:<ORPort>
+
+ :param state:
+ A fully bootstrapped instance of
+ :class:`ooni.lib.txtorcon.TorState`.
+ :param bridges:
+ A dictionary of bridges containing the following keys:
+
+ bridges['remaining'] :: A function returning and int for the
+ number of remaining bridges to test.
+ bridges['current'] :: A string containing the <IP>:<ORPort>
+ of the current bridge.
+ bridges['use_pt'] :: A boolean, True if we're testing
+ bridges with a pluggable transport;
+ False otherwise.
+ bridges['pt_type'] :: If :ivar:`bridges['use_pt'] is True,
+ this is a string containing the type
+ of pluggable transport to test.
+ :return:
+ :param:`state`
+ """
+ log.msg("Current Bridge: %s" % bridges['current'])
+ log.msg("We now have %d bridges remaining to test..."
+ % bridges['remaining']())
+ try:
+ if bridges['use_pt'] is False:
+ controller_response = yield state.protocol.set_conf(
+ 'Bridge', bridges['current'])
+ elif bridges['use_pt'] and bridges['pt_type'] is not None:
+ controller_response = yield state.protocol.set_conf(
+ 'Bridge', bridges['pt_type'] +' '+ bridges['current'])
+ else:
+ raise PTNotFoundException
+
+ if controller_response == 'OK':
+ finish = yield reconfigure_done(state, bridges)
+ else:
+ log.err("SETCONF for %s responded with error:\n %s"
+ % (bridges['current'], controller_response))
+ finish = yield reconfigure_fail(state, bridges)
+
+ defer.returnValue(finish)
+
+ except Exception, e:
+ log.err("Reconfiguring torrc with Bridge line %s failed:\n%s"
+ % (bridges['current'], e))
+ defer.returnValue(None)
+
+ def attacher_extend_circuit(attacher, deferred, router):
+ ## XXX todo write me
+ ## state.attacher.extend_circuit
+ raise NotImplementedError
+ #attacher.extend_circuit
+
+ def state_attach(state, path):
+ log.msg("Setting up custom circuit builder...")
+ attacher = CustomCircuit(state)
+ state.set_attacher(attacher, reactor)
+ state.add_circuit_listener(attacher)
+ return state
+
+ ## OLD
+ #for circ in state.circuits.values():
+ # for relay in circ.path:
+ # try:
+ # relay_list.remove(relay)
+ # except KeyError:
+ # continue
+ ## XXX how do we attach to circuits with bridges?
+ d = defer.Deferred()
+ attacher.request_circuit_build(d)
+ return d
+
+ def state_attach_fail(state):
+ log.err("Attaching custom circuit builder failed: %s" % state)
+
+ log.msg("Bridget: initiating test ... ") ## Start the experiment
+
+ ## if we've at least one bridge, and our config has no 'Bridge' line
+ if self.bridges['remaining']() >= 1 \
+ and not 'Bridge' in self.config.config:
+
+ ## configure our first bridge line
+ self.bridges['current'] = self.bridges['all'][0]
+ self.config.Bridge = self.bridges['current']
+ ## avoid starting several
+ self.config.save() ## processes
+ assert self.config.config.has_key('Bridge'), "No Bridge Line"
+
+ ## start tor and remove bridges which are public relays
+ from ooni.utils.onion import start_tor_filter_nodes
+ state = start_tor_filter_nodes(reactor, self.config,
+ self.control_port, self.tor_binary,
+ self.data_directory, self.bridges)
+ #controller = defer.Deferred()
+ #controller.addCallback(singleton_semaphore, tor)
+ #controller.addErrback(setup_fail)
+ #bootstrap = defer.gatherResults([controller, filter_bridges],
+ # consumeErrors=True)
+
+ if state is not None:
+ log.debug("state:\n%s" % state)
+ log.debug("Current callbacks on TorState():\n%s"
+ % state.callbacks)
+
+ ## if we've got more bridges
+ if self.bridges['remaining']() >= 2:
+ #all = []
+ for bridge in self.bridges['all'][1:]:
+ self.bridges['current'] = bridge
+ #new = defer.Deferred()
+ #new.addCallback(reconfigure_bridge, state, self.bridges)
+ #all.append(new)
+ #check_remaining = defer.DeferredList(all, consumeErrors=True)
+ #state.chainDeferred(check_remaining)
+ state.addCallback(reconfigure_bridge, self.bridges)
+
+ if self.relays['remaining']() > 0:
+ while self.relays['remaining']() >= 3:
+ #path = list(self.relays.pop() for i in range(3))
+ #log.msg("Trying path %s" % '->'.join(map(lambda node:
+ # node, path)))
+ self.relays['current'] = self.relays['all'].pop()
+ for circ in state.circuits.values():
+ for node in circ.path:
+ if node == self.relays['current']:
+ self.relays['up'].append(self.relays['current'])
+ if len(circ.path) < 3:
+ try:
+ ext = attacher_extend_circuit(state.attacher, circ,
+ self.relays['current'])
+ ext.addCallback(attacher_extend_circuit_done,
+ state.attacher, circ,
+ self.relays['current'])
+ except Exception, e:
+ log.err("Extend circuit failed: %s" % e)
+ else:
+ continue
+
+ #state.callback(all)
+ #self.reactor.run()
+ return state
+
+ def disabled_startTest(self, args):
+ """
+ Local override of :meth:`OONITest.startTest` to bypass calling
+ self.control.
+
+ :param args:
+ The current line of :class:`Asset`, not used but kept for
+ compatibility reasons.
+ :return:
+ A fired deferred which callbacks :meth:`experiment` and
+ :meth:`OONITest.finished`.
+ """
+ self.start_time = date.now()
+ self.d = self.experiment(args)
+ self.d.addErrback(log.err)
+ self.d.addCallbacks(self.finished, log.err)
+ return self.d
+
+## ISIS' NOTES
+## -----------
+## TODO:
+## x cleanup documentation
+## x add DataDirectory option
+## x check if bridges are public relays
+## o take bridge_desc file as input, also be able to give same
+## format as output
+## x Add asynchronous timeout for deferred, so that we don't wait
+## o Add asynchronous timeout for deferred, so that we don't wait
+## forever for bridges that don't work.
diff --git a/data/nettests/experimental/bridge_reachability/echo.py b/data/nettests/experimental/bridge_reachability/echo.py
new file mode 100644
index 0000000..d4033dd
--- /dev/null
+++ b/data/nettests/experimental/bridge_reachability/echo.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# +---------+
+# | echo.py |
+# +---------+
+# A simple ICMP-8 ping test.
+#
+# @authors: Isis Lovecruft, <isis@torproject.org>
+# @version: 0.0.2-pre-alpha
+# @license: copyright (c) 2012 Isis Lovecruft
+# see attached LICENCE file
+#
+
+import os
+import sys
+
+from twisted.python import usage
+from twisted.internet import reactor, defer
+from ooni import nettest
+from ooni.utils import log, net, Storage, txscapy
+
+try:
+ from scapy.all import IP, ICMP
+ from scapy.all import sr1
+ from ooni.lib import txscapy
+ from ooni.lib.txscapy import txsr, txsend
+ from ooni.templates.scapyt import BaseScapyTest
+except:
+ log.msg("This test requires scapy, see www.secdev.org/projects/scapy")
+
+class UsageOptions(usage.Options):
+ optParameters = [
+ ['dst', 'd', None, 'Host IP to ping'],
+ ['file', 'f', None, 'File of list of IPs to ping'],
+ ['interface', 'i', None, 'Network interface to use'],
+ ['count', 'c', 1, 'Number of packets to send', int],
+ ['size', 's', 56, 'Number of bytes to send in ICMP data field', int],
+ ['ttl', 'l', 25, 'Set the IP Time to Live', int],
+ ['timeout', 't', 2, 'Seconds until timeout if no response', int],
+ ['pcap', 'p', None, 'Save pcap to this file'],
+ ['receive', 'r', True, 'Receive response packets']]
+
+class EchoTest(nettest.NetTestCase):
+ """
+ xxx fill me in
+ """
+ name = 'echo'
+ author = 'Isis Lovecruft <isis@torproject.org>'
+ description = 'A simple ping test to see if a host is reachable.'
+ version = '0.0.2'
+ requiresRoot = True
+
+ usageOptions = UsageOptions
+ #requiredOptions = ['dst']
+
+ def setUp(self, *a, **kw):
+ self.destinations = {}
+
+ if self.localOptions:
+ for key, value in self.localOptions.items():
+ log.debug("setting self.%s = %s" % (key, value))
+ setattr(self, key, value)
+
+ self.timeout *= 1000 ## convert to milliseconds
+
+ if not self.interface:
+ try:
+ iface = txscapy.getDefaultIface()
+ except Exception, e:
+ log.msg("No network interface specified!")
+ log.err(e)
+ else:
+ log.msg("Using system default interface: %s" % iface)
+ self.interface = iface
+
+ if self.pcap:
+ try:
+ self.pcapfile = open(self.pcap, 'a+')
+ except:
+ log.msg("Unable to write to pcap file %s" % self.pcap)
+ else:
+ self.pcap = net.capturePacket(self.pcapfile)
+
+ if not self.dst:
+ if self.file:
+ self.dstProcessor(self.file)
+ for key, value in self.destinations.items():
+ for label, data in value.items():
+ if not 'ans' in data:
+ self.dst = label
+ else:
+ self.addDest(self.dst)
+ log.debug("self.dst is now: %s" % self.dst)
+
+ log.debug("Initialization of %s test completed." % self.name)
+
+ def addDest(self, dest):
+ d = dest.strip()
+ self.destinations[d] = {'dst_ip': d}
+
+ def dstProcessor(self, inputfile):
+ from ipaddr import IPAddress
+
+ if os.path.isfile(inputfile):
+ with open(inputfile) as f:
+ for line in f.readlines():
+ if line.startswith('#'):
+ continue
+ self.addDest(line)
+
+ def test_icmp(self):
+ def process_response(echo_reply, dest):
+ ans, unans = echo_reply
+ if ans:
+ log.msg("Recieved echo reply from %s: %s" % (dest, ans))
+ else:
+ log.msg("No reply was received from %s. Possible censorship event." % dest)
+ log.debug("Unanswered packets: %s" % unans)
+ self.report[dest] = echo_reply
+
+ for label, data in self.destinations.items():
+ reply = sr1(IP(dst=label)/ICMP())
+ process = process_response(reply, label)
+
+ #(ans, unans) = ping
+ #self.destinations[self.dst].update({'ans': ans,
+ # 'unans': unans,
+ # 'response_packet': ping})
+ #return ping
+
+ #return reply
diff --git a/data/nettests/experimental/chinatrigger.py b/data/nettests/experimental/chinatrigger.py
new file mode 100644
index 0000000..de1f64d
--- /dev/null
+++ b/data/nettests/experimental/chinatrigger.py
@@ -0,0 +1,108 @@
+import random
+import string
+import struct
+import time
+
+from twisted.python import usage
+from ooni.templates.scapyt import BaseScapyTest
+
+class UsageOptions(usage.Options):
+ optParameters = [['dst', 'd', None, 'Specify the target address'],
+ ['port', 'p', None, 'Specify the target port']
+ ]
+
+class ChinaTriggerTest(BaseScapyTest):
+ """
+ This test is an OONI-based implementation of the C tool written
+ by Philipp Winter to engage Chinese probes in active scanning.
+
+ Example of running it:
+ ./bin/ooniprobe chinatrigger -d 127.0.0.1 -p 8080
+ """
+
+ name = "chinatrigger"
+ usageOptions = UsageOptions
+ requiredOptions = ['dst', 'port']
+ timeout = 2
+
+ def setUp(self):
+ self.dst = self.localOptions['dst']
+ self.port = int(self.localOptions['port'])
+
+ @staticmethod
+ def set_random_servername(pkt):
+ ret = pkt[:121]
+ for i in range(16):
+ ret += random.choice(string.ascii_lowercase)
+ ret += pkt[121+16:]
+ return ret
+
+ @staticmethod
+ def set_random_time(pkt):
+ ret = pkt[:11]
+ ret += struct.pack('!I', int(time.time()))
+ ret += pkt[11+4:]
+ return ret
+
+ @staticmethod
+ def set_random_field(pkt):
+ ret = pkt[:15]
+ for i in range(28):
+ ret += chr(random.randint(0, 255))
+ ret += pkt[15+28:]
+ return ret
+
+ @staticmethod
+ def mutate(pkt, idx):
+ """
+ Slightly changed mutate function.
+ """
+ ret = pkt[:idx-1]
+ mutation = chr(random.randint(0, 255))
+ while mutation == pkt[idx]:
+ mutation = chr(random.randint(0, 255))
+ ret += mutation
+ ret += pkt[idx:]
+ return ret
+
+ @staticmethod
+ def set_all_random_fields(pkt):
+ pkt = ChinaTriggerTest.set_random_servername(pkt)
+ pkt = ChinaTriggerTest.set_random_time(pkt)
+ pkt = ChinaTriggerTest.set_random_field(pkt)
+ return pkt
+
+ def test_send_mutations(self):
+ from scapy.all import IP, TCP
+ pkt = "\x16\x03\x01\x00\xcc\x01\x00\x00\xc8"\
+ "\x03\x01\x4f\x12\xe5\x63\x3f\xef\x7d"\
+ "\x20\xb9\x94\xaa\x04\xb0\xc1\xd4\x8c"\
+ "\x50\xcd\xe2\xf9\x2f\xa9\xfb\x78\xca"\
+ "\x02\xa8\x73\xe7\x0e\xa8\xf9\x00\x00"\
+ "\x3a\xc0\x0a\xc0\x14\x00\x39\x00\x38"\
+ "\xc0\x0f\xc0\x05\x00\x35\xc0\x07\xc0"\
+ "\x09\xc0\x11\xc0\x13\x00\x33\x00\x32"\
+ "\xc0\x0c\xc0\x0e\xc0\x02\xc0\x04\x00"\
+ "\x04\x00\x05\x00\x2f\xc0\x08\xc0\x12"\
+ "\x00\x16\x00\x13\xc0\x0d\xc0\x03\xfe"\
+ "\xff\x00\x0a\x00\xff\x01\x00\x00\x65"\
+ "\x00\x00\x00\x1d\x00\x1b\x00\x00\x18"\
+ "\x77\x77\x77\x2e\x67\x6e\x6c\x69\x67"\
+ "\x78\x7a\x70\x79\x76\x6f\x35\x66\x76"\
+ "\x6b\x64\x2e\x63\x6f\x6d\x00\x0b\x00"\
+ "\x04\x03\x00\x01\x02\x00\x0a\x00\x34"\
+ "\x00\x32\x00\x01\x00\x02\x00\x03\x00"\
+ "\x04\x00\x05\x00\x06\x00\x07\x00\x08"\
+ "\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00"\
+ "\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11"\
+ "\x00\x12\x00\x13\x00\x14\x00\x15\x00"\
+ "\x16\x00\x17\x00\x18\x00\x19\x00\x23"\
+ "\x00\x00"
+
+ pkt = ChinaTriggerTest.set_all_random_fields(pkt)
+ pkts = [IP(dst=self.dst)/TCP(dport=self.port)/pkt]
+ for x in range(len(pkt)):
+ mutation = IP(dst=self.dst)/TCP(dport=self.port)/ChinaTriggerTest.mutate(pkt, x)
+ pkts.append(mutation)
+ return self.sr(pkts, timeout=2)
+
diff --git a/data/nettests/experimental/dns_injection.py b/data/nettests/experimental/dns_injection.py
new file mode 100644
index 0000000..97233cf
--- /dev/null
+++ b/data/nettests/experimental/dns_injection.py
@@ -0,0 +1,63 @@
+# -*- encoding: utf-8 -*-
+from twisted.python import usage
+from twisted.internet import defer
+
+from ooni.templates import dnst
+from ooni import nettest
+from ooni.utils import log
+
+class UsageOptions(usage.Options):
+ optParameters = [
+ ['resolver', 'r', '8.8.8.1', 'an invalid DNS resolver'],
+ ['timeout', 't', 3, 'timeout after which we should consider the query failed']
+ ]
+
+class DNSInjectionTest(dnst.DNSTest):
+ """
+ This test detects spoofed DNS responses by performing UDP-based DNS
+ queries towards an invalid DNS resolver.
+
+ For it to work we must be traversing the network segment of a machine that
+ is actively injecting DNS query answers.
+ """
+ name = "DNS Injection"
+ description = "Checks for injection of spoofed DNS answers"
+ version = "0.1"
+ authors = "Arturo Filastò"
+
+ inputFile = ['file', 'f', None,
+ 'Input file of list of hostnames to attempt to resolve']
+
+ usageOptions = UsageOptions
+ requiredOptions = ['resolver', 'file']
+
+ def setUp(self):
+ self.resolver = (self.localOptions['resolver'], 53)
+ self.queryTimeout = [self.localOptions['timeout']]
+
+ def inputProcessor(self, filename):
+ fp = open(filename)
+ for line in fp:
+ if line.startswith('http://'):
+ yield line.replace('http://', '').replace('/', '').strip()
+ else:
+ yield line.strip()
+ fp.close()
+
+ def test_injection(self):
+ self.report['injected'] = None
+
+ d = self.performALookup(self.input, self.resolver)
+ @d.addCallback
+ def cb(res):
+ log.msg("The DNS query for %s is injected" % self.input)
+ self.report['injected'] = True
+
+ @d.addErrback
+ def err(err):
+ err.trap(defer.TimeoutError)
+ log.msg("The DNS query for %s is not injected" % self.input)
+ self.report['injected'] = False
+
+ return d
+
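
The idea behind the test above is that the default resolver address (8.8.8.1) is not a real resolver, so any answer at all must have been injected on-path, while a timeout is the expected clean result. A rough standard-library sketch of the same probe, outside the patch; the helper name and hostname are illustrative:

    import socket
    import struct

    def query_is_injected(hostname, resolver_ip='8.8.8.1', timeout=3):
        # Build a minimal DNS A query: header (ID, RD flag, QDCOUNT=1) + question.
        header = struct.pack('!HHHHHH', 0x1337, 0x0100, 1, 0, 0, 0)
        qname = b''.join(struct.pack('B', len(label)) + label.encode()
                         for label in hostname.split('.')) + b'\x00'
        question = qname + struct.pack('!HH', 1, 1)   # QTYPE=A, QCLASS=IN
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        sock.settimeout(timeout)
        try:
            sock.sendto(header + question, (resolver_ip, 53))
            sock.recvfrom(512)    # any reply from a non-resolver implies injection
            return True
        except socket.timeout:
            return False          # timing out is the expected, uncensored outcome
        finally:
            sock.close()
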
diff --git a/data/nettests/experimental/domclass_collector.py b/data/nettests/experimental/domclass_collector.py
new file mode 100644
index 0000000..c1866f2
--- /dev/null
+++ b/data/nettests/experimental/domclass_collector.py
@@ -0,0 +1,33 @@
+# -*- encoding: utf-8 -*-
+#
+# The purpose of this collector is to compute the eigenvector for the input
+# file containing a list of sites.
+#
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from twisted.internet import threads, defer
+
+from ooni.kit import domclass
+from ooni.templates import httpt
+
+class DOMClassCollector(httpt.HTTPTest):
+ name = "DOM class collector"
+ author = "Arturo Filastò"
+ version = 0.1
+
+ followRedirects = True
+
+ inputFile = ['file', 'f', None, 'The list of urls to build a domclass for']
+
+ def test_collect(self):
+ if self.input:
+ url = self.input
+ return self.doRequest(url)
+ else:
+ raise Exception("No input specified")
+
+ def processResponseBody(self, body):
+ eigenvalues = domclass.compute_eigenvalues_from_DOM(content=body)
+ self.report['eigenvalues'] = eigenvalues.tolist()
diff --git a/data/nettests/experimental/http_filtering_bypassing.py b/data/nettests/experimental/http_filtering_bypassing.py
new file mode 100644
index 0000000..dc103db
--- /dev/null
+++ b/data/nettests/experimental/http_filtering_bypassing.py
@@ -0,0 +1,84 @@
+# -*- encoding: utf-8 -*-
+from twisted.python import usage
+
+from ooni.utils import log
+from ooni.utils import randomStr, randomSTR
+from ooni.templates import tcpt
+
+class UsageOptions(usage.Options):
+ optParameters = [['backend', 'b', '127.0.0.1',
+ 'The OONI backend that runs a TCP echo server'],
+ ['backendport', 'p', 80, 'Specify the port that the TCP echo server is running (should only be set for debugging)']]
+
+class HTTPFilteringBypass(tcpt.TCPTest):
+ name = "HTTPFilteringBypass"
+ version = "0.1"
+ authors = "xx"
+
+ inputFile = ['file', 'f', None,
+ 'Specify a list of hostnames to use as inputs']
+
+ usageOptions = UsageOptions
+ requiredOptions = ['backend']
+
+ def setUp(self):
+ self.port = int(self.localOptions['backendport'])
+ self.address = self.localOptions['backend']
+
+ def check_for_manipulation(self, response, payload):
+ log.debug("Checking if %s == %s" % (response, payload))
+ if response != payload:
+ self.report['tampering'] = True
+ else:
+ self.report['tampering'] = False
+
+ def test_prepend_newline(self):
+ payload = "\nGET / HTTP/1.1\n\r"
+ payload += "Host: %s\n\r" % self.input
+
+ d = self.sendPayload(payload)
+ d.addCallback(self.check_for_manipulation, payload)
+ return d
+
+ def test_tab_trick(self):
+ payload = "GET / HTTP/1.1\n\r"
+ payload += "Host: %s\t\n\r" % self.input
+
+ d = self.sendPayload(payload)
+ d.addCallback(self.check_for_manipulation, payload)
+ return d
+
+ def test_subdomain_blocking(self):
+ payload = "GET / HTTP/1.1\n\r"
+ payload += "Host: %s\n\r" % randomStr(10) + '.' + self.input
+
+ d = self.sendPayload(payload)
+ d.addCallback(self.check_for_manipulation, payload)
+ return d
+
+ def test_fuzzy_domain_blocking(self):
+ hostname_field = randomStr(10) + '.' + self.input + '.' + randomStr(10)
+ payload = "GET / HTTP/1.1\n\r"
+ payload += "Host: %s\n\r" % hostname_field
+
+ d = self.sendPayload(payload)
+ d.addCallback(self.check_for_manipulation, payload)
+ return d
+
+ def test_fuzzy_match_blocking(self):
+ hostname_field = randomStr(10) + self.input + randomStr(10)
+ payload = "GET / HTTP/1.1\n\r"
+ payload += "Host: %s\n\r" % hostname_field
+
+ d = self.sendPayload(payload)
+ d.addCallback(self.check_for_manipulation, payload)
+ return d
+
+ def test_normal_request(self):
+ payload = "GET / HTTP/1.1\n\r"
+ payload += "Host: %s\n\r" % self.input
+
+ d = self.sendPayload(payload)
+ d.addCallback(self.check_for_manipulation, payload)
+ return d
+
diff --git a/data/nettests/experimental/http_keyword_filtering.py b/data/nettests/experimental/http_keyword_filtering.py
new file mode 100644
index 0000000..0ae9c52
--- /dev/null
+++ b/data/nettests/experimental/http_keyword_filtering.py
@@ -0,0 +1,45 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from twisted.python import usage
+
+from ooni.templates import httpt
+
+class UsageOptions(usage.Options):
+ optParameters = [['backend', 'b', 'http://127.0.0.1:57001',
+ 'URL of the test backend to use']]
+
+class HTTPKeywordFiltering(httpt.HTTPTest):
+ """
+ This test involves performing HTTP requests containing keywords to be
+ tested for censorship.
+
+ It does not detect censorship on the client, but just logs the response from the
+ HTTP backend server.
+ """
+ name = "HTTP Keyword Filtering"
+ author = "Arturo Filastò"
+ version = "0.1.1"
+
+ inputFile = ['file', 'f', None, 'List of keywords to use for censorship testing']
+
+ usageOptions = UsageOptions
+
+ requiredOptions = ['backend']
+
+ def test_get(self):
+ """
+ Perform a HTTP GET request to the backend containing the keyword to be
+ tested inside of the request body.
+ """
+ return self.doRequest(self.localOptions['backend'], method="GET", body=self.input)
+
+ def test_post(self):
+ """
+ Perform a HTTP POST request to the backend containing the keyword to be
+ tested inside of the request body.
+ """
+ return self.doRequest(self.localOptions['backend'], method="POST", body=self.input)
+
diff --git a/data/nettests/experimental/http_trix.py b/data/nettests/experimental/http_trix.py
new file mode 100644
index 0000000..85a4ba2
--- /dev/null
+++ b/data/nettests/experimental/http_trix.py
@@ -0,0 +1,47 @@
+# -*- encoding: utf-8 -*-
+from twisted.python import usage
+
+from ooni.utils import log
+from ooni.utils import randomStr, randomSTR
+from ooni.templates import tcpt
+
+class UsageOptions(usage.Options):
+ optParameters = [['backend', 'b', '127.0.0.1',
+ 'The OONI backend that runs a TCP echo server'],
+ ['backendport', 'p', 80, 'Specify the port that the TCP echo server is running on (should only be set for debugging)']]
+
+class HTTPTrix(tcpt.TCPTest):
+ name = "HTTPTrix"
+ version = "0.1"
+ authors = "Arturo Filastò"
+
+ usageOptions = UsageOptions
+ requiredOptions = ['backend']
+
+ def setUp(self):
+ self.port = int(self.localOptions['backendport'])
+ self.address = self.localOptions['backend']
+
+ def check_for_manipulation(self, response, payload):
+ log.debug("Checking if %s == %s" % (response, payload))
+ if response != payload:
+ self.report['tampering'] = True
+ else:
+ self.report['tampering'] = False
+
+ def test_for_squid_cache_object(self):
+ """
+ This detects the presence of a squid transparent HTTP proxy by sending
+ a request for cache_object://localhost/info.
+
+ This tests for the presence of a Squid Transparent proxy by sending:
+
+ GET cache_object://localhost/info HTTP/1.1
+ """
+ payload = 'GET cache_object://localhost/info HTTP/1.1'
+ payload += '\n\r'
+
+ d = self.sendPayload(payload)
+ d.addCallback(self.check_for_manipulation, payload)
+ return d
+
diff --git a/data/nettests/experimental/http_uk_mobile_networks.py b/data/nettests/experimental/http_uk_mobile_networks.py
new file mode 100644
index 0000000..784a9e9
--- /dev/null
+++ b/data/nettests/experimental/http_uk_mobile_networks.py
@@ -0,0 +1,85 @@
+# -*- encoding: utf-8 -*-
+import yaml
+
+from twisted.python import usage
+from twisted.plugin import IPlugin
+
+from ooni.templates import httpt
+from ooni.utils import log
+
+class UsageOptions(usage.Options):
+ """
+ See https://github.com/hellais/ooni-inputs/processed/uk_mobile_networks_redirec…
+ to see what the rules file should look like.
+ """
+ optParameters = [
+ ['rules', 'y', None,
+ 'Specify the redirect rules file ']
+ ]
+
+class HTTPUKMobileNetworksTest(httpt.HTTPTest):
+ """
+ This test was thought of by Open Rights Group and implemented with the
+ purpose of detecting censorship in the UK.
+ For more details on this test see:
+ https://trac.torproject.org/projects/tor/ticket/6437
+ XXX port the knowledge from the trac ticket into this test docstring
+ """
+ name = "HTTP UK mobile network redirect test"
+
+ usageOptions = UsageOptions
+
+ followRedirects = True
+
+ inputFile = ['urls', 'f', None, 'List of urls one per line to test for censorship']
+ requiredOptions = ['urls']
+
+ def testPattern(self, value, pattern, type):
+ if type == 'eq':
+ return value == pattern
+ elif type == 're':
+ import re
+ if re.match(pattern, value):
+ return True
+ else:
+ return False
+ else:
+ return None
+
+ def testPatterns(self, patterns, location):
+ test_result = False
+
+ if type(patterns) == list:
+ for pattern in patterns:
+ test_result |= self.testPattern(location, pattern['value'], pattern['type'])
+
+ return test_result
+
+ def testRules(self, rules, location):
+ result = {}
+ blocked = False
+ for rule, value in rules.items():
+ current_rule = {}
+ current_rule['name'] = value['name']
+ current_rule['patterns'] = value['patterns']
+ current_rule['test'] = self.testPatterns(value['patterns'], location)
+ blocked |= current_rule['test']
+ result[rule] = current_rule
+ result['blocked'] = blocked
+ return result
+
+ def processRedirect(self, location):
+ self.report['redirect'] = None
+ rules_file = self.localOptions['rules']
+
+ fp = open(rules_file)
+ rules = yaml.safe_load(fp)
+ fp.close()
+
+ log.msg("Testing rules %s" % rules)
+ redirect = self.testRules(rules, location)
+ self.report['redirect'] = redirect
+
+
+
diff --git a/data/nettests/experimental/keyword_filtering.py b/data/nettests/experimental/keyword_filtering.py
new file mode 100644
index 0000000..9eec4ff
--- /dev/null
+++ b/data/nettests/experimental/keyword_filtering.py
@@ -0,0 +1,52 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from twisted.python import usage
+from twisted.internet import defer
+
+from ooni.utils import log
+from ooni.templates import scapyt
+
+from scapy.all import *
+
+class UsageOptions(usage.Options):
+ optParameters = [
+ ['backend', 'b', '127.0.0.1:57002', 'Test backend running TCP echo'],
+ ['timeout', 't', 5, 'Timeout after which to give up waiting for RST packets']
+ ]
+
+class KeywordFiltering(scapyt.BaseScapyTest):
+ name = "Keyword Filtering detection based on RST packets"
+ author = "Arturo Filastò"
+ version = "0.1"
+
+ usageOptions = UsageOptions
+
+ inputFile = ['file', 'f', None,
+ 'List of keywords to use for censorship testing']
+
+ def test_tcp_keyword_filtering(self):
+ """
+ Places the keyword to be tested in the payload of a TCP packet.
+ XXX need to implement bisection method for enumerating keywords.
+ though this should not be an issue since we are testing all
+ the keywords in parallel.
+ """
+ def finished(packets):
+ log.debug("Finished running keyword filtering test")
+ answered, unanswered = packets
+ self.report['rst_packets'] = []
+ for snd, rcv in answered:
+ # The received packet has only the RST flag set
+ if rcv[TCP].flags == 4:
+ self.report['rst_packets'].append(rcv)
+
+ backend_ip, backend_port = self.localOptions['backend'].split(':')
+ timeout = int(self.localOptions['timeout'])
+ keyword_to_test = str(self.input)
+ packets = IP(dst=backend_ip,id=RandShort())/TCP(dport=int(backend_port))/keyword_to_test
+ d = self.sr(packets, timeout=timeout)
+ d.addCallback(finished)
+ return d
+
diff --git a/data/nettests/experimental/parasitictraceroute.py b/data/nettests/experimental/parasitictraceroute.py
new file mode 100644
index 0000000..631c24b
--- /dev/null
+++ b/data/nettests/experimental/parasitictraceroute.py
@@ -0,0 +1,129 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from twisted.python import usage
+from twisted.internet import defer
+
+from ooni.templates import scapyt
+
+from scapy.all import *
+
+from ooni.utils import log
+
+class UsageOptions(usage.Options):
+ optParameters = [['backend', 'b', 'google.com', 'Test backend to use'],
+ ['timeout', 't', 5, 'The timeout for the traceroute test'],
+ ['maxttl', 'm', 64, 'The maximum value of ttl to set on packets'],
+ ['dstport', 'd', 80, 'Set the destination port of the traceroute test'],
+ ['srcport', 'p', None, 'Set the source port to a specific value']]
+
+class ParasiticalTracerouteTest(scapyt.BaseScapyTest):
+ name = "Parasitic TCP Traceroute Test"
+ author = "Arturo Filastò"
+ version = "0.1"
+
+ usageOptions = UsageOptions
+
+ def setUp(self):
+ def get_sport():
+ if self.localOptions['srcport']:
+ return int(self.localOptions['srcport'])
+ else:
+ return random.randint(1024, 65535)
+ self.get_sport = get_sport
+
+ self.dst_ip = socket.gethostbyname(self.localOptions['backend'])
+
+ self.dport = int(self.localOptions['dstport'])
+ self.max_ttl = int(self.localOptions['maxttl'])
+
+ @defer.inlineCallbacks
+ def test_parasitic_tcp_traceroute(self):
+ """
+ Establishes a TCP stream, then sequentially sends TCP packets with
+ increasing TTL until we reach the ttl of the destination.
+
+ Requires the backend to respond with an ACK to our SYN packet (i.e.
+ the port must be open)
+
+ XXX this currently does not work properly. The problem lies in the fact
+ that we are currently using the scapy layer 3 socket. This socket makes
+ packets received be trapped by the kernel TCP stack, therefore when we
+ send out a SYN and get back a SYN-ACK the kernel stack will reply with
+ a RST because it did not send a SYN.
+
+ The quick fix to this would be to establish a TCP stream using socket
+ calls and then "cannibalizing" the TCP session with scapy.
+
+ The real fix is to make scapy use libpcap instead of raw sockets
+ obviously as we previously did... arg.
+ """
+ sport = self.get_sport()
+ dport = self.dport
+ ipid = int(RandShort())
+
+ ip_layer = IP(dst=self.dst_ip,
+ id=ipid, ttl=self.max_ttl)
+
+ syn = ip_layer/TCP(sport=sport, dport=dport, flags="S", seq=0)
+
+ log.msg("Sending...")
+ syn.show2()
+
+ synack = yield self.sr1(syn)
+
+ if not synack:
+ log.err("Got no response. Try increasing max_ttl")
+ return
+
+ log.msg("Got response...")
+ synack.show2()
+
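+ # TCP flag bits: FIN=0x01, SYN=0x02, RST=0x04, ACK=0x10, so 0x11 is a
+ # FIN+ACK and 18 (0x12) is a SYN+ACK.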
+ if synack[TCP].flags == 0x11:
+ log.msg("Got back a FIN ACK. The destination port is closed")
+ return
+
+ elif synack[TCP].flags == 18:
+ log.msg("Got a SYN ACK. All is well.")
+ else:
+ log.err("Got an unexpected result")
+ return
+
+ ack = ip_layer/TCP(sport=synack.dport,
+ dport=dport, flags="A",
+ seq=synack.ack, ack=synack.seq + 1)
+
+ yield self.send(ack)
+
+ self.report['hops'] = []
+ # For the time being we make the assumption that we are NATted and
+ # that the NAT will forward the packet to the destination even if the TTL has expired.
+ for ttl in range(1, self.max_ttl):
+ log.msg("Sending packet with ttl of %s" % ttl)
+ ip_layer.ttl = ttl
+ empty_tcp_packet = ip_layer/TCP(sport=synack.dport,
+ dport=dport, flags="A",
+ seq=synack.ack, ack=synack.seq + 1)
+
+ answer = yield self.sr1(empty_tcp_packet)
+ if not answer:
+ log.err("Got no response for ttl %s" % ttl)
+ continue
+
+ try:
+ icmp = answer[ICMP]
+ report = {'ttl': empty_tcp_packet.ttl,
+ 'address': answer.src,
+ 'rtt': answer.time - empty_tcp_packet.time
+ }
+ log.msg("%s: %s" % (dport, report))
+ self.report['hops'].append(report)
+
+ except IndexError:
+ if answer.src == self.dst_ip:
+ answer.show()
+ log.msg("Reached the destination. We have finished the traceroute")
+ return
+
diff --git a/data/nettests/experimental/squid.py b/data/nettests/experimental/squid.py
new file mode 100644
index 0000000..777bc3e
--- /dev/null
+++ b/data/nettests/experimental/squid.py
@@ -0,0 +1,117 @@
+# -*- encoding: utf-8 -*-
+#
+# Squid transparent HTTP proxy detector
+# *************************************
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+import re
+
+from ooni import utils
+from ooni.utils import log
+from ooni.templates import httpt
+
+class SquidTest(httpt.HTTPTest):
+ """
+ This test aims at detecting the presence of a squid based transparent HTTP
+ proxy. It also tries to detect the version number.
+ """
+ name = "Squid test"
+ author = "Arturo Filastò"
+ version = "0.1"
+
+ optParameters = [['backend', 'b', 'http://ooni.nu/test/', 'Test backend to use']]
+
+ #inputFile = ['urls', 'f', None, 'Urls file']
+ inputs =['http://google.com']
+ def test_cacheobject(self):
+ """
+ This detects the presence of a squid transparent HTTP proxy by sending
+ a request for cache_object://localhost/info.
+
+ The response to this request will usually also contain the squid
+ version number.
+ """
+ log.debug("Running")
+ def process_body(body):
+ if "Access Denied." in body:
+ self.report['transparent_http_proxy'] = True
+ else:
+ self.report['transparent_http_proxy'] = False
+
+ log.msg("Testing Squid proxy presence by sending a request for "\
+ "cache_object")
+ headers = {}
+ #headers["Host"] = [self.input]
+ self.report['transparent_http_proxy'] = None
+ method = "GET"
+ body = "cache_object://localhost/info"
+ return self.doRequest(self.localOptions['backend'], method=method, body=body,
+ headers=headers, body_processor=process_body)
+
+ def test_search_bad_request(self):
+ """
+ Attempts to perform a request with a random invalid HTTP method.
+
+ If we are being MITMed by a Transparent Squid HTTP proxy we will get
+ back a response containing the X-Squid-Error header.
+ """
+ def process_headers(headers):
+ log.debug("Processing headers in test_search_bad_request")
+ if 'X-Squid-Error' in headers:
+ log.msg("Detected the presence of a transparent HTTP "\
+ "squid proxy")
+ self.report['transparent_http_proxy'] = True
+ else:
+ log.msg("Did not detect the presence of transparent HTTP "\
+ "squid proxy")
+ self.report['transparent_http_proxy'] = False
+
+ log.msg("Testing Squid proxy presence by sending a random bad request")
+ headers = {}
+ #headers["Host"] = [self.input]
+ method = utils.randomSTR(10, True)
+ self.report['transparent_http_proxy'] = None
+ return self.doRequest(self.localOptions['backend'], method=method,
+ headers=headers, headers_processor=process_headers)
+
+ def test_squid_headers(self):
+ """
+ Detects the presence of a squid transparent HTTP proxy based on the
+ response headers it adds to the responses to requests.
+ """
+ def process_headers(headers):
+ """
+ Checks if any of the headers that squid is known to add match the
+ squid regexp.
+
+ We are looking for something that looks like this:
+
+ via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
+ x-cache: MISS from cache_server
+ x-cache-lookup: MISS from cache_server:3128
+ """
+ squid_headers = {'via': r'.* \((squid.*)\)',
+ 'x-cache': r'MISS from (\w+)',
+ 'x-cache-lookup': r'MISS from (\w+:?\d+?)'
+ }
+
+ self.report['transparent_http_proxy'] = False
+ for key in squid_headers.keys():
+ if key in headers:
+ log.debug("Found %s in headers" % key)
+ m = re.search(squid_headers[key], headers[key])
+ if m:
+ log.msg("Detected the presence of squid transparent"\
+ " HTTP Proxy")
+ self.report['transparent_http_proxy'] = True
+
+ log.msg("Testing Squid proxy by looking at response headers")
+ headers = {}
+ #headers["Host"] = [self.input]
+ method = "GET"
+ self.report['transparent_http_proxy'] = None
+ d = self.doRequest(self.localOptions['backend'], method=method,
+ headers=headers, headers_processor=process_headers)
+ return d
+
+
diff --git a/data/nettests/manipulation/__init__.py b/data/nettests/manipulation/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/data/nettests/manipulation/captiveportal.py b/data/nettests/manipulation/captiveportal.py
new file mode 100644
index 0000000..a0f8c6b
--- /dev/null
+++ b/data/nettests/manipulation/captiveportal.py
@@ -0,0 +1,650 @@
+# -*- coding: utf-8 -*-
+# captiveportal
+# *************
+#
+# This test is a collection of tests to detect the presence of a
+# captive portal. Code is taken, in part, from the old ooni-probe,
+# which was written by Jacob Appelbaum and Arturo Filastò.
+#
+# This module performs multiple tests that match specific vendor captive
+# portal tests. This is a basic internet captive portal filter tester written
+# for RECon 2011.
+#
+# Read the following URLs to understand the captive portal detection process
+# for various vendors:
+#
+# http://technet.microsoft.com/en-us/library/cc766017%28WS.10%29.aspx
+# http://blog.superuser.com/2011/05/16/windows-7-network-awareness/
+# http://isc.sans.org/diary.html?storyid=10312&
+# http://src.chromium.org/viewvc/chrome?view=rev&revision=74608
+# http://code.google.com/p/chromium-os/issues/detail?3281ttp,
+# http://crbug.com/52489
+# http://crbug.com/71736
+# https://bugzilla.mozilla.org/show_bug.cgi?id=562917
+# https://bugzilla.mozilla.org/show_bug.cgi?id=603505
+# http://lists.w3.org/Archives/Public/ietf-http-wg/2011JanMar/0086.html
+# http://tools.ietf.org/html/draft-nottingham-http-portal-02
+#
+# :authors: Jacob Appelbaum, Arturo Filastò, Isis Lovecruft
+# :license: see LICENSE for more details
+
+import base64
+import os
+import random
+import re
+import string
+import urllib2
+from urlparse import urlparse
+
+from twisted.python import usage
+from twisted.internet import defer, threads
+
+from ooni import nettest
+from ooni.templates import httpt
+from ooni.utils import net
+from ooni.utils import log
+
+try:
+ from dns import resolver
+except ImportError:
+ print "The dnspython module was not found:"
+ print "See https://crate.io/packages/dnspython/"
+ resolver = None
+
+__plugoo__ = "captiveportal"
+__desc__ = "Captive portal detection test"
+
+class UsageOptions(usage.Options):
+ optParameters = [['asset', 'a', None, 'Asset file'],
+ ['experiment-url', 'e', 'http://google.com/', 'Experiment URL'],
+ ['user-agent', 'u', random.choice(net.userAgents),
+ 'User agent for HTTP requests']
+ ]
+
+class CaptivePortal(nettest.NetTestCase):
+ """
+ Compares content and status codes of HTTP responses, and attempts
+ to determine if content has been altered.
+ """
+
+ name = "captivep"
+ description = "Captive Portal Test"
+ version = '0.2'
+ author = "Isis Lovecruft"
+ usageOptions = UsageOptions
+
+ def http_fetch(self, url, headers={}):
+ """
+ Parses an HTTP url, fetches it, and returns a urllib2 response
+ object.
+ """
+ url = urlparse(url).geturl()
+ request = urllib2.Request(url, None, headers)
+ #XXX: HTTP Error 302: The HTTP server returned a redirect error that
+ #would lead to an infinite loop. The last 30x error message was: Found
+ try:
+ response = urllib2.urlopen(request)
+ response_headers = dict(response.headers)
+ return response, response_headers
+ except urllib2.HTTPError, e:
+ log.err("HTTPError: %s" % e)
+ return None, None
+
+ def http_content_match_fuzzy_opt(self, experimental_url, control_result,
+ headers=None, fuzzy=False):
+ """
+ Makes an HTTP request on port 80 for experimental_url, then
+ compares the response_content of experimental_url with the
+ control_result. Optionally, if the fuzzy parameter is set to
+ True, the response_content is compared with a regex of the
+ control_result. If the response_content from the
+ experimental_url and the control_result match, returns True
+ with the HTTP status code and headers; False, status code, and
+ headers if otherwise.
+ """
+
+ if headers is None:
+ default_ua = self.local_options['user-agent']
+ headers = {'User-Agent': default_ua}
+
+ response, response_headers = self.http_fetch(experimental_url, headers)
+
+ response_content = response.read() if response else None
+ response_code = response.code if response else None
+ if response_content is None:
+ log.err("HTTP connection appears to have failed.")
+ return False, False, False
+
+ if fuzzy:
+ pattern = re.compile(control_result)
+ match = pattern.search(response_content)
+ log.msg("Fuzzy HTTP content comparison for experiment URL")
+ log.msg("'%s'" % experimental_url)
+ if not match:
+ log.msg("does not match!")
+ return False, response_code, response_headers
+ else:
+ log.msg("and the expected control result yielded a match.")
+ return True, response_code, response_headers
+ else:
+ if str(response_content) != str(control_result):
+ log.msg("HTTP content comparison of experiment URL")
+ log.msg("'%s'" % experimental_url)
+ log.msg("and the expected control result do not match.")
+ return False, response_code, response_headers
+ else:
+ return True, response_code, response_headers
+
+ def http_status_code_match(self, experiment_code, control_code):
+ """
+ Compare two HTTP status codes, returns True if they match.
+ """
+ return int(experiment_code) == int(control_code)
+
+ def http_status_code_no_match(self, experiment_code, control_code):
+ """
+ Compare two HTTP status codes, returns True if they do not match.
+ """
+ return int(experiment_code) != int(control_code)
+
+ def dns_resolve(self, hostname, nameserver=None):
+ """
+ Resolves hostname(s) through nameserver to corresponding
+ address(es). hostname may be either a single hostname string,
+ or a list of strings. If nameserver is not given, use local
+ DNS resolver, and if that fails try using 8.8.8.8.
+ """
+ if not resolver:
+ log.msg("dnspython is not installed.\
+ Cannot perform DNS Resolve test")
+ return []
+ if isinstance(hostname, str):
+ hostname = [hostname]
+
+ if nameserver is not None:
+ res = resolver.Resolver(configure=False)
+ res.nameservers = [nameserver]
+ else:
+ res = resolver.Resolver()
+
+ response = []
+ answer = None
+
+ for hn in hostname:
+ try:
+ answer = res.query(hn)
+ except resolver.NoNameservers:
+ res.nameservers = ['8.8.8.8']
+ try:
+ answer = res.query(hn)
+ except resolver.NXDOMAIN:
+ log.msg("DNS resolution for %s returned NXDOMAIN" % hn)
+ response.append('NXDOMAIN')
+ except resolver.NXDOMAIN:
+ log.msg("DNS resolution for %s returned NXDOMAIN" % hn)
+ response.append('NXDOMAIN')
+ finally:
+ if not answer:
+ return response
+ for addr in answer:
+ response.append(addr.address)
+ return response
+
+ def dns_resolve_match(self, experiment_hostname, control_address):
+ """
+ Resolve experiment_hostname, and check to see that it returns
+ an experiment_address which matches the control_address. If
+ they match, returns True and experiment_address; otherwise
+ returns False and experiment_address.
+ """
+ experiment_address = self.dns_resolve(experiment_hostname)
+ if not experiment_address:
+ log.debug("dns_resolve() for %s failed" % experiment_hostname)
+ return None, experiment_address
+
+ if len(set(experiment_address) & set([control_address])) > 0:
+ return True, experiment_address
+ else:
+ log.msg("DNS comparison of control '%s' does not" % control_address)
+ log.msg("match experiment response '%s'" % experiment_address)
+ return False, experiment_address
+
+ def get_auth_nameservers(self, hostname):
+ """
+ Many CPs set a nameserver to be used. Let's query that
+ nameserver for the authoritative nameservers of hostname.
+
+ The equivalent of:
+ $ dig +short NS ooni.nu
+ """
+ if not resolver:
+ log.msg("dnspython not installed.")
+ log.msg("Cannot perform test.")
+ return []
+
+ res = resolver.Resolver()
+ answer = res.query(hostname, 'NS')
+ auth_nameservers = []
+ for auth in answer:
+ auth_nameservers.append(auth.to_text())
+ return auth_nameservers
+
+ def hostname_to_0x20(self, hostname):
+ """
+ MaKEs yOur HOsTnaME lOoK LiKE THis.
+
+ For more information, see:
+ D. Dagon, et al. "Increased DNS Forgery Resistance
+ Through 0x20-Bit Encoding". Proc. CSS, 2008.
+ """
+ hostname_0x20 = ''
+ for char in hostname:
+ l33t = random.choice(['caps', 'nocaps'])
+ if l33t == 'caps':
+ hostname_0x20 += char.capitalize()
+ else:
+ hostname_0x20 += char.lower()
+ return hostname_0x20
+
+ def check_0x20_to_auth_ns(self, hostname, sample_size=None):
+ """
+ Resolve a 0x20 DNS request for hostname over hostname's
+ authoritative nameserver(s), and check to make sure that
+ the capitalization in the 0x20 request matches that of the
+ response. Also, check the serial numbers of the SOA (Start
+ of Authority) records on the authoritative nameservers to
+ make sure that they match.
+
+ If sample_size is given, a random sample equal to that number
+ of authoritative nameservers will be queried; default is 5.
+ """
+ log.msg("")
+ log.msg("Testing random capitalization of DNS queries...")
+ log.msg("Testing that Start of Authority serial numbers match...")
+
+ auth_nameservers = self.get_auth_nameservers(hostname)
+
+ if sample_size is None:
+ sample_size = 5
+ resolved_auth_ns = random.sample(self.dns_resolve(auth_nameservers),
+ sample_size)
+
+ querynames = []
+ answernames = []
+ serials = []
+
+ # Even when gevent monkey patching is on, the requests here
+ # are sent without being 0x20'd, so we need to 0x20 them.
+ hostname = self.hostname_to_0x20(hostname)
+
+ for auth_ns in resolved_auth_ns:
+ res = resolver.Resolver(configure=False)
+ res.nameservers = [auth_ns]
+ try:
+ answer = res.query(hostname, 'SOA')
+ except resolver.Timeout:
+ continue
+ querynames.append(answer.qname.to_text())
+ answernames.append(answer.rrset.name.to_text())
+ for soa in answer:
+ serials.append(str(soa.serial))
+
+ if len(set(querynames).intersection(answernames)) == 1:
+ log.msg("Capitalization in DNS queries and responses match.")
+ name_match = True
+ else:
+ log.msg("The random capitalization '%s' used in" % hostname)
+ log.msg("DNS queries to that hostname's authoritative")
+ log.msg("nameservers does not match the capitalization in")
+ log.msg("the response.")
+ name_match = False
+
+ if len(set(serials)) == 1:
+ log.msg("Start of Authority serial numbers all match.")
+ serial_match = True
+ else:
+ log.msg("Some SOA serial numbers did not match the rest!")
+ serial_match = False
+
+ ret = name_match, serial_match, querynames, answernames, serials
+
+ if name_match and serial_match:
+ log.msg("Your DNS queries do not appear to be tampered.")
+ return ret
+ elif name_match or serial_match:
+ log.msg("Something is tampering with your DNS queries.")
+ return ret
+ elif not name_match and not serial_match:
+ log.msg("Your DNS queries are definitely being tampered with.")
+ return ret
+
+ def get_random_url_safe_string(self, length):
+ """
+ Returns a random url-safe string of specified length, where
+ 0 < length <= 256. The returned string will always start with
+ an alphabetic character.
+ """
+ if (length <= 0):
+ length = 1
+ elif (length > 256):
+ length = 256
+
+ random_ascii = base64.urlsafe_b64encode(os.urandom(int(length)))
+
+ while not random_ascii[:1].isalpha():
+ random_ascii = base64.urlsafe_b64encode(os.urandom(int(length)))
+
+ three_quarters = int((len(random_ascii)) * (3.0/4.0))
+ random_string = random_ascii[:three_quarters]
+ return random_string
+
+ def get_random_hostname(self, length=None):
+ """
+ Returns a random hostname with SLD of specified length. If
+ length is unspecified, length=32 is used.
+
+ These *should* all resolve to NXDOMAIN. If they actually
+ resolve to a box that isn't part of a captive portal that
+ would be rather interesting.
+ """
+ if length is None:
+ length = 32
+
+ random_sld = self.get_random_url_safe_string(length)
+
+ # if it doesn't start with a letter, chuck it.
+ while not random_sld[:1].isalpha():
+ random_sld = self.get_random_url_safe_string(length)
+
+ tld_list = ['.com', '.net', '.org', '.info', '.test', '.invalid']
+ random_tld = urllib2.random.choice(tld_list)
+ random_hostname = random_sld + random_tld
+ return random_hostname
+
+ def compare_random_hostnames(self, hostname_count=None, hostname_length=None):
+ """
+ Get hostname_count number of random hostnames with SLD length
+ of hostname_length, and then attempt DNS resolution. If no
+ arguments are given, default to three hostnames of 32 bytes
+ each. These random hostnames *should* resolve to NXDOMAIN,
+ except in the case where a user is presented with a captive
+ portal and remains unauthenticated, in which case the captive
+ portal may return the address of the authentication page.
+
+ If the cardinality of the intersection of the set of resolved
+ random hostnames and the single element control set
+ (['NXDOMAIN']) is equal to one, then DNS properly resolved.
+
+ Returns true if only NXDOMAINs were returned, otherwise returns
+ False with the relative complement of the control set in the
+ response set.
+ """
+ if hostname_count is None:
+ hostname_count = 3
+
+ log.msg("Generating random hostnames...")
+ log.msg("Resolving DNS for %d random hostnames..." % hostname_count)
+
+ control = ['NXDOMAIN']
+ responses = []
+
+ for x in range(hostname_count):
+ random_hostname = self.get_random_hostname(hostname_length)
+ response_match, response_address = self.dns_resolve_match(random_hostname,
+ control[0])
+ for address in response_address:
+ if response_match is False:
+ log.msg("Strangely, DNS resolution of the random hostname")
+ log.msg("%s actually points to %s"
+ % (random_hostname, response_address))
+ responses = responses + [address]
+ else:
+ responses = responses + [address]
+
+ intersection = set(responses) & set(control)
+ relative_complement = set(responses) - set(control)
+ r = set(responses)
+
+ if (len(intersection) == 1) and (len(r) == 1):
+ log.msg("All %d random hostnames properly resolved to NXDOMAIN."
+ % hostname_count)
+ return True, relative_complement
+ elif (len(intersection) == 1) and (len(r) > 1):
+ log.msg("Something odd happened. Some random hostnames correctly")
+ log.msg("resolved to NXDOMAIN, but several others resolved")
+ log.msg("to the following addresses: %s" % relative_complement)
+ return False, relative_complement
+ elif (len(intersection) == 0) and (len(r) == 1):
+ log.msg("All random hostnames resolved to the IP address ")
+ log.msg("'%s', which is indicative of a captive portal." % r)
+ return False, relative_complement
+ else:
+ log.debug("Apparently, pigs are flying on your network, 'cause a")
+ log.debug("bunch of hostnames made from 32-byte random strings")
+ log.debug("just magically resolved to a bunch of random addresses.")
+ log.debug("That is definitely highly improbable. In fact, my napkin")
+ log.debug("tells me that the probability of just one of those")
+ log.debug("hostnames resolving to an address is 1.68e-59, making")
+ log.debug("it nearly twice as unlikely as an MD5 hash collision.")
+ log.debug("Either someone is seriously messing with your network,")
+ log.debug("or else you are witnessing the impossible. %s" % r)
+ return False, relative_complement
+
+ def google_dns_cp_test(self):
+ """
+ Google Chrome resolves three 10-byte random hostnames.
+ """
+ subtest = "Google Chrome DNS-based"
+ log.msg("Running the Google Chrome DNS-based captive portal test...")
+
+ gmatch, google_dns_result = self.compare_random_hostnames(3, 10)
+
+ if gmatch:
+ log.msg("Google Chrome DNS-based captive portal test did not")
+ log.msg("detect a captive portal.")
+ return google_dns_result
+ else:
+ log.msg("Google Chrome DNS-based captive portal test believes")
+ log.msg("you are in a captive portal, or else something very")
+ log.msg("odd is happening with your DNS.")
+ return google_dns_result
+
+ def ms_dns_cp_test(self):
+ """
+ Microsoft "phones home" to a server which will always resolve
+ to the same address.
+ """
+ subtest = "Microsoft NCSI DNS-based"
+
+ log.msg("")
+ log.msg("Running the Microsoft NCSI DNS-based captive portal")
+ log.msg("test...")
+
+ msmatch, ms_dns_result = self.dns_resolve_match("dns.msftncsi.com",
+ "131.107.255.255")
+ if msmatch:
+ log.msg("Microsoft NCSI DNS-based captive portal test did not")
+ log.msg("detect a captive portal.")
+ return ms_dns_result
+ else:
+ log.msg("Microsoft NCSI DNS-based captive portal test ")
+ log.msg("believes you are in a captive portal.")
+ return ms_dns_result
+
+ def run_vendor_dns_tests(self):
+ """
+ Run the vendor DNS tests.
+ """
+ report = {}
+ report['google_dns_cp'] = self.google_dns_cp_test()
+ report['ms_dns_cp'] = self.ms_dns_cp_test()
+
+ return report
+
+ def run_vendor_tests(self, *a, **kw):
+ """
+ These are several vendor tests used to detect the presence of
+ a captive portal. Each test compares HTTP status code and
+ content to the control results and has its own User-Agent
+ string, in order to emulate the test as it would occur on the
+ device it was intended for. Vendor tests are defined in the
+ format:
+ [exp_url, ctrl_result, ctrl_code, ua, test_name]
+ """
+
+ vendor_tests = [['http://www.apple.com/library/test/success.html',
+ 'Success',
+ '200',
+ 'Mozilla/5.0 (iPhone; U; CPU like Mac OS X; en) AppleWebKit/420+ (KHTML, like Gecko) Version/3.0 Mobile/1A543a Safari/419.3',
+ 'Apple HTTP Captive Portal'],
+ ['http://tools.ietf.org/html/draft-nottingham-http-portal-02',
+ '428 Network Authentication Required',
+ '428',
+ 'Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0',
+ 'W3 Captive Portal'],
+ ['http://www.msftncsi.com/ncsi.txt',
+ 'Microsoft NCSI',
+ '200',
+ 'Microsoft NCSI',
+ 'MS HTTP Captive Portal',]]
+
+ cm = self.http_content_match_fuzzy_opt
+ sm = self.http_status_code_match
+ snm = self.http_status_code_no_match
+
+ def compare_content(status_func, fuzzy, experiment_url, control_result,
+ control_code, headers, test_name):
+ log.msg("")
+ log.msg("Running the %s test..." % test_name)
+
+ content_match, experiment_code, experiment_headers = cm(experiment_url,
+ control_result,
+ headers, fuzzy)
+ status_match = status_func(experiment_code, control_code)
+
+ if status_match and content_match:
+ log.msg("The %s test was unable to detect" % test_name)
+ log.msg("a captive portal.")
+ return True
+ else:
+ log.msg("The %s test shows that your network" % test_name)
+ log.msg("is filtered.")
+ return False
+
+ result = []
+ for vt in vendor_tests:
+ report = {}
+ report['vt'] = vt
+
+ experiment_url = vt[0]
+ control_result = vt[1]
+ control_code = vt[2]
+ headers = {'User-Agent': vt[3]}
+ test_name = vt[4]
+
+ args = (experiment_url, control_result, control_code, headers, test_name)
+
+ if test_name == "MS HTTP Captive Portal":
+ report['result'] = compare_content(sm, False, *args)
+
+ elif test_name == "Apple HTTP Captive Portal":
+ report['result'] = compare_content(sm, True, *args)
+
+ elif test_name == "W3 Captive Portal":
+ report['result'] = compare_content(snm, True, *args)
+
+ else:
+ log.err("Ooni is trying to run an undefined CP vendor test.")
+ result.append(report)
+ return result
+
+ def control(self, experiment_result, args):
+ """
+ Compares the content and status code of the HTTP response for
+ experiment_url with the control_result and control_code
+ respectively. If the status codes match, but the experimental
+ content and control_result do not match, fuzzy matching is enabled
+ to determine if the control_result is at least included somewhere
+ in the experimental content. Returns True if matches are found,
+ and False if otherwise.
+ """
+ # XXX put this back to being parametrized
+ #experiment_url = self.local_options['experiment-url']
+ experiment_url = 'http://google.com/'
+ control_result = 'XX'
+ control_code = 200
+ ua = self.local_options['user-agent']
+
+ cm = self.http_content_match_fuzzy_opt
+ sm = self.http_status_code_match
+ snm = self.http_status_code_no_match
+
+ log.msg("Running test for '%s'..." % experiment_url)
+ content_match, experiment_code, experiment_headers = cm(experiment_url,
+ control_result)
+ status_match = sm(experiment_code, control_code)
+ if status_match and content_match:
+ log.msg("The test for '%s'" % experiment_url)
+ log.msg("was unable to detect a captive portal.")
+
+ self.report['result'] = True
+
+ elif status_match and not content_match:
+ log.msg("Retrying '%s' with fuzzy match enabled."
+ % experiment_url)
+ fuzzy_match, experiment_code, experiment_headers = cm(experiment_url,
+ control_result,
+ fuzzy=True)
+ if fuzzy_match:
+ self.report['result'] = True
+ else:
+ log.msg("Found modified content on '%s'," % experiment_url)
+ log.msg("which could indicate a captive portal.")
+
+ self.report['result'] = False
+ else:
+ log.msg("The content comparison test for ")
+ log.msg("'%s'" % experiment_url)
+ log.msg("shows that your HTTP traffic is filtered.")
+
+ self.report['result'] = False
+
+ @defer.inlineCallbacks
+ def test_captive_portal(self):
+ """
+ Runs the CaptivePortal(Test).
+
+ CONFIG OPTIONS
+ --------------
+
+ If "do_captive_portal_vendor_tests" is set to "true", then vendor
+ specific captive portal HTTP-based tests will be run.
+
+ If "do_captive_portal_dns_tests" is set to "true", then vendor
+ specific captive portal DNS-based tests will be run.
+
+ If "check_dns_requests" is set to "true", then Ooni-probe will
+ attempt to check that your DNS requests are not being tampered with
+ by a captive portal.
+
+ If "captive_portal" = "yourfilename.txt", then user-specified tests
+ will be run.
+
+ Any combination of the above tests can be run.
+ """
+
+ log.msg("")
+ log.msg("Running vendor tests...")
+ self.report['vendor_tests'] = yield threads.deferToThread(self.run_vendor_tests)
+
+ log.msg("")
+ log.msg("Running vendor DNS-based tests...")
+ self.report['vendor_dns_tests'] = yield threads.deferToThread(self.run_vendor_dns_tests)
+
+ log.msg("")
+ log.msg("Checking that DNS requests are not being tampered...")
+ self.report['check0x20'] = yield threads.deferToThread(self.check_0x20_to_auth_ns, 'ooni.nu')
+
+ log.msg("")
+ log.msg("Captive portal test finished!")
+
diff --git a/data/nettests/manipulation/daphne.py b/data/nettests/manipulation/daphne.py
new file mode 100644
index 0000000..09279fa
--- /dev/null
+++ b/data/nettests/manipulation/daphne.py
@@ -0,0 +1,119 @@
+# -*- encoding: utf-8 -*-
+from twisted.python import usage
+from twisted.internet import protocol, endpoints, reactor
+
+from ooni import nettest
+from ooni.kit import daphn3
+from ooni.utils import log
+
+class Daphn3ClientProtocol(daphn3.Daphn3Protocol):
+ def nextStep(self):
+ log.debug("Moving on to next step in the state walk")
+ self.current_data_received = 0
+ if self.current_step >= (len(self.steps) - 1):
+ log.msg("Reached the end of the state machine")
+ log.msg("Censorship fingerpint bisected!")
+ step_idx, mutation_idx = self.factory.mutation
+ log.msg("step_idx: %s | mutation_id: %s" % (step_idx, mutation_idx))
+ #self.transport.loseConnection()
+ if self.report:
+ self.report['mutation_idx'] = mutation_idx
+ self.report['step_idx'] = step_idx
+ self.d.callback(None)
+ return
+ else:
+ self.current_step += 1
+ if self._current_step_role() == self.role:
+ # We need to send more data because we are again responsible for
+ # doing so.
+ self.sendPayload()
+
+
+class Daphn3ClientFactory(protocol.ClientFactory):
+ protocol = daphn3.Daphn3Protocol
+ mutation = [0,0]
+ steps = None
+
+ def buildProtocol(self, addr):
+ p = self.protocol()
+ p.steps = self.steps
+ p.factory = self
+ return p
+
+ def startedConnecting(self, connector):
+ log.msg("Started connecting %s" % connector)
+
+ def clientConnectionFailed(self, reason, connector):
+ log.err("We failed connecting the the OONIB")
+ log.err("Cannot perform test. Perhaps it got blocked?")
+ log.err("Please report this to tor-assistants(a)torproject.org")
+
+ def clientConnectionLost(self, reason, connector):
+ log.err("Daphn3 client connection lost")
+ print reason
+
+class daphn3Args(usage.Options):
+ optParameters = [
+ ['host', 'h', '127.0.0.1', 'Target Hostname'],
+ ['port', 'p', 57003, 'Target port number']]
+
+ optFlags = [['pcap', 'c', 'Specify that the input file is a pcap file'],
+ ['yaml', 'y', 'Specify that the input file is a YAML file (default)']]
+
+class daphn3Test(nettest.NetTestCase):
+
+ name = "Daphn3"
+ usageOptions = daphn3Args
+ inputFile = ['file', 'f', None,
+ 'Specify the pcap or YAML file to be used as input to the test']
+
+ #requiredOptions = ['file']
+
+ steps = None
+
+ def inputProcessor(self, filename):
+ """
+ step_idx is the step in the packet exchange
+ ex.
+ [.X.] are packets sent by a client or a server
+
+ client: [.1.] [.3.] [.4.]
+ server: [.2.] [.5.]
+
+ mutation_idx: is the sub index of the packet as in the byte of the
+ packet at the step_idx that is to be mutated
+
+ """
+ if self.localOptions['pcap']:
+ daphn3Steps = daphn3.read_pcap(filename)
+ else:
+ daphn3Steps = daphn3.read_yaml(filename)
+ log.debug("Loaded these steps %s" % daphn3Steps)
+ yield daphn3Steps
+
+ def test_daphn3(self):
+ host = self.localOptions['host']
+ port = int(self.localOptions['port'])
+
+ def failure(failure):
+ log.msg("Failed to connect")
+ self.report['censored'] = True
+ self.report['mutation'] = 0
+ raise Exception("Error in connection, perhaps the backend is censored")
+ return
+
+ def success(protocol):
+ log.msg("Successfully connected")
+ protocol.sendPayload()
+ return protocol.d
+
+ log.msg("Connecting to %s:%s" % (host, port))
+ endpoint = endpoints.TCP4ClientEndpoint(reactor, host, port)
+ daphn3_factory = Daphn3ClientFactory()
+ daphn3_factory.steps = self.input
+ daphn3_factory.report = self.report
+ d = endpoint.connect(daphn3_factory)
+ d.addErrback(failure)
+ d.addCallback(success)
+ return d
+
diff --git a/data/nettests/manipulation/dnsspoof.py b/data/nettests/manipulation/dnsspoof.py
new file mode 100644
index 0000000..5c50c2f
--- /dev/null
+++ b/data/nettests/manipulation/dnsspoof.py
@@ -0,0 +1,69 @@
+from twisted.internet import defer
+from twisted.python import usage
+
+from scapy.all import IP, UDP, DNS, DNSQR
+
+from ooni.templates import scapyt
+from ooni.utils import log
+
+class UsageOptions(usage.Options):
+ optParameters = [['resolver', 'r', None,
+ 'Specify the resolver that should be used for DNS queries (ip:port)'],
+ ['hostname', 'h', None,
+ 'Specify the hostname of a censored site'],
+ ['backend', 'b', '8.8.8.8:53',
+ 'Specify the IP address of a good DNS resolver (ip:port)']
+ ]
+
+
+class DNSSpoof(scapyt.ScapyTest):
+ name = "DNS Spoof"
+ timeout = 2
+
+ usageOptions = UsageOptions
+
+ requiredOptions = ['hostname', 'resolver']
+
+ def setUp(self):
+ self.resolverAddr, self.resolverPort = self.localOptions['resolver'].split(':')
+ self.resolverPort = int(self.resolverPort)
+
+ self.controlResolverAddr, self.controlResolverPort = self.localOptions['backend'].split(':')
+ self.controlResolverPort = int(self.controlResolverPort)
+
+ self.hostname = self.localOptions['hostname']
+
+ def postProcessor(self, report):
+ """
+ This is not tested, but the concept is that if the two responses
+ match up then spoofing is occurring.
+ """
+ try:
+ test_answer = report['test_a_lookup']['answered_packets'][0][1]
+ control_answer = report['test_control_a_lookup']['answered_packets'][0][1]
+ except IndexError:
+ self.report['spoofing'] = 'no_answer'
+ return
+
+ if test_answer[UDP] == control_answer[UDP]:
+ self.report['spoofing'] = True
+ else:
+ self.report['spoofing'] = False
+ return
+
+ @defer.inlineCallbacks
+ def test_a_lookup(self):
+ question = IP(dst=self.resolverAddr)/UDP()/DNS(rd=1,
+ qd=DNSQR(qtype="A", qclass="IN", qname=self.hostname))
+ log.msg("Performing query to %s with %s:%s" % (self.hostname, self.resolverAddr, self.resolverPort))
+ yield self.sr1(question)
+
+ @defer.inlineCallbacks
+ def test_control_a_lookup(self):
+ question = IP(dst=self.controlResolverAddr)/UDP()/DNS(rd=1,
+ qd=DNSQR(qtype="A", qclass="IN", qname=self.hostname))
+ log.msg("Performing query to %s with %s:%s" % (self.hostname,
+ self.controlResolverAddr, self.controlResolverPort))
+ yield self.sr1(question)
+
+
diff --git a/data/nettests/manipulation/http_header_field_manipulation.py b/data/nettests/manipulation/http_header_field_manipulation.py
new file mode 100644
index 0000000..509f4ef
--- /dev/null
+++ b/data/nettests/manipulation/http_header_field_manipulation.py
@@ -0,0 +1,189 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+import random
+import json
+import yaml
+
+from twisted.python import usage
+
+from ooni.utils import log, net, randomStr
+from ooni.templates import httpt
+from ooni.utils.txagentwithsocks import TrueHeaders
+
+def random_capitalization(string):
+ output = ""
+ original_string = string
+ string = string.swapcase()
+ for i in range(len(string)):
+ if random.randint(0, 1):
+ output += string[i].swapcase()
+ else:
+ output += string[i]
+ if original_string == output:
+ return random_capitalization(output)
+ else:
+ return output
+
+class UsageOptions(usage.Options):
+ optParameters = [
+ ['backend', 'b', 'http://127.0.0.1:57001',
+ 'URL of the backend to use for sending the requests'],
+ ['headers', 'h', None,
+ 'Specify a yaml formatted file from which to read the request headers to send']
+ ]
+
+class HTTPHeaderFieldManipulation(httpt.HTTPTest):
+ """
+ It performs HTTP requests with request headers that vary capitalization
+ towards a backend. If the headers reported by the server differ from
+ the ones we sent, then we have detected tampering.
+ """
+ name = "HTTP Header Field Manipulation"
+ author = "Arturo Filastò"
+ version = "0.1.3"
+
+ randomizeUA = False
+ usageOptions = UsageOptions
+
+ requiredOptions = ['backend']
+
+ def get_headers(self):
+ headers = {}
+ if self.localOptions['headers']:
+ try:
+ f = open(self.localOptions['headers'])
+ except IOError:
+ raise Exception("Specified input file does not exist")
+ content = ''.join(f.readlines())
+ f.close()
+ headers = yaml.safe_load(content)
+ return headers
+ else:
+ # XXX generate these from a random choice taken from whatheaders.com
+ # http://s3.amazonaws.com/data.whatheaders.com/whatheaders-latest.xml.zip
+ headers = {"User-Agent": [random.choice(net.userAgents)],
+ "Accept": ["text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"],
+ "Accept-Encoding": ["gzip,deflate,sdch"],
+ "Accept-Language": ["en-US,en;q=0.8"],
+ "Accept-Charset": ["ISO-8859-1,utf-8;q=0.7,*;q=0.3"],
+ "Host": [randomStr(15)+'.com']
+ }
+ return headers
+
+ def get_random_caps_headers(self):
+ headers = {}
+ normal_headers = self.get_headers()
+ for k, v in normal_headers.items():
+ new_key = random_capitalization(k)
+ headers[new_key] = v
+ return headers
+
+ def processInputs(self):
+ if self.localOptions['backend']:
+ self.url = self.localOptions['backend']
+ else:
+ raise Exception("No backend specified")
+
+ def processResponseBody(self, data):
+ self.check_for_tampering(data)
+
+ def check_for_tampering(self, data):
+ """
+ Here we do checks to verify if the request we made has been tampered
+ with. We have the following categories of tampering:
+
+ * **total** when the response is not a json object and therefore we were not
+ able to reach the ooniprobe test backend
+
+ * **request_line_capitalization** when the HTTP Request line (e.x. GET /
+ HTTP/1.1) does not match the capitalization we set.
+
+ * **header_field_number** when the number of headers we sent does not match
+ with the ones the backend received
+
+ * **header_name_capitalization** when the header field names do not match
+ those that we sent.
+
+ * **header_field_value** when the header field value does not match with the
+ one we transmitted.
+ """
+ log.msg("Checking for tampering on %s" % self.url)
+
+ self.report['tampering'] = {
+ 'total': False,
+ 'request_line_capitalization': False,
+ 'header_name_capitalization': False,
+ 'header_field_value': False,
+ 'header_field_number': False
+ }
+ try:
+ response = json.loads(data)
+ except ValueError:
+ self.report['tampering']['total'] = True
+ return
+
+ request_request_line = "%s / HTTP/1.1" % self.request_method
+
+ try:
+ response_request_line = response['request_line']
+ response_headers_dict = response['headers_dict']
+ except KeyError:
+ self.report['tampering']['total'] = True
+ return
+
+ if request_request_line != response_request_line:
+ self.report['tampering']['request_line_capitalization'] = True
+
+ request_headers = TrueHeaders(self.request_headers)
+ diff = request_headers.getDiff(TrueHeaders(response_headers_dict),
+ ignore=['Connection'])
+ if diff:
+ self.report['tampering']['header_name_capitalization'] = True
+ else:
+ self.report['tampering']['header_name_capitalization'] = False
+ self.report['tampering']['header_name_diff'] = list(diff)
+ log.msg(" total: %(total)s" % self.report['tampering'])
+ log.msg(" request_line_capitalization: %(request_line_capitalization)s" % self.report['tampering'])
+ log.msg(" header_name_capitalization: %(header_name_capitalization)s" % self.report['tampering'])
+ log.msg(" header_field_value: %(header_field_value)s" % self.report['tampering'])
+ log.msg(" header_field_number: %(header_field_number)s" % self.report['tampering'])
+
+ def test_get(self):
+ self.request_method = "GET"
+ self.request_headers = self.get_random_caps_headers()
+ return self.doRequest(self.url, self.request_method,
+ headers=self.request_headers)
+
+ def test_get_random_capitalization(self):
+ self.request_method = random_capitalization("GET")
+ self.request_headers = self.get_random_caps_headers()
+ return self.doRequest(self.url, self.request_method,
+ headers=self.request_headers)
+
+ def test_post(self):
+ self.request_method = "POST"
+ self.request_headers = self.get_headers()
+ return self.doRequest(self.url, self.request_method,
+ headers=self.request_headers)
+
+ def test_post_random_capitalization(self):
+ self.request_method = random_capitalization("POST")
+ self.request_headers = self.get_random_caps_headers()
+ return self.doRequest(self.url, self.request_method,
+ headers=self.request_headers)
+
+ def test_put(self):
+ self.request_method = "PUT"
+ self.request_headers = self.get_headers()
+ return self.doRequest(self.url, self.request_method,
+ headers=self.request_headers)
+
+ def test_put_random_capitalization(self):
+ self.request_method = random_capitalization("PUT")
+ self.request_headers = self.get_random_caps_headers()
+ return self.doRequest(self.url, self.request_method,
+ headers=self.request_headers)
+
diff --git a/data/nettests/manipulation/http_host.py b/data/nettests/manipulation/http_host.py
new file mode 100644
index 0000000..d95d836
--- /dev/null
+++ b/data/nettests/manipulation/http_host.py
@@ -0,0 +1,141 @@
+# -*- encoding: utf-8 -*-
+#
+# HTTP Host Test
+# **************
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+import json
+from twisted.python import usage
+
+from ooni.utils import randomStr, randomSTR
+
+from ooni.utils import log
+from ooni.templates import httpt
+
+class UsageOptions(usage.Options):
+ optParameters = [['backend', 'b', 'http://127.0.0.1:57001',
+ 'URL of the test backend to use. Should be \
+ listening on port 80 and be a \
+ HTTPReturnJSONHeadersHelper'],
+ ['content', 'c', None, 'The file to read \
+ from containing the content of a block page']]
+
+class HTTPHost(httpt.HTTPTest):
+ """
+ This test is aimed at detecting the presence of a transparent HTTP proxy
+ and enumerating the sites that are being censored by it.
+
+ It places inside of the Host header field the hostname of the site that is
+ to be tested for censorship and then determines if the probe is behind a
+ transparent HTTP proxy (because the response from the backend server does
+ not match) and if the site is censored, by checking if the page that it
+ got back matches the input block page.
+ """
+ name = "HTTP Host"
+ author = "Arturo Filastò"
+ version = "0.2.3"
+
+ randomizeUA = False
+ usageOptions = UsageOptions
+
+ inputFile = ['file', 'f', None,
+ 'List of hostnames to test for censorship']
+
+ requiredOptions = ['backend']
+
+ def test_filtering_prepend_newline_to_method(self):
+ headers = {}
+ headers["Host"] = [self.input]
+ return self.doRequest(self.localOptions['backend'], method="\nGET",
+ headers=headers)
+
+ def test_filtering_add_tab_to_host(self):
+ headers = {}
+ headers["Host"] = [self.input + '\t']
+ return self.doRequest(self.localOptions['backend'],
+ headers=headers)
+
+ def test_filtering_of_subdomain(self):
+ headers = {}
+ headers["Host"] = [randomStr(10) + '.' + self.input]
+ return self.doRequest(self.localOptions['backend'],
+ headers=headers)
+
+ def test_filtering_via_fuzzy_matching(self):
+ headers = {}
+ headers["Host"] = [randomStr(10) + self.input + randomStr(10)]
+ return self.doRequest(self.localOptions['backend'],
+ headers=headers)
+
+ def test_send_host_header(self):
+ """
+ Stuffs the HTTP Host header field with the site to be tested for
+ censorship and does an HTTP request of this kind to our backend.
+
+ We randomize the HTTP User Agent headers.
+ """
+ headers = {}
+ headers["Host"] = [self.input]
+ return self.doRequest(self.localOptions['backend'],
+ headers=headers)
+
+ def check_for_censorship(self, body):
+ """
+ If we have specified what a censorship page looks like here we will
+ check if the page we are looking at matches it.
+
+ XXX this is not tested, though it is basically what was used to detect
+ censorship in the palestine case.
+ """
+ if self.localOptions['content']:
+ self.report['censored'] = True
+ censorship_page = open(self.localOptions['content'])
+ response_page = iter(body.split("\n"))
+
+ for censorship_line in censorship_page.xreadlines():
+ response_line = response_page.next()
+ if response_line != censorship_line:
+ self.report['censored'] = False
+ break
+
+ censorship_page.close()
+ else:
+ self.report['censored'] = None
+
+ def processResponseBody(self, body):
+ """
+ XXX this is to be filled in with either a domclass based classifier or
+ with a rule that will allow us to detect that the body of the result is
+ that of a censored site.
+ """
+ # If we don't see a JSON object we know that something is wrong for
+ # sure
+ if not body.startswith("{"):
+ log.msg("This does not appear to be JSON")
+ self.report['transparent_http_proxy'] = True
+ self.check_for_censorship(body)
+ return
+ try:
+ content = json.loads(body)
+ except:
+ log.msg("The json does not parse, this is not what we expected")
+ self.report['transparent_http_proxy'] = True
+ self.check_for_censorship(body)
+ return
+
+ # We base the determination of the presence of a transparent HTTP
+ # proxy on the basis of the response containing the json that is to be
+ # returned by a HTTP Request Test Helper
+ if 'request_headers' in content and \
+ 'request_line' in content and \
+ 'headers_dict' in content:
+ log.msg("Found the keys I expected in %s" % content)
+ self.report['transparent_http_proxy'] = False
+ self.report['censored'] = False
+ else:
+ log.msg("Did not find the keys I expected in %s" % content)
+ self.report['transparent_http_proxy'] = True
+ self.check_for_censorship(body)
+
diff --git a/data/nettests/manipulation/http_invalid_request_line.py b/data/nettests/manipulation/http_invalid_request_line.py
new file mode 100644
index 0000000..2482282
--- /dev/null
+++ b/data/nettests/manipulation/http_invalid_request_line.py
@@ -0,0 +1,106 @@
+# -*- encoding: utf-8 -*-
+from twisted.python import usage
+
+from ooni.utils import log
+from ooni.utils import randomStr, randomSTR
+from ooni.templates import tcpt
+
+class UsageOptions(usage.Options):
+ optParameters = [['backend', 'b', '127.0.0.1',
+ 'The OONI backend that runs a TCP echo server'],
+ ['backendport', 'p', 80, 'Specify the port that the TCP echo server is running on (should only be set for debugging)']]
+
+class HTTPInvalidRequestLine(tcpt.TCPTest):
+ """
+ The goal of this test is to do some very basic and not very noisy fuzzing
+ on the HTTP request line. We generate a series of requests that are not
+ valid HTTP requests.
+
+ Unless stated otherwise, 'Xx'*N refers to 2*N random upper- or lowercase
+ ASCII letters or numbers ('XxXx' is 4 characters).
+ """
+ name = "HTTP Invalid Request Line"
+ version = "0.1.4"
+ authors = "Arturo Filastò"
+
+ usageOptions = UsageOptions
+ requiredOptions = ['backend']
+
+ def setUp(self):
+ self.port = int(self.localOptions['backendport'])
+ self.address = self.localOptions['backend']
+
+ def check_for_manipulation(self, response, payload):
+ log.debug("Checking if %s == %s" % (response, payload))
+ if response != payload:
+ self.report['tampering'] = True
+ else:
+ self.report['tampering'] = False
+
+ def test_random_invalid_method(self):
+ """
+ We test sending data to a TCP echo server listening on port 80, if what
+ we get back is not what we have sent then there is tampering going on.
+ This is for example what squid will return when performing such
+ request:
+
+ HTTP/1.0 400 Bad Request
+ Server: squid/2.6.STABLE21
+ Date: Sat, 23 Jul 2011 02:22:44 GMT
+ Content-Type: text/html
+ Content-Length: 1178
+ Expires: Sat, 23 Jul 2011 02:22:44 GMT
+ X-Squid-Error: ERR_INVALID_REQ 0
+ X-Cache: MISS from cache_server
+ X-Cache-Lookup: NONE from cache_server:3128
+ Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
+ Proxy-Connection: close
+
+ """
+ payload = randomSTR(4) + " / HTTP/1.1\n\r"
+
+ d = self.sendPayload(payload)
+ d.addCallback(self.check_for_manipulation, payload)
+ return d
+
+ def test_random_invalid_field_count(self):
+ """
+ This generates a request that looks like this:
+
+ XxXxX XxXxX XxXxX XxXxX
+
+ This may trigger some bugs in the HTTP parsers of transparent HTTP
+ proxies.
+ """
+ payload = ' '.join(randomStr(5) for x in range(4))
+ payload += "\n\r"
+
+ d = self.sendPayload(payload)
+ d.addCallback(self.check_for_manipulation, payload)
+ return d
+
+ def test_random_big_request_method(self):
+ """
+ This generates a request that looks like this:
+
+ Xx*512 / HTTP/1.1
+ """
+ payload = randomStr(1024) + ' / HTTP/1.1\n\r'
+
+ d = self.sendPayload(payload)
+ d.addCallback(self.check_for_manipulation, payload)
+ return d
+
+ def test_random_invalid_version_number(self):
+ """
+ This generates a request that looks like this:
+
+ GET / HTTP/XxX
+ """
+ payload = 'GET / HTTP/' + randomStr(3)
+ payload += '\n\r'
+
+ d = self.sendPayload(payload)
+ d.addCallback(self.check_for_manipulation, payload)
+ return d
+
diff --git a/data/nettests/manipulation/traceroute.py b/data/nettests/manipulation/traceroute.py
new file mode 100644
index 0000000..3f6f17b
--- /dev/null
+++ b/data/nettests/manipulation/traceroute.py
@@ -0,0 +1,143 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from twisted.python import usage
+from twisted.internet import defer
+
+from ooni.templates import scapyt
+
+from scapy.all import *
+
+from ooni.utils import log
+
+class UsageOptions(usage.Options):
+ optParameters = [
+ ['backend', 'b', '8.8.8.8', 'Test backend to use'],
+ ['timeout', 't', 5, 'The timeout for the traceroute test'],
+ ['maxttl', 'm', 30, 'The maximum value of ttl to set on packets'],
+ ['srcport', 'p', None, 'Set the source port to a specific value (only applies to TCP and UDP)']
+ ]
+
+class TracerouteTest(scapyt.BaseScapyTest):
+ name = "Multi Protocol Traceroute Test"
+ author = "Arturo Filastò"
+ version = "0.1.1"
+
+ usageOptions = UsageOptions
+ dst_ports = [0, 22, 23, 53, 80, 123, 443, 8080, 65535]
+
+ def setUp(self):
+ def get_sport(protocol):
+ if self.localOptions['srcport']:
+ return int(self.localOptions['srcport'])
+ else:
+ return random.randint(1024, 65535)
+
+ self.get_sport = get_sport
+
+ def max_ttl_and_timeout(self):
+ max_ttl = int(self.localOptions['maxttl'])
+ timeout = int(self.localOptions['timeout'])
+ self.report['max_ttl'] = max_ttl
+ self.report['timeout'] = timeout
+ return max_ttl, timeout
+
+
+ def postProcessor(self, report):
+ tcp_hops = report['test_tcp_traceroute']
+ udp_hops = report['test_udp_traceroute']
+ icmp_hops = report['test_icmp_traceroute']
+
+
+ def test_tcp_traceroute(self):
+ """
+ Does a traceroute to the destination by sending TCP SYN packets
+ with TTLs from 1 until max_ttl.
+ """
+ def finished(packets, port):
+ log.debug("Finished running TCP traceroute test on port %s" % port)
+ answered, unanswered = packets
+ self.report['hops_'+str(port)] = []
+ for snd, rcv in answered:
+ try:
+ sport = snd[TCP].sport
+ except IndexError:
+ log.err("Source port for this traceroute was not found. This is probably a bug")
+ sport = -1
+
+ report = {'ttl': snd.ttl,
+ 'address': rcv.src,
+ 'rtt': rcv.time - snd.time,
+ 'sport': sport
+ }
+ log.debug("%s: %s" % (port, report))
+ self.report['hops_'+str(port)].append(report)
+
+ dl = []
+ max_ttl, timeout = self.max_ttl_and_timeout()
+ for port in self.dst_ports:
+ packets = IP(dst=self.localOptions['backend'],
+ ttl=(1,max_ttl),id=RandShort())/TCP(flags=0x2, dport=port,
+ sport=self.get_sport('tcp'))
+
+ d = self.sr(packets, timeout=timeout)
+ d.addCallback(finished, port)
+ dl.append(d)
+ return defer.DeferredList(dl)
+
+ def test_udp_traceroute(self):
+ """
+ Does a traceroute to the destination by sending UDP packets with empty
+ payloads with TTLs from 1 until max_ttl.
+ """
+ def finished(packets, port):
+ log.debug("Finished running UDP traceroute test on port %s" % port)
+ answered, unanswered = packets
+ self.report['hops_'+str(port)] = []
+ for snd, rcv in answered:
+ report = {'ttl': snd.ttl,
+ 'address': rcv.src,
+ 'rtt': rcv.time - snd.time,
+ 'sport': snd[UDP].sport
+ }
+ log.debug("%s: %s" % (port, report))
+ self.report['hops_'+str(port)].append(report)
+ dl = []
+ max_ttl, timeout = self.max_ttl_and_timeout()
+ for port in self.dst_ports:
+ packets = IP(dst=self.localOptions['backend'],
+ ttl=(1,max_ttl),id=RandShort())/UDP(dport=port,
+ sport=self.get_sport('udp'))
+
+ d = self.sr(packets, timeout=timeout)
+ d.addCallback(finished, port)
+ dl.append(d)
+ return defer.DeferredList(dl)
+
+ def test_icmp_traceroute(self):
+ """
+ Does a traceroute to the destination by sending ICMP echo request
+ packets with TTLs from 1 until max_ttl.
+ """
+ def finished(packets):
+ log.debug("Finished running ICMP traceroute test")
+ answered, unanswered = packets
+ self.report['hops'] = []
+ for snd, rcv in answered:
+ report = {'ttl': snd.ttl,
+ 'address': rcv.src,
+ 'rtt': rcv.time - snd.time
+ }
+ log.debug("%s" % (report))
+ self.report['hops'].append(report)
+ dl = []
+ max_ttl, timeout = self.max_ttl_and_timeout()
+ packets = IP(dst=self.localOptions['backend'],
+ ttl=(1,max_ttl), id=RandShort())/ICMP()
+
+ d = self.sr(packets, timeout=timeout)
+ d.addCallback(finished)
+ return d
+
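As a minimal standalone sketch (not part of this patch) of the probing pattern the traceroute tests above rely on: scapy expands ttl=(1, max_ttl) into one probe per TTL, and each answering hop is identified by the source address and round-trip time of its reply. It assumes scapy is installed and root privileges; the destination address and port are placeholders.

    from scapy.all import IP, TCP, RandShort, sr

    max_ttl, timeout = 30, 5
    # One SYN probe per TTL value from 1 to max_ttl, all aimed at the same port.
    probes = IP(dst="192.0.2.1", ttl=(1, max_ttl), id=RandShort()) / \
             TCP(flags="S", dport=80, sport=RandShort())
    answered, unanswered = sr(probes, timeout=timeout, verbose=0)

    for snd, rcv in answered:
        # rcv.src is the hop that answered (ICMP time-exceeded or a TCP reply);
        # the RTT is the gap between the receive and send timestamps.
        print(snd.ttl, rcv.src, rcv.time - snd.time)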
diff --git a/data/nettests/scanning/__init__.py b/data/nettests/scanning/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/data/nettests/scanning/http_url_list.py b/data/nettests/scanning/http_url_list.py
new file mode 100644
index 0000000..0accaae
--- /dev/null
+++ b/data/nettests/scanning/http_url_list.py
@@ -0,0 +1,98 @@
+# -*- encoding: utf-8 -*-
+#
+# :authors: Arturo Filastò
+# :licence: see LICENSE
+
+from twisted.internet import defer
+from twisted.python import usage
+from ooni.templates import httpt
+from ooni.utils import log
+
+class UsageOptions(usage.Options):
+ optParameters = [['content', 'c', None,
+ 'The file to read from containing the content of a block page'],
+ ['url', 'u', None, 'Specify a single URL to test.']
+ ]
+
+class HTTPURLList(httpt.HTTPTest):
+ """
+ Performs GET, POST and PUT requests to a list of URLs specified as
+ input and checks if the page that we get back as a result matches that
+ of a block page given as input.
+
+ If no block page is given as input to the test it will simply collect the
+ responses to the HTTP requests and write them to a report file.
+ """
+ name = "HTTP URL List"
+ author = "Arturo Filastò"
+ version = "0.1.3"
+
+ usageOptions = UsageOptions
+
+ inputFile = ['file', 'f', None,
+ 'List of URLS to perform GET and POST requests to']
+
+ def setUp(self):
+ """
+ Check for inputs.
+ """
+ if self.input:
+ self.url = self.input
+ elif self.localOptions['url']:
+ self.url = self.localOptions['url']
+ else:
+ raise Exception("No input specified")
+
+ def check_for_content_censorship(self, body):
+ """
+ If we have specified what a censorship page looks like, we check here
+ whether the page we are looking at matches it.
+
+ XXX this is not tested, though it is basically what was used to detect
+ censorship in the Palestine case.
+ """
+ self.report['censored'] = True
+
+ censorship_page = open(self.localOptions['content']).xreadlines()
+ response_page = iter(body.split("\n"))
+
+ # We first align the two pages to the first HTML tag (something
+ # starting with <). This is useful so that we can give as input to this
+ # test something that comes from the output of curl -kis
+ # http://the_page/
+ for line in censorship_page:
+ if line.strip().startswith("<"):
+ break
+ for line in response_page:
+ if line.strip().startswith("<"):
+ break
+
+ for censorship_line in censorship_page:
+ try:
+ response_line = response_page.next()
+ except StopIteration:
+ # The censored page and the response we got do not match in
+ # length.
+ self.report['censored'] = False
+ break
+ censorship_line = censorship_line.replace("\n", "")
+ if response_line != censorship_line:
+ self.report['censored'] = False
+
+ censorship_page.close()
+
+ def processResponseBody(self, body):
+ if self.localOptions['content']:
+ log.msg("Checking for censorship in response body")
+ self.check_for_content_censorship(body)
+
+ def test_get(self):
+ return self.doRequest(self.url, method="GET")
+
+ def test_post(self):
+ return self.doRequest(self.url, method="POST")
+
+ def test_put(self):
+ return self.doRequest(self.url, method="PUT")
+
+
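A hedged, standalone sketch (not part of the patch) of the comparison strategy that check_for_content_censorship describes above: both the supplied block page and the fetched body are advanced to their first HTML tag, then compared line by line, and any length mismatch or differing line is treated as "not the block page". The function name and arguments are illustrative.

    def looks_like_block_page(block_page_lines, response_body):
        block_page = iter(block_page_lines)
        response_page = iter(response_body.split("\n"))

        # Skip both pages forward to the first line starting with "<", so the
        # output of e.g. `curl -kis http://host/` (status line plus headers)
        # can be used directly as the block-page input.
        for line in block_page:
            if line.strip().startswith("<"):
                break
        for line in response_page:
            if line.strip().startswith("<"):
                break

        for expected in block_page:
            try:
                got = next(response_page)
            except StopIteration:
                return False          # response is shorter than the block page
            if got != expected.rstrip("\n"):
                return False          # a differing line means no match
        return True

Called as looks_like_block_page(open("blockpage.html").readlines(), body), this mirrors the report['censored'] decision above in boolean form.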
diff --git a/data/nettests/third_party/Makefile b/data/nettests/third_party/Makefile
new file mode 100644
index 0000000..16adfe0
--- /dev/null
+++ b/data/nettests/third_party/Makefile
@@ -0,0 +1,3 @@
+fetch:
+ wget http://netalyzr.icsi.berkeley.edu/NetalyzrCLI.jar
+ chmod +x NetalyzrCLI.jar
diff --git a/data/nettests/third_party/README b/data/nettests/third_party/README
new file mode 100644
index 0000000..d9e435f
--- /dev/null
+++ b/data/nettests/third_party/README
@@ -0,0 +1,14 @@
+There is no license for NetalyzrCLI.jar, so while we include it here, it is
+provided purely for ease of use.
+
+We currently support interfacing with the ICSI Netalyzr system by wrapping
+the NetalyzrCLI.jar client. It was downloaded on August 5th, 2011 from the
+following URL:
+ http://netalyzr.icsi.berkeley.edu/NetalyzrCLI.jar
+
+More information about the client is available on the CLI web page:
+ http://netalyzr.icsi.berkeley.edu/cli.html
+
+After looking at NetalyzrCLI.jar, I discovered that '-d' runs it in a
+debugging mode that is quite useful for understanding their testing
+framework as it runs.
diff --git a/data/nettests/third_party/__init__.py b/data/nettests/third_party/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/data/nettests/third_party/netalyzr.py b/data/nettests/third_party/netalyzr.py
new file mode 100644
index 0000000..9b21831
--- /dev/null
+++ b/data/nettests/third_party/netalyzr.py
@@ -0,0 +1,58 @@
+# -*- encoding: utf-8 -*-
+#
+# This is a wrapper around the Netalyzr Java command-line client
+#
+# :authors: Jacob Appelbaum <jacob(a)appelbaum.net>
+# Arturo "hellais" Filastò <art(a)fuffa.org>
+# :licence: see LICENSE
+
+from ooni import nettest
+from ooni.utils import log
+import time
+import os
+from twisted.internet import reactor, threads, defer
+
+class NetalyzrWrapperTest(nettest.NetTestCase):
+ name = "NetalyzrWrapper"
+
+ def setUp(self):
+ cwd = os.path.abspath(os.path.join(os.path.abspath(__file__), '..'))
+
+ # XXX set the output directory to something more uniform
+ outputdir = os.path.join(cwd, '..', '..')
+
+ program_path = os.path.join(cwd, 'NetalyzrCLI.jar')
+ program = "java -jar %s -d" % program_path
+
+ test_token = time.asctime(time.gmtime()).replace(" ", "_").strip()
+
+ self.output_file = os.path.join(outputdir,
+ "NetalyzrCLI_" + test_token + ".out")
+ self.output_file = self.output_file.strip()
+ self.run_me = program + " 2>&1 >> " + self.output_file
+
+ def blocking_call(self):
+ try:
+ result = threads.blockingCallFromThread(reactor, os.system, self.run_me)
+ except:
+ log.debug("Netalyzr had an error, please see the log file: %s" % self.output_file)
+ finally:
+ self.clean_up()
+
+ def clean_up(self):
+ self.report['netalyzr_report'] = self.output_file
+ log.debug("finished running NetalyzrWrapper")
+ log.debug("Please check %s for Netalyzr output" % self.output_file)
+
+ def test_run_netalyzr(self):
+ """
+ This test simply wraps Netalyzr and runs it from the command line.
+ """
+ log.msg("Running NetalyzrWrapper (this will take some time, be patient)")
+ log.debug("with command '%s'" % self.run_me)
+ # XXX we probably want to use a processprotocol here to obtain the
+ # stdout from Netalyzr. This would allow us to visualize progress
+ # (currently there is no progress because the stdout of os.system is
+ # trapped by twisted) and to include the link to the netalyzr report
+ # directly in the OONI report, perhaps even downloading it.
+ reactor.callInThread(self.blocking_call)
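The XXX note above suggests a ProcessProtocol; below is a hedged sketch (not the author's implementation) of what that could look like with Twisted, so Netalyzr's stdout can be read as it is produced instead of being redirected to a file by os.system(). The jar path and arguments are assumptions.

    from twisted.internet import protocol, reactor

    class NetalyzrProtocol(protocol.ProcessProtocol):
        def __init__(self):
            self.output = []

        def outReceived(self, data):
            # stdout arrives incrementally; progress lines and the final
            # report URL could be parsed and logged from here.
            self.output.append(data)

        def processEnded(self, reason):
            print("Netalyzr exited: %s" % reason.value)
            reactor.stop()

    proto = NetalyzrProtocol()
    reactor.spawnProcess(proto, "java",
                         ["java", "-jar", "NetalyzrCLI.jar", "-d"], env=None)
    reactor.run()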
diff --git a/nettests/__init__.py b/nettests/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/nettests/blocking/__init__.py b/nettests/blocking/__init__.py
deleted file mode 100644
index 8b13789..0000000
--- a/nettests/blocking/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/nettests/blocking/dnsconsistency.py b/nettests/blocking/dnsconsistency.py
deleted file mode 100644
index 7b6e7b9..0000000
--- a/nettests/blocking/dnsconsistency.py
+++ /dev/null
@@ -1,173 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# dnsconsistency
-# **************
-#
-# The test reports censorship if the cardinality of the intersection of
-# the query result set from the control server and the query result set
-# from the experimental server is zero, which is to say, if the two sets
-# have no matching results whatsoever.
-#
-# NOTE: This test frequently results in false positives due to GeoIP-based
-# load balancing on major global sites such as google, facebook, and
-# youtube, etc.
-#
-# :authors: Arturo Filastò, Isis Lovecruft
-# :licence: see LICENSE
-
-import pdb
-
-from twisted.python import usage
-from twisted.internet import defer
-
-from ooni.templates import dnst
-
-from ooni import nettest
-from ooni.utils import log
-
-class UsageOptions(usage.Options):
- optParameters = [['backend', 'b', '8.8.8.8:53',
- 'The OONI backend that runs the DNS resolver'],
- ['testresolvers', 'T', None,
- 'File containing list of DNS resolvers to test against'],
- ['testresolver', 't', None,
- 'Specify a single test resolver to use for testing']
- ]
-
-class DNSConsistencyTest(dnst.DNSTest):
-
- name = "DNS Consistency"
- description = "DNS censorship detection test"
- version = "0.5"
- authors = "Arturo Filastò, Isis Lovecruft"
- requirements = None
-
- inputFile = ['file', 'f', None,
- 'Input file of list of hostnames to attempt to resolve']
-
- usageOptions = UsageOptions
- requiredOptions = ['backend', 'file']
-
- def setUp(self):
- if (not self.localOptions['testresolvers'] and \
- not self.localOptions['testresolver']):
- raise usage.UsageError("You did not specify a testresolver")
-
- elif self.localOptions['testresolvers']:
- test_resolvers_file = self.localOptions['testresolvers']
-
- elif self.localOptions['testresolver']:
- self.test_resolvers = [self.localOptions['testresolver']]
-
- try:
- with open(test_resolvers_file) as f:
- self.test_resolvers = [x.split('#')[0].strip() for x in f.readlines()]
- self.report['test_resolvers'] = self.test_resolvers
- f.close()
-
- except IOError, e:
- log.exception(e)
- raise usage.UsageError("Invalid test resolvers file")
-
- except NameError:
- log.debug("No test resolver file configured")
-
- dns_ip, dns_port = self.localOptions['backend'].split(':')
- self.control_dns_server = (dns_ip, int(dns_port))
-
- self.report['control_resolver'] = self.control_dns_server
-
- @defer.inlineCallbacks
- def test_a_lookup(self):
- """
- We perform an A lookup on the DNS test servers for the domains to be
- tested and an A lookup on the known good DNS server.
-
- We then compare the results from test_resolvers and that from
- control_resolver and see if they match up.
- If they match up then no censorship is happening (tampering: false).
-
- If they do not we do a reverse lookup (PTR) on the test_resolvers and
- the control resolver for every IP address we got back and check to see
- if anyone of them matches the control ones.
-
- If they do then we take note of the fact that censorship is probably not
- happening (tampering: reverse-match).
-
- If they do not match then censorship is probably going on (tampering:
- true).
- """
- log.msg("Doing the test lookups on %s" % self.input)
- list_of_ds = []
- hostname = self.input
-
- self.report['tampering'] = {}
-
- control_answers = yield self.performALookup(hostname, self.control_dns_server)
- if not control_answers:
- log.err("Got no response from control DNS server %s," \
- " perhaps the DNS resolver is down?" % self.control_dns_server[0])
- self.report['tampering'][self.control_dns_server] = 'no_answer'
- return
-
- for test_resolver in self.test_resolvers:
- log.msg("Testing resolver: %s" % test_resolver)
- test_dns_server = (test_resolver, 53)
-
- try:
- experiment_answers = yield self.performALookup(hostname, test_dns_server)
- except Exception, e:
- log.err("Problem performing the DNS lookup")
- log.exception(e)
- self.report['tampering'][test_resolver] = 'dns_lookup_error'
- continue
-
- if not experiment_answers:
- log.err("Got no response, perhaps the DNS resolver is down?")
- self.report['tampering'][test_resolver] = 'no_answer'
- continue
- else:
- log.debug("Got the following A lookup answers %s from %s" % (experiment_answers, test_resolver))
-
- def lookup_details():
- """
- A closure useful for printing test details.
- """
- log.msg("test resolver: %s" % test_resolver)
- log.msg("experiment answers: %s" % experiment_answers)
- log.msg("control answers: %s" % control_answers)
-
- log.debug("Comparing %s with %s" % (experiment_answers, control_answers))
- if set(experiment_answers) & set(control_answers):
- lookup_details()
- log.msg("tampering: false")
- self.report['tampering'][test_resolver] = False
- else:
- log.msg("Trying to do reverse lookup")
-
- experiment_reverse = yield self.performPTRLookup(experiment_answers[0], test_dns_server)
- control_reverse = yield self.performPTRLookup(control_answers[0], self.control_dns_server)
-
- if experiment_reverse == control_reverse:
- log.msg("Further testing has eliminated false positives")
- lookup_details()
- log.msg("tampering: reverse_match")
- self.report['tampering'][test_resolver] = 'reverse_match'
- else:
- log.msg("Reverse lookups do not match")
- lookup_details()
- log.msg("tampering: true")
- self.report['tampering'][test_resolver] = True
-
- def inputProcessor(self, filename=None):
- """
- This inputProcessor extracts domain names from urls
- """
- log.debug("Running dnsconsistency default processor")
- if filename:
- fp = open(filename)
- for x in fp.readlines():
- yield x.strip().split('//')[-1].split('/')[0]
- fp.close()
- else:
- pass
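As a worked illustration (not from the patch) of the decision rule described in the docstring above, and carried over unchanged into data/nettests/blocking/dnsconsistency.py: answers are considered consistent when the control and experiment A-record sets share at least one address; only when they do not does the test fall back to reverse lookups. The addresses below are hypothetical.

    control_answers = ["93.184.216.34", "93.184.216.35"]   # hypothetical control set
    experiment_answers = ["10.10.34.36"]                    # hypothetical test-resolver set

    if set(experiment_answers) & set(control_answers):
        tampering = False      # at least one address in common: consistent
    else:
        # Next step in the test above: PTR lookups on one address from each
        # set; equal reverse names are reported as 'reverse_match'.
        tampering = True
    print(tampering)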
diff --git a/nettests/blocking/http_requests.py b/nettests/blocking/http_requests.py
deleted file mode 100644
index 9208739..0000000
--- a/nettests/blocking/http_requests.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-import random
-from twisted.internet import defer
-from twisted.python import usage
-
-from ooni.utils import log
-from ooni.utils.net import userAgents
-from ooni.templates import httpt
-from ooni.errors import failureToString, handleAllFailures
-
-class UsageOptions(usage.Options):
- optParameters = [
- ['url', 'u', None, 'Specify a single URL to test.'],
- ['factor', 'f', 0.8, 'What factor should be used for triggering censorship (0.8 == 80%)']
- ]
-
-class HTTPRequestsTest(httpt.HTTPTest):
- """
- Performs a two GET requests to the set of sites to be tested for
- censorship, one over a known good control channel (Tor), the other over the
- test network.
-
- We check to see if the response headers match and if the response body
- lengths match.
- """
- name = "HTTP Requests Test"
- author = "Arturo Filastò"
- version = "0.2.3"
-
- usageOptions = UsageOptions
-
- inputFile = ['file', 'f', None,
- 'List of URLS to perform GET and POST requests to']
-
- # These values are used for determining censorship based on response body
- # lengths
- control_body_length = None
- experiment_body_length = None
-
- def setUp(self):
- """
- Check for inputs.
- """
- if self.input:
- self.url = self.input
- elif self.localOptions['url']:
- self.url = self.localOptions['url']
- else:
- raise Exception("No input specified")
-
- self.factor = self.localOptions['factor']
- self.report['control_failure'] = None
- self.report['experiment_failure'] = None
-
- def compare_body_lengths(self, body_length_a, body_length_b):
-
- if body_length_b == 0 and body_length_a != 0:
- rel = float(body_length_b)/float(body_length_a)
- elif body_length_b == 0 and body_length_a == 0:
- rel = float(1)
- else:
- rel = float(body_length_a)/float(body_length_b)
-
- if rel > 1:
- rel = 1/rel
-
- self.report['body_proportion'] = rel
- self.report['factor'] = self.factor
- if rel > self.factor:
- log.msg("The two body lengths appear to match")
- log.msg("censorship is probably not happening")
- self.report['body_length_match'] = True
- else:
- log.msg("The two body lengths appear to not match")
- log.msg("censorship could be happening")
- self.report['body_length_match'] = False
-
- def compare_headers(self, headers_a, headers_b):
- diff = headers_a.getDiff(headers_b)
- if diff:
- log.msg("Headers appear to *not* match")
- self.report['headers_diff'] = diff
- self.report['headers_match'] = False
- else:
- log.msg("Headers appear to match")
- self.report['headers_diff'] = diff
- self.report['headers_match'] = True
-
- def test_get(self):
- def callback(res):
- experiment, control = res
- experiment_succeeded, experiment_result = experiment
- control_succeeded, control_result = control
-
- if control_succeeded and experiment_succeeded:
- self.compare_body_lengths(len(experiment_result.body),
- len(control_result.body))
-
- self.compare_headers(control_result.headers,
- experiment_result.headers)
-
- if not control_succeeded:
- self.report['control_failure'] = failureToString(control_result)
-
- if not experiment_succeeded:
- self.report['experiment_failure'] = failureToString(experiment_result)
-
- headers = {'User-Agent': [random.choice(userAgents)]}
-
- l = []
- log.msg("Performing GET request to %s" % self.url)
- experiment_request = self.doRequest(self.url, method="GET",
- headers=headers)
-
- log.msg("Performing GET request to %s via Tor" % self.url)
- control_request = self.doRequest(self.url, method="GET",
- use_tor=True, headers=headers)
-
- l.append(experiment_request)
- l.append(control_request)
-
- dl = defer.DeferredList(l, consumeErrors=True)
- dl.addCallback(callback)
-
- return dl
-
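A small worked example (not from the patch) of the body-length heuristic implemented in compare_body_lengths above: the ratio of the two body lengths is folded into the range [0, 1] and compared against the --factor threshold, 0.8 by default.

    def body_proportion(len_a, len_b):
        if len_b == 0 and len_a != 0:
            rel = 0.0
        elif len_b == 0 and len_a == 0:
            rel = 1.0
        else:
            rel = float(len_a) / float(len_b)
        return 1 / rel if rel > 1 else rel

    # 9500-byte body vs 10000-byte control -> 0.95 >= 0.8: lengths match
    # 2100-byte body vs 10000-byte control -> 0.21 <  0.8: possible censorship
    print(body_proportion(9500, 10000), body_proportion(2100, 10000))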
diff --git a/nettests/blocking/tcpconnect.py b/nettests/blocking/tcpconnect.py
deleted file mode 100644
index 3b22427..0000000
--- a/nettests/blocking/tcpconnect.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# -*- encoding: utf-8 -*-
-from twisted.internet.protocol import Factory, Protocol
-from twisted.internet.endpoints import TCP4ClientEndpoint
-
-from twisted.internet.error import ConnectionRefusedError
-from twisted.internet.error import TCPTimedOutError, TimeoutError
-
-from ooni import nettest
-from ooni.errors import handleAllFailures
-from ooni.utils import log
-
-class TCPFactory(Factory):
- def buildProtocol(self, addr):
- return Protocol()
-
-class TCPConnectTest(nettest.NetTestCase):
- name = "TCP Connect"
- author = "Arturo Filastò"
- version = "0.1"
- inputFile = ['file', 'f', None,
- 'File containing the IP:PORT combinations to be tested, one per line']
-
- requiredOptions = ['file']
- def test_connect(self):
- """
- This test performs a TCP connection to the remote host on the specified port.
- The report will contain the string 'success' if the test has
- succeeded, or the reason for the failure if it has failed.
- """
- host, port = self.input.split(":")
- def connectionSuccess(protocol):
- protocol.transport.loseConnection()
- log.debug("Got a connection to %s" % self.input)
- self.report["connection"] = 'success'
-
- def connectionFailed(failure):
- self.report['connection'] = handleAllFailures(failure)
-
- from twisted.internet import reactor
- point = TCP4ClientEndpoint(reactor, host, int(port))
- d = point.connect(TCPFactory())
- d.addCallback(connectionSuccess)
- d.addErrback(connectionFailed)
- return d
-
diff --git a/nettests/examples/__init__.py b/nettests/examples/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/nettests/examples/example_dns_http.py b/nettests/examples/example_dns_http.py
deleted file mode 100644
index 9b76775..0000000
--- a/nettests/examples/example_dns_http.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from twisted.internet import defer
-from ooni.templates import httpt, dnst
-
-class TestDNSandHTTP(httpt.HTTPTest, dnst.DNSTest):
-
- @defer.inlineCallbacks
- def test_http_and_dns(self):
- yield self.doRequest('http://torproject.org')
- yield self.performALookup('torproject.org', ('8.8.8.8', 53))
-
-
diff --git a/nettests/examples/example_dnst.py b/nettests/examples/example_dnst.py
deleted file mode 100644
index 6905637..0000000
--- a/nettests/examples/example_dnst.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from ooni.templates import dnst
-
-class ExampleDNSTest(dnst.DNSTest):
- inputFile = ['file', 'f', None, 'foobar']
-
- def test_a_lookup(self):
- def gotResult(result):
- # Result is an array containing all the A record lookup results
- print result
-
- d = self.performALookup('torproject.org', ('8.8.8.8', 53))
- d.addCallback(gotResult)
- return d
diff --git a/nettests/examples/example_http_checksum.py b/nettests/examples/example_http_checksum.py
deleted file mode 100644
index 9226b52..0000000
--- a/nettests/examples/example_http_checksum.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Aaron Gibson
-# :licence: see LICENSE
-
-from ooni.utils import log
-from ooni.templates import httpt
-from hashlib import sha256
-
-class SHA256HTTPBodyTest(httpt.HTTPTest):
- name = "ChecksumHTTPBodyTest"
- author = "Aaron Gibson"
- version = 0.1
-
- inputFile = ['file', 'f', None,
- 'List of URLS to perform GET requests to']
-
- def test_http(self):
- if self.input:
- url = self.input
- return self.doRequest(url)
- else:
- raise Exception("No input specified")
-
- def processResponseBody(self, body):
- body_sha256sum = sha256(body).digest()
- self.report['checksum'] = body_sha256sum
diff --git a/nettests/examples/example_httpt.py b/nettests/examples/example_httpt.py
deleted file mode 100644
index e76aed4..0000000
--- a/nettests/examples/example_httpt.py
+++ /dev/null
@@ -1,36 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from ooni.utils import log
-from ooni.templates import httpt
-
-class ExampleHTTP(httpt.HTTPTest):
- name = "Example HTTP Test"
- author = "Arturo Filastò"
- version = 0.1
-
- inputs = ['http://google.com/', 'http://wikileaks.org/',
- 'http://torproject.org/']
-
- def test_http(self):
- if self.input:
- url = self.input
- return self.doRequest(url)
- else:
- raise Exception("No input specified")
-
- def processResponseBody(self, body):
- # XXX here shall go your logic
- # for processing the body
- if 'blocked' in body:
- self.report['censored'] = True
- else:
- self.report['censored'] = False
-
- def processResponseHeaders(self, headers):
- # XXX place in here all the logic for handling the processing of HTTP
- # Headers.
- pass
-
diff --git a/nettests/examples/example_myip.py b/nettests/examples/example_myip.py
deleted file mode 100644
index 70cf773..0000000
--- a/nettests/examples/example_myip.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from ooni.templates import httpt
-class MyIP(httpt.HTTPTest):
- inputs = ['https://check.torproject.org']
-
- def test_lookup(self):
- return self.doRequest(self.input)
-
- def processResponseBody(self, body):
- import re
- regexp = "Your IP address appears to be: <b>(.+?)<\/b>"
- match = re.search(regexp, body)
- try:
- self.report['myip'] = match.group(1)
- except:
- self.report['myip'] = None
-
diff --git a/nettests/examples/example_scapyt.py b/nettests/examples/example_scapyt.py
deleted file mode 100644
index ba04072..0000000
--- a/nettests/examples/example_scapyt.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :licence: see LICENSE
-
-from twisted.python import usage
-
-from scapy.all import IP, ICMP
-
-from ooni.templates import scapyt
-
-class UsageOptions(usage.Options):
- optParameters = [['target', 't', '8.8.8.8', "Specify the target to ping"]]
-
-class ExampleICMPPingScapy(scapyt.BaseScapyTest):
- name = "Example ICMP Ping Test"
-
- usageOptions = UsageOptions
-
- def test_icmp_ping(self):
- def finished(packets):
- print packets
- answered, unanswered = packets
- for snd, rcv in answered:
- rcv.show()
-
- packets = IP(dst=self.localOptions['target'])/ICMP()
- d = self.sr(packets)
- d.addCallback(finished)
- return d
diff --git a/nettests/examples/example_scapyt_yield.py b/nettests/examples/example_scapyt_yield.py
deleted file mode 100644
index 311b5aa..0000000
--- a/nettests/examples/example_scapyt_yield.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :licence: see LICENSE
-
-from twisted.python import usage
-from twisted.internet import defer
-
-from scapy.all import IP, ICMP
-
-from ooni.templates import scapyt
-
-class UsageOptions(usage.Options):
- optParameters = [['target', 't', '8.8.8.8', "Specify the target to ping"]]
-
-class ExampleICMPPingScapyYield(scapyt.BaseScapyTest):
- name = "Example ICMP Ping Test"
-
- usageOptions = UsageOptions
-
- @defer.inlineCallbacks
- def test_icmp_ping(self):
- packets = IP(dst=self.localOptions['target'])/ICMP()
- answered, unanswered = yield self.sr(packets)
- for snd, rcv in answered:
- rcv.show()
diff --git a/nettests/examples/example_simple.py b/nettests/examples/example_simple.py
deleted file mode 100644
index 24de5a6..0000000
--- a/nettests/examples/example_simple.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from twisted.internet import defer
-from ooni import nettest
-
-class MyIP(nettest.NetTestCase):
- def test_simple(self):
- self.report['foobar'] = 'antani'
- return defer.succeed(42)
-
diff --git a/nettests/examples/example_tcpt.py b/nettests/examples/example_tcpt.py
deleted file mode 100644
index 613160b..0000000
--- a/nettests/examples/example_tcpt.py
+++ /dev/null
@@ -1,21 +0,0 @@
-
-from twisted.internet.error import ConnectionRefusedError
-from ooni.utils import log
-from ooni.templates import tcpt
-
-class ExampleTCPT(tcpt.TCPTest):
- def test_hello_world(self):
- def got_response(response):
- print "Got this data %s" % response
-
- def connection_failed(failure):
- failure.trap(ConnectionRefusedError)
- print "Connection Refused"
-
- self.address = "127.0.0.1"
- self.port = 57002
- payload = "Hello World!\n\r"
- d = self.sendPayload(payload)
- d.addErrback(connection_failed)
- d.addCallback(got_response)
- return d
diff --git a/nettests/experimental/__init__.py b/nettests/experimental/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/nettests/experimental/bridge_reachability/bridget.py b/nettests/experimental/bridge_reachability/bridget.py
deleted file mode 100644
index acf3dff..0000000
--- a/nettests/experimental/bridge_reachability/bridget.py
+++ /dev/null
@@ -1,462 +0,0 @@
-#!/usr/bin/env python
-# -*- encoding: utf-8 -*-
-#
-# +-----------+
-# | BRIDGET |
-# | +--------------------------------------------+
-# +--------| Use a Tor process to test making a Tor |
-# | connection to a list of bridges or relays. |
-# +--------------------------------------------+
-#
-# :authors: Isis Lovecruft, Arturo Filasto
-# :licence: see included LICENSE
-# :version: 0.1.0-alpha
-
-from __future__ import with_statement
-from functools import partial
-from random import randint
-
-import os
-import sys
-
-from twisted.python import usage
-from twisted.internet import defer, error, reactor
-
-from ooni import nettest
-
-from ooni.utils import log, date
-from ooni.utils.config import ValueChecker
-
-from ooni.utils.onion import TxtorconImportError
-from ooni.utils.onion import PTNoBridgesException, PTNotFoundException
-
-
-try:
- from ooni.utils.onion import parse_data_dir
-except:
- log.msg("Please go to /ooni/lib and do 'make txtorcon' to run this test!")
-
-class MissingAssetException(Exception):
- pass
-
-class RandomPortException(Exception):
- """Raised when using a random port conflicts with configured ports."""
- def __init__(self):
- log.msg("Unable to use random and specific ports simultaneously")
- return sys.exit()
-
-class BridgetArgs(usage.Options):
- """Commandline options."""
- allowed = "Port to use for Tor's %s, must be between 1024 and 65535."
- sock_check = ValueChecker(allowed % "SocksPort").port_check
- ctrl_check = ValueChecker(allowed % "ControlPort").port_check
-
- optParameters = [
- ['bridges', 'b', None,
- 'File listing bridge IP:ORPorts to test'],
- ['relays', 'f', None,
- 'File listing relay IPs to test'],
- ['socks', 's', 9049, None, sock_check],
- ['control', 'c', 9052, None, ctrl_check],
- ['torpath', 'p', None,
- 'Path to the Tor binary to use'],
- ['datadir', 'd', None,
- 'Tor DataDirectory to use'],
- ['transport', 't', None,
- 'Tor ClientTransportPlugin'],
- ['resume', 'r', 0,
- 'Resume at this index']]
- optFlags = [['random', 'x', 'Use random ControlPort and SocksPort']]
-
- def postOptions(self):
- if not self['bridges'] and not self['relays']:
- raise MissingAssetException(
- "Bridget can't run without bridges or relays to test!")
- if self['transport']:
- ValueChecker.uid_check(
- "Can't run bridget as root with pluggable transports!")
- if not self['bridges']:
- raise PTNoBridgesException
- if self['socks'] or self['control']:
- if self['random']:
- raise RandomPortException
- if self['datadir']:
- ValueChecker.dir_check(self['datadir'])
- if self['torpath']:
- ValueChecker.file_check(self['torpath'])
-
-class BridgetTest(nettest.NetTestCase):
- """
- XXX fill me in
-
- :ivar config:
- An :class:`ooni.lib.txtorcon.TorConfig` instance.
- :ivar relays:
- A list of all provided relays to test.
- :ivar bridges:
- A list of all provided bridges to test.
- :ivar socks_port:
- Integer for Tor's SocksPort.
- :ivar control_port:
- Integer for Tor's ControlPort.
- :ivar transport:
- String defining the Tor's ClientTransportPlugin, for testing
- a bridge's pluggable transport functionality.
- :ivar tor_binary:
- Path to the Tor binary to use, e.g. \'/usr/sbin/tor\'
- """
- name = "bridget"
- author = "Isis Lovecruft <isis(a)torproject.org>"
- version = "0.1"
- description = "Use a Tor process to test connecting to bridges or relays"
- usageOptions = BridgetArgs
-
- def setUp(self):
- """
- Extra initialization steps. We only want one child Tor process
- running, so we need to deal with most of the TorConfig() only once,
- before the experiment runs.
- """
- self.socks_port = 9049
- self.control_port = 9052
- self.circuit_timeout = 90
- self.tor_binary = '/usr/sbin/tor'
- self.data_directory = None
-
- def read_from_file(filename):
- log.msg("Loading information from %s ..." % opt)
- with open(filename) as fp:
- lst = []
- for line in fp.readlines():
- if line.startswith('#'):
- continue
- else:
- lst.append(line.replace('\n',''))
- return lst
-
- def __count_remaining__(which):
- total, reach, unreach = map(lambda x: which[x],
- ['all', 'reachable', 'unreachable'])
- count = len(total) - reach() - unreach()
- return count
-
- ## XXX should we do report['bridges_up'].append(self.bridges['current'])
- self.bridges = {}
- self.bridges['all'], self.bridges['up'], self.bridges['down'] = \
- ([] for i in range(3))
- self.bridges['reachable'] = lambda: len(self.bridges['up'])
- self.bridges['unreachable'] = lambda: len(self.bridges['down'])
- self.bridges['remaining'] = lambda: __count_remaining__(self.bridges)
- self.bridges['current'] = None
- self.bridges['pt_type'] = None
- self.bridges['use_pt'] = False
-
- self.relays = {}
- self.relays['all'], self.relays['up'], self.relays['down'] = \
- ([] for i in range(3))
- self.relays['reachable'] = lambda: len(self.relays['up'])
- self.relays['unreachable'] = lambda: len(self.relays['down'])
- self.relays['remaining'] = lambda: __count_remaining__(self.relays)
- self.relays['current'] = None
-
- if self.localOptions:
- try:
- from txtorcon import TorConfig
- except ImportError:
- raise TxtorconImportError
- else:
- self.config = TorConfig()
- finally:
- options = self.localOptions
-
- if options['bridges']:
- self.config.UseBridges = 1
- self.bridges['all'] = read_from_file(options['bridges'])
- if options['relays']:
- ## first hop must be in TorState().guards
- # XXX where is this defined?
- self.config.EntryNodes = ','.join(relay_list)
- self.relays['all'] = read_from_file(options['relays'])
- if options['socks']:
- self.socks_port = options['socks']
- if options['control']:
- self.control_port = options['control']
- if options['random']:
- log.msg("Using randomized ControlPort and SocksPort ...")
- self.socks_port = randint(1024, 2**16)
- self.control_port = randint(1024, 2**16)
- if options['torpath']:
- self.tor_binary = options['torpath']
- if options['datadir']:
- self.data_directory = parse_data_dir(options['datadir'])
- if options['transport']:
- ## ClientTransportPlugin transport exec pathtobinary [options]
- ## XXX we need a better way to deal with all PTs
- log.msg("Using ClientTransportPlugin %s" % options['transport'])
- self.bridges['use_pt'] = True
- [self.bridges['pt_type'], pt_exec] = \
- options['transport'].split(' ', 1)
-
- if self.bridges['pt_type'] == "obfs2":
- self.config.ClientTransportPlugin = \
- self.bridges['pt_type'] + " " + pt_exec
- else:
- raise PTNotFoundException
-
- self.config.SocksPort = self.socks_port
- self.config.ControlPort = self.control_port
- self.config.CookieAuthentication = 1
-
- def test_bridget(self):
- """
- if bridges:
- 1. configure first bridge line
- 2a. configure data_dir, if it doesn't exist
- 2b. write torrc to a tempfile in data_dir
- 3. start tor } if any of these
- 4. remove bridges which are public relays } fail, add current
- 5. SIGHUP for each bridge } bridge to unreach-
- } able bridges.
- if relays:
- 1a. configure the data_dir, if it doesn't exist
- 1b. write torrc to a tempfile in data_dir
- 2. start tor
- 3. remove any of our relays which are already part of current
- circuits
- 4a. attach CustomCircuit() to self.state
- 4b. RELAY_EXTEND for each relay } if this fails, add
- } current relay to list
- } of unreachable relays
- 5.
- if bridges and relays:
- 1. configure first bridge line
- 2a. configure data_dir if it doesn't exist
- 2b. write torrc to a tempfile in data_dir
- 3. start tor
- 4. remove bridges which are public relays
- 5. remove any of our relays which are already part of current
- circuits
- 6a. attach CustomCircuit() to self.state
- 6b. for each bridge, build three circuits, with three
- relays each
- 6c. RELAY_EXTEND for each relay } if this fails, add
- } current relay to list
- } of unreachable relays
-
- :param args:
- The :class:`BridgetAsset` line currently being used. Except that it
- The :class:`BridgetAsset` line currently being used. Except that in
- Bridget it isn't, so it should be ignored and avoided.
- try:
- from ooni.utils import process
- from ooni.utils.onion import remove_public_relays, start_tor
- from ooni.utils.onion import start_tor_filter_nodes
- from ooni.utils.onion import setup_fail, setup_done
- from ooni.utils.onion import CustomCircuit
- from ooni.utils.timer import deferred_timeout, TimeoutError
- from ooni.lib.txtorcon import TorConfig, TorState
- except ImportError:
- raise TxtorconImportError
- except TxtorconImportError, tie:
- log.err(tie)
- sys.exit()
-
- def reconfigure_done(state, bridges):
- """
- Append :ivar:`bridges['current']` to the list
- :ivar:`bridges['up'].
- """
- log.msg("Reconfiguring with 'Bridge %s' successful"
- % bridges['current'])
- bridges['up'].append(bridges['current'])
- return state
-
- def reconfigure_fail(state, bridges):
- """
- Append :ivar:`bridges['current']` to the list
- :ivar:`bridges['down'].
- """
- log.msg("Reconfiguring TorConfig with parameters %s failed"
- % state)
- bridges['down'].append(bridges['current'])
- return state
-
- @defer.inlineCallbacks
- def reconfigure_bridge(state, bridges):
- """
- Rewrite the Bridge line in our torrc. If use of pluggable
- transports was specified, rewrite the line as:
- Bridge <transport_type> <IP>:<ORPort>
- Otherwise, rewrite in the standard form:
- Bridge <IP>:<ORPort>
-
- :param state:
- A fully bootstrapped instance of
- :class:`ooni.lib.txtorcon.TorState`.
- :param bridges:
- A dictionary of bridges containing the following keys:
-
- bridges['remaining'] :: A function returning and int for the
- number of remaining bridges to test.
- bridges['current'] :: A string containing the <IP>:<ORPort>
- of the current bridge.
- bridges['use_pt'] :: A boolean, True if we're testing
- bridges with a pluggable transport;
- False otherwise.
- bridges['pt_type'] :: If :ivar:`bridges['use_pt'] is True,
- this is a string containing the type
- of pluggable transport to test.
- :return:
- :param:`state`
- """
- log.msg("Current Bridge: %s" % bridges['current'])
- log.msg("We now have %d bridges remaining to test..."
- % bridges['remaining']())
- try:
- if bridges['use_pt'] is False:
- controller_response = yield state.protocol.set_conf(
- 'Bridge', bridges['current'])
- elif bridges['use_pt'] and bridges['pt_type'] is not None:
- controller_reponse = yield state.protocol.set_conf(
- 'Bridge', bridges['pt_type'] +' '+ bridges['current'])
- else:
- raise PTNotFoundException
-
- if controller_response == 'OK':
- finish = yield reconfigure_done(state, bridges)
- else:
- log.err("SETCONF for %s responded with error:\n %s"
- % (bridges['current'], controller_response))
- finish = yield reconfigure_fail(state, bridges)
-
- defer.returnValue(finish)
-
- except Exception, e:
- log.err("Reconfiguring torrc with Bridge line %s failed:\n%s"
- % (bridges['current'], e))
- defer.returnValue(None)
-
- def attacher_extend_circuit(attacher, deferred, router):
- ## XXX todo write me
- ## state.attacher.extend_circuit
- raise NotImplemented
- #attacher.extend_circuit
-
- def state_attach(state, path):
- log.msg("Setting up custom circuit builder...")
- attacher = CustomCircuit(state)
- state.set_attacher(attacher, reactor)
- state.add_circuit_listener(attacher)
- return state
-
- ## OLD
- #for circ in state.circuits.values():
- # for relay in circ.path:
- # try:
- # relay_list.remove(relay)
- # except KeyError:
- # continue
- ## XXX how do we attach to circuits with bridges?
- d = defer.Deferred()
- attacher.request_circuit_build(d)
- return d
-
- def state_attach_fail(state):
- log.err("Attaching custom circuit builder failed: %s" % state)
-
- log.msg("Bridget: initiating test ... ") ## Start the experiment
-
- ## if we've at least one bridge, and our config has no 'Bridge' line
- if self.bridges['remaining']() >= 1 \
- and not 'Bridge' in self.config.config:
-
- ## configure our first bridge line
- self.bridges['current'] = self.bridges['all'][0]
- self.config.Bridge = self.bridges['current']
- ## avoid starting several
- self.config.save() ## processes
- assert self.config.config.has_key('Bridge'), "No Bridge Line"
-
- ## start tor and remove bridges which are public relays
- from ooni.utils.onion import start_tor_filter_nodes
- state = start_tor_filter_nodes(reactor, self.config,
- self.control_port, self.tor_binary,
- self.data_directory, self.bridges)
- #controller = defer.Deferred()
- #controller.addCallback(singleton_semaphore, tor)
- #controller.addErrback(setup_fail)
- #bootstrap = defer.gatherResults([controller, filter_bridges],
- # consumeErrors=True)
-
- if state is not None:
- log.debug("state:\n%s" % state)
- log.debug("Current callbacks on TorState():\n%s"
- % state.callbacks)
-
- ## if we've got more bridges
- if self.bridges['remaining']() >= 2:
- #all = []
- for bridge in self.bridges['all'][1:]:
- self.bridges['current'] = bridge
- #new = defer.Deferred()
- #new.addCallback(reconfigure_bridge, state, self.bridges)
- #all.append(new)
- #check_remaining = defer.DeferredList(all, consumeErrors=True)
- #state.chainDeferred(check_remaining)
- state.addCallback(reconfigure_bridge, self.bridges)
-
- if self.relays['remaining']() > 0:
- while self.relays['remaining']() >= 3:
- #path = list(self.relays.pop() for i in range(3))
- #log.msg("Trying path %s" % '->'.join(map(lambda node:
- # node, path)))
- self.relays['current'] = self.relays['all'].pop()
- for circ in state.circuits.values():
- for node in circ.path:
- if node == self.relays['current']:
- self.relays['up'].append(self.relays['current'])
- if len(circ.path) < 3:
- try:
- ext = attacher_extend_circuit(state.attacher, circ,
- self.relays['current'])
- ext.addCallback(attacher_extend_circuit_done,
- state.attacher, circ,
- self.relays['current'])
- except Exception, e:
- log.err("Extend circuit failed: %s" % e)
- else:
- continue
-
- #state.callback(all)
- #self.reactor.run()
- return state
-
- def disabled_startTest(self, args):
- """
- Local override of :meth:`OONITest.startTest` to bypass calling
- self.control.
-
- :param args:
- The current line of :class:`Asset`, not used but kept for
- compatibility reasons.
- :return:
- A fired deferred which callbacks :meth:`experiment` and
- :meth:`OONITest.finished`.
- """
- self.start_time = date.now()
- self.d = self.experiment(args)
- self.d.addErrback(log.err)
- self.d.addCallbacks(self.finished, log.err)
- return self.d
-
-## ISIS' NOTES
-## -----------
-## TODO:
-## x cleanup documentation
-## x add DataDirectory option
-## x check if bridges are public relays
-## o take bridge_desc file as input, also be able to give same
-## format as output
-## x Add asynchronous timeout for deferred, so that we don't wait
-## o Add asynchronous timeout for deferred, so that we don't wait
-## forever for bridges that don't work.
diff --git a/nettests/experimental/bridge_reachability/echo.py b/nettests/experimental/bridge_reachability/echo.py
deleted file mode 100644
index d4033dd..0000000
--- a/nettests/experimental/bridge_reachability/echo.py
+++ /dev/null
@@ -1,132 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-#
-# +---------+
-# | echo.py |
-# +---------+
-# A simple ICMP-8 ping test.
-#
-# @authors: Isis Lovecruft, <isis(a)torproject.org>
-# @version: 0.0.2-pre-alpha
-# @license: copyright (c) 2012 Isis Lovecruft
-# see attached LICENCE file
-#
-
-import os
-import sys
-
-from twisted.python import usage
-from twisted.internet import reactor, defer
-from ooni import nettest
-from ooni.utils import log, net, Storage, txscapy
-
-try:
- from scapy.all import IP, ICMP
- from scapy.all import sr1
- from ooni.lib import txscapy
- from ooni.lib.txscapy import txsr, txsend
- from ooni.templates.scapyt import BaseScapyTest
-except:
- log.msg("This test requires scapy, see www.secdev.org/projects/scapy")
-
-class UsageOptions(usage.Options):
- optParameters = [
- ['dst', 'd', None, 'Host IP to ping'],
- ['file', 'f', None, 'File of list of IPs to ping'],
- ['interface', 'i', None, 'Network interface to use'],
- ['count', 'c', 1, 'Number of packets to send', int],
- ['size', 's', 56, 'Number of bytes to send in ICMP data field', int],
- ['ttl', 'l', 25, 'Set the IP Time to Live', int],
- ['timeout', 't', 2, 'Seconds until timeout if no response', int],
- ['pcap', 'p', None, 'Save pcap to this file'],
- ['receive', 'r', True, 'Receive response packets']]
-
-class EchoTest(nettest.NetTestCase):
- """
- xxx fill me in
- """
- name = 'echo'
- author = 'Isis Lovecruft <isis(a)torproject.org>'
- description = 'A simple ping test to see if a host is reachable.'
- version = '0.0.2'
- requiresRoot = True
-
- usageOptions = UsageOptions
- #requiredOptions = ['dst']
-
- def setUp(self, *a, **kw):
- self.destinations = {}
-
- if self.localOptions:
- for key, value in self.localOptions.items():
- log.debug("setting self.%s = %s" % (key, value))
- setattr(self, key, value)
-
- self.timeout *= 1000 ## convert to milliseconds
-
- if not self.interface:
- try:
- iface = txscapy.getDefaultIface()
- except Exception, e:
- log.msg("No network interface specified!")
- log.err(e)
- else:
- log.msg("Using system default interface: %s" % iface)
- self.interface = iface
-
- if self.pcap:
- try:
- self.pcapfile = open(self.pcap, 'a+')
- except:
- log.msg("Unable to write to pcap file %s" % self.pcap)
- else:
- self.pcap = net.capturePacket(self.pcapfile)
-
- if not self.dst:
- if self.file:
- self.dstProcessor(self.file)
- for key, value in self.destinations.items():
- for label, data in value.items():
- if not 'ans' in data:
- self.dst = label
- else:
- self.addDest(self.dst)
- log.debug("self.dst is now: %s" % self.dst)
-
- log.debug("Initialization of %s test completed." % self.name)
-
- def addDest(self, dest):
- d = dest.strip()
- self.destinations[d] = {'dst_ip': d}
-
- def dstProcessor(self, inputfile):
- from ipaddr import IPAddress
-
- if os.path.isfile(inputfile):
- with open(inputfile) as f:
- for line in f.readlines():
- if line.startswith('#'):
- continue
- self.addDest(line)
-
- def test_icmp(self):
- def process_response(echo_reply, dest):
- ans, unans = echo_reply
- if ans:
- log.msg("Received echo reply from %s: %s" % (dest, ans))
- else:
- log.msg("No reply was received from %s. Possible censorship event." % dest)
- log.debug("Unanswered packets: %s" % unans)
- self.report[dest] = echo_reply
-
- for label, data in self.destinations.items():
- reply = sr1(IP(dst=label)/ICMP())
- process = process_response(reply, label)
-
- #(ans, unans) = ping
- #self.destinations[self.dst].update({'ans': ans,
- # 'unans': unans,
- # 'response_packet': ping})
- #return ping
-
- #return reply
diff --git a/nettests/experimental/chinatrigger.py b/nettests/experimental/chinatrigger.py
deleted file mode 100644
index de1f64d..0000000
--- a/nettests/experimental/chinatrigger.py
+++ /dev/null
@@ -1,108 +0,0 @@
-import random
-import string
-import struct
-import time
-
-from twisted.python import usage
-from ooni.templates.scapyt import BaseScapyTest
-
-class UsageOptions(usage.Options):
- optParameters = [['dst', 'd', None, 'Specify the target address'],
- ['port', 'p', None, 'Specify the target port']
- ]
-
-class ChinaTriggerTest(BaseScapyTest):
- """
- This test is a OONI based implementation of the C tool written
- by Philipp Winter to engage chinese probes in active scanning.
-
- Example of running it:
- ./bin/ooniprobe chinatrigger -d 127.0.0.1 -p 8080
- """
-
- name = "chinatrigger"
- usageOptions = UsageOptions
- requiredOptions = ['dst', 'port']
- timeout = 2
-
- def setUp(self):
- self.dst = self.localOptions['dst']
- self.port = int(self.localOptions['port'])
-
- @staticmethod
- def set_random_servername(pkt):
- ret = pkt[:121]
- for i in range(16):
- ret += random.choice(string.ascii_lowercase)
- ret += pkt[121+16:]
- return ret
-
- @staticmethod
- def set_random_time(pkt):
- ret = pkt[:11]
- ret += struct.pack('!I', int(time.time()))
- ret += pkt[11+4:]
- return ret
-
- @staticmethod
- def set_random_field(pkt):
- ret = pkt[:15]
- for i in range(28):
- ret += chr(random.randint(0, 255))
- ret += pkt[15+28:]
- return ret
-
- @staticmethod
- def mutate(pkt, idx):
- """
- Slightly changed mutate function.
- """
- ret = pkt[:idx-1]
- mutation = chr(random.randint(0, 255))
- while mutation == pkt[idx]:
- mutation = chr(random.randint(0, 255))
- ret += mutation
- ret += pkt[idx:]
- return ret
-
- @staticmethod
- def set_all_random_fields(pkt):
- pkt = ChinaTriggerTest.set_random_servername(pkt)
- pkt = ChinaTriggerTest.set_random_time(pkt)
- pkt = ChinaTriggerTest.set_random_field(pkt)
- return pkt
-
- def test_send_mutations(self):
- from scapy.all import IP, TCP
- pkt = "\x16\x03\x01\x00\xcc\x01\x00\x00\xc8"\
- "\x03\x01\x4f\x12\xe5\x63\x3f\xef\x7d"\
- "\x20\xb9\x94\xaa\x04\xb0\xc1\xd4\x8c"\
- "\x50\xcd\xe2\xf9\x2f\xa9\xfb\x78\xca"\
- "\x02\xa8\x73\xe7\x0e\xa8\xf9\x00\x00"\
- "\x3a\xc0\x0a\xc0\x14\x00\x39\x00\x38"\
- "\xc0\x0f\xc0\x05\x00\x35\xc0\x07\xc0"\
- "\x09\xc0\x11\xc0\x13\x00\x33\x00\x32"\
- "\xc0\x0c\xc0\x0e\xc0\x02\xc0\x04\x00"\
- "\x04\x00\x05\x00\x2f\xc0\x08\xc0\x12"\
- "\x00\x16\x00\x13\xc0\x0d\xc0\x03\xfe"\
- "\xff\x00\x0a\x00\xff\x01\x00\x00\x65"\
- "\x00\x00\x00\x1d\x00\x1b\x00\x00\x18"\
- "\x77\x77\x77\x2e\x67\x6e\x6c\x69\x67"\
- "\x78\x7a\x70\x79\x76\x6f\x35\x66\x76"\
- "\x6b\x64\x2e\x63\x6f\x6d\x00\x0b\x00"\
- "\x04\x03\x00\x01\x02\x00\x0a\x00\x34"\
- "\x00\x32\x00\x01\x00\x02\x00\x03\x00"\
- "\x04\x00\x05\x00\x06\x00\x07\x00\x08"\
- "\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00"\
- "\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11"\
- "\x00\x12\x00\x13\x00\x14\x00\x15\x00"\
- "\x16\x00\x17\x00\x18\x00\x19\x00\x23"\
- "\x00\x00"
-
- pkt = ChinaTriggerTest.set_all_random_fields(pkt)
- pkts = [IP(dst=self.dst)/TCP(dport=self.port)/pkt]
- for x in range(len(pkt)):
- mutation = IP(dst=self.dst)/TCP(dport=self.port)/ChinaTriggerTest.mutate(pkt, x)
- pkts.append(mutation)
- return self.sr(pkts, timeout=2)
-
diff --git a/nettests/experimental/dns_injection.py b/nettests/experimental/dns_injection.py
deleted file mode 100644
index 97233cf..0000000
--- a/nettests/experimental/dns_injection.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# -*- encoding: utf-8 -*-
-from twisted.python import usage
-from twisted.internet import defer
-
-from ooni.templates import dnst
-from ooni import nettest
-from ooni.utils import log
-
-class UsageOptions(usage.Options):
- optParameters = [
- ['resolver', 'r', '8.8.8.1', 'an invalid DNS resolver'],
- ['timeout', 't', 3, 'timeout after which we should consider the query failed']
- ]
-
-class DNSInjectionTest(dnst.DNSTest):
- """
- This test detects DNS spoofed DNS responses by performing UDP based DNS
- queries towards an invalid DNS resolver.
-
- For it to work we must be traversing the network segment of a machine that
- is actively injecting DNS query answers.
- """
- name = "DNS Injection"
- description = "Checks for injection of spoofed DNS answers"
- version = "0.1"
- authors = "Arturo Filastò"
-
- inputFile = ['file', 'f', None,
- 'Input file of list of hostnames to attempt to resolve']
-
- usageOptions = UsageOptions
- requiredOptions = ['resolver', 'file']
-
- def setUp(self):
- self.resolver = (self.localOptions['resolver'], 53)
- self.queryTimeout = [self.localOptions['timeout']]
-
- def inputProcessor(self, filename):
- fp = open(filename)
- for line in fp:
- if line.startswith('http://'):
- yield line.replace('http://', '').replace('/', '').strip()
- else:
- yield line.strip()
- fp.close()
-
- def test_injection(self):
- self.report['injected'] = None
-
- d = self.performALookup(self.input, self.resolver)
- @d.addCallback
- def cb(res):
- log.msg("The DNS query for %s is injected" % self.input)
- self.report['injected'] = True
-
- @d.addErrback
- def err(err):
- err.trap(defer.TimeoutError)
- log.msg("The DNS query for %s is not injected" % self.input)
- self.report['injected'] = False
-
- return d
-
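A hedged sketch (not part of the patch) of the injection heuristic described in the DNSInjectionTest docstring above: a UDP query is sent to a resolver address that should never answer, so any A-record reply that does arrive must have been injected by an on-path device. The resolver address and hostname are placeholders; Twisted's DNS client is used since the dnst template builds on it.

    from twisted.internet import defer, reactor
    from twisted.names import client

    @defer.inlineCallbacks
    def check_injection(hostname, resolver=("8.8.8.1", 53)):
        res = client.Resolver(servers=[resolver])
        try:
            answers, _, _ = yield res.lookupAddress(hostname, timeout=[3])
            print("injected: %s" % [str(a.payload) for a in answers])
        except defer.TimeoutError:
            print("not injected: query timed out as expected")
        reactor.stop()

    reactor.callWhenRunning(check_injection, "example.com")
    reactor.run()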
diff --git a/nettests/experimental/domclass_collector.py b/nettests/experimental/domclass_collector.py
deleted file mode 100644
index c1866f2..0000000
--- a/nettests/experimental/domclass_collector.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# The purpose of this collector is to compute the eigenvector for the input
-# file containing a list of sites.
-#
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from twisted.internet import threads, defer
-
-from ooni.kit import domclass
-from ooni.templates import httpt
-
-class DOMClassCollector(httpt.HTTPTest):
- name = "DOM class collector"
- author = "Arturo Filastò"
- version = 0.1
-
- followRedirects = True
-
- inputFile = ['file', 'f', None, 'The list of urls to build a domclass for']
-
- def test_collect(self):
- if self.input:
- url = self.input
- return self.doRequest(url)
- else:
- raise Exception("No input specified")
-
- def processResponseBody(self, body):
- eigenvalues = domclass.compute_eigenvalues_from_DOM(content=body)
- self.report['eigenvalues'] = eigenvalues.tolist()
diff --git a/nettests/experimental/http_filtering_bypassing.py b/nettests/experimental/http_filtering_bypassing.py
deleted file mode 100644
index dc103db..0000000
--- a/nettests/experimental/http_filtering_bypassing.py
+++ /dev/null
@@ -1,84 +0,0 @@
-# -*- encoding: utf-8 -*-
-from twisted.python import usage
-
-from ooni.utils import log
-from ooni.utils import randomStr, randomSTR
-from ooni.templates import tcpt
-
-class UsageOptions(usage.Options):
- optParameters = [['backend', 'b', '127.0.0.1',
- 'The OONI backend that runs a TCP echo server'],
- ['backendport', 'p', 80, 'Specify the port that the TCP echo server is running (should only be set for debugging)']]
-
-class HTTPFilteringBypass(tcpt.TCPTest):
- name = "HTTPFilteringBypass"
- version = "0.1"
- authors = "xx"
-
- inputFile = ['file', 'f', None,
- 'Specify a list of hostnames to use as inputs']
-
- usageOptions = UsageOptions
- requiredOptions = ['backend']
-
- def setUp(self):
- self.port = int(self.localOptions['backendport'])
- self.address = self.localOptions['backend']
-
- def check_for_manipulation(self, response, payload):
- log.debug("Checking if %s == %s" % (response, payload))
- if response != payload:
- self.report['tampering'] = True
- else:
- self.report['tampering'] = False
-
- def test_prepend_newline(self):
- payload = "\nGET / HTTP/1.1\n\r"
- payload += "Host: %s\n\r" % self.input
-
- d = self.sendPayload(payload)
- d.addCallback(self.check_for_manipulation, payload)
- return d
-
- def test_tab_trick(self):
- payload = "GET / HTTP/1.1\n\r"
- payload += "Host: %s\t\n\r" % self.input
-
- d = self.sendPayload(payload)
- d.addCallback(self.check_for_manipulation, payload)
- return d
-
- def test_subdomain_blocking(self):
- payload = "GET / HTTP/1.1\n\r"
- payload += "Host: %s\n\r" % randomStr(10) + '.' + self.input
-
- d = self.sendPayload(payload)
- d.addCallback(self.check_for_manipulation, payload)
- return d
-
- def test_fuzzy_domain_blocking(self):
- hostname_field = randomStr(10) + '.' + self.input + '.' + randomStr(10)
- payload = "GET / HTTP/1.1\n\r"
- payload += "Host: %s\n\r" % hostname_field
-
- d = self.sendPayload(payload)
- d.addCallback(self.check_for_manipulation, payload)
- return d
-
- def test_fuzzy_match_blocking(self):
- hostname_field = randomStr(10) + self.input + randomStr(10)
- payload = "GET / HTTP/1.1\n\r"
- payload += "Host: %s\n\r" % hostname_field
-
- d = self.sendPayload(payload)
- d.addCallback(self.check_for_manipulation, payload)
- return d
-
- def test_normal_request(self):
- payload = "GET / HTTP/1.1\n\r"
- payload += "Host: %s\n\r" % self.input
-
- d = self.sendPayload(payload)
- d.addCallback(self.check_for_manipulation, payload)
- return d
-
diff --git a/nettests/experimental/http_keyword_filtering.py b/nettests/experimental/http_keyword_filtering.py
deleted file mode 100644
index 0ae9c52..0000000
--- a/nettests/experimental/http_keyword_filtering.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from twisted.python import usage
-
-from ooni.templates import httpt
-
-class UsageOptions(usage.Options):
- optParameters = [['backend', 'b', 'http://127.0.0.1:57001',
- 'URL of the test backend to use']]
-
-class HTTPKeywordFiltering(httpt.HTTPTest):
- """
- This test involves performing HTTP requests containing keywords to be
- tested for censorship.
-
- It does not detect censorship on the client, but just logs the response from the
- HTTP backend server.
- """
- name = "HTTP Keyword Filtering"
- author = "Arturo Filastò"
- version = "0.1.1"
-
- inputFile = ['file', 'f', None, 'List of keywords to use for censorship testing']
-
- usageOptions = UsageOptions
-
- requiredOptions = ['backend']
-
- def test_get(self):
- """
- Perform a HTTP GET request to the backend containing the keyword to be
- tested inside of the request body.
- """
- return self.doRequest(self.localOptions['backend'], method="GET", body=self.input)
-
- def test_post(self):
- """
- Perform a HTTP POST request to the backend containing the keyword to be
- tested inside of the request body.
- """
- return self.doRequest(self.localOptions['backend'], method="POST", body=self.input)
-
diff --git a/nettests/experimental/http_trix.py b/nettests/experimental/http_trix.py
deleted file mode 100644
index 85a4ba2..0000000
--- a/nettests/experimental/http_trix.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# -*- encoding: utf-8 -*-
-from twisted.python import usage
-
-from ooni.utils import log
-from ooni.utils import randomStr, randomSTR
-from ooni.templates import tcpt
-
-class UsageOptions(usage.Options):
- optParameters = [['backend', 'b', '127.0.0.1',
- 'The OONI backend that runs a TCP echo server'],
- ['backendport', 'p', 80, 'Specify the port that the TCP echo server is running (should only be set for debugging)']]
-
-class HTTPTrix(tcpt.TCPTest):
- name = "HTTPTrix"
- version = "0.1"
- authors = "Arturo Filastò"
-
- usageOptions = UsageOptions
- requiredOptions = ['backend']
-
- def setUp(self):
- self.port = int(self.localOptions['backendport'])
- self.address = self.localOptions['backend']
-
- def check_for_manipulation(self, response, payload):
- log.debug("Checking if %s == %s" % (response, payload))
- if response != payload:
- self.report['tampering'] = True
- else:
- self.report['tampering'] = False
-
- def test_for_squid_cache_object(self):
- """
- This detects the presence of a squid transparent HTTP proxy by sending
- a request for cache_object://localhost/info.
-
- This tests for the presence of a Squid Transparent proxy by sending:
-
- GET cache_object://localhost/info HTTP/1.1
- """
- payload = 'GET cache_object://localhost/info HTTP/1.1'
- payload += '\n\r'
-
- d = self.sendPayload(payload)
- d.addCallback(self.check_for_manipulation, payload)
- return d
-
diff --git a/nettests/experimental/http_uk_mobile_networks.py b/nettests/experimental/http_uk_mobile_networks.py
deleted file mode 100644
index 784a9e9..0000000
--- a/nettests/experimental/http_uk_mobile_networks.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# -*- encoding: utf-8 -*-
-import yaml
-
-from twisted.python import usage
-from twisted.plugin import IPlugin
-
-from ooni.templates import httpt
-from ooni.utils import log
-
-class UsageOptions(usage.Options):
- """
- See https://github.com/hellais/ooni-inputs/processed/uk_mobile_networks_redirec…
- to see how the rules file should look like.
- """
- optParameters = [
- ['rules', 'y', None,
- 'Specify the redirect rules file ']
- ]
-
-class HTTPUKMobileNetworksTest(httpt.HTTPTest):
- """
- This test was thought of by Open Rights Group and implemented with the
- purpose of detecting censorship in the UK.
- For more details on this test see:
- https://trac.torproject.org/projects/tor/ticket/6437
- XXX port the knowledge from the trac ticket into this test docstring
- """
- name = "HTTP UK mobile network redirect test"
-
- usageOptions = UsageOptions
-
- followRedirects = True
-
- inputFile = ['urls', 'f', None, 'List of urls one per line to test for censorship']
- requiredOptions = ['urls']
-
- def testPattern(self, value, pattern, type):
- if type == 'eq':
- return value == pattern
- elif type == 're':
- import re
- if re.match(pattern, value):
- return True
- else:
- return False
- else:
- return None
-
- def testPatterns(self, patterns, location):
- test_result = False
-
- if type(patterns) == list:
- for pattern in patterns:
- test_result |= self.testPattern(location, pattern['value'], pattern['type'])
-
- return test_result
-
- def testRules(self, rules, location):
- result = {}
- blocked = False
- for rule, value in rules.items():
- current_rule = {}
- current_rule['name'] = value['name']
- current_rule['patterns'] = value['patterns']
- current_rule['test'] = self.testPatterns(value['patterns'], location)
- blocked |= current_rule['test']
- result[rule] = current_rule
- result['blocked'] = blocked
- return result
-
- def processRedirect(self, location):
- self.report['redirect'] = None
- rules_file = self.localOptions['rules']
-
- fp = open(rules_file)
- rules = yaml.safe_load(fp)
- fp.close()
-
- log.msg("Testing rules %s" % rules)
- redirect = self.testRules(rules, location)
- self.report['redirect'] = redirect
-
-
-
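
The rules file referenced by the (truncated) URL in the UsageOptions docstring is not reproduced here, but testRules() and testPatterns() above imply its shape: a mapping of rule identifiers to a 'name' and a list of 'patterns', each pattern carrying a 'value' and a 'type' of either 'eq' or 're'. A hypothetical rules structure consistent with that code (the operator name, URLs and patterns are invented):

    import yaml

    # Hypothetical redirect rules, shaped the way testRules()/testPatterns() read them.
    rules = {
        "operator_a_blockpage": {
            "name": "Operator A block page",
            "patterns": [
                {"type": "eq", "value": "http://blocked.example.net/"},
                {"type": "re", "value": r"^http://.*\.example\.net/warning"},
            ],
        },
    }
    print(yaml.safe_dump(rules, default_flow_style=False))

Loading the printed document with yaml.safe_load(), as processRedirect() does, yields the same structure back.
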
diff --git a/nettests/experimental/keyword_filtering.py b/nettests/experimental/keyword_filtering.py
deleted file mode 100644
index 9eec4ff..0000000
--- a/nettests/experimental/keyword_filtering.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from twisted.python import usage
-from twisted.internet import defer
-
-from ooni.utils import log
-from ooni.templates import scapyt
-
-from scapy.all import *
-
-class UsageOptions(usage.Options):
- optParameters = [
- ['backend', 'b', '127.0.0.1:57002', 'Test backend running TCP echo'],
- ['timeout', 't', 5, 'Timeout after which to give up waiting for RST packets']
- ]
-
-class KeywordFiltering(scapyt.BaseScapyTest):
- name = "Keyword Filtering detection based on RST packets"
- author = "Arturo Filastò"
- version = "0.1"
-
- usageOptions = UsageOptions
-
- inputFile = ['file', 'f', None,
- 'List of keywords to use for censorship testing']
-
- def test_tcp_keyword_filtering(self):
- """
- Places the keyword to be tested in the payload of a TCP packet.
- XXX need to implement bisection method for enumerating keywords.
- though this should not be an issue since we are testing all
- the keywords in parallel.
- """
- def finished(packets):
- log.debug("Finished running TCP traceroute test on port %s" % port)
- answered, unanswered = packets
- self.report['rst_packets'] = []
- for snd, rcv in answered:
- # The received packet has the RST flag
- if rcv[TCP].flags == 4:
- self.report['rst_packets'].append(rcv)
-
- backend_ip, backend_port = self.localOptions['backend'].split(':')
- timeout = int(self.localOptions['timeout'])
- keyword_to_test = str(self.input)
- packets = IP(dst=backend_ip,id=RandShort())/TCP(dport=int(backend_port))/keyword_to_test
- d = self.sr(packets, timeout=timeout)
- d.addCallback(finished)
- return d
-
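
A side note on the flag check in finished() above: scapy exposes TCP flags as a bitmask (FIN=0x01, SYN=0x02, RST=0x04, PSH=0x08, ACK=0x10), so the equality test against 4 only matches a bare RST and would miss an RST+ACK (0x14), which a middlebox may send instead. A small sketch of a mask-based check (an editorial aside, not part of the removed test):

    from scapy.all import IP, TCP

    pkt = IP()/TCP(flags="RA")                   # RST+ACK, flags value 0x14
    is_rst = bool(int(pkt[TCP].flags) & 0x04)    # test only the RST bit
    print(is_rst)                                # True
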
diff --git a/nettests/experimental/parasitictraceroute.py b/nettests/experimental/parasitictraceroute.py
deleted file mode 100644
index 631c24b..0000000
--- a/nettests/experimental/parasitictraceroute.py
+++ /dev/null
@@ -1,129 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from twisted.python import usage
-from twisted.internet import defer
-
-from ooni.templates import scapyt
-
-from scapy.all import *
-
-from ooni.utils import log
-
-class UsageOptions(usage.Options):
- optParameters = [['backend', 'b', 'google.com', 'Test backend to use'],
- ['timeout', 't', 5, 'The timeout for the traceroute test'],
- ['maxttl', 'm', 64, 'The maximum value of ttl to set on packets'],
- ['dstport', 'd', 80, 'Set the destination port of the traceroute test'],
- ['srcport', 'p', None, 'Set the source port to a specific value']]
-
-class ParasiticalTracerouteTest(scapyt.BaseScapyTest):
- name = "Parasitic TCP Traceroute Test"
- author = "Arturo Filastò"
- version = "0.1"
-
- usageOptions = UsageOptions
-
- def setUp(self):
- def get_sport():
- if self.localOptions['srcport']:
- return int(self.localOptions['srcport'])
- else:
- return random.randint(1024, 65535)
- self.get_sport = get_sport
-
- self.dst_ip = socket.gethostbyaddr(self.localOptions['backend'])[2][0]
-
- self.dport = int(self.localOptions['dstport'])
- self.max_ttl = int(self.localOptions['maxttl'])
-
- @defer.inlineCallbacks
- def test_parasitic_tcp_traceroute(self):
- """
- Establishes a TCP stream, then sequentially sends TCP packets with
- increasing TTL until we reach the ttl of the destination.
-
- Requires the backend to respond with an ACK to our SYN packet (i.e.
- the port must be open)
-
- XXX this currently does not work properly. The problem lies in the fact
- that we are currently using the scapy layer 3 socket. This socket makes
- packets received be trapped by the kernel TCP stack, therefore when we
- send out a SYN and get back a SYN-ACK the kernel stack will reply with
- a RST because it did not send a SYN.
-
- The quick fix to this would be to establish a TCP stream using socket
- calls and then "cannibalizing" the TCP session with scapy.
-
- The real fix is to make scapy use libpcap instead of raw sockets
- obviously as we previously did... arg.
- """
- sport = self.get_sport()
- dport = self.dport
- ipid = int(RandShort())
-
- ip_layer = IP(dst=self.dst_ip,
- id=ipid, ttl=self.max_ttl)
-
- syn = ip_layer/TCP(sport=sport, dport=dport, flags="S", seq=0)
-
- log.msg("Sending...")
- syn.show2()
-
- synack = yield self.sr1(syn)
-
- if not synack:
- log.err("Got no response. Try increasing max_ttl")
- return
-
- log.msg("Got response...")
- synack.show2()
-
- if synack[TCP].flags == 0x11:
- log.msg("Got back a FIN ACK. The destination port is closed")
- return
-
- elif synack[TCP].flags == 18:
- log.msg("Got a SYN ACK. All is well.")
- else:
- log.err("Got an unexpected result")
- return
-
- ack = ip_layer/TCP(sport=synack.dport,
- dport=dport, flags="A",
- seq=synack.ack, ack=synack.seq + 1)
-
- yield self.send(ack)
-
- self.report['hops'] = []
- # For the time being we make the assumption that we are NATted and
- # that the NAT will forward the packet to the destination even if the TTL has expired.
- for ttl in range(1, self.max_ttl):
- log.msg("Sending packet with ttl of %s" % ttl)
- ip_layer.ttl = ttl
- empty_tcp_packet = ip_layer/TCP(sport=synack.dport,
- dport=dport, flags="A",
- seq=synack.ack, ack=synack.seq + 1)
-
- answer = yield self.sr1(empty_tcp_packet)
- if not answer:
- log.err("Got no response for ttl %s" % ttl)
- continue
-
- try:
- icmp = answer[ICMP]
- report = {'ttl': empty_tcp_packet.ttl,
- 'address': answer.src,
- 'rtt': answer.time - empty_tcp_packet.time
- }
- log.msg("%s: %s" % (dport, report))
- self.report['hops'].append(report)
-
- except IndexError:
- if answer.src == self.dst_ip:
- answer.show()
- log.msg("Reached the destination. We have finished the traceroute")
- return
-
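
The docstring of test_parasitic_tcp_traceroute explains why the test misbehaves with scapy's layer-3 socket (the kernel, which never sent the SYN, answers the SYN-ACK with a RST) and suggests letting the kernel establish the TCP stream and then "cannibalizing" it with scapy. A rough sketch of that suggestion, with a hypothetical TARGET and PORT; a real implementation would also have to track sequence numbers and keep the kernel from tearing the connection down:

    import socket
    from scapy.all import IP, TCP, sr1, conf

    TARGET, PORT = "203.0.113.10", 80            # hypothetical destination
    conf.verb = 0

    # Let the kernel complete the three-way handshake...
    s = socket.create_connection((TARGET, PORT), timeout=5)
    sport = s.getsockname()[1]

    # ...then emit probes that reuse the established 4-tuple with rising TTLs.
    for ttl in range(1, 16):
        probe = IP(dst=TARGET, ttl=ttl)/TCP(sport=sport, dport=PORT, flags="A")
        answer = sr1(probe, timeout=2)
        if answer is not None:
            print(ttl, answer.src)               # intermediate hop or the target itself
    s.close()
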
diff --git a/nettests/experimental/squid.py b/nettests/experimental/squid.py
deleted file mode 100644
index 777bc3e..0000000
--- a/nettests/experimental/squid.py
+++ /dev/null
@@ -1,117 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# Squid transparent HTTP proxy detector
-# *************************************
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-import re
-
-from ooni import utils
-from ooni.utils import log
-from ooni.templates import httpt
-
-class SquidTest(httpt.HTTPTest):
- """
- This test aims at detecting the presence of a squid based transparent HTTP
- proxy. It also tries to detect the version number.
- """
- name = "Squid test"
- author = "Arturo Filastò"
- version = "0.1"
-
- optParameters = [['backend', 'b', 'http://ooni.nu/test/', 'Test backend to use']]
-
- #inputFile = ['urls', 'f', None, 'Urls file']
- inputs = ['http://google.com']
- def test_cacheobject(self):
- """
- This detects the presence of a squid transparent HTTP proxy by sending
- a request for cache_object://localhost/info.
-
- The response to this request will usually also contain the squid
- version number.
- """
- log.debug("Running")
- def process_body(body):
- if "Access Denied." in body:
- self.report['transparent_http_proxy'] = True
- else:
- self.report['transparent_http_proxy'] = False
-
- log.msg("Testing Squid proxy presence by sending a request for "\
- "cache_object")
- headers = {}
- #headers["Host"] = [self.input]
- self.report['transparent_http_proxy'] = None
- method = "GET"
- body = "cache_object://localhost/info"
- return self.doRequest(self.localOptions['backend'], method=method, body=body,
- headers=headers, body_processor=process_body)
-
- def test_search_bad_request(self):
- """
- Attempts to perform a request with a random invalid HTTP method.
-
- If we are being MITMed by a Transparent Squid HTTP proxy we will get
- back a response containing the X-Squid-Error header.
- """
- def process_headers(headers):
- log.debug("Processing headers in test_search_bad_request")
- if 'X-Squid-Error' in headers:
- log.msg("Detected the presence of a transparent HTTP "\
- "squid proxy")
- self.report['transparent_http_proxy'] = True
- else:
- log.msg("Did not detect the presence of transparent HTTP "\
- "squid proxy")
- self.report['transparent_http_proxy'] = False
-
- log.msg("Testing Squid proxy presence by sending a random bad request")
- headers = {}
- #headers["Host"] = [self.input]
- method = utils.randomSTR(10, True)
- self.report['transparent_http_proxy'] = None
- return self.doRequest(self.localOptions['backend'], method=method,
- headers=headers, headers_processor=process_headers)
-
- def test_squid_headers(self):
- """
- Detects the presence of a squid transparent HTTP proxy based on the
- response headers it adds to the responses to requests.
- """
- def process_headers(headers):
- """
- Checks if any of the headers that squid is known to add match the
- squid regexp.
-
- We are looking for something that looks like this:
-
- via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
- x-cache: MISS from cache_server
- x-cache-lookup: MISS from cache_server:3128
- """
- squid_headers = {'via': r'.* \((squid.*)\)',
- 'x-cache': r'MISS from (\w+)',
- 'x-cache-lookup': r'MISS from (\w+:?\d+?)'
- }
-
- self.report['transparent_http_proxy'] = False
- for key in squid_headers.keys():
- if key in headers:
- log.debug("Found %s in headers" % key)
- m = re.search(squid_headers[key], headers[key])
- if m:
- log.msg("Detected the presence of squid transparent"\
- " HTTP Proxy")
- self.report['transparent_http_proxy'] = True
-
- log.msg("Testing Squid proxy by looking at response headers")
- headers = {}
- #headers["Host"] = [self.input]
- method = "GET"
- self.report['transparent_http_proxy'] = None
- d = self.doRequest(self.localOptions['backend'], method=method,
- headers=headers, headers_processor=process_headers)
- return d
-
-
diff --git a/nettests/manipulation/__init__.py b/nettests/manipulation/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/nettests/manipulation/captiveportal.py b/nettests/manipulation/captiveportal.py
deleted file mode 100644
index a0f8c6b..0000000
--- a/nettests/manipulation/captiveportal.py
+++ /dev/null
@@ -1,650 +0,0 @@
-# -*- coding: utf-8 -*-
-# captiveportal
-# *************
-#
-# This test is a collection of tests to detect the presence of a
-# captive portal. Code is taken, in part, from the old ooni-probe,
-# which was written by Jacob Appelbaum and Arturo Filastò.
-#
-# This module performs multiple tests that match specific vendor captive
-# portal tests. This is a basic internet captive portal filter tester written
-# for RECon 2011.
-#
-# Read the following URLs to understand the captive portal detection process
-# for various vendors:
-#
-# http://technet.microsoft.com/en-us/library/cc766017%28WS.10%29.aspx
-# http://blog.superuser.com/2011/05/16/windows-7-network-awareness/
-# http://isc.sans.org/diary.html?storyid=10312&
-# http://src.chromium.org/viewvc/chrome?view=rev&revision=74608
- # http://code.google.com/p/chromium-os/issues/detail?id=3281
-# http://crbug.com/52489
-# http://crbug.com/71736
-# https://bugzilla.mozilla.org/show_bug.cgi?id=562917
-# https://bugzilla.mozilla.org/show_bug.cgi?id=603505
-# http://lists.w3.org/Archives/Public/ietf-http-wg/2011JanMar/0086.html
-# http://tools.ietf.org/html/draft-nottingham-http-portal-02
-#
-# :authors: Jacob Appelbaum, Arturo Filastò, Isis Lovecruft
-# :license: see LICENSE for more details
-
-import base64
-import os
-import random
-import re
-import string
-import urllib2
-from urlparse import urlparse
-
-from twisted.python import usage
-from twisted.internet import defer, threads
-
-from ooni import nettest
-from ooni.templates import httpt
-from ooni.utils import net
-from ooni.utils import log
-
-try:
- from dns import resolver
-except ImportError:
- print "The dnspython module was not found:"
- print "See https://crate.io/packages/dnspython/"
- resolver = None
-
-__plugoo__ = "captiveportal"
-__desc__ = "Captive portal detection test"
-
-class UsageOptions(usage.Options):
- optParameters = [['asset', 'a', None, 'Asset file'],
- ['experiment-url', 'e', 'http://google.com/', 'Experiment URL'],
- ['user-agent', 'u', random.choice(net.userAgents),
- 'User agent for HTTP requests']
- ]
-
-class CaptivePortal(nettest.NetTestCase):
- """
- Compares content and status codes of HTTP responses, and attempts
- to determine if content has been altered.
- """
-
- name = "captivep"
- description = "Captive Portal Test"
- version = '0.2'
- author = "Isis Lovecruft"
- usageOptions = UsageOptions
-
- def http_fetch(self, url, headers={}):
- """
- Parses an HTTP url, fetches it, and returns a urllib2 response
- object.
- """
- url = urlparse(url).geturl()
- request = urllib2.Request(url, None, headers)
- #XXX: HTTP Error 302: The HTTP server returned a redirect error that
- #would lead to an infinite loop. The last 30x error message was: Found
- try:
- response = urllib2.urlopen(request)
- response_headers = dict(response.headers)
- return response, response_headers
- except urllib2.HTTPError, e:
- log.err("HTTPError: %s" % e)
- return None, None
-
- def http_content_match_fuzzy_opt(self, experimental_url, control_result,
- headers=None, fuzzy=False):
- """
- Makes an HTTP request on port 80 for experimental_url, then
- compares the response_content of experimental_url with the
- control_result. Optionally, if the fuzzy parameter is set to
- True, the response_content is compared with a regex of the
- control_result. If the response_content from the
- experimental_url and the control_result match, returns True
- with the HTTP status code and headers; False, status code, and
- headers if otherwise.
- """
-
- if headers is None:
- default_ua = self.local_options['user-agent']
- headers = {'User-Agent': default_ua}
-
- response, response_headers = self.http_fetch(experimental_url, headers)
-
- response_content = response.read() if response else None
- response_code = response.code if response else None
- if response_content is None:
- log.err("HTTP connection appears to have failed.")
- return False, False, False
-
- if fuzzy:
- pattern = re.compile(control_result)
- match = pattern.search(response_content)
- log.msg("Fuzzy HTTP content comparison for experiment URL")
- log.msg("'%s'" % experimental_url)
- if not match:
- log.msg("does not match!")
- return False, response_code, response_headers
- else:
- log.msg("and the expected control result yielded a match.")
- return True, response_code, response_headers
- else:
- if str(response_content) != str(control_result):
- log.msg("HTTP content comparison of experiment URL")
- log.msg("'%s'" % experimental_url)
- log.msg("and the expected control result do not match.")
- return False, response_code, response_headers
- else:
- return True, response_code, response_headers
-
- def http_status_code_match(self, experiment_code, control_code):
- """
- Compare two HTTP status codes, returns True if they match.
- """
- return int(experiment_code) == int(control_code)
-
- def http_status_code_no_match(self, experiment_code, control_code):
- """
- Compare two HTTP status codes, returns True if they do not match.
- """
- return int(experiment_code) != int(control_code)
-
- def dns_resolve(self, hostname, nameserver=None):
- """
- Resolves hostname(s) though nameserver to corresponding
- address(es). hostname may be either a single hostname string,
- or a list of strings. If nameserver is not given, use local
- DNS resolver, and if that fails try using 8.8.8.8.
- """
- if not resolver:
- log.msg("dnspython is not installed.\
- Cannot perform DNS Resolve test")
- return []
- if isinstance(hostname, str):
- hostname = [hostname]
-
- if nameserver is not None:
- res = resolver.Resolver(configure=False)
- res.nameservers = [nameserver]
- else:
- res = resolver.Resolver()
-
- response = []
- answer = None
-
- for hn in hostname:
- try:
- answer = res.query(hn)
- except resolver.NoNameservers:
- res.nameservers = ['8.8.8.8']
- try:
- answer = res.query(hn)
- except resolver.NXDOMAIN:
- log.msg("DNS resolution for %s returned NXDOMAIN" % hn)
- response.append('NXDOMAIN')
- except resolver.NXDOMAIN:
- log.msg("DNS resolution for %s returned NXDOMAIN" % hn)
- response.append('NXDOMAIN')
- finally:
- if not answer:
- return response
- for addr in answer:
- response.append(addr.address)
- return response
-
- def dns_resolve_match(self, experiment_hostname, control_address):
- """
- Resolve experiment_hostname, and check to see that it returns
- an experiment_address which matches the control_address. If
- they match, returns True and experiment_address; otherwise
- returns False and experiment_address.
- """
- experiment_address = self.dns_resolve(experiment_hostname)
- if not experiment_address:
- log.debug("dns_resolve() for %s failed" % experiment_hostname)
- return None, experiment_address
-
- if len(set(experiment_address) & set([control_address])) > 0:
- return True, experiment_address
- else:
- log.msg("DNS comparison of control '%s' does not" % control_address)
- log.msg("match experiment response '%s'" % experiment_address)
- return False, experiment_address
-
- def get_auth_nameservers(self, hostname):
- """
- Many CPs set a nameserver to be used. Let's query that
- nameserver for the authoritative nameservers of hostname.
-
- The equivalent of:
- $ dig +short NS ooni.nu
- """
- if not resolver:
- log.msg("dnspython not installed.")
- log.msg("Cannot perform test.")
- return []
-
- res = resolver.Resolver()
- answer = res.query(hostname, 'NS')
- auth_nameservers = []
- for auth in answer:
- auth_nameservers.append(auth.to_text())
- return auth_nameservers
-
- def hostname_to_0x20(self, hostname):
- """
- MaKEs yOur HOsTnaME lOoK LiKE THis.
-
- For more information, see:
- D. Dagon et al. "Increased DNS Forgery Resistance
- Through 0x20-Bit Encoding". Proc. CCS, 2008.
- """
- hostname_0x20 = ''
- for char in hostname:
- l33t = random.choice(['caps', 'nocaps'])
- if l33t == 'caps':
- hostname_0x20 += char.capitalize()
- else:
- hostname_0x20 += char.lower()
- return hostname_0x20
-
- def check_0x20_to_auth_ns(self, hostname, sample_size=None):
- """
- Resolve a 0x20 DNS request for hostname over hostname's
- authoritative nameserver(s), and check to make sure that
- the capitalization in the 0x20 request matches that of the
- response. Also, check the serial numbers of the SOA (Start
- of Authority) records on the authoritative nameservers to
- make sure that they match.
-
- If sample_size is given, a random sample equal to that number
- of authoritative nameservers will be queried; default is 5.
- """
- log.msg("")
- log.msg("Testing random capitalization of DNS queries...")
- log.msg("Testing that Start of Authority serial numbers match...")
-
- auth_nameservers = self.get_auth_nameservers(hostname)
-
- if sample_size is None:
- sample_size = 5
- resolved_all = self.dns_resolve(auth_nameservers)
- resolved_auth_ns = random.sample(resolved_all,
- min(sample_size, len(resolved_all)))
-
- querynames = []
- answernames = []
- serials = []
-
- # Even when gevent monkey patching is on, the requests here
- # are sent without being 0x20'd, so we need to 0x20 them.
- hostname = self.hostname_to_0x20(hostname)
-
- for auth_ns in resolved_auth_ns:
- res = resolver.Resolver(configure=False)
- res.nameservers = [auth_ns]
- try:
- answer = res.query(hostname, 'SOA')
- except resolver.Timeout:
- continue
- querynames.append(answer.qname.to_text())
- answernames.append(answer.rrset.name.to_text())
- for soa in answer:
- serials.append(str(soa.serial))
-
- if len(set(querynames).intersection(answernames)) == 1:
- log.msg("Capitalization in DNS queries and responses match.")
- name_match = True
- else:
- log.msg("The random capitalization '%s' used in" % hostname)
- log.msg("DNS queries to that hostname's authoritative")
- log.msg("nameservers does not match the capitalization in")
- log.msg("the response.")
- name_match = False
-
- if len(set(serials)) == 1:
- log.msg("Start of Authority serial numbers all match.")
- serial_match = True
- else:
- log.msg("Some SOA serial numbers did not match the rest!")
- serial_match = False
-
- ret = name_match, serial_match, querynames, answernames, serials
-
- if name_match and serial_match:
- log.msg("Your DNS queries do not appear to be tampered.")
- return ret
- elif name_match or serial_match:
- log.msg("Something is tampering with your DNS queries.")
- return ret
- elif not name_match and not serial_match:
- log.msg("Your DNS queries are definitely being tampered with.")
- return ret
-
- def get_random_url_safe_string(self, length):
- """
- Returns a random url-safe string of specified length, where
- 0 < length <= 256. The returned string will always start with
- an alphabetic character.
- """
- if (length <= 0):
- length = 1
- elif (length > 256):
- length = 256
-
- random_ascii = base64.urlsafe_b64encode(os.urandom(int(length)))
-
- while not random_ascii[:1].isalpha():
- random_ascii = base64.urlsafe_b64encode(os.urandom(int(length)))
-
- three_quarters = int((len(random_ascii)) * (3.0/4.0))
- random_string = random_ascii[:three_quarters]
- return random_string
-
- def get_random_hostname(self, length=None):
- """
- Returns a random hostname with SLD of specified length. If
- length is unspecified, length=32 is used.
-
- These *should* all resolve to NXDOMAIN. If they actually
- resolve to a box that isn't part of a captive portal that
- would be rather interesting.
- """
- if length is None:
- length = 32
-
- random_sld = self.get_random_url_safe_string(length)
-
- # if it doesn't start with a letter, chuck it.
- while not random_sld[:1].isalpha():
- random_sld = self.get_random_url_safe_string(length)
-
- tld_list = ['.com', '.net', '.org', '.info', '.test', '.invalid']
- random_tld = random.choice(tld_list)
- random_hostname = random_sld + random_tld
- return random_hostname
-
- def compare_random_hostnames(self, hostname_count=None, hostname_length=None):
- """
- Get hostname_count number of random hostnames with SLD length
- of hostname_length, and then attempt DNS resolution. If no
- arguments are given, default to three hostnames of 32 bytes
- each. These random hostnames *should* resolve to NXDOMAIN,
- except in the case where a user is presented with a captive
- portal and remains unauthenticated, in which case the captive
- portal may return the address of the authentication page.
-
- If the cardinality of the intersection of the set of resolved
- random hostnames and the single element control set
- (['NXDOMAIN']) are equal to one, then DNS properly resolved.
-
- Returns true if only NXDOMAINs were returned, otherwise returns
- False with the relative complement of the control set in the
- response set.
- """
- if hostname_count is None:
- hostname_count = 3
-
- log.msg("Generating random hostnames...")
- log.msg("Resolving DNS for %d random hostnames..." % hostname_count)
-
- control = ['NXDOMAIN']
- responses = []
-
- for x in range(hostname_count):
- random_hostname = self.get_random_hostname(hostname_length)
- response_match, response_address = self.dns_resolve_match(random_hostname,
- control[0])
- for address in response_address:
- if response_match is False:
- log.msg("Strangely, DNS resolution of the random hostname")
- log.msg("%s actually points to %s"
- % (random_hostname, response_address))
- responses = responses + [address]
- else:
- responses = responses + [address]
-
- intersection = set(responses) & set(control)
- relative_complement = set(responses) - set(control)
- r = set(responses)
-
- if (len(intersection) == 1) and (len(r) == 1):
- log.msg("All %d random hostnames properly resolved to NXDOMAIN."
- % hostname_count)
- return True, relative_complement
- elif (len(intersection) == 1) and (len(r) > 1):
- log.msg("Something odd happened. Some random hostnames correctly")
- log.msg("resolved to NXDOMAIN, but several others resolved to")
- log.msg("to the following addresses: %s" % relative_complement)
- return False, relative_complement
- elif (len(intersection) == 0) and (len(r) == 1):
- log.msg("All random hostnames resolved to the IP address ")
- log.msg("'%s', which is indicative of a captive portal." % r)
- return False, relative_complement
- else:
- log.debug("Apparently, pigs are flying on your network, 'cause a")
- log.debug("bunch of hostnames made from 32-byte random strings")
- log.debug("just magically resolved to a bunch of random addresses.")
- log.debug("That is definitely highly improbable. In fact, my napkin")
- log.debug("tells me that the probability of just one of those")
- log.debug("hostnames resolving to an address is 1.68e-59, making")
- log.debug("it nearly twice as unlikely as an MD5 hash collision.")
- log.debug("Either someone is seriously messing with your network,")
- log.debug("or else you are witnessing the impossible. %s" % r)
- return False, relative_complement
-
- def google_dns_cp_test(self):
- """
- Google Chrome resolves three 10-byte random hostnames.
- """
- subtest = "Google Chrome DNS-based"
- log.msg("Running the Google Chrome DNS-based captive portal test...")
-
- gmatch, google_dns_result = self.compare_random_hostnames(3, 10)
-
- if gmatch:
- log.msg("Google Chrome DNS-based captive portal test did not")
- log.msg("detect a captive portal.")
- return google_dns_result
- else:
- log.msg("Google Chrome DNS-based captive portal test believes")
- log.msg("you are in a captive portal, or else something very")
- log.msg("odd is happening with your DNS.")
- return google_dns_result
-
- def ms_dns_cp_test(self):
- """
- Microsoft "phones home" to a server which will always resolve
- to the same address.
- """
- subtest = "Microsoft NCSI DNS-based"
-
- log.msg("")
- log.msg("Running the Microsoft NCSI DNS-based captive portal")
- log.msg("test...")
-
- msmatch, ms_dns_result = self.dns_resolve_match("dns.msftncsi.com",
- "131.107.255.255")
- if msmatch:
- log.msg("Microsoft NCSI DNS-based captive portal test did not")
- log.msg("detect a captive portal.")
- return ms_dns_result
- else:
- log.msg("Microsoft NCSI DNS-based captive portal test ")
- log.msg("believes you are in a captive portal.")
- return ms_dns_result
-
- def run_vendor_dns_tests(self):
- """
- Run the vendor DNS tests.
- """
- report = {}
- report['google_dns_cp'] = self.google_dns_cp_test()
- report['ms_dns_cp'] = self.ms_dns_cp_test()
-
- return report
-
- def run_vendor_tests(self, *a, **kw):
- """
- These are several vendor tests used to detect the presence of
- a captive portal. Each test compares HTTP status code and
- content to the control results and has its own User-Agent
- string, in order to emulate the test as it would occur on the
- device it was intended for. Vendor tests are defined in the
- format:
- [exp_url, ctrl_result, ctrl_code, ua, test_name]
- """
-
- vendor_tests = [['http://www.apple.com/library/test/success.html',
- 'Success',
- '200',
- 'Mozilla/5.0 (iPhone; U; CPU like Mac OS X; en) AppleWebKit/420+ (KHTML, like Gecko) Version/3.0 Mobile/1A543a Safari/419.3',
- 'Apple HTTP Captive Portal'],
- ['http://tools.ietf.org/html/draft-nottingham-http-portal-02',
- '428 Network Authentication Required',
- '428',
- 'Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0',
- 'W3 Captive Portal'],
- ['http://www.msftncsi.com/ncsi.txt',
- 'Microsoft NCSI',
- '200',
- 'Microsoft NCSI',
- 'MS HTTP Captive Portal',]]
-
- cm = self.http_content_match_fuzzy_opt
- sm = self.http_status_code_match
- snm = self.http_status_code_no_match
-
- def compare_content(status_func, fuzzy, experiment_url, control_result,
- control_code, headers, test_name):
- log.msg("")
- log.msg("Running the %s test..." % test_name)
-
- content_match, experiment_code, experiment_headers = cm(experiment_url,
- control_result,
- headers, fuzzy)
- status_match = status_func(experiment_code, control_code)
-
- if status_match and content_match:
- log.msg("The %s test was unable to detect" % test_name)
- log.msg("a captive portal.")
- return True
- else:
- log.msg("The %s test shows that your network" % test_name)
- log.msg("is filtered.")
- return False
-
- result = []
- for vt in vendor_tests:
- report = {}
- report['vt'] = vt
-
- experiment_url = vt[0]
- control_result = vt[1]
- control_code = vt[2]
- headers = {'User-Agent': vt[3]}
- test_name = vt[4]
-
- args = (experiment_url, control_result, control_code, headers, test_name)
-
- if test_name == "MS HTTP Captive Portal":
- report['result'] = compare_content(sm, False, *args)
-
- elif test_name == "Apple HTTP Captive Portal":
- report['result'] = compare_content(sm, True, *args)
-
- elif test_name == "W3 Captive Portal":
- report['result'] = compare_content(snm, True, *args)
-
- else:
- log.err("Ooni is trying to run an undefined CP vendor test.")
- result.append(report)
- return result
-
- def control(self, experiment_result, args):
- """
- Compares the content and status code of the HTTP response for
- experiment_url with the control_result and control_code
- respectively. If the status codes match, but the experimental
- content and control_result do not match, fuzzy matching is enabled
- to determine if the control_result is at least included somewhere
- in the experimental content. Returns True if matches are found,
- and False if otherwise.
- """
- # XXX put this back to being parametrized
- #experiment_url = self.local_options['experiment-url']
- experiment_url = 'http://google.com/'
- control_result = 'XX'
- control_code = 200
- ua = self.local_options['user-agent']
-
- cm = self.http_content_match_fuzzy_opt
- sm = self.http_status_code_match
- snm = self.http_status_code_no_match
-
- log.msg("Running test for '%s'..." % experiment_url)
- content_match, experiment_code, experiment_headers = cm(experiment_url,
- control_result)
- status_match = sm(experiment_code, control_code)
- if status_match and content_match:
- log.msg("The test for '%s'" % experiment_url)
- log.msg("was unable to detect a captive portal.")
-
- self.report['result'] = True
-
- elif status_match and not content_match:
- log.msg("Retrying '%s' with fuzzy match enabled."
- % experiment_url)
- fuzzy_match, experiment_code, experiment_headers = cm(experiment_url,
- control_result,
- fuzzy=True)
- if fuzzy_match:
- self.report['result'] = True
- else:
- log.msg("Found modified content on '%s'," % experiment_url)
- log.msg("which could indicate a captive portal.")
-
- self.report['result'] = False
- else:
- log.msg("The content comparison test for ")
- log.msg("'%s'" % experiment_url)
- log.msg("shows that your HTTP traffic is filtered.")
-
- self.report['result'] = False
-
- @defer.inlineCallbacks
- def test_captive_portal(self):
- """
- Runs the CaptivePortal(Test).
-
- CONFIG OPTIONS
- --------------
-
- If "do_captive_portal_vendor_tests" is set to "true", then vendor
- specific captive portal HTTP-based tests will be run.
-
- If "do_captive_portal_dns_tests" is set to "true", then vendor
- specific captive portal DNS-based tests will be run.
-
- If "check_dns_requests" is set to "true", then Ooni-probe will
- attempt to check that your DNS requests are not being tampered with
- by a captive portal.
-
- If "captive_portal" = "yourfilename.txt", then user-specified tests
- will be run.
-
- Any combination of the above tests can be run.
- """
-
- log.msg("")
- log.msg("Running vendor tests...")
- self.report['vendor_tests'] = yield threads.deferToThread(self.run_vendor_tests)
-
- log.msg("")
- log.msg("Running vendor DNS-based tests...")
- self.report['vendor_dns_tests'] = yield threads.deferToThread(self.run_vendor_dns_tests)
-
- log.msg("")
- log.msg("Checking that DNS requests are not being tampered...")
- self.report['check0x20'] = yield threads.deferToThread(self.check_0x20_to_auth_ns, 'ooni.nu')
-
- log.msg("")
- log.msg("Captive portal test finished!")
-
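
The DNS heuristic in compare_random_hostnames() reduces to set arithmetic on the resolved answers: when every random hostname returns NXDOMAIN, the intersection with the control set has exactly one element and nothing is left over, whereas a captive portal that answers every query with its login address leaves an empty intersection and a single leftover address. A worked example of that arithmetic with made-up values:

    control = {"NXDOMAIN"}

    clean_network = {"NXDOMAIN"}      # all three random hostnames returned NXDOMAIN
    behind_portal = {"10.0.0.1"}      # every query was answered with the portal address

    for responses in (clean_network, behind_portal):
        intersection = responses & control
        leftover = responses - control            # the relative complement in the report
        portal_suspected = len(intersection) == 0 and len(responses) == 1
        print(sorted(responses), sorted(intersection), sorted(leftover), portal_suspected)
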
diff --git a/nettests/manipulation/daphne.py b/nettests/manipulation/daphne.py
deleted file mode 100644
index 09279fa..0000000
--- a/nettests/manipulation/daphne.py
+++ /dev/null
@@ -1,119 +0,0 @@
-# -*- encoding: utf-8 -*-
-from twisted.python import usage
-from twisted.internet import protocol, endpoints, reactor
-
-from ooni import nettest
-from ooni.kit import daphn3
-from ooni.utils import log
-
-class Daphn3ClientProtocol(daphn3.Daphn3Protocol):
- def nextStep(self):
- log.debug("Moving on to next step in the state walk")
- self.current_data_received = 0
- if self.current_step >= (len(self.steps) - 1):
- log.msg("Reached the end of the state machine")
- log.msg("Censorship fingerpint bisected!")
- step_idx, mutation_idx = self.factory.mutation
- log.msg("step_idx: %s | mutation_id: %s" % (step_idx, mutation_idx))
- #self.transport.loseConnection()
- if self.report:
- self.report['mutation_idx'] = mutation_idx
- self.report['step_idx'] = step_idx
- self.d.callback(None)
- return
- else:
- self.current_step += 1
- if self._current_step_role() == self.role:
- # We need to send more data because we are again responsible for
- # doing so.
- self.sendPayload()
-
-
-class Daphn3ClientFactory(protocol.ClientFactory):
- protocol = Daphn3ClientProtocol
- mutation = [0,0]
- steps = None
-
- def buildProtocol(self, addr):
- p = self.protocol()
- p.steps = self.steps
- p.factory = self
- return p
-
- def startedConnecting(self, connector):
- log.msg("Started connecting %s" % connector)
-
- def clientConnectionFailed(self, reason, connector):
- log.err("We failed connecting the the OONIB")
- log.err("Cannot perform test. Perhaps it got blocked?")
- log.err("Please report this to tor-assistants(a)torproject.org")
-
- def clientConnectionLost(self, reason, connector):
- log.err("Daphn3 client connection lost")
- print reason
-
-class daphn3Args(usage.Options):
- optParameters = [
- ['host', 'h', '127.0.0.1', 'Target Hostname'],
- ['port', 'p', 57003, 'Target port number']]
-
- optFlags = [['pcap', 'c', 'Specify that the input file is a pcap file'],
- ['yaml', 'y', 'Specify that the input file is a YAML file (default)']]
-
-class daphn3Test(nettest.NetTestCase):
-
- name = "Daphn3"
- usageOptions = daphn3Args
- inputFile = ['file', 'f', None,
- 'Specify the pcap or YAML file to be used as input to the test']
-
- #requiredOptions = ['file']
-
- steps = None
-
- def inputProcessor(self, filename):
- """
- step_idx is the step in the packet exchange, e.g.
- [.X.] are packets sent by a client or a server
-
- client: [.1.] [.3.] [.4.]
- server: [.2.] [.5.]
-
- mutation_idx is the sub-index of the packet, i.e. the byte of the
- packet at step_idx that is to be mutated
-
- """
- if self.localOptions['pcap']:
- daphn3Steps = daphn3.read_pcap(filename)
- else:
- daphn3Steps = daphn3.read_yaml(filename)
- log.debug("Loaded these steps %s" % daphn3Steps)
- yield daphn3Steps
-
- def test_daphn3(self):
- host = self.localOptions['host']
- port = int(self.localOptions['port'])
-
- def failure(failure):
- log.msg("Failed to connect")
- self.report['censored'] = True
- self.report['mutation'] = 0
- raise Exception("Error in connection, perhaps the backend is censored")
- return
-
- def success(protocol):
- log.msg("Successfully connected")
- protocol.sendPayload()
- return protocol.d
-
- log.msg("Connecting to %s:%s" % (host, port))
- endpoint = endpoints.TCP4ClientEndpoint(reactor, host, port)
- daphn3_factory = Daphn3ClientFactory()
- daphn3_factory.steps = self.input
- daphn3_factory.report = self.report
- d = endpoint.connect(daphn3_factory)
- d.addErrback(failure)
- d.addCallback(success)
- return d
-
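
The daphn3 test walks a state machine of alternating client and server payloads and bisects a censorship fingerprint by mutating one byte at a time: step_idx picks the payload within the exchange and mutation_idx the byte inside it, as the inputProcessor docstring sketches. A purely illustrative view of that indexing (the steps and payloads below are invented and are not the daphn3 file format):

    # Hypothetical exchange: the client speaks on steps 0 and 2, the server on step 1.
    steps = [
        {"sender": "client", "data": b"GET /blocked HTTP/1.1\r\n\r\n"},
        {"sender": "server", "data": b"HTTP/1.1 200 OK\r\n\r\n"},
        {"sender": "client", "data": b"bye"},
    ]

    step_idx, mutation_idx = 0, 5          # mutate byte 5 of the first client payload
    payload = bytearray(steps[step_idx]["data"])
    payload[mutation_idx] ^= 0xFF          # flip that byte, then replay the exchange
    print(bytes(payload))
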
diff --git a/nettests/manipulation/dnsspoof.py b/nettests/manipulation/dnsspoof.py
deleted file mode 100644
index 5c50c2f..0000000
--- a/nettests/manipulation/dnsspoof.py
+++ /dev/null
@@ -1,69 +0,0 @@
-from twisted.internet import defer
-from twisted.python import usage
-
-from scapy.all import IP, UDP, DNS, DNSQR
-
-from ooni.templates import scapyt
-from ooni.utils import log
-
-class UsageOptions(usage.Options):
- optParameters = [['resolver', 'r', None,
- 'Specify the resolver that should be used for DNS queries (ip:port)'],
- ['hostname', 'h', None,
- 'Specify the hostname of a censored site'],
- ['backend', 'b', '8.8.8.8:53',
- 'Specify the IP address of a good DNS resolver (ip:port)']
- ]
-
-
-class DNSSpoof(scapyt.ScapyTest):
- name = "DNS Spoof"
- timeout = 2
-
- usageOptions = UsageOptions
-
- requiredOptions = ['hostname', 'resolver']
-
- def setUp(self):
- self.resolverAddr, self.resolverPort = self.localOptions['resolver'].split(':')
- self.resolverPort = int(self.resolverPort)
-
- self.controlResolverAddr, self.controlResolverPort = self.localOptions['backend'].split(':')
- self.controlResolverPort = int(self.controlResolverPort)
-
- self.hostname = self.localOptions['hostname']
-
- def postProcessor(self, report):
- """
- This is not tested, but the concept is that if the two responses
- match up then spoofing is occurring.
- """
- try:
- test_answer = report['test_a_lookup']['answered_packets'][0][1]
- control_answer = report['test_control_a_lookup']['answered_packets'][0][1]
- except IndexError:
- self.report['spoofing'] = 'no_answer'
- return
-
- if test_answer[UDP] == control_answer[UDP]:
- self.report['spoofing'] = True
- else:
- self.report['spoofing'] = False
- return
-
- @defer.inlineCallbacks
- def test_a_lookup(self):
- question = IP(dst=self.resolverAddr)/UDP(dport=self.resolverPort)/DNS(rd=1,
- qd=DNSQR(qtype="A", qclass="IN", qname=self.hostname))
- log.msg("Performing query to %s with %s:%s" % (self.hostname, self.resolverAddr, self.resolverPort))
- yield self.sr1(question)
-
- @defer.inlineCallbacks
- def test_control_a_lookup(self):
- question = IP(dst=self.controlResolverAddr)/UDP(dport=self.controlResolverPort)/DNS(rd=1,
- qd=DNSQR(qtype="A", qclass="IN", qname=self.hostname))
- log.msg("Performing query to %s with %s:%s" % (self.hostname,
- self.controlResolverAddr, self.controlResolverPort))
- yield self.sr1(question)
-
-
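
postProcessor() above compares the entire UDP layer of the answer obtained from the suspect resolver with the one from the known-good resolver. A possibly less brittle variant (an assumption on my part, not what the removed test does) is to compare only the A records carried in the two answers; a self-contained sketch with constructed packets:

    from scapy.all import DNS, DNSRR

    def a_records(pkt):
        """Collect the IPv4 addresses in the answer section of a DNS packet."""
        out = set()
        rr = pkt[DNS].an if DNS in pkt else None
        while isinstance(rr, DNSRR):
            if rr.type == 1:                  # A record
                out.add(rr.rdata)
            rr = rr.payload
        return out

    test_answer = DNS(qr=1, an=DNSRR(rrname="example.com", type="A", rdata="192.0.2.1"))
    control_answer = DNS(qr=1, an=DNSRR(rrname="example.com", type="A", rdata="192.0.2.1"))

    # Mirrors the report['spoofing'] semantics above: matching answers on a
    # censored name are treated as evidence of spoofing.
    print({"spoofing": a_records(test_answer) == a_records(control_answer)})
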
diff --git a/nettests/manipulation/http_header_field_manipulation.py b/nettests/manipulation/http_header_field_manipulation.py
deleted file mode 100644
index 509f4ef..0000000
--- a/nettests/manipulation/http_header_field_manipulation.py
+++ /dev/null
@@ -1,189 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-import random
-import json
-import yaml
-
-from twisted.python import usage
-
-from ooni.utils import log, net, randomStr
-from ooni.templates import httpt
-from ooni.utils.txagentwithsocks import TrueHeaders
-
-def random_capitalization(string):
- output = ""
- original_string = string
- string = string.swapcase()
- for i in range(len(string)):
- if random.randint(0, 1):
- output += string[i].swapcase()
- else:
- output += string[i]
- if original_string == output:
- return random_capitalization(output)
- else:
- return output
-
-class UsageOptions(usage.Options):
- optParameters = [
- ['backend', 'b', 'http://127.0.0.1:57001',
- 'URL of the backend to use for sending the requests'],
- ['headers', 'h', None,
- 'Specify a yaml formatted file from which to read the request headers to send']
- ]
-
-class HTTPHeaderFieldManipulation(httpt.HTTPTest):
- """
- It performs HTTP requests towards a backend with request headers that
- vary in capitalization. If the headers reported by the server differ from
- the ones we sent, then we have detected tampering.
- """
- name = "HTTP Header Field Manipulation"
- author = "Arturo Filastò"
- version = "0.1.3"
-
- randomizeUA = False
- usageOptions = UsageOptions
-
- requiredOptions = ['backend']
-
- def get_headers(self):
- headers = {}
- if self.localOptions['headers']:
- try:
- f = open(self.localOptions['headers'])
- except IOError:
- raise Exception("Specified input file does not exist")
- content = ''.join(f.readlines())
- f.close()
- headers = yaml.safe_load(content)
- return headers
- else:
- # XXX generate these from a random choice taken from whatheaders.com
- # http://s3.amazonaws.com/data.whatheaders.com/whatheaders-latest.xml.zip
- headers = {"User-Agent": [random.choice(net.userAgents)],
- "Accept": ["text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"],
- "Accept-Encoding": ["gzip,deflate,sdch"],
- "Accept-Language": ["en-US,en;q=0.8"],
- "Accept-Charset": ["ISO-8859-1,utf-8;q=0.7,*;q=0.3"],
- "Host": [randomStr(15)+'.com']
- }
- return headers
-
- def get_random_caps_headers(self):
- headers = {}
- normal_headers = self.get_headers()
- for k, v in normal_headers.items():
- new_key = random_capitalization(k)
- headers[new_key] = v
- return headers
-
- def processInputs(self):
- if self.localOptions['backend']:
- self.url = self.localOptions['backend']
- else:
- raise Exception("No backend specified")
-
- def processResponseBody(self, data):
- self.check_for_tampering(data)
-
- def check_for_tampering(self, data):
- """
- Here we do checks to verify if the request we made has been tampered
- with. We have the following categories of tampering:
-
- * **total** when the response is not a json object and therefore we were not
- able to reach the ooniprobe test backend
-
- * **request_line_capitalization** when the HTTP Request line (e.g. GET /
- HTTP/1.1) does not match the capitalization we set.
-
- * **header_field_number** when the number of headers we sent does not match
- with the ones the backend received
-
- * **header_name_capitalization** when the header field names do not match
- those that we sent.
-
- * **header_field_value** when the header field value does not match with the
- one we transmitted.
- """
- log.msg("Checking for tampering on %s" % self.url)
-
- self.report['tampering'] = {
- 'total': False,
- 'request_line_capitalization': False,
- 'header_name_capitalization': False,
- 'header_field_value': False,
- 'header_field_number': False
- }
- try:
- response = json.loads(data)
- except ValueError:
- self.report['tampering']['total'] = True
- return
-
- request_request_line = "%s / HTTP/1.1" % self.request_method
-
- try:
- response_request_line = response['request_line']
- response_headers_dict = response['headers_dict']
- except KeyError:
- self.report['tampering']['total'] = True
- return
-
- if request_request_line != response_request_line:
- self.report['tampering']['request_line_capitalization'] = True
-
- request_headers = TrueHeaders(self.request_headers)
- diff = request_headers.getDiff(TrueHeaders(response_headers_dict),
- ignore=['Connection'])
- if diff:
- self.report['tampering']['header_name_capitalization'] = True
- else:
- self.report['tampering']['header_name_capitalization'] = False
- self.report['tampering']['header_name_diff'] = list(diff)
- log.msg(" total: %(total)s" % self.report['tampering'])
- log.msg(" request_line_capitalization: %(request_line_capitalization)s" % self.report['tampering'])
- log.msg(" header_name_capitalization: %(header_name_capitalization)s" % self.report['tampering'])
- log.msg(" header_field_value: %(header_field_value)s" % self.report['tampering'])
- log.msg(" header_field_number: %(header_field_number)s" % self.report['tampering'])
-
- def test_get(self):
- self.request_method = "GET"
- self.request_headers = self.get_random_caps_headers()
- return self.doRequest(self.url, self.request_method,
- headers=self.request_headers)
-
- def test_get_random_capitalization(self):
- self.request_method = random_capitalization("GET")
- self.request_headers = self.get_random_caps_headers()
- return self.doRequest(self.url, self.request_method,
- headers=self.request_headers)
-
- def test_post(self):
- self.request_method = "POST"
- self.request_headers = self.get_headers()
- return self.doRequest(self.url, self.request_method,
- headers=self.request_headers)
-
- def test_post_random_capitalization(self):
- self.request_method = random_capitalization("POST")
- self.request_headers = self.get_random_caps_headers()
- return self.doRequest(self.url, self.request_method,
- headers=self.request_headers)
-
- def test_put(self):
- self.request_method = "PUT"
- self.request_headers = self.get_headers()
- return self.doRequest(self.url, self.request_method,
- headers=self.request_headers)
-
- def test_put_random_capitalization(self):
- self.request_method = random_capitalization("PUT")
- self.request_headers = self.get_random_caps_headers()
- return self.doRequest(self.url, self.request_method,
- headers=self.request_headers)
-
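
check_for_tampering() above classifies tampering by comparing what the helper echoes back (request line and header names) with what was actually sent. A small worked example of the request-line check, using a made-up helper response shaped like the JSON the test expects:

    import json

    request_method = "gEt"                       # e.g. random_capitalization("GET")
    request_request_line = "%s / HTTP/1.1" % request_method

    # Hypothetical helper response; a middlebox has normalized the method to "GET".
    body = json.dumps({
        "request_line": "GET / HTTP/1.1",
        "headers_dict": {"Host": ["example.com"]},
    })

    response = json.loads(body)
    tampering = {
        "request_line_capitalization":
            response["request_line"] != request_request_line,
    }
    print(tampering)                             # {'request_line_capitalization': True}
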
diff --git a/nettests/manipulation/http_host.py b/nettests/manipulation/http_host.py
deleted file mode 100644
index d95d836..0000000
--- a/nettests/manipulation/http_host.py
+++ /dev/null
@@ -1,141 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# HTTP Host Test
-# **************
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-import json
-from twisted.python import usage
-
-from ooni.utils import randomStr, randomSTR
-
-from ooni.utils import log
-from ooni.templates import httpt
-
-class UsageOptions(usage.Options):
- optParameters = [['backend', 'b', 'http://127.0.0.1:57001',
- 'URL of the test backend to use. Should be \
- listening on port 80 and be a \
- HTTPReturnJSONHeadersHelper'],
- ['content', 'c', None, 'The file to read \
- from containing the content of a block page']]
-
-class HTTPHost(httpt.HTTPTest):
- """
- This test is aimed at detecting the presence of a transparent HTTP proxy
- and enumerating the sites that are being censored by it.
-
- It places the hostname of the site to be tested for censorship inside the
- Host header field and then determines whether the probe is behind a
- transparent HTTP proxy (because the response from the backend server does
- not match) and whether the site is censored, by checking if the page that
- it got back matches the input block page.
- """
- name = "HTTP Host"
- author = "Arturo Filastò"
- version = "0.2.3"
-
- randomizeUA = False
- usageOptions = UsageOptions
-
- inputFile = ['file', 'f', None,
- 'List of hostnames to test for censorship']
-
- requiredOptions = ['backend']
-
- def test_filtering_prepend_newline_to_method(self):
- headers = {}
- headers["Host"] = [self.input]
- return self.doRequest(self.localOptions['backend'], method="\nGET",
- headers=headers)
-
- def test_filtering_add_tab_to_host(self):
- headers = {}
- headers["Host"] = [self.input + '\t']
- return self.doRequest(self.localOptions['backend'],
- headers=headers)
-
- def test_filtering_of_subdomain(self):
- headers = {}
- headers["Host"] = [randomStr(10) + '.' + self.input]
- return self.doRequest(self.localOptions['backend'],
- headers=headers)
-
- def test_filtering_via_fuzzy_matching(self):
- headers = {}
- headers["Host"] = [randomStr(10) + self.input + randomStr(10)]
- return self.doRequest(self.localOptions['backend'],
- headers=headers)
-
- def test_send_host_header(self):
- """
- Stuffs the HTTP Host header field with the site to be tested for
- censorship and does an HTTP request of this kind to our backend.
-
- The HTTP User-Agent header is not randomized here (randomizeUA is False).
- """
- headers = {}
- headers["Host"] = [self.input]
- return self.doRequest(self.localOptions['backend'],
- headers=headers)
-
- def check_for_censorship(self, body):
- """
- If we have specified what a censorship page looks like here we will
- check if the page we are looking at matches it.
-
- XXX this is not tested, though it is basically what was used to detect
- censorship in the palestine case.
- """
- if self.localOptions['content']:
- self.report['censored'] = True
- censorship_page = open(self.localOptions['content'])
- response_page = iter(body.split("\n"))
-
- for censorship_line in censorship_page.xreadlines():
- response_line = response_page.next()
- if response_line != censorship_line:
- self.report['censored'] = False
- break
-
- censorship_page.close()
- else:
- self.report['censored'] = None
-
- def processResponseBody(self, body):
- """
- XXX this is to be filled in with either a domclass based classified or
- with a rule that will allow to detect that the body of the result is
- that of a censored site.
- """
- # If we don't see a json object we know that something is wrong for
- # sure
- if not body.startswith("{"):
- log.msg("This does not appear to be JSON")
- self.report['transparent_http_proxy'] = True
- self.check_for_censorship(body)
- return
- try:
- content = json.loads(body)
- except:
- log.msg("The json does not parse, this is not what we expected")
- self.report['transparent_http_proxy'] = True
- self.check_for_censorship(body)
- return
-
- # We base the determination of the presence of a transparent HTTP
- # proxy on the basis of the response containing the json that is to be
- # returned by a HTTP Request Test Helper
- if 'request_headers' in content and \
- 'request_line' in content and \
- 'headers_dict' in content:
- log.msg("Found the keys I expected in %s" % content)
- self.report['transparent_http_proxy'] = False
- self.report['censored'] = False
- else:
- log.msg("Did not find the keys I expected in %s" % content)
- self.report['transparent_http_proxy'] = True
- self.check_for_censorship(body)
-
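
processResponseBody() above only rules out a transparent proxy when the backend's reply parses as JSON and carries the three keys produced by the HTTPReturnJSONHeadersHelper; anything else means something other than the helper answered (or rewrote) the request. The decision can be restated compactly, here with an invented block-page body:

    import json

    def looks_like_helper_echo(body):
        """True when the body parses as JSON and has the helper's expected keys."""
        try:
            content = json.loads(body)
        except ValueError:
            return False
        return all(k in content
                   for k in ("request_headers", "request_line", "headers_dict"))

    # Hypothetical block page served by a transparent proxy instead of the helper JSON.
    body = "<html><body>Access to this site is restricted.</body></html>"
    print({"transparent_http_proxy": not looks_like_helper_echo(body)})
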
diff --git a/nettests/manipulation/http_invalid_request_line.py b/nettests/manipulation/http_invalid_request_line.py
deleted file mode 100644
index 2482282..0000000
--- a/nettests/manipulation/http_invalid_request_line.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# -*- encoding: utf-8 -*-
-from twisted.python import usage
-
-from ooni.utils import log
-from ooni.utils import randomStr, randomSTR
-from ooni.templates import tcpt
-
-class UsageOptions(usage.Options):
- optParameters = [['backend', 'b', '127.0.0.1',
- 'The OONI backend that runs a TCP echo server'],
- ['backendport', 'p', 80, 'Specify the port that the TCP echo server is running on (should only be set for debugging)']]
-
-class HTTPInvalidRequestLine(tcpt.TCPTest):
- """
- The goal of this test is to do some very basic and not very noisy fuzzing
- on the HTTP request line. We generate a series of requests that are not
- valid HTTP requests.
-
- Unless stated otherwise, 'Xx'*N refers to 2*N random upper- or lowercase
- ASCII letters or digits ('XxXx' is 4 characters).
- """
- name = "HTTP Invalid Request Line"
- version = "0.1.4"
- authors = "Arturo Filastò"
-
- usageOptions = UsageOptions
- requiredOptions = ['backend']
-
- def setUp(self):
- self.port = int(self.localOptions['backendport'])
- self.address = self.localOptions['backend']
-
- def check_for_manipulation(self, response, payload):
- log.debug("Checking if %s == %s" % (response, payload))
- if response != payload:
- self.report['tampering'] = True
- else:
- self.report['tampering'] = False
-
- def test_random_invalid_method(self):
- """
- We test sending data to a TCP echo server listening on port 80, if what
- we get back is not what we have sent then there is tampering going on.
- This is for example what squid will return when performing such
- request:
-
- HTTP/1.0 400 Bad Request
- Server: squid/2.6.STABLE21
- Date: Sat, 23 Jul 2011 02:22:44 GMT
- Content-Type: text/html
- Content-Length: 1178
- Expires: Sat, 23 Jul 2011 02:22:44 GMT
- X-Squid-Error: ERR_INVALID_REQ 0
- X-Cache: MISS from cache_server
- X-Cache-Lookup: NONE from cache_server:3128
- Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
- Proxy-Connection: close
-
- """
- payload = randomSTR(4) + " / HTTP/1.1\n\r"
-
- d = self.sendPayload(payload)
- d.addCallback(self.check_for_manipulation, payload)
- return d
-
- def test_random_invalid_field_count(self):
- """
- This generates a request that looks like this:
-
- XxXxX XxXxX XxXxX XxXxX
-
- This may trigger some bugs in the HTTP parsers of transparent HTTP
- proxies.
- """
- payload = ' '.join(randomStr(5) for x in range(4))
- payload += "\n\r"
-
- d = self.sendPayload(payload)
- d.addCallback(self.check_for_manipulation, payload)
- return d
-
- def test_random_big_request_method(self):
- """
- This generates a request that looks like this:
-
- Xx*512 / HTTP/1.1
- """
- payload = randomStr(1024) + ' / HTTP/1.1\n\r'
-
- d = self.sendPayload(payload)
- d.addCallback(self.check_for_manipulation, payload)
- return d
-
- def test_random_invalid_version_number(self):
- """
- This generates a request that looks like this:
-
- GET / HTTP/XxX
- """
- payload = 'GET / HTTP/' + randomStr(3)
- payload += '\n\r'
-
- d = self.sendPayload(payload)
- d.addCallback(self.check_for_manipulation, payload)
- return d
-
diff --git a/nettests/manipulation/traceroute.py b/nettests/manipulation/traceroute.py
deleted file mode 100644
index 3f6f17b..0000000
--- a/nettests/manipulation/traceroute.py
+++ /dev/null
@@ -1,143 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from twisted.python import usage
-from twisted.internet import defer
-
-from ooni.templates import scapyt
-
-from scapy.all import *
-
-from ooni.utils import log
-
-class UsageOptions(usage.Options):
- optParameters = [
- ['backend', 'b', '8.8.8.8', 'Test backend to use'],
- ['timeout', 't', 5, 'The timeout for the traceroute test'],
- ['maxttl', 'm', 30, 'The maximum value of ttl to set on packets'],
- ['srcport', 'p', None, 'Set the source port to a specific value (only applies to TCP and UDP)']
- ]
-
-class TracerouteTest(scapyt.BaseScapyTest):
- name = "Multi Protocol Traceroute Test"
- author = "Arturo Filastò"
- version = "0.1.1"
-
- usageOptions = UsageOptions
- dst_ports = [0, 22, 23, 53, 80, 123, 443, 8080, 65535]
-
- def setUp(self):
- def get_sport(protocol):
- if self.localOptions['srcport']:
- return int(self.localOptions['srcport'])
- else:
- return random.randint(1024, 65535)
-
- self.get_sport = get_sport
-
- def max_ttl_and_timeout(self):
- max_ttl = int(self.localOptions['maxttl'])
- timeout = int(self.localOptions['timeout'])
- self.report['max_ttl'] = max_ttl
- self.report['timeout'] = timeout
- return max_ttl, timeout
-
-
- def postProcessor(self, report):
- tcp_hops = report['test_tcp_traceroute']
- udp_hops = report['test_udp_traceroute']
- icmp_hops = report['test_icmp_traceroute']
-
-
- def test_tcp_traceroute(self):
- """
- Does a traceroute to the destination by sending TCP SYN packets
- with TTLs from 1 until max_ttl.
- """
- def finished(packets, port):
- log.debug("Finished running TCP traceroute test on port %s" % port)
- answered, unanswered = packets
- self.report['hops_'+str(port)] = []
- for snd, rcv in answered:
- try:
- sport = snd[TCP].sport
- except IndexError:
- log.err("Source port for this traceroute was not found. This is probably a bug")
- sport = -1
-
- report = {'ttl': snd.ttl,
- 'address': rcv.src,
- 'rtt': rcv.time - snd.time,
- 'sport': sport
- }
- log.debug("%s: %s" % (port, report))
- self.report['hops_'+str(port)].append(report)
-
- dl = []
- max_ttl, timeout = self.max_ttl_and_timeout()
- for port in self.dst_ports:
- packets = IP(dst=self.localOptions['backend'],
- ttl=(1,max_ttl),id=RandShort())/TCP(flags=0x2, dport=port,
- sport=self.get_sport('tcp'))
-
- d = self.sr(packets, timeout=timeout)
- d.addCallback(finished, port)
- dl.append(d)
- return defer.DeferredList(dl)
-
- def test_udp_traceroute(self):
- """
- Does a traceroute to the destination by sending UDP packets with empty
- payloads with TTLs from 1 until max_ttl.
- """
- def finished(packets, port):
- log.debug("Finished running UDP traceroute test on port %s" % port)
- answered, unanswered = packets
- self.report['hops_'+str(port)] = []
- for snd, rcv in answered:
- report = {'ttl': snd.ttl,
- 'address': rcv.src,
- 'rtt': rcv.time - snd.time,
- 'sport': snd[UDP].sport
- }
- log.debug("%s: %s" % (port, report))
- self.report['hops_'+str(port)].append(report)
- dl = []
- max_ttl, timeout = self.max_ttl_and_timeout()
- for port in self.dst_ports:
- packets = IP(dst=self.localOptions['backend'],
- ttl=(1,max_ttl),id=RandShort())/UDP(dport=port,
- sport=self.get_sport('udp'))
-
- d = self.sr(packets, timeout=timeout)
- d.addCallback(finished, port)
- dl.append(d)
- return defer.DeferredList(dl)
-
- def test_icmp_traceroute(self):
- """
- Does a traceroute to the destination by sending ICMP echo request
- packets with TTLs from 1 until max_ttl.
- """
- def finished(packets):
- log.debug("Finished running ICMP traceroute test")
- answered, unanswered = packets
- self.report['hops'] = []
- for snd, rcv in answered:
- report = {'ttl': snd.ttl,
- 'address': rcv.src,
- 'rtt': rcv.time - snd.time
- }
- log.debug("%s" % (report))
- self.report['hops'].append(report)
- dl = []
- max_ttl, timeout = self.max_ttl_and_timeout()
- packets = IP(dst=self.localOptions['backend'],
- ttl=(1,max_ttl), id=RandShort())/ICMP()
-
- d = self.sr(packets, timeout=timeout)
- d.addCallback(finished)
- return d
-
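The deleted test above sweeps TTL values from 1 to max_ttl in a single scapy packet set and records one hop per answered probe. A minimal standalone sketch of the same idea, using plain scapy rather than the ooni scapyt template (not part of this diff; the destination, port and RTT handling are illustrative assumptions, and sending raw packets requires root):

import random
from scapy.all import IP, TCP, RandShort, sr

def tcp_ttl_sweep(dst="8.8.8.8", dport=80, max_ttl=30, timeout=5):
    sport = random.randint(1024, 65535)
    # ttl=(1, max_ttl) expands into one SYN probe per TTL value.
    packets = IP(dst=dst, ttl=(1, max_ttl), id=RandShort()) / \
              TCP(flags="S", dport=dport, sport=sport)
    answered, unanswered = sr(packets, timeout=timeout)
    hops = [{"ttl": snd.ttl,
             "address": rcv.src,
             "rtt": rcv.time - snd.sent_time} for snd, rcv in answered]
    return sorted(hops, key=lambda hop: hop["ttl"])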
diff --git a/nettests/scanning/__init__.py b/nettests/scanning/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/nettests/scanning/http_url_list.py b/nettests/scanning/http_url_list.py
deleted file mode 100644
index 0accaae..0000000
--- a/nettests/scanning/http_url_list.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# :authors: Arturo Filastò
-# :licence: see LICENSE
-
-from twisted.internet import defer
-from twisted.python import usage
-from ooni.templates import httpt
-from ooni.utils import log
-
-class UsageOptions(usage.Options):
- optParameters = [['content', 'c', None,
- 'The file to read from containing the content of a block page'],
- ['url', 'u', None, 'Specify a single URL to test.']
- ]
-
-class HTTPURLList(httpt.HTTPTest):
- """
- Performs GET, POST and PUT requests to a list of URLs specified as
- input and checks if the page that we get back as a result matches that
- of a block page given as input.
-
- If no block page is given as input to the test it will simply collect the
- responses to the HTTP requests and write them to a report file.
- """
- name = "HTTP URL List"
- author = "Arturo Filastò"
- version = "0.1.3"
-
- usageOptions = UsageOptions
-
- inputFile = ['file', 'f', None,
- 'List of URLS to perform GET and POST requests to']
-
- def setUp(self):
- """
- Check for inputs.
- """
- if self.input:
- self.url = self.input
- elif self.localOptions['url']:
- self.url = self.localOptions['url']
- else:
- raise Exception("No input specified")
-
- def check_for_content_censorship(self, body):
- """
- If we have specified what a censorship page looks like here we will
- check if the page we are looking at matches it.
-
- XXX this is not tested, though it is basically what was used to detect
- censorship in the palestine case.
- """
- self.report['censored'] = True
-
- censorship_page = open(self.localOptions['content']).xreadlines()
- response_page = iter(body.split("\n"))
-
- # We first allign the two pages to the first HTML tag (something
- # starting with <). This is useful so that we can give as input to this
- # test something that comes from the output of curl -kis
- # http://the_page/
- for line in censorship_page:
- if line.strip().startswith("<"):
- break
- for line in response_page:
- if line.strip().startswith("<"):
- break
-
- for censorship_line in censorship_page:
- try:
- response_line = response_page.next()
- except StopIteration:
- # The censored page and the response we got do not match in
- # length.
- self.report['censored'] = False
- break
- censorship_line = censorship_line.replace("\n", "")
- if response_line != censorship_line:
- self.report['censored'] = False
-
- censorship_page.close()
-
- def processResponseBody(self, body):
- if self.localOptions['content']:
- log.msg("Checking for censorship in response body")
- self.check_for_content_censorship(body)
-
- def test_get(self):
- return self.doRequest(self.url, method="GET")
-
- def test_post(self):
- return self.doRequest(self.url, method="POST")
-
- def test_put(self):
- return self.doRequest(self.url, method="PUT")
-
-
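check_for_content_censorship above aligns the supplied block page and the fetched body at their first HTML tag (so that curl -kis output can be fed in directly) and then compares the remaining lines one by one. A simplified pure-function restatement of that comparison (illustrative only, not the code shipped in the test):

def looks_like_block_page(response_body, block_page_text):
    def tail_from_first_tag(text):
        lines = iter(text.split("\n"))
        for line in lines:
            if line.strip().startswith("<"):
                break
        return list(lines)
    expected = tail_from_first_tag(block_page_text)
    got = tail_from_first_tag(response_body)
    # A shorter response or any differing line means no match with the block page.
    if len(got) < len(expected):
        return False
    return all(e == g for e, g in zip(expected, got))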
diff --git a/nettests/third_party/Makefile b/nettests/third_party/Makefile
deleted file mode 100644
index 16adfe0..0000000
--- a/nettests/third_party/Makefile
+++ /dev/null
@@ -1,3 +0,0 @@
-fetch:
- wget http://netalyzr.icsi.berkeley.edu/NetalyzrCLI.jar
- chmod +x NetalyzrCLI.jar
diff --git a/nettests/third_party/README b/nettests/third_party/README
deleted file mode 100644
index d9e435f..0000000
--- a/nettests/third_party/README
+++ /dev/null
@@ -1,14 +0,0 @@
-There is no license for NetalyzrCLI.jar; so while we include it, it's just
-for ease of use.
-
-We currently support interfacing with the ICSI Netalyzr system by wrapping
-the NetalyzrCLI.jar client. It was downloaded on August 5th, 2011 from the
-following URL:
- http://netalyzr.icsi.berkeley.edu/NetalyzrCLI.jar
-
-More information about the client is available on the cli web page:
- http://netalyzr.icsi.berkeley.edu/cli.html
-
-After looking at NetalyzrCLI.jar, I discovered that '-d' runs it in a
-debugging mode that is quite useful for understanding their testing
-framework as it runs.
diff --git a/nettests/third_party/__init__.py b/nettests/third_party/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/nettests/third_party/netalyzr.py b/nettests/third_party/netalyzr.py
deleted file mode 100644
index 9b21831..0000000
--- a/nettests/third_party/netalyzr.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# -*- encoding: utf-8 -*-
-#
-# This is a wrapper around the Netalyzer Java command line client
-#
-# :authors: Jacob Appelbaum <jacob(a)appelbaum.net>
-# Arturo "hellais" Filastò <art(a)fuffa.org>
-# :licence: see LICENSE
-
-from ooni import nettest
-from ooni.utils import log
-import time
-import os
-from twisted.internet import reactor, threads, defer
-
-class NetalyzrWrapperTest(nettest.NetTestCase):
- name = "NetalyzrWrapper"
-
- def setUp(self):
- cwd = os.path.abspath(os.path.join(os.path.abspath(__file__), '..'))
-
- # XXX set the output directory to something more uniform
- outputdir = os.path.join(cwd, '..', '..')
-
- program_path = os.path.join(cwd, 'NetalyzrCLI.jar')
- program = "java -jar %s -d" % program_path
-
- test_token = time.asctime(time.gmtime()).replace(" ", "_").strip()
-
- self.output_file = os.path.join(outputdir,
- "NetalyzrCLI_" + test_token + ".out")
- self.output_file.strip()
- self.run_me = program + " 2>&1 >> " + self.output_file
-
- def blocking_call(self):
- try:
- result = threads.blockingCallFromThread(reactor, os.system, self.run_me)
- except:
- log.debug("Netalyzr had an error, please see the log file: %s" % self.output_file)
- finally:
- self.clean_up()
-
- def clean_up(self):
- self.report['netalyzr_report'] = self.output_file
- log.debug("finished running NetalzrWrapper")
- log.debug("Please check %s for Netalyzr output" % self.output_file)
-
- def test_run_netalyzr(self):
- """
- This test simply wraps netalyzr and runs it from command line
- """
- log.msg("Running NetalyzrWrapper (this will take some time, be patient)")
- log.debug("with command '%s'" % self.run_me)
- # XXX we probably want to use a processprotocol here to obtain the
- # stdout from Netalyzr. This would allows us to visualize progress
- # (currently there is no progress because the stdout of os.system is
- # trapped by twisted) and to include the link to the netalyzr report
- # directly in the OONI report, perhaps even downloading it.
- reactor.callInThread(self.blocking_call)
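The XXX note above suggests replacing os.system with a process protocol so that Netalyzr's stdout can be streamed and included in the report. A hedged sketch of that approach with Twisted's ProcessProtocol (not part of this diff; the java invocation and jar path are assumptions):

import os
from twisted.internet import defer, protocol, reactor

class NetalyzrProtocol(protocol.ProcessProtocol):
    """Collects NetalyzrCLI.jar stdout and fires a deferred when it exits."""
    def __init__(self):
        self.output = []
        self.done = defer.Deferred()

    def outReceived(self, data):
        # Progress lines arrive here as Netalyzr prints them.
        self.output.append(data)

    def processEnded(self, reason):
        self.done.callback("".join(self.output))

def run_netalyzr(jar_path="NetalyzrCLI.jar"):
    proto = NetalyzrProtocol()
    reactor.spawnProcess(proto, "java",
                         args=["java", "-jar", jar_path, "-d"],
                         env=os.environ)
    return proto.done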
[ooni-probe/develop] Add support for viewing test results and uploading inputs
by isis@torproject.org 26 Jun '13
commit 4f51cd3107bc1f9e65be587aeb3b41f7a584e3d7
Author: Arturo Filastò <art(a)fuffa.org>
Date: Thu Apr 25 13:33:46 2013 +0200
Add support for viewing test results and uploading inputs
---
data/ui/app/index.html | 1 +
data/ui/app/libs/ng-upload/ng-upload.js | 107 +++++++++++++++++++++++++++++++
data/ui/app/scripts/app.js | 21 +++---
data/ui/app/scripts/controllers.js | 36 ++++++++---
data/ui/app/scripts/services.js | 9 ++-
data/ui/app/styles/app.css | 5 ++
data/ui/app/views/inputs.html | 31 +++++++++
data/ui/app/views/sidebar.html | 4 ++
data/ui/app/views/test-list.html | 38 -----------
data/ui/app/views/test-status.html | 7 --
data/ui/app/views/test.html | 42 ++++++++++++
ooni/api/spec.py | 6 +-
12 files changed, 239 insertions(+), 68 deletions(-)
diff --git a/data/ui/app/index.html b/data/ui/app/index.html
index a903ceb..c304066 100644
--- a/data/ui/app/index.html
+++ b/data/ui/app/index.html
@@ -28,6 +28,7 @@
<script src="libs/angular/angular.js"></script>
<script src="libs/angular-resource/angular-resource.js"></script>
+ <script src="libs/ng-upload/ng-upload.js"></script>
<script src="scripts/app.js"></script>
<script src="scripts/services.js"></script>
<script src="scripts/controllers.js"></script>
diff --git a/data/ui/app/libs/ng-upload/ng-upload.js b/data/ui/app/libs/ng-upload/ng-upload.js
new file mode 100644
index 0000000..fae7a44
--- /dev/null
+++ b/data/ui/app/libs/ng-upload/ng-upload.js
@@ -0,0 +1,107 @@
+// Version 0.3.2
+// AngularJS simple file upload directive
+// this directive uses an iframe as a target
+// to enable the uploading of files without
+// losing focus in the ng-app.
+//
+// <div ng-app="app">
+// <div ng-controller="mainCtrl">
+// <form action="/uploads" ng-upload>
+// <input type="file" name="avatar"></input>
+// <input type="submit" value="Upload"
+// upload-submit="submited(content, completed)"></input>
+// </form>
+// </div>
+// </div>
+//
+// angular.module('app', ['ngUpload'])
+// .controller('mainCtrl', function($scope) {
+// $scope.submited = function(content, completed) {
+// if (completed) {
+// console.log(content);
+// }
+// }
+// });
+//
+angular.module('ngUpload', [])
+ .directive('uploadSubmit', ['$parse', function($parse) {
+ return {
+ restrict: 'AC',
+ link: function(scope, element, attrs) {
+ // Options (just 1 for now)
+ // Each option should be prefixed with 'upload-options-' or 'uploadOptions'
+ // {
+ // // specify whether to enable the submit button when uploading forms
+ // enableControls: bool
+ // }
+ var options = {};
+ options.enableControls = attrs.uploadOptionsEnableControls;
+
+ // submit the form - requires jQuery
+ var form = element.parents('form[ng-upload]') || element.parents('form.ng-upload');
+
+ // Retrieve the callback function
+ var fn = $parse(attrs.uploadSubmit);
+
+ if (!angular.isFunction(fn)) {
+ var message = "The expression on the ngUpload directive does not point to a valid function.";
+ throw message + "\n";
+ }
+
+ element.bind('click', function($event) {
+ // prevent default behavior of click
+ $event.preventDefault = true;
+ // create a new iframe
+ var iframe = angular.element("<iframe id='upload_iframe' name='upload_iframe' border='0' width='0' height='0' style='width: 0px; height: 0px; border: none; display: none' />");
+
+ // attach function to load event of the iframe
+ iframe.bind('load', function () {
+ // get content - requires jQuery
+ var content = iframe.contents().find('body').text();
+ // execute the upload response function in the active scope
+ scope.$apply(function () {
+ fn(scope, { content: content, completed: true});
+ });
+ // remove iframe
+ if (content !== "") { // Fixes a bug in Google Chrome that dispose the iframe before content is ready.
+ setTimeout(function () { iframe.remove(); }, 250);
+ }
+ element.attr('disabled', null);
+ element.attr('title', 'Click to start upload.');
+ });
+
+ // add the new iframe to application
+ form.parent().append(iframe);
+
+ scope.$apply(function () {
+ fn(scope, {content: "Please wait...", completed: false });
+ });
+
+ var enabled = true;
+ if (!options.enableControls) {
+ // disable the submit control on click
+ element.attr('disabled', 'disabled');
+ enabled = false;
+ }
+ // why do we need this???
+ element.attr('title', (enabled ? '[ENABLED]: ' : '[DISABLED]: ') + 'Uploading, please wait...');
+
+ form.submit();
+
+ }).attr('title', 'Click to start upload.');
+ }
+ };
+ }])
+ .directive('ngUpload', ['$parse', function ($parse) {
+ return {
+ restrict: 'AC',
+ link: function (scope, element, attrs) {
+ element.attr("target", "upload_iframe");
+ element.attr("method", "post");
+ // Append a timestamp field to the url to prevent browser caching results
+ element.attr("action", element.attr("action") + "?_t=" + new Date().getTime());
+ element.attr("enctype", "multipart/form-data");
+ element.attr("encoding", "multipart/form-data");
+ }
+ };
+ }]);
\ No newline at end of file
diff --git a/data/ui/app/scripts/app.js b/data/ui/app/scripts/app.js
index 5fb8fdf..d36a17c 100644
--- a/data/ui/app/scripts/app.js
+++ b/data/ui/app/scripts/app.js
@@ -2,28 +2,29 @@
// Declare app level module which depends on filters, and services
-var ooniprobe = angular.module('ooniprobe', ['ooniprobe.services']).
+var ooniprobe = angular.module('ooniprobe', ['ngUpload', 'ooniprobe.services']).
config(['$routeProvider', function($routeProvider) {
- $routeProvider.when('/test-status',
+
+ $routeProvider.when('/inputs',
{
- templateUrl: 'views/test-status.html',
- controller: 'PageCtrl'
+ templateUrl: 'views/inputs.html',
+ controller: 'InputsCtrl'
}
);
- $routeProvider.when('/test-list',
+ $routeProvider.when('/settings',
{
- templateUrl: 'views/test-list.html',
- controller: 'TestListCtrl'
+ templateUrl: 'views/settings.html',
+ controller: 'SettingsCtrl'
}
);
$routeProvider.when('/test/:testID',
{
- templateUrl: 'views/test-list.html',
- controller: 'TestListCtrl'
+ templateUrl: 'views/test.html',
+ controller: 'TestCtrl'
}
);
- $routeProvider.otherwise({redirectTo: '/test-status'});
+ $routeProvider.otherwise({redirectTo: '/settings'});
}]);
diff --git a/data/ui/app/scripts/controllers.js b/data/ui/app/scripts/controllers.js
index 489dfd2..5ba24e1 100644
--- a/data/ui/app/scripts/controllers.js
+++ b/data/ui/app/scripts/controllers.js
@@ -3,16 +3,16 @@
ooniprobe.controller('PageCtrl', ['$scope', function($scope) {
}]);
-ooniprobe.controller('TestListCtrl', ['$scope', '$routeParams', 'testStatus',
- function($scope, $routeParams, testStatus) {
+ooniprobe.controller('SettingsCtrl', ['$scope',
+ function($scope) {
+}]);
- var testID = $routeParams['testID'];
- $scope.updateTestStatus = function() {
- testStatus(testID).success(function(testDetails){
- $scope.testDetails = testDetails;
- });
+ooniprobe.controller('InputsCtrl', ['$scope', 'Inputs',
+ function($scope, Inputs) {
+ $scope.inputs = Inputs.query();
+ $scope.uploadComplete = function(contents, completed) {
+ return;
}
- $scope.updateTestStatus();
}]);
@@ -30,13 +30,31 @@ ooniprobe.controller('SideBarCtrl', ['$scope', 'listTests', '$location',
}]);
+ooniprobe.controller('TestCtrl', ['$scope', '$routeParams', 'testStatus', 'Inputs',
+ function($scope, $routeParams, testStatus, Inputs) {
+
+ var testID = $routeParams['testID'];
+
+ $scope.inputs = Inputs.query();
+
+ $scope.updateTestStatus = function() {
+ testStatus(testID).success(function(testDetails){
+ $scope.testDetails = testDetails;
+ });
+ }
+ $scope.updateTestStatus();
+
+
+}]);
+
ooniprobe.controller('TestBoxCtrl', ['$scope', 'startTest',
function($scope, startTest) {
$scope.startTest = function() {
var options = {};
- angular.forEach($scope.testDetails.arguments, function(option, key){
+ angular.forEach($scope.testDetails.arguments,
+ function(option, key) {
options[key] = option.value;
});
diff --git a/data/ui/app/scripts/services.js b/data/ui/app/scripts/services.js
index 3d6721d..ea013ee 100644
--- a/data/ui/app/scripts/services.js
+++ b/data/ui/app/scripts/services.js
@@ -2,7 +2,7 @@
angular.module('ooniprobe.services', ['ngResource']).
factory('listTests', ['$resource',
- function($resource){
+ function($resource) {
return $resource('/test');
}]).
factory('testStatus', ['$http', function($http){
@@ -10,11 +10,16 @@ angular.module('ooniprobe.services', ['ngResource']).
return $http.get('/test/' + testID);
}
}]).
- factory('startTest', ['$http', function($http){
+ factory('startTest', ['$http',
+ function($http) {
return function(testID, options) {
return $http.post('/test/' + testID + '/start', options);
}
}]).
+ factory('Inputs', ['$resource',
+ function($resource) {
+ return $resource('/inputs');
+}]).
factory('status', ['$resource',
function($resource) {
return $resource('/status');
diff --git a/data/ui/app/styles/app.css b/data/ui/app/styles/app.css
index 5fe454f..b4a0fb8 100644
--- a/data/ui/app/styles/app.css
+++ b/data/ui/app/styles/app.css
@@ -14,3 +14,8 @@
background-color: rgb(240, 240, 240);
}
+.testResult {
+ height: 200px;
+ overflow-y: scroll;
+ overflow-x: hidden;
+}
diff --git a/data/ui/app/views/inputs.html b/data/ui/app/views/inputs.html
new file mode 100644
index 0000000..767e00e
--- /dev/null
+++ b/data/ui/app/views/inputs.html
@@ -0,0 +1,31 @@
+<div class="row">
+ <div class="span8">
+ <h2>Inputs</h2>
+ <ul class="unstyled">
+ <li ng-repeat="input in inputs">{{input.filename}}
+ <!-- button class="btn btn-small btn-danger" ng-click="input.$delete()">delete</button -->
+ </li>
+ </ul>
+ <form ng-upload action="/inputs">
+
+ <h4>Add file</h4>
+ <label>file</label>
+ <input type="file" name="file" />
+ <br/>
+ <button class="btn"
+ upload-submit="uploadComplete(contents, completed)">Upload</button>
+
+ <!-- h4>Add filename</h4>
+ <label>filename</label>
+ <input type="text" ng-model="fileName" />
+
+ <label>content</label>
+ <textarea ng-model="fileContent"></textarea>
+ <br/>
+ <button class="btn">Add</button -->
+ </form>
+
+
+ </div>
+</div>
+
diff --git a/data/ui/app/views/settings.html b/data/ui/app/views/settings.html
new file mode 100644
index 0000000..e69de29
diff --git a/data/ui/app/views/sidebar.html b/data/ui/app/views/sidebar.html
index 172a51e..5ab4630 100644
--- a/data/ui/app/views/sidebar.html
+++ b/data/ui/app/views/sidebar.html
@@ -3,4 +3,8 @@
<li ng-repeat="test in test_list" ng-class="{'active': testSelected(test.id)}">
<a href="#/test/{{test.id}}">{{test.name}}</a>
</li>
+
+ <li class="nav-header">Configuration</li>
+ <li><a href="#/inputs">Inputs</a></li>
+ <li><a href="#/settings">Settings</a></li>
</ul>
diff --git a/data/ui/app/views/test-list.html b/data/ui/app/views/test-list.html
deleted file mode 100644
index ffcf3fb..0000000
--- a/data/ui/app/views/test-list.html
+++ /dev/null
@@ -1,38 +0,0 @@
-<div class="row">
- <div class="span8">
- <h2>{{testDetails.name}}</h2>
- <div class="netTest" ng-controller="TestBoxCtrl">
- version: <span class="badge badge-success">{{testDetails.version}}</span>
- <p>{{testDetails.description}}</p>
- <form name="testOptions">
- <div ng-repeat="(name, options) in testDetails.arguments">
- <div ng-switch on="options.type">
-
- <div ng-switch-when="file">
- <label>{{name}}</label>
- <input type="file" name="{{name}}">
- </div>
-
- <div ng-switch-default>
- <label>{{name}}</label>
- <input ng-model="testDetails.arguments[name].value" type="{{options.type}}"
- value="{{options.default}}">
- </div>
-
- </div>
- </div>
- </form>
- <button class="btn btn-primary" ng-click="startTest()">Start Test</button>
- </div>
- </div>
-</div>
-
-<div class="row">
- <div class="span8">
- <h3>Test results</h3>
- <div class="testResult" ng-repeat="result in testDetails.results">
- <h4>{{result.name}}</h4>
- <pre>{{result.content}}</pre>
- </div>
- </div>
-</div>
diff --git a/data/ui/app/views/test-status.html b/data/ui/app/views/test-status.html
deleted file mode 100644
index a457119..0000000
--- a/data/ui/app/views/test-status.html
+++ /dev/null
@@ -1,7 +0,0 @@
-<h2>Test Status</h2>
-<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
-tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
-quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
-consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse
-cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non
-proident, sunt in culpa qui officia deserunt mollit anim id est laborum.</p>
diff --git a/data/ui/app/views/test.html b/data/ui/app/views/test.html
new file mode 100644
index 0000000..f92461a
--- /dev/null
+++ b/data/ui/app/views/test.html
@@ -0,0 +1,42 @@
+<div class="row">
+ <div class="span8">
+ <h2>{{testDetails.name}}</h2>
+ <div class="netTest" ng-controller="TestBoxCtrl">
+ version: <span class="badge badge-success">{{testDetails.version}}</span>
+ <p>{{testDetails.description}}</p>
+ <form name="testOptions">
+ <div ng-repeat="(name, options) in testDetails.arguments">
+ <div ng-switch on="options.type">
+
+ <div ng-switch-when="file">
+ <label>{{name}}</label>
+ <select ng-model="testDetails.arguments[name].value">
+ <option ng-repeat="input in inputs" value="input.filename">{{input.filename}}</option>
+ </select>
+ </div>
+
+ <div ng-switch-default>
+ <label>{{name}}</label>
+ <input ng-model="testDetails.arguments[name].value" type="{{options.type}}"
+ value="{{options.default}}">
+ </div>
+
+ </div>
+ </div>
+ </form>
+ <button class="btn btn-primary" ng-click="startTest()">Start Test</button>
+ </div>
+ </div>
+</div>
+
+<div class="row">
+ <div class="span8">
+ <h3>Test results</h3>
+ <button class="btn" ng-click="updateTestStatus()">
+ <i class="icon-refresh"></i>Reload</button>
+ <div ng-repeat="result in testDetails.results">
+ <h4>{{result.name}}</h4>
+ <pre class="testResult">{{result.content}}</pre>
+ </div>
+ </div>
+</div>
diff --git a/ooni/api/spec.py b/ooni/api/spec.py
index ec4cf4b..39df2fd 100644
--- a/ooni/api/spec.py
+++ b/ooni/api/spec.py
@@ -48,14 +48,15 @@ class Inputs(ORequestHandler):
self.write(input_list)
def post(self):
- filename = self.get_argument("fullname", None)
+ input_file = self.request.files.get("file")[0]
+ filename = input_file['filename']
+
if not filename or not re.match('(\w.*\.\w.*).*', filename):
raise InvalidInputFilename
if os.path.exists(filename):
raise FilenameExists
- input_file = self.request.files.get("input_file")
content_type = input_file["content_type"]
body = input_file["body"]
@@ -120,6 +121,7 @@ def get_test_results(test_id):
test_content = ''.join(f.readlines())
test_results.append({'name': test_result,
'content': test_content})
+ test_results.reverse()
return test_results
class TestStatus(ORequestHandler):
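The reworked Inputs.post handler reads the upload out of self.request.files. In cyclone, as in tornado, multipart uploads are exposed there as a dict keyed by the form field name, each value being a list of parts carrying the client-side filename, the raw body and the content type. A small illustrative helper (the field name "file" matches the form in data/ui/app/views/inputs.html; the helper itself is not part of this commit):

def first_uploaded_part(request, field="file"):
    # request.files: {field_name: [{"filename": ..., "body": ..., "content_type": ...}, ...]}
    part = request.files[field][0]
    return part["filename"], part["body"], part["content_type"]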
commit 3b889359329f6a90fd6d5538f6bf4e417cdf9b00
Author: Arturo Filastò <art(a)fuffa.org>
Date: Mon Apr 29 13:17:37 2013 +0200
Improve setup.py, fixup paths for reporting.
---
data/ooniprobe.conf.sample | 13 +++++-----
decks/before_i_commit.testdeck | 8 +++---
ooni/__init__.py | 2 +-
ooni/api/spec.py | 55 +++++++++++++++++++++++++++++++++++++---
ooni/nettest.py | 8 ++++--
ooni/reporter.py | 6 +++++
setup.py | 17 ++++++++++---
7 files changed, 89 insertions(+), 20 deletions(-)
diff --git a/data/ooniprobe.conf.sample b/data/ooniprobe.conf.sample
index 5528199..2b79d3e 100644
--- a/data/ooniprobe.conf.sample
+++ b/data/ooniprobe.conf.sample
@@ -4,7 +4,7 @@
basic:
# Where OONIProbe should be writing it's log file
- logfile: ooniprobe.log
+ logfile: /var/log/ooniprobe.log
privacy:
# Should we include the IP address of the probe in the report?
includeip: false
@@ -18,14 +18,13 @@ privacy:
includepcap: false
reports:
# This is a packet capture file (.pcap) to load as a test:
- pcap: Null
+ pcap: null
+ collector: 'httpo://nkvphnp3p6agi5qq.onion'
advanced:
- # XXX change this to point to the directory where you have stored the GeoIP
- # database file. This should be the directory in which OONI is installed
- # /path/to/ooni-probe/data/
- geoip_data_dir: /usr/share/GeoIP/
+ geoip_data_dir: /usr/share/ooni/
debug: true
- tor_binary: /usr/sbin/tor
+ # enable if auto detection fails
+ #tor_binary: /usr/sbin/tor
# For auto detection
interface: auto
# Of specify a specific interface
diff --git a/decks/before_i_commit.testdeck b/decks/before_i_commit.testdeck
index d1b4062..1159a0d 100644
--- a/decks/before_i_commit.testdeck
+++ b/decks/before_i_commit.testdeck
@@ -5,7 +5,7 @@
pcapfile: null
reportfile: reports/captive_portal_test.yamloo
subargs: []
- test_file: nettests/manipulation/captiveportal.py
+ test_file: data/nettests/manipulation/captiveportal.py
- options:
collector: null
help: 0
@@ -13,7 +13,7 @@
pcapfile: null
reportfile: reports/dns_tamper_test.yamloo
subargs: [-T, example_inputs/dns_tamper_test_resolvers.txt, -f, example_inputs/dns_tamper_file.txt]
- test_file: nettests/blocking/dnsconsistency.py
+ test_file: data/nettests/blocking/dnsconsistency.py
- options:
collector: null
help: 0
@@ -21,7 +21,7 @@
pcapfile: null
reportfile: reports/http_host.yamloo
subargs: [-b, 'http://93.95.227.200', -f, example_inputs/http_host_file.txt]
- test_file: nettests/manipulation/http_host.py
+ test_file: data/nettests/manipulation/http_host.py
- options:
collector: null
help: 0
@@ -29,4 +29,4 @@
pcapfile: null
reportfile: reports/header_field_manipulation.yamloo
subargs: [-b, 'http://93.95.227.200']
- test_file: nettests/manipulation/http_header_field_manipulation.py
+ test_file: data/nettests/manipulation/http_header_field_manipulation.py
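The deck file patched above is a YAML list of entries, each holding an "options" mapping with the test_file, subargs and reportfile for one test run. A sketch of reading such a deck (key names follow the deck shown here; the loader itself is illustrative and is not ooni's deck handling code):

import yaml

def iter_deck(path):
    with open(path) as f:
        deck = yaml.safe_load(f)
    for entry in deck:
        opts = entry["options"]
        yield opts["test_file"], opts.get("subargs", []), opts.get("reportfile")

# for test_file, subargs, reportfile in iter_deck("decks/before_i_commit.testdeck"):
#     ...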
diff --git a/ooni/__init__.py b/ooni/__init__.py
index 815e16e..1810a7f 100644
--- a/ooni/__init__.py
+++ b/ooni/__init__.py
@@ -7,7 +7,7 @@ from . import templates
from . import utils
__author__ = "Arturo Filastò"
-__version__ = "0.0.11"
+__version__ = "0.0.12"
__all__ = ['config', 'inputunit', 'kit',
'lib', 'nettest', 'oonicli', 'reporter',
diff --git a/ooni/api/spec.py b/ooni/api/spec.py
index 39df2fd..071beaf 100644
--- a/ooni/api/spec.py
+++ b/ooni/api/spec.py
@@ -7,7 +7,7 @@ import types
from twisted.python import usage
from cyclone import web, escape
-from ooni.reporter import YAMLReporter, OONIBReporter
+from ooni.reporter import YAMLReporter, OONIBReporter, collector_supported
from ooni import errors
from ooni.nettest import NetTestLoader, MissingRequiredOption
from ooni.settings import config
@@ -43,11 +43,21 @@ def list_inputs():
return input_list
class Inputs(ORequestHandler):
+ """
+ This handler is responsible for listing and adding new inputs.
+ """
def get(self):
+ """
+ Obtain the list of currently installed inputs. Inputs are stored inside
+ of $OONI_HOME/inputs/.
+ """
input_list = list_inputs()
self.write(input_list)
def post(self):
+ """
+ Add a new input to the currently installed inputs.
+ """
input_file = self.request.files.get("file")[0]
filename = input_file['filename']
@@ -72,6 +82,15 @@ class ListTests(ORequestHandler):
self.write(test_list)
def get_net_test_loader(test_options, test_file):
+ """
+ Args:
+ test_options: (dict) containing as keys the option names.
+
+ test_file: (string) the path to the test_file to be run.
+ Returns:
+ an instance of :class:`ooni.nettest.NetTestLoader` with the specified
+ test_file and the specified options.
+ """
options = []
for k, v in test_options.items():
options.append('--'+k)
@@ -82,10 +101,26 @@ def get_net_test_loader(test_options, test_file):
return net_test_loader
def get_reporters(net_test_loader):
+ """
+ Determines which reports are able to run and returns an instance of them.
+
+ We always report to flat file via the :class:`ooni.reporters.YAMLReporter`
+ and the :class:`ooni.reporters.OONIBReporter`.
+
+ The later will be used only if we determine that Tor is running.
+
+ Returns:
+ a list of reporter instances
+ """
test_details = net_test_loader.testDetails
+ reporters = []
yaml_reporter = YAMLReporter(test_details, config.reports_directory)
- #oonib_reporter = OONIBReporter(test_details, collector)
- return [yaml_reporter]
+ reporters.append(yaml_reporter)
+
+ if config.reports.collector and collector_supported(config.reports.collector):
+ oonib_reporter = OONIBReporter(test_details, collector)
+ reporters.append(oonib_reporter)
+ return reporters
class StartTest(ORequestHandler):
def post(self, test_name):
@@ -114,6 +149,16 @@ class StopTest(ORequestHandler):
pass
def get_test_results(test_id):
+ """
+ Returns:
+ a list of test dicts that correspond to the test results for the given
+ test_id.
+ The dict is made like so:
+ {
+ 'name': The name of the report,
+ 'content': The content of the report
+ }
+ """
test_results = []
for test_result in os.listdir(config.reports_directory):
if test_result.startswith('report-'+test_id):
@@ -126,6 +171,10 @@ def get_test_results(test_id):
class TestStatus(ORequestHandler):
def get(self, test_id):
+ """
+ Returns the requested test_id details and the stored results for such
+ test.
+ """
try:
test = copy.deepcopy(oonidApplication.director.netTests[test_id])
test.pop('path')
diff --git a/ooni/nettest.py b/ooni/nettest.py
index 964bee3..66639f8 100644
--- a/ooni/nettest.py
+++ b/ooni/nettest.py
@@ -238,6 +238,10 @@ class NetTestLoader(object):
if not hasattr(usage_options, 'optParameters'):
usage_options.optParameters = []
+ else:
+ for parameter in usage_options.optParameters:
+ if len(parameter) == 5:
+ parameter.pop()
if klass.inputFile:
usage_options.optParameters.append(klass.inputFile)
@@ -267,8 +271,8 @@ class NetTestLoader(object):
def loadNetTestString(self, net_test_string):
"""
Load NetTest from a string.
- WARNING input to this function *MUST* be sanitized and *NEVER* be
- untrusted.
+ WARNING input to this function *MUST* be sanitized and *NEVER* take
+ untrusted input.
Failure to do so will result in code exec.
net_test_string:
diff --git a/ooni/reporter.py b/ooni/reporter.py
index 109eccf..1095cb1 100644
--- a/ooni/reporter.py
+++ b/ooni/reporter.py
@@ -227,6 +227,12 @@ class YAMLReporter(OReporter):
def finish(self):
self._stream.close()
+def collector_supported(collector_address):
+ if collector_address.startswith('httpo') \
+ and (not (config.tor_state or config.tor.socks_port)):
+ return False
+ return True
+
class OONIBReporter(OReporter):
def __init__(self, test_details, collector_address):
self.collectorAddress = collector_address
diff --git a/setup.py b/setup.py
index 41b9050..454984d 100644
--- a/setup.py
+++ b/setup.py
@@ -1,7 +1,9 @@
#!/usr/bin/env python
#-*- coding: utf-8 -*-
-from setuptools import setup
+import os
+import sys
+from distutils.core import setup
install_requires = [
'txsocksx>=0.0.2',
@@ -16,6 +18,14 @@ dependency_links = [
'https://github.com/hellais/pypcap/archive/v1.1.1.tar.gz#egg=pypcap-1.1.1'
]
+files = []
+for root, dirs, file_names in os.walk('data/'):
+ for file_name in file_names:
+ if not file_name.endswith('.pyc'):
+ files.append(os.path.join(root, file_name))
+
+data_files = [('/usr/share/ooni/', files)]
+
with open('requirements.txt') as f:
for line in f:
if line.startswith("#") or line.startswith('http'):
@@ -24,12 +34,13 @@ with open('requirements.txt') as f:
setup(
name="ooni-probe",
- version="0.0.11",
+ version="0.0.12",
author="Arturo Filastò",
author_email = "art(a)torproject.org",
url="https://ooni.torproject.org/",
package_dir={'ooni': 'ooni'},
- packages=['ooni', 'ooni.templates', 'ooni.utils'],
+ data_files=data_files,
+ packages=['ooni', 'ooni.api', 'ooni.templates', 'ooni.tests', 'ooni.utils'],
scripts=["bin/ooniprobe"],
dependency_links=dependency_links,
install_requires=install_requires,
commit c7f65ae7df71c469f9cf5db126f12bf1c11c49da
Author: Arturo Filastò <art(a)fuffa.org>
Date: Tue Apr 23 16:59:24 2013 +0200
Improve configuration management
* Install ooniprobe settings in the users home directory (~/.ooni)
* Define the paths for ooniprobe data directories
---
ooni/__init__.py | 2 -
ooni/api/spec.py | 2 +-
ooni/config.py | 98 -------------------------------------------
ooni/director.py | 15 ++++---
ooni/geoip.py | 3 +-
ooni/managers.py | 18 +++++---
ooni/nettest.py | 2 +-
ooni/oonicli.py | 11 ++++-
ooni/oonid.py | 2 +-
ooni/reporter.py | 2 +-
ooni/settings.py | 92 ++++++++++++++++++++++++++++++++++++++++
ooni/tasks.py | 10 ++---
ooni/templates/httpt.py | 2 +-
ooni/templates/scapyt.py | 2 +-
ooni/tests/mocks.py | 2 +-
ooni/tests/test_managers.py | 8 ++++
ooni/tests/test_nettest.py | 5 ++-
ooni/utils/geodata.py | 2 +-
ooni/utils/log.py | 2 +-
ooni/utils/txscapy.py | 2 +-
20 files changed, 149 insertions(+), 133 deletions(-)
diff --git a/ooni/__init__.py b/ooni/__init__.py
index cc4bf03..815e16e 100644
--- a/ooni/__init__.py
+++ b/ooni/__init__.py
@@ -1,7 +1,5 @@
# -*- encoding: utf-8 -*-
-from . import config
-from . import kit
from . import nettest
from . import oonicli
from . import reporter
diff --git a/ooni/api/spec.py b/ooni/api/spec.py
index af238f4..5b538b2 100644
--- a/ooni/api/spec.py
+++ b/ooni/api/spec.py
@@ -5,7 +5,7 @@ import types
from cyclone import web, escape
-from ooni import config
+from ooni.settings import config
class InvalidInputFilename(Exception):
pass
diff --git a/ooni/config.py b/ooni/config.py
deleted file mode 100644
index 5aeb49d..0000000
--- a/ooni/config.py
+++ /dev/null
@@ -1,98 +0,0 @@
-import os
-import yaml
-
-from twisted.internet import reactor, threads, defer
-
-from ooni import otime
-from ooni.utils import Storage
-
-class TestFilenameNotSet(Exception):
- pass
-
-def get_root_path():
- this_directory = os.path.dirname(__file__)
- root = os.path.join(this_directory, '..')
- root = os.path.abspath(root)
- return root
-
-def createConfigFile():
- """
- XXX implement me
- """
- sample_config_file = os.path.join(get_root_path(), 'ooniprobe.conf.sample')
-
-def generatePcapFilename():
- if cmd_line_options['pcapfile']:
- reports.pcap = cmd_line_options['pcapfile']
- else:
- if cmd_line_options['test']:
- test_filename = os.path.basename(cmd_line_options['test'])
- else:
- test_filename = os.path.basename(cmd_line_options['testdeck'])
-
- test_name = '.'.join(test_filename.split(".")[:-1])
- frm_str = "report_%s_"+otime.timestamp()+".%s"
- reports.pcap = frm_str % (test_name, "pcap")
-
-class ConfigurationSetting(Storage):
- def __init__(self, key):
- config_file = os.path.join(get_root_path(), 'ooniprobe.conf')
- try:
- f = open(config_file)
- except IOError:
- createConfigFile()
- raise Exception("Unable to open config file. "\
- "Copy ooniprobe.conf.sample to ooniprobe.conf")
-
- config_file_contents = '\n'.join(f.readlines())
- configuration = yaml.safe_load(config_file_contents)
-
- try:
- for k, v in configuration[key].items():
- self[k] = v
- except AttributeError:
- pass
-
-basic = ConfigurationSetting('basic')
-advanced = ConfigurationSetting('advanced')
-privacy = ConfigurationSetting('privacy')
-tor = ConfigurationSetting('tor')
-
-data_directory = os.path.join(get_root_path(), 'data')
-nettest_directory = os.path.join(get_root_path(), 'nettests')
-inputs_directory = os.path.join(get_root_path(), 'inputs')
-
-reports = Storage()
-state = Storage()
-scapyFactory = None
-stateDict = None
-
-# XXX refactor this to use a database
-resume_lock = defer.DeferredLock()
-
-cmd_line_options = None
-resume_filename = None
-
-# XXX-Twisted this is used to check if we have started the reactor or not. It
-# is necessary because if the tests are already concluded because we have
-# resumed a test session then it will call reactor.run() even though there is
-# no condition that will ever stop it.
-# There should be a more twisted way of doing this.
-start_reactor = True
-tor_state = None
-tor_control = None
-config_file = None
-sample_config_file = None
-# This is used to store the probes IP address obtained via Tor
-probe_ip = None
-# This is used to keep track of the state of the sniffer
-sniffer_running = None
-
-logging = True
-
-if not resume_filename:
- resume_filename = os.path.join(get_root_path(), 'ooniprobe.resume')
- try:
- with open(resume_filename) as f: pass
- except IOError as e:
- with open(resume_filename, 'w+') as f: pass
diff --git a/ooni/director.py b/ooni/director.py
index 5f23668..1ef878c 100644
--- a/ooni/director.py
+++ b/ooni/director.py
@@ -3,13 +3,13 @@ import sys
import os
import re
-from ooni import config
from ooni import geoip
from ooni.managers import ReportEntryManager, MeasurementManager
from ooni.reporter import Report
from ooni.utils import log, checkForRoot
from ooni.utils.net import randomFreePort
-from ooni.nettest import NetTest
+from ooni.nettest import NetTest, getNetTestInformation
+from ooni.settings import config
from ooni import errors
from txtorcon import TorConfig
@@ -85,6 +85,10 @@ class Director(object):
self.torControlProtocol = None
+ # This deferred is fired once all the measurements and their reporting
+ # tasks are completed.
+ self.allTestsDone = defer.Deferred()
+
def getNetTests(self):
nettests = {}
def is_nettest(filename):
@@ -108,16 +112,12 @@ class Director(object):
return nettests
- # This deferred is fired once all the measurements and their reporting
- # tasks are completed.
- self.allTestsDone = defer.Deferred()
-
@defer.inlineCallbacks
def start(self):
if config.privacy.includepcap:
log.msg("Starting")
if not config.reports.pcap:
- config.reports.pcap = config.generatePcapFilename()
+ config.generate_pcap_filename()
self.startSniffing()
if config.advanced.start_tor:
@@ -324,7 +324,6 @@ class Director(object):
log.debug("Setting SOCKS port as %s" % tor_config.SocksPort)
d = launch_tor(tor_config, reactor,
- tor_binary=config.advanced.tor_binary,
progress_updates=updates)
d.addCallback(setup_complete)
d.addErrback(setup_failed)
diff --git a/ooni/geoip.py b/ooni/geoip.py
index 1e6f6e9..c1987fc 100644
--- a/ooni/geoip.py
+++ b/ooni/geoip.py
@@ -7,7 +7,8 @@ from ooni.utils.net import userAgents, BodyReceiver
from twisted.internet import reactor, defer, protocol
from ooni.utils import log, net, checkForRoot
-from ooni import config, errors
+from ooni.settings import config
+from ooni import errors
try:
from pygeoip import GeoIP
diff --git a/ooni/managers.py b/ooni/managers.py
index 9f8366f..ff7c2f2 100644
--- a/ooni/managers.py
+++ b/ooni/managers.py
@@ -2,7 +2,7 @@ import itertools
from twisted.internet import defer
from ooni.utils import log
-from ooni import config
+from ooni.settings import config
def makeIterable(item):
"""
@@ -141,8 +141,12 @@ class MeasurementManager(TaskManager):
NetTest on the contrary is aware of the typology of measurements that it is
dispatching as they are logically grouped by test file.
"""
- retries = config.advanced.measurement_retries
- concurrency = config.advanced.measurement_concurrency
+ def __init__(self):
+ if config.advanced.measurement_retries:
+ self.retries = config.advanced.measurement_retries
+ if config.advanced.measurement_concurrency:
+ self.concurrency = config.advanced.measurement_concurrency
+ super(MeasurementManager, self).__init__()
def succeeded(self, result, measurement):
log.debug("Successfully performed measurement %s" % measurement)
@@ -152,8 +156,12 @@ class MeasurementManager(TaskManager):
pass
class ReportEntryManager(TaskManager):
- retries = config.advanced.reporting_retries
- concurrency = config.advanced.reporting_concurrency
+ def __init__(self):
+ if config.advanced.reporting_retries:
+ self.retries = config.advanced.reporting_retries
+ if config.advanced.reporting_concurrency:
+ self.concurrency = config.advanced.reporting_concurrency
+ super(ReportEntryManager, self).__init__()
def succeeded(self, result, task):
log.debug("Successfully performed report %s" % task)
diff --git a/ooni/nettest.py b/ooni/nettest.py
index d9bc94d..0d0e889 100644
--- a/ooni/nettest.py
+++ b/ooni/nettest.py
@@ -8,8 +8,8 @@ from twisted.python import usage, reflect
from ooni import geoip
from ooni.tasks import Measurement
from ooni.utils import log, checkForRoot, geodata
-from ooni import config
from ooni import otime
+from ooni.settings import config
from ooni import errors as e
diff --git a/ooni/oonicli.py b/ooni/oonicli.py
index b5e8e27..40453b1 100644
--- a/ooni/oonicli.py
+++ b/ooni/oonicli.py
@@ -12,7 +12,7 @@ from twisted.python.util import spewer
from ooni import errors
-from ooni import config
+from ooni.settings import config
from ooni.director import Director
from ooni.reporter import YAMLReporter, OONIBReporter
from ooni.nettest import NetTestLoader, MissingRequiredOption
@@ -40,6 +40,10 @@ class Options(usage.Options):
["logfile", "l", None, "log file name"],
["pcapfile", "O", None, "pcap file name"],
["parallelism", "p", "10", "input parallelism"],
+ ["configfile", "f", None,
+ "Specify a path to the ooniprobe configuration file"],
+ ["datadir", "d", None,
+ "Specify a path to the ooniprobe data directory"]
]
compData = usage.Completions(
@@ -100,8 +104,11 @@ def runWithDirector():
test!
"""
global_options = parseOptions()
- log.start(global_options['logfile'])
+ config.global_options = global_options
+ config.set_paths()
+ config.read_config_file()
+ log.start(global_options['logfile'])
# contains (test_cases, options, cmd_line_options)
test_list = []
if global_options['no-collector']:
diff --git a/ooni/oonid.py b/ooni/oonid.py
index dde768e..cc71d47 100644
--- a/ooni/oonid.py
+++ b/ooni/oonid.py
@@ -4,7 +4,7 @@ import random
from twisted.application import service, internet
from twisted.web import static, server
-from ooni import config
+from ooni.settings import config
from ooni.api.spec import oonidApplication
from ooni.director import Director
from ooni.reporter import YAMLReporter, OONIBReporter
diff --git a/ooni/reporter.py b/ooni/reporter.py
index b04b46b..109eccf 100644
--- a/ooni/reporter.py
+++ b/ooni/reporter.py
@@ -32,7 +32,7 @@ from ooni import otime
from ooni.utils import geodata, pushFilenameStack
from ooni.utils.net import BodyReceiver, StringProducer, userAgents
-from ooni import config
+from ooni.settings import config
from ooni.tasks import ReportEntry, TaskTimedOut
diff --git a/ooni/settings.py b/ooni/settings.py
new file mode 100644
index 0000000..acb7502
--- /dev/null
+++ b/ooni/settings.py
@@ -0,0 +1,92 @@
+import os
+import yaml
+from shutil import copyfile
+from os.path import abspath, expanduser
+
+from twisted.internet import reactor, threads, defer
+
+from ooni import otime
+from ooni.utils import Storage
+
+class OConfig(object):
+ def __init__(self):
+ self.global_options = {}
+ self.reports = Storage()
+ self.scapyFactory = None
+ self.tor_state = None
+ # This is used to store the probes IP address obtained via Tor
+ self.probe_ip = None
+ # This is used to keep track of the state of the sniffer
+ self.sniffer_running = None
+ self.logging = True
+ self.basic = Storage()
+ self.advanced = Storage()
+ self.tor = Storage()
+ self.privacy = Storage()
+ self.set_paths()
+ self.initialize_ooni_home()
+
+ def set_paths(self):
+ if self.global_options.get('datadir'):
+ self.data_directory = abspath(expanduser(self.global_options['datadir']))
+ else:
+ self.data_directory = '/usr/share/ooni/'
+ self.nettest_directory = os.path.join(self.data_directory, 'nettests')
+
+ self.ooni_home = os.path.join(expanduser('~'), '.ooni')
+ self.inputs_directory = os.path.join(self.ooni_home, 'inputs')
+
+ if self.global_options.get('configfile'):
+ config_file = global_options['configfile']
+ else:
+ config_file = os.path.join('~', '.ooni', 'ooniprobe.conf')
+ self.config_file = expanduser(config_file)
+
+ def initialize_ooni_home(self):
+ if not os.path.isdir(self.ooni_home):
+ print "Ooni home directory does not exist."
+ print "Creating it in '%s'." % self.ooni_home
+ os.mkdir(self.ooni_home)
+ os.mkdir(self.inputs_directory)
+
+ def _create_config_file(self):
+ sample_config_file = os.path.join(self.data_directory,
+ 'ooniprobe.conf.sample')
+ target_config_file = os.path.join(self.ooni_home,
+ 'ooniprobe.conf')
+ print "Creating it for you in '%s'." % target_config_file
+ copyfile(sample_config_file, target_config_file)
+
+ def read_config_file(self):
+ try:
+ with open(self.config_file) as f: pass
+ except IOError:
+ print "Configuration file does not exist."
+ self._create_config_file()
+ self.read_config_file()
+
+ with open(self.config_file) as f:
+ config_file_contents = '\n'.join(f.readlines())
+ configuration = yaml.safe_load(config_file_contents)
+
+ for setting in ['basic', 'advanced', 'privacy', 'tor']:
+ try:
+ for k, v in configuration[setting].items():
+ getattr(self, setting)[k] = v
+ except AttributeError:
+ pass
+
+ def generate_pcap_filename():
+ if self.global_options.get('pcapfile'):
+ self.reports.pcap = self.global_options['pcapfile']
+ else:
+ if self.global_options.get('test'):
+ test_filename = os.path.basename(self.global_options['test'])
+ else:
+ test_filename = os.path.basename(self.global_options['testdeck'])
+
+ test_name = '.'.join(test_filename.split(".")[:-1])
+ frm_str = "report_%s_"+otime.timestamp()+".%s"
+ self.reports.pcap = frm_str % (test_name, "pcap")
+
+config = OConfig()
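The new settings module is meant to be driven in the order used by oonicli.runWithDirector above: store the parsed command line options, derive the paths, then read the configuration file. A short usage sketch (the option values are illustrative assumptions):

from ooni.settings import config

config.global_options = {"datadir": "/usr/share/ooni/", "configfile": None}
config.set_paths()          # derive the data, nettest and inputs directories
config.read_config_file()   # fill config.basic / advanced / privacy / tor from YAML

if config.advanced.debug:
    print "debug logging is enabled"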
diff --git a/ooni/tasks.py b/ooni/tasks.py
index 829d11e..f686a9c 100644
--- a/ooni/tasks.py
+++ b/ooni/tasks.py
@@ -1,6 +1,6 @@
import time
-from ooni import config
+from ooni.settings import config
from twisted.internet import defer, reactor
class BaseTask(object):
@@ -91,8 +91,6 @@ class TaskWithTimeout(BaseTask):
return BaseTask.start(self)
class Measurement(TaskWithTimeout):
- timeout = config.advanced.measurement_timeout
-
def __init__(self, test_class, test_method, test_input):
"""
test_class:
@@ -117,6 +115,8 @@ class Measurement(TaskWithTimeout):
self.netTestMethod = getattr(self.testInstance, test_method)
+ if config.advanced.measurement_timeout:
+ self.timeout = config.advanced.measurement_timeout
TaskWithTimeout.__init__(self)
def succeeded(self, result):
@@ -130,12 +130,12 @@ class Measurement(TaskWithTimeout):
return d
class ReportEntry(TaskWithTimeout):
- timeout = config.advanced.reporting_timeout
-
def __init__(self, reporter, measurement):
self.reporter = reporter
self.measurement = measurement
+ if config.advanced.reporting_timeout:
+ self.timeout = config.advanced.reporting_timeout
TaskWithTimeout.__init__(self)
def run(self):
diff --git a/ooni/templates/httpt.py b/ooni/templates/httpt.py
index e8891c7..0bca5df 100644
--- a/ooni/templates/httpt.py
+++ b/ooni/templates/httpt.py
@@ -13,7 +13,7 @@ from twisted.web._newclient import Request, Response, ResponseNeverReceived
from ooni.nettest import NetTestCase
from ooni.utils import log
-from ooni import config
+from ooni.settings import config
from ooni.utils.net import BodyReceiver, StringProducer, userAgents
diff --git a/ooni/templates/scapyt.py b/ooni/templates/scapyt.py
index d5d6564..fdc5a24 100644
--- a/ooni/templates/scapyt.py
+++ b/ooni/templates/scapyt.py
@@ -8,7 +8,7 @@ from scapy.all import send, sr, IP, TCP, config
from ooni.reporter import createPacketReport
from ooni.nettest import NetTestCase
from ooni.utils import log
-from ooni import config
+from ooni.settings import config
from ooni.utils.txscapy import ScapySender, getDefaultIface, ScapyFactory
from ooni.utils.txscapy import hasRawSocketPermission
diff --git a/ooni/tests/mocks.py b/ooni/tests/mocks.py
index 4c4a015..f849344 100644
--- a/ooni/tests/mocks.py
+++ b/ooni/tests/mocks.py
@@ -1,7 +1,7 @@
from twisted.python import failure
from twisted.internet import defer
-from ooni import config
+from ooni.settings import config
from ooni.tasks import BaseTask, TaskWithTimeout
from ooni.nettest import NetTest
from ooni.managers import TaskManager
diff --git a/ooni/tests/test_managers.py b/ooni/tests/test_managers.py
index e2af7b3..c290155 100644
--- a/ooni/tests/test_managers.py
+++ b/ooni/tests/test_managers.py
@@ -1,3 +1,5 @@
+import os
+
from twisted.trial import unittest
from twisted.python import failure
from twisted.internet import defer, task
@@ -11,6 +13,8 @@ from ooni.tests.mocks import MockTimeoutOnceTask, MockFailTaskWithTimeout
from ooni.tests.mocks import MockTaskManager, mockFailure, MockDirector
from ooni.tests.mocks import MockNetTest, MockMeasurement, MockSuccessMeasurement
from ooni.tests.mocks import MockFailMeasurement, MockFailOnceMeasurement
+from ooni.settings import config
+
class TestTaskManager(unittest.TestCase):
timeout = 1
@@ -22,6 +26,10 @@ class TestTaskManager(unittest.TestCase):
self.measurementManager.start()
self.clock = task.Clock()
+ data_dir = os.path.dirname(os.path.abspath(__file__))
+ data_dir = os.path.join(data_dir, '..', '..', 'data')
+ config.global_options['datadir'] = data_dir
+ config.set_paths()
def schedule_successful_tasks(self, task_type, number=1):
all_done = []
diff --git a/ooni/tests/test_nettest.py b/ooni/tests/test_nettest.py
index 3b59cc4..77b8a1c 100644
--- a/ooni/tests/test_nettest.py
+++ b/ooni/tests/test_nettest.py
@@ -11,7 +11,6 @@ from ooni.nettest import NetTestLoader, FailureToLoadNetTest
from ooni.tasks import BaseTask
from ooni.director import Director
-
from ooni.managers import TaskManager
from ooni.tests.mocks import MockMeasurement, MockMeasurementFailOnce
@@ -100,6 +99,9 @@ class TestNetTest(unittest.TestCase):
for i in range(10):
f.write("%s\n" % i)
+ from ooni.settings import config
+ config.read_config_file()
+
def assertCallable(self, thing):
self.assertIn('__call__', dir(thing))
@@ -226,7 +228,6 @@ class TestNetTest(unittest.TestCase):
@d.addCallback
def complete(result):
- print "IN here y0"
self.assertEqual(result, None)
self.assertEqual(director.successfulMeasurements, 20)
diff --git a/ooni/utils/geodata.py b/ooni/utils/geodata.py
index 2acfdb0..c8a9a3a 100644
--- a/ooni/utils/geodata.py
+++ b/ooni/utils/geodata.py
@@ -5,8 +5,8 @@ from twisted.web.client import Agent
from twisted.internet import reactor, defer, protocol
from ooni.utils import log, net
-from ooni import config
from ooni.errors import GeoIPDataFilesNotFound
+from ooni.settings import config
try:
import pygeoip
diff --git a/ooni/utils/log.py b/ooni/utils/log.py
index 141116e..067d6a6 100644
--- a/ooni/utils/log.py
+++ b/ooni/utils/log.py
@@ -9,7 +9,7 @@ from twisted.python.failure import Failure
from twisted.python.logfile import DailyLogFile
from ooni import otime
-from ooni import config
+from ooni.settings import config
## Get rid of the annoying "No route found for
## IPv6 destination warnings":
diff --git a/ooni/utils/txscapy.py b/ooni/utils/txscapy.py
index 80dd1c2..e02de60 100644
--- a/ooni/utils/txscapy.py
+++ b/ooni/utils/txscapy.py
@@ -12,7 +12,7 @@ from zope.interface import implements
from scapy.config import conf
from ooni.utils import log
-from ooni import config
+from ooni.settings import config
class LibraryNotInstalledError(Exception):
pass
[ooni-probe/develop] Implement in-browser input file selection views
by isis@torproject.org 26 Jun '13
commit 13a65fd6adb798f6c2bdcc4584a5cff52e694d7c
Author: Arturo Filastò <art(a)fuffa.org>
Date: Mon Apr 29 18:54:01 2013 +0200
Implement in-browser input file selection views
This will allow the user to copy and paste the input file into a textarea.
---
data/ui/app/scripts/controllers.js | 28 ++++++++++++++++++++++++++++
data/ui/app/views/test.html | 12 +++++++++---
ooni/api/spec.py | 33 +++++++++++++++++++++++++++++++--
ooni/nettest.py | 5 ++++-
4 files changed, 72 insertions(+), 6 deletions(-)
diff --git a/data/ui/app/scripts/controllers.js b/data/ui/app/scripts/controllers.js
index 5ba24e1..c2a9c0d 100644
--- a/data/ui/app/scripts/controllers.js
+++ b/data/ui/app/scripts/controllers.js
@@ -49,7 +49,17 @@ ooniprobe.controller('TestCtrl', ['$scope', '$routeParams', 'testStatus', 'Input
ooniprobe.controller('TestBoxCtrl', ['$scope', 'startTest',
function($scope, startTest) {
+ function hasAttributes(obj) {
+ var count = 0;
+ for (var i in obj)
+ count +=1;
+ if ( count == 0 ) {
+ return false;
+ }
+ return true;
+ }
+ $scope.manualFileInput = {};
$scope.startTest = function() {
var options = {};
@@ -58,6 +68,13 @@ ooniprobe.controller('TestBoxCtrl', ['$scope', 'startTest',
options[key] = option.value;
});
+ if (hasAttributes($scope.manualFileInput)) {
+ options['manual_input'] = {};
+ angular.forEach($scope.manualFileInput, function(value, key) {
+ options['manual_input'][key] = value;
+ });
+ }
+
startTest($scope.testDetails.id, options).success(function(){
$scope.updateTestStatus();
});
@@ -65,4 +82,15 @@ ooniprobe.controller('TestBoxCtrl', ['$scope', 'startTest',
}]);
+ooniprobe.controller('FileInput', ['$scope',
+ function($scope) {
+
+ $scope.manualShow = false;
+ $scope.toggleManualInput = function() {
+ if ($scope.manualShow)
+ $scope.manualShow = false;
+ else
+ $scope.manualShow = true;
+ }
+}]);
diff --git a/data/ui/app/views/test.html b/data/ui/app/views/test.html
index f92461a..fdd5931 100644
--- a/data/ui/app/views/test.html
+++ b/data/ui/app/views/test.html
@@ -8,11 +8,17 @@
<div ng-repeat="(name, options) in testDetails.arguments">
<div ng-switch on="options.type">
- <div ng-switch-when="file">
- <label>{{name}}</label>
- <select ng-model="testDetails.arguments[name].value">
+ <div ng-switch-when="file" ng-controller="FileInput">
+ <label>{{name}}
+ <button class="btn btn-small" ng-click="toggleManualInput()">Toggle manual input</button>
+ </label>
+ <select ng-model="testDetails.arguments[name].value" ng-hide="manualShow">
<option ng-repeat="input in inputs" value="input.filename">{{input.filename}}</option>
</select>
+ <br/>
+ <div class="manualFileInput" ng-show="manualShow">
+ <textarea ng-model="manualFileInput[name]"></textarea>
+ </div>
</div>
<div ng-switch-default>
diff --git a/ooni/api/spec.py b/ooni/api/spec.py
index 071beaf..fe18ccc 100644
--- a/ooni/api/spec.py
+++ b/ooni/api/spec.py
@@ -3,6 +3,7 @@ import re
import copy
import json
import types
+import tempfile
from twisted.python import usage
from cyclone import web, escape
@@ -122,6 +123,20 @@ def get_reporters(net_test_loader):
reporters.append(oonib_reporter)
return reporters
+def write_temporary_input(content):
+ """
+ Creates a temporary file for the given content.
+
+ Returns:
+ the path to the temporary file.
+ """
+ fd, path = tempfile.mkstemp()
+ with open(path, 'w') as f:
+ f.write(content)
+ f.close()
+ print "This is the path %s" % path
+ return fd, path
+
class StartTest(ORequestHandler):
def post(self, test_name):
"""
@@ -129,11 +144,25 @@ class StartTest(ORequestHandler):
"""
test_file = oonidApplication.director.netTests[test_name]['path']
test_options = json.loads(self.request.body)
+ tmp_files = []
+ if (test_options['manual_input']):
+ for option, content in test_options['manual_input'].items():
+ fd, path = write_temporary_input(content)
+ test_options[option] = path
+ tmp_files.append((fd, path))
+ test_options.pop('manual_input')
+
net_test_loader = get_net_test_loader(test_options, test_file)
try:
net_test_loader.checkOptions()
- oonidApplication.director.startNetTest(net_test_loader,
- get_reporters(net_test_loader))
+ d = oonidApplication.director.startNetTest(net_test_loader,
+ get_reporters(net_test_loader))
+ @d.addBoth
+ def cleanup(result):
+ for fd, path in tmp_files:
+ os.close(fd)
+ os.remove(path)
+
except MissingRequiredOption, option_name:
self.write({'error':
'Missing required option: "%s"' % option_name})
diff --git a/ooni/nettest.py b/ooni/nettest.py
index 66639f8..601fa24 100644
--- a/ooni/nettest.py
+++ b/ooni/nettest.py
@@ -132,8 +132,11 @@ def getArguments(test_class):
for opt_parameter in test_class.usageOptions.optParameters:
option_name = opt_parameter[0]
+ opt_type="text"
+ if opt_parameter[3].lower().startswith("file"):
+ opt_type="file"
arguments[option_name] = getOption(opt_parameter,
- test_class.requiredOptions)
+ test_class.requiredOptions, type=opt_type)
return arguments
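With this change the web API accepts pasted input content under a "manual_input" key, writes each value to a temporary file and substitutes the file path as the option value, removing the files once the test deferred fires. A compact sketch of that flow (helper names are illustrative; the real handler keeps the raw file descriptors and closes them in the deferred callback):

import os
import tempfile

def materialize_manual_input(test_options):
    tmp_paths = []
    for option, content in test_options.pop("manual_input", {}).items():
        fd, path = tempfile.mkstemp()
        with os.fdopen(fd, "w") as f:
            f.write(content)
        test_options[option] = path
        tmp_paths.append(path)
    return tmp_paths

def cleanup(paths):
    for path in paths:
        os.remove(path)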
commit 367e58128a9b7cb6c63897cfd24f6a246bcedd8a
Author: Arturo Filastò <art(a)fuffa.org>
Date: Tue Apr 23 16:58:59 2013 +0200
Move sample config file to data subdirectory
---
data/ooniprobe.conf.sample | 56 ++++++++++++++++++++++++++++++++++++++++++++
ooniprobe.conf.sample | 56 --------------------------------------------
2 files changed, 56 insertions(+), 56 deletions(-)
diff --git a/data/ooniprobe.conf.sample b/data/ooniprobe.conf.sample
new file mode 100644
index 0000000..5528199
--- /dev/null
+++ b/data/ooniprobe.conf.sample
@@ -0,0 +1,56 @@
+# This is the configuration file for OONIProbe
+# This file follows the YAML markup format: http://yaml.org/spec/1.2/spec.html
+# Keep in mind that indentation matters.
+
+basic:
+ # Where OONIProbe should be writing it's log file
+ logfile: ooniprobe.log
+privacy:
+ # Should we include the IP address of the probe in the report?
+ includeip: false
+ # Should we include the ASN of the probe in the report?
+ includeasn: false
+ # Should we include the country as reported by GeoIP in the report?
+ includecountry: false
+ # Should we include the city as reported by GeoIP in the report?
+ includecity: false
+ # Should we collect a full packet capture on the client?
+ includepcap: false
+reports:
+ # This is a packet capture file (.pcap) to load as a test:
+ pcap: Null
+advanced:
+ # XXX change this to point to the directory where you have stored the GeoIP
+ # database file. This should be the directory in which OONI is installed
+ # /path/to/ooni-probe/data/
+ geoip_data_dir: /usr/share/GeoIP/
+ debug: true
+ tor_binary: /usr/sbin/tor
+ # For auto detection
+ interface: auto
+ # Of specify a specific interface
+ #interface: wlan0
+ # If you do not specify start_tor, you will have to have Tor running and
+ # explicitly set the control port and SOCKS port
+ start_tor: true
+ # After how many seconds we should give up on a particular measurement
+ measurement_timeout: 30
+ # After how many retries we should give up on a measurement
+ measurement_retries: 2
+ # How many measurments to perform concurrently
+ measurement_concurrency: 100
+ # After how many seconds we should give up reporting
+ reporting_timeout: 30
+ # After how many retries to give up on reporting
+ reporting_retries: 3
+ # How many reports to perform concurrently
+ reporting_concurrency: 20
+tor:
+ #socks_port: 9050
+ #control_port: 9051
+ # Specify the absolute path to the Tor bridges to use for testing
+ #bridges: bridges.list
+ # Specify path of the tor datadirectory.
+ # This should be set to something to avoid having Tor download each time
+ # the descriptors and consensus data.
+ #data_dir: ~/.tor/
diff --git a/ooniprobe.conf.sample b/ooniprobe.conf.sample
deleted file mode 100644
index 27a4fb3..0000000
--- a/ooniprobe.conf.sample
+++ /dev/null
@@ -1,56 +0,0 @@
-# This is the configuration file for OONIProbe
-# This file follows the YAML markup format: http://yaml.org/spec/1.2/spec.html
-# Keep in mind that indentation matters.
-
-basic:
- # Where OONIProbe should be writing its log file
- logfile: ooniprobe.log
-privacy:
- # Should we include the IP address of the probe in the report?
- includeip: false
- # Should we include the ASN of the probe in the report?
- includeasn: false
- # Should we include the country as reported by GeoIP in the report?
- includecountry: false
- # Should we include the city as reported by GeoIP in the report?
- includecity: false
- # Should we collect a full packet capture on the client?
- includepcap: false
-reports:
- # This is a packet capture file (.pcap) to load as a test:
- pcap: Null
-advanced:
- # XXX change this to point to the directory where you have stored the GeoIP
- # database file. This should be the directory in which OONI is installed
- # /path/to/ooni-probe/data/
- geoip_data_dir: /usr/share/GeoIP/
- debug: true
- tor_binary: '/usr/sbin/tor'
- # For auto detection
- interface: auto
- # Or specify a specific interface
- #interface: wlan0
- # If you do not specify start_tor, you will have to have Tor running and
- # explicitly set the control port and SOCKS port
- start_tor: true
- # After how many seconds we should give up on a particular measurement
- measurement_timeout: 30
- # After how many retries we should give up on a measurement
- measurement_retries: 2
- # How many measurements to perform concurrently
- measurement_concurrency: 100
- # After how many seconds we should give up reporting
- reporting_timeout: 30
- # After how many retries to give up on reporting
- reporting_retries: 3
- # How many reports to perform concurrently
- reporting_concurrency: 20
-tor:
- #socks_port: 9050
- #control_port: 9051
- # Specify the absolute path to the Tor bridges to use for testing
- bridges: bridges.list
- # Specify path of the tor datadirectory.
- # This should be set to something to avoid having Tor download each time
- # the descriptors and consensus data.
- data_dir: ~/.tor/
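For reference, the sample file is plain YAML, so once it has been copied to ooniprobe.conf (as the config loading code expects) it can be inspected directly with PyYAML. A minimal sketch, assuming PyYAML is installed and the copy has been made:

    import yaml

    with open('ooniprobe.conf') as f:
        configuration = yaml.safe_load(f)

    # Each top-level section becomes a plain dict.
    print(configuration['basic']['logfile'])       # ooniprobe.log
    print(configuration['privacy']['includeip'])   # False
    print(configuration['advanced']['start_tor'])  # True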
[ooni-probe/develop] Implement starting and stopping of tests via the HTTP API
by isis@torproject.org 26 Jun '13
26 Jun '13
commit 6f8ee232576dc3a983dcafcdd42a71c0c1cfc0c2
Author: Arturo Filastò <art(a)fuffa.org>
Date: Wed Apr 24 21:14:59 2013 +0200
Implement starting and stopping of tests via the HTTP API
---
ooni/api/spec.py | 66 +++++++++++++++++++++++++++++++++++++++++---
ooni/director.py | 7 ++---
ooni/nettest.py | 5 ++--
ooni/oonicli.py | 4 ++-
ooni/settings.py | 6 ++++
ooni/tests/test_nettest.py | 2 +-
6 files changed, 78 insertions(+), 12 deletions(-)
diff --git a/ooni/api/spec.py b/ooni/api/spec.py
index 5b538b2..ec4cf4b 100644
--- a/ooni/api/spec.py
+++ b/ooni/api/spec.py
@@ -1,10 +1,15 @@
import os
import re
+import copy
import json
import types
+from twisted.python import usage
from cyclone import web, escape
+from ooni.reporter import YAMLReporter, OONIBReporter
+from ooni import errors
+from ooni.nettest import NetTestLoader, MissingRequiredOption
from ooni.settings import config
class InvalidInputFilename(Exception):
@@ -39,6 +44,7 @@ def list_inputs():
class Inputs(ORequestHandler):
def get(self):
+ input_list = list_inputs()
self.write(input_list)
def post(self):
@@ -59,23 +65,75 @@ class Inputs(ORequestHandler):
class ListTests(ORequestHandler):
def get(self):
- self.write(oonidApplication.director.netTests)
+ test_list = copy.deepcopy(oonidApplication.director.netTests)
+ for test_id in test_list.keys():
+ test_list[test_id].pop('path')
+ self.write(test_list)
+
+def get_net_test_loader(test_options, test_file):
+ options = []
+ for k, v in test_options.items():
+ options.append('--'+k)
+ options.append(v)
+
+ net_test_loader = NetTestLoader(options,
+ test_file=test_file)
+ return net_test_loader
+
+def get_reporters(net_test_loader):
+ test_details = net_test_loader.testDetails
+ yaml_reporter = YAMLReporter(test_details, config.reports_directory)
+ #oonib_reporter = OONIBReporter(test_details, collector)
+ return [yaml_reporter]
class StartTest(ORequestHandler):
def post(self, test_name):
"""
Starts a test with the specified options.
"""
- json.decode(self.request.body)
+ test_file = oonidApplication.director.netTests[test_name]['path']
+ test_options = json.loads(self.request.body)
+ net_test_loader = get_net_test_loader(test_options, test_file)
+ try:
+ net_test_loader.checkOptions()
+ oonidApplication.director.startNetTest(net_test_loader,
+ get_reporters(net_test_loader))
+ except MissingRequiredOption, option_name:
+ self.write({'error':
+ 'Missing required option: "%s"' % option_name})
+ except usage.UsageError, e:
+ self.write({'error':
+ 'Error in parsing options'})
+ except errors.InsufficientPrivileges:
+ self.write({'error':
+ 'Insufficient privileges'})
class StopTest(ORequestHandler):
def delete(self, test_name):
pass
+def get_test_results(test_id):
+ test_results = []
+ for test_result in os.listdir(config.reports_directory):
+ if test_result.startswith('report-'+test_id):
+ with open(os.path.join(config.reports_directory, test_result)) as f:
+ test_content = ''.join(f.readlines())
+ test_results.append({'name': test_result,
+ 'content': test_content})
+ return test_results
+
class TestStatus(ORequestHandler):
def get(self, test_id):
- pass
-
+ try:
+ test = copy.deepcopy(oonidApplication.director.netTests[test_id])
+ test.pop('path')
+ test['results'] = get_test_results(test_id)
+ self.write(test)
+ except KeyError:
+ self.write({'error':
+ 'Test with such ID not found!'})
+
+config.read_config_file()
oonidAPI = [
(r"/status", Status),
(r"/inputs", Inputs),
diff --git a/ooni/director.py b/ooni/director.py
index 1ef878c..bb02201 100644
--- a/ooni/director.py
+++ b/ooni/director.py
@@ -65,7 +65,6 @@ class Director(object):
def __init__(self):
self.activeNetTests = []
- self.netTests = self.getNetTests()
self.measurementManager = MeasurementManager()
self.measurementManager.director = self
@@ -114,6 +113,8 @@ class Director(object):
@defer.inlineCallbacks
def start(self):
+ self.netTests = self.getNetTests()
+
if config.privacy.includepcap:
log.msg("Starting")
if not config.reports.pcap:
@@ -207,15 +208,13 @@ class Director(object):
self.allTestsDone = defer.Deferred()
@defer.inlineCallbacks
- def startNetTest(self, _, net_test_loader, reporters):
+ def startNetTest(self, net_test_loader, reporters):
"""
Create the Report for the NetTest and start the report NetTest.
Args:
net_test_loader:
an instance of :class:ooni.nettest.NetTestLoader
-
- _: #XXX very dirty hack
"""
report = Report(reporters, self.reportEntryManager)
diff --git a/ooni/nettest.py b/ooni/nettest.py
index 0d0e889..964bee3 100644
--- a/ooni/nettest.py
+++ b/ooni/nettest.py
@@ -115,7 +115,7 @@ def getOption(opt_parameter, required_options, type='text'):
required = False
return {'description': description,
- 'default': default, 'required': required,
+ 'value': default, 'required': required,
'type': type
}
@@ -157,7 +157,8 @@ def getNetTestInformation(net_test_file):
'name': test_class.name,
'description': test_class.description,
'version': test_class.version,
- 'arguments': getArguments(test_class)
+ 'arguments': getArguments(test_class),
+ 'path': net_test_file
}
return information
diff --git a/ooni/oonicli.py b/ooni/oonicli.py
index 40453b1..16c02e8 100644
--- a/ooni/oonicli.py
+++ b/ooni/oonicli.py
@@ -178,7 +178,9 @@ def runWithDirector():
raise e
log.debug("adding callback for startNetTest")
- d.addCallback(director.startNetTest, net_test_loader, reporters)
+ @d.addCallback
+ def cb(res):
+ director.startNetTest(net_test_loader, reporters)
director.allTestsDone.addBoth(shutdown)
def start():
diff --git a/ooni/settings.py b/ooni/settings.py
index acb7502..846fc10 100644
--- a/ooni/settings.py
+++ b/ooni/settings.py
@@ -29,12 +29,15 @@ class OConfig(object):
def set_paths(self):
if self.global_options.get('datadir'):
self.data_directory = abspath(expanduser(self.global_options['datadir']))
+ elif self.advanced.get('data_dir'):
+ self.data_directory = self.advanced['data_dir']
else:
self.data_directory = '/usr/share/ooni/'
self.nettest_directory = os.path.join(self.data_directory, 'nettests')
self.ooni_home = os.path.join(expanduser('~'), '.ooni')
self.inputs_directory = os.path.join(self.ooni_home, 'inputs')
+ self.reports_directory = os.path.join(self.ooni_home, 'reports')
if self.global_options.get('configfile'):
config_file = global_options['configfile']
@@ -48,6 +51,8 @@ class OConfig(object):
print "Creating it in '%s'." % self.ooni_home
os.mkdir(self.ooni_home)
os.mkdir(self.inputs_directory)
+ if not os.path.isdir(self.reports_directory):
+ os.mkdir(self.reports_directory)
def _create_config_file(self):
sample_config_file = os.path.join(self.data_directory,
@@ -75,6 +80,7 @@ class OConfig(object):
getattr(self, setting)[k] = v
except AttributeError:
pass
+ self.set_paths()
def generate_pcap_filename():
if self.global_options.get('pcapfile'):
diff --git a/ooni/tests/test_nettest.py b/ooni/tests/test_nettest.py
index 77b8a1c..eb909d9 100644
--- a/ooni/tests/test_nettest.py
+++ b/ooni/tests/test_nettest.py
@@ -224,7 +224,7 @@ class TestNetTest(unittest.TestCase):
ntl.checkOptions()
director = Director()
- d = director.startNetTest('', ntl, [MockReporter()])
+ d = director.startNetTest(ntl, [MockReporter()])
@d.addCallback
def complete(result):
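Putting the routes and handlers above together, a client can list the available tests, start one, and poll for its reports over plain HTTP. A minimal Python 2 sketch using urllib2; the port, the test id, and the option names are assumptions (the API port comes from the daemon's configuration, and the options must match whatever GET /test reports for the chosen test):

    import json
    import urllib2

    BASE = 'http://127.0.0.1:8042'  # assumed API address, not part of this commit

    # List the available tests and the arguments each one accepts.
    tests = json.loads(urllib2.urlopen(BASE + '/test').read())
    print(tests.keys())

    # Start one of them (POST with a JSON body mapping option name -> value).
    options = json.dumps({'url': 'http://example.com/'})  # placeholder option
    urllib2.urlopen(BASE + '/test/http_requests/start', options)

    # Poll the test: the response includes any reports written so far.
    status = json.loads(urllib2.urlopen(BASE + '/test/http_requests').read())
    print(status.get('results'))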
commit 7393a4e3eb57bcf3c14685b240e3ab5eebfe97d3
Author: Arturo Filastò <art(a)fuffa.org>
Date: Wed Mar 6 15:48:48 2013 +0100
Reorganization of code tree
* Move unittests into ooni directory
---
bin/ooniprobe | 4 +-
nettests/examples/example_myip.py | 4 +
nettests/tls-handshake.py | 32 -----
ooni/api/spec.py | 92 +++++++++++++
ooni/config.py | 134 ++++++++-----------
ooni/director.py | 29 +++-
ooni/nettest.py | 208 ++++++++++++++++++++--------
ooni/oonicli.py | 7 +-
ooni/oonid.py | 20 +++
ooni/reporter.py | 2 +-
ooni/runner.py | 241 ---------------------------------
ooni/tests/mocks.py | 172 ++++++++++++++++++++++++
ooni/tests/test-class-design.py | 101 ++++++++++++++
ooni/tests/test_director.py | 58 ++++++++
ooni/tests/test_dns.py | 24 ++++
ooni/tests/test_managers.py | 215 +++++++++++++++++++++++++++++
ooni/tests/test_mutate.py | 15 +++
ooni/tests/test_nettest.py | 268 +++++++++++++++++++++++++++++++++++++
ooni/tests/test_otime.py | 15 +++
ooni/tests/test_reporter.py | 238 ++++++++++++++++++++++++++++++++
ooni/tests/test_safe_represent.py | 14 ++
ooni/tests/test_trueheaders.py | 41 ++++++
ooni/tests/test_utils.py | 20 +++
ooni/utils/log.py | 8 +-
ooniprobe.conf.sample | 2 +
tests/mocks.py | 168 -----------------------
tests/test-class-design.py | 101 --------------
tests/test_director.py | 59 --------
tests/test_dns.py | 24 ----
tests/test_inputunit.py | 29 ----
tests/test_managers.py | 215 -----------------------------
tests/test_mutate.py | 15 ---
tests/test_nettest.py | 268 -------------------------------------
tests/test_otime.py | 15 ---
tests/test_reporter.py | 238 --------------------------------
tests/test_safe_represent.py | 14 --
tests/test_trueheaders.py | 41 ------
tests/test_utils.py | 20 ---
38 files changed, 1545 insertions(+), 1626 deletions(-)
diff --git a/bin/ooniprobe b/bin/ooniprobe
index 695b137..ba537ab 100755
--- a/bin/ooniprobe
+++ b/bin/ooniprobe
@@ -8,10 +8,10 @@ sys.path[:] = map(os.path.abspath, sys.path)
sys.path.insert(0, os.path.abspath(os.getcwd()))
# This is a hack to overcome a bug in python
-from ooni.utils.hacks import patched_reduce_ex
+from ooniprobe.utils.hacks import patched_reduce_ex
copy_reg._reduce_ex = patched_reduce_ex
# from ooni.oonicli import run
# run()
-from ooni.oonicli import runWithDirector
+from ooniprobe.oonicli import runWithDirector
runWithDirector()
diff --git a/nettests/examples/example_myip.py b/nettests/examples/example_myip.py
index 40a4849..70cf773 100644
--- a/nettests/examples/example_myip.py
+++ b/nettests/examples/example_myip.py
@@ -6,6 +6,10 @@
from ooni.templates import httpt
class MyIP(httpt.HTTPTest):
inputs = ['https://check.torproject.org']
+
+ def test_lookup(self):
+ return self.doRequest(self.input)
+
def processResponseBody(self, body):
import re
regexp = "Your IP address appears to be: <b>(.+?)<\/b>"
diff --git a/nettests/tls-handshake.py b/nettests/tls-handshake.py
deleted file mode 100644
index eba950e..0000000
--- a/nettests/tls-handshake.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env python
-
-import subprocess
-from subprocess import PIPE
-serverport = "129.21.124.215:443"
-# a subset of those from firefox
-ciphers = [
- "ECDHE-ECDSA-AES256-SHA",
- "ECDHE-RSA-AES256-SHA",
- "DHE-RSA-CAMELLIA256-SHA",
- "DHE-DSS-CAMELLIA256-SHA",
- "DHE-RSA-AES256-SHA",
- "DHE-DSS-AES256-SHA",
- "ECDH-ECDSA-AES256-CBC-SHA",
- "ECDH-RSA-AES256-CBC-SHA",
- "CAMELLIA256-SHA",
- "AES256-SHA",
- "ECDHE-ECDSA-RC4-SHA",
- "ECDHE-ECDSA-AES128-SHA",
- "ECDHE-RSA-RC4-SHA",
- "ECDHE-RSA-AES128-SHA",
- "DHE-RSA-CAMELLIA128-SHA",
- "DHE-DSS-CAMELLIA128-SHA"
-]
-def checkBridgeConnection(host, port)
- cipher_arg = ":".join(ciphers)
- cmd = ["openssl", "s_client", "-connect", "%s:%s" % (host,port)]
- cmd += ["-cipher", cipher_arg]
- proc = subprocess.Popen(cmd, stdout=PIPE, stderr=PIPE,stdin=PIPE)
- out, error = proc.communicate()
- success = "Cipher is DHE-RSA-AES256-SHA" in out
- return success
diff --git a/ooni/api/__init__.py b/ooni/api/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ooni/api/spec.py b/ooni/api/spec.py
new file mode 100644
index 0000000..af238f4
--- /dev/null
+++ b/ooni/api/spec.py
@@ -0,0 +1,92 @@
+import os
+import re
+import json
+import types
+
+from cyclone import web, escape
+
+from ooni import config
+
+class InvalidInputFilename(Exception):
+ pass
+
+class FilenameExists(Exception):
+ pass
+
+class ORequestHandler(web.RequestHandler):
+ serialize_lists = True
+
+ def write(self, chunk):
+ """
+ XXX This is a patch that can be removed once
+ https://github.com/fiorix/cyclone/pull/92 makes it into a release.
+ """
+ if isinstance(chunk, types.ListType):
+ chunk = escape.json_encode(chunk)
+ self.set_header("Content-Type", "application/json")
+ web.RequestHandler.write(self, chunk)
+
+class Status(ORequestHandler):
+ def get(self):
+ result = {'active_tests': oonidApplication.director.activeNetTests}
+ self.write(result)
+
+def list_inputs():
+ input_list = []
+ for filename in os.listdir(config.inputs_directory):
+ input_list.append({'filename': filename})
+ return input_list
+
+class Inputs(ORequestHandler):
+ def get(self):
+ self.write(input_list)
+
+ def post(self):
+ filename = self.get_argument("fullname", None)
+ if not filename or not re.match('(\w.*\.\w.*).*', filename):
+ raise InvalidInputFilename
+
+ if os.path.exists(filename):
+ raise FilenameExists
+
+ input_file = self.request.files.get("input_file")
+ content_type = input_file["content_type"]
+ body = input_file["body"]
+
+ fn = os.path.join(config.inputs_directory, filename)
+ with open(os.path.abspath(fn), "w") as fp:
+ fp.write(body)
+
+class ListTests(ORequestHandler):
+ def get(self):
+ self.write(oonidApplication.director.netTests)
+
+class StartTest(ORequestHandler):
+ def post(self, test_name):
+ """
+ Starts a test with the specified options.
+ """
+ json.decode(self.request.body)
+
+class StopTest(ORequestHandler):
+ def delete(self, test_name):
+ pass
+
+class TestStatus(ORequestHandler):
+ def get(self, test_id):
+ pass
+
+oonidAPI = [
+ (r"/status", Status),
+ (r"/inputs", Inputs),
+ (r"/test", ListTests),
+ (r"/test/(.*)/start", StartTest),
+ (r"/test/(.*)/stop", StopTest),
+ (r"/test/(.*)", TestStatus),
+ (r"/(.*)", web.StaticFileHandler,
+ {"path": os.path.join(config.data_directory, 'ui', 'app'),
+ "default_filename": "index.html"})
+]
+
+oonidApplication = web.Application(oonidAPI, debug=True)
+
diff --git a/ooni/config.py b/ooni/config.py
index 74a1668..5aeb49d 100644
--- a/ooni/config.py
+++ b/ooni/config.py
@@ -6,33 +6,8 @@ from twisted.internet import reactor, threads, defer
from ooni import otime
from ooni.utils import Storage
-reports = Storage()
-scapyFactory = None
-stateDict = None
-state = Storage()
-
-# XXX refactor this to use a database
-resume_lock = defer.DeferredLock()
-
-basic = None
-cmd_line_options = None
-resume_filename = None
-
-# XXX-Twisted this is used to check if we have started the reactor or not. It
-# is necessary because if the tests are already concluded because we have
-# resumed a test session then it will call reactor.run() even though there is
-# no condition that will ever stop it.
-# There should be a more twisted way of doing this.
-start_reactor = True
-
-tor_state = None
-tor_control = None
-
-config_file = None
-sample_config_file = None
-
-# This is used to store the probes IP address obtained via Tor
-probe_ip = None
+class TestFilenameNotSet(Exception):
+ pass
def get_root_path():
this_directory = os.path.dirname(__file__)
@@ -46,50 +21,6 @@ def createConfigFile():
"""
sample_config_file = os.path.join(get_root_path(), 'ooniprobe.conf.sample')
-def loadConfigFile():
- """
- This is a helper function that makes sure that the configuration attributes
- are singletons.
- """
- config_file = os.path.join(get_root_path(), 'ooniprobe.conf')
- try:
- f = open(config_file)
- except IOError:
- createConfigFile()
- raise Exception("Unable to open config file. "\
- "Copy ooniprobe.conf.sample to ooniprobe.conf")
-
- config_file_contents = '\n'.join(f.readlines())
- configuration = yaml.safe_load(config_file_contents)
-
- # Process the basic configuration options
- basic = Storage()
- for k, v in configuration['basic'].items():
- basic[k] = v
-
- # Process the privacy configuration options
- privacy = Storage()
- for k, v in configuration['privacy'].items():
- privacy[k] = v
-
- # Process the advanced configuration options
- advanced = Storage()
- for k, v in configuration['advanced'].items():
- advanced[k] = v
-
- # Process the tor configuration options
- tor = Storage()
- try:
- for k, v in configuration['tor'].items():
- tor[k] = v
- except AttributeError:
- pass
-
- return basic, privacy, advanced, tor
-
-class TestFilenameNotSet(Exception):
- pass
-
def generatePcapFilename():
if cmd_line_options['pcapfile']:
reports.pcap = cmd_line_options['pcapfile']
@@ -103,9 +34,61 @@ def generatePcapFilename():
frm_str = "report_%s_"+otime.timestamp()+".%s"
reports.pcap = frm_str % (test_name, "pcap")
-if not basic:
- # Here we make sure that we instance the config file attributes only once
- basic, privacy, advanced, tor = loadConfigFile()
+class ConfigurationSetting(Storage):
+ def __init__(self, key):
+ config_file = os.path.join(get_root_path(), 'ooniprobe.conf')
+ try:
+ f = open(config_file)
+ except IOError:
+ createConfigFile()
+ raise Exception("Unable to open config file. "\
+ "Copy ooniprobe.conf.sample to ooniprobe.conf")
+
+ config_file_contents = '\n'.join(f.readlines())
+ configuration = yaml.safe_load(config_file_contents)
+
+ try:
+ for k, v in configuration[key].items():
+ self[k] = v
+ except AttributeError:
+ pass
+
+basic = ConfigurationSetting('basic')
+advanced = ConfigurationSetting('advanced')
+privacy = ConfigurationSetting('privacy')
+tor = ConfigurationSetting('tor')
+
+data_directory = os.path.join(get_root_path(), 'data')
+nettest_directory = os.path.join(get_root_path(), 'nettests')
+inputs_directory = os.path.join(get_root_path(), 'inputs')
+
+reports = Storage()
+state = Storage()
+scapyFactory = None
+stateDict = None
+
+# XXX refactor this to use a database
+resume_lock = defer.DeferredLock()
+
+cmd_line_options = None
+resume_filename = None
+
+# XXX-Twisted this is used to check if we have started the reactor or not. It
+# is necessary because if the tests are already concluded because we have
+# resumed a test session then it will call reactor.run() even though there is
+# no condition that will ever stop it.
+# There should be a more twisted way of doing this.
+start_reactor = True
+tor_state = None
+tor_control = None
+config_file = None
+sample_config_file = None
+# This is used to store the probes IP address obtained via Tor
+probe_ip = None
+# This is used to keep track of the state of the sniffer
+sniffer_running = None
+
+logging = True
if not resume_filename:
resume_filename = os.path.join(get_root_path(), 'ooniprobe.resume')
@@ -113,6 +96,3 @@ if not resume_filename:
with open(resume_filename) as f: pass
except IOError as e:
with open(resume_filename, 'w+') as f: pass
-
-# This is used to keep track of the state of the sniffer
-sniffer_running = None
diff --git a/ooni/director.py b/ooni/director.py
index 8365ebd..a9daf84 100644
--- a/ooni/director.py
+++ b/ooni/director.py
@@ -6,7 +6,7 @@ from ooni.managers import ReportEntryManager, MeasurementManager
from ooni.reporter import Report
from ooni.utils import log, checkForRoot, NotRootError
from ooni.utils.net import randomFreePort
-from ooni.nettest import NetTest
+from ooni.nettest import NetTest, getNetTestInformation
from ooni.errors import UnableToStartTor
from txtorcon import TorConfig
@@ -57,10 +57,12 @@ class Director(object):
"""
_scheduledTests = 0
+ # Only list NetTests belonging to these categories
+ categories = ['blocking', 'manipulation']
def __init__(self):
- self.netTests = []
self.activeNetTests = []
+ self.netTests = self.getNetTests()
self.measurementManager = MeasurementManager()
self.measurementManager.director = self
@@ -80,6 +82,29 @@ class Director(object):
self.torControlProtocol = None
+ def getNetTests(self):
+ nettests = {}
+ def is_nettest(filename):
+ return not filename == '__init__.py' \
+ and filename.endswith('.py')
+
+ for category in self.categories:
+ dirname = os.path.join(config.nettest_directory, category)
+ # print path to all filenames.
+ for filename in os.listdir(dirname):
+ if is_nettest(filename):
+ net_test_file = os.path.join(dirname, filename)
+ nettest = getNetTestInformation(net_test_file)
+
+ if nettest['id'] in nettests:
+ log.err("Found a two tests with the same name %s, %s" %
+ (nettest_path, nettests[nettest['id']]['path']))
+ else:
+ category = dirname.replace(config.nettest_directory, '')
+ nettests[nettest['id']] = nettest
+
+ return nettests
+
def start(self):
if config.privacy.includepcap:
log.msg("Starting")
diff --git a/ooni/nettest.py b/ooni/nettest.py
index 1fe19f1..dc72ce8 100644
--- a/ooni/nettest.py
+++ b/ooni/nettest.py
@@ -18,15 +18,162 @@ from StringIO import StringIO
class NoTestCasesFound(Exception):
pass
+def get_test_methods(item, method_prefix="test_"):
+ """
+ Look for test_ methods in subclasses of NetTestCase
+ """
+ test_cases = []
+ try:
+ assert issubclass(item, NetTestCase)
+ methods = reflect.prefixedMethodNames(item, method_prefix)
+ test_methods = []
+ for method in methods:
+ test_methods.append(method_prefix + method)
+ if test_methods:
+ test_cases.append((item, test_methods))
+ except (TypeError, AssertionError):
+ pass
+ return test_cases
+
+def loadNetTestString(net_test_string):
+ """
+ Load NetTest from a string.
+ WARNING input to this function *MUST* be sanitized and *NEVER* be
+ untrusted.
+ Failure to do so will result in code exec.
+
+ net_test_string:
+
+ a string that contains the net test to be run.
+ """
+ net_test_file_object = StringIO(net_test_string)
+
+ ns = {}
+ test_cases = []
+ exec net_test_file_object.read() in ns
+ for item in ns.itervalues():
+ test_cases.extend(get_test_methods(item))
+
+ if not test_cases:
+ raise NoTestCasesFound
+
+ return test_cases
+
+def loadNetTestFile(net_test_file):
+ """
+ Load NetTest from a file.
+ """
+ test_cases = []
+ module = filenameToModule(net_test_file)
+ for __, item in getmembers(module):
+ test_cases.extend(get_test_methods(item))
+
+ if not test_cases:
+ raise NoTestCasesFound
+
+ return test_cases
+
+def getTestClassFromFile(net_test_file):
+ """
+ Will return the first class that is an instance of NetTestCase.
+
+ XXX this means that if inside of a test there are more than 1 test case
+ then we will only run the first one.
+ """
+ module = filenameToModule(net_test_file)
+ for __, item in getmembers(module):
+ try:
+ assert issubclass(item, NetTestCase)
+ return item
+ except (TypeError, AssertionError):
+ pass
+
+def getOption(opt_parameter, required_options, type='text'):
+ """
+ Arguments:
+ usage_options: a list as should be the optParameters of an UsageOptions class.
+
+ required_options: a list containing the strings of the options that are
+ required.
+
+ type: a string containing the type of the option.
+
+ Returns:
+ a dict containing
+ {
+ 'description': the description of the option,
+ 'default': the default value of the option,
+ 'required': True|False if the option is required or not,
+ 'type': the type of the option ('text' or 'file')
+ }
+ """
+ option_name, _, default, description = opt_parameter
+ if option_name in required_options:
+ required = True
+ else:
+ required = False
+
+ return {'description': description,
+ 'default': default, 'required': required,
+ 'type': type
+ }
+
+def getArguments(test_class):
+ arguments = {}
+ if test_class.inputFile:
+ option_name = test_class.inputFile[0]
+ arguments[option_name] = getOption(test_class.inputFile,
+ test_class.requiredOptions, type='file')
+ try:
+ list(test_class.usageOptions.optParameters)
+ except AttributeError:
+ return arguments
+
+ for opt_parameter in test_class.usageOptions.optParameters:
+ option_name = opt_parameter[0]
+ arguments[option_name] = getOption(opt_parameter,
+ test_class.requiredOptions)
+
+ return arguments
+
+def getNetTestInformation(net_test_file):
+ """
+ Returns a dict containing:
+
+ {
+ 'id': the test filename excluding the .py extension,
+ 'name': the full name of the test,
+ 'description': the description of the test,
+ 'version': version number of this test,
+ 'arguments': a dict containing as keys the supported arguments and as
+ values the argument description.
+ }
+ """
+ test_class = getTestClassFromFile(net_test_file)
+
+ test_id = os.path.basename(net_test_file).replace('.py', '')
+ information = {'id': test_id,
+ 'name': test_class.name,
+ 'description': test_class.description,
+ 'version': test_class.version,
+ 'arguments': getArguments(test_class)
+ }
+ return information
+
class NetTestLoader(object):
method_prefix = 'test'
def __init__(self, options, test_file=None, test_string=None):
self.options = options
+ test_cases = None
+
if test_file:
- self.loadNetTestFile(test_file)
+ test_cases = loadNetTestFile(test_file)
elif test_string:
- self.loadNetTestString(test_string)
+ test_cases = loadNetTestString(test_string)
+
+ if test_cases:
+ self.setupTestCases(test_cases)
@property
def testDetails(self):
@@ -115,44 +262,6 @@ class NetTestLoader(object):
assert usage_options == test_class.usageOptions
return usage_options
- def loadNetTestString(self, net_test_string):
- """
- Load NetTest from a string.
- WARNING input to this function *MUST* be sanitized and *NEVER* be
- untrusted.
- Failure to do so will result in code exec.
-
- net_test_string:
-
- a string that contains the net test to be run.
- """
- net_test_file_object = StringIO(net_test_string)
-
- ns = {}
- test_cases = []
- exec net_test_file_object.read() in ns
- for item in ns.itervalues():
- test_cases.extend(self._get_test_methods(item))
-
- if not test_cases:
- raise NoTestCasesFound
-
- self.setupTestCases(test_cases)
-
- def loadNetTestFile(self, net_test_file):
- """
- Load NetTest from a file.
- """
- test_cases = []
- module = filenameToModule(net_test_file)
- for __, item in getmembers(module):
- test_cases.extend(self._get_test_methods(item))
-
- if not test_cases:
- raise NoTestCasesFound
-
- self.setupTestCases(test_cases)
-
def setupTestCases(self, test_cases):
"""
Creates all the necessary test_cases (a list of tuples containing the
@@ -205,22 +314,6 @@ class NetTestLoader(object):
inputs = [None]
klass.inputs = inputs
- def _get_test_methods(self, item):
- """
- Look for test_ methods in subclasses of NetTestCase
- """
- test_cases = []
- try:
- assert issubclass(item, NetTestCase)
- methods = reflect.prefixedMethodNames(item, self.method_prefix)
- test_methods = []
- for method in methods:
- test_methods.append(self.method_prefix + method)
- if test_methods:
- test_cases.append((item, test_methods))
- except (TypeError, AssertionError):
- pass
- return test_cases
class NetTestState(object):
def __init__(self, allTasksDone):
@@ -409,9 +502,10 @@ class NetTestCase(object):
Quirks:
Every class that is prefixed with test *must* return a twisted.internet.defer.Deferred.
"""
- name = "I Did Not Change The Name"
+ name = "This test is nameless"
author = "Jane Doe <foo(a)example.com>"
version = "0.0.0"
+ description = "Sorry, this test has no description :("
inputs = [None]
inputFile = None
diff --git a/ooni/oonicli.py b/ooni/oonicli.py
index 06aa20c..a99386d 100644
--- a/ooni/oonicli.py
+++ b/ooni/oonicli.py
@@ -10,12 +10,11 @@ from twisted.internet import reactor
from twisted.python import usage
from twisted.python.util import spewer
-from ooni.errors import InvalidOONIBCollectorAddress
-
+from ooni import errors
from ooni import config
+
from ooni.director import Director
from ooni.reporter import YAMLReporter, OONIBReporter
-
from ooni.nettest import NetTestLoader, MissingRequiredOption
from ooni.utils import log
@@ -147,7 +146,7 @@ def runWithDirector():
oonib_reporter = OONIBReporter(test_details,
global_options['collector'])
reporters.append(oonib_reporter)
- except InvalidOONIBCollectorAddress:
+ except errors.InvalidOONIBCollectorAddress:
log.err("Invalid format for oonib collector address.")
log.msg("Should be in the format http://<collector_address>:<port>")
log.msg("for example: ooniprobe -c httpo://nkvphnp3p6agi5qq.onion")
diff --git a/ooni/oonid.py b/ooni/oonid.py
new file mode 100644
index 0000000..dde768e
--- /dev/null
+++ b/ooni/oonid.py
@@ -0,0 +1,20 @@
+import os
+import random
+
+from twisted.application import service, internet
+from twisted.web import static, server
+
+from ooni import config
+from ooni.api.spec import oonidApplication
+from ooni.director import Director
+from ooni.reporter import YAMLReporter, OONIBReporter
+
+def getOonid():
+ director = Director()
+ director.start()
+ oonidApplication.director = director
+ return internet.TCPServer(int(config.advanced.oonid_api_port), oonidApplication)
+
+application = service.Application("ooniprobe")
+service = getOonid()
+service.setServiceParent(application)
diff --git a/ooni/reporter.py b/ooni/reporter.py
index 84dad2f..a7bd933 100644
--- a/ooni/reporter.py
+++ b/ooni/reporter.py
@@ -26,7 +26,7 @@ except ImportError:
log.err("Scapy is not installed.")
-from ooni.errors import InvalidOONIBCollectorAddress
+from ooni.errors import InvalidOONIBCollectorAddress, NoMoreReporters
from ooni.errors import ReportNotCreated, ReportAlreadyClosed
from ooni import otime
diff --git a/ooni/runner.py b/ooni/runner.py
deleted file mode 100644
index 080db18..0000000
--- a/ooni/runner.py
+++ /dev/null
@@ -1,241 +0,0 @@
-import os
-import time
-import random
-
-import yaml
-
-from twisted.internet import defer
-from twisted.internet import reactor
-
-from txtorcon import TorConfig
-from txtorcon import TorState, launch_tor
-
-from ooni import config
-from ooni.reporter import OONIBReporter, YAMLReporter, OONIBReportError
-from ooni.inputunit import InputUnitFactory
-from ooni.nettest import NetTestCase, NoPostProcessor
-from ooni.utils import log, checkForRoot, pushFilenameStack
-from ooni.utils import NotRootError, Storage
-from ooni.utils.net import randomFreePort
-
-class InvalidResumeFile(Exception):
- pass
-
-class noResumeSession(Exception):
- pass
-
-def loadResumeFile():
- """
- Sets the singleton stateDict object to the content of the resume file.
- If the file is empty then it will create an empty one.
-
- Raises:
-
- :class:ooni.runner.InvalidResumeFile if the resume file is not valid
-
- """
- if not config.stateDict:
- try:
- with open(config.resume_filename) as f:
- config.stateDict = yaml.safe_load(f)
- except:
- log.err("Error loading YAML file")
- raise InvalidResumeFile
-
- if not config.stateDict:
- with open(config.resume_filename, 'w+') as f:
- yaml.safe_dump(dict(), f)
- config.stateDict = dict()
-
- elif isinstance(config.stateDict, dict):
- return
- else:
- log.err("The resume file is of the wrong format")
- raise InvalidResumeFile
-
-def resumeTest(test_filename, input_unit_factory):
- """
- Returns the an input_unit_factory that is at the index of the previous run of the test
- for the specified test_filename.
-
- Args:
-
- test_filename (str): the filename of the test that is being run
- including the .py extension.
-
- input_unit_factory (:class:ooni.inputunit.InputUnitFactory): with the
- same input of the past run.
-
- Returns:
-
- :class:ooni.inputunit.InputUnitFactory that is at the index of the
- previous test run.
-
- """
- try:
- idx = config.stateDict[test_filename]
- for x in range(idx):
- try:
- input_unit_factory.next()
- except StopIteration:
- log.msg("Previous run was complete")
- return input_unit_factory
-
- return input_unit_factory
-
- except KeyError:
- log.debug("No resume key found for selected test name. It is therefore 0")
- config.stateDict[test_filename] = 0
- return input_unit_factory
-
-@defer.inlineCallbacks
-def updateResumeFile(test_filename):
- """
- update the resume file with the current stateDict state.
- """
- log.debug("Acquiring lock for %s" % test_filename)
- yield config.resume_lock.acquire()
-
- current_resume_state = yaml.safe_load(open(config.resume_filename))
- current_resume_state = config.stateDict
- yaml.safe_dump(current_resume_state, open(config.resume_filename, 'w+'))
-
- log.debug("Releasing lock for %s" % test_filename)
- config.resume_lock.release()
- defer.returnValue(config.stateDict[test_filename])
-
-@defer.inlineCallbacks
-def increaseInputUnitIdx(test_filename):
- """
- Args:
-
- test_filename (str): the filename of the test that is being run
- including the .py extension.
-
- input_unit_idx (int): the current input unit index for the test.
-
- """
- config.stateDict[test_filename] += 1
- yield updateResumeFile(test_filename)
-
-def updateProgressMeters(test_filename, input_unit_factory,
- test_case_number):
- """
- Update the progress meters for keeping track of test state.
- """
- if not config.state.test_filename:
- config.state[test_filename] = Storage()
-
- config.state[test_filename].per_item_average = 2.0
-
- input_unit_idx = float(config.stateDict[test_filename])
- input_unit_items = len(input_unit_factory)
- test_case_number = float(test_case_number)
- total_iterations = input_unit_items * test_case_number
- current_iteration = input_unit_idx * test_case_number
-
- log.debug("input_unit_items: %s" % input_unit_items)
- log.debug("test_case_number: %s" % test_case_number)
-
- log.debug("Test case number: %s" % test_case_number)
- log.debug("Total iterations: %s" % total_iterations)
- log.debug("Current iteration: %s" % current_iteration)
-
- def progress():
- return (current_iteration / total_iterations) * 100.0
-
- config.state[test_filename].progress = progress
-
- def eta():
- return (total_iterations - current_iteration) \
- * config.state[test_filename].per_item_average
- config.state[test_filename].eta = eta
-
- config.state[test_filename].input_unit_idx = input_unit_idx
- config.state[test_filename].input_unit_items = input_unit_items
-
-
-@defer.inlineCallbacks
-def runTestCases(test_cases, options, cmd_line_options):
- log.debug("Running %s" % test_cases)
- log.debug("Options %s" % options)
- log.debug("cmd_line_options %s" % dict(cmd_line_options))
-
- test_inputs = options['inputs']
-
- # Set a default reporter
- if not cmd_line_options['collector'] and not \
- cmd_line_options['no-default-reporter']:
- with open('collector') as f:
- reporter_url = random.choice(f.readlines())
- reporter_url = reporter_url.split('#')[0].strip()
- cmd_line_options['collector'] = reporter_url
-
- oonib_reporter = OONIBReporter(cmd_line_options)
- yaml_reporter = YAMLReporter(cmd_line_options)
-
- if cmd_line_options['collector']:
- log.msg("Using remote collector, please be patient while we create the report.")
- try:
- yield oonib_reporter.createReport(options)
- except OONIBReportError:
- log.err("Error in creating new report")
- log.msg("We will only create reports to a file")
- oonib_reporter = None
- else:
- oonib_reporter = None
-
- yield yaml_reporter.createReport(options)
- log.msg("Reporting to file %s" % yaml_reporter._stream.name)
-
- try:
- input_unit_factory = InputUnitFactory(test_inputs)
- input_unit_factory.inputUnitSize = int(cmd_line_options['parallelism'])
- except Exception, e:
- log.exception(e)
-
- try:
- loadResumeFile()
- except InvalidResumeFile:
- log.err("Error in loading resume file %s" % config.resume_filename)
- log.err("Try deleting the resume file")
- raise InvalidResumeFile
-
- test_filename = os.path.basename(cmd_line_options['test'])
-
- if cmd_line_options['resume']:
- log.debug("Resuming %s" % test_filename)
- resumeTest(test_filename, input_unit_factory)
- else:
- log.debug("Not going to resume %s" % test_filename)
- config.stateDict[test_filename] = 0
-
- updateProgressMeters(test_filename, input_unit_factory, len(test_cases))
-
- try:
- for input_unit in input_unit_factory:
- log.debug("Running %s with input unit %s" % (test_filename, input_unit))
-
- yield runTestCasesWithInputUnit(test_cases, input_unit,
- yaml_reporter, oonib_reporter)
-
- yield increaseInputUnitIdx(test_filename)
-
- updateProgressMeters(test_filename, input_unit_factory, len(test_cases))
-
- except Exception:
- log.exception("Problem in running test")
- yaml_reporter.finish()
-
-def loadTest(cmd_line_options):
- """
- Takes care of parsing test command line arguments and loading their
- options.
- """
- # XXX here there is too much strong coupling with cmd_line_options
- # Ideally this would get all wrapped in a nice little class that get's
- # instanced with it's cmd_line_options as an instance attribute
- classes = findTestClassesFromFile(cmd_line_options)
- test_cases, options = loadTestsAndOptions(classes, cmd_line_options)
-
- return test_cases, options, cmd_line_options
diff --git a/ooni/tests/__init__.py b/ooni/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ooni/tests/mocks.py b/ooni/tests/mocks.py
new file mode 100644
index 0000000..99e5200
--- /dev/null
+++ b/ooni/tests/mocks.py
@@ -0,0 +1,172 @@
+from twisted.python import failure
+from twisted.internet import defer
+
+from ooni import config
+from ooni.tasks import BaseTask, TaskWithTimeout
+from ooni.nettest import NetTest
+from ooni.managers import TaskManager
+
+config.logging = False
+
+class MockMeasurementFailOnce(BaseTask):
+ def run(self):
+ f = open('dummyTaskFailOnce.txt', 'w')
+ f.write('fail')
+ f.close()
+ if self.failure >= 1:
+ return defer.succeed(self)
+ else:
+ return defer.fail(failure.Failure)
+
+class MockMeasurementManager(TaskManager):
+ def __init__(self):
+ self.successes = []
+ TaskManager.__init__(self)
+
+ def failed(self, failure, task):
+ pass
+
+ def succeeded(self, result, task):
+ self.successes.append((result, task))
+
+class MockReporter(object):
+ def __init__(self):
+ self.created = defer.Deferred()
+
+ def writeReportEntry(self, entry):
+ pass
+
+ def createReport(self):
+ self.created.callback(self)
+
+ def finish(self):
+ pass
+
+class MockFailure(Exception):
+ pass
+
+## from test_managers
+mockFailure = failure.Failure(MockFailure('mock'))
+
+class MockSuccessTask(BaseTask):
+ def run(self):
+ return defer.succeed(42)
+
+class MockFailTask(BaseTask):
+ def run(self):
+ return defer.fail(mockFailure)
+
+class MockFailOnceTask(BaseTask):
+ def run(self):
+ if self.failures >= 1:
+ return defer.succeed(42)
+ else:
+ return defer.fail(mockFailure)
+
+class MockSuccessTaskWithTimeout(TaskWithTimeout):
+ def run(self):
+ return defer.succeed(42)
+
+class MockFailTaskThatTimesOut(TaskWithTimeout):
+ def run(self):
+ return defer.Deferred()
+
+class MockTimeoutOnceTask(TaskWithTimeout):
+ def run(self):
+ if self.failures >= 1:
+ return defer.succeed(42)
+ else:
+ return defer.Deferred()
+
+class MockFailTaskWithTimeout(TaskWithTimeout):
+ def run(self):
+ return defer.fail(mockFailure)
+
+
+class MockNetTest(object):
+ def __init__(self):
+ self.successes = []
+
+ def succeeded(self, measurement):
+ self.successes.append(measurement)
+
+class MockMeasurement(TaskWithTimeout):
+ def __init__(self, net_test):
+ TaskWithTimeout.__init__(self)
+ self.netTest = net_test
+
+ def succeeded(self, result):
+ return self.netTest.succeeded(42)
+
+class MockSuccessMeasurement(MockMeasurement):
+ def run(self):
+ return defer.succeed(42)
+
+class MockFailMeasurement(MockMeasurement):
+ def run(self):
+ return defer.fail(mockFailure)
+
+class MockFailOnceMeasurement(MockMeasurement):
+ def run(self):
+ if self.failures >= 1:
+ return defer.succeed(42)
+ else:
+ return defer.fail(mockFailure)
+
+class MockDirector(object):
+ def __init__(self):
+ self.successes = []
+
+ def measurementFailed(self, failure, measurement):
+ pass
+
+ def measurementSucceeded(self, measurement):
+ self.successes.append(measurement)
+
+## from test_reporter.py
+class MockOReporter(object):
+ def __init__(self):
+ self.created = defer.Deferred()
+
+ def writeReportEntry(self, entry):
+ return defer.succeed(42)
+
+ def finish(self):
+ pass
+
+ def createReport(self):
+ from ooni.utils import log
+ log.debug("Creating report with %s" % self)
+ self.created.callback(self)
+
+class MockOReporterThatFailsWrite(MockOReporter):
+ def writeReportEntry(self, entry):
+ raise MockFailure
+
+class MockOReporterThatFailsOpen(MockOReporter):
+ def createReport(self):
+ self.created.errback(failure.Failure(MockFailure()))
+
+class MockOReporterThatFailsWriteOnce(MockOReporter):
+ def __init__(self):
+ self.failure = 0
+ MockOReporter.__init__(self)
+
+ def writeReportEntry(self, entry):
+ if self.failure >= 1:
+ return defer.succeed(42)
+ else:
+ self.failure += 1
+ raise MockFailure
+
+class MockTaskManager(TaskManager):
+ def __init__(self):
+ self.successes = []
+ TaskManager.__init__(self)
+
+ def failed(self, failure, task):
+ pass
+
+ def succeeded(self, result, task):
+ self.successes.append((result, task))
+
diff --git a/ooni/tests/test-class-design.py b/ooni/tests/test-class-design.py
new file mode 100644
index 0000000..bb80cd3
--- /dev/null
+++ b/ooni/tests/test-class-design.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python
+#
+# testing classes to test multiple inheritance.
+# these are not meant to be run by trial, though they could be made to be so.
+# i didn't know where to put them. --isis
+
+import abc
+from pprint import pprint
+from inspect import classify_class_attrs
+
+class PluginBase(object):
+ __metaclass__ = abc.ABCMeta
+
+ @abc.abstractproperty
+ def name(self):
+ return 'you should not see this'
+
+ @name.setter
+ def name(self, value):
+ return 'you should not set this'
+
+ @name.deleter
+ def name(self):
+ return 'you should not del this'
+
+ @abc.abstractmethod
+ def inputParser(self, line):
+ """Do something to parse something."""
+ return
+
+class Foo(object):
+ woo = "this class has some shit in it"
+ def bar(self):
+ print "i'm a Foo.bar()!"
+ print woo
+
+class KwargTest(Foo):
+ _name = "isis"
+
+ #def __new__(cls, *a, **kw):
+ # return super(KwargTest, cls).__new__(cls, *a, **kw)
+
+ @property
+ def name(self):
+ return self._name
+
+ @name.setter
+ def name(self, value):
+ self._name = value
+
+ def __init__(self, *a, **kw):
+ super(KwargTest, self).__init__()
+
+ ## this causes the instantiation args to override the class attrs
+ for key, value in kw.items():
+ setattr(self.__class__, key, value)
+
+ print "%s.__init__(): self.__dict__ = %s" \
+ % (type(self), pprint(type(self).__dict__))
+
+ for attr in classify_class_attrs(self):
+ print attr
+
+ @classmethod
+ def sayname(cls):
+ print cls.name
+
+class KwargTestChild(KwargTest):
+ name = "arturo"
+ def __init__(self):
+ super(KwargTestChild, self).__init__()
+ print self.name
+
+class KwargTestChildOther(KwargTest):
+ def __init__(self, name="robot", does="lasers"):
+ super(KwargTestChildOther, self).__init__()
+ print self.name
+
+
+if __name__ == "__main__":
+ print "class KwargTest attr name: %s" % KwargTest.name
+ kwargtest = KwargTest()
+ print "KwargTest instantiated wo args"
+ print "kwargtest.name: %s" % kwargtest.name
+ print "kwargtest.sayname(): %s" % kwargtest.sayname()
+ kwargtest2 = KwargTest(name="lovecruft", does="hacking")
+ print "KwargTest instantiated with name args"
+ print "kwargtest.name: %s" % kwargtest2.name
+ print "kwargtest.sayname(): %s" % kwargtest2.sayname()
+
+ print "class KwargTestChild attr name: %s" % KwargTestChild.name
+ kwargtestchild = KwargTestChild()
+ print "KwargTestChild instantiated wo args"
+ print "kwargtestchild.name: %s" % kwargtestchild.name
+ print "kwargtestchild.sayname(): %s" % kwargtestchild.sayname()
+
+ print "class KwargTestChildOther attr name: %s" % KwargTestChildOther.name
+ kwargtestchildother = KwargTestChildOther()
+ print "KwargTestChildOther instantiated wo args"
+ print "kwargtestchildother.name: %s" % kwargtestchildother.name
+ print "kwargtestchildother.sayname(): %s" % kwargtestchildother.sayname()
diff --git a/ooni/tests/test_director.py b/ooni/tests/test_director.py
new file mode 100644
index 0000000..7920fcb
--- /dev/null
+++ b/ooni/tests/test_director.py
@@ -0,0 +1,58 @@
+from twisted.internet import defer, base
+from twisted.trial import unittest
+
+from ooni.director import Director
+from ooni.nettest import NetTestLoader
+from ooni.tests.mocks import MockReporter
+base.DelayedCall.debug = True
+
+net_test_string = """
+from twisted.python import usage
+from ooni.nettest import NetTestCase
+
+class UsageOptions(usage.Options):
+ optParameters = [['spam', 's', None, 'ham']]
+
+class DummyTestCase(NetTestCase):
+ inputFile = ['file', 'f', None, 'The input File']
+
+ usageOptions = UsageOptions
+
+ def test_a(self):
+ self.report['bar'] = 'bar'
+
+ def test_b(self):
+ self.report['foo'] = 'foo'
+"""
+
+
+dummyArgs = ('--spam', 1, '--file', 'dummyInputFile.txt')
+
+class TestDirector(unittest.TestCase):
+ timeout = 1
+ def setUp(self):
+ with open('dummyInputFile.txt', 'w') as f:
+ for i in range(10):
+ f.write("%s\n" % i)
+
+ self.reporters = [MockReporter()]
+ self.director = Director()
+
+ def tearDown(self):
+ pass
+
+ def test_start_net_test(self):
+ ntl = NetTestLoader(dummyArgs, test_string=net_test_string)
+
+ ntl.checkOptions()
+ d = self.director.startNetTest('', ntl, self.reporters)
+
+ @d.addCallback
+ def done(result):
+ self.assertEqual(self.director.successfulMeasurements, 20)
+
+ return d
+
+ def test_stop_net_test(self):
+ pass
+
diff --git a/ooni/tests/test_dns.py b/ooni/tests/test_dns.py
new file mode 100644
index 0000000..e9bb524
--- /dev/null
+++ b/ooni/tests/test_dns.py
@@ -0,0 +1,24 @@
+#
+# This unittest is to verify that our usage of the twisted DNS resolver does
+# not break with new versions of twisted.
+
+import pdb
+from twisted.trial import unittest
+
+from twisted.internet import reactor
+
+from twisted.names import dns
+from twisted.names.client import Resolver
+
+class DNSTest(unittest.TestCase):
+ def test_a_lookup_ooni_query(self):
+ def done_query(message, *arg):
+ answer = message.answers[0]
+ self.assertEqual(answer.type, 1)
+
+ dns_query = [dns.Query('ooni.nu', type=dns.A)]
+ resolver = Resolver(servers=[('8.8.8.8', 53)])
+ d = resolver.queryUDP(dns_query)
+ d.addCallback(done_query)
+ return d
+
diff --git a/ooni/tests/test_managers.py b/ooni/tests/test_managers.py
new file mode 100644
index 0000000..e2af7b3
--- /dev/null
+++ b/ooni/tests/test_managers.py
@@ -0,0 +1,215 @@
+from twisted.trial import unittest
+from twisted.python import failure
+from twisted.internet import defer, task
+
+from ooni.tasks import BaseTask, TaskWithTimeout, TaskTimedOut
+from ooni.managers import TaskManager, MeasurementManager
+
+from ooni.tests.mocks import MockSuccessTask, MockFailTask, MockFailOnceTask, MockFailure
+from ooni.tests.mocks import MockSuccessTaskWithTimeout, MockFailTaskThatTimesOut
+from ooni.tests.mocks import MockTimeoutOnceTask, MockFailTaskWithTimeout
+from ooni.tests.mocks import MockTaskManager, mockFailure, MockDirector
+from ooni.tests.mocks import MockNetTest, MockMeasurement, MockSuccessMeasurement
+from ooni.tests.mocks import MockFailMeasurement, MockFailOnceMeasurement
+
+class TestTaskManager(unittest.TestCase):
+ timeout = 1
+ def setUp(self):
+ self.measurementManager = MockTaskManager()
+ self.measurementManager.concurrency = 20
+ self.measurementManager.retries = 2
+
+ self.measurementManager.start()
+
+ self.clock = task.Clock()
+
+ def schedule_successful_tasks(self, task_type, number=1):
+ all_done = []
+ for x in range(number):
+ mock_task = task_type()
+ all_done.append(mock_task.done)
+ self.measurementManager.schedule(mock_task)
+
+ d = defer.DeferredList(all_done)
+ @d.addCallback
+ def done(res):
+ for task_result, task_instance in self.measurementManager.successes:
+ self.assertEqual(task_result, 42)
+ self.assertIsInstance(task_instance, task_type)
+
+ return d
+
+ def schedule_failing_tasks(self, task_type, number=1):
+ all_done = []
+ for x in range(number):
+ mock_task = task_type()
+ all_done.append(mock_task.done)
+ self.measurementManager.schedule(mock_task)
+
+ d = defer.DeferredList(all_done)
+ @d.addCallback
+ def done(res):
+ # 10*2 because 2 is the number of retries
+ self.assertEqual(len(self.measurementManager.failures), number*3)
+ for task_result, task_instance in self.measurementManager.failures:
+ self.assertEqual(task_result, mockFailure)
+ self.assertIsInstance(task_instance, task_type)
+
+ return d
+
+ def test_schedule_failing_with_mock_failure_task(self):
+ mock_task = MockFailTask()
+ self.measurementManager.schedule(mock_task)
+ self.assertFailure(mock_task.done, MockFailure)
+ return mock_task.done
+
+ def test_schedule_successful_one_task(self):
+ return self.schedule_successful_tasks(MockSuccessTask)
+
+ def test_schedule_successful_one_task_with_timeout(self):
+ return self.schedule_successful_tasks(MockSuccessTaskWithTimeout)
+
+ def test_schedule_failing_tasks_that_timesout(self):
+ self.measurementManager.retries = 0
+
+ task_type = MockFailTaskThatTimesOut
+ task_timeout = 5
+
+ mock_task = task_type()
+ mock_task.timeout = task_timeout
+ mock_task.clock = self.clock
+
+ self.measurementManager.schedule(mock_task)
+
+ self.clock.advance(task_timeout)
+
+ @mock_task.done.addBoth
+ def done(res):
+ self.assertEqual(len(self.measurementManager.failures), 1)
+ for task_result, task_instance in self.measurementManager.failures:
+ self.assertIsInstance(task_instance, task_type)
+
+ return mock_task.done
+
+ def test_schedule_time_out_once(self):
+ task_type = MockTimeoutOnceTask
+ task_timeout = 5
+
+ mock_task = task_type()
+ mock_task.timeout = task_timeout
+ mock_task.clock = self.clock
+
+ self.measurementManager.schedule(mock_task)
+
+ self.clock.advance(task_timeout)
+
+ @mock_task.done.addBoth
+ def done(res):
+ self.assertEqual(len(self.measurementManager.failures), 1)
+ for task_result, task_instance in self.measurementManager.failures:
+ self.assertIsInstance(task_instance, task_type)
+
+ for task_result, task_instance in self.measurementManager.successes:
+ self.assertEqual(task_result, 42)
+ self.assertIsInstance(task_instance, task_type)
+
+ return mock_task.done
+
+
+ def test_schedule_failing_one_task(self):
+ return self.schedule_failing_tasks(MockFailTask)
+
+ def test_schedule_failing_one_task_with_timeout(self):
+ return self.schedule_failing_tasks(MockFailTaskWithTimeout)
+
+ def test_schedule_successful_ten_tasks(self):
+ return self.schedule_successful_tasks(MockSuccessTask, number=10)
+
+ def test_schedule_failing_ten_tasks(self):
+ return self.schedule_failing_tasks(MockFailTask, number=10)
+
+ def test_schedule_successful_27_tasks(self):
+ return self.schedule_successful_tasks(MockSuccessTask, number=27)
+
+ def test_schedule_failing_27_tasks(self):
+ return self.schedule_failing_tasks(MockFailTask, number=27)
+
+ def test_task_retry_and_succeed(self):
+ mock_task = MockFailOnceTask()
+ self.measurementManager.schedule(mock_task)
+
+ @mock_task.done.addCallback
+ def done(res):
+ self.assertEqual(len(self.measurementManager.failures), 1)
+
+ self.assertEqual(self.measurementManager.failures,
+ [(mockFailure, mock_task)])
+ self.assertEqual(self.measurementManager.successes,
+ [(42, mock_task)])
+
+ return mock_task.done
+
+ def dd_test_task_retry_and_succeed_56_tasks(self):
+ """
+ XXX this test fails in a non-deterministic manner.
+ """
+ all_done = []
+ number = 56
+ for x in range(number):
+ mock_task = MockFailOnceTask()
+ all_done.append(mock_task.done)
+ self.measurementManager.schedule(mock_task)
+
+ d = defer.DeferredList(all_done)
+
+ @d.addCallback
+ def done(res):
+ self.assertEqual(len(self.measurementManager.failures), number)
+
+ for task_result, task_instance in self.measurementManager.successes:
+ self.assertEqual(task_result, 42)
+ self.assertIsInstance(task_instance, MockFailOnceTask)
+
+ return d
+
+class TestMeasurementManager(unittest.TestCase):
+ def setUp(self):
+ mock_director = MockDirector()
+
+ self.measurementManager = MeasurementManager()
+ self.measurementManager.director = mock_director
+
+ self.measurementManager.concurrency = 10
+ self.measurementManager.retries = 2
+
+ self.measurementManager.start()
+
+ self.mockNetTest = MockNetTest()
+
+ def test_schedule_and_net_test_notified(self, number=1):
+ # XXX we should probably be inheriting from the base test class
+ mock_task = MockSuccessMeasurement(self.mockNetTest)
+ self.measurementManager.schedule(mock_task)
+
+ @mock_task.done.addCallback
+ def done(res):
+ self.assertEqual(self.mockNetTest.successes,
+ [42])
+
+ self.assertEqual(len(self.mockNetTest.successes), 1)
+ return mock_task.done
+
+ def test_schedule_failing_one_measurement(self):
+ mock_task = MockFailMeasurement(self.mockNetTest)
+ self.measurementManager.schedule(mock_task)
+
+ @mock_task.done.addErrback
+ def done(failure):
+ self.assertEqual(len(self.measurementManager.failures), 3)
+
+ self.assertEqual(failure, mockFailure)
+ self.assertEqual(len(self.mockNetTest.successes), 0)
+
+ return mock_task.done
+
+
diff --git a/ooni/tests/test_mutate.py b/ooni/tests/test_mutate.py
new file mode 100644
index 0000000..7e30586
--- /dev/null
+++ b/ooni/tests/test_mutate.py
@@ -0,0 +1,15 @@
+import unittest
+from ooni.kit import daphn3
+
+class TestDaphn3(unittest.TestCase):
+ def test_mutate_string(self):
+ original_string = '\x00\x00\x00'
+ mutated = daphn3.daphn3MutateString(original_string, 1)
+ self.assertEqual(mutated, '\x00\x01\x00')
+ def test_mutate_daphn3(self):
+ original_dict = [{'client': '\x00\x00\x00'},
+ {'server': '\x00\x00\x00'}]
+ mutated_dict = daphn3.daphn3Mutate(original_dict, 1, 1)
+ self.assertEqual(mutated_dict, [{'client': '\x00\x00\x00'},
+ {'server': '\x00\x01\x00'}])
+
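The two assertions above fix the observable behaviour of the daphn3 mutation helpers: the byte at the requested index is bumped to the next value, and in the step/index form only the selected step of the client/server dialogue is touched. A hypothetical re-implementation that satisfies these tests (the real code lives in ooni/kit/daphn3.py and may differ):

    def daphn3MutateString(string, idx):
        # Bump the byte at position idx to the next value (ignoring wrap-around).
        return string[:idx] + chr(ord(string[idx]) + 1) + string[idx + 1:]

    def daphn3Mutate(steps, step_idx, mutation_idx):
        # Mutate only the chosen step of a [{'client': ...}, {'server': ...}] dialogue.
        mutated = []
        for i, step in enumerate(steps):
            if i == step_idx:
                direction, data = list(step.items())[0]
                mutated.append({direction: daphn3MutateString(data, mutation_idx)})
            else:
                mutated.append(dict(step))
        return mutated

    assert daphn3MutateString('\x00\x00\x00', 1) == '\x00\x01\x00'
    assert daphn3Mutate([{'client': '\x00\x00\x00'}, {'server': '\x00\x00\x00'}], 1, 1) == \
        [{'client': '\x00\x00\x00'}, {'server': '\x00\x01\x00'}]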
diff --git a/ooni/tests/test_nettest.py b/ooni/tests/test_nettest.py
new file mode 100644
index 0000000..4d72a84
--- /dev/null
+++ b/ooni/tests/test_nettest.py
@@ -0,0 +1,268 @@
+import os
+from StringIO import StringIO
+from tempfile import TemporaryFile, mkstemp
+
+from twisted.trial import unittest
+from twisted.internet import defer, reactor
+from twisted.python.usage import UsageError
+
+from ooni.nettest import NetTest, InvalidOption, MissingRequiredOption
+from ooni.nettest import NetTestLoader, FailureToLoadNetTest, loadNetTestString, loadNetTestFile
+from ooni.tasks import BaseTask
+from ooni.utils import NotRootError
+
+from ooni.director import Director
+
+from ooni.managers import TaskManager
+
+from ooni.tests.mocks import MockMeasurement, MockMeasurementFailOnce
+from ooni.tests.mocks import MockNetTest, MockDirector, MockReporter
+from ooni.tests.mocks import MockMeasurementManager
+defer.setDebugging(True)
+
+net_test_string = """
+from twisted.python import usage
+from ooni.nettest import NetTestCase
+
+class UsageOptions(usage.Options):
+ optParameters = [['spam', 's', None, 'ham']]
+
+class DummyTestCase(NetTestCase):
+
+ usageOptions = UsageOptions
+
+ def test_a(self):
+ self.report['bar'] = 'bar'
+
+ def test_b(self):
+ self.report['foo'] = 'foo'
+"""
+
+net_test_root_required = net_test_string+"""
+ requiresRoot = True
+"""
+
+net_test_string_with_file = """
+from twisted.python import usage
+from ooni.nettest import NetTestCase
+
+class UsageOptions(usage.Options):
+ optParameters = [['spam', 's', None, 'ham']]
+
+class DummyTestCase(NetTestCase):
+ inputFile = ['file', 'f', None, 'The input File']
+
+ usageOptions = UsageOptions
+
+ def test_a(self):
+ self.report['bar'] = 'bar'
+
+ def test_b(self):
+ self.report['foo'] = 'foo'
+"""
+
+net_test_string_with_required_option = """
+from twisted.python import usage
+from ooni.nettest import NetTestCase
+
+class UsageOptions(usage.Options):
+ optParameters = [['spam', 's', None, 'ham'],
+ ['foo', 'o', None, 'moo'],
+ ['bar', 'o', None, 'baz'],
+ ]
+
+class DummyTestCase(NetTestCase):
+ inputFile = ['file', 'f', None, 'The input File']
+
+ usageOptions = UsageOptions
+
+ def test_a(self):
+ self.report['bar'] = 'bar'
+
+ def test_b(self):
+ self.report['foo'] = 'foo'
+
+ requiredOptions = ['foo', 'bar']
+"""
+
+dummyInputs = range(1)
+dummyArgs = ('--spam', 'notham')
+dummyOptions = {'spam':'notham'}
+dummyInvalidArgs = ('--cram', 'jam')
+dummyInvalidOptions= {'cram':'jam'}
+dummyArgsWithRequiredOptions = ('--foo', 'moo', '--bar', 'baz')
+dummyRequiredOptions = {'foo':'moo', 'bar':'baz'}
+dummyArgsWithFile = ('--spam', 'notham', '--file', 'dummyInputFile.txt')
+
+class TestNetTest(unittest.TestCase):
+ timeout = 1
+ def setUp(self):
+ with open('dummyInputFile.txt', 'w') as f:
+ for i in range(10):
+ f.write("%s\n" % i)
+
+ def assertCallable(self, thing):
+ self.assertIn('__call__', dir(thing))
+
+ def verifyMethods(self, testCases):
+ uniq_test_methods = set()
+ for test_class, test_methods in testCases:
+ instance = test_class()
+ for test_method in test_methods:
+ c = getattr(instance, test_method)
+ self.assertCallable(c)
+ uniq_test_methods.add(test_method)
+ self.assertEqual(set(['test_a', 'test_b']), uniq_test_methods)
+
+ def test_load_net_test_from_file(self):
+ """
+ Given a file verify that the net test cases are properly
+ generated.
+ """
+ __, net_test_file = mkstemp()
+ with open(net_test_file, 'w') as f:
+ f.write(net_test_string)
+ f.close()
+
+ ntl = NetTestLoader(dummyArgs)
+ ntl.setupTestCases(loadNetTestFile(net_test_file))
+
+ self.verifyMethods(ntl.testCases)
+ os.unlink(net_test_file)
+
+ def test_load_net_test_from_str(self):
+ """
+ Given a file like object verify that the net test cases are properly
+ generated.
+ """
+ ntl = NetTestLoader(dummyArgs)
+ ntl.setupTestCases(loadNetTestString(net_test_string))
+
+ self.verifyMethods(ntl.testCases)
+
+ def test_load_net_test_from_StringIO(self):
+ """
+ Given a file like object verify that the net test cases are properly
+ generated.
+ """
+ ntl = NetTestLoader(dummyArgs)
+ ntl.setupTestCases(loadNetTestString(net_test_string))
+
+ self.verifyMethods(ntl.testCases)
+
+ def test_load_with_option(self):
+ ntl = NetTestLoader(dummyArgs)
+ ntl.setupTestCases(loadNetTestString(net_test_string))
+
+ self.assertIsInstance(ntl, NetTestLoader)
+ for test_klass, test_meth in ntl.testCases:
+ for option in dummyOptions.keys():
+ self.assertIn(option, test_klass.usageOptions())
+
+ def test_load_with_invalid_option(self):
+ try:
+ ntl = NetTestLoader(dummyInvalidArgs)
+ ntl.setupTestCases(loadNetTestString(net_test_string))
+
+ ntl.checkOptions()
+ raise Exception
+ except UsageError:
+ pass
+
+ def test_load_with_required_option(self):
+ ntl = NetTestLoader(dummyArgsWithRequiredOptions)
+ ntl.setupTestCases(loadNetTestString(net_test_string_with_required_option))
+
+ self.assertIsInstance(ntl, NetTestLoader)
+
+ def test_load_with_missing_required_option(self):
+ try:
+ ntl = NetTestLoader(dummyArgs)
+ ntl.setupTestCases(loadNetTestString(net_test_string_with_required_option))
+
+ except MissingRequiredOption:
+ pass
+
+ def test_net_test_inputs(self):
+ ntl = NetTestLoader(dummyArgsWithFile)
+ ntl.setupTestCases(loadNetTestString(net_test_string_with_file))
+
+ ntl.checkOptions()
+
+ # XXX: if you use the same test_class twice you will have consumed all
+ # of its inputs!
+ tested = set([])
+ for test_class, test_method in ntl.testCases:
+ if test_class not in tested:
+ tested.update([test_class])
+ self.assertEqual(len(list(test_class.inputs)), 10)
+
+ def test_setup_local_options_in_test_cases(self):
+ ntl = NetTestLoader(dummyArgs)
+ ntl.setupTestCases(loadNetTestString(net_test_string))
+
+ ntl.checkOptions()
+
+ for test_class, test_method in ntl.testCases:
+ self.assertEqual(test_class.localOptions, dummyOptions)
+
+ def test_generate_measurements_size(self):
+ ntl = NetTestLoader(dummyArgsWithFile)
+ ntl.setupTestCases(loadNetTestString(net_test_string_with_file))
+
+ ntl.checkOptions()
+ net_test = NetTest(ntl, None)
+
+ measurements = list(net_test.generateMeasurements())
+ self.assertEqual(len(measurements), 20)
+
+ def test_net_test_completed_callback(self):
+ ntl = NetTestLoader(dummyArgsWithFile)
+ ntl.setupTestCases(loadNetTestString(net_test_string_with_file))
+
+ ntl.checkOptions()
+ director = Director()
+
+ d = director.startNetTest('', ntl, [MockReporter()])
+
+ @d.addCallback
+ def complete(result):
+ #XXX: why is the return type (True, None) ?
+ self.assertEqual(result, [(True,None)])
+ self.assertEqual(director.successfulMeasurements, 20)
+
+ return d
+
+ def test_require_root_succeed(self):
+ #XXX: will require root to run
+ ntl = NetTestLoader(dummyArgs)
+ ntl.setupTestCases(loadNetTestString(net_test_root_required))
+
+ for test_class, method in ntl.testCases:
+ self.assertTrue(test_class.requiresRoot)
+
+ #def test_require_root_failed(self):
+ # #XXX: will fail if you run as root
+ # try:
+ # net_test = NetTestLoader(StringIO(net_test_root_required),
+ # dummyArgs)
+ # except NotRootError:
+ # pass
+
+ #def test_create_report_succeed(self):
+ # pass
+
+ #def test_create_report_failed(self):
+ # pass
+
+ #def test_run_all_test(self):
+ # raise NotImplementedError
+
+ #def test_resume_test(self):
+ # pass
+
+ #def test_progress(self):
+ # pass
+
+ #def test_time_out(self):
+ # raise NotImplementedError
diff --git a/ooni/tests/test_otime.py b/ooni/tests/test_otime.py
new file mode 100644
index 0000000..80979f2
--- /dev/null
+++ b/ooni/tests/test_otime.py
@@ -0,0 +1,15 @@
+import unittest
+from datetime import datetime
+from ooni import otime
+
+test_date = datetime(2002, 6, 26, 22, 45, 49)
+
+class TestOtime(unittest.TestCase):
+ def test_timestamp(self):
+ self.assertEqual(otime.timestamp(test_date), "2002-06-26T224549Z")
+
+ def test_fromTimestamp(self):
+ time_stamp = otime.timestamp(test_date)
+ self.assertEqual(test_date, otime.fromTimestamp(time_stamp))
+
+
diff --git a/ooni/tests/test_reporter.py b/ooni/tests/test_reporter.py
new file mode 100644
index 0000000..d7ee907
--- /dev/null
+++ b/ooni/tests/test_reporter.py
@@ -0,0 +1,238 @@
+from twisted.internet import defer
+from twisted.trial import unittest
+
+from ooni.reporter import Report, YAMLReporter, OONIBReporter, safe_dump
+from ooni.managers import ReportEntryManager, TaskManager
+from ooni.nettest import NetTest, NetTestState
+from ooni.errors import ReportNotCreated, ReportAlreadyClosed
+
+from ooni.tasks import TaskWithTimeout
+from ooni.tests.mocks import MockOReporter, MockTaskManager
+from ooni.tests.mocks import MockMeasurement, MockNetTest
+from ooni.tests.mocks import MockOReporterThatFailsWrite
+from ooni.tests.mocks import MockOReporterThatFailsWriteOnce
+from ooni.tests.mocks import MockOReporterThatFailsOpen
+
+from twisted.python import failure
+import yaml
+
+class TestReport(unittest.TestCase):
+ def setUp(self):
+ pass
+ def tearDown(self):
+ pass
+ def test_create_report_with_no_reporter(self):
+ report = Report([],ReportEntryManager())
+ self.assertIsInstance(report, Report)
+
+ def test_create_report_with_single_reporter(self):
+ report = Report([MockOReporter()], ReportEntryManager())
+ self.assertIsInstance(report, Report)
+
+ def test_create_report_with_multiple_reporters(self):
+ report = Report([MockOReporter() for x in xrange(3)],
+ ReportEntryManager())
+ self.assertIsInstance(report, Report)
+
+ def test_report_open_with_single_reporter(self):
+ report = Report([MockOReporter()],ReportEntryManager())
+ d = report.open()
+ return d
+
+ def test_report_open_with_multiple_reporter(self):
+ report = Report([MockOReporter() for x in xrange(3)],
+ ReportEntryManager())
+ d = report.open()
+ return d
+
+ def test_fail_to_open_report_with_single_reporter(self):
+ report = Report([MockOReporterThatFailsOpen()],
+ ReportEntryManager())
+ d = report.open()
+ def f(x):
+ self.assertEquals(len(report.reporters), 0)
+ d.addCallback(f)
+ return d
+
+ def test_fail_to_open_single_report_with_multiple_reporter(self):
+ report = Report([MockOReporterThatFailsOpen(), MockOReporter(),
+ MockOReporter()], ReportEntryManager())
+ d = report.open()
+ def f(x):
+ self.assertEquals(len(report.reporters),2)
+ d.addCallback(f)
+ return d
+
+ def test_fail_to_open_all_reports_with_multiple_reporter(self):
+ report = Report([MockOReporterThatFailsOpen() for x in xrange(3)],
+ ReportEntryManager())
+ d = report.open()
+ def f(x):
+ self.assertEquals(len(report.reporters),0)
+ d.addCallback(f)
+ return d
+
+ def test_write_report_with_single_reporter_and_succeed(self):
+ #XXX: verify that the MockOReporter writeReportEntry succeeds
+ report = Report([MockOReporter()], ReportEntryManager())
+ report.open()
+ d = report.write(MockMeasurement(MockNetTest()))
+ return d
+
+ def test_write_report_with_single_reporter_and_fail_after_timeout(self):
+ report = Report([MockOReporterThatFailsWrite()], ReportEntryManager())
+ report.open()
+ d = report.write(MockMeasurement(MockNetTest()))
+ def f(err):
+ self.assertEquals(len(report.reporters),0)
+ d.addBoth(f)
+ return d
+
+ def test_write_report_with_single_reporter_and_succeed_after_timeout(self):
+ report = Report([MockOReporterThatFailsWriteOnce()], ReportEntryManager())
+ report.open()
+ d = report.write(MockMeasurement(MockNetTest()))
+ return d
+
+ def test_write_report_with_multiple_reporter_and_succeed(self):
+ report = Report([MockOReporter() for x in xrange(3)], ReportEntryManager())
+ report.open()
+ d = report.write(MockMeasurement(MockNetTest()))
+ return d
+
+ def test_write_report_with_multiple_reporter_and_fail_a_single_reporter(self):
+ report = Report([MockOReporter(), MockOReporter(), MockOReporterThatFailsWrite()], ReportEntryManager())
+ d = report.open()
+
+ self.assertEquals(len(report.reporters),3)
+ d = report.write(MockMeasurement(MockNetTest()))
+
+ def f(x):
+ # one of the reporters should have been removed
+ self.assertEquals(len(report.reporters), 2)
+ d.addBoth(f)
+ return d
+
+ def test_write_report_with_multiple_reporter_and_fail_all_reporter(self):
+ report = Report([MockOReporterThatFailsWrite() for x in xrange(3)], ReportEntryManager())
+ report.open()
+ d = report.write(MockMeasurement(MockNetTest()))
+ def f(err):
+ self.assertEquals(len(report.reporters),0)
+ d.addErrback(f)
+ return d
+
+class TestYAMLReporter(unittest.TestCase):
+ def setUp(self):
+ self.testDetails = {'software_name': 'ooniprobe', 'options':
+ {'pcapfile': None, 'help': 0, 'subargs': ['-f', 'alexa_10'], 'resume':
+ 0, 'parallelism': '10', 'no-default-reporter': 0, 'testdeck': None,
+ 'test': 'nettests/blocking/http_requests.py', 'logfile': None,
+ 'collector': None, 'reportfile': None}, 'test_version': '0.2.3',
+ 'software_version': '0.0.10', 'test_name': 'http_requests_test',
+ 'start_time': 1362054343.0, 'probe_asn': 'AS0', 'probe_ip':
+ '127.0.0.1', 'probe_cc': 'US'}
+
+ def tearDown(self):
+ pass
+ def test_create_yaml_reporter(self):
+ self.assertIsInstance(YAMLReporter(self.testDetails),
+ YAMLReporter)
+
+ def test_open_yaml_report_and_succeed(self):
+ r = YAMLReporter(self.testDetails)
+ r.createReport()
+ # verify that testDetails was written to report properly
+ def f(r):
+ r._stream.seek(0)
+ details, = yaml.safe_load_all(r._stream)
+ self.assertEqual(details, self.testDetails)
+ r.created.addCallback(f)
+ return r.created
+
+ #def test_open_yaml_report_and_fail(self):
+ # #XXX: YAMLReporter does not handle failures of this type
+ # pass
+
+ def test_write_yaml_report_entry(self):
+ r = YAMLReporter(self.testDetails)
+ r.createReport()
+
+ report_entry = {'foo':'bar', 'bin':'baz'}
+ r.writeReportEntry(report_entry)
+
+ # verify that details and entry were written to report
+ def f(r):
+ r._stream.seek(0)
+ report = yaml.safe_load_all(r._stream)
+ details, entry = report
+ self.assertEqual(details, self.testDetails)
+ self.assertEqual(entry, report_entry)
+ r.created.addCallback(f)
+ return r.created
+
+ def test_write_multiple_yaml_report_entry(self):
+ r = YAMLReporter(self.testDetails)
+ r.createReport()
+ def reportEntry():
+ for x in xrange(10):
+ yield {'foo':'bar', 'bin':'baz', 'item':x}
+ for entry in reportEntry():
+ r.writeReportEntry(entry)
+ # verify that details and multiple entries were written to report
+ def f(r):
+ r._stream.seek(0)
+ report = yaml.safe_load_all(r._stream)
+ details = report.next()
+ self.assertEqual(details, self.testDetails)
+ self.assertEqual([r for r in report], [r for r in reportEntry()])
+ r.created.addCallback(f)
+ return r.created
+
+ def test_close_yaml_report(self):
+ r = YAMLReporter(self.testDetails)
+ r.createReport()
+ r.finish()
+ self.assertTrue(r._stream.closed)
+
+ def test_write_yaml_report_after_close(self):
+ r = YAMLReporter(self.testDetails)
+ r.createReport()
+ r.finish()
+ def f(r):
+ r.writeReportEntry("foo")
+ r.created.addCallback(f)
+ self.assertFailure(r.created, ReportAlreadyClosed)
+
+ def test_write_yaml_report_before_open(self):
+ r = YAMLReporter(self.testDetails)
+ def f(r):
+ r.writeReportEntry("foo")
+ r.created.addCallback(f)
+ self.assertFailure(r.created, ReportNotCreated)
+
+#class TestOONIBReporter(unittest.TestCase):
+# def setUp(self):
+# pass
+# def tearDown(self):
+# pass
+# def test_create_oonib_reporter(self):
+# raise NotImplementedError
+# def test_open_oonib_report_and_succeed(self):
+# raise NotImplementedError
+# def test_open_oonib_report_and_fail(self):
+# raise NotImplementedError
+# def test_write_oonib_report_entry_and_succeed(self):
+# raise NotImplementedError
+# def test_write_oonib_report_entry_and_succeed_after_timeout(self):
+# raise NotImplementedError
+# def test_write_oonib_report_entry_and_fail_after_timeout(self):
+# raise NotImplementedError
+# def test_write_oonib_report_after_close(self):
+# raise NotImplementedError
+# def test_write_oonib_report_before_open(self):
+# raise NotImplementedError
+# def test_close_oonib_report_and_succeed(self):
+# raise NotImplementedError
+# def test_close_oonib_report_and_fail(self):
+# raise NotImplementedError
diff --git a/ooni/tests/test_safe_represent.py b/ooni/tests/test_safe_represent.py
new file mode 100644
index 0000000..82a5196
--- /dev/null
+++ b/ooni/tests/test_safe_represent.py
@@ -0,0 +1,14 @@
+import yaml
+
+from twisted.trial import unittest
+
+from ooni.reporter import OSafeDumper
+
+from scapy.all import IP, UDP
+
+class TestScapyRepresent(unittest.TestCase):
+ def test_represent_scapy(self):
+ data = IP()/UDP()
+ yaml.dump_all([data], Dumper=OSafeDumper)
+
+
diff --git a/ooni/tests/test_trueheaders.py b/ooni/tests/test_trueheaders.py
new file mode 100644
index 0000000..9ac0a27
--- /dev/null
+++ b/ooni/tests/test_trueheaders.py
@@ -0,0 +1,41 @@
+from twisted.trial import unittest
+
+from ooni.utils.txagentwithsocks import TrueHeaders
+
+dummy_headers_dict = {
+ 'Header1': ['Value1', 'Value2'],
+ 'Header2': ['ValueA', 'ValueB']
+}
+
+dummy_headers_dict2 = {
+ 'Header1': ['Value1', 'Value2'],
+ 'Header2': ['ValueA', 'ValueB'],
+ 'Header3': ['ValueA', 'ValueB'],
+}
+
+dummy_headers_dict3 = {
+ 'Header1': ['Value1', 'Value2'],
+ 'Header2': ['ValueA', 'ValueB'],
+ 'Header4': ['ValueA', 'ValueB'],
+}
+
+
+class TestTrueHeaders(unittest.TestCase):
+ def test_names_match(self):
+ th = TrueHeaders(dummy_headers_dict)
+ self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict)), set())
+
+ def test_names_not_match(self):
+ th = TrueHeaders(dummy_headers_dict)
+ self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3']))
+
+ th = TrueHeaders(dummy_headers_dict3)
+ self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3', 'Header4']))
+
+ def test_names_match_expect_ignore(self):
+ th = TrueHeaders(dummy_headers_dict)
+ self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2), ignore=['Header3']), set())
+
+
+
+
diff --git a/ooni/tests/test_utils.py b/ooni/tests/test_utils.py
new file mode 100644
index 0000000..cc648e0
--- /dev/null
+++ b/ooni/tests/test_utils.py
@@ -0,0 +1,20 @@
+import unittest
+from ooni.utils import pushFilenameStack
+
+class TestUtils(unittest.TestCase):
+ def test_pushFilenameStack(self):
+ f = open("dummyfile", "w+")
+ f.write("0\n")
+ f.close()
+ for i in xrange(1, 5):
+ f = open("dummyfile.%s" % i, "w+")
+ f.write("%s\n" % i)
+ f.close()
+
+ pushFilenameStack("dummyfile")
+ for i in xrange(1, 5):
+ f = open("dummyfile.%s" % i)
+ c = f.readlines()[0].strip()
+ self.assertEqual(str(i-1), str(c))
+ f.close()
+
diff --git a/ooni/utils/log.py b/ooni/utils/log.py
index 0740c10..141116e 100644
--- a/ooni/utils/log.py
+++ b/ooni/utils/log.py
@@ -45,14 +45,16 @@ def stop():
print "Stopping OONI"
def msg(msg, *arg, **kw):
- print "%s" % msg
+ if config.logging:
+ print "%s" % msg
def debug(msg, *arg, **kw):
- if config.advanced.debug:
+ if config.advanced.debug and config.logging:
print "[D] %s" % msg
def err(msg, *arg, **kw):
- print "[!] %s" % msg
+ if config.logging:
+ print "[!] %s" % msg
def exception(error):
"""
diff --git a/ooniprobe.conf.sample b/ooniprobe.conf.sample
index 51c60f5..8a6b825 100644
--- a/ooniprobe.conf.sample
+++ b/ooniprobe.conf.sample
@@ -30,6 +30,8 @@ advanced:
# If you do not specify start_tor, you will have to have Tor running and
# explicitly set the control port and SOCKS port
start_tor: true
+ # On which port the oonid API should be listening on
+ oonid_api_port: 50666
tor:
#socks_port: 9050
#control_port: 9051
diff --git a/tests/__init__.py b/tests/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/tests/mocks.py b/tests/mocks.py
deleted file mode 100644
index fed683e..0000000
--- a/tests/mocks.py
+++ /dev/null
@@ -1,168 +0,0 @@
-from ooni.tasks import BaseTask, TaskWithTimeout
-from twisted.python import failure
-from ooni.nettest import NetTest
-from ooni.managers import TaskManager
-from twisted.internet import defer
-
-class MockMeasurementFailOnce(BaseTask):
- def run(self):
- f = open('dummyTaskFailOnce.txt', 'w')
- f.write('fail')
- f.close()
- if self.failure >= 1:
- return defer.succeed(self)
- else:
- return defer.fail(failure.Failure)
-
-class MockMeasurementManager(TaskManager):
- def __init__(self):
- self.successes = []
- TaskManager.__init__(self)
-
- def failed(self, failure, task):
- pass
-
- def succeeded(self, result, task):
- self.successes.append((result, task))
-
-class MockReporter(object):
- def __init__(self):
- self.created = defer.Deferred()
-
- def writeReportEntry(self, entry):
- pass
-
- def createReport(self):
- self.created.callback(self)
-
- def finish(self):
- pass
-
-class MockFailure(Exception):
- pass
-
-## from test_managers
-mockFailure = failure.Failure(MockFailure('mock'))
-
-class MockSuccessTask(BaseTask):
- def run(self):
- return defer.succeed(42)
-
-class MockFailTask(BaseTask):
- def run(self):
- return defer.fail(mockFailure)
-
-class MockFailOnceTask(BaseTask):
- def run(self):
- if self.failures >= 1:
- return defer.succeed(42)
- else:
- return defer.fail(mockFailure)
-
-class MockSuccessTaskWithTimeout(TaskWithTimeout):
- def run(self):
- return defer.succeed(42)
-
-class MockFailTaskThatTimesOut(TaskWithTimeout):
- def run(self):
- return defer.Deferred()
-
-class MockTimeoutOnceTask(TaskWithTimeout):
- def run(self):
- if self.failures >= 1:
- return defer.succeed(42)
- else:
- return defer.Deferred()
-
-class MockFailTaskWithTimeout(TaskWithTimeout):
- def run(self):
- return defer.fail(mockFailure)
-
-
-class MockNetTest(object):
- def __init__(self):
- self.successes = []
-
- def succeeded(self, measurement):
- self.successes.append(measurement)
-
-class MockMeasurement(TaskWithTimeout):
- def __init__(self, net_test):
- TaskWithTimeout.__init__(self)
- self.netTest = net_test
-
- def succeeded(self, result):
- return self.netTest.succeeded(42)
-
-class MockSuccessMeasurement(MockMeasurement):
- def run(self):
- return defer.succeed(42)
-
-class MockFailMeasurement(MockMeasurement):
- def run(self):
- return defer.fail(mockFailure)
-
-class MockFailOnceMeasurement(MockMeasurement):
- def run(self):
- if self.failures >= 1:
- return defer.succeed(42)
- else:
- return defer.fail(mockFailure)
-
-class MockDirector(object):
- def __init__(self):
- self.successes = []
-
- def measurementFailed(self, failure, measurement):
- pass
-
- def measurementSucceeded(self, measurement):
- self.successes.append(measurement)
-
-## from test_reporter.py
-class MockOReporter(object):
- def __init__(self):
- self.created = defer.Deferred()
-
- def writeReportEntry(self, entry):
- return defer.succeed(42)
-
- def finish(self):
- pass
-
- def createReport(self):
- from ooni.utils import log
- log.debug("Creating report with %s" % self)
- self.created.callback(self)
-
-class MockOReporterThatFailsWrite(MockOReporter):
- def writeReportEntry(self, entry):
- raise MockFailure
-
-class MockOReporterThatFailsOpen(MockOReporter):
- def createReport(self):
- self.created.errback(failure.Failure(MockFailure()))
-
-class MockOReporterThatFailsWriteOnce(MockOReporter):
- def __init__(self):
- self.failure = 0
- MockOReporter.__init__(self)
-
- def writeReportEntry(self, entry):
- if self.failure >= 1:
- return defer.succeed(42)
- else:
- self.failure += 1
- raise MockFailure
-
-class MockTaskManager(TaskManager):
- def __init__(self):
- self.successes = []
- TaskManager.__init__(self)
-
- def failed(self, failure, task):
- pass
-
- def succeeded(self, result, task):
- self.successes.append((result, task))
-
diff --git a/tests/test-class-design.py b/tests/test-class-design.py
deleted file mode 100644
index bb80cd3..0000000
--- a/tests/test-class-design.py
+++ /dev/null
@@ -1,101 +0,0 @@
-#!/usr/bin/env python
-#
-# testing classes to test multiple inheritance.
-# these are not meant to be run by trial, though they could be made to be so.
-# i didn't know where to put them. --isis
-
-import abc
-from pprint import pprint
-from inspect import classify_class_attrs
-
-class PluginBase(object):
- __metaclass__ = abc.ABCMeta
-
- @abc.abstractproperty
- def name(self):
- return 'you should not see this'
-
- @name.setter
- def name(self, value):
- return 'you should not set this'
-
- @name.deleter
- def name(self):
- return 'you should not del this'
-
- @abc.abstractmethod
- def inputParser(self, line):
- """Do something to parse something."""
- return
-
-class Foo(object):
- woo = "this class has some shit in it"
- def bar(self):
- print "i'm a Foo.bar()!"
- print woo
-
-class KwargTest(Foo):
- _name = "isis"
-
- #def __new__(cls, *a, **kw):
- # return super(KwargTest, cls).__new__(cls, *a, **kw)
-
- @property
- def name(self):
- return self._name
-
- @name.setter
- def name(self, value):
- self._name = value
-
- def __init__(self, *a, **kw):
- super(KwargTest, self).__init__()
-
- ## this causes the instantion args to override the class attrs
- for key, value in kw.items():
- setattr(self.__class__, key, value)
-
- print "%s.__init__(): self.__dict__ = %s" \
- % (type(self), pprint(type(self).__dict__))
-
- for attr in classify_class_attrs(self):
- print attr
-
- @classmethod
- def sayname(cls):
- print cls.name
-
-class KwargTestChild(KwargTest):
- name = "arturo"
- def __init__(self):
- super(KwargTestChild, self).__init__()
- print self.name
-
-class KwargTestChildOther(KwargTest):
- def __init__(self, name="robot", does="lasers"):
- super(KwargTestChildOther, self).__init__()
- print self.name
-
-
-if __name__ == "__main__":
- print "class KwargTest attr name: %s" % KwargTest.name
- kwargtest = KwargTest()
- print "KwargTest instantiated wo args"
- print "kwargtest.name: %s" % kwargtest.name
- print "kwargtest.sayname(): %s" % kwargtest.sayname()
- kwargtest2 = KwargTest(name="lovecruft", does="hacking")
- print "KwargTest instantiated with name args"
- print "kwargtest.name: %s" % kwargtest2.name
- print "kwargtest.sayname(): %s" % kwargtest2.sayname()
-
- print "class KwargTestChild attr name: %s" % KwargTestChild.name
- kwargtestchild = KwargTestChild()
- print "KwargTestChild instantiated wo args"
- print "kwargtestchild.name: %s" % kwargtestchild.name
- print "kwargtestchild.sayname(): %s" % kwargtestchild.sayname()
-
- print "class KwargTestChildOther attr name: %s" % KwargTestChildOther.name
- kwargtestchildother = KwargTestChildOther()
- print "KwargTestChildOther instantiated wo args"
- print "kwargtestchildother.name: %s" % kwargtestchildother.name
- print "kwargtestchildother.sayname(): %s" % kwargtestchildother.sayname()
diff --git a/tests/test_director.py b/tests/test_director.py
deleted file mode 100644
index a9dfbe8..0000000
--- a/tests/test_director.py
+++ /dev/null
@@ -1,59 +0,0 @@
-from twisted.internet import defer, base
-from twisted.trial import unittest
-
-from ooni.director import Director
-from ooni.nettest import NetTestLoader
-from tests.mocks import MockReporter
-base.DelayedCall.debug = True
-
-net_test_string = """
-from twisted.python import usage
-from ooni.nettest import NetTestCase
-
-class UsageOptions(usage.Options):
- optParameters = [['spam', 's', None, 'ham']]
-
-class DummyTestCase(NetTestCase):
- inputFile = ['file', 'f', None, 'The input File']
-
- usageOptions = UsageOptions
-
- def test_a(self):
- self.report['bar'] = 'bar'
-
- def test_b(self):
- self.report['foo'] = 'foo'
-"""
-
-
-dummyArgs = ('--spam', 1, '--file', 'dummyInputFile.txt')
-
-class TestDirector(unittest.TestCase):
- timeout = 1
- def setUp(self):
- with open('dummyInputFile.txt', 'w') as f:
- for i in range(10):
- f.write("%s\n" % i)
-
- self.reporters = [MockReporter()]
- self.director = Director()
-
- def tearDown(self):
- pass
-
- def test_start_net_test(self):
- ntl = NetTestLoader(dummyArgs)
- ntl.loadNetTestString(net_test_string)
-
- ntl.checkOptions()
- d = self.director.startNetTest('', ntl, self.reporters)
-
- @d.addCallback
- def done(result):
- self.assertEqual(self.director.successfulMeasurements, 20)
-
- return d
-
- def test_stop_net_test(self):
- pass
-
diff --git a/tests/test_dns.py b/tests/test_dns.py
deleted file mode 100644
index e9bb524..0000000
--- a/tests/test_dns.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#
-# This unittest is to verify that our usage of the twisted DNS resolver does
-# not break with new versions of twisted.
-
-import pdb
-from twisted.trial import unittest
-
-from twisted.internet import reactor
-
-from twisted.names import dns
-from twisted.names.client import Resolver
-
-class DNSTest(unittest.TestCase):
- def test_a_lookup_ooni_query(self):
- def done_query(message, *arg):
- answer = message.answers[0]
- self.assertEqual(answer.type, 1)
-
- dns_query = [dns.Query('ooni.nu', type=dns.A)]
- resolver = Resolver(servers=[('8.8.8.8', 53)])
- d = resolver.queryUDP(dns_query)
- d.addCallback(done_query)
- return d
-
diff --git a/tests/test_inputunit.py b/tests/test_inputunit.py
deleted file mode 100644
index 1f9043c..0000000
--- a/tests/test_inputunit.py
+++ /dev/null
@@ -1,29 +0,0 @@
-import unittest
-from ooni.inputunit import InputUnit, InputUnitFactory
-
-def dummyGenerator():
- for x in range(100):
- yield x
-
-class TestInputUnit(unittest.TestCase):
- def test_input_unit_factory(self):
- inputUnit = InputUnitFactory(range(100))
- for i in inputUnit:
- self.assertEqual(len(list(i)), inputUnit.inputUnitSize)
-
- def test_input_unit(self):
- inputs = range(100)
- inputUnit = InputUnit(inputs)
- idx = 0
- for i in inputUnit:
- idx += 1
-
- self.assertEqual(idx, 100)
-
- def test_input_unit_factory_length(self):
- inputUnitFactory = InputUnitFactory(range(100))
- l1 = len(inputUnitFactory)
- l2 = sum(1 for _ in inputUnitFactory)
- self.assertEqual(l1, 10)
- self.assertEqual(l2, 10)
-
diff --git a/tests/test_managers.py b/tests/test_managers.py
deleted file mode 100644
index 39f0881..0000000
--- a/tests/test_managers.py
+++ /dev/null
@@ -1,215 +0,0 @@
-from twisted.trial import unittest
-from twisted.python import failure
-from twisted.internet import defer, task
-
-from ooni.tasks import BaseTask, TaskWithTimeout, TaskTimedOut
-from ooni.managers import TaskManager, MeasurementManager
-
-from tests.mocks import MockSuccessTask, MockFailTask, MockFailOnceTask, MockFailure
-from tests.mocks import MockSuccessTaskWithTimeout, MockFailTaskThatTimesOut
-from tests.mocks import MockTimeoutOnceTask, MockFailTaskWithTimeout
-from tests.mocks import MockTaskManager, mockFailure, MockDirector
-from tests.mocks import MockNetTest, MockMeasurement, MockSuccessMeasurement
-from tests.mocks import MockFailMeasurement, MockFailOnceMeasurement
-
-class TestTaskManager(unittest.TestCase):
- timeout = 1
- def setUp(self):
- self.measurementManager = MockTaskManager()
- self.measurementManager.concurrency = 20
- self.measurementManager.retries = 2
-
- self.measurementManager.start()
-
- self.clock = task.Clock()
-
- def schedule_successful_tasks(self, task_type, number=1):
- all_done = []
- for x in range(number):
- mock_task = task_type()
- all_done.append(mock_task.done)
- self.measurementManager.schedule(mock_task)
-
- d = defer.DeferredList(all_done)
- @d.addCallback
- def done(res):
- for task_result, task_instance in self.measurementManager.successes:
- self.assertEqual(task_result, 42)
- self.assertIsInstance(task_instance, task_type)
-
- return d
-
- def schedule_failing_tasks(self, task_type, number=1):
- all_done = []
- for x in range(number):
- mock_task = task_type()
- all_done.append(mock_task.done)
- self.measurementManager.schedule(mock_task)
-
- d = defer.DeferredList(all_done)
- @d.addCallback
- def done(res):
- # 10*2 because 2 is the number of retries
- self.assertEqual(len(self.measurementManager.failures), number*3)
- for task_result, task_instance in self.measurementManager.failures:
- self.assertEqual(task_result, mockFailure)
- self.assertIsInstance(task_instance, task_type)
-
- return d
-
- def test_schedule_failing_with_mock_failure_task(self):
- mock_task = MockFailTask()
- self.measurementManager.schedule(mock_task)
- self.assertFailure(mock_task.done, MockFailure)
- return mock_task.done
-
- def test_schedule_successful_one_task(self):
- return self.schedule_successful_tasks(MockSuccessTask)
-
- def test_schedule_successful_one_task_with_timeout(self):
- return self.schedule_successful_tasks(MockSuccessTaskWithTimeout)
-
- def test_schedule_failing_tasks_that_timesout(self):
- self.measurementManager.retries = 0
-
- task_type = MockFailTaskThatTimesOut
- task_timeout = 5
-
- mock_task = task_type()
- mock_task.timeout = task_timeout
- mock_task.clock = self.clock
-
- self.measurementManager.schedule(mock_task)
-
- self.clock.advance(task_timeout)
-
- @mock_task.done.addBoth
- def done(res):
- self.assertEqual(len(self.measurementManager.failures), 1)
- for task_result, task_instance in self.measurementManager.failures:
- self.assertIsInstance(task_instance, task_type)
-
- return mock_task.done
-
- def test_schedule_time_out_once(self):
- task_type = MockTimeoutOnceTask
- task_timeout = 5
-
- mock_task = task_type()
- mock_task.timeout = task_timeout
- mock_task.clock = self.clock
-
- self.measurementManager.schedule(mock_task)
-
- self.clock.advance(task_timeout)
-
- @mock_task.done.addBoth
- def done(res):
- self.assertEqual(len(self.measurementManager.failures), 1)
- for task_result, task_instance in self.measurementManager.failures:
- self.assertIsInstance(task_instance, task_type)
-
- for task_result, task_instance in self.measurementManager.successes:
- self.assertEqual(task_result, 42)
- self.assertIsInstance(task_instance, task_type)
-
- return mock_task.done
-
-
- def test_schedule_failing_one_task(self):
- return self.schedule_failing_tasks(MockFailTask)
-
- def test_schedule_failing_one_task_with_timeout(self):
- return self.schedule_failing_tasks(MockFailTaskWithTimeout)
-
- def test_schedule_successful_ten_tasks(self):
- return self.schedule_successful_tasks(MockSuccessTask, number=10)
-
- def test_schedule_failing_ten_tasks(self):
- return self.schedule_failing_tasks(MockFailTask, number=10)
-
- def test_schedule_successful_27_tasks(self):
- return self.schedule_successful_tasks(MockSuccessTask, number=27)
-
- def test_schedule_failing_27_tasks(self):
- return self.schedule_failing_tasks(MockFailTask, number=27)
-
- def test_task_retry_and_succeed(self):
- mock_task = MockFailOnceTask()
- self.measurementManager.schedule(mock_task)
-
- @mock_task.done.addCallback
- def done(res):
- self.assertEqual(len(self.measurementManager.failures), 1)
-
- self.assertEqual(self.measurementManager.failures,
- [(mockFailure, mock_task)])
- self.assertEqual(self.measurementManager.successes,
- [(42, mock_task)])
-
- return mock_task.done
-
- def dd_test_task_retry_and_succeed_56_tasks(self):
- """
- XXX this test fails in a non-deterministic manner.
- """
- all_done = []
- number = 56
- for x in range(number):
- mock_task = MockFailOnceTask()
- all_done.append(mock_task.done)
- self.measurementManager.schedule(mock_task)
-
- d = defer.DeferredList(all_done)
-
- @d.addCallback
- def done(res):
- self.assertEqual(len(self.measurementManager.failures), number)
-
- for task_result, task_instance in self.measurementManager.successes:
- self.assertEqual(task_result, 42)
- self.assertIsInstance(task_instance, MockFailOnceTask)
-
- return d
-
-class TestMeasurementManager(unittest.TestCase):
- def setUp(self):
- mock_director = MockDirector()
-
- self.measurementManager = MeasurementManager()
- self.measurementManager.director = mock_director
-
- self.measurementManager.concurrency = 10
- self.measurementManager.retries = 2
-
- self.measurementManager.start()
-
- self.mockNetTest = MockNetTest()
-
- def test_schedule_and_net_test_notified(self, number=1):
- # XXX we should probably be inheriting from the base test class
- mock_task = MockSuccessMeasurement(self.mockNetTest)
- self.measurementManager.schedule(mock_task)
-
- @mock_task.done.addCallback
- def done(res):
- self.assertEqual(self.mockNetTest.successes,
- [42])
-
- self.assertEqual(len(self.mockNetTest.successes), 1)
- return mock_task.done
-
- def test_schedule_failing_one_measurement(self):
- mock_task = MockFailMeasurement(self.mockNetTest)
- self.measurementManager.schedule(mock_task)
-
- @mock_task.done.addErrback
- def done(failure):
- self.assertEqual(len(self.measurementManager.failures), 3)
-
- self.assertEqual(failure, mockFailure)
- self.assertEqual(len(self.mockNetTest.successes), 0)
-
- return mock_task.done
-
-
diff --git a/tests/test_mutate.py b/tests/test_mutate.py
deleted file mode 100644
index 7e30586..0000000
--- a/tests/test_mutate.py
+++ /dev/null
@@ -1,15 +0,0 @@
-import unittest
-from ooni.kit import daphn3
-
-class TestDaphn3(unittest.TestCase):
- def test_mutate_string(self):
- original_string = '\x00\x00\x00'
- mutated = daphn3.daphn3MutateString(original_string, 1)
- self.assertEqual(mutated, '\x00\x01\x00')
- def test_mutate_daphn3(self):
- original_dict = [{'client': '\x00\x00\x00'},
- {'server': '\x00\x00\x00'}]
- mutated_dict = daphn3.daphn3Mutate(original_dict, 1, 1)
- self.assertEqual(mutated_dict, [{'client': '\x00\x00\x00'},
- {'server': '\x00\x01\x00'}])
-
diff --git a/tests/test_nettest.py b/tests/test_nettest.py
deleted file mode 100644
index 78240d5..0000000
--- a/tests/test_nettest.py
+++ /dev/null
@@ -1,268 +0,0 @@
-import os
-from StringIO import StringIO
-from tempfile import TemporaryFile, mkstemp
-
-from twisted.trial import unittest
-from twisted.internet import defer, reactor
-from twisted.python.usage import UsageError
-
-from ooni.nettest import NetTest, InvalidOption, MissingRequiredOption
-from ooni.nettest import NetTestLoader, FailureToLoadNetTest
-from ooni.tasks import BaseTask
-from ooni.utils import NotRootError
-
-from ooni.director import Director
-
-from ooni.managers import TaskManager
-
-from tests.mocks import MockMeasurement, MockMeasurementFailOnce
-from tests.mocks import MockNetTest, MockDirector, MockReporter
-from tests.mocks import MockMeasurementManager
-defer.setDebugging(True)
-
-net_test_string = """
-from twisted.python import usage
-from ooni.nettest import NetTestCase
-
-class UsageOptions(usage.Options):
- optParameters = [['spam', 's', None, 'ham']]
-
-class DummyTestCase(NetTestCase):
-
- usageOptions = UsageOptions
-
- def test_a(self):
- self.report['bar'] = 'bar'
-
- def test_b(self):
- self.report['foo'] = 'foo'
-"""
-
-net_test_root_required = net_test_string+"""
- requiresRoot = True
-"""
-
-net_test_string_with_file = """
-from twisted.python import usage
-from ooni.nettest import NetTestCase
-
-class UsageOptions(usage.Options):
- optParameters = [['spam', 's', None, 'ham']]
-
-class DummyTestCase(NetTestCase):
- inputFile = ['file', 'f', None, 'The input File']
-
- usageOptions = UsageOptions
-
- def test_a(self):
- self.report['bar'] = 'bar'
-
- def test_b(self):
- self.report['foo'] = 'foo'
-"""
-
-net_test_string_with_required_option = """
-from twisted.python import usage
-from ooni.nettest import NetTestCase
-
-class UsageOptions(usage.Options):
- optParameters = [['spam', 's', None, 'ham'],
- ['foo', 'o', None, 'moo'],
- ['bar', 'o', None, 'baz'],
- ]
-
-class DummyTestCase(NetTestCase):
- inputFile = ['file', 'f', None, 'The input File']
-
- usageOptions = UsageOptions
-
- def test_a(self):
- self.report['bar'] = 'bar'
-
- def test_b(self):
- self.report['foo'] = 'foo'
-
- requiredOptions = ['foo', 'bar']
-"""
-
-dummyInputs = range(1)
-dummyArgs = ('--spam', 'notham')
-dummyOptions = {'spam':'notham'}
-dummyInvalidArgs = ('--cram', 'jam')
-dummyInvalidOptions= {'cram':'jam'}
-dummyArgsWithRequiredOptions = ('--foo', 'moo', '--bar', 'baz')
-dummyRequiredOptions = {'foo':'moo', 'bar':'baz'}
-dummyArgsWithFile = ('--spam', 'notham', '--file', 'dummyInputFile.txt')
-
-class TestNetTest(unittest.TestCase):
- timeout = 1
- def setUp(self):
- with open('dummyInputFile.txt', 'w') as f:
- for i in range(10):
- f.write("%s\n" % i)
-
- def assertCallable(self, thing):
- self.assertIn('__call__', dir(thing))
-
- def verifyMethods(self, testCases):
- uniq_test_methods = set()
- for test_class, test_methods in testCases:
- instance = test_class()
- for test_method in test_methods:
- c = getattr(instance, test_method)
- self.assertCallable(c)
- uniq_test_methods.add(test_method)
- self.assertEqual(set(['test_a', 'test_b']), uniq_test_methods)
-
- def test_load_net_test_from_file(self):
- """
- Given a file verify that the net test cases are properly
- generated.
- """
- __, net_test_file = mkstemp()
- with open(net_test_file, 'w') as f:
- f.write(net_test_string)
- f.close()
-
- ntl = NetTestLoader(dummyArgs)
- ntl.loadNetTestFile(net_test_file)
-
- self.verifyMethods(ntl.testCases)
- os.unlink(net_test_file)
-
- def test_load_net_test_from_str(self):
- """
- Given a file like object verify that the net test cases are properly
- generated.
- """
- ntl = NetTestLoader(dummyArgs)
- ntl.loadNetTestString(net_test_string)
-
- self.verifyMethods(ntl.testCases)
-
- def test_load_net_test_from_StringIO(self):
- """
- Given a file like object verify that the net test cases are properly
- generated.
- """
- ntl = NetTestLoader(dummyArgs)
- ntl.loadNetTestString(net_test_string)
-
- self.verifyMethods(ntl.testCases)
-
- def test_load_with_option(self):
- ntl = NetTestLoader(dummyArgs)
- ntl.loadNetTestString(net_test_string)
-
- self.assertIsInstance(ntl, NetTestLoader)
- for test_klass, test_meth in ntl.testCases:
- for option in dummyOptions.keys():
- self.assertIn(option, test_klass.usageOptions())
-
- def test_load_with_invalid_option(self):
- try:
- ntl = NetTestLoader(dummyInvalidArgs)
- ntl.loadNetTestString(net_test_string)
-
- ntl.checkOptions()
- raise Exception
- except UsageError:
- pass
-
- def test_load_with_required_option(self):
- ntl = NetTestLoader(dummyArgsWithRequiredOptions)
- ntl.loadNetTestString(net_test_string_with_required_option)
-
- self.assertIsInstance(ntl, NetTestLoader)
-
- def test_load_with_missing_required_option(self):
- try:
- ntl = NetTestLoader(dummyArgs)
- ntl.loadNetTestString(net_test_string_with_required_option)
-
- except MissingRequiredOption:
- pass
-
- def test_net_test_inputs(self):
- ntl = NetTestLoader(dummyArgsWithFile)
- ntl.loadNetTestString(net_test_string_with_file)
-
- ntl.checkOptions()
-
- # XXX: if you use the same test_class twice you will have consumed all
- # of its inputs!
- tested = set([])
- for test_class, test_method in ntl.testCases:
- if test_class not in tested:
- tested.update([test_class])
- self.assertEqual(len(list(test_class.inputs)), 10)
-
- def test_setup_local_options_in_test_cases(self):
- ntl = NetTestLoader(dummyArgs)
- ntl.loadNetTestString(net_test_string)
-
- ntl.checkOptions()
-
- for test_class, test_method in ntl.testCases:
- self.assertEqual(test_class.localOptions, dummyOptions)
-
- def test_generate_measurements_size(self):
- ntl = NetTestLoader(dummyArgsWithFile)
- ntl.loadNetTestString(net_test_string_with_file)
-
- ntl.checkOptions()
- net_test = NetTest(ntl, None)
-
- measurements = list(net_test.generateMeasurements())
- self.assertEqual(len(measurements), 20)
-
- def test_net_test_completed_callback(self):
- ntl = NetTestLoader(dummyArgsWithFile)
- ntl.loadNetTestString(net_test_string_with_file)
-
- ntl.checkOptions()
- director = Director()
-
- d = director.startNetTest('', ntl, [MockReporter()])
-
- @d.addCallback
- def complete(result):
- #XXX: why is the return type (True, None) ?
- self.assertEqual(result, [(True,None)])
- self.assertEqual(director.successfulMeasurements, 20)
-
- return d
-
- def test_require_root_succeed(self):
- #XXX: will require root to run
- ntl = NetTestLoader(dummyArgs)
- ntl.loadNetTestString(net_test_root_required)
-
- for test_class, method in ntl.testCases:
- self.assertTrue(test_class.requiresRoot)
-
- #def test_require_root_failed(self):
- # #XXX: will fail if you run as root
- # try:
- # net_test = NetTestLoader(StringIO(net_test_root_required),
- # dummyArgs)
- # except NotRootError:
- # pass
-
- #def test_create_report_succeed(self):
- # pass
-
- #def test_create_report_failed(self):
- # pass
-
- #def test_run_all_test(self):
- # raise NotImplementedError
-
- #def test_resume_test(self):
- # pass
-
- #def test_progress(self):
- # pass
-
- #def test_time_out(self):
- # raise NotImplementedError
diff --git a/tests/test_otime.py b/tests/test_otime.py
deleted file mode 100644
index 80979f2..0000000
--- a/tests/test_otime.py
+++ /dev/null
@@ -1,15 +0,0 @@
-import unittest
-from datetime import datetime
-from ooni import otime
-
-test_date = datetime(2002, 6, 26, 22, 45, 49)
-
-class TestOtime(unittest.TestCase):
- def test_timestamp(self):
- self.assertEqual(otime.timestamp(test_date), "2002-06-26T224549Z")
-
- def test_fromTimestamp(self):
- time_stamp = otime.timestamp(test_date)
- self.assertEqual(test_date, otime.fromTimestamp(time_stamp))
-
-
diff --git a/tests/test_reporter.py b/tests/test_reporter.py
deleted file mode 100644
index e21b7a1..0000000
--- a/tests/test_reporter.py
+++ /dev/null
@@ -1,238 +0,0 @@
-from twisted.internet import defer
-from twisted.trial import unittest
-
-from ooni.reporter import Report, YAMLReporter, OONIBReporter, safe_dump
-from ooni.managers import ReportEntryManager, TaskManager
-from ooni.nettest import NetTest, NetTestState
-from ooni.errors import ReportNotCreated, ReportAlreadyClosed
-
-from ooni.tasks import TaskWithTimeout
-from tests.mocks import MockOReporter, MockTaskManager
-from tests.mocks import MockMeasurement, MockNetTest
-from tests.mocks import MockOReporterThatFailsWrite
-from tests.mocks import MockOReporterThatFailsWriteOnce
-from tests.mocks import MockOReporterThatFailsOpen
-
-from twisted.python import failure
-import yaml
-
-class TestReport(unittest.TestCase):
- def setUp(self):
- pass
- def tearDown(self):
- pass
- def test_create_report_with_no_reporter(self):
- report = Report([],ReportEntryManager())
- self.assertIsInstance(report, Report)
-
- def test_create_report_with_single_reporter(self):
- report = Report([MockOReporter()], ReportEntryManager())
- self.assertIsInstance(report, Report)
-
- def test_create_report_with_multiple_reporters(self):
- report = Report([MockOReporter() for x in xrange(3)],
- ReportEntryManager())
- self.assertIsInstance(report, Report)
-
- def test_report_open_with_single_reporter(self):
- report = Report([MockOReporter()],ReportEntryManager())
- d = report.open()
- return d
-
- def test_report_open_with_multiple_reporter(self):
- report = Report([MockOReporter() for x in xrange(3)],
- ReportEntryManager())
- d = report.open()
- return d
-
- def test_fail_to_open_report_with_single_reporter(self):
- report = Report([MockOReporterThatFailsOpen()],
- ReportEntryManager())
- d = report.open()
- def f(x):
- self.assertEquals(len(report.reporters), 0)
- d.addCallback(f)
- return d
-
- def test_fail_to_open_single_report_with_multiple_reporter(self):
- report = Report([MockOReporterThatFailsOpen(), MockOReporter(),
- MockOReporter()], ReportEntryManager())
- d = report.open()
- def f(x):
- self.assertEquals(len(report.reporters),2)
- d.addCallback(f)
- return d
-
- def test_fail_to_open_all_reports_with_multiple_reporter(self):
- report = Report([MockOReporterThatFailsOpen() for x in xrange(3)],
- ReportEntryManager())
- d = report.open()
- def f(x):
- self.assertEquals(len(report.reporters),0)
- d.addCallback(f)
- return d
-
- def test_write_report_with_single_reporter_and_succeed(self):
- #XXX: verify that the MockOReporter writeReportEntry succeeds
- report = Report([MockOReporter()], ReportEntryManager())
- report.open()
- d = report.write(MockMeasurement(MockNetTest()))
- return d
-
- def test_write_report_with_single_reporter_and_fail_after_timeout(self):
- report = Report([MockOReporterThatFailsWrite()], ReportEntryManager())
- report.open()
- d = report.write(MockMeasurement(MockNetTest()))
- def f(err):
- self.assertEquals(len(report.reporters),0)
- d.addBoth(f)
- return d
-
- def test_write_report_with_single_reporter_and_succeed_after_timeout(self):
- report = Report([MockOReporterThatFailsWriteOnce()], ReportEntryManager())
- report.open()
- d = report.write(MockMeasurement(MockNetTest()))
- return d
-
- def test_write_report_with_multiple_reporter_and_succeed(self):
- report = Report([MockOReporter() for x in xrange(3)], ReportEntryManager())
- report.open()
- d = report.write(MockMeasurement(MockNetTest()))
- return d
-
- def test_write_report_with_multiple_reporter_and_fail_a_single_reporter(self):
- report = Report([MockOReporter(), MockOReporter(), MockOReporterThatFailsWrite()], ReportEntryManager())
- d = report.open()
-
- self.assertEquals(len(report.reporters),3)
- d = report.write(MockMeasurement(MockNetTest()))
-
- def f(x):
- # one of the reporters should have been removed
- self.assertEquals(len(report.reporters), 2)
- d.addBoth(f)
- return d
-
- def test_write_report_with_multiple_reporter_and_fail_all_reporter(self):
- report = Report([MockOReporterThatFailsWrite() for x in xrange(3)], ReportEntryManager())
- report.open()
- d = report.write(MockMeasurement(MockNetTest()))
- def f(err):
- self.assertEquals(len(report.reporters),0)
- d.addErrback(f)
- return d
-
-class TestYAMLReporter(unittest.TestCase):
- def setUp(self):
- self.testDetails = {'software_name': 'ooniprobe', 'options':
- {'pcapfile': None, 'help': 0, 'subargs': ['-f', 'alexa_10'], 'resume':
- 0, 'parallelism': '10', 'no-default-reporter': 0, 'testdeck': None,
- 'test': 'nettests/blocking/http_requests.py', 'logfile': None,
- 'collector': None, 'reportfile': None}, 'test_version': '0.2.3',
- 'software_version': '0.0.10', 'test_name': 'http_requests_test',
- 'start_time': 1362054343.0, 'probe_asn': 'AS0', 'probe_ip':
- '127.0.0.1', 'probe_cc': 'US'}
-
- def tearDown(self):
- pass
- def test_create_yaml_reporter(self):
- self.assertIsInstance(YAMLReporter(self.testDetails),
- YAMLReporter)
-
- def test_open_yaml_report_and_succeed(self):
- r = YAMLReporter(self.testDetails)
- r.createReport()
- # verify that testDetails was written to report properly
- def f(r):
- r._stream.seek(0)
- details, = yaml.safe_load_all(r._stream)
- self.assertEqual(details, self.testDetails)
- r.created.addCallback(f)
- return r.created
-
- #def test_open_yaml_report_and_fail(self):
- # #XXX: YAMLReporter does not handle failures of this type
- # pass
-
- def test_write_yaml_report_entry(self):
- r = YAMLReporter(self.testDetails)
- r.createReport()
-
- report_entry = {'foo':'bar', 'bin':'baz'}
- r.writeReportEntry(report_entry)
-
- # verify that details and entry were written to report
- def f(r):
- r._stream.seek(0)
- report = yaml.safe_load_all(r._stream)
- details, entry = report
- self.assertEqual(details, self.testDetails)
- self.assertEqual(entry, report_entry)
- r.created.addCallback(f)
- return r.created
-
- def test_write_multiple_yaml_report_entry(self):
- r = YAMLReporter(self.testDetails)
- r.createReport()
- def reportEntry():
- for x in xrange(10):
- yield {'foo':'bar', 'bin':'baz', 'item':x}
- for entry in reportEntry():
- r.writeReportEntry(entry)
- # verify that details and multiple entries were written to report
- def f(r):
- r._stream.seek(0)
- report = yaml.safe_load_all(r._stream)
- details = report.next()
- self.assertEqual(details, self.testDetails)
- self.assertEqual([r for r in report], [r for r in reportEntry()])
- r.created.addCallback(f)
- return r.created
-
- def test_close_yaml_report(self):
- r = YAMLReporter(self.testDetails)
- r.createReport()
- r.finish()
- self.assertTrue(r._stream.closed)
-
- def test_write_yaml_report_after_close(self):
- r = YAMLReporter(self.testDetails)
- r.createReport()
- r.finish()
- def f(r):
- r.writeReportEntry("foo")
- r.created.addCallback(f)
- self.assertFailure(r.created, ReportAlreadyClosed)
-
- def test_write_yaml_report_before_open(self):
- r = YAMLReporter(self.testDetails)
- def f(r):
- r.writeReportEntry("foo")
- r.created.addCallback(f)
- self.assertFailure(r.created, ReportNotCreated)
-
-#class TestOONIBReporter(unittest.TestCase):
-# def setUp(self):
-# pass
-# def tearDown(self):
-# pass
-# def test_create_oonib_reporter(self):
-# raise NotImplementedError
-# def test_open_oonib_report_and_succeed(self):
-# raise NotImplementedError
-# def test_open_oonib_report_and_fail(self):
-# raise NotImplementedError
-# def test_write_oonib_report_entry_and_succeed(self):
-# raise NotImplementedError
-# def test_write_oonib_report_entry_and_succeed_after_timeout(self):
-# raise NotImplementedError
-# def test_write_oonib_report_entry_and_fail_after_timeout(self):
-# raise NotImplementedError
-# def test_write_oonib_report_after_close(self):
-# raise NotImplementedError
-# def test_write_oonib_report_before_open(self):
-# raise NotImplementedError
-# def test_close_oonib_report_and_succeed(self):
-# raise NotImplementedError
-# def test_close_oonib_report_and_fail(self):
-# raise NotImplementedError
diff --git a/tests/test_safe_represent.py b/tests/test_safe_represent.py
deleted file mode 100644
index 82a5196..0000000
--- a/tests/test_safe_represent.py
+++ /dev/null
@@ -1,14 +0,0 @@
-import yaml
-
-from twisted.trial import unittest
-
-from ooni.reporter import OSafeDumper
-
-from scapy.all import IP, UDP
-
-class TestScapyRepresent(unittest.TestCase):
- def test_represent_scapy(self):
- data = IP()/UDP()
- yaml.dump_all([data], Dumper=OSafeDumper)
-
-
diff --git a/tests/test_trueheaders.py b/tests/test_trueheaders.py
deleted file mode 100644
index 9ac0a27..0000000
--- a/tests/test_trueheaders.py
+++ /dev/null
@@ -1,41 +0,0 @@
-from twisted.trial import unittest
-
-from ooni.utils.txagentwithsocks import TrueHeaders
-
-dummy_headers_dict = {
- 'Header1': ['Value1', 'Value2'],
- 'Header2': ['ValueA', 'ValueB']
-}
-
-dummy_headers_dict2 = {
- 'Header1': ['Value1', 'Value2'],
- 'Header2': ['ValueA', 'ValueB'],
- 'Header3': ['ValueA', 'ValueB'],
-}
-
-dummy_headers_dict3 = {
- 'Header1': ['Value1', 'Value2'],
- 'Header2': ['ValueA', 'ValueB'],
- 'Header4': ['ValueA', 'ValueB'],
-}
-
-
-class TestTrueHeaders(unittest.TestCase):
- def test_names_match(self):
- th = TrueHeaders(dummy_headers_dict)
- self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict)), set())
-
- def test_names_not_match(self):
- th = TrueHeaders(dummy_headers_dict)
- self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3']))
-
- th = TrueHeaders(dummy_headers_dict3)
- self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3', 'Header4']))
-
- def test_names_match_expect_ignore(self):
- th = TrueHeaders(dummy_headers_dict)
- self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2), ignore=['Header3']), set())
-
-
-
-
diff --git a/tests/test_utils.py b/tests/test_utils.py
deleted file mode 100644
index cc648e0..0000000
--- a/tests/test_utils.py
+++ /dev/null
@@ -1,20 +0,0 @@
-import unittest
-from ooni.utils import pushFilenameStack
-
-class TestUtils(unittest.TestCase):
- def test_pushFilenameStack(self):
- f = open("dummyfile", "w+")
- f.write("0\n")
- f.close()
- for i in xrange(1, 5):
- f = open("dummyfile.%s" % i, "w+")
- f.write("%s\n" % i)
- f.close()
-
- pushFilenameStack("dummyfile")
- for i in xrange(1, 5):
- f = open("dummyfile.%s" % i)
- c = f.readlines()[0].strip()
- self.assertEqual(str(i-1), str(c))
- f.close()
-
[ooni-probe/develop] Merge branch 'feature/task_manager' into feature/daemon
by isis@torproject.org 26 Jun '13
26 Jun '13
commit 2cb42717a5aa18a47f86fd8db364e124435cdea8
Merge: 033764e 2c80ceb
Author: Arturo Filastò <art(a)fuffa.org>
Date: Mon Apr 22 18:37:30 2013 +0200
Merge branch 'feature/task_manager' into feature/daemon
* feature/task_manager: (70 commits)
Disabled test_pushFilenameStack, because it's failing in travis
Use absolute filepath instead of relative
Only run trial unittests
Fix typo in ooniprobe sample config file
Add libpcap-dev to dependencies
Install pyrex and python deps via pip
Also run the unittests
Fix travis file
Further debugging and code robustness
Set the tor_state GeoIP class attribute on startup
Bump version to 0.0.11
Clean up tests for updated ooni codebase
Final cleanup and bugfixing for task_manager
Pass the IP address of the probe instead of an object
Make reporting more robust
Make the starting of ooniprobe via cmd line more robust
Parametrize task timeout and retry count
Add option to disable the default collector
Raise NoMoreReporters exception on failure
Fix typo in test deck before_i_commit
...
Conflicts:
ooni/config.py
ooni/director.py
ooni/errors.py
ooni/managers.py
ooni/nettest.py
ooni/oonicli.py
ooni/reporter.py
ooni/tests/disabled_test_utils.py
ooni/tests/test-class-design.py
ooni/tests/test_director.py
ooni/tests/test_nettest.py
ooni/tests/test_reporter.py
ooni/tests/test_utils.py
ooniprobe.conf.sample
tests/test_utils.py
.travis.yml | 17 ++
HACKING | 2 +-
README.md | 4 +
docs/source/tests/dnsspoof.rst | 2 +-
nettests/examples/example_dns_http.py | 11 +
nettests/experimental/dns_injection.py | 63 ++++++
nettests/experimental/http_filtering_bypassing.py | 84 +++++++
nettests/experimental/http_trix.py | 47 ++++
.../manipulation/http_header_field_manipulation.py | 4 +-
nettests/manipulation/http_host.py | 64 ++++--
nettests/manipulation/http_invalid_request_line.py | 5 +-
ooni/__init__.py | 4 +-
ooni/director.py | 44 ++--
ooni/errors.py | 33 ++-
ooni/geoip.py | 190 ++++++++++++++++
ooni/managers.py | 18 +-
ooni/nettest.py | 86 +++++--
ooni/oonicli.py | 47 ++--
ooni/reporter.py | 180 +++++++--------
ooni/tasks.py | 5 +
ooni/templates/dnst.py | 10 +-
ooni/templates/httpt.py | 8 +-
ooni/templates/scapyt.py | 5 +-
ooni/templates/tcpt.py | 2 +
ooni/tests/disabled_test_utils.py | 21 ++
ooni/tests/mocks.py | 2 +-
ooni/tests/test-class-design.py | 101 ---------
ooni/tests/test_director.py | 58 -----
ooni/tests/test_nettest.py | 57 ++---
ooni/tests/test_reporter.py | 234 --------------------
ooni/tests/test_utils.py | 20 --
ooni/utils/__init__.py | 7 +-
ooni/utils/geodata.py | 1 +
ooni/utils/txscapy.py | 8 +
ooniprobe.conf.sample | 18 +-
requirements.txt | 5 +-
setup.py | 55 +++--
37 files changed, 845 insertions(+), 677 deletions(-)
diff --cc ooni/director.py
index a9daf84,7ad99a9..5f23668
--- a/ooni/director.py
+++ b/ooni/director.py
@@@ -82,29 -83,11 +85,34 @@@ class Director(object)
self.torControlProtocol = None
+ def getNetTests(self):
+ nettests = {}
+ def is_nettest(filename):
+ return not filename == '__init__.py' \
+ and filename.endswith('.py')
+
+ for category in self.categories:
+ dirname = os.path.join(config.nettest_directory, category)
+ # look at every file in this category's directory
+ for filename in os.listdir(dirname):
+ if is_nettest(filename):
+ net_test_file = os.path.join(dirname, filename)
+ nettest = getNetTestInformation(net_test_file)
+
+ if nettest['id'] in nettests:
+ log.err("Found a two tests with the same name %s, %s" %
+ (nettest_path, nettests[nettest['id']]['path']))
+ else:
+ category = dirname.replace(config.nettest_directory, '')
+ nettests[nettest['id']] = nettest
+
+ return nettests
+
+ # This deferred is fired once all the measurements and their reporting
+ # tasks are completed.
+ self.allTestsDone = defer.Deferred()
+
+ @defer.inlineCallbacks
def start(self):
if config.privacy.includepcap:
log.msg("Starting")
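The getNetTests() hunk above walks every category subdirectory of config.nettest_directory, collects metadata for each *.py file other than __init__.py, and warns when two tests share the same id. The following is a self-contained sketch of that discovery pattern; the directory layout and the get_info callable standing in for getNetTestInformation are illustrative, not the actual ooni API.

import os

def find_nettests(nettest_directory, categories, get_info):
    # get_info stands in for ooni.nettest.getNetTestInformation and is
    # assumed to return a dict with at least 'id' and 'path' keys.
    nettests = {}
    for category in categories:
        dirname = os.path.join(nettest_directory, category)
        for filename in os.listdir(dirname):
            if filename == '__init__.py' or not filename.endswith('.py'):
                continue
            net_test_file = os.path.join(dirname, filename)
            info = get_info(net_test_file)
            if info['id'] in nettests:
                # Two files declare the same test id; keep the first one.
                print("Duplicate test id %s: %s and %s" % (
                    info['id'], net_test_file, nettests[info['id']]['path']))
            else:
                nettests[info['id']] = info
    return nettests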
diff --cc ooni/oonicli.py
index a99386d,c64710e..b5e8e27
--- a/ooni/oonicli.py
+++ b/ooni/oonicli.py
@@@ -11,10 -11,11 +11,10 @@@ from twisted.python import usag
from twisted.python.util import spewer
from ooni import errors
- from ooni import config
+ from ooni import config
from ooni.director import Director
from ooni.reporter import YAMLReporter, OONIBReporter
-
from ooni.nettest import NetTestLoader, MissingRequiredOption
from ooni.utils import log
commit f79570c7d08bdf0cef8690d4e87041908e0bcbac
Author: Arturo Filastò <art(a)fuffa.org>
Date: Sat Mar 9 02:06:26 2013 +0100
Attach errback to dl
---
ooni/nettest.py | 6 +++---
ooni/reporter.py | 6 +++++-
ooni/tasks.py | 1 -
ooni/tests/test_reporter.py | 10 +++-------
4 files changed, 11 insertions(+), 12 deletions(-)
diff --git a/ooni/nettest.py b/ooni/nettest.py
index dc72ce8..4e73164 100644
--- a/ooni/nettest.py
+++ b/ooni/nettest.py
@@ -10,7 +10,7 @@ from ooni.utils import log, checkForRoot, NotRootError, geodata
from ooni import config
from ooni import otime
-from ooni import errors as e
+from ooni import errors
from inspect import getmembers
from StringIO import StringIO
@@ -187,7 +187,7 @@ class NetTestLoader(object):
log.msg("We will include some geo data in the report")
try:
client_geodata = geodata.IPToLocation(config.probe_ip)
- except e.GeoIPDataFilesNotFound:
+ except errors.GeoIPDataFilesNotFound:
log.err("Unable to find the geoip data files")
client_geodata = {'city': None, 'countrycode': None, 'asn': None}
@@ -402,7 +402,7 @@ class NetTest(object):
self.state.taskDone()
if len(self.report.reporters) == 0:
- raise e.AllReportersFailed
+ raise errors.AllReportersFailed
return report_results
diff --git a/ooni/reporter.py b/ooni/reporter.py
index a7bd933..596a001 100644
--- a/ooni/reporter.py
+++ b/ooni/reporter.py
@@ -398,6 +398,7 @@ class Report(object):
log.debug("Reporters created: %s" % l)
# Should we consume errors silently?
dl = defer.DeferredList(l)
+ dl.addErrback(self.checkRemainingReporters)
return dl
def failedOpeningReport(self, failure, reporter):
@@ -410,7 +411,9 @@ class Report(object):
log.err("Failed to open %s reporter, giving up..." % reporter)
log.err("Reporter %s failed, removing from report..." % reporter)
self.reporters.remove(reporter)
- # Don't forward the exception unless there are no more reporters
+ return
+
+ def checkRemainingReporters(self, failure):
if len(self.reporters) == 0:
log.err("Removed last reporter %s" % reporter)
raise NoMoreReporters
@@ -463,6 +466,7 @@ class Report(object):
# to the deferredlist that checks to see if any reporters are left
# and raise an exception if there are no remaining reporters
dl = defer.DeferredList(l,fireOnOneErrback=True, consumeErrors=True)
+ dl.addErrback(self.checkRemainingReporters)
return dl
def close(self, _):
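The reporter.py change above hangs checkRemainingReporters off the DeferredList, so the error is only escalated once the last reporter has been dropped. Here is a small self-contained Twisted sketch of that pattern; the class name and the create callable are illustrative stand-ins, not the ooni ones.

from twisted.internet import defer

class NoReportersLeft(Exception):
    pass

def open_reporters(reporters, create):
    # reporters: mutable list of reporter objects; create: a callable that
    # opens one reporter and may raise. Both are assumptions for the sketch.
    def failed_opening(failure, reporter):
        reporters.remove(reporter)
        if not reporters:
            # Let the failure through so the DeferredList below errbacks.
            return failure
        # Otherwise swallow the error and keep the remaining reporters.
        return None

    def check_remaining(failure):
        # Fires only when the last reporter was removed above.
        raise NoReportersLeft()

    ds = [defer.maybeDeferred(create, r).addErrback(failed_opening, r)
          for r in list(reporters)]
    dl = defer.DeferredList(ds, fireOnOneErrback=True, consumeErrors=True)
    dl.addErrback(check_remaining)
    return dl

A caller would typically attach its own errback to the returned DeferredList to surface NoReportersLeft.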
diff --git a/ooni/tasks.py b/ooni/tasks.py
index c17a5c1..41cedc1 100644
--- a/ooni/tasks.py
+++ b/ooni/tasks.py
@@ -135,4 +135,3 @@ class ReportEntry(TaskWithTimeout):
def run(self):
return self.reporter.writeReportEntry(self.measurement)
-
diff --git a/ooni/tests/test_reporter.py b/ooni/tests/test_reporter.py
index d7ee907..ab2e8bf 100644
--- a/ooni/tests/test_reporter.py
+++ b/ooni/tests/test_reporter.py
@@ -4,7 +4,7 @@ from twisted.trial import unittest
from ooni.reporter import Report, YAMLReporter, OONIBReporter, safe_dump
from ooni.managers import ReportEntryManager, TaskManager
from ooni.nettest import NetTest, NetTestState
-from ooni.errors import ReportNotCreated, ReportAlreadyClosed
+from ooni.errors import ReportNotCreated, ReportAlreadyClosed, NoMoreReporters
from ooni.tasks import TaskWithTimeout
from ooni.tests.mocks import MockOReporter, MockTaskManager
@@ -119,7 +119,7 @@ class TestReport(unittest.TestCase):
d = report.write(MockMeasurement(MockNetTest()))
def f(err):
self.assertEquals(len(report.reporters),0)
- d.addErrback(f)
+ d.addCallback(f)
return d
class TestYAMLReporter(unittest.TestCase):
@@ -138,7 +138,7 @@ class TestYAMLReporter(unittest.TestCase):
def test_create_yaml_reporter(self):
self.assertIsInstance(YAMLReporter(self.testDetails),
YAMLReporter)
-
+
def test_open_yaml_report_and_succeed(self):
r = YAMLReporter(self.testDetails)
r.createReport()
@@ -150,10 +150,6 @@ class TestYAMLReporter(unittest.TestCase):
r.created.addCallback(f)
return r.created
- #def test_open_yaml_report_and_fail(self):
- # #XXX: YAMLReporter does not handle failures of this type
- # pass
-
def test_write_yaml_report_entry(self):
r = YAMLReporter(self.testDetails)
r.createReport()
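The addErrback to addCallback switch in the test above follows from Deferred semantics: once an errback returns a plain value instead of re-raising or returning the failure, the chain continues down the callback path, so later listeners must attach callbacks. A tiny illustration, unrelated to the ooni classes:

from twisted.internet import defer

def swallow(failure):
    # Returning a normal value from an errback converts the failure back
    # into a success for the rest of the chain.
    return "recovered"

def report(result):
    # Runs as a callback because the error was swallowed above; an
    # addErrback attached here would never fire.
    return result

d = defer.fail(RuntimeError("boom"))
d.addErrback(swallow)
d.addCallback(report)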