tor-commits
June 2013
- 19 participants
- 1571 discussions

[ooni-probe/master] Set the default collector via the command line option
by art@torproject.org 19 Jun '13
commit c714841fc7a818ee7fcbc2f46ab7ccba3f0ee40c
Author: Arturo Filastò <art@fuffa.org>
Date: Thu Feb 28 20:47:05 2013 +0100
Set the default collector via the command line option
---
collector | 1 -
ooni/oonicli.py | 11 +----------
2 files changed, 1 insertion(+), 11 deletions(-)
diff --git a/collector b/collector
deleted file mode 100644
index 0b260a1..0000000
--- a/collector
+++ /dev/null
@@ -1 +0,0 @@
-httpo://nkvphnp3p6agi5qq.onion
diff --git a/ooni/oonicli.py b/ooni/oonicli.py
index 93bf8bf..af06ad2 100644
--- a/ooni/oonicli.py
+++ b/ooni/oonicli.py
@@ -29,8 +29,7 @@ class Options(usage.Options):
" files listed on the command line")
optFlags = [["help", "h"],
- ["resume", "r"],
- ["no-default-reporter", "n"]]
+ ["resume", "r"]]
optParameters = [["reportfile", "o", None, "report file name"],
["testdeck", "i", None,
@@ -154,14 +153,6 @@ def runWithDirector():
log.msg("for example: ooniprobe -c httpo://nkvphnp3p6agi5qq.onion")
sys.exit(1)
- # Select one of the baked-in reporters unless the user has requested otherwise
- if not global_options['no-default-reporter']:
- with open('collector') as f:
- reporter_url = random.choice(f.readlines())
- reporter_url = reporter_url.split('#')[0].strip()
- oonib_reporter = OONIBReporter(test_details, reporter_url)
- reporters.append(oonib_reporter)
-
log.debug("adding callback for startNetTest")
d.addCallback(director.startNetTest, net_test_loader, reporters)
d.addCallback(shutdown)
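For context, a minimal sketch of the fallback this commit removes, together with the explicit invocation that replaces it. The example command comes from the log message visible in the diff; the helper function name is made up for illustration.

import random

def pick_default_collector(path='collector'):
    # Old behaviour, now removed: pick a random non-comment line from the
    # bundled 'collector' file and use it as the report collector.
    with open(path) as f:
        reporter_url = random.choice(f.readlines())
    return reporter_url.split('#')[0].strip()

# New behaviour: the collector must be given explicitly on the command line, e.g.
#   ooniprobe -c httpo://nkvphnp3p6agi5qq.onion nettests/blocking/http_requests.py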

[ooni-probe/master] Merge branch 'master' into feature/task_manager
by art@torproject.org 19 Jun '13
commit 46c4db194d2e0cda648a500291f0d605dd445e96
Merge: 18266c6 997a747
Author: Arturo Filastò <art@fuffa.org>
Date: Tue Mar 5 16:21:24 2013 +0100
Merge branch 'master' into feature/task_manager
* master:
Remove unneeded commented out line
Flatten the reporter keys
Return something in txscapy
Update before_i_commit script to make it work (cherry picked from commit 9cdf5596c8f16cac819afdaf1b9b537b36f796a6)
Refactor logic for importing scapy related functions
Add sent packets to report and use startSending instead of sendPackets
Conflicts:
decks/before_i_commit.testdeck
ooni/reporter.py | 15 +++++-------
ooni/templates/scapyt.py | 6 ++---
ooni/utils/net.py | 2 +-
ooni/utils/txscapy.py | 56 +++++++++++++++++++++++++++++++++------------
scripts/example_parser.py | 5 ++--
5 files changed, 54 insertions(+), 30 deletions(-)
diff --cc ooni/reporter.py
index 69d4892,1f6c3e6..a0c9466
--- a/ooni/reporter.py
+++ b/ooni/reporter.py
@@@ -147,19 -183,13 +147,16 @@@ class OReporter(object)
else:
test_input = test.input
- test_started = test._start_time
- test_runtime = time.time() - test_started
-
- report = {'input': test_input,
- 'test_name': test_name,
- 'test_started': test_started,
- 'test_runtime': test_runtime,
- 'report': test_report}
- return defer.maybeDeferred(self.writeReportEntry, report)
+ test_report['input'] = test_input
+ test_report['test_name'] = test_name
+ test_report['test_started'] = test._start_time
+ test_report['test_runtime'] = time.time() - test._start_time
+
+ return defer.maybeDeferred(self.writeReportEntry, test_report)
+class InvalidDestination(ReporterException):
+ pass
+
class YAMLReporter(OReporter):
"""
These are useful functions for reporting to YAML format.
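For context, an illustrative sketch of the flattened report entry that writeReportEntry receives after this merge. The key names come from the diff above; the concrete values are invented.

test_report = {
    'input': 'https://check.torproject.org',  # the input the measurement ran against
    'test_name': 'test_lookup',
    'test_started': 1371600000.0,             # test._start_time
    'test_runtime': 1.5,                      # time.time() - test._start_time
    # ...plus whatever keys the test itself wrote into its report
}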

[ooni-probe/master] Change how we pass arguments to the nettest loader
by art@torproject.org 19 Jun '13
commit 64715a714170b9175d07ea9420ef8b3dcf177c83
Author: Arturo Filastò <art@fuffa.org>
Date: Wed Feb 27 18:11:30 2013 +0100
Change how we pass arguments to the nettest loader
* Make the passing of the nettest file argument explicit
---
ooni/errors.py | 4 ++++
ooni/nettest.py | 12 ++++++------
ooni/oonicli.py | 6 ++++--
3 files changed, 14 insertions(+), 8 deletions(-)
diff --git a/ooni/errors.py b/ooni/errors.py
index 2023dbf..a194b96 100644
--- a/ooni/errors.py
+++ b/ooni/errors.py
@@ -126,3 +126,7 @@ class InvalidOONIBCollectorAddress(Exception):
class AllReportersFailed(Exception):
pass
+class GeoIPDataFilesNotFound(Exception):
+ pass
+
+
diff --git a/ooni/nettest.py b/ooni/nettest.py
index 6323989..0b3e69c 100644
--- a/ooni/nettest.py
+++ b/ooni/nettest.py
@@ -10,7 +10,7 @@ from ooni.utils import log, checkForRoot, NotRootError, geodata
from ooni import config
from ooni import otime
-from ooni.errors import AllReportersFailed
+from ooni import errors as e
from inspect import getmembers
from StringIO import StringIO
@@ -21,12 +21,12 @@ class NoTestCasesFound(Exception):
class NetTestLoader(object):
method_prefix = 'test'
- def __init__(self, options):
+ def __init__(self, options, test_file=None, test_string=None):
self.options = options
- if 'test_file' in options:
- self.loadNetTestFile(options['test_file'])
- elif 'test_string' in options:
- self.loadNetTestString(options['test_string'])
+ if test_file:
+ self.loadNetTestFile(test_file)
+ elif test_string:
+ self.loadNetTestString(test_string)
@property
def testDetails(self):
diff --git a/ooni/oonicli.py b/ooni/oonicli.py
index 243b90c..73d7709 100644
--- a/ooni/oonicli.py
+++ b/ooni/oonicli.py
@@ -107,10 +107,12 @@ def runWithDirector():
if global_options['testdeck']:
test_deck = yaml.safe_load(open(global_options['testdeck']))
for test in test_deck:
- test_list.append(NetTestLoader(test['options']))
+ test_list.append(NetTestLoader(test['options'],
+ test_file=global_options['test_file']))
else:
log.debug("No test deck detected")
- test_list.append(NetTestLoader(global_options))
+ test_list.append(NetTestLoader(global_options['subargs'],
+ test_file=global_options['test_file']))
# check each test's usageOptions
for net_test_loader in test_list:
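For context, a minimal sketch of constructing the loader with the new explicit test_file argument. The constructor signature is taken from the diff above; the concrete option values are borrowed from the example decks and are purely illustrative.

from ooni.nettest import NetTestLoader

# Sub-arguments for the test plus an explicit path to the nettest file.
net_test_loader = NetTestLoader(['-f', 'example_inputs/http_host_file.txt'],
                                test_file='nettests/manipulation/http_host.py')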

[ooni-probe/master] Update test decks with the proper key
by art@torproject.org 19 Jun '13
commit 74d61678afc2804796c844f099c6d7c84766a857
Author: Arturo Filastò <art@fuffa.org>
Date: Thu Feb 28 20:48:47 2013 +0100
Update test decks with the proper key
---
decks/before_i_commit.testdeck | 8 ++++----
decks/short_no_root.deck | 10 +++++-----
2 files changed, 9 insertions(+), 9 deletions(-)
diff --git a/decks/before_i_commit.testdeck b/decks/before_i_commit.testdeck
index f177a64..a9e08d8 100644
--- a/decks/before_i_commit.testdeck
+++ b/decks/before_i_commit.testdeck
@@ -5,7 +5,7 @@
pcapfile: null
reportfile: reports/captive_portal_test.yamloo
subargs: []
- test: nettests/manipulation/captiveportal.py
+ test_file: nettests/manipulation/captiveportal.py
- options:
collector: null
help: 0
@@ -13,7 +13,7 @@
pcapfile: null
reportfile: reports/dns_tamper_test.yamloo
subargs: [-T, example_inputs/dns_tamper_test_resolvers.txt, -f, example_inputs/dns_tamper_file.txt]
- test: nettests/blocking/dnsconsistency.py
+ test_file: nettests/blocking/dnsconsistency.py
- options:
collector: null
help: 0
@@ -21,7 +21,7 @@
pcapfile: null
reportfile: reports/http_host.yamloo
subargs: [-b, 'http://93.95.227.200', -c, 'http://nkvphnp3p6agi5qq.onion', -f, example_inputs/http_host_file.txt]
- test: nettests/manipulation/http_host.py
+ test_file: nettests/manipulation/http_host.py
- options:
collector: null
help: 0
@@ -29,4 +29,4 @@
pcapfile: null
reportfile: reports/header_field_manipulation.yamloo
subargs: [-b, 'http://93.95.227.200']
- test: nettests/manipulation/http_header_field_manipulation.py
+ test_file: nettests/manipulation/http_header_field_manipulation.py
diff --git a/decks/short_no_root.deck b/decks/short_no_root.deck
index afb7b75..18117f5 100644
--- a/decks/short_no_root.deck
+++ b/decks/short_no_root.deck
@@ -8,7 +8,7 @@
reportfile: null
resume: 0
subargs: []
- test: nettests/manipulation/captiveportal.py
+ test_file: nettests/manipulation/captiveportal.py
testdeck: null
- options:
collector: null
@@ -20,7 +20,7 @@
reportfile: null
resume: 0
subargs: [-f, example_inputs/url_lists_file.txt]
- test: nettests/blocking/http_requests.py
+ test_file: nettests/blocking/http_requests.py
testdeck: null
- options:
collector: null
@@ -32,7 +32,7 @@
reportfile: null
resume: 0
subargs: [-f, example_inputs/tcpconnect_host_file.txt]
- test: nettests/blocking/tcpconnect.py
+ test_file: nettests/blocking/tcpconnect.py
testdeck: null
- options:
collector: null
@@ -44,7 +44,7 @@
reportfile: null
resume: 0
subargs: [-f, example_inputs/http_host_file.txt, -T, example_inputs/dns_tamper_test_resolvers.txt]
- test: nettests/blocking/dnsconsistency.py
+ test_file: nettests/blocking/dnsconsistency.py
testdeck: null
- options:
collector: null
@@ -56,5 +56,5 @@
reportfile: null
resume: 0
subargs: [-b, 'http://93.95.227.200']
- test: nettests/manipulation/http_header_field_manipulation.py
+ test_file: nettests/manipulation/http_header_field_manipulation.py
testdeck: null
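For context, a minimal sketch of reading a deck that uses the renamed key. The deck path and option keys come from the files touched above; the loop itself is illustrative.

import yaml

# Each deck entry now names its nettest under 'test_file' (previously 'test').
for entry in yaml.safe_load(open('decks/short_no_root.deck')):
    options = entry['options']
    print options['test_file'], options['subargs']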

[ooni-probe/master] Merge branch 'master' into feature/task_manager
by art@torproject.org 19 Jun '13
commit 9b00fa993757df5081c042f1deda32f907d235f9
Merge: 46c4db1 a62e36d
Author: Arturo Filastò <art@fuffa.org>
Date: Tue Mar 5 16:33:01 2013 +0100
Merge branch 'master' into feature/task_manager
* master:
Add changelog for ooni
Update ooniprobe.conf.sample
Add tests for YAML reporter
Open YAML report in createReport
Update tests to reflect change of syntax in NetTestLoader
Update NetTest tests to reflect change of syntax
Conflicts:
ooni/errors.py
tests/test_director.py
tests/test_nettest.py
ChangeLog.md | 253 ++++++++++++++++++++++++++++++++++++++++++++++++
ooni/errors.py | 6 ++
ooni/reporter.py | 12 ++-
ooniprobe.conf.sample | 4 +-
tests/test_managers.py | 2 +-
tests/test_reporter.py | 115 +++++++++++++++++-----
6 files changed, 362 insertions(+), 30 deletions(-)
diff --cc ooni/errors.py
index a194b96,36a042f..85f39ab
--- a/ooni/errors.py
+++ b/ooni/errors.py
@@@ -126,7 -126,9 +126,13 @@@ class InvalidOONIBCollectorAddress(Exce
class AllReportersFailed(Exception):
pass
+class GeoIPDataFilesNotFound(Exception):
+ pass
+
+
+ class ReportNotCreated(Exception):
+ pass
+
+ class ReportAlreadyClosed(Exception):
+ pass
+

[ooni-probe/master] Raise NoMoreReporters exception on failure
by art@torproject.org 19 Jun '13
commit 71d8151930c287c4b0e7f17940cb2954ac171f83
Author: Arturo Filastò <art@fuffa.org>
Date: Wed Mar 6 15:24:56 2013 +0100
Raise NoMoreReporters exception on failure
---
ooni/errors.py | 4 +++-
ooni/reporter.py | 3 +--
2 files changed, 4 insertions(+), 3 deletions(-)
diff --git a/ooni/errors.py b/ooni/errors.py
index 85f39ab..ca2211a 100644
--- a/ooni/errors.py
+++ b/ooni/errors.py
@@ -129,10 +129,12 @@ class AllReportersFailed(Exception):
class GeoIPDataFilesNotFound(Exception):
pass
-
class ReportNotCreated(Exception):
pass
class ReportAlreadyClosed(Exception):
pass
+class NoMoreReporters(Exception):
+ pass
+
diff --git a/ooni/reporter.py b/ooni/reporter.py
index 594c6b8..84dad2f 100644
--- a/ooni/reporter.py
+++ b/ooni/reporter.py
@@ -413,8 +413,7 @@ class Report(object):
# Don't forward the exception unless there are no more reporters
if len(self.reporters) == 0:
log.err("Removed last reporter %s" % reporter)
- failure.reporter = reporter
- return failure
+ raise NoMoreReporters
def write(self, measurement):
"""
commit f79570c7d08bdf0cef8690d4e87041908e0bcbac
Author: Arturo Filastò <art@fuffa.org>
Date: Sat Mar 9 02:06:26 2013 +0100
Attach errback to dl
---
ooni/nettest.py | 6 +++---
ooni/reporter.py | 6 +++++-
ooni/tasks.py | 1 -
ooni/tests/test_reporter.py | 10 +++-------
4 files changed, 11 insertions(+), 12 deletions(-)
diff --git a/ooni/nettest.py b/ooni/nettest.py
index dc72ce8..4e73164 100644
--- a/ooni/nettest.py
+++ b/ooni/nettest.py
@@ -10,7 +10,7 @@ from ooni.utils import log, checkForRoot, NotRootError, geodata
from ooni import config
from ooni import otime
-from ooni import errors as e
+from ooni import errors
from inspect import getmembers
from StringIO import StringIO
@@ -187,7 +187,7 @@ class NetTestLoader(object):
log.msg("We will include some geo data in the report")
try:
client_geodata = geodata.IPToLocation(config.probe_ip)
- except e.GeoIPDataFilesNotFound:
+ except errors.GeoIPDataFilesNotFound:
log.err("Unable to find the geoip data files")
client_geodata = {'city': None, 'countrycode': None, 'asn': None}
@@ -402,7 +402,7 @@ class NetTest(object):
self.state.taskDone()
if len(self.report.reporters) == 0:
- raise e.AllReportersFailed
+ raise errors.AllReportersFailed
return report_results
diff --git a/ooni/reporter.py b/ooni/reporter.py
index a7bd933..596a001 100644
--- a/ooni/reporter.py
+++ b/ooni/reporter.py
@@ -398,6 +398,7 @@ class Report(object):
log.debug("Reporters created: %s" % l)
# Should we consume errors silently?
dl = defer.DeferredList(l)
+ dl.addErrback(self.checkRemainingReporters)
return dl
def failedOpeningReport(self, failure, reporter):
@@ -410,7 +411,9 @@ class Report(object):
log.err("Failed to open %s reporter, giving up..." % reporter)
log.err("Reporter %s failed, removing from report..." % reporter)
self.reporters.remove(reporter)
- # Don't forward the exception unless there are no more reporters
+ return
+
+ def checkRemainingReporters(self, failure):
if len(self.reporters) == 0:
log.err("Removed last reporter %s" % reporter)
raise NoMoreReporters
@@ -463,6 +466,7 @@ class Report(object):
# to the deferredlist that checks to see if any reporters are left
# and raise an exception if there are no remaining reporters
dl = defer.DeferredList(l,fireOnOneErrback=True, consumeErrors=True)
+ dl.addErrback(self.checkRemainingReporters)
return dl
def close(self, _):
diff --git a/ooni/tasks.py b/ooni/tasks.py
index c17a5c1..41cedc1 100644
--- a/ooni/tasks.py
+++ b/ooni/tasks.py
@@ -135,4 +135,3 @@ class ReportEntry(TaskWithTimeout):
def run(self):
return self.reporter.writeReportEntry(self.measurement)
-
diff --git a/ooni/tests/test_reporter.py b/ooni/tests/test_reporter.py
index d7ee907..ab2e8bf 100644
--- a/ooni/tests/test_reporter.py
+++ b/ooni/tests/test_reporter.py
@@ -4,7 +4,7 @@ from twisted.trial import unittest
from ooni.reporter import Report, YAMLReporter, OONIBReporter, safe_dump
from ooni.managers import ReportEntryManager, TaskManager
from ooni.nettest import NetTest, NetTestState
-from ooni.errors import ReportNotCreated, ReportAlreadyClosed
+from ooni.errors import ReportNotCreated, ReportAlreadyClosed, NoMoreReporters
from ooni.tasks import TaskWithTimeout
from ooni.tests.mocks import MockOReporter, MockTaskManager
@@ -119,7 +119,7 @@ class TestReport(unittest.TestCase):
d = report.write(MockMeasurement(MockNetTest()))
def f(err):
self.assertEquals(len(report.reporters),0)
- d.addErrback(f)
+ d.addCallback(f)
return d
class TestYAMLReporter(unittest.TestCase):
@@ -138,7 +138,7 @@ class TestYAMLReporter(unittest.TestCase):
def test_create_yaml_reporter(self):
self.assertIsInstance(YAMLReporter(self.testDetails),
YAMLReporter)
-
+
def test_open_yaml_report_and_succeed(self):
r = YAMLReporter(self.testDetails)
r.createReport()
@@ -150,10 +150,6 @@ class TestYAMLReporter(unittest.TestCase):
r.created.addCallback(f)
return r.created
- #def test_open_yaml_report_and_fail(self):
- # #XXX: YAMLReporter does not handle failures of this type
- # pass
-
def test_write_yaml_report_entry(self):
r = YAMLReporter(self.testDetails)
r.createReport()
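For context, a condensed sketch of the pattern this commit applies in ooni.reporter.Report: per-reporter deferreds are gathered in a DeferredList and failures are funneled into checkRemainingReporters, which only escalates once the last reporter is gone. The class and method names below are illustrative stand-ins; only the DeferredList usage, the errback wiring and checkRemainingReporters mirror the diff.

from twisted.internet import defer
from ooni.errors import NoMoreReporters

class ReportSketch(object):
    # Illustrative stand-in for ooni.reporter.Report.
    def __init__(self, reporters):
        self.reporters = reporters

    def openAll(self, deferreds):
        # 'deferreds' stands in for the per-reporter deferreds the real code builds.
        dl = defer.DeferredList(deferreds, fireOnOneErrback=True,
                                consumeErrors=True)
        dl.addErrback(self.checkRemainingReporters)
        return dl

    def checkRemainingReporters(self, failure):
        # Escalate only once every reporter has been dropped from the report.
        if len(self.reporters) == 0:
            raise NoMoreReporters
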
commit 033764eeef9b4f27df4512e447638447ff8fc293
Author: Arturo Filastò <art@fuffa.org>
Date: Mon Mar 18 15:38:17 2013 -0400
Rename ooniprobe to ooni.
---
bin/ooniprobe | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/bin/ooniprobe b/bin/ooniprobe
index ba537ab..695b137 100755
--- a/bin/ooniprobe
+++ b/bin/ooniprobe
@@ -8,10 +8,10 @@ sys.path[:] = map(os.path.abspath, sys.path)
sys.path.insert(0, os.path.abspath(os.getcwd()))
# This is a hack to overcome a bug in python
-from ooniprobe.utils.hacks import patched_reduce_ex
+from ooni.utils.hacks import patched_reduce_ex
copy_reg._reduce_ex = patched_reduce_ex
# from ooni.oonicli import run
# run()
-from ooniprobe.oonicli import runWithDirector
+from ooni.oonicli import runWithDirector
runWithDirector()
commit 7393a4e3eb57bcf3c14685b240e3ab5eebfe97d3
Author: Arturo Filastò <art@fuffa.org>
Date: Wed Mar 6 15:48:48 2013 +0100
Reorganization of code tree
* Move unittests into ooni directory
---
bin/ooniprobe | 4 +-
nettests/examples/example_myip.py | 4 +
nettests/tls-handshake.py | 32 -----
ooni/api/spec.py | 92 +++++++++++++
ooni/config.py | 134 ++++++++-----------
ooni/director.py | 29 +++-
ooni/nettest.py | 208 ++++++++++++++++++++--------
ooni/oonicli.py | 7 +-
ooni/oonid.py | 20 +++
ooni/reporter.py | 2 +-
ooni/runner.py | 241 ---------------------------------
ooni/tests/mocks.py | 172 ++++++++++++++++++++++++
ooni/tests/test-class-design.py | 101 ++++++++++++++
ooni/tests/test_director.py | 58 ++++++++
ooni/tests/test_dns.py | 24 ++++
ooni/tests/test_managers.py | 215 +++++++++++++++++++++++++++++
ooni/tests/test_mutate.py | 15 +++
ooni/tests/test_nettest.py | 268 +++++++++++++++++++++++++++++++++++++
ooni/tests/test_otime.py | 15 +++
ooni/tests/test_reporter.py | 238 ++++++++++++++++++++++++++++++++
ooni/tests/test_safe_represent.py | 14 ++
ooni/tests/test_trueheaders.py | 41 ++++++
ooni/tests/test_utils.py | 20 +++
ooni/utils/log.py | 8 +-
ooniprobe.conf.sample | 2 +
tests/mocks.py | 168 -----------------------
tests/test-class-design.py | 101 --------------
tests/test_director.py | 59 --------
tests/test_dns.py | 24 ----
tests/test_inputunit.py | 29 ----
tests/test_managers.py | 215 -----------------------------
tests/test_mutate.py | 15 ---
tests/test_nettest.py | 268 -------------------------------------
tests/test_otime.py | 15 ---
tests/test_reporter.py | 238 --------------------------------
tests/test_safe_represent.py | 14 --
tests/test_trueheaders.py | 41 ------
tests/test_utils.py | 20 ---
38 files changed, 1545 insertions(+), 1626 deletions(-)
diff --git a/bin/ooniprobe b/bin/ooniprobe
index 695b137..ba537ab 100755
--- a/bin/ooniprobe
+++ b/bin/ooniprobe
@@ -8,10 +8,10 @@ sys.path[:] = map(os.path.abspath, sys.path)
sys.path.insert(0, os.path.abspath(os.getcwd()))
# This is a hack to overcome a bug in python
-from ooni.utils.hacks import patched_reduce_ex
+from ooniprobe.utils.hacks import patched_reduce_ex
copy_reg._reduce_ex = patched_reduce_ex
# from ooni.oonicli import run
# run()
-from ooni.oonicli import runWithDirector
+from ooniprobe.oonicli import runWithDirector
runWithDirector()
diff --git a/nettests/examples/example_myip.py b/nettests/examples/example_myip.py
index 40a4849..70cf773 100644
--- a/nettests/examples/example_myip.py
+++ b/nettests/examples/example_myip.py
@@ -6,6 +6,10 @@
from ooni.templates import httpt
class MyIP(httpt.HTTPTest):
inputs = ['https://check.torproject.org']
+
+ def test_lookup(self):
+ return self.doRequest(self.input)
+
def processResponseBody(self, body):
import re
regexp = "Your IP address appears to be: <b>(.+?)<\/b>"
diff --git a/nettests/tls-handshake.py b/nettests/tls-handshake.py
deleted file mode 100644
index eba950e..0000000
--- a/nettests/tls-handshake.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env python
-
-import subprocess
-from subprocess import PIPE
-serverport = "129.21.124.215:443"
-# a subset of those from firefox
-ciphers = [
- "ECDHE-ECDSA-AES256-SHA",
- "ECDHE-RSA-AES256-SHA",
- "DHE-RSA-CAMELLIA256-SHA",
- "DHE-DSS-CAMELLIA256-SHA",
- "DHE-RSA-AES256-SHA",
- "DHE-DSS-AES256-SHA",
- "ECDH-ECDSA-AES256-CBC-SHA",
- "ECDH-RSA-AES256-CBC-SHA",
- "CAMELLIA256-SHA",
- "AES256-SHA",
- "ECDHE-ECDSA-RC4-SHA",
- "ECDHE-ECDSA-AES128-SHA",
- "ECDHE-RSA-RC4-SHA",
- "ECDHE-RSA-AES128-SHA",
- "DHE-RSA-CAMELLIA128-SHA",
- "DHE-DSS-CAMELLIA128-SHA"
-]
-def checkBridgeConnection(host, port)
- cipher_arg = ":".join(ciphers)
- cmd = ["openssl", "s_client", "-connect", "%s:%s" % (host,port)]
- cmd += ["-cipher", cipher_arg]
- proc = subprocess.Popen(cmd, stdout=PIPE, stderr=PIPE,stdin=PIPE)
- out, error = proc.communicate()
- success = "Cipher is DHE-RSA-AES256-SHA" in out
- return success
diff --git a/ooni/api/__init__.py b/ooni/api/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ooni/api/spec.py b/ooni/api/spec.py
new file mode 100644
index 0000000..af238f4
--- /dev/null
+++ b/ooni/api/spec.py
@@ -0,0 +1,92 @@
+import os
+import re
+import json
+import types
+
+from cyclone import web, escape
+
+from ooni import config
+
+class InvalidInputFilename(Exception):
+ pass
+
+class FilenameExists(Exception):
+ pass
+
+class ORequestHandler(web.RequestHandler):
+ serialize_lists = True
+
+ def write(self, chunk):
+ """
+ XXX This is a patch that can be removed once
+ https://github.com/fiorix/cyclone/pull/92 makes it into a release.
+ """
+ if isinstance(chunk, types.ListType):
+ chunk = escape.json_encode(chunk)
+ self.set_header("Content-Type", "application/json")
+ web.RequestHandler.write(self, chunk)
+
+class Status(ORequestHandler):
+ def get(self):
+ result = {'active_tests': oonidApplication.director.activeNetTests}
+ self.write(result)
+
+def list_inputs():
+ input_list = []
+ for filename in os.listdir(config.inputs_directory):
+ input_list.append({'filename': filename})
+ return input_list
+
+class Inputs(ORequestHandler):
+ def get(self):
+ self.write(input_list)
+
+ def post(self):
+ filename = self.get_argument("fullname", None)
+ if not filename or not re.match('(\w.*\.\w.*).*', filename):
+ raise InvalidInputFilename
+
+ if os.path.exists(filename):
+ raise FilenameExists
+
+ input_file = self.request.files.get("input_file")
+ content_type = input_file["content_type"]
+ body = input_file["body"]
+
+ fn = os.path.join(config.inputs_directory, filename)
+ with open(os.path.abspath(fn), "w") as fp:
+ fp.write(body)
+
+class ListTests(ORequestHandler):
+ def get(self):
+ self.write(oonidApplication.director.netTests)
+
+class StartTest(ORequestHandler):
+ def post(self, test_name):
+ """
+ Starts a test with the specified options.
+ """
+ json.decode(self.request.body)
+
+class StopTest(ORequestHandler):
+ def delete(self, test_name):
+ pass
+
+class TestStatus(ORequestHandler):
+ def get(self, test_id):
+ pass
+
+oonidAPI = [
+ (r"/status", Status),
+ (r"/inputs", Inputs),
+ (r"/test", ListTests),
+ (r"/test/(.*)/start", StartTest),
+ (r"/test/(.*)/stop", StopTest),
+ (r"/test/(.*)", TestStatus),
+ (r"/(.*)", web.StaticFileHandler,
+ {"path": os.path.join(config.data_directory, 'ui', 'app'),
+ "default_filename": "index.html"})
+]
+
+oonidApplication = web.Application(oonidAPI, debug=True)
+
diff --git a/ooni/config.py b/ooni/config.py
index 74a1668..5aeb49d 100644
--- a/ooni/config.py
+++ b/ooni/config.py
@@ -6,33 +6,8 @@ from twisted.internet import reactor, threads, defer
from ooni import otime
from ooni.utils import Storage
-reports = Storage()
-scapyFactory = None
-stateDict = None
-state = Storage()
-
-# XXX refactor this to use a database
-resume_lock = defer.DeferredLock()
-
-basic = None
-cmd_line_options = None
-resume_filename = None
-
-# XXX-Twisted this is used to check if we have started the reactor or not. It
-# is necessary because if the tests are already concluded because we have
-# resumed a test session then it will call reactor.run() even though there is
-# no condition that will ever stop it.
-# There should be a more twisted way of doing this.
-start_reactor = True
-
-tor_state = None
-tor_control = None
-
-config_file = None
-sample_config_file = None
-
-# This is used to store the probes IP address obtained via Tor
-probe_ip = None
+class TestFilenameNotSet(Exception):
+ pass
def get_root_path():
this_directory = os.path.dirname(__file__)
@@ -46,50 +21,6 @@ def createConfigFile():
"""
sample_config_file = os.path.join(get_root_path(), 'ooniprobe.conf.sample')
-def loadConfigFile():
- """
- This is a helper function that makes sure that the configuration attributes
- are singletons.
- """
- config_file = os.path.join(get_root_path(), 'ooniprobe.conf')
- try:
- f = open(config_file)
- except IOError:
- createConfigFile()
- raise Exception("Unable to open config file. "\
- "Copy ooniprobe.conf.sample to ooniprobe.conf")
-
- config_file_contents = '\n'.join(f.readlines())
- configuration = yaml.safe_load(config_file_contents)
-
- # Process the basic configuration options
- basic = Storage()
- for k, v in configuration['basic'].items():
- basic[k] = v
-
- # Process the privacy configuration options
- privacy = Storage()
- for k, v in configuration['privacy'].items():
- privacy[k] = v
-
- # Process the advanced configuration options
- advanced = Storage()
- for k, v in configuration['advanced'].items():
- advanced[k] = v
-
- # Process the tor configuration options
- tor = Storage()
- try:
- for k, v in configuration['tor'].items():
- tor[k] = v
- except AttributeError:
- pass
-
- return basic, privacy, advanced, tor
-
-class TestFilenameNotSet(Exception):
- pass
-
def generatePcapFilename():
if cmd_line_options['pcapfile']:
reports.pcap = cmd_line_options['pcapfile']
@@ -103,9 +34,61 @@ def generatePcapFilename():
frm_str = "report_%s_"+otime.timestamp()+".%s"
reports.pcap = frm_str % (test_name, "pcap")
-if not basic:
- # Here we make sure that we instance the config file attributes only once
- basic, privacy, advanced, tor = loadConfigFile()
+class ConfigurationSetting(Storage):
+ def __init__(self, key):
+ config_file = os.path.join(get_root_path(), 'ooniprobe.conf')
+ try:
+ f = open(config_file)
+ except IOError:
+ createConfigFile()
+ raise Exception("Unable to open config file. "\
+ "Copy ooniprobe.conf.sample to ooniprobe.conf")
+
+ config_file_contents = '\n'.join(f.readlines())
+ configuration = yaml.safe_load(config_file_contents)
+
+ try:
+ for k, v in configuration[key].items():
+ self[k] = v
+ except AttributeError:
+ pass
+
+basic = ConfigurationSetting('basic')
+advanced = ConfigurationSetting('advanced')
+privacy = ConfigurationSetting('privacy')
+tor = ConfigurationSetting('tor')
+
+data_directory = os.path.join(get_root_path(), 'data')
+nettest_directory = os.path.join(get_root_path(), 'nettests')
+inputs_directory = os.path.join(get_root_path(), 'inputs')
+
+reports = Storage()
+state = Storage()
+scapyFactory = None
+stateDict = None
+
+# XXX refactor this to use a database
+resume_lock = defer.DeferredLock()
+
+cmd_line_options = None
+resume_filename = None
+
+# XXX-Twisted this is used to check if we have started the reactor or not. It
+# is necessary because if the tests are already concluded because we have
+# resumed a test session then it will call reactor.run() even though there is
+# no condition that will ever stop it.
+# There should be a more twisted way of doing this.
+start_reactor = True
+tor_state = None
+tor_control = None
+config_file = None
+sample_config_file = None
+# This is used to store the probes IP address obtained via Tor
+probe_ip = None
+# This is used to keep track of the state of the sniffer
+sniffer_running = None
+
+logging = True
if not resume_filename:
resume_filename = os.path.join(get_root_path(), 'ooniprobe.resume')
@@ -113,6 +96,3 @@ if not resume_filename:
with open(resume_filename) as f: pass
except IOError as e:
with open(resume_filename, 'w+') as f: pass
-
-# This is used to keep track of the state of the sniffer
-sniffer_running = None
diff --git a/ooni/director.py b/ooni/director.py
index 8365ebd..a9daf84 100644
--- a/ooni/director.py
+++ b/ooni/director.py
@@ -6,7 +6,7 @@ from ooni.managers import ReportEntryManager, MeasurementManager
from ooni.reporter import Report
from ooni.utils import log, checkForRoot, NotRootError
from ooni.utils.net import randomFreePort
-from ooni.nettest import NetTest
+from ooni.nettest import NetTest, getNetTestInformation
from ooni.errors import UnableToStartTor
from txtorcon import TorConfig
@@ -57,10 +57,12 @@ class Director(object):
"""
_scheduledTests = 0
+ # Only list NetTests belonging to these categories
+ categories = ['blocking', 'manipulation']
def __init__(self):
- self.netTests = []
self.activeNetTests = []
+ self.netTests = self.getNetTests()
self.measurementManager = MeasurementManager()
self.measurementManager.director = self
@@ -80,6 +82,29 @@ class Director(object):
self.torControlProtocol = None
+ def getNetTests(self):
+ nettests = {}
+ def is_nettest(filename):
+ return not filename == '__init__.py' \
+ and filename.endswith('.py')
+
+ for category in self.categories:
+ dirname = os.path.join(config.nettest_directory, category)
+ # print path to all filenames.
+ for filename in os.listdir(dirname):
+ if is_nettest(filename):
+ net_test_file = os.path.join(dirname, filename)
+ nettest = getNetTestInformation(net_test_file)
+
+ if nettest['id'] in nettests:
+ log.err("Found a two tests with the same name %s, %s" %
+ (nettest_path, nettests[nettest['id']]['path']))
+ else:
+ category = dirname.replace(config.nettest_directory, '')
+ nettests[nettest['id']] = nettest
+
+ return nettests
+
def start(self):
if config.privacy.includepcap:
log.msg("Starting")
diff --git a/ooni/nettest.py b/ooni/nettest.py
index 1fe19f1..dc72ce8 100644
--- a/ooni/nettest.py
+++ b/ooni/nettest.py
@@ -18,15 +18,162 @@ from StringIO import StringIO
class NoTestCasesFound(Exception):
pass
+def get_test_methods(item, method_prefix="test_"):
+ """
+ Look for test_ methods in subclasses of NetTestCase
+ """
+ test_cases = []
+ try:
+ assert issubclass(item, NetTestCase)
+ methods = reflect.prefixedMethodNames(item, method_prefix)
+ test_methods = []
+ for method in methods:
+ test_methods.append(method_prefix + method)
+ if test_methods:
+ test_cases.append((item, test_methods))
+ except (TypeError, AssertionError):
+ pass
+ return test_cases
+
+def loadNetTestString(net_test_string):
+ """
+ Load NetTest from a string.
+ WARNING input to this function *MUST* be sanitized and *NEVER* be
+ untrusted.
+ Failure to do so will result in code exec.
+
+ net_test_string:
+
+ a string that contains the net test to be run.
+ """
+ net_test_file_object = StringIO(net_test_string)
+
+ ns = {}
+ test_cases = []
+ exec net_test_file_object.read() in ns
+ for item in ns.itervalues():
+ test_cases.extend(get_test_methods(item))
+
+ if not test_cases:
+ raise NoTestCasesFound
+
+ return test_cases
+
+def loadNetTestFile(net_test_file):
+ """
+ Load NetTest from a file.
+ """
+ test_cases = []
+ module = filenameToModule(net_test_file)
+ for __, item in getmembers(module):
+ test_cases.extend(get_test_methods(item))
+
+ if not test_cases:
+ raise NoTestCasesFound
+
+ return test_cases
+
+def getTestClassFromFile(net_test_file):
+ """
+ Will return the first class that is an instance of NetTestCase.
+
+ XXX this means that if inside of a test there are more than 1 test case
+ then we will only run the first one.
+ """
+ module = filenameToModule(net_test_file)
+ for __, item in getmembers(module):
+ try:
+ assert issubclass(item, NetTestCase)
+ return item
+ except (TypeError, AssertionError):
+ pass
+
+def getOption(opt_parameter, required_options, type='text'):
+ """
+ Arguments:
+ usage_options: a list as should be the optParameters of an UsageOptions class.
+
+ required_options: a list containing the strings of the options that are
+ required.
+
+ type: a string containing the type of the option.
+
+ Returns:
+ a dict containing
+ {
+ 'description': the description of the option,
+ 'default': the default value of the option,
+ 'required': True|False if the option is required or not,
+ 'type': the type of the option ('text' or 'file')
+ }
+ """
+ option_name, _, default, description = opt_parameter
+ if option_name in required_options:
+ required = True
+ else:
+ required = False
+
+ return {'description': description,
+ 'default': default, 'required': required,
+ 'type': type
+ }
+
+def getArguments(test_class):
+ arguments = {}
+ if test_class.inputFile:
+ option_name = test_class.inputFile[0]
+ arguments[option_name] = getOption(test_class.inputFile,
+ test_class.requiredOptions, type='file')
+ try:
+ list(test_class.usageOptions.optParameters)
+ except AttributeError:
+ return arguments
+
+ for opt_parameter in test_class.usageOptions.optParameters:
+ option_name = opt_parameter[0]
+ arguments[option_name] = getOption(opt_parameter,
+ test_class.requiredOptions)
+
+ return arguments
+
+def getNetTestInformation(net_test_file):
+ """
+ Returns a dict containing:
+
+ {
+ 'id': the test filename excluding the .py extension,
+ 'name': the full name of the test,
+ 'description': the description of the test,
+ 'version': version number of this test,
+ 'arguments': a dict containing as keys the supported arguments and as
+ values the argument description.
+ }
+ """
+ test_class = getTestClassFromFile(net_test_file)
+
+ test_id = os.path.basename(net_test_file).replace('.py', '')
+ information = {'id': test_id,
+ 'name': test_class.name,
+ 'description': test_class.description,
+ 'version': test_class.version,
+ 'arguments': getArguments(test_class)
+ }
+ return information
+
class NetTestLoader(object):
method_prefix = 'test'
def __init__(self, options, test_file=None, test_string=None):
self.options = options
+ test_cases = None
+
if test_file:
- self.loadNetTestFile(test_file)
+ test_cases = loadNetTestFile(test_file)
elif test_string:
- self.loadNetTestString(test_string)
+ test_cases = loadNetTestString(test_string)
+
+ if test_cases:
+ self.setupTestCases(test_cases)
@property
def testDetails(self):
@@ -115,44 +262,6 @@ class NetTestLoader(object):
assert usage_options == test_class.usageOptions
return usage_options
- def loadNetTestString(self, net_test_string):
- """
- Load NetTest from a string.
- WARNING input to this function *MUST* be sanitized and *NEVER* be
- untrusted.
- Failure to do so will result in code exec.
-
- net_test_string:
-
- a string that contains the net test to be run.
- """
- net_test_file_object = StringIO(net_test_string)
-
- ns = {}
- test_cases = []
- exec net_test_file_object.read() in ns
- for item in ns.itervalues():
- test_cases.extend(self._get_test_methods(item))
-
- if not test_cases:
- raise NoTestCasesFound
-
- self.setupTestCases(test_cases)
-
- def loadNetTestFile(self, net_test_file):
- """
- Load NetTest from a file.
- """
- test_cases = []
- module = filenameToModule(net_test_file)
- for __, item in getmembers(module):
- test_cases.extend(self._get_test_methods(item))
-
- if not test_cases:
- raise NoTestCasesFound
-
- self.setupTestCases(test_cases)
-
def setupTestCases(self, test_cases):
"""
Creates all the necessary test_cases (a list of tuples containing the
@@ -205,22 +314,6 @@ class NetTestLoader(object):
inputs = [None]
klass.inputs = inputs
- def _get_test_methods(self, item):
- """
- Look for test_ methods in subclasses of NetTestCase
- """
- test_cases = []
- try:
- assert issubclass(item, NetTestCase)
- methods = reflect.prefixedMethodNames(item, self.method_prefix)
- test_methods = []
- for method in methods:
- test_methods.append(self.method_prefix + method)
- if test_methods:
- test_cases.append((item, test_methods))
- except (TypeError, AssertionError):
- pass
- return test_cases
class NetTestState(object):
def __init__(self, allTasksDone):
@@ -409,9 +502,10 @@ class NetTestCase(object):
Quirks:
Every class that is prefixed with test *must* return a twisted.internet.defer.Deferred.
"""
- name = "I Did Not Change The Name"
+ name = "This test is nameless"
author = "Jane Doe <foo(a)example.com>"
version = "0.0.0"
+ description = "Sorry, this test has no description :("
inputs = [None]
inputFile = None
diff --git a/ooni/oonicli.py b/ooni/oonicli.py
index 06aa20c..a99386d 100644
--- a/ooni/oonicli.py
+++ b/ooni/oonicli.py
@@ -10,12 +10,11 @@ from twisted.internet import reactor
from twisted.python import usage
from twisted.python.util import spewer
-from ooni.errors import InvalidOONIBCollectorAddress
-
+from ooni import errors
from ooni import config
+
from ooni.director import Director
from ooni.reporter import YAMLReporter, OONIBReporter
-
from ooni.nettest import NetTestLoader, MissingRequiredOption
from ooni.utils import log
@@ -147,7 +146,7 @@ def runWithDirector():
oonib_reporter = OONIBReporter(test_details,
global_options['collector'])
reporters.append(oonib_reporter)
- except InvalidOONIBCollectorAddress:
+ except errors.InvalidOONIBCollectorAddress:
log.err("Invalid format for oonib collector address.")
log.msg("Should be in the format http://<collector_address>:<port>")
log.msg("for example: ooniprobe -c httpo://nkvphnp3p6agi5qq.onion")
diff --git a/ooni/oonid.py b/ooni/oonid.py
new file mode 100644
index 0000000..dde768e
--- /dev/null
+++ b/ooni/oonid.py
@@ -0,0 +1,20 @@
+import os
+import random
+
+from twisted.application import service, internet
+from twisted.web import static, server
+
+from ooni import config
+from ooni.api.spec import oonidApplication
+from ooni.director import Director
+from ooni.reporter import YAMLReporter, OONIBReporter
+
+def getOonid():
+ director = Director()
+ director.start()
+ oonidApplication.director = director
+ return internet.TCPServer(int(config.advanced.oonid_api_port), oonidApplication)
+
+application = service.Application("ooniprobe")
+service = getOonid()
+service.setServiceParent(application)
diff --git a/ooni/reporter.py b/ooni/reporter.py
index 84dad2f..a7bd933 100644
--- a/ooni/reporter.py
+++ b/ooni/reporter.py
@@ -26,7 +26,7 @@ except ImportError:
log.err("Scapy is not installed.")
-from ooni.errors import InvalidOONIBCollectorAddress
+from ooni.errors import InvalidOONIBCollectorAddress, NoMoreReporters
from ooni.errors import ReportNotCreated, ReportAlreadyClosed
from ooni import otime
diff --git a/ooni/runner.py b/ooni/runner.py
deleted file mode 100644
index 080db18..0000000
--- a/ooni/runner.py
+++ /dev/null
@@ -1,241 +0,0 @@
-import os
-import time
-import random
-
-import yaml
-
-from twisted.internet import defer
-from twisted.internet import reactor
-
-from txtorcon import TorConfig
-from txtorcon import TorState, launch_tor
-
-from ooni import config
-from ooni.reporter import OONIBReporter, YAMLReporter, OONIBReportError
-from ooni.inputunit import InputUnitFactory
-from ooni.nettest import NetTestCase, NoPostProcessor
-from ooni.utils import log, checkForRoot, pushFilenameStack
-from ooni.utils import NotRootError, Storage
-from ooni.utils.net import randomFreePort
-
-class InvalidResumeFile(Exception):
- pass
-
-class noResumeSession(Exception):
- pass
-
-def loadResumeFile():
- """
- Sets the singleton stateDict object to the content of the resume file.
- If the file is empty then it will create an empty one.
-
- Raises:
-
- :class:ooni.runner.InvalidResumeFile if the resume file is not valid
-
- """
- if not config.stateDict:
- try:
- with open(config.resume_filename) as f:
- config.stateDict = yaml.safe_load(f)
- except:
- log.err("Error loading YAML file")
- raise InvalidResumeFile
-
- if not config.stateDict:
- with open(config.resume_filename, 'w+') as f:
- yaml.safe_dump(dict(), f)
- config.stateDict = dict()
-
- elif isinstance(config.stateDict, dict):
- return
- else:
- log.err("The resume file is of the wrong format")
- raise InvalidResumeFile
-
-def resumeTest(test_filename, input_unit_factory):
- """
- Returns the an input_unit_factory that is at the index of the previous run of the test
- for the specified test_filename.
-
- Args:
-
- test_filename (str): the filename of the test that is being run
- including the .py extension.
-
- input_unit_factory (:class:ooni.inputunit.InputUnitFactory): with the
- same input of the past run.
-
- Returns:
-
- :class:ooni.inputunit.InputUnitFactory that is at the index of the
- previous test run.
-
- """
- try:
- idx = config.stateDict[test_filename]
- for x in range(idx):
- try:
- input_unit_factory.next()
- except StopIteration:
- log.msg("Previous run was complete")
- return input_unit_factory
-
- return input_unit_factory
-
- except KeyError:
- log.debug("No resume key found for selected test name. It is therefore 0")
- config.stateDict[test_filename] = 0
- return input_unit_factory
-
-@defer.inlineCallbacks
-def updateResumeFile(test_filename):
- """
- update the resume file with the current stateDict state.
- """
- log.debug("Acquiring lock for %s" % test_filename)
- yield config.resume_lock.acquire()
-
- current_resume_state = yaml.safe_load(open(config.resume_filename))
- current_resume_state = config.stateDict
- yaml.safe_dump(current_resume_state, open(config.resume_filename, 'w+'))
-
- log.debug("Releasing lock for %s" % test_filename)
- config.resume_lock.release()
- defer.returnValue(config.stateDict[test_filename])
-
-@defer.inlineCallbacks
-def increaseInputUnitIdx(test_filename):
- """
- Args:
-
- test_filename (str): the filename of the test that is being run
- including the .py extension.
-
- input_unit_idx (int): the current input unit index for the test.
-
- """
- config.stateDict[test_filename] += 1
- yield updateResumeFile(test_filename)
-
-def updateProgressMeters(test_filename, input_unit_factory,
- test_case_number):
- """
- Update the progress meters for keeping track of test state.
- """
- if not config.state.test_filename:
- config.state[test_filename] = Storage()
-
- config.state[test_filename].per_item_average = 2.0
-
- input_unit_idx = float(config.stateDict[test_filename])
- input_unit_items = len(input_unit_factory)
- test_case_number = float(test_case_number)
- total_iterations = input_unit_items * test_case_number
- current_iteration = input_unit_idx * test_case_number
-
- log.debug("input_unit_items: %s" % input_unit_items)
- log.debug("test_case_number: %s" % test_case_number)
-
- log.debug("Test case number: %s" % test_case_number)
- log.debug("Total iterations: %s" % total_iterations)
- log.debug("Current iteration: %s" % current_iteration)
-
- def progress():
- return (current_iteration / total_iterations) * 100.0
-
- config.state[test_filename].progress = progress
-
- def eta():
- return (total_iterations - current_iteration) \
- * config.state[test_filename].per_item_average
- config.state[test_filename].eta = eta
-
- config.state[test_filename].input_unit_idx = input_unit_idx
- config.state[test_filename].input_unit_items = input_unit_items
-
-
-@defer.inlineCallbacks
-def runTestCases(test_cases, options, cmd_line_options):
- log.debug("Running %s" % test_cases)
- log.debug("Options %s" % options)
- log.debug("cmd_line_options %s" % dict(cmd_line_options))
-
- test_inputs = options['inputs']
-
- # Set a default reporter
- if not cmd_line_options['collector'] and not \
- cmd_line_options['no-default-reporter']:
- with open('collector') as f:
- reporter_url = random.choice(f.readlines())
- reporter_url = reporter_url.split('#')[0].strip()
- cmd_line_options['collector'] = reporter_url
-
- oonib_reporter = OONIBReporter(cmd_line_options)
- yaml_reporter = YAMLReporter(cmd_line_options)
-
- if cmd_line_options['collector']:
- log.msg("Using remote collector, please be patient while we create the report.")
- try:
- yield oonib_reporter.createReport(options)
- except OONIBReportError:
- log.err("Error in creating new report")
- log.msg("We will only create reports to a file")
- oonib_reporter = None
- else:
- oonib_reporter = None
-
- yield yaml_reporter.createReport(options)
- log.msg("Reporting to file %s" % yaml_reporter._stream.name)
-
- try:
- input_unit_factory = InputUnitFactory(test_inputs)
- input_unit_factory.inputUnitSize = int(cmd_line_options['parallelism'])
- except Exception, e:
- log.exception(e)
-
- try:
- loadResumeFile()
- except InvalidResumeFile:
- log.err("Error in loading resume file %s" % config.resume_filename)
- log.err("Try deleting the resume file")
- raise InvalidResumeFile
-
- test_filename = os.path.basename(cmd_line_options['test'])
-
- if cmd_line_options['resume']:
- log.debug("Resuming %s" % test_filename)
- resumeTest(test_filename, input_unit_factory)
- else:
- log.debug("Not going to resume %s" % test_filename)
- config.stateDict[test_filename] = 0
-
- updateProgressMeters(test_filename, input_unit_factory, len(test_cases))
-
- try:
- for input_unit in input_unit_factory:
- log.debug("Running %s with input unit %s" % (test_filename, input_unit))
-
- yield runTestCasesWithInputUnit(test_cases, input_unit,
- yaml_reporter, oonib_reporter)
-
- yield increaseInputUnitIdx(test_filename)
-
- updateProgressMeters(test_filename, input_unit_factory, len(test_cases))
-
- except Exception:
- log.exception("Problem in running test")
- yaml_reporter.finish()
-
-def loadTest(cmd_line_options):
- """
- Takes care of parsing test command line arguments and loading their
- options.
- """
- # XXX here there is too much strong coupling with cmd_line_options
- # Ideally this would get all wrapped in a nice little class that get's
- # instanced with it's cmd_line_options as an instance attribute
- classes = findTestClassesFromFile(cmd_line_options)
- test_cases, options = loadTestsAndOptions(classes, cmd_line_options)
-
- return test_cases, options, cmd_line_options
diff --git a/ooni/tests/__init__.py b/ooni/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ooni/tests/mocks.py b/ooni/tests/mocks.py
new file mode 100644
index 0000000..99e5200
--- /dev/null
+++ b/ooni/tests/mocks.py
@@ -0,0 +1,172 @@
+from twisted.python import failure
+from twisted.internet import defer
+
+from ooni import config
+from ooni.tasks import BaseTask, TaskWithTimeout
+from ooni.nettest import NetTest
+from ooni.managers import TaskManager
+
+config.logging = False
+
+class MockMeasurementFailOnce(BaseTask):
+ def run(self):
+ f = open('dummyTaskFailOnce.txt', 'w')
+ f.write('fail')
+ f.close()
+ if self.failure >= 1:
+ return defer.succeed(self)
+ else:
+ return defer.fail(failure.Failure)
+
+class MockMeasurementManager(TaskManager):
+ def __init__(self):
+ self.successes = []
+ TaskManager.__init__(self)
+
+ def failed(self, failure, task):
+ pass
+
+ def succeeded(self, result, task):
+ self.successes.append((result, task))
+
+class MockReporter(object):
+ def __init__(self):
+ self.created = defer.Deferred()
+
+ def writeReportEntry(self, entry):
+ pass
+
+ def createReport(self):
+ self.created.callback(self)
+
+ def finish(self):
+ pass
+
+class MockFailure(Exception):
+ pass
+
+## from test_managers
+mockFailure = failure.Failure(MockFailure('mock'))
+
+class MockSuccessTask(BaseTask):
+ def run(self):
+ return defer.succeed(42)
+
+class MockFailTask(BaseTask):
+ def run(self):
+ return defer.fail(mockFailure)
+
+class MockFailOnceTask(BaseTask):
+ def run(self):
+ if self.failures >= 1:
+ return defer.succeed(42)
+ else:
+ return defer.fail(mockFailure)
+
+class MockSuccessTaskWithTimeout(TaskWithTimeout):
+ def run(self):
+ return defer.succeed(42)
+
+class MockFailTaskThatTimesOut(TaskWithTimeout):
+ def run(self):
+ return defer.Deferred()
+
+class MockTimeoutOnceTask(TaskWithTimeout):
+ def run(self):
+ if self.failures >= 1:
+ return defer.succeed(42)
+ else:
+ return defer.Deferred()
+
+class MockFailTaskWithTimeout(TaskWithTimeout):
+ def run(self):
+ return defer.fail(mockFailure)
+
+
+class MockNetTest(object):
+ def __init__(self):
+ self.successes = []
+
+ def succeeded(self, measurement):
+ self.successes.append(measurement)
+
+class MockMeasurement(TaskWithTimeout):
+ def __init__(self, net_test):
+ TaskWithTimeout.__init__(self)
+ self.netTest = net_test
+
+ def succeeded(self, result):
+ return self.netTest.succeeded(42)
+
+class MockSuccessMeasurement(MockMeasurement):
+ def run(self):
+ return defer.succeed(42)
+
+class MockFailMeasurement(MockMeasurement):
+ def run(self):
+ return defer.fail(mockFailure)
+
+class MockFailOnceMeasurement(MockMeasurement):
+ def run(self):
+ if self.failures >= 1:
+ return defer.succeed(42)
+ else:
+ return defer.fail(mockFailure)
+
+class MockDirector(object):
+ def __init__(self):
+ self.successes = []
+
+ def measurementFailed(self, failure, measurement):
+ pass
+
+ def measurementSucceeded(self, measurement):
+ self.successes.append(measurement)
+
+## from test_reporter.py
+class MockOReporter(object):
+ def __init__(self):
+ self.created = defer.Deferred()
+
+ def writeReportEntry(self, entry):
+ return defer.succeed(42)
+
+ def finish(self):
+ pass
+
+ def createReport(self):
+ from ooni.utils import log
+ log.debug("Creating report with %s" % self)
+ self.created.callback(self)
+
+class MockOReporterThatFailsWrite(MockOReporter):
+ def writeReportEntry(self, entry):
+ raise MockFailure
+
+class MockOReporterThatFailsOpen(MockOReporter):
+ def createReport(self):
+ self.created.errback(failure.Failure(MockFailure()))
+
+class MockOReporterThatFailsWriteOnce(MockOReporter):
+ def __init__(self):
+ self.failure = 0
+ MockOReporter.__init__(self)
+
+ def writeReportEntry(self, entry):
+ if self.failure >= 1:
+ return defer.succeed(42)
+ else:
+ self.failure += 1
+ raise MockFailure
+
+class MockTaskManager(TaskManager):
+ def __init__(self):
+ self.successes = []
+ TaskManager.__init__(self)
+
+ def failed(self, failure, task):
+ pass
+
+ def succeeded(self, result, task):
+ self.successes.append((result, task))
+
diff --git a/ooni/tests/test-class-design.py b/ooni/tests/test-class-design.py
new file mode 100644
index 0000000..bb80cd3
--- /dev/null
+++ b/ooni/tests/test-class-design.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python
+#
+# testing classes to test multiple inheritance.
+# these are not meant to be run by trial, though they could be made to be so.
+# i didn't know where to put them. --isis
+
+import abc
+from pprint import pprint
+from inspect import classify_class_attrs
+
+class PluginBase(object):
+ __metaclass__ = abc.ABCMeta
+
+ @abc.abstractproperty
+ def name(self):
+ return 'you should not see this'
+
+ @name.setter
+ def name(self, value):
+ return 'you should not set this'
+
+ @name.deleter
+ def name(self):
+ return 'you should not del this'
+
+ @abc.abstractmethod
+ def inputParser(self, line):
+ """Do something to parse something."""
+ return
+
+class Foo(object):
+ woo = "this class has some shit in it"
+ def bar(self):
+ print "i'm a Foo.bar()!"
+ print woo
+
+class KwargTest(Foo):
+ _name = "isis"
+
+ #def __new__(cls, *a, **kw):
+ # return super(KwargTest, cls).__new__(cls, *a, **kw)
+
+ @property
+ def name(self):
+ return self._name
+
+ @name.setter
+ def name(self, value):
+ self._name = value
+
+ def __init__(self, *a, **kw):
+ super(KwargTest, self).__init__()
+
+ ## this causes the instantion args to override the class attrs
+ for key, value in kw.items():
+ setattr(self.__class__, key, value)
+
+ print "%s.__init__(): self.__dict__ = %s" \
+ % (type(self), pprint(type(self).__dict__))
+
+ for attr in classify_class_attrs(self):
+ print attr
+
+ @classmethod
+ def sayname(cls):
+ print cls.name
+
+class KwargTestChild(KwargTest):
+ name = "arturo"
+ def __init__(self):
+ super(KwargTestChild, self).__init__()
+ print self.name
+
+class KwargTestChildOther(KwargTest):
+ def __init__(self, name="robot", does="lasers"):
+ super(KwargTestChildOther, self).__init__()
+ print self.name
+
+
+if __name__ == "__main__":
+ print "class KwargTest attr name: %s" % KwargTest.name
+ kwargtest = KwargTest()
+ print "KwargTest instantiated wo args"
+ print "kwargtest.name: %s" % kwargtest.name
+ print "kwargtest.sayname(): %s" % kwargtest.sayname()
+ kwargtest2 = KwargTest(name="lovecruft", does="hacking")
+ print "KwargTest instantiated with name args"
+ print "kwargtest.name: %s" % kwargtest2.name
+ print "kwargtest.sayname(): %s" % kwargtest2.sayname()
+
+ print "class KwargTestChild attr name: %s" % KwargTestChild.name
+ kwargtestchild = KwargTestChild()
+ print "KwargTestChild instantiated wo args"
+ print "kwargtestchild.name: %s" % kwargtestchild.name
+ print "kwargtestchild.sayname(): %s" % kwargtestchild.sayname()
+
+ print "class KwargTestChildOther attr name: %s" % KwargTestChildOther.name
+ kwargtestchildother = KwargTestChildOther()
+ print "KwargTestChildOther instantiated wo args"
+ print "kwargtestchildother.name: %s" % kwargtestchildother.name
+ print "kwargtestchildother.sayname(): %s" % kwargtestchildother.sayname()
diff --git a/ooni/tests/test_director.py b/ooni/tests/test_director.py
new file mode 100644
index 0000000..7920fcb
--- /dev/null
+++ b/ooni/tests/test_director.py
@@ -0,0 +1,58 @@
+from twisted.internet import defer, base
+from twisted.trial import unittest
+
+from ooni.director import Director
+from ooni.nettest import NetTestLoader
+from ooni.tests.mocks import MockReporter
+base.DelayedCall.debug = True
+
+net_test_string = """
+from twisted.python import usage
+from ooni.nettest import NetTestCase
+
+class UsageOptions(usage.Options):
+ optParameters = [['spam', 's', None, 'ham']]
+
+class DummyTestCase(NetTestCase):
+ inputFile = ['file', 'f', None, 'The input File']
+
+ usageOptions = UsageOptions
+
+ def test_a(self):
+ self.report['bar'] = 'bar'
+
+ def test_b(self):
+ self.report['foo'] = 'foo'
+"""
+
+
+dummyArgs = ('--spam', 1, '--file', 'dummyInputFile.txt')
+
+class TestDirector(unittest.TestCase):
+ timeout = 1
+ def setUp(self):
+ with open('dummyInputFile.txt', 'w') as f:
+ for i in range(10):
+ f.write("%s\n" % i)
+
+ self.reporters = [MockReporter()]
+ self.director = Director()
+
+ def tearDown(self):
+ pass
+
+ def test_start_net_test(self):
+ ntl = NetTestLoader(dummyArgs, test_string=net_test_string)
+
+ ntl.checkOptions()
+ d = self.director.startNetTest('', ntl, self.reporters)
+
+ @d.addCallback
+ def done(result):
+ self.assertEqual(self.director.successfulMeasurements, 20)
+
+ return d
+
+ def test_stop_net_test(self):
+ pass
+
diff --git a/ooni/tests/test_dns.py b/ooni/tests/test_dns.py
new file mode 100644
index 0000000..e9bb524
--- /dev/null
+++ b/ooni/tests/test_dns.py
@@ -0,0 +1,24 @@
+#
+# This unittest is to verify that our usage of the twisted DNS resolver does
+# not break with new versions of twisted.
+
+import pdb
+from twisted.trial import unittest
+
+from twisted.internet import reactor
+
+from twisted.names import dns
+from twisted.names.client import Resolver
+
+class DNSTest(unittest.TestCase):
+ def test_a_lookup_ooni_query(self):
+ def done_query(message, *arg):
+ answer = message.answers[0]
+ self.assertEqual(answer.type, 1)
+
+ dns_query = [dns.Query('ooni.nu', type=dns.A)]
+ resolver = Resolver(servers=[('8.8.8.8', 53)])
+ d = resolver.queryUDP(dns_query)
+ d.addCallback(done_query)
+ return d
+
diff --git a/ooni/tests/test_managers.py b/ooni/tests/test_managers.py
new file mode 100644
index 0000000..e2af7b3
--- /dev/null
+++ b/ooni/tests/test_managers.py
@@ -0,0 +1,215 @@
+from twisted.trial import unittest
+from twisted.python import failure
+from twisted.internet import defer, task
+
+from ooni.tasks import BaseTask, TaskWithTimeout, TaskTimedOut
+from ooni.managers import TaskManager, MeasurementManager
+
+from ooni.tests.mocks import MockSuccessTask, MockFailTask, MockFailOnceTask, MockFailure
+from ooni.tests.mocks import MockSuccessTaskWithTimeout, MockFailTaskThatTimesOut
+from ooni.tests.mocks import MockTimeoutOnceTask, MockFailTaskWithTimeout
+from ooni.tests.mocks import MockTaskManager, mockFailure, MockDirector
+from ooni.tests.mocks import MockNetTest, MockMeasurement, MockSuccessMeasurement
+from ooni.tests.mocks import MockFailMeasurement, MockFailOnceMeasurement
+
+class TestTaskManager(unittest.TestCase):
+ timeout = 1
+ def setUp(self):
+ self.measurementManager = MockTaskManager()
+ self.measurementManager.concurrency = 20
+ self.measurementManager.retries = 2
+
+ self.measurementManager.start()
+
+ self.clock = task.Clock()
+
+ def schedule_successful_tasks(self, task_type, number=1):
+ all_done = []
+ for x in range(number):
+ mock_task = task_type()
+ all_done.append(mock_task.done)
+ self.measurementManager.schedule(mock_task)
+
+ d = defer.DeferredList(all_done)
+ @d.addCallback
+ def done(res):
+ for task_result, task_instance in self.measurementManager.successes:
+ self.assertEqual(task_result, 42)
+ self.assertIsInstance(task_instance, task_type)
+
+ return d
+
+ def schedule_failing_tasks(self, task_type, number=1):
+ all_done = []
+ for x in range(number):
+ mock_task = task_type()
+ all_done.append(mock_task.done)
+ self.measurementManager.schedule(mock_task)
+
+ d = defer.DeferredList(all_done)
+ @d.addCallback
+ def done(res):
+ # number*3: each task runs once and is retried twice (retries = 2), so a failing task is recorded three times
+ self.assertEqual(len(self.measurementManager.failures), number*3)
+ for task_result, task_instance in self.measurementManager.failures:
+ self.assertEqual(task_result, mockFailure)
+ self.assertIsInstance(task_instance, task_type)
+
+ return d
+
+ def test_schedule_failing_with_mock_failure_task(self):
+ mock_task = MockFailTask()
+ self.measurementManager.schedule(mock_task)
+ self.assertFailure(mock_task.done, MockFailure)
+ return mock_task.done
+
+ def test_schedule_successful_one_task(self):
+ return self.schedule_successful_tasks(MockSuccessTask)
+
+ def test_schedule_successful_one_task_with_timeout(self):
+ return self.schedule_successful_tasks(MockSuccessTaskWithTimeout)
+
+ def test_schedule_failing_tasks_that_timesout(self):
+ self.measurementManager.retries = 0
+
+ task_type = MockFailTaskThatTimesOut
+ task_timeout = 5
+
+ mock_task = task_type()
+ mock_task.timeout = task_timeout
+ mock_task.clock = self.clock
+
+ self.measurementManager.schedule(mock_task)
+
+ self.clock.advance(task_timeout)
+
+ @mock_task.done.addBoth
+ def done(res):
+ self.assertEqual(len(self.measurementManager.failures), 1)
+ for task_result, task_instance in self.measurementManager.failures:
+ self.assertIsInstance(task_instance, task_type)
+
+ return mock_task.done
+
+ def test_schedule_time_out_once(self):
+ task_type = MockTimeoutOnceTask
+ task_timeout = 5
+
+ mock_task = task_type()
+ mock_task.timeout = task_timeout
+ mock_task.clock = self.clock
+
+ self.measurementManager.schedule(mock_task)
+
+ self.clock.advance(task_timeout)
+
+ @mock_task.done.addBoth
+ def done(res):
+ self.assertEqual(len(self.measurementManager.failures), 1)
+ for task_result, task_instance in self.measurementManager.failures:
+ self.assertIsInstance(task_instance, task_type)
+
+ for task_result, task_instance in self.measurementManager.successes:
+ self.assertEqual(task_result, 42)
+ self.assertIsInstance(task_instance, task_type)
+
+ return mock_task.done
+
+
+ def test_schedule_failing_one_task(self):
+ return self.schedule_failing_tasks(MockFailTask)
+
+ def test_schedule_failing_one_task_with_timeout(self):
+ return self.schedule_failing_tasks(MockFailTaskWithTimeout)
+
+ def test_schedule_successful_ten_tasks(self):
+ return self.schedule_successful_tasks(MockSuccessTask, number=10)
+
+ def test_schedule_failing_ten_tasks(self):
+ return self.schedule_failing_tasks(MockFailTask, number=10)
+
+ def test_schedule_successful_27_tasks(self):
+ return self.schedule_successful_tasks(MockSuccessTask, number=27)
+
+ def test_schedule_failing_27_tasks(self):
+ return self.schedule_failing_tasks(MockFailTask, number=27)
+
+ def test_task_retry_and_succeed(self):
+ mock_task = MockFailOnceTask()
+ self.measurementManager.schedule(mock_task)
+
+ @mock_task.done.addCallback
+ def done(res):
+ self.assertEqual(len(self.measurementManager.failures), 1)
+
+ self.assertEqual(self.measurementManager.failures,
+ [(mockFailure, mock_task)])
+ self.assertEqual(self.measurementManager.successes,
+ [(42, mock_task)])
+
+ return mock_task.done
+
+ def dd_test_task_retry_and_succeed_56_tasks(self):
+ """
+ XXX this test fails in a non-deterministic manner.
+ """
+ all_done = []
+ number = 56
+ for x in range(number):
+ mock_task = MockFailOnceTask()
+ all_done.append(mock_task.done)
+ self.measurementManager.schedule(mock_task)
+
+ d = defer.DeferredList(all_done)
+
+ @d.addCallback
+ def done(res):
+ self.assertEqual(len(self.measurementManager.failures), number)
+
+ for task_result, task_instance in self.measurementManager.successes:
+ self.assertEqual(task_result, 42)
+ self.assertIsInstance(task_instance, MockFailOnceTask)
+
+ return d
+
+class TestMeasurementManager(unittest.TestCase):
+ def setUp(self):
+ mock_director = MockDirector()
+
+ self.measurementManager = MeasurementManager()
+ self.measurementManager.director = mock_director
+
+ self.measurementManager.concurrency = 10
+ self.measurementManager.retries = 2
+
+ self.measurementManager.start()
+
+ self.mockNetTest = MockNetTest()
+
+ def test_schedule_and_net_test_notified(self, number=1):
+ # XXX we should probably be inheriting from the base test class
+ mock_task = MockSuccessMeasurement(self.mockNetTest)
+ self.measurementManager.schedule(mock_task)
+
+ @mock_task.done.addCallback
+ def done(res):
+ self.assertEqual(self.mockNetTest.successes,
+ [42])
+
+ self.assertEqual(len(self.mockNetTest.successes), 1)
+ return mock_task.done
+
+ def test_schedule_failing_one_measurement(self):
+ mock_task = MockFailMeasurement(self.mockNetTest)
+ self.measurementManager.schedule(mock_task)
+
+ @mock_task.done.addErrback
+ def done(failure):
+ self.assertEqual(len(self.measurementManager.failures), 3)
+
+ self.assertEqual(failure, mockFailure)
+ self.assertEqual(len(self.mockNetTest.successes), 0)
+
+ return mock_task.done
+
+
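
The number*3 check in schedule_failing_tasks follows from the retry policy configured in setUp: retries = 2 means one initial run plus two retries, so a task that always fails is recorded three times. A minimal sketch of that accounting (an illustration only, not the real TaskManager):

    def run_with_retries(task, retries=2):
        # one initial attempt plus `retries` retries; every failed
        # attempt is recorded, mirroring measurementManager.failures
        failures = []
        for attempt in range(retries + 1):
            try:
                return task(), failures
            except Exception, error:
                failures.append(error)
        return None, failures

A task that never succeeds therefore contributes retries + 1 = 3 entries to the failures list.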
diff --git a/ooni/tests/test_mutate.py b/ooni/tests/test_mutate.py
new file mode 100644
index 0000000..7e30586
--- /dev/null
+++ b/ooni/tests/test_mutate.py
@@ -0,0 +1,15 @@
+import unittest
+from ooni.kit import daphn3
+
+class TestDaphn3(unittest.TestCase):
+ def test_mutate_string(self):
+ original_string = '\x00\x00\x00'
+ mutated = daphn3.daphn3MutateString(original_string, 1)
+ self.assertEqual(mutated, '\x00\x01\x00')
+ def test_mutate_daphn3(self):
+ original_dict = [{'client': '\x00\x00\x00'},
+ {'server': '\x00\x00\x00'}]
+ mutated_dict = daphn3.daphn3Mutate(original_dict, 1, 1)
+ self.assertEqual(mutated_dict, [{'client': '\x00\x00\x00'},
+ {'server': '\x00\x01\x00'}])
+
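
The expected values above pin down the mutation semantics: mutating '\x00\x00\x00' at index 1 yields '\x00\x01\x00', i.e. the byte at the given position is bumped. A minimal sketch of that behaviour (inferred from the assertions, not the ooni.kit.daphn3 implementation):

    def mutate_string(data, idx):
        # increment the byte at position idx, wrapping at 256
        mutated = bytearray(data)
        mutated[idx] = (mutated[idx] + 1) % 256
        return str(mutated)

    assert mutate_string('\x00\x00\x00', 1) == '\x00\x01\x00'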
diff --git a/ooni/tests/test_nettest.py b/ooni/tests/test_nettest.py
new file mode 100644
index 0000000..4d72a84
--- /dev/null
+++ b/ooni/tests/test_nettest.py
@@ -0,0 +1,268 @@
+import os
+from StringIO import StringIO
+from tempfile import TemporaryFile, mkstemp
+
+from twisted.trial import unittest
+from twisted.internet import defer, reactor
+from twisted.python.usage import UsageError
+
+from ooni.nettest import NetTest, InvalidOption, MissingRequiredOption
+from ooni.nettest import NetTestLoader, FailureToLoadNetTest, loadNetTestString, loadNetTestFile
+from ooni.tasks import BaseTask
+from ooni.utils import NotRootError
+
+from ooni.director import Director
+
+from ooni.managers import TaskManager
+
+from ooni.tests.mocks import MockMeasurement, MockMeasurementFailOnce
+from ooni.tests.mocks import MockNetTest, MockDirector, MockReporter
+from ooni.tests.mocks import MockMeasurementManager
+defer.setDebugging(True)
+
+net_test_string = """
+from twisted.python import usage
+from ooni.nettest import NetTestCase
+
+class UsageOptions(usage.Options):
+ optParameters = [['spam', 's', None, 'ham']]
+
+class DummyTestCase(NetTestCase):
+
+ usageOptions = UsageOptions
+
+ def test_a(self):
+ self.report['bar'] = 'bar'
+
+ def test_b(self):
+ self.report['foo'] = 'foo'
+"""
+
+net_test_root_required = net_test_string+"""
+ requiresRoot = True
+"""
+
+net_test_string_with_file = """
+from twisted.python import usage
+from ooni.nettest import NetTestCase
+
+class UsageOptions(usage.Options):
+ optParameters = [['spam', 's', None, 'ham']]
+
+class DummyTestCase(NetTestCase):
+ inputFile = ['file', 'f', None, 'The input File']
+
+ usageOptions = UsageOptions
+
+ def test_a(self):
+ self.report['bar'] = 'bar'
+
+ def test_b(self):
+ self.report['foo'] = 'foo'
+"""
+
+net_test_string_with_required_option = """
+from twisted.python import usage
+from ooni.nettest import NetTestCase
+
+class UsageOptions(usage.Options):
+ optParameters = [['spam', 's', None, 'ham'],
+ ['foo', 'o', None, 'moo'],
+ ['bar', 'o', None, 'baz'],
+ ]
+
+class DummyTestCase(NetTestCase):
+ inputFile = ['file', 'f', None, 'The input File']
+
+ usageOptions = UsageOptions
+
+ def test_a(self):
+ self.report['bar'] = 'bar'
+
+ def test_b(self):
+ self.report['foo'] = 'foo'
+
+ requiredOptions = ['foo', 'bar']
+"""
+
+dummyInputs = range(1)
+dummyArgs = ('--spam', 'notham')
+dummyOptions = {'spam':'notham'}
+dummyInvalidArgs = ('--cram', 'jam')
+dummyInvalidOptions= {'cram':'jam'}
+dummyArgsWithRequiredOptions = ('--foo', 'moo', '--bar', 'baz')
+dummyRequiredOptions = {'foo':'moo', 'bar':'baz'}
+dummyArgsWithFile = ('--spam', 'notham', '--file', 'dummyInputFile.txt')
+
+class TestNetTest(unittest.TestCase):
+ timeout = 1
+ def setUp(self):
+ with open('dummyInputFile.txt', 'w') as f:
+ for i in range(10):
+ f.write("%s\n" % i)
+
+ def assertCallable(self, thing):
+ self.assertIn('__call__', dir(thing))
+
+ def verifyMethods(self, testCases):
+ uniq_test_methods = set()
+ for test_class, test_methods in testCases:
+ instance = test_class()
+ for test_method in test_methods:
+ c = getattr(instance, test_method)
+ self.assertCallable(c)
+ uniq_test_methods.add(test_method)
+ self.assertEqual(set(['test_a', 'test_b']), uniq_test_methods)
+
+ def test_load_net_test_from_file(self):
+ """
+ Given a file, verify that the net test cases are properly
+ generated.
+ """
+ __, net_test_file = mkstemp()
+ with open(net_test_file, 'w') as f:
+ f.write(net_test_string)
+ f.close()
+
+ ntl = NetTestLoader(dummyArgs)
+ ntl.setupTestCases(loadNetTestFile(net_test_file))
+
+ self.verifyMethods(ntl.testCases)
+ os.unlink(net_test_file)
+
+ def test_load_net_test_from_str(self):
+ """
+ Given a file-like object, verify that the net test cases are properly
+ generated.
+ """
+ ntl = NetTestLoader(dummyArgs)
+ ntl.setupTestCases(loadNetTestString(net_test_string))
+
+ self.verifyMethods(ntl.testCases)
+
+ def test_load_net_test_from_StringIO(self):
+ """
+ Given a file-like object, verify that the net test cases are properly
+ generated.
+ """
+ ntl = NetTestLoader(dummyArgs)
+ ntl.setupTestCases(loadNetTestString(net_test_string))
+
+ self.verifyMethods(ntl.testCases)
+
+ def test_load_with_option(self):
+ ntl = NetTestLoader(dummyArgs)
+ ntl.setupTestCases(loadNetTestString(net_test_string))
+
+ self.assertIsInstance(ntl, NetTestLoader)
+ for test_klass, test_meth in ntl.testCases:
+ for option in dummyOptions.keys():
+ self.assertIn(option, test_klass.usageOptions())
+
+ def test_load_with_invalid_option(self):
+ try:
+ ntl = NetTestLoader(dummyInvalidArgs)
+ ntl.setupTestCases(loadNetTestString(net_test_string))
+
+ ntl.checkOptions()
+ raise Exception
+ except UsageError:
+ pass
+
+ def test_load_with_required_option(self):
+ ntl = NetTestLoader(dummyArgsWithRequiredOptions)
+ ntl.setupTestCases(loadNetTestString(net_test_string_with_required_option))
+
+ self.assertIsInstance(ntl, NetTestLoader)
+
+ def test_load_with_missing_required_option(self):
+ try:
+ ntl = NetTestLoader(dummyArgs)
+ ntl.setupTestCases(loadNetTestString(net_test_string_with_required_option))
+
+ except MissingRequiredOption:
+ pass
+
+ def test_net_test_inputs(self):
+ ntl = NetTestLoader(dummyArgsWithFile)
+ ntl.setupTestCases(loadNetTestString(net_test_string_with_file))
+
+ ntl.checkOptions()
+
+ # XXX: if you use the same test_class twice you will have consumed all
+ # of its inputs!
+ tested = set([])
+ for test_class, test_method in ntl.testCases:
+ if test_class not in tested:
+ tested.update([test_class])
+ self.assertEqual(len(list(test_class.inputs)), 10)
+
+ def test_setup_local_options_in_test_cases(self):
+ ntl = NetTestLoader(dummyArgs)
+ ntl.setupTestCases(loadNetTestString(net_test_string))
+
+ ntl.checkOptions()
+
+ for test_class, test_method in ntl.testCases:
+ self.assertEqual(test_class.localOptions, dummyOptions)
+
+ def test_generate_measurements_size(self):
+ ntl = NetTestLoader(dummyArgsWithFile)
+ ntl.setupTestCases(loadNetTestString(net_test_string_with_file))
+
+ ntl.checkOptions()
+ net_test = NetTest(ntl, None)
+
+ measurements = list(net_test.generateMeasurements())
+ self.assertEqual(len(measurements), 20)
+
+ def test_net_test_completed_callback(self):
+ ntl = NetTestLoader(dummyArgsWithFile)
+ ntl.setupTestCases(loadNetTestString(net_test_string_with_file))
+
+ ntl.checkOptions()
+ director = Director()
+
+ d = director.startNetTest('', ntl, [MockReporter()])
+
+ @d.addCallback
+ def complete(result):
+ #XXX: why is the return type (True, None) ?
+ self.assertEqual(result, [(True,None)])
+ self.assertEqual(director.successfulMeasurements, 20)
+
+ return d
+
+ def test_require_root_succeed(self):
+ #XXX: will require root to run
+ ntl = NetTestLoader(dummyArgs)
+ ntl.setupTestCases(loadNetTestString(net_test_root_required))
+
+ for test_class, method in ntl.testCases:
+ self.assertTrue(test_class.requiresRoot)
+
+ #def test_require_root_failed(self):
+ # #XXX: will fail if you run as root
+ # try:
+ # net_test = NetTestLoader(StringIO(net_test_root_required),
+ # dummyArgs)
+ # except NotRootError:
+ # pass
+
+ #def test_create_report_succeed(self):
+ # pass
+
+ #def test_create_report_failed(self):
+ # pass
+
+ #def test_run_all_test(self):
+ # raise NotImplementedError
+
+ #def test_resume_test(self):
+ # pass
+
+ #def test_progress(self):
+ # pass
+
+ #def test_time_out(self):
+ # raise NotImplementedError
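
The 20 expected in test_generate_measurements_size and test_net_test_completed_callback comes from the fixtures above: the dummy net test defines two test methods and dummyInputFile.txt holds ten lines, with one measurement per (method, input) pair. A hedged illustration of that arithmetic (inferred from the expected counts, not from NetTest.generateMeasurements itself):

    import itertools

    methods = ['test_a', 'test_b']
    inputs = range(10)  # one entry per line of dummyInputFile.txt
    measurements = list(itertools.product(methods, inputs))
    assert len(measurements) == 20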
diff --git a/ooni/tests/test_otime.py b/ooni/tests/test_otime.py
new file mode 100644
index 0000000..80979f2
--- /dev/null
+++ b/ooni/tests/test_otime.py
@@ -0,0 +1,15 @@
+import unittest
+from datetime import datetime
+from ooni import otime
+
+test_date = datetime(2002, 6, 26, 22, 45, 49)
+
+class TestOtime(unittest.TestCase):
+ def test_timestamp(self):
+ self.assertEqual(otime.timestamp(test_date), "2002-06-26T224549Z")
+
+ def test_fromTimestamp(self):
+ time_stamp = otime.timestamp(test_date)
+ self.assertEqual(test_date, otime.fromTimestamp(time_stamp))
+
+
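
The expected string in test_timestamp fixes the otime format: an ISO-8601 date followed by a compact HHMMSS time and a Z suffix. A short sketch of the equivalent strftime/strptime round trip (an illustration of the format only, not the ooni.otime code):

    from datetime import datetime

    fmt = "%Y-%m-%dT%H%M%SZ"
    d = datetime(2002, 6, 26, 22, 45, 49)
    assert d.strftime(fmt) == "2002-06-26T224549Z"
    assert datetime.strptime("2002-06-26T224549Z", fmt) == d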
diff --git a/ooni/tests/test_reporter.py b/ooni/tests/test_reporter.py
new file mode 100644
index 0000000..d7ee907
--- /dev/null
+++ b/ooni/tests/test_reporter.py
@@ -0,0 +1,238 @@
+from twisted.internet import defer
+from twisted.trial import unittest
+
+from ooni.reporter import Report, YAMLReporter, OONIBReporter, safe_dump
+from ooni.managers import ReportEntryManager, TaskManager
+from ooni.nettest import NetTest, NetTestState
+from ooni.errors import ReportNotCreated, ReportAlreadyClosed
+
+from ooni.tasks import TaskWithTimeout
+from ooni.tests.mocks import MockOReporter, MockTaskManager
+from ooni.tests.mocks import MockMeasurement, MockNetTest
+from ooni.tests.mocks import MockOReporterThatFailsWrite
+from ooni.tests.mocks import MockOReporterThatFailsWriteOnce
+from ooni.tests.mocks import MockOReporterThatFailsOpen
+
+from twisted.python import failure
+import yaml
+
+class TestReport(unittest.TestCase):
+ def setUp(self):
+ pass
+ def tearDown(self):
+ pass
+ def test_create_report_with_no_reporter(self):
+ report = Report([],ReportEntryManager())
+ self.assertIsInstance(report, Report)
+
+ def test_create_report_with_single_reporter(self):
+ report = Report([MockOReporter()], ReportEntryManager())
+ self.assertIsInstance(report, Report)
+
+ def test_create_report_with_multiple_reporters(self):
+ report = Report([MockOReporter() for x in xrange(3)],
+ ReportEntryManager())
+ self.assertIsInstance(report, Report)
+
+ def test_report_open_with_single_reporter(self):
+ report = Report([MockOReporter()],ReportEntryManager())
+ d = report.open()
+ return d
+
+ def test_report_open_with_multiple_reporter(self):
+ report = Report([MockOReporter() for x in xrange(3)],
+ ReportEntryManager())
+ d = report.open()
+ return d
+
+ def test_fail_to_open_report_with_single_reporter(self):
+ report = Report([MockOReporterThatFailsOpen()],
+ ReportEntryManager())
+ d = report.open()
+ def f(x):
+ self.assertEquals(len(report.reporters), 0)
+ d.addCallback(f)
+ return d
+
+ def test_fail_to_open_single_report_with_multiple_reporter(self):
+ report = Report([MockOReporterThatFailsOpen(), MockOReporter(),
+ MockOReporter()], ReportEntryManager())
+ d = report.open()
+ def f(x):
+ self.assertEquals(len(report.reporters),2)
+ d.addCallback(f)
+ return d
+
+ def test_fail_to_open_all_reports_with_multiple_reporter(self):
+ report = Report([MockOReporterThatFailsOpen() for x in xrange(3)],
+ ReportEntryManager())
+ d = report.open()
+ def f(x):
+ self.assertEquals(len(report.reporters),0)
+ d.addCallback(f)
+ return d
+
+ def test_write_report_with_single_reporter_and_succeed(self):
+ #XXX: verify that the MockOReporter writeReportEntry succeeds
+ report = Report([MockOReporter()], ReportEntryManager())
+ report.open()
+ d = report.write(MockMeasurement(MockNetTest()))
+ return d
+
+ def test_write_report_with_single_reporter_and_fail_after_timeout(self):
+ report = Report([MockOReporterThatFailsWrite()], ReportEntryManager())
+ report.open()
+ d = report.write(MockMeasurement(MockNetTest()))
+ def f(err):
+ self.assertEquals(len(report.reporters),0)
+ d.addBoth(f)
+ return d
+
+ def test_write_report_with_single_reporter_and_succeed_after_timeout(self):
+ report = Report([MockOReporterThatFailsWriteOnce()], ReportEntryManager())
+ report.open()
+ d = report.write(MockMeasurement(MockNetTest()))
+ return d
+
+ def test_write_report_with_multiple_reporter_and_succeed(self):
+ report = Report([MockOReporter() for x in xrange(3)], ReportEntryManager())
+ report.open()
+ d = report.write(MockMeasurement(MockNetTest()))
+ return d
+
+ def test_write_report_with_multiple_reporter_and_fail_a_single_reporter(self):
+ report = Report([MockOReporter(), MockOReporter(), MockOReporterThatFailsWrite()], ReportEntryManager())
+ d = report.open()
+
+ self.assertEquals(len(report.reporters),3)
+ d = report.write(MockMeasurement(MockNetTest()))
+
+ def f(x):
+ # one of the reporters should have been removed
+ self.assertEquals(len(report.reporters), 2)
+ d.addBoth(f)
+ return d
+
+ def test_write_report_with_multiple_reporter_and_fail_all_reporter(self):
+ report = Report([MockOReporterThatFailsWrite() for x in xrange(3)], ReportEntryManager())
+ report.open()
+ d = report.write(MockMeasurement(MockNetTest()))
+ def f(err):
+ self.assertEquals(len(report.reporters),0)
+ d.addErrback(f)
+ return d
+
+class TestYAMLReporter(unittest.TestCase):
+ def setUp(self):
+ self.testDetails = {'software_name': 'ooniprobe', 'options':
+ {'pcapfile': None, 'help': 0, 'subargs': ['-f', 'alexa_10'], 'resume':
+ 0, 'parallelism': '10', 'no-default-reporter': 0, 'testdeck': None,
+ 'test': 'nettests/blocking/http_requests.py', 'logfile': None,
+ 'collector': None, 'reportfile': None}, 'test_version': '0.2.3',
+ 'software_version': '0.0.10', 'test_name': 'http_requests_test',
+ 'start_time': 1362054343.0, 'probe_asn': 'AS0', 'probe_ip':
+ '127.0.0.1', 'probe_cc': 'US'}
+
+ def tearDown(self):
+ pass
+ def test_create_yaml_reporter(self):
+ self.assertIsInstance(YAMLReporter(self.testDetails),
+ YAMLReporter)
+
+ def test_open_yaml_report_and_succeed(self):
+ r = YAMLReporter(self.testDetails)
+ r.createReport()
+ # verify that testDetails was written to report properly
+ def f(r):
+ r._stream.seek(0)
+ details, = yaml.safe_load_all(r._stream)
+ self.assertEqual(details, self.testDetails)
+ r.created.addCallback(f)
+ return r.created
+
+ #def test_open_yaml_report_and_fail(self):
+ # #XXX: YAMLReporter does not handle failures of this type
+ # pass
+
+ def test_write_yaml_report_entry(self):
+ r = YAMLReporter(self.testDetails)
+ r.createReport()
+
+ report_entry = {'foo':'bar', 'bin':'baz'}
+ r.writeReportEntry(report_entry)
+
+ # verify that details and entry were written to report
+ def f(r):
+ r._stream.seek(0)
+ report = yaml.safe_load_all(r._stream)
+ details, entry = report
+ self.assertEqual(details, self.testDetails)
+ self.assertEqual(entry, report_entry)
+ r.created.addCallback(f)
+ return r.created
+
+ def test_write_multiple_yaml_report_entry(self):
+ r = YAMLReporter(self.testDetails)
+ r.createReport()
+ def reportEntry():
+ for x in xrange(10):
+ yield {'foo':'bar', 'bin':'baz', 'item':x}
+ for entry in reportEntry():
+ r.writeReportEntry(entry)
+ # verify that details and multiple entries were written to report
+ def f(r):
+ r._stream.seek(0)
+ report = yaml.safe_load_all(r._stream)
+ details = report.next()
+ self.assertEqual(details, self.testDetails)
+ self.assertEqual([r for r in report], [r for r in reportEntry()])
+ r.created.addCallback(f)
+ return r.created
+
+ def test_close_yaml_report(self):
+ r = YAMLReporter(self.testDetails)
+ r.createReport()
+ r.finish()
+ self.assertTrue(r._stream.closed)
+
+ def test_write_yaml_report_after_close(self):
+ r = YAMLReporter(self.testDetails)
+ r.createReport()
+ r.finish()
+ def f(r):
+ r.writeReportEntry("foo")
+ r.created.addCallback(f)
+ self.assertFailure(r.created, ReportAlreadyClosed)
+
+ def test_write_yaml_report_before_open(self):
+ r = YAMLReporter(self.testDetails)
+ def f(r):
+ r.writeReportEntry("foo")
+ r.created.addCallback(f)
+ self.assertFailure(r.created, ReportNotCreated)
+
+#class TestOONIBReporter(unittest.TestCase):
+# def setUp(self):
+# pass
+# def tearDown(self):
+# pass
+# def test_create_oonib_reporter(self):
+# raise NotImplementedError
+# def test_open_oonib_report_and_succeed(self):
+# raise NotImplementedError
+# def test_open_oonib_report_and_fail(self):
+# raise NotImplementedError
+# def test_write_oonib_report_entry_and_succeed(self):
+# raise NotImplementedError
+# def test_write_oonib_report_entry_and_succeed_after_timeout(self):
+# raise NotImplementedError
+# def test_write_oonib_report_entry_and_fail_after_timeout(self):
+# raise NotImplementedError
+# def test_write_oonib_report_after_close(self):
+# raise NotImplementedError
+# def test_write_oonib_report_before_open(self):
+# raise NotImplementedError
+# def test_close_oonib_report_and_succeed(self):
+# raise NotImplementedError
+# def test_close_oonib_report_and_fail(self):
+# raise NotImplementedError
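
The YAMLReporter tests read the report back with yaml.safe_load_all, which implies the report is a multi-document YAML stream: the test details first, then one document per entry. A minimal sketch of that layout (an assumption inferred from the tests, not the reporter itself):

    import yaml
    from StringIO import StringIO

    stream = StringIO()
    yaml.safe_dump({'software_name': 'ooniprobe'}, stream, explicit_start=True)
    yaml.safe_dump({'foo': 'bar', 'bin': 'baz'}, stream, explicit_start=True)

    stream.seek(0)
    details, entry = yaml.safe_load_all(stream)
    assert entry == {'foo': 'bar', 'bin': 'baz'}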
diff --git a/ooni/tests/test_safe_represent.py b/ooni/tests/test_safe_represent.py
new file mode 100644
index 0000000..82a5196
--- /dev/null
+++ b/ooni/tests/test_safe_represent.py
@@ -0,0 +1,14 @@
+import yaml
+
+from twisted.trial import unittest
+
+from ooni.reporter import OSafeDumper
+
+from scapy.all import IP, UDP
+
+class TestScapyRepresent(unittest.TestCase):
+ def test_represent_scapy(self):
+ data = IP()/UDP()
+ yaml.dump_all([data], Dumper=OSafeDumper)
+
+
diff --git a/ooni/tests/test_trueheaders.py b/ooni/tests/test_trueheaders.py
new file mode 100644
index 0000000..9ac0a27
--- /dev/null
+++ b/ooni/tests/test_trueheaders.py
@@ -0,0 +1,41 @@
+from twisted.trial import unittest
+
+from ooni.utils.txagentwithsocks import TrueHeaders
+
+dummy_headers_dict = {
+ 'Header1': ['Value1', 'Value2'],
+ 'Header2': ['ValueA', 'ValueB']
+}
+
+dummy_headers_dict2 = {
+ 'Header1': ['Value1', 'Value2'],
+ 'Header2': ['ValueA', 'ValueB'],
+ 'Header3': ['ValueA', 'ValueB'],
+}
+
+dummy_headers_dict3 = {
+ 'Header1': ['Value1', 'Value2'],
+ 'Header2': ['ValueA', 'ValueB'],
+ 'Header4': ['ValueA', 'ValueB'],
+}
+
+
+class TestTrueHeaders(unittest.TestCase):
+ def test_names_match(self):
+ th = TrueHeaders(dummy_headers_dict)
+ self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict)), set())
+
+ def test_names_not_match(self):
+ th = TrueHeaders(dummy_headers_dict)
+ self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3']))
+
+ th = TrueHeaders(dummy_headers_dict3)
+ self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3', 'Header4']))
+
+ def test_names_match_expect_ignore(self):
+ th = TrueHeaders(dummy_headers_dict)
+ self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2), ignore=['Header3']), set())
+
+
+
+
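
The assertions above pin down getDiff's contract: the set of header names present in one header set but not the other, with an optional ignore list. A minimal sketch of that behaviour (derived from the tests, not the TrueHeaders implementation):

    def header_name_diff(a, b, ignore=()):
        # symmetric difference of the header names, minus ignored names
        return (set(a) ^ set(b)) - set(ignore)

    a = {'Header1': ['Value1'], 'Header2': ['ValueA']}
    b = {'Header1': ['Value1'], 'Header2': ['ValueA'], 'Header3': ['ValueB']}
    assert header_name_diff(a, b) == set(['Header3'])
    assert header_name_diff(a, b, ignore=['Header3']) == set()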
diff --git a/ooni/tests/test_utils.py b/ooni/tests/test_utils.py
new file mode 100644
index 0000000..cc648e0
--- /dev/null
+++ b/ooni/tests/test_utils.py
@@ -0,0 +1,20 @@
+import unittest
+from ooni.utils import pushFilenameStack
+
+class TestUtils(unittest.TestCase):
+ def test_pushFilenameStack(self):
+ f = open("dummyfile", "w+")
+ f.write("0\n")
+ f.close()
+ for i in xrange(1, 5):
+ f = open("dummyfile.%s" % i, "w+")
+ f.write("%s\n" % i)
+ f.close()
+
+ pushFilenameStack("dummyfile")
+ for i in xrange(1, 5):
+ f = open("dummyfile.%s" % i)
+ c = f.readlines()[0].strip()
+ self.assertEqual(str(i-1), str(c))
+ f.close()
+
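
The loop in test_pushFilenameStack encodes the expected rotation: after the call, dummyfile.N holds what was previously in dummyfile.(N-1), and the bare dummyfile becomes dummyfile.1. A minimal sketch of such a rotation (an assumption about the behaviour the test expects, not the ooni.utils implementation):

    import os

    def push_filename_stack(basename, depth=5):
        # shift name.(N-1) -> name.N from the top down, then name -> name.1
        for i in range(depth, 1, -1):
            src = "%s.%s" % (basename, i - 1)
            if os.path.exists(src):
                os.rename(src, "%s.%s" % (basename, i))
        os.rename(basename, "%s.1" % basename)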
diff --git a/ooni/utils/log.py b/ooni/utils/log.py
index 0740c10..141116e 100644
--- a/ooni/utils/log.py
+++ b/ooni/utils/log.py
@@ -45,14 +45,16 @@ def stop():
print "Stopping OONI"
def msg(msg, *arg, **kw):
- print "%s" % msg
+ if config.logging:
+ print "%s" % msg
def debug(msg, *arg, **kw):
- if config.advanced.debug:
+ if config.advanced.debug and config.logging:
print "[D] %s" % msg
def err(msg, *arg, **kw):
- print "[!] %s" % msg
+ if config.logging:
+ print "[!] %s" % msg
def exception(error):
"""
diff --git a/ooniprobe.conf.sample b/ooniprobe.conf.sample
index 51c60f5..8a6b825 100644
--- a/ooniprobe.conf.sample
+++ b/ooniprobe.conf.sample
@@ -30,6 +30,8 @@ advanced:
# If you do not specify start_tor, you will have to have Tor running and
# explicitly set the control port and SOCKS port
start_tor: true
+ # The port on which the oonid API should listen
+ oonid_api_port: 50666
tor:
#socks_port: 9050
#control_port: 9051
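
The new oonid_api_port key sits under the advanced section of the sample config. Assuming the sample is plain YAML, as its layout suggests, reading the option back would look roughly like this (a hedged sketch, not code from the tree):

    import yaml

    with open("ooniprobe.conf.sample") as f:
        conf = yaml.safe_load(f)

    port = conf["advanced"]["oonid_api_port"]  # 50666 in the sample above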
diff --git a/tests/__init__.py b/tests/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/tests/mocks.py b/tests/mocks.py
deleted file mode 100644
index fed683e..0000000
--- a/tests/mocks.py
+++ /dev/null
@@ -1,168 +0,0 @@
-from ooni.tasks import BaseTask, TaskWithTimeout
-from twisted.python import failure
-from ooni.nettest import NetTest
-from ooni.managers import TaskManager
-from twisted.internet import defer
-
-class MockMeasurementFailOnce(BaseTask):
- def run(self):
- f = open('dummyTaskFailOnce.txt', 'w')
- f.write('fail')
- f.close()
- if self.failure >= 1:
- return defer.succeed(self)
- else:
- return defer.fail(failure.Failure)
-
-class MockMeasurementManager(TaskManager):
- def __init__(self):
- self.successes = []
- TaskManager.__init__(self)
-
- def failed(self, failure, task):
- pass
-
- def succeeded(self, result, task):
- self.successes.append((result, task))
-
-class MockReporter(object):
- def __init__(self):
- self.created = defer.Deferred()
-
- def writeReportEntry(self, entry):
- pass
-
- def createReport(self):
- self.created.callback(self)
-
- def finish(self):
- pass
-
-class MockFailure(Exception):
- pass
-
-## from test_managers
-mockFailure = failure.Failure(MockFailure('mock'))
-
-class MockSuccessTask(BaseTask):
- def run(self):
- return defer.succeed(42)
-
-class MockFailTask(BaseTask):
- def run(self):
- return defer.fail(mockFailure)
-
-class MockFailOnceTask(BaseTask):
- def run(self):
- if self.failures >= 1:
- return defer.succeed(42)
- else:
- return defer.fail(mockFailure)
-
-class MockSuccessTaskWithTimeout(TaskWithTimeout):
- def run(self):
- return defer.succeed(42)
-
-class MockFailTaskThatTimesOut(TaskWithTimeout):
- def run(self):
- return defer.Deferred()
-
-class MockTimeoutOnceTask(TaskWithTimeout):
- def run(self):
- if self.failures >= 1:
- return defer.succeed(42)
- else:
- return defer.Deferred()
-
-class MockFailTaskWithTimeout(TaskWithTimeout):
- def run(self):
- return defer.fail(mockFailure)
-
-
-class MockNetTest(object):
- def __init__(self):
- self.successes = []
-
- def succeeded(self, measurement):
- self.successes.append(measurement)
-
-class MockMeasurement(TaskWithTimeout):
- def __init__(self, net_test):
- TaskWithTimeout.__init__(self)
- self.netTest = net_test
-
- def succeeded(self, result):
- return self.netTest.succeeded(42)
-
-class MockSuccessMeasurement(MockMeasurement):
- def run(self):
- return defer.succeed(42)
-
-class MockFailMeasurement(MockMeasurement):
- def run(self):
- return defer.fail(mockFailure)
-
-class MockFailOnceMeasurement(MockMeasurement):
- def run(self):
- if self.failures >= 1:
- return defer.succeed(42)
- else:
- return defer.fail(mockFailure)
-
-class MockDirector(object):
- def __init__(self):
- self.successes = []
-
- def measurementFailed(self, failure, measurement):
- pass
-
- def measurementSucceeded(self, measurement):
- self.successes.append(measurement)
-
-## from test_reporter.py
-class MockOReporter(object):
- def __init__(self):
- self.created = defer.Deferred()
-
- def writeReportEntry(self, entry):
- return defer.succeed(42)
-
- def finish(self):
- pass
-
- def createReport(self):
- from ooni.utils import log
- log.debug("Creating report with %s" % self)
- self.created.callback(self)
-
-class MockOReporterThatFailsWrite(MockOReporter):
- def writeReportEntry(self, entry):
- raise MockFailure
-
-class MockOReporterThatFailsOpen(MockOReporter):
- def createReport(self):
- self.created.errback(failure.Failure(MockFailure()))
-
-class MockOReporterThatFailsWriteOnce(MockOReporter):
- def __init__(self):
- self.failure = 0
- MockOReporter.__init__(self)
-
- def writeReportEntry(self, entry):
- if self.failure >= 1:
- return defer.succeed(42)
- else:
- self.failure += 1
- raise MockFailure
-
-class MockTaskManager(TaskManager):
- def __init__(self):
- self.successes = []
- TaskManager.__init__(self)
-
- def failed(self, failure, task):
- pass
-
- def succeeded(self, result, task):
- self.successes.append((result, task))
-
diff --git a/tests/test-class-design.py b/tests/test-class-design.py
deleted file mode 100644
index bb80cd3..0000000
--- a/tests/test-class-design.py
+++ /dev/null
@@ -1,101 +0,0 @@
-#!/usr/bin/env python
-#
-# testing classes to test multiple inheritance.
-# these are not meant to be run by trial, though they could be made to be so.
-# i didn't know where to put them. --isis
-
-import abc
-from pprint import pprint
-from inspect import classify_class_attrs
-
-class PluginBase(object):
- __metaclass__ = abc.ABCMeta
-
- @abc.abstractproperty
- def name(self):
- return 'you should not see this'
-
- @name.setter
- def name(self, value):
- return 'you should not set this'
-
- @name.deleter
- def name(self):
- return 'you should not del this'
-
- @abc.abstractmethod
- def inputParser(self, line):
- """Do something to parse something."""
- return
-
-class Foo(object):
- woo = "this class has some shit in it"
- def bar(self):
- print "i'm a Foo.bar()!"
- print woo
-
-class KwargTest(Foo):
- _name = "isis"
-
- #def __new__(cls, *a, **kw):
- # return super(KwargTest, cls).__new__(cls, *a, **kw)
-
- @property
- def name(self):
- return self._name
-
- @name.setter
- def name(self, value):
- self._name = value
-
- def __init__(self, *a, **kw):
- super(KwargTest, self).__init__()
-
- ## this causes the instantion args to override the class attrs
- for key, value in kw.items():
- setattr(self.__class__, key, value)
-
- print "%s.__init__(): self.__dict__ = %s" \
- % (type(self), pprint(type(self).__dict__))
-
- for attr in classify_class_attrs(self):
- print attr
-
- @classmethod
- def sayname(cls):
- print cls.name
-
-class KwargTestChild(KwargTest):
- name = "arturo"
- def __init__(self):
- super(KwargTestChild, self).__init__()
- print self.name
-
-class KwargTestChildOther(KwargTest):
- def __init__(self, name="robot", does="lasers"):
- super(KwargTestChildOther, self).__init__()
- print self.name
-
-
-if __name__ == "__main__":
- print "class KwargTest attr name: %s" % KwargTest.name
- kwargtest = KwargTest()
- print "KwargTest instantiated wo args"
- print "kwargtest.name: %s" % kwargtest.name
- print "kwargtest.sayname(): %s" % kwargtest.sayname()
- kwargtest2 = KwargTest(name="lovecruft", does="hacking")
- print "KwargTest instantiated with name args"
- print "kwargtest.name: %s" % kwargtest2.name
- print "kwargtest.sayname(): %s" % kwargtest2.sayname()
-
- print "class KwargTestChild attr name: %s" % KwargTestChild.name
- kwargtestchild = KwargTestChild()
- print "KwargTestChild instantiated wo args"
- print "kwargtestchild.name: %s" % kwargtestchild.name
- print "kwargtestchild.sayname(): %s" % kwargtestchild.sayname()
-
- print "class KwargTestChildOther attr name: %s" % KwargTestChildOther.name
- kwargtestchildother = KwargTestChildOther()
- print "KwargTestChildOther instantiated wo args"
- print "kwargtestchildother.name: %s" % kwargtestchildother.name
- print "kwargtestchildother.sayname(): %s" % kwargtestchildother.sayname()
diff --git a/tests/test_director.py b/tests/test_director.py
deleted file mode 100644
index a9dfbe8..0000000
--- a/tests/test_director.py
+++ /dev/null
@@ -1,59 +0,0 @@
-from twisted.internet import defer, base
-from twisted.trial import unittest
-
-from ooni.director import Director
-from ooni.nettest import NetTestLoader
-from tests.mocks import MockReporter
-base.DelayedCall.debug = True
-
-net_test_string = """
-from twisted.python import usage
-from ooni.nettest import NetTestCase
-
-class UsageOptions(usage.Options):
- optParameters = [['spam', 's', None, 'ham']]
-
-class DummyTestCase(NetTestCase):
- inputFile = ['file', 'f', None, 'The input File']
-
- usageOptions = UsageOptions
-
- def test_a(self):
- self.report['bar'] = 'bar'
-
- def test_b(self):
- self.report['foo'] = 'foo'
-"""
-
-
-dummyArgs = ('--spam', 1, '--file', 'dummyInputFile.txt')
-
-class TestDirector(unittest.TestCase):
- timeout = 1
- def setUp(self):
- with open('dummyInputFile.txt', 'w') as f:
- for i in range(10):
- f.write("%s\n" % i)
-
- self.reporters = [MockReporter()]
- self.director = Director()
-
- def tearDown(self):
- pass
-
- def test_start_net_test(self):
- ntl = NetTestLoader(dummyArgs)
- ntl.loadNetTestString(net_test_string)
-
- ntl.checkOptions()
- d = self.director.startNetTest('', ntl, self.reporters)
-
- @d.addCallback
- def done(result):
- self.assertEqual(self.director.successfulMeasurements, 20)
-
- return d
-
- def test_stop_net_test(self):
- pass
-
diff --git a/tests/test_dns.py b/tests/test_dns.py
deleted file mode 100644
index e9bb524..0000000
--- a/tests/test_dns.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#
-# This unittest is to verify that our usage of the twisted DNS resolver does
-# not break with new versions of twisted.
-
-import pdb
-from twisted.trial import unittest
-
-from twisted.internet import reactor
-
-from twisted.names import dns
-from twisted.names.client import Resolver
-
-class DNSTest(unittest.TestCase):
- def test_a_lookup_ooni_query(self):
- def done_query(message, *arg):
- answer = message.answers[0]
- self.assertEqual(answer.type, 1)
-
- dns_query = [dns.Query('ooni.nu', type=dns.A)]
- resolver = Resolver(servers=[('8.8.8.8', 53)])
- d = resolver.queryUDP(dns_query)
- d.addCallback(done_query)
- return d
-
diff --git a/tests/test_inputunit.py b/tests/test_inputunit.py
deleted file mode 100644
index 1f9043c..0000000
--- a/tests/test_inputunit.py
+++ /dev/null
@@ -1,29 +0,0 @@
-import unittest
-from ooni.inputunit import InputUnit, InputUnitFactory
-
-def dummyGenerator():
- for x in range(100):
- yield x
-
-class TestInputUnit(unittest.TestCase):
- def test_input_unit_factory(self):
- inputUnit = InputUnitFactory(range(100))
- for i in inputUnit:
- self.assertEqual(len(list(i)), inputUnit.inputUnitSize)
-
- def test_input_unit(self):
- inputs = range(100)
- inputUnit = InputUnit(inputs)
- idx = 0
- for i in inputUnit:
- idx += 1
-
- self.assertEqual(idx, 100)
-
- def test_input_unit_factory_length(self):
- inputUnitFactory = InputUnitFactory(range(100))
- l1 = len(inputUnitFactory)
- l2 = sum(1 for _ in inputUnitFactory)
- self.assertEqual(l1, 10)
- self.assertEqual(l2, 10)
-
diff --git a/tests/test_managers.py b/tests/test_managers.py
deleted file mode 100644
index 39f0881..0000000
--- a/tests/test_managers.py
+++ /dev/null
@@ -1,215 +0,0 @@
-from twisted.trial import unittest
-from twisted.python import failure
-from twisted.internet import defer, task
-
-from ooni.tasks import BaseTask, TaskWithTimeout, TaskTimedOut
-from ooni.managers import TaskManager, MeasurementManager
-
-from tests.mocks import MockSuccessTask, MockFailTask, MockFailOnceTask, MockFailure
-from tests.mocks import MockSuccessTaskWithTimeout, MockFailTaskThatTimesOut
-from tests.mocks import MockTimeoutOnceTask, MockFailTaskWithTimeout
-from tests.mocks import MockTaskManager, mockFailure, MockDirector
-from tests.mocks import MockNetTest, MockMeasurement, MockSuccessMeasurement
-from tests.mocks import MockFailMeasurement, MockFailOnceMeasurement
-
-class TestTaskManager(unittest.TestCase):
- timeout = 1
- def setUp(self):
- self.measurementManager = MockTaskManager()
- self.measurementManager.concurrency = 20
- self.measurementManager.retries = 2
-
- self.measurementManager.start()
-
- self.clock = task.Clock()
-
- def schedule_successful_tasks(self, task_type, number=1):
- all_done = []
- for x in range(number):
- mock_task = task_type()
- all_done.append(mock_task.done)
- self.measurementManager.schedule(mock_task)
-
- d = defer.DeferredList(all_done)
- @d.addCallback
- def done(res):
- for task_result, task_instance in self.measurementManager.successes:
- self.assertEqual(task_result, 42)
- self.assertIsInstance(task_instance, task_type)
-
- return d
-
- def schedule_failing_tasks(self, task_type, number=1):
- all_done = []
- for x in range(number):
- mock_task = task_type()
- all_done.append(mock_task.done)
- self.measurementManager.schedule(mock_task)
-
- d = defer.DeferredList(all_done)
- @d.addCallback
- def done(res):
- # 10*2 because 2 is the number of retries
- self.assertEqual(len(self.measurementManager.failures), number*3)
- for task_result, task_instance in self.measurementManager.failures:
- self.assertEqual(task_result, mockFailure)
- self.assertIsInstance(task_instance, task_type)
-
- return d
-
- def test_schedule_failing_with_mock_failure_task(self):
- mock_task = MockFailTask()
- self.measurementManager.schedule(mock_task)
- self.assertFailure(mock_task.done, MockFailure)
- return mock_task.done
-
- def test_schedule_successful_one_task(self):
- return self.schedule_successful_tasks(MockSuccessTask)
-
- def test_schedule_successful_one_task_with_timeout(self):
- return self.schedule_successful_tasks(MockSuccessTaskWithTimeout)
-
- def test_schedule_failing_tasks_that_timesout(self):
- self.measurementManager.retries = 0
-
- task_type = MockFailTaskThatTimesOut
- task_timeout = 5
-
- mock_task = task_type()
- mock_task.timeout = task_timeout
- mock_task.clock = self.clock
-
- self.measurementManager.schedule(mock_task)
-
- self.clock.advance(task_timeout)
-
- @mock_task.done.addBoth
- def done(res):
- self.assertEqual(len(self.measurementManager.failures), 1)
- for task_result, task_instance in self.measurementManager.failures:
- self.assertIsInstance(task_instance, task_type)
-
- return mock_task.done
-
- def test_schedule_time_out_once(self):
- task_type = MockTimeoutOnceTask
- task_timeout = 5
-
- mock_task = task_type()
- mock_task.timeout = task_timeout
- mock_task.clock = self.clock
-
- self.measurementManager.schedule(mock_task)
-
- self.clock.advance(task_timeout)
-
- @mock_task.done.addBoth
- def done(res):
- self.assertEqual(len(self.measurementManager.failures), 1)
- for task_result, task_instance in self.measurementManager.failures:
- self.assertIsInstance(task_instance, task_type)
-
- for task_result, task_instance in self.measurementManager.successes:
- self.assertEqual(task_result, 42)
- self.assertIsInstance(task_instance, task_type)
-
- return mock_task.done
-
-
- def test_schedule_failing_one_task(self):
- return self.schedule_failing_tasks(MockFailTask)
-
- def test_schedule_failing_one_task_with_timeout(self):
- return self.schedule_failing_tasks(MockFailTaskWithTimeout)
-
- def test_schedule_successful_ten_tasks(self):
- return self.schedule_successful_tasks(MockSuccessTask, number=10)
-
- def test_schedule_failing_ten_tasks(self):
- return self.schedule_failing_tasks(MockFailTask, number=10)
-
- def test_schedule_successful_27_tasks(self):
- return self.schedule_successful_tasks(MockSuccessTask, number=27)
-
- def test_schedule_failing_27_tasks(self):
- return self.schedule_failing_tasks(MockFailTask, number=27)
-
- def test_task_retry_and_succeed(self):
- mock_task = MockFailOnceTask()
- self.measurementManager.schedule(mock_task)
-
- @mock_task.done.addCallback
- def done(res):
- self.assertEqual(len(self.measurementManager.failures), 1)
-
- self.assertEqual(self.measurementManager.failures,
- [(mockFailure, mock_task)])
- self.assertEqual(self.measurementManager.successes,
- [(42, mock_task)])
-
- return mock_task.done
-
- def dd_test_task_retry_and_succeed_56_tasks(self):
- """
- XXX this test fails in a non-deterministic manner.
- """
- all_done = []
- number = 56
- for x in range(number):
- mock_task = MockFailOnceTask()
- all_done.append(mock_task.done)
- self.measurementManager.schedule(mock_task)
-
- d = defer.DeferredList(all_done)
-
- @d.addCallback
- def done(res):
- self.assertEqual(len(self.measurementManager.failures), number)
-
- for task_result, task_instance in self.measurementManager.successes:
- self.assertEqual(task_result, 42)
- self.assertIsInstance(task_instance, MockFailOnceTask)
-
- return d
-
-class TestMeasurementManager(unittest.TestCase):
- def setUp(self):
- mock_director = MockDirector()
-
- self.measurementManager = MeasurementManager()
- self.measurementManager.director = mock_director
-
- self.measurementManager.concurrency = 10
- self.measurementManager.retries = 2
-
- self.measurementManager.start()
-
- self.mockNetTest = MockNetTest()
-
- def test_schedule_and_net_test_notified(self, number=1):
- # XXX we should probably be inheriting from the base test class
- mock_task = MockSuccessMeasurement(self.mockNetTest)
- self.measurementManager.schedule(mock_task)
-
- @mock_task.done.addCallback
- def done(res):
- self.assertEqual(self.mockNetTest.successes,
- [42])
-
- self.assertEqual(len(self.mockNetTest.successes), 1)
- return mock_task.done
-
- def test_schedule_failing_one_measurement(self):
- mock_task = MockFailMeasurement(self.mockNetTest)
- self.measurementManager.schedule(mock_task)
-
- @mock_task.done.addErrback
- def done(failure):
- self.assertEqual(len(self.measurementManager.failures), 3)
-
- self.assertEqual(failure, mockFailure)
- self.assertEqual(len(self.mockNetTest.successes), 0)
-
- return mock_task.done
-
-
diff --git a/tests/test_mutate.py b/tests/test_mutate.py
deleted file mode 100644
index 7e30586..0000000
--- a/tests/test_mutate.py
+++ /dev/null
@@ -1,15 +0,0 @@
-import unittest
-from ooni.kit import daphn3
-
-class TestDaphn3(unittest.TestCase):
- def test_mutate_string(self):
- original_string = '\x00\x00\x00'
- mutated = daphn3.daphn3MutateString(original_string, 1)
- self.assertEqual(mutated, '\x00\x01\x00')
- def test_mutate_daphn3(self):
- original_dict = [{'client': '\x00\x00\x00'},
- {'server': '\x00\x00\x00'}]
- mutated_dict = daphn3.daphn3Mutate(original_dict, 1, 1)
- self.assertEqual(mutated_dict, [{'client': '\x00\x00\x00'},
- {'server': '\x00\x01\x00'}])
-
diff --git a/tests/test_nettest.py b/tests/test_nettest.py
deleted file mode 100644
index 78240d5..0000000
--- a/tests/test_nettest.py
+++ /dev/null
@@ -1,268 +0,0 @@
-import os
-from StringIO import StringIO
-from tempfile import TemporaryFile, mkstemp
-
-from twisted.trial import unittest
-from twisted.internet import defer, reactor
-from twisted.python.usage import UsageError
-
-from ooni.nettest import NetTest, InvalidOption, MissingRequiredOption
-from ooni.nettest import NetTestLoader, FailureToLoadNetTest
-from ooni.tasks import BaseTask
-from ooni.utils import NotRootError
-
-from ooni.director import Director
-
-from ooni.managers import TaskManager
-
-from tests.mocks import MockMeasurement, MockMeasurementFailOnce
-from tests.mocks import MockNetTest, MockDirector, MockReporter
-from tests.mocks import MockMeasurementManager
-defer.setDebugging(True)
-
-net_test_string = """
-from twisted.python import usage
-from ooni.nettest import NetTestCase
-
-class UsageOptions(usage.Options):
- optParameters = [['spam', 's', None, 'ham']]
-
-class DummyTestCase(NetTestCase):
-
- usageOptions = UsageOptions
-
- def test_a(self):
- self.report['bar'] = 'bar'
-
- def test_b(self):
- self.report['foo'] = 'foo'
-"""
-
-net_test_root_required = net_test_string+"""
- requiresRoot = True
-"""
-
-net_test_string_with_file = """
-from twisted.python import usage
-from ooni.nettest import NetTestCase
-
-class UsageOptions(usage.Options):
- optParameters = [['spam', 's', None, 'ham']]
-
-class DummyTestCase(NetTestCase):
- inputFile = ['file', 'f', None, 'The input File']
-
- usageOptions = UsageOptions
-
- def test_a(self):
- self.report['bar'] = 'bar'
-
- def test_b(self):
- self.report['foo'] = 'foo'
-"""
-
-net_test_string_with_required_option = """
-from twisted.python import usage
-from ooni.nettest import NetTestCase
-
-class UsageOptions(usage.Options):
- optParameters = [['spam', 's', None, 'ham'],
- ['foo', 'o', None, 'moo'],
- ['bar', 'o', None, 'baz'],
- ]
-
-class DummyTestCase(NetTestCase):
- inputFile = ['file', 'f', None, 'The input File']
-
- usageOptions = UsageOptions
-
- def test_a(self):
- self.report['bar'] = 'bar'
-
- def test_b(self):
- self.report['foo'] = 'foo'
-
- requiredOptions = ['foo', 'bar']
-"""
-
-dummyInputs = range(1)
-dummyArgs = ('--spam', 'notham')
-dummyOptions = {'spam':'notham'}
-dummyInvalidArgs = ('--cram', 'jam')
-dummyInvalidOptions= {'cram':'jam'}
-dummyArgsWithRequiredOptions = ('--foo', 'moo', '--bar', 'baz')
-dummyRequiredOptions = {'foo':'moo', 'bar':'baz'}
-dummyArgsWithFile = ('--spam', 'notham', '--file', 'dummyInputFile.txt')
-
-class TestNetTest(unittest.TestCase):
- timeout = 1
- def setUp(self):
- with open('dummyInputFile.txt', 'w') as f:
- for i in range(10):
- f.write("%s\n" % i)
-
- def assertCallable(self, thing):
- self.assertIn('__call__', dir(thing))
-
- def verifyMethods(self, testCases):
- uniq_test_methods = set()
- for test_class, test_methods in testCases:
- instance = test_class()
- for test_method in test_methods:
- c = getattr(instance, test_method)
- self.assertCallable(c)
- uniq_test_methods.add(test_method)
- self.assertEqual(set(['test_a', 'test_b']), uniq_test_methods)
-
- def test_load_net_test_from_file(self):
- """
- Given a file verify that the net test cases are properly
- generated.
- """
- __, net_test_file = mkstemp()
- with open(net_test_file, 'w') as f:
- f.write(net_test_string)
- f.close()
-
- ntl = NetTestLoader(dummyArgs)
- ntl.loadNetTestFile(net_test_file)
-
- self.verifyMethods(ntl.testCases)
- os.unlink(net_test_file)
-
- def test_load_net_test_from_str(self):
- """
- Given a file like object verify that the net test cases are properly
- generated.
- """
- ntl = NetTestLoader(dummyArgs)
- ntl.loadNetTestString(net_test_string)
-
- self.verifyMethods(ntl.testCases)
-
- def test_load_net_test_from_StringIO(self):
- """
- Given a file like object verify that the net test cases are properly
- generated.
- """
- ntl = NetTestLoader(dummyArgs)
- ntl.loadNetTestString(net_test_string)
-
- self.verifyMethods(ntl.testCases)
-
- def test_load_with_option(self):
- ntl = NetTestLoader(dummyArgs)
- ntl.loadNetTestString(net_test_string)
-
- self.assertIsInstance(ntl, NetTestLoader)
- for test_klass, test_meth in ntl.testCases:
- for option in dummyOptions.keys():
- self.assertIn(option, test_klass.usageOptions())
-
- def test_load_with_invalid_option(self):
- try:
- ntl = NetTestLoader(dummyInvalidArgs)
- ntl.loadNetTestString(net_test_string)
-
- ntl.checkOptions()
- raise Exception
- except UsageError:
- pass
-
- def test_load_with_required_option(self):
- ntl = NetTestLoader(dummyArgsWithRequiredOptions)
- ntl.loadNetTestString(net_test_string_with_required_option)
-
- self.assertIsInstance(ntl, NetTestLoader)
-
- def test_load_with_missing_required_option(self):
- try:
- ntl = NetTestLoader(dummyArgs)
- ntl.loadNetTestString(net_test_string_with_required_option)
-
- except MissingRequiredOption:
- pass
-
- def test_net_test_inputs(self):
- ntl = NetTestLoader(dummyArgsWithFile)
- ntl.loadNetTestString(net_test_string_with_file)
-
- ntl.checkOptions()
-
- # XXX: if you use the same test_class twice you will have consumed all
- # of its inputs!
- tested = set([])
- for test_class, test_method in ntl.testCases:
- if test_class not in tested:
- tested.update([test_class])
- self.assertEqual(len(list(test_class.inputs)), 10)
-
- def test_setup_local_options_in_test_cases(self):
- ntl = NetTestLoader(dummyArgs)
- ntl.loadNetTestString(net_test_string)
-
- ntl.checkOptions()
-
- for test_class, test_method in ntl.testCases:
- self.assertEqual(test_class.localOptions, dummyOptions)
-
- def test_generate_measurements_size(self):
- ntl = NetTestLoader(dummyArgsWithFile)
- ntl.loadNetTestString(net_test_string_with_file)
-
- ntl.checkOptions()
- net_test = NetTest(ntl, None)
-
- measurements = list(net_test.generateMeasurements())
- self.assertEqual(len(measurements), 20)
-
- def test_net_test_completed_callback(self):
- ntl = NetTestLoader(dummyArgsWithFile)
- ntl.loadNetTestString(net_test_string_with_file)
-
- ntl.checkOptions()
- director = Director()
-
- d = director.startNetTest('', ntl, [MockReporter()])
-
- @d.addCallback
- def complete(result):
- #XXX: why is the return type (True, None) ?
- self.assertEqual(result, [(True,None)])
- self.assertEqual(director.successfulMeasurements, 20)
-
- return d
-
- def test_require_root_succeed(self):
- #XXX: will require root to run
- ntl = NetTestLoader(dummyArgs)
- ntl.loadNetTestString(net_test_root_required)
-
- for test_class, method in ntl.testCases:
- self.assertTrue(test_class.requiresRoot)
-
- #def test_require_root_failed(self):
- # #XXX: will fail if you run as root
- # try:
- # net_test = NetTestLoader(StringIO(net_test_root_required),
- # dummyArgs)
- # except NotRootError:
- # pass
-
- #def test_create_report_succeed(self):
- # pass
-
- #def test_create_report_failed(self):
- # pass
-
- #def test_run_all_test(self):
- # raise NotImplementedError
-
- #def test_resume_test(self):
- # pass
-
- #def test_progress(self):
- # pass
-
- #def test_time_out(self):
- # raise NotImplementedError
diff --git a/tests/test_otime.py b/tests/test_otime.py
deleted file mode 100644
index 80979f2..0000000
--- a/tests/test_otime.py
+++ /dev/null
@@ -1,15 +0,0 @@
-import unittest
-from datetime import datetime
-from ooni import otime
-
-test_date = datetime(2002, 6, 26, 22, 45, 49)
-
-class TestOtime(unittest.TestCase):
- def test_timestamp(self):
- self.assertEqual(otime.timestamp(test_date), "2002-06-26T224549Z")
-
- def test_fromTimestamp(self):
- time_stamp = otime.timestamp(test_date)
- self.assertEqual(test_date, otime.fromTimestamp(time_stamp))
-
-
diff --git a/tests/test_reporter.py b/tests/test_reporter.py
deleted file mode 100644
index e21b7a1..0000000
--- a/tests/test_reporter.py
+++ /dev/null
@@ -1,238 +0,0 @@
-from twisted.internet import defer
-from twisted.trial import unittest
-
-from ooni.reporter import Report, YAMLReporter, OONIBReporter, safe_dump
-from ooni.managers import ReportEntryManager, TaskManager
-from ooni.nettest import NetTest, NetTestState
-from ooni.errors import ReportNotCreated, ReportAlreadyClosed
-
-from ooni.tasks import TaskWithTimeout
-from tests.mocks import MockOReporter, MockTaskManager
-from tests.mocks import MockMeasurement, MockNetTest
-from tests.mocks import MockOReporterThatFailsWrite
-from tests.mocks import MockOReporterThatFailsWriteOnce
-from tests.mocks import MockOReporterThatFailsOpen
-
-from twisted.python import failure
-import yaml
-
-class TestReport(unittest.TestCase):
- def setUp(self):
- pass
- def tearDown(self):
- pass
- def test_create_report_with_no_reporter(self):
- report = Report([],ReportEntryManager())
- self.assertIsInstance(report, Report)
-
- def test_create_report_with_single_reporter(self):
- report = Report([MockOReporter()], ReportEntryManager())
- self.assertIsInstance(report, Report)
-
- def test_create_report_with_multiple_reporters(self):
- report = Report([MockOReporter() for x in xrange(3)],
- ReportEntryManager())
- self.assertIsInstance(report, Report)
-
- def test_report_open_with_single_reporter(self):
- report = Report([MockOReporter()],ReportEntryManager())
- d = report.open()
- return d
-
- def test_report_open_with_multiple_reporter(self):
- report = Report([MockOReporter() for x in xrange(3)],
- ReportEntryManager())
- d = report.open()
- return d
-
- def test_fail_to_open_report_with_single_reporter(self):
- report = Report([MockOReporterThatFailsOpen()],
- ReportEntryManager())
- d = report.open()
- def f(x):
- self.assertEquals(len(report.reporters), 0)
- d.addCallback(f)
- return d
-
- def test_fail_to_open_single_report_with_multiple_reporter(self):
- report = Report([MockOReporterThatFailsOpen(), MockOReporter(),
- MockOReporter()], ReportEntryManager())
- d = report.open()
- def f(x):
- self.assertEquals(len(report.reporters),2)
- d.addCallback(f)
- return d
-
- def test_fail_to_open_all_reports_with_multiple_reporter(self):
- report = Report([MockOReporterThatFailsOpen() for x in xrange(3)],
- ReportEntryManager())
- d = report.open()
- def f(x):
- self.assertEquals(len(report.reporters),0)
- d.addCallback(f)
- return d
-
- def test_write_report_with_single_reporter_and_succeed(self):
- #XXX: verify that the MockOReporter writeReportEntry succeeds
- report = Report([MockOReporter()], ReportEntryManager())
- report.open()
- d = report.write(MockMeasurement(MockNetTest()))
- return d
-
- def test_write_report_with_single_reporter_and_fail_after_timeout(self):
- report = Report([MockOReporterThatFailsWrite()], ReportEntryManager())
- report.open()
- d = report.write(MockMeasurement(MockNetTest()))
- def f(err):
- self.assertEquals(len(report.reporters),0)
- d.addBoth(f)
- return d
-
- def test_write_report_with_single_reporter_and_succeed_after_timeout(self):
- report = Report([MockOReporterThatFailsWriteOnce()], ReportEntryManager())
- report.open()
- d = report.write(MockMeasurement(MockNetTest()))
- return d
-
- def test_write_report_with_multiple_reporter_and_succeed(self):
- report = Report([MockOReporter() for x in xrange(3)], ReportEntryManager())
- report.open()
- d = report.write(MockMeasurement(MockNetTest()))
- return d
-
- def test_write_report_with_multiple_reporter_and_fail_a_single_reporter(self):
- report = Report([MockOReporter(), MockOReporter(), MockOReporterThatFailsWrite()], ReportEntryManager())
- d = report.open()
-
- self.assertEquals(len(report.reporters),3)
- d = report.write(MockMeasurement(MockNetTest()))
-
- def f(x):
- # one of the reporters should have been removed
- self.assertEquals(len(report.reporters), 2)
- d.addBoth(f)
- return d
-
- def test_write_report_with_multiple_reporter_and_fail_all_reporter(self):
- report = Report([MockOReporterThatFailsWrite() for x in xrange(3)], ReportEntryManager())
- report.open()
- d = report.write(MockMeasurement(MockNetTest()))
- def f(err):
- self.assertEquals(len(report.reporters),0)
- d.addErrback(f)
- return d
-
-class TestYAMLReporter(unittest.TestCase):
- def setUp(self):
- self.testDetails = {'software_name': 'ooniprobe', 'options':
- {'pcapfile': None, 'help': 0, 'subargs': ['-f', 'alexa_10'], 'resume':
- 0, 'parallelism': '10', 'no-default-reporter': 0, 'testdeck': None,
- 'test': 'nettests/blocking/http_requests.py', 'logfile': None,
- 'collector': None, 'reportfile': None}, 'test_version': '0.2.3',
- 'software_version': '0.0.10', 'test_name': 'http_requests_test',
- 'start_time': 1362054343.0, 'probe_asn': 'AS0', 'probe_ip':
- '127.0.0.1', 'probe_cc': 'US'}
-
- def tearDown(self):
- pass
- def test_create_yaml_reporter(self):
- self.assertIsInstance(YAMLReporter(self.testDetails),
- YAMLReporter)
-
- def test_open_yaml_report_and_succeed(self):
- r = YAMLReporter(self.testDetails)
- r.createReport()
- # verify that testDetails was written to report properly
- def f(r):
- r._stream.seek(0)
- details, = yaml.safe_load_all(r._stream)
- self.assertEqual(details, self.testDetails)
- r.created.addCallback(f)
- return r.created
-
- #def test_open_yaml_report_and_fail(self):
- # #XXX: YAMLReporter does not handle failures of this type
- # pass
-
- def test_write_yaml_report_entry(self):
- r = YAMLReporter(self.testDetails)
- r.createReport()
-
- report_entry = {'foo':'bar', 'bin':'baz'}
- r.writeReportEntry(report_entry)
-
- # verify that details and entry were written to report
- def f(r):
- r._stream.seek(0)
- report = yaml.safe_load_all(r._stream)
- details, entry = report
- self.assertEqual(details, self.testDetails)
- self.assertEqual(entry, report_entry)
- r.created.addCallback(f)
- return r.created
-
- def test_write_multiple_yaml_report_entry(self):
- r = YAMLReporter(self.testDetails)
- r.createReport()
- def reportEntry():
- for x in xrange(10):
- yield {'foo':'bar', 'bin':'baz', 'item':x}
- for entry in reportEntry():
- r.writeReportEntry(entry)
- # verify that details and multiple entries were written to report
- def f(r):
- r._stream.seek(0)
- report = yaml.safe_load_all(r._stream)
- details = report.next()
- self.assertEqual(details, self.testDetails)
- self.assertEqual([r for r in report], [r for r in reportEntry()])
- r.created.addCallback(f)
- return r.created
-
- def test_close_yaml_report(self):
- r = YAMLReporter(self.testDetails)
- r.createReport()
- r.finish()
- self.assertTrue(r._stream.closed)
-
- def test_write_yaml_report_after_close(self):
- r = YAMLReporter(self.testDetails)
- r.createReport()
- r.finish()
- def f(r):
- r.writeReportEntry("foo")
- r.created.addCallback(f)
- self.assertFailure(r.created, ReportAlreadyClosed)
-
- def test_write_yaml_report_before_open(self):
- r = YAMLReporter(self.testDetails)
- def f(r):
- r.writeReportEntry("foo")
- r.created.addCallback(f)
- self.assertFailure(r.created, ReportNotCreated)
-
-#class TestOONIBReporter(unittest.TestCase):
-# def setUp(self):
-# pass
-# def tearDown(self):
-# pass
-# def test_create_oonib_reporter(self):
-# raise NotImplementedError
-# def test_open_oonib_report_and_succeed(self):
-# raise NotImplementedError
-# def test_open_oonib_report_and_fail(self):
-# raise NotImplementedError
-# def test_write_oonib_report_entry_and_succeed(self):
-# raise NotImplementedError
-# def test_write_oonib_report_entry_and_succeed_after_timeout(self):
-# raise NotImplementedError
-# def test_write_oonib_report_entry_and_fail_after_timeout(self):
-# raise NotImplementedError
-# def test_write_oonib_report_after_close(self):
-# raise NotImplementedError
-# def test_write_oonib_report_before_open(self):
-# raise NotImplementedError
-# def test_close_oonib_report_and_succeed(self):
-# raise NotImplementedError
-# def test_close_oonib_report_and_fail(self):
-# raise NotImplementedError
diff --git a/tests/test_safe_represent.py b/tests/test_safe_represent.py
deleted file mode 100644
index 82a5196..0000000
--- a/tests/test_safe_represent.py
+++ /dev/null
@@ -1,14 +0,0 @@
-import yaml
-
-from twisted.trial import unittest
-
-from ooni.reporter import OSafeDumper
-
-from scapy.all import IP, UDP
-
-class TestScapyRepresent(unittest.TestCase):
- def test_represent_scapy(self):
- data = IP()/UDP()
- yaml.dump_all([data], Dumper=OSafeDumper)
-
-
diff --git a/tests/test_trueheaders.py b/tests/test_trueheaders.py
deleted file mode 100644
index 9ac0a27..0000000
--- a/tests/test_trueheaders.py
+++ /dev/null
@@ -1,41 +0,0 @@
-from twisted.trial import unittest
-
-from ooni.utils.txagentwithsocks import TrueHeaders
-
-dummy_headers_dict = {
- 'Header1': ['Value1', 'Value2'],
- 'Header2': ['ValueA', 'ValueB']
-}
-
-dummy_headers_dict2 = {
- 'Header1': ['Value1', 'Value2'],
- 'Header2': ['ValueA', 'ValueB'],
- 'Header3': ['ValueA', 'ValueB'],
-}
-
-dummy_headers_dict3 = {
- 'Header1': ['Value1', 'Value2'],
- 'Header2': ['ValueA', 'ValueB'],
- 'Header4': ['ValueA', 'ValueB'],
-}
-
-
-class TestTrueHeaders(unittest.TestCase):
- def test_names_match(self):
- th = TrueHeaders(dummy_headers_dict)
- self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict)), set())
-
- def test_names_not_match(self):
- th = TrueHeaders(dummy_headers_dict)
- self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3']))
-
- th = TrueHeaders(dummy_headers_dict3)
- self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3', 'Header4']))
-
- def test_names_match_expect_ignore(self):
- th = TrueHeaders(dummy_headers_dict)
- self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2), ignore=['Header3']), set())
-
-
-
-
diff --git a/tests/test_utils.py b/tests/test_utils.py
deleted file mode 100644
index cc648e0..0000000
--- a/tests/test_utils.py
+++ /dev/null
@@ -1,20 +0,0 @@
-import unittest
-from ooni.utils import pushFilenameStack
-
-class TestUtils(unittest.TestCase):
- def test_pushFilenameStack(self):
- f = open("dummyfile", "w+")
- f.write("0\n")
- f.close()
- for i in xrange(1, 5):
- f = open("dummyfile.%s" % i, "w+")
- f.write("%s\n" % i)
- f.close()
-
- pushFilenameStack("dummyfile")
- for i in xrange(1, 5):
- f = open("dummyfile.%s" % i)
- c = f.readlines()[0].strip()
- self.assertEqual(str(i-1), str(c))
- f.close()
-
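The deleted test above exercises ooni.utils.pushFilenameStack, which, judging by its assertions, rotates a file and its numbered backups the way log rotation does (dummyfile becomes dummyfile.1, dummyfile.1 becomes dummyfile.2, and so on, with the oldest copy overwritten). A minimal sketch of that rotation under those assumptions; this is an illustration, not the actual ooni.utils implementation:

    import os

    def push_filename_stack(basename, depth=4):
        # Hypothetical helper for illustration only: shift basename.(depth-1)
        # to basename.depth, ..., basename.1 to basename.2, then move
        # basename itself to basename.1, which is the behaviour the deleted
        # test_pushFilenameStack asserts for ooni.utils.pushFilenameStack.
        for i in range(depth, 1, -1):
            older = "%s.%d" % (basename, i - 1)
            if os.path.exists(older):
                os.rename(older, "%s.%d" % (basename, i))  # overwrites on POSIX
        if os.path.exists(basename):
            os.rename(basename, "%s.1" % basename)

Called before opening a fresh report or log file, a helper like this keeps the last few copies around instead of clobbering them.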
[ooni-probe/master] Move sample config file to data subdirectory
by art@torproject.org 19 Jun '13
commit 367e58128a9b7cb6c63897cfd24f6a246bcedd8a
Author: Arturo Filastò <art(a)fuffa.org>
Date: Tue Apr 23 16:58:59 2013 +0200
Move sample config file to data subdirectory
---
data/ooniprobe.conf.sample | 56 ++++++++++++++++++++++++++++++++++++++++++++
ooniprobe.conf.sample | 56 --------------------------------------------
2 files changed, 56 insertions(+), 56 deletions(-)
diff --git a/data/ooniprobe.conf.sample b/data/ooniprobe.conf.sample
new file mode 100644
index 0000000..5528199
--- /dev/null
+++ b/data/ooniprobe.conf.sample
@@ -0,0 +1,56 @@
+# This is the configuration file for OONIProbe
+# This file follows the YAML markup format: http://yaml.org/spec/1.2/spec.html
+# Keep in mind that indentation matters.
+
+basic:
+ # Where OONIProbe should be writing its log file
+ logfile: ooniprobe.log
+privacy:
+ # Should we include the IP address of the probe in the report?
+ includeip: false
+ # Should we include the ASN of the probe in the report?
+ includeasn: false
+ # Should we include the country as reported by GeoIP in the report?
+ includecountry: false
+ # Should we include the city as reported by GeoIP in the report?
+ includecity: false
+ # Should we collect a full packet capture on the client?
+ includepcap: false
+reports:
+ # This is a packet capture file (.pcap) to load as a test:
+ pcap: Null
+advanced:
+ # XXX change this to point to the directory where you have stored the GeoIP
+ # database file. This should be the directory in which OONI is installed
+ # /path/to/ooni-probe/data/
+ geoip_data_dir: /usr/share/GeoIP/
+ debug: true
+ tor_binary: /usr/sbin/tor
+ # For auto detection
+ interface: auto
+ # Or specify a specific interface
+ #interface: wlan0
+ # If you do not specify start_tor, you will have to have Tor running and
+ # explicitly set the control port and SOCKS port
+ start_tor: true
+ # After how many seconds we should give up on a particular measurement
+ measurement_timeout: 30
+ # After how many retries we should give up on a measurement
+ measurement_retries: 2
+ # How many measurements to perform concurrently
+ measurement_concurrency: 100
+ # After how many seconds we should give up on reporting
+ reporting_timeout: 30
+ # After how many retries to give up on reporting
+ reporting_retries: 3
+ # How many reports to perform concurrently
+ reporting_concurrency: 20
+tor:
+ #socks_port: 9050
+ #control_port: 9051
+ # Specify the absolute path to the Tor bridges to use for testing
+ #bridges: bridges.list
+ # Specify the path of the tor data directory.
+ # This should be set to something to avoid having Tor download the
+ # descriptors and consensus data each time.
+ #data_dir: ~/.tor/
diff --git a/ooniprobe.conf.sample b/ooniprobe.conf.sample
deleted file mode 100644
index 27a4fb3..0000000
--- a/ooniprobe.conf.sample
+++ /dev/null
@@ -1,56 +0,0 @@
-# This is the configuration file for OONIProbe
-# This file follows the YAML markup format: http://yaml.org/spec/1.2/spec.html
-# Keep in mind that indentation matters.
-
-basic:
- # Where OONIProbe should be writing it's log file
- logfile: ooniprobe.log
-privacy:
- # Should we include the IP address of the probe in the report?
- includeip: false
- # Should we include the ASN of the probe in the report?
- includeasn: false
- # Should we include the country as reported by GeoIP in the report?
- includecountry: false
- # Should we include the city as reported by GeoIP in the report?
- includecity: false
- # Should we collect a full packet capture on the client?
- includepcap: false
-reports:
- # This is a packet capture file (.pcap) to load as a test:
- pcap: Null
-advanced:
- # XXX change this to point to the directory where you have stored the GeoIP
- # database file. This should be the directory in which OONI is installed
- # /path/to/ooni-probe/data/
- geoip_data_dir: /usr/share/GeoIP/
- debug: true
- tor_binary: '/usr/sbin/tor'
- # For auto detection
- interface: auto
- # Of specify a specific interface
- #interface: wlan0
- # If you do not specify start_tor, you will have to have Tor running and
- # explicitly set the control port and SOCKS port
- start_tor: true
- # After how many seconds we should give up on a particular measurement
- measurement_timeout: 30
- # After how many retries we should give up on a measurement
- measurement_retries: 2
- # How many measurments to perform concurrently
- measurement_concurrency: 100
- # After how may seconds we should give up reporting
- reporting_timeout: 30
- # After how many retries to give up on reporting
- reporting_retries: 3
- # How many reports to perform concurrently
- reporting_concurrency: 20
-tor:
- #socks_port: 9050
- #control_port: 9051
- # Specify the absolute path to the Tor bridges to use for testing
- bridges: bridges.list
- # Specify path of the tor datadirectory.
- # This should be set to something to avoid having Tor download each time
- # the descriptors and consensus data.
- data_dir: ~/.tor/
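Since the sample file is plain YAML, it can be sanity-checked by loading it with PyYAML. A minimal sketch, assuming only the new data/ path from this commit and standard PyYAML; ooniprobe's own config loader is not shown here and may behave differently:

    import yaml

    # Illustration only: parse the relocated sample config and read back a
    # few of the settings documented above.
    with open("data/ooniprobe.conf.sample") as f:
        config = yaml.safe_load(f)

    print(config["basic"]["logfile"])                 # ooniprobe.log
    print(config["privacy"]["includeip"])             # False
    print(config["advanced"]["measurement_timeout"])  # 30
    print(config["tor"])                              # None, every key is commented out

Note that a section whose keys are all commented out, like tor above, loads as None rather than as an empty mapping, so code reading it needs a None check before indexing into it.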