tor-commits
Threads by month
- ----- 2025 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
November 2012
- 18 participants
- 1509 discussions
[ooni-probe/master] * Refactor all the bridget tests into a better dir structure
by isis@torproject.org 03 Nov '12
by isis@torproject.org 03 Nov '12
03 Nov '12
commit 04a61965d73cb96745e898c5bfd4cf097a090803
Author: Isis Lovecruft <isis(a)torproject.org>
Date: Tue Oct 23 10:44:46 2012 +0000
* Refactor all the bridget tests into a better dir structure
---
bin/canary | 27 ++
ooni/bridget/__init__.py | 14 +
ooni/bridget/custodiet.py | 421 ++++++++++++++++++++++++
ooni/bridget/tests/__init__.py | 14 +
ooni/bridget/tests/bridget.py | 499 ++++++++++++++++++++++++++++
ooni/bridget/utils/__init__.py | 1 +
ooni/bridget/utils/inputs.py | 174 ++++++++++
ooni/bridget/utils/interface.py | 54 +++
ooni/bridget/utils/log.py | 98 ++++++
ooni/bridget/utils/nodes.py | 176 ++++++++++
ooni/bridget/utils/onion.py | 686 +++++++++++++++++++++++++++++++++++++++
ooni/bridget/utils/reports.py | 144 ++++++++
ooni/bridget/utils/tests.py | 141 ++++++++
ooni/bridget/utils/work.py | 147 +++++++++
ooni/plugins/bridget.py | 500 ----------------------------
15 files changed, 2596 insertions(+), 500 deletions(-)
diff --git a/bin/canary b/bin/canary
new file mode 100755
index 0000000..1473ae4
--- /dev/null
+++ b/bin/canary
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+###############################################################################
+#
+# canary
+# -----------------
+# Test Tor bridge reachability.
+#
+# :authors: Isis Lovecruft
+# :copyright: 2012 Isis Lovecruft, The Tor Project
+# :licence: see included LICENSE file
+# :version: 0.2.0-beta
+###############################################################################
+
+import os, sys
+import copy_reg
+
+# Hack to set the proper sys.path. Overcomes the export PYTHONPATH pain.
+sys.path[:] = map(os.path.abspath, sys.path)
+sys.path.insert(0, os.path.abspath(os.getcwd()))
+
+# This is a hack to overcome a bug in python
+from ooni.utils.hacks import patched_reduce_ex
+copy_reg._reduce_ex = patched_reduce_ex
+
+from ooni.bridget import spelunker
+spelunker.descend()
diff --git a/ooni/bridget/__init__.py b/ooni/bridget/__init__.py
new file mode 100644
index 0000000..4648d77
--- /dev/null
+++ b/ooni/bridget/__init__.py
@@ -0,0 +1,14 @@
+#-*- coding: utf-8 -*-
+
+#import os, sys
+#import copy_reg
+
+## Hack to set the proper sys.path. Overcomes the export PYTHONPATH pain.
+#sys.path[:] = map(os.path.abspath, sys.path)
+#sys.path.insert(0, os.path.abspath(os.getcwd()))
+
+## This is a hack to overcome a bug in python
+#from ooni.utils.hacks import patched_reduce_ex
+#copy_reg._reduce_ex = patched_reduce_ex
+
+__all__ = ['custodiet']
diff --git a/ooni/bridget/custodiet.py b/ooni/bridget/custodiet.py
new file mode 100755
index 0000000..8cbcfce
--- /dev/null
+++ b/ooni/bridget/custodiet.py
@@ -0,0 +1,421 @@
+#!/usr/bin/env python
+# -*- coding: UTF-8
+#
+# custodiet
+# *********
+#
+# "...quis custodiet ipsos custodes?"
+# - Juvenal, Satires VI.347-348 (circa 2nd Century, C.E.)
+#
+# "'Hand me the Custodian,' Goodchild demands, inserting the waiflike
+# robot into Bambara's opened navel. 'Providing conscience for those who
+# have none.' Goodchild and the other Breen government agents disappear
+# into the surrounding desert in a vehicle, kicking up cloud of white dust.
+# Bambara awakens, and, patting the dust from his clothing, turns to
+# greet a one-armed child. 'Hi, my name's Bambara; I'm a
+# thirty-six-year-old Virgo and a former killer, who's hobbies include
+# performing recreational autopsies, defecating, and drinking rum. I've
+# recently been given a conscience, and would very much like to help you.'
+# Cut to Bambara and the child, now with one of Bambara's arms, leaving
+# a surgical clinic."
+# - AeonFlux, "The Purge" (sometime in the late 90s)
+#
+# :copyright: (c) 2012 Isis Lovecruft
+# :license: see LICENSE for more details.
+# :version: 0.1.0-beta
+#
+
+# ooniprobe.py imports
+import sys
+from signal import SIGTERM, signal
+from pprint import pprint
+
+from twisted.python import usage
+from twisted.internet import reactor
+from twisted.plugin import getPlugins
+
+from zope.interface.verify import verifyObject
+from zope.interface.exceptions import BrokenImplementation
+from zope.interface.exceptions import BrokenMethodImplementation
+
+from ooni.bridget.tests import bridget
+from ooni.bridget.utils import log, tests, work, reports
+from ooni.bridget.utils.interface import ITest
+from ooni.utils.logo import getlogo
+
+# runner.py imports
+import os
+import types
+import time
+import inspect
+import yaml
+
+from twisted.internet import defer, reactor
+from twisted.python import reflect, failure, usage
+from twisted.python import log as tlog
+
+from twisted.trial import unittest
+from twisted.trial.runner import TrialRunner, TestLoader
+from twisted.trial.runner import isPackage, isTestCase, ErrorHolder
+from twisted.trial.runner import filenameToModule, _importFromFile
+
+from ooni import nettest
+from ooni.inputunit import InputUnitFactory
+from ooni.nettest import InputTestSuite
+from ooni.plugoo import tests as oonitests
+from ooni.reporter import ReporterFactory
+from ooni.utils import log, geodata, date
+from ooni.utils.legacy import LegacyOONITest
+from ooni.utils.legacy import start_legacy_test, adapt_legacy_test
+
+
+__version__ = "0.1.0-beta"
+
+
+#def retrieve_plugoo():
+# """
+# Get all the plugins that implement the ITest interface and get the data
+# associated to them into a dict.
+# """
+# interface = ITest
+# d = {}
+# error = False
+# for p in getPlugins(interface, plugins):
+# try:
+# verifyObject(interface, p)
+# d[p.shortName] = p
+# except BrokenImplementation, bi:
+# print "Plugin Broken"
+# print bi
+# error = True
+# if error != False:
+# print "Plugin Loaded!"
+# return d
+#
+#plugoo = retrieve_plugoo()
+
+"""
+
+ai to watch over which tests to run - custodiet
+
+ * runTest() or getPrefixMethodNames() to run the tests in order for each
+ test (esp. the tcp and icmp parts) to be oonicompat we should use the
+ test_icmp_ping API framework for those.
+
+ * should handle calling
+
+tests to run:
+ echo
+ syn
+ fin
+ conn
+ tls
+ tor
+need fakebridge - canary
+
+"""
+
+def runTest(test, options, global_options, reactor=reactor):
+ """
+ Run an OONI probe test by name.
+
+ @param test: a string specifying the test name as specified inside of
+ shortName.
+
+ @param options: the local options to be passed to the test.
+
+ @param global_options: the global options for OONI
+ """
+ parallelism = int(global_options['parallelism'])
+ worker = work.Worker(parallelism, reactor=reactor)
+ test_class = plugoo[test].__class__
+ report = reports.Report(test, global_options['output'])
+
+ log_to_stdout = True
+ if global_options['quiet']:
+ log_to_stdout = False
+
+ log.start(log_to_stdout,
+ global_options['log'],
+ global_options['verbosity'])
+
+ resume = 0
+ if not options:
+ options = {}
+ if 'resume' in options:
+ resume = options['resume']
+
+ test = test_class(options, global_options, report, reactor=reactor)
+ if test.tool:
+ test.runTool()
+ return True
+
+ if test.ended:
+ print "Ending test"
+ return None
+
+ wgen = work.WorkGenerator(test,
+ dict(options),
+ start=resume)
+ for x in wgen:
+ worker.push(x)
+
+class MainOptions(usage.Options):
+ tests = [bridget, ]
+ subCommands = []
+ for test in tests:
+ print test
+ testopt = getattr(test, 'options')
+ subCommands.append([test, None, testopt, "Run the %s test" % test])
+
+ optFlags = [
+ ['quiet', 'q', "Don't log to stdout"]
+ ]
+
+ optParameters = [
+ ['parallelism', 'n', 10, "Specify the number of parallel tests to run"],
+ #['target-node', 't', 'localhost:31415', 'Select target node'],
+ ['output', 'o', 'bridge.log', "Specify output report file"],
+ ['reportfile', 'o', 'bridge.log', "Specify output log file"],
+ ['verbosity', 'v', 1, "Specify the logging level"],
+ ]
+
+ def opt_version(self):
+ """
+ Display OONI version and exit.
+ """
+ print "OONI version:", __version__
+ sys.exit(0)
+
+ def __str__(self):
+ """
+ Hack to get the sweet ascii art into the help output and replace the
+ strings "Commands" with "Tests".
+ """
+ return getlogo() + '\n' + self.getSynopsis() + '\n' + \
+ self.getUsage(width=None).replace("Commands:", "Tests:")
+
+
+
+def isTestCase(thing):
+ try:
+ return issubclass(thing, unittest.TestCase)
+ except TypeError:
+ return False
+
+def isLegacyTest(obj):
+ """
+ Returns True if the test in question is written using the OONITest legacy
+ class.
+ We do this for backward compatibility of the OONIProbe API.
+ """
+ try:
+ if issubclass(obj, oonitests.OONITest) and not obj == oonitests.OONITest:
+ return True
+ else:
+ return False
+ except TypeError:
+ return False
+
+def processTest(obj, config):
+ """
+ Process the parameters and :class:`twisted.python.usage.Options` of a
+ :class:`ooni.nettest.Nettest`.
+
+ :param obj:
+ An uninstantiated old test, which should be a subclass of
+ :class:`ooni.plugoo.tests.OONITest`.
+ :param config:
+ A configured and instantiated :class:`twisted.python.usage.Options`
+ class.
+ """
+
+ inputFile = obj.inputFile
+
+ if obj.optParameters or inputFile:
+ if not obj.optParameters:
+ obj.optParameters = []
+
+ if inputFile:
+ obj.optParameters.append(inputFile)
+
+ class Options(usage.Options):
+ optParameters = obj.optParameters
+
+ options = Options()
+ options.parseOptions(config['subArgs'])
+ obj.localOptions = options
+
+ if inputFile:
+ obj.inputFile = options[inputFile[0]]
+ try:
+ tmp_obj = obj()
+ tmp_obj.getOptions()
+ except usage.UsageError:
+ options.opt_help()
+
+ return obj
+
+def findTestClassesFromConfig(config):
+ """
+ Takes as input the command line config parameters and returns the test
+ case classes.
+ If it detects that a certain test class is using the old OONIProbe format,
+ then it will adapt it to the new testing system.
+
+ :param config:
+ A configured and instantiated :class:`twisted.python.usage.Options`
+ class.
+ :return:
+ A list of class objects found in a file or module given on the
+ commandline.
+ """
+
+ filename = config['test']
+ classes = []
+
+ module = filenameToModule(filename)
+ for name, val in inspect.getmembers(module):
+ if isTestCase(val):
+ classes.append(processTest(val, config))
+ elif isLegacyTest(val):
+ classes.append(adapt_legacy_test(val, config))
+ return classes
+
+def makeTestCases(klass, tests, methodPrefix):
+ """
+ Takes a class some tests and returns the test cases. methodPrefix is how
+ the test case functions should be prefixed with.
+ """
+
+ cases = []
+ for test in tests:
+ cases.append(klass(methodPrefix+test))
+ return cases
+
+def loadTestsAndOptions(classes, config):
+ """
+ Takes a list of classes and returns their testcases and options.
+ Legacy tests will be adapted.
+ """
+
+ methodPrefix = 'test'
+ suiteFactory = InputTestSuite
+ options = []
+ testCases = []
+ names = []
+
+ _old_klass_type = LegacyOONITest
+
+ for klass in classes:
+ if isinstance(klass, _old_klass_type):
+ try:
+ cases = start_legacy_test(klass)
+ #cases.callback()
+ if cases:
+ print cases
+ return [], []
+ testCases.append(cases)
+ except Exception, e:
+ log.err(e)
+ else:
+ try:
+ opts = klass.local_options
+ options.append(opts)
+ except AttributeError, ae:
+ options.append([])
+ log.err(ae)
+ elif not isinstance(klass, _old_klass_type):
+ tests = reflect.prefixedMethodNames(klass, methodPrefix)
+ if tests:
+ cases = makeTestCases(klass, tests, methodPrefix)
+ testCases.append(cases)
+ try:
+ k = klass()
+ opts = k.getOptions()
+ options.append(opts)
+ except AttributeError, ae:
+ options.append([])
+ log.err(ae)
+ else:
+ try:
+ raise RuntimeError, "Class is some strange type!"
+ except RuntimeError, re:
+ log.err(re)
+
+ return testCases, options
+
+class ORunner(object):
+ """
+ This is a specialized runner used by the ooniprobe command line tool.
+ I am responsible for reading the inputs from the test files and splitting
+ them in input units. I also create all the report instances required to run
+ the tests.
+ """
+ def __init__(self, cases, options=None, config=None, *arg, **kw):
+ self.baseSuite = InputTestSuite
+ self.cases = cases
+ self.options = options
+
+ try:
+ assert len(options) != 0, "Length of options is zero!"
+ except AssertionError, ae:
+ self.inputs = []
+ log.err(ae)
+ else:
+ try:
+ first = options.pop(0)
+ except:
+ first = {}
+ if 'inputs' in first:
+ self.inputs = options['inputs']
+ else:
+ log.msg("Could not find inputs!")
+ log.msg("options[0] = %s" % first)
+ self.inputs = [None]
+
+ try:
+ reportFile = open(config['reportfile'], 'a+')
+ except:
+ filename = 'report_'+date.timestamp()+'.yaml'
+ reportFile = open(filename, 'a+')
+ self.reporterFactory = ReporterFactory(reportFile,
+ testSuite=self.baseSuite(self.cases))
+
+ def runWithInputUnit(self, inputUnit):
+ idx = 0
+ result = self.reporterFactory.create()
+
+ for inputs in inputUnit:
+ result.reporterFactory = self.reporterFactory
+
+ suite = self.baseSuite(self.cases)
+ suite.input = inputs
+ suite(result, idx)
+
+ # XXX refactor all of this index bullshit to avoid having to pass
+ # this index around. Probably what I want to do is go and make
+ # changes to report to support the concept of having multiple runs
+ # of the same test.
+ # We currently need to do this addition in order to get the number
+ # of times the test cases that have run inside of the test suite.
+ idx += (suite._idx - idx)
+
+ result.done()
+
+ def run(self):
+ self.reporterFactory.options = self.options
+ for inputUnit in InputUnitFactory(self.inputs):
+ self.runWithInputUnit(inputUnit)
+
+if __name__ == "__main__":
+ config = Options()
+ config.parseOptions()
+
+ if not config.subCommand:
+ config.opt_help()
+ signal(SIGTERM)
+ #sys.exit(1)
+
+ runTest(config.subCommand, config.subOptions, config)
+ reactor.run()
diff --git a/ooni/bridget/tests/__init__.py b/ooni/bridget/tests/__init__.py
new file mode 100644
index 0000000..9ecc88d
--- /dev/null
+++ b/ooni/bridget/tests/__init__.py
@@ -0,0 +1,14 @@
+# -*- coding: UTF-8
+#
+# bridget/tests/__init__.py
+# *************************
+#
+# "...quis custodiet ipsos custodes?"
+# - Juvenal, Satires VI.347-348 (circa 2nd Century, C.E.)
+#
+# :copyright: (c) 2012 Isis Lovecruft
+# :license: see LICENSE for more details.
+# :version: 0.1.0-beta
+#
+
+all = ['bridget']
diff --git a/ooni/bridget/tests/bridget.py b/ooni/bridget/tests/bridget.py
new file mode 100644
index 0000000..a334747
--- /dev/null
+++ b/ooni/bridget/tests/bridget.py
@@ -0,0 +1,499 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+#
+# +-----------+
+# | BRIDGET |
+# | +--------------------------------------------+
+# +--------| Use a Tor process to test making a Tor |
+# | connection to a list of bridges or relays. |
+# +--------------------------------------------+
+#
+# :authors: Isis Lovecruft, Arturo Filasto
+# :licence: see included LICENSE
+# :version: 0.1.0-alpha
+
+from __future__ import with_statement
+from functools import partial
+from random import randint
+
+import os
+import sys
+
+from twisted.python import usage
+from twisted.plugin import IPlugin
+from twisted.internet import defer, error, reactor
+from zope.interface import implements
+
+from ooni.utils import log, date
+from ooni.utils.config import ValueChecker
+
+from ooni.plugoo.tests import ITest, OONITest
+from ooni.plugoo.assets import Asset, MissingAssetException
+from ooni.utils.onion import TxtorconImportError
+from ooni.utils.onion import PTNoBridgesException, PTNotFoundException
+
+try:
+ from ooni.utils.onion import parse_data_dir
+except:
+ log.msg("Please go to /ooni/lib and do 'make txtorcon' to run this test!")
+
+class RandomPortException(Exception):
+ """Raised when using a random port conflicts with configured ports."""
+ def __init__(self):
+ log.msg("Unable to use random and specific ports simultaneously")
+ return sys.exit()
+
+class BridgetArgs(usage.Options):
+ """Commandline options."""
+ allowed = "Port to use for Tor's %s, must be between 1024 and 65535."
+ sock_check = ValueChecker(allowed % "SocksPort").port_check
+ ctrl_check = ValueChecker(allowed % "ControlPort").port_check
+
+ optParameters = [
+ ['bridges', 'b', None,
+ 'File listing bridge IP:ORPorts to test'],
+ ['relays', 'f', None,
+ 'File listing relay IPs to test'],
+ ['socks', 's', 9049, None, sock_check],
+ ['control', 'c', 9052, None, ctrl_check],
+ ['torpath', 'p', None,
+ 'Path to the Tor binary to use'],
+ ['datadir', 'd', None,
+ 'Tor DataDirectory to use'],
+ ['transport', 't', None,
+ 'Tor ClientTransportPlugin'],
+ ['resume', 'r', 0,
+ 'Resume at this index']]
+ optFlags = [['random', 'x', 'Use random ControlPort and SocksPort']]
+
+ def postOptions(self):
+ if not self['bridges'] and not self['relays']:
+ raise MissingAssetException(
+ "Bridget can't run without bridges or relays to test!")
+ if self['transport']:
+ ValueChecker.uid_check(
+ "Can't run bridget as root with pluggable transports!")
+ if not self['bridges']:
+ raise PTNoBridgesException
+ if self['socks'] or self['control']:
+ if self['random']:
+ raise RandomPortException
+ if self['datadir']:
+ ValueChecker.dir_check(self['datadir'])
+ if self['torpath']:
+ ValueChecker.file_check(self['torpath'])
+
+class BridgetAsset(Asset):
+ """Class for parsing bridget Assets ignoring commented out lines."""
+ def __init__(self, file=None):
+ self = Asset.__init__(self, file)
+
+ def parse_line(self, line):
+ if line.startswith('#'):
+ return
+ else:
+ return line.replace('\n','')
+
+class BridgetTest(OONITest):
+ """
+ XXX fill me in
+
+ :ivar config:
+ An :class:`ooni.lib.txtorcon.TorConfig` instance.
+ :ivar relays:
+ A list of all provided relays to test.
+ :ivar bridges:
+ A list of all provided bridges to test.
+ :ivar socks_port:
+ Integer for Tor's SocksPort.
+ :ivar control_port:
+ Integer for Tor's ControlPort.
+ :ivar transport:
+ String defining the Tor's ClientTransportPlugin, for testing
+ a bridge's pluggable transport functionality.
+ :ivar tor_binary:
+ Path to the Tor binary to use, e.g. \'/usr/sbin/tor\'
+ """
+ implements(IPlugin, ITest)
+
+ shortName = "bridget"
+ description = "Use a Tor process to test connecting to bridges or relays"
+ requirements = None
+ options = BridgetArgs
+ blocking = False
+
+ def initialize(self):
+ """
+ Extra initialization steps. We only want one child Tor process
+ running, so we need to deal with most of the TorConfig() only once,
+ before the experiment runs.
+ """
+ self.socks_port = 9049
+ self.control_port = 9052
+ self.circuit_timeout = 90
+ self.tor_binary = '/usr/sbin/tor'
+ self.data_directory = None
+
+ def __make_asset_list__(opt, lst):
+ log.msg("Loading information from %s ..." % opt)
+ with open(opt) as opt_file:
+ for line in opt_file.readlines():
+ if line.startswith('#'):
+ continue
+ else:
+ lst.append(line.replace('\n',''))
+
+ def __count_remaining__(which):
+ total, reach, unreach = map(lambda x: which[x],
+ ['all', 'reachable', 'unreachable'])
+ count = len(total) - reach() - unreach()
+ return count
+
+ ## XXX should we do report['bridges_up'].append(self.bridges['current'])
+ self.bridges = {}
+ self.bridges['all'], self.bridges['up'], self.bridges['down'] = \
+ ([] for i in range(3))
+ self.bridges['reachable'] = lambda: len(self.bridges['up'])
+ self.bridges['unreachable'] = lambda: len(self.bridges['down'])
+ self.bridges['remaining'] = lambda: __count_remaining__(self.bridges)
+ self.bridges['current'] = None
+ self.bridges['pt_type'] = None
+ self.bridges['use_pt'] = False
+
+ self.relays = {}
+ self.relays['all'], self.relays['up'], self.relays['down'] = \
+ ([] for i in range(3))
+ self.relays['reachable'] = lambda: len(self.relays['up'])
+ self.relays['unreachable'] = lambda: len(self.relays['down'])
+ self.relays['remaining'] = lambda: __count_remaining__(self.relays)
+ self.relays['current'] = None
+
+ if self.local_options:
+ try:
+ from ooni.lib.txtorcon import TorConfig
+ except ImportError:
+ raise TxtorconImportError
+ else:
+ self.config = TorConfig()
+ finally:
+ options = self.local_options
+
+ if options['bridges']:
+ self.config.UseBridges = 1
+ __make_asset_list__(options['bridges'], self.bridges['all'])
+ if options['relays']:
+ ## first hop must be in TorState().guards
+ self.config.EntryNodes = ','.join(relay_list)
+ __make_asset_list__(options['relays'], self.relays['all'])
+ if options['socks']:
+ self.socks_port = options['socks']
+ if options['control']:
+ self.control_port = options['control']
+ if options['random']:
+ log.msg("Using randomized ControlPort and SocksPort ...")
+ self.socks_port = randint(1024, 2**16)
+ self.control_port = randint(1024, 2**16)
+ if options['torpath']:
+ self.tor_binary = options['torpath']
+ if options['datadir']:
+ self.data_directory = parse_data_dir(options['datadir'])
+ if options['transport']:
+ ## ClientTransportPlugin transport exec pathtobinary [options]
+ ## XXX we need a better way to deal with all PTs
+ log.msg("Using ClientTransportPlugin %s" % options['transport'])
+ self.bridges['use_pt'] = True
+ [self.bridges['pt_type'], pt_exec] = \
+ options['transport'].split(' ', 1)
+
+ if self.bridges['pt_type'] == "obfs2":
+ self.config.ClientTransportPlugin = \
+ self.bridges['pt_type'] + " " + pt_exec
+ else:
+ raise PTNotFoundException
+
+ self.config.SocksPort = self.socks_port
+ self.config.ControlPort = self.control_port
+ self.config.CookieAuthentication = 1
+
+ def __load_assets__(self):
+ """
+ Load bridges and/or relays from files given in user options. Bridges
+ should be given in the form IP:ORport. We don't want to load these as
+ assets, because it's inefficient to start a Tor process for each one.
+
+ We cannot use the Asset model, because that model calls
+ self.experiment() with the current Assets, which would be one relay
+ and one bridge, then it gives the defer.Deferred returned from
+ self.experiment() to self.control(), which means that, for each
+ (bridge, relay) pair, experiment gets called again, which instantiates
+ an additional Tor process that attempts to bind to the same
+ ports. Thus, additionally instantiated Tor processes return with
+ RuntimeErrors, which break the final defer.chainDeferred.callback(),
+ sending it into the errback chain.
+ """
+ assets = {}
+ if self.local_options:
+ if self.local_options['bridges']:
+ assets.update({'bridge':
+ BridgetAsset(self.local_options['bridges'])})
+ if self.local_options['relays']:
+ assets.update({'relay':
+ BridgetAsset(self.local_options['relays'])})
+ return assets
+
+ def experiment(self, args):
+ """
+ if bridges:
+ 1. configure first bridge line
+ 2a. configure data_dir, if it doesn't exist
+ 2b. write torrc to a tempfile in data_dir
+ 3. start tor } if any of these
+ 4. remove bridges which are public relays } fail, add current
+ 5. SIGHUP for each bridge } bridge to unreach-
+ } able bridges.
+ if relays:
+ 1a. configure the data_dir, if it doesn't exist
+ 1b. write torrc to a tempfile in data_dir
+ 2. start tor
+ 3. remove any of our relays which are already part of current
+ circuits
+ 4a. attach CustomCircuit() to self.state
+ 4b. RELAY_EXTEND for each relay } if this fails, add
+ } current relay to list
+ } of unreachable relays
+ 5.
+ if bridges and relays:
+ 1. configure first bridge line
+ 2a. configure data_dir if it doesn't exist
+ 2b. write torrc to a tempfile in data_dir
+ 3. start tor
+ 4. remove bridges which are public relays
+ 5. remove any of our relays which are already part of current
+ circuits
+ 6a. attach CustomCircuit() to self.state
+ 6b. for each bridge, build three circuits, with three
+ relays each
+ 6c. RELAY_EXTEND for each relay } if this fails, add
+ } current relay to list
+ } of unreachable relays
+
+ :param args:
+ The :class:`BridgetAsset` line currently being used. Except that it
+ in Bridget it doesn't, so it should be ignored and avoided.
+ """
+ try:
+ from ooni.utils import process
+ from ooni.utils.onion import remove_public_relays, start_tor
+ from ooni.utils.onion import start_tor_filter_nodes
+ from ooni.utils.onion import setup_fail, setup_done
+ from ooni.utils.onion import CustomCircuit
+ from ooni.utils.timer import deferred_timeout, TimeoutError
+ from ooni.lib.txtorcon import TorConfig, TorState
+ except ImportError:
+ raise TxtorconImportError
+ except TxtorconImportError, tie:
+ log.err(tie)
+ sys.exit()
+
+ def reconfigure_done(state, bridges):
+ """
+ Append :ivar:`bridges['current']` to the list
+ :ivar:`bridges['up'].
+ """
+ log.msg("Reconfiguring with 'Bridge %s' successful"
+ % bridges['current'])
+ bridges['up'].append(bridges['current'])
+ return state
+
+ def reconfigure_fail(state, bridges):
+ """
+ Append :ivar:`bridges['current']` to the list
+ :ivar:`bridges['down'].
+ """
+ log.msg("Reconfiguring TorConfig with parameters %s failed"
+ % state)
+ bridges['down'].append(bridges['current'])
+ return state
+
+ @defer.inlineCallbacks
+ def reconfigure_bridge(state, bridges):
+ """
+ Rewrite the Bridge line in our torrc. If use of pluggable
+ transports was specified, rewrite the line as:
+ Bridge <transport_type> <IP>:<ORPort>
+ Otherwise, rewrite in the standard form:
+ Bridge <IP>:<ORPort>
+
+ :param state:
+ A fully bootstrapped instance of
+ :class:`ooni.lib.txtorcon.TorState`.
+ :param bridges:
+ A dictionary of bridges containing the following keys:
+
+ bridges['remaining'] :: A function returning and int for the
+ number of remaining bridges to test.
+ bridges['current'] :: A string containing the <IP>:<ORPort>
+ of the current bridge.
+ bridges['use_pt'] :: A boolean, True if we're testing
+ bridges with a pluggable transport;
+ False otherwise.
+ bridges['pt_type'] :: If :ivar:`bridges['use_pt'] is True,
+ this is a string containing the type
+ of pluggable transport to test.
+ :return:
+ :param:`state`
+ """
+ log.msg("Current Bridge: %s" % bridges['current'])
+ log.msg("We now have %d bridges remaining to test..."
+ % bridges['remaining']())
+ try:
+ if bridges['use_pt'] is False:
+ controller_response = yield state.protocol.set_conf(
+ 'Bridge', bridges['current'])
+ elif bridges['use_pt'] and bridges['pt_type'] is not None:
+ controller_reponse = yield state.protocol.set_conf(
+ 'Bridge', bridges['pt_type'] +' '+ bridges['current'])
+ else:
+ raise PTNotFoundException
+
+ if controller_response == 'OK':
+ finish = yield reconfigure_done(state, bridges)
+ else:
+ log.err("SETCONF for %s responded with error:\n %s"
+ % (bridges['current'], controller_response))
+ finish = yield reconfigure_fail(state, bridges)
+
+ defer.returnValue(finish)
+
+ except Exception, e:
+ log.err("Reconfiguring torrc with Bridge line %s failed:\n%s"
+ % (bridges['current'], e))
+ defer.returnValue(None)
+
+ def attacher_extend_circuit(attacher, deferred, router):
+ ## XXX todo write me
+ ## state.attacher.extend_circuit
+ raise NotImplemented
+ #attacher.extend_circuit
+
+ def state_attach(state, path):
+ log.msg("Setting up custom circuit builder...")
+ attacher = CustomCircuit(state)
+ state.set_attacher(attacher, reactor)
+ state.add_circuit_listener(attacher)
+ return state
+
+ ## OLD
+ #for circ in state.circuits.values():
+ # for relay in circ.path:
+ # try:
+ # relay_list.remove(relay)
+ # except KeyError:
+ # continue
+ ## XXX how do we attach to circuits with bridges?
+ d = defer.Deferred()
+ attacher.request_circuit_build(d)
+ return d
+
+ def state_attach_fail(state):
+ log.err("Attaching custom circuit builder failed: %s" % state)
+
+ log.msg("Bridget: initiating test ... ") ## Start the experiment
+
+ ## if we've at least one bridge, and our config has no 'Bridge' line
+ if self.bridges['remaining']() >= 1 \
+ and not 'Bridge' in self.config.config:
+
+ ## configure our first bridge line
+ self.bridges['current'] = self.bridges['all'][0]
+ self.config.Bridge = self.bridges['current']
+ ## avoid starting several
+ self.config.save() ## processes
+ assert self.config.config.has_key('Bridge'), "No Bridge Line"
+
+ ## start tor and remove bridges which are public relays
+ from ooni.utils.onion import start_tor_filter_nodes
+ state = start_tor_filter_nodes(reactor, self.config,
+ self.control_port, self.tor_binary,
+ self.data_directory, self.bridges)
+ #controller = defer.Deferred()
+ #controller.addCallback(singleton_semaphore, tor)
+ #controller.addErrback(setup_fail)
+ #bootstrap = defer.gatherResults([controller, filter_bridges],
+ # consumeErrors=True)
+
+ if state is not None:
+ log.debug("state:\n%s" % state)
+ log.debug("Current callbacks on TorState():\n%s"
+ % state.callbacks)
+
+ ## if we've got more bridges
+ if self.bridges['remaining']() >= 2:
+ #all = []
+ for bridge in self.bridges['all'][1:]:
+ self.bridges['current'] = bridge
+ #new = defer.Deferred()
+ #new.addCallback(reconfigure_bridge, state, self.bridges)
+ #all.append(new)
+ #check_remaining = defer.DeferredList(all, consumeErrors=True)
+ #state.chainDeferred(check_remaining)
+ state.addCallback(reconfigure_bridge, self.bridges)
+
+ if self.relays['remaining']() > 0:
+ while self.relays['remaining']() >= 3:
+ #path = list(self.relays.pop() for i in range(3))
+ #log.msg("Trying path %s" % '->'.join(map(lambda node:
+ # node, path)))
+ self.relays['current'] = self.relays['all'].pop()
+ for circ in state.circuits.values():
+ for node in circ.path:
+ if node == self.relays['current']:
+ self.relays['up'].append(self.relays['current'])
+ if len(circ.path) < 3:
+ try:
+ ext = attacher_extend_circuit(state.attacher, circ,
+ self.relays['current'])
+ ext.addCallback(attacher_extend_circuit_done,
+ state.attacher, circ,
+ self.relays['current'])
+ except Exception, e:
+ log.err("Extend circuit failed: %s" % e)
+ else:
+ continue
+
+ #state.callback(all)
+ #self.reactor.run()
+ return state
+
+ def startTest(self, args):
+ """
+ Local override of :meth:`OONITest.startTest` to bypass calling
+ self.control.
+
+ :param args:
+ The current line of :class:`Asset`, not used but kept for
+ compatibility reasons.
+ :return:
+ A fired deferred which callbacks :meth:`experiment` and
+ :meth:`OONITest.finished`.
+ """
+ self.start_time = date.now()
+ self.d = self.experiment(args)
+ self.d.addErrback(log.err)
+ self.d.addCallbacks(self.finished, log.err)
+ return self.d
+
+## So that getPlugins() can register the Test:
+#bridget = BridgetTest(None, None, None)
+
+## ISIS' NOTES
+## -----------
+## TODO:
+## x cleanup documentation
+## x add DataDirectory option
+## x check if bridges are public relays
+## o take bridge_desc file as input, also be able to give same
+## format as output
+## x Add asynchronous timeout for deferred, so that we don't wait
+## o Add assychronous timout for deferred, so that we don't wait
+## forever for bridges that don't work.
diff --git a/ooni/bridget/utils/__init__.py b/ooni/bridget/utils/__init__.py
new file mode 100644
index 0000000..92893d6
--- /dev/null
+++ b/ooni/bridget/utils/__init__.py
@@ -0,0 +1 @@
+all = ['inputs', 'log', 'onion', 'tests', 'interface', 'nodes', 'reports', 'work']
diff --git a/ooni/bridget/utils/inputs.py b/ooni/bridget/utils/inputs.py
new file mode 100644
index 0000000..fe058cc
--- /dev/null
+++ b/ooni/bridget/utils/inputs.py
@@ -0,0 +1,174 @@
+#-*- coding: utf-8 -*-
+#
+# inputs.py
+# *********
+#
+# "...quis custodiet ipsos custodes?"
+# - Juvenal, Satires VI.347-348 (circa 2nd Century, C.E.)
+#
+# :copyright: (c) 2012 Isis Lovecruft
+# :license: see LICENSE for more details.
+# :version: 0.1.0-beta
+#
+
+#from types import FunctionType, FileType
+import types
+
+from ooni.bridget import log
+from ooni.utils import date, Storage
+
+class InputFile:
+ """
+ This is a class describing a file used to store Tor bridge or relays
+ inputs. It is a python iterator object, allowing it to be efficiently
+ looped.
+
+ This class should not be used directly, but rather its subclasses,
+ BridgeFile and RelayFile should be used instead.
+ """
+
+ def __init__(self, file, **kw):
+ """
+ ## This is an InputAsset file, created because you tried to pass a
+ ## non-existent filename to a test.
+ ##
+ ## To use this file, place one input to be tested per line. Each
+ ## test takes different inputs. Lines which are commented out with
+ ## a '#' are not used.
+ """
+ self.file = file
+ self.eof = False
+ self.all = Storage()
+
+ for key, value in input_dict:
+ self.all[key] = value
+
+ try:
+ self.handler = open(self.file, 'r')
+ except IOError:
+ with open(self.file, 'w') as explain:
+ for line in self.__init__.__doc__:
+ explain.writeline(line)
+ self.handler = open(self.file, 'r')
+ try:
+ assert isinstance(self.handler, file), "That's not a file!"
+ except AssertionError, ae:
+ log.err(ae)
+
+ # def __handler__(self):
+ # """
+ # Attempt to open InputFile.file and check that it is actually a file.
+ # If it's not, create it and add an explaination for how InputFile files
+ # should be used.
+
+ # :return:
+ # A :type:`file` which has been opened in read-only mode.
+ # """
+ # try:
+ # handler = open(self.file, 'r')
+ # except IOError, ioerror: ## not the hacker <(A)3
+ # log.err(ioerror)
+ # explanation = (
+ # with open(self.file, 'w') as explain:
+ # for line in explanation:
+ # explain.writeline(line)
+ # handler = open(self.file, 'r')
+ # try:
+ # assert isinstance(handler, file), "That's not a file!"
+ # except AssertionError, ae:
+ # log.err(ae)
+ # else:
+ # return handler
+
+ def __iter__(next, StopIteration):
+ """
+ Returns the next input from the file.
+ """
+ #return self.next()
+ return self
+
+ def len(self):
+ """
+ Returns the number of the lines in the InputFile.
+ """
+ with open(self.file, 'r') as input_file:
+ lines = input_file.readlines()
+ for number, line in enumerate(lines):
+ self.input_dict[number] = line
+ return number + 1
+
+ def next(self):
+ try:
+ return self.next_input()
+ except:
+ raise StopIteration
+
+ def next_input(self):
+ """
+ Return the next input.
+ """
+ line = self.handler.readline()
+ if line:
+ parsed_line = self.parse_line(line)
+ if parsed_line:
+ return parsed_line
+ else:
+ self.fh.seek(0)
+ raise StopIteration
+
+ def default_parser(self, line):
+ """
+ xxx fill me in
+ """
+ if not line.startswith('#'):
+ return line.replace('\n', '')
+ else:
+ return False
+
+ def parse_line(self, line):
+ """
+ Override this method if you need line by line parsing of an Asset.
+
+ The default parsing action is to ignore lines which are commented out
+ with a '#', and to strip the newline character from the end of the
+ line.
+
+ If the line was commented out return an empty string instead.
+
+ If a subclass Foo incorporates another class Bar, when Bar is not
+ also a subclass of InputFile, and Bar.parse_line() exists, then
+ do not overwrite Bar's parse_line method.
+ """
+ assert not hasattr(super(InputFile, self), 'parse_line')
+
+ if self.parser is None:
+ if not line.startswith('#'):
+ return line.replace('\n', '')
+ else:
+ return ''
+ else:
+ try:
+ assert isinstance(self.parser, FunctionType),"Not a function!"
+ except AssertionError, ae:
+ log.err(ae)
+ else:
+ return self.parser(line)
+
+class BridgeFile(InputFile):
+ """
+ xxx fill me in
+ """
+ def __init__(self, **kw):
+ super(BridgeFile, self).__init__(**kw)
+
+class MissingInputException(Exception):
+ """
+
+ Raised when an :class:`InputFile` necessary for running the Test is
+ missing.
+
+ """
+ def __init__(self, error_message):
+ print error_message
+ import sys
+ return sys.exit()
diff --git a/ooni/bridget/utils/interface.py b/ooni/bridget/utils/interface.py
new file mode 100644
index 0000000..aa55436
--- /dev/null
+++ b/ooni/bridget/utils/interface.py
@@ -0,0 +1,54 @@
+from zope.interface import implements, Interface, Attribute
+
+class ITest(Interface):
+ """
+ This interface represents an OONI test. It fires a deferred on completion.
+ """
+
+ shortName = Attribute("""A short user facing description for this test""")
+ description = Attribute("""A string containing a longer description for the test""")
+
+ requirements = Attribute("""What is required to run this this test, for example raw socket access or UDP or TCP""")
+
+ options = Attribute("""These are the arguments to be passed to the test for it's execution""")
+
+ blocking = Attribute("""True or False, stating if the test should be run in a thread or not.""")
+
+ def control(experiment_result, args):
+ """
+ @param experiment_result: The result returned by the experiment method.
+
+ @param args: the keys of this dict are the names of the assets passed in
+ from load_assets. The value is one item of the asset.
+
+ Must return a dict containing what should be written to the report.
+ Anything returned by control ends up inside of the YAMLOONI report.
+ """
+
+ def experiment(args):
+ """
+ Perform all the operations that are necessary to running a test.
+
+ @param args: the keys of this dict are the names of the assets passed in
+ from load_assets. The value is one item of the asset.
+
+ Must return a dict containing the values to be passed to control.
+ """
+
+ def load_assets():
+ """
+ Load the assets that should be passed to the Test. These are the inputs
+ to the OONI test.
+ Must return a dict that has as keys the asset names and values the
+ asset contents.
+ If the test does not have any assets it should return an empty dict.
+ """
+
+ def end():
+ """
+ This can be called at any time to terminate the execution of all of
+ these test instances.
+
+ What this means is that no more test instances with new parameters will
+ be created. A report will be written.
+ """
diff --git a/ooni/bridget/utils/log.py b/ooni/bridget/utils/log.py
new file mode 100644
index 0000000..eef50d8
--- /dev/null
+++ b/ooni/bridget/utils/log.py
@@ -0,0 +1,98 @@
+"""
+OONI logging facility.
+"""
+from sys import stderr, stdout
+
+from twisted.python import log, util
+from twisted.python.failure import Failure
+
+def _get_log_level(level):
+ english = ['debug', 'info', 'warn', 'err', 'crit']
+
+ levels = dict(zip(range(len(english)), english))
+ number = dict(zip(english, range(len(english))))
+
+ if not level:
+ return number['info']
+ else:
+ ve = "Unknown log level: %s\n" % level
+ ve += "Allowed levels: %s\n" % [word for word in english]
+
+ if type(level) is int:
+ if 0 <= level <= 4:
+ return level
+ elif type(level) is str:
+ if number.has_key(level.lower()):
+ return number[level]
+ else:
+ raise ValueError, ve
+ else:
+ raise ValueError, ve
+
+class OONITestFailure(Failure):
+ """
+ For handling Exceptions asynchronously.
+
+ Can be given an Exception as an argument, else will use the
+ most recent Exception from the current stack frame.
+ """
+ def __init__(self, exception=None, _type=None,
+ _traceback=None, _capture=False):
+ Failure.__init__(self, exc_type=_type,
+ exc_tb=_traceback, captureVars=_capture)
+
+class OONILogObserver(log.FileLogObserver):
+ """
+ Supports logging level verbosity.
+ """
+ def __init__(self, logfile, verb=None):
+ log.FileLogObserver.__init__(self, logfile)
+ self.level = _get_log_level(verb) if verb is not None else 1
+ assert type(self.level) is int
+
+ def emit(self, eventDict):
+ if 'logLevel' in eventDict:
+ msgLvl = _get_log_level(eventDict['logLevel'])
+ assert type(msgLvl) is int
+ ## only log our level and higher
+ if self.level <= msgLvl:
+ text = log.textFromEventDict(eventDict)
+ else:
+ text = None
+ else:
+ text = log.textFromEventDict(eventDict)
+
+ if text is None:
+ return
+
+ timeStr = self.formatTime(eventDict['time'])
+ fmtDict = {'system': eventDict['system'],
+ 'text': text.replace('\n','\n\t')}
+ msgStr = log._safeFormat("[%(system)s] %(text)s\n", fmtDict)
+
+ util.untilConcludes(self.write, timeStr + " " + msgStr)
+ util.untilConcludes(self.flush)
+
+def start(logfile=None, verbosity=None):
+ if log.defaultObserver:
+ verbosity = _get_log_level(verbosity)
+
+ ## Always log to file, keep level at info
+ file = open(logfile, 'a') if logfile else stderr
+ OONILogObserver(file, "info").start()
+
+ log.msg("Starting OONI...")
+
+def debug(message, level="debug", **kw):
+ print "[%s] %s" % (level, message)
+ ## If we want debug messages in the logfile:
+ #log.msg(message, logLevel=level, **kw)
+
+def msg(message, level="info", **kw):
+ log.msg(message, logLevel=level, **kw)
+
+def err(message, level="err", **kw):
+ log.err(logLevel=level, **kw)
+
+def fail(message, exception, level="crit", **kw):
+ log.failure(message, OONITestFailure(exception, **kw), logLevel=level)
diff --git a/ooni/bridget/utils/nodes.py b/ooni/bridget/utils/nodes.py
new file mode 100644
index 0000000..155f183
--- /dev/null
+++ b/ooni/bridget/utils/nodes.py
@@ -0,0 +1,176 @@
+#!/usr/bin/env python
+# -*- coding: UTF-8
+"""
+ nodes
+ *****
+
+ This contains all the code related to Nodes
+ both network and code execution.
+
+ :copyright: (c) 2012 by Arturo Filastò, Isis Lovecruft
+ :license: see LICENSE for more details.
+
+"""
+
+import os
+from binascii import hexlify
+
+try:
+ import paramiko
+except:
+ print "Error: module paramiko is not installed."
+from pprint import pprint
+import sys
+import socks
+import xmlrpclib
+
+class Node(object):
+ def __init__(self, address, port):
+ self.address = address
+ self.port = port
+
+class LocalNode(object):
+ def __init__(self):
+ pass
+
+"""
+[]: node = NetworkNode("192.168.0.112", 5555, "SOCKS5")
+[]: node_socket = node.wrap_socket()
+"""
+class NetworkNode(Node):
+ def __init__(self, address, port, node_type="SOCKS5", auth_creds=None):
+ self.node = Node(address,port)
+
+ # XXX support for multiple types
+ # node type (SOCKS proxy, HTTP proxy, GRE tunnel, ...)
+ self.node_type = node_type
+ # type-specific authentication credentials
+ self.auth_creds = auth_creds
+
+ def _get_socksipy_socket(self, proxy_type, auth_creds):
+ import socks
+ s = socks.socksocket()
+ # auth_creds[0] -> username
+ # auth_creds[1] -> password
+ s.setproxy(proxy_type, self.node.address, self.node.port,
+ self.auth_creds[0], self.auth_creds[1])
+ return s
+
+ def _get_socket_wrapper(self):
+ if (self.node_type.startswith("SOCKS")): # SOCKS proxies
+ if (self.node_type != "SOCKS5"):
+ proxy_type = socks.PROXY_TYPE_SOCKS5
+ elif (self.node_type != "SOCKS4"):
+ proxy_type = socks.PROXY_TYPE_SOCKS4
+ else:
+ print "We don't know this proxy type."
+ sys.exit(1)
+
+ return self._get_socksipy_socket(proxy_type)
+ elif (self.node_type == "HTTP"): # HTTP proxies
+ return self._get_socksipy_socket(PROXY_TYPE_HTTP)
+ else: # Unknown proxies
+ print "We don't know this proxy type."
+ sys.exit(1)
+
+ def wrap_socket(self):
+ return self._get_socket_wrapper()
+
+class CodeExecNode(Node):
+ def __init__(self, address, port, node_type, auth_creds):
+ self.node = Node(address,port)
+
+ # node type (SSH proxy, etc.)
+ self.node_type = node_type
+ # type-specific authentication credentials
+ self.auth_creds = auth_creds
+
+ def add_unit(self):
+ pass
+
+ def get_status(self):
+ pass
+
+class PlanetLab(CodeExecNode):
+ def __init__(self, address, auth_creds, ooni):
+ self.auth_creds = auth_creds
+
+ self.config = ooni.utils.config
+ self.logger = ooni.logger
+ self.name = "PlanetLab"
+
+ def _api_auth(self):
+ api_server = xmlrpclib.ServerProxy('https://www.planet-lab.org/PLCAPI/')
+ auth = {}
+ ## should be changed to separate node.conf file
+ auth['Username'] = self.config.main.pl_username
+ auth['AuthString'] = self.config.main.pl_password
+ auth['AuthMethod'] = "password"
+ authorized = api_server.AuthCheck(auth)
+
+ if authorized:
+ print 'We are authorized!'
+ return auth
+ else:
+ print 'Authorization failed. Please check your settings for pl_username and pl_password in the ooni-probe.conf file.'
+
+ def _search_for_nodes(self, node_filter=None):
+ api_server = xmlrpclib.ServerProxy('https://www.planet-lab.org/PLCAPI/', allow_none=True)
+ node_filter = {'hostname': '*.cert.org.cn'}
+ return_fields = ['hostname', 'site_id']
+ all_nodes = api_server.GetNodes(self.api_auth(), node_filter, boot_state_filter)
+ pprint(all_nodes)
+ return all_nodes
+
+ def _add_nodes_to_slice(self):
+ api_server = xmlrpclib.ServerProxy('https://www.planet-lab.org/PLCAPI/', allow_none=True)
+ all_nodes = self.search_for_nodes()
+ for node in all_nodes:
+ api_server.AddNode(self.api_auth(), node['site_id'], all_nodes)
+ print 'Adding nodes %s' % node['hostname']
+
+ def _auth_login(slicename, machinename):
+ """Attempt to authenticate to the given PL node, slicename and
+ machinename, using any of the private keys in ~/.ssh/ """
+
+ agent = paramiko.Agent()
+ agent_keys = agent.get_keys()
+ if len(agent_keys) == 0:
+ return
+
+ for key in agent_keys:
+ print 'Trying ssh-agent key %s' % hexlify(key.get_fingerprint()),
+ try:
+ paramiko.transport.auth_publickey(machinename, slicename)
+ print 'Public key authentication to PlanetLab node %s successful.' % machinename,
+ return
+ except paramiko.SSHException:
+ print 'Public key authentication to PlanetLab node %s failed.' % machinename,
+
+ def _get_command():
+ pass
+
+ def ssh_and_run_(slicename, machinename, command):
+ """Attempt to make a standard OpenSSH client to PL node, and run
+ commands from a .conf file."""
+
+ ## needs a way to specify 'ssh -l <slicename> <machinename>'
+ ## with public key authentication.
+
+ command = PlanetLab.get_command()
+
+ client = paramiko.SSHClient()
+ client.load_system_host_keys()
+ client.connect(machinename)
+
+ stdin, stdout, stderr = client.exec_command(command)
+
+ def send_files_to_node(directory, files):
+ """Attempt to rsync a tree to the PL node."""
+ pass
+
+ def add_unit():
+ pass
+
+ def get_status():
+ pass
diff --git a/ooni/bridget/utils/onion.py b/ooni/bridget/utils/onion.py
new file mode 100644
index 0000000..9d4cae7
--- /dev/null
+++ b/ooni/bridget/utils/onion.py
@@ -0,0 +1,686 @@
+#
+# onion.py
+# ----------
+# Utilities for working with Tor.
+#
+# This code is largely taken from txtorcon and its documentation, and as such
+# any and all credit should go to Meejah. Minor adjustments have been made to
+# use OONI's logging system, and to build custom circuits without actually
+# attaching streams.
+#
+# :author: Meejah, Isis Lovecruft
+# :license: see included LICENSE file
+# :copyright: copyright (c) 2012 The Tor Project, Inc.
+# :version: 0.1.0-alpha
+#
+# XXX TODO add report keys for onion methods
+
+import random
+import sys
+
+from twisted.internet import defer
+from zope.interface import implements
+
+from ooni.lib.txtorcon import CircuitListenerMixin, IStreamAttacher
+from ooni.lib.txtorcon import TorState, TorConfig
+from ooni.utils import log
+from ooni.utils.timer import deferred_timeout, TimeoutError
+
+def parse_data_dir(data_dir):
+ """
+ Parse a string that a has been given as a DataDirectory and determine
+ its absolute path on the filesystem.
+
+ :param data_dir:
+ A directory for Tor's DataDirectory, to be parsed.
+ :return:
+ The absolute path of :param:data_dir.
+ """
+ from os import path, getcwd
+ import sys
+
+ try:
+ assert isinstance(data_dir, str), \
+ "Parameter type(data_dir) must be str"
+ except AssertionError, ae:
+ log.err(ae)
+
+ if data_dir.startswith('~'):
+ data_dir = path.expanduser(data_dir)
+ elif data_dir.startswith('/'):
+ data_dir = path.join(getcwd(), data_dir)
+ elif data_dir.startswith('./'):
+ data_dir = path.abspath(data_dir)
+ else:
+ data_dir = path.join(getcwd(), data_dir)
+
+ try:
+ assert path.isdir(data_dir), "Could not find %s" % data_dir
+ except AssertionError, ae:
+ log.err(ae)
+ sys.exit()
+ else:
+ return data_dir
+
+def write_torrc(conf, data_dir=None):
+ """
+ Create a torrc in our data_dir. If we don't yet have a data_dir, create a
+ temporary one. Any temporary files or folders are added to delete_list.
+
+ :param conf:
+ A :class:`ooni.lib.txtorcon.TorConfig` object, with all configuration
+ values saved.
+ :param data_dir:
+ The Tor DataDirectory to use.
+ :return: torrc, data_dir, delete_list
+ """
+ try:
+ from os import write, close
+ from tempfile import mkstemp, mkdtemp
+ except ImportError, ie:
+ log.err(ie)
+
+ delete_list = []
+
+ if data_dir is None:
+ data_dir = mkdtemp(prefix='bridget-tordata')
+ delete_list.append(data_dir)
+ conf.DataDirectory = data_dir
+
+ (fd, torrc) = mkstemp(dir=data_dir)
+ delete_list.append(torrc)
+ write(fd, conf.create_torrc())
+ close(fd)
+
+ return torrc, data_dir, delete_list
+
+def delete_files_or_dirs(delete_list):
+ """
+ Given a list of files or directories to delete, delete all and suppress
+ all errors.
+
+ :param delete_list:
+ A list of files or directories to delete.
+ """
+ try:
+ from os import unlink
+ from shutil import rmtree
+ except ImportError, ie:
+ log.err(ie)
+
+ for temp in delete_list:
+ try:
+ unlink(temp)
+ except OSError:
+ rmtree(temp, ignore_errors=True)
+
+def remove_node_from_list(node, list):
+ for item in list: ## bridges don't match completely
+ if item.startswith(node): ## due to the :<port>.
+ try:
+ log.msg("Removing %s because it is a public relay" % node)
+ list.remove(item)
+ except ValueError, ve:
+ log.err(ve)
+
+def remove_public_relays(state, bridges):
+ """
+ Remove bridges from our bridge list which are also listed as public
+ relays. This must be called after Tor has fully bootstrapped and we have a
+ :class:`ooni.lib.txtorcon.TorState` with the
+ :attr:`ooni.lib.txtorcon.TorState.routers` attribute assigned.
+
+ XXX Does state.router.values() have all of the relays in the consensus, or
+ just the ones we know about so far?
+
+ XXX FIXME: There is a problem in that Tor needs a Bridge line to already be
+ configured in order to bootstrap. However, after bootstrapping, we grab the
+ microdescriptors of all the relays and check if any of our bridges are
+ listed as public relays. Because of this, the first bridge does not get
+ checked for being a relay.
+ """
+ IPs = map(lambda addr: addr.split(':',1)[0], bridges['all'])
+ both = set(state.routers.values()).intersection(IPs)
+
+ if len(both) > 0:
+ try:
+ updated = map(lambda node: remove_node_from_list(node), both)
+ log.debug("Bridges in both: %s" % both)
+ log.debug("Updated = %s" % updated)
+ #if not updated:
+ # defer.returnValue(state)
+ #else:
+ # defer.returnValue(state)
+ return state
+ except Exception, e:
+ log.err("Removing public relays %s from bridge list failed:\n%s"
+ % (both, e))
+
+def setup_done(proto):
+ log.msg("Setup Complete")
+ state = TorState(proto.tor_protocol)
+ state.post_bootstrap.addCallback(state_complete)
+ state.post_bootstrap.addErrback(setup_fail)
+
+def setup_fail(proto):
+ log.msg("Setup Failed:\n%s" % proto)
+ return proto
+ #reactor.stop()
+
+def state_complete(state):
+ """Called when we've got a TorState."""
+ log.msg("We've completely booted up a Tor version %s at PID %d"
+ % (state.protocol.version, state.tor_pid))
+ log.msg("This Tor has the following %d Circuits:"
+ % len(state.circuits))
+ for circ in state.circuits.values():
+ log.msg("%s" % circ)
+ return state
+
+def updates(_progress, _tag, _summary):
+ """Log updates on the Tor bootstrapping process."""
+ log.msg("%d%%: %s" % (_progress, _summary))
+
+def bootstrap(ctrl):
+ """
+ Bootstrap Tor from an instance of
+ :class:`ooni.lib.txtorcon.TorControlProtocol`.
+ """
+ conf = TorConfig(ctrl)
+ conf.post_bootstrap.addCallback(setup_done).addErrback(setup_fail)
+ log.msg("Tor process connected, bootstrapping ...")
+
+def start_tor(reactor, config, control_port, tor_binary, data_dir,
+ report=None, progress=updates,
+ process_cb=None, process_eb=None):
+ """
+ Use a txtorcon.TorConfig() instance, config, to write a torrc to a
+ tempfile in our DataDirectory, data_dir. If data_dir is None, a temp
+ directory will be created. Finally, create a TCP4ClientEndpoint at our
+ control_port, and connect it to our reactor and a spawned Tor
+ process. Compare with :meth:`txtorcon.launch_tor` for differences.
+
+ :param reactor:
+ An instance of class:`twisted.internet.reactor`.
+ :param config:
+ An instance of class:`txtorcon.TorConfig` with all torrc options
+ already configured. ivar:`config.ControlPort`,
+ ivar:`config.SocksPort`, ivar:`config.CookieAuthentication`, should
+ already be set, as well as ivar:`config.UseBridges` and
+ ivar:`config.Bridge` if bridges are to be used.
+ ivar:`txtorcon.DataDirectory` does not need to be set.
+ :param control_port:
+ The port number to use for Tor's ControlPort.
+ :param tor_binary:
+ The full path to the Tor binary to use.
+ :param data_dir:
+ The directory to use as Tor's DataDirectory.
+ :param report:
+ The class:`ooni.plugoo.reports.Report` instance.
+ :param progress:
+ A non-blocking function to handle bootstrapping updates, which takes
+ three parameters: _progress, _tag, and _summary.
+ :param process_cb:
+ The function to callback to after
+ class:`ooni.lib.txtorcon.TorProcessProtocol` returns with the fully
+ bootstrapped Tor process.
+ :param process_eb:
+ The function to errback to if
+ class:`ooni.lib.txtorcon.TorProcessProtocol` fails.
+ :return:
+ The result of the callback of a
+ class:`ooni.lib.txtorcon.TorProcessProtocol` which callbacks with a
+ class:`txtorcon.TorControlProtocol` as .protocol.
+ """
+ try:
+ from functools import partial
+ from twisted.internet.endpoints import TCP4ClientEndpoint
+ from ooni.lib.txtorcon import TorProtocolFactory
+ from ooni.lib.txtorcon import TorProcessProtocol
+ except ImportError, ie:
+ log.err(ie)
+
+ ## TODO: add option to specify an already existing torrc, which
+ ## will require prior parsing to enforce necessary lines
+ (torrc, data_dir, to_delete) = write_torrc(config, data_dir)
+
+ log.msg("Starting Tor ...")
+ log.msg("Using the following as our torrc:\n%s" % config.create_torrc())
+ if report is None:
+ report = {'torrc': config.create_torrc()}
+ else:
+ report.update({'torrc': config.create_torrc()})
+
+ end_point = TCP4ClientEndpoint(reactor, 'localhost', control_port)
+ connection_creator = partial(end_point.connect, TorProtocolFactory())
+ process_protocol = TorProcessProtocol(connection_creator, progress)
+ process_protocol.to_delete = to_delete
+
+ if process_cb is not None and process_eb is not None:
+ process_protocol.connected_cb.addCallbacks(process_cb, process_eb)
+
+ reactor.addSystemEventTrigger('before', 'shutdown',
+ partial(delete_files_or_dirs, to_delete))
+ try:
+ transport = reactor.spawnProcess(process_protocol,
+ tor_binary,
+ args=(tor_binary,'-f',torrc),
+ env={'HOME': data_dir},
+ path=data_dir)
+ transport.closeStdin()
+ except RuntimeError, e:
+ log.err("Starting Tor failed:")
+ process_protocol.connected_cb.errback(e)
+ except NotImplementedError, e:
+ url = "http://starship.python.net/crew/mhammond/win32/Downloads.html"
+ log.msg("Running bridget on Windows requires pywin32: %s" % url)
+ process_protocol.connected_cb.errback(e)
+
+ return process_protocol.connected_cb
+
+@defer.inlineCallbacks
+def start_tor_filter_nodes(reactor, config, control_port, tor_binary,
+ data_dir, bridges):
+ """
+ Bootstrap a Tor process and return a fully-setup
+ :class:`ooni.lib.txtorcon.TorState`. Then search for our bridges
+ to test in the list of known public relays,
+ :ivar:`ooni.lib.txtorcon.TorState.routers`, and remove any bridges
+ which are known public relays.
+
+ :param reactor:
+ The :class:`twisted.internet.reactor`.
+ :param config:
+ An instance of :class:`ooni.lib.txtorcon.TorConfig`.
+ :param control_port:
+ The port to use for Tor's ControlPort. If already configured in
+ the TorConfig instance, this can be given as
+ TorConfig.config.ControlPort.
+ :param tor_binary:
+ The full path to the Tor binary to execute.
+ :param data_dir:
+ The full path to the directory to use as Tor's DataDirectory.
+ :param bridges:
+ A dictionary which has a key 'all' which is a list of bridges to
+ test connecting to, e.g.:
+ bridges['all'] = ['1.1.1.1:443', '22.22.22.22:9001']
+ :return:
+ A fully initialized :class:`ooni.lib.txtorcon.TorState`.
+ """
+ setup = yield start_tor(reactor, config, control_port,
+ tor_binary, data_dir,
+ process_cb=setup_done, process_eb=setup_fail)
+ filter_nodes = yield remove_public_relays(setup, bridges)
+ defer.returnValue(filter_nodes)
+
+@defer.inlineCallbacks
+def start_tor_with_timer(reactor, config, control_port, tor_binary, data_dir,
+ bridges, timeout):
+ """
+ Start bootstrapping a Tor process wrapped with an instance of the class
+ decorator :func:`ooni.utils.timer.deferred_timeout` and complete callbacks
+ to either :func:`setup_done` or :func:`setup_fail`. Return a fully-setup
+ :class:`ooni.lib.txtorcon.TorState`. Then search for our bridges to test
+ in the list of known public relays,
+ :ivar:`ooni.lib.txtorcon.TorState.routers`, and remove any bridges which
+ are listed as known public relays.
+
+ :param reactor:
+ The :class:`twisted.internet.reactor`.
+ :param config:
+ An instance of :class:`ooni.lib.txtorcon.TorConfig`.
+ :param control_port:
+ The port to use for Tor's ControlPort. If already configured in
+ the TorConfig instance, this can be given as
+ TorConfig.config.ControlPort.
+ :param tor_binary:
+ The full path to the Tor binary to execute.
+ :param data_dir:
+ The full path to the directory to use as Tor's DataDirectory.
+ :param bridges:
+ A dictionary which has a key 'all' which is a list of bridges to
+ test connecting to, e.g.:
+ bridges['all'] = ['1.1.1.1:443', '22.22.22.22:9001']
+ :param timeout:
+ The number of seconds to attempt to bootstrap the Tor process before
+ raising a :class:`ooni.utils.timer.TimeoutError`.
+ :return:
+ If the timeout limit is not exceeded, return a fully initialized
+ :class:`ooni.lib.txtorcon.TorState`, else return None.
+ """
+ error_msg = "Bootstrapping has exceeded the timeout limit..."
+ with_timeout = deferred_timeout(timeout, e=error_msg)(start_tor)
+ try:
+ setup = yield with_timeout(reactor, config, control_port, tor_binary,
+ data_dir, process_cb=setup_done,
+ process_eb=setup_fail)
+ except TimeoutError, te:
+ log.err(te)
+ defer.returnValue(None)
+ #except Exception, e:
+ # log.err(e)
+ # defer.returnValue(None)
+ else:
+ state = yield remove_public_relays(setup, bridges)
+ defer.returnValue(state)
+
+@defer.inlineCallbacks
+def start_tor_filter_nodes_with_timer(reactor, config, control_port,
+ tor_binary, data_dir, bridges, timeout):
+ """
+ Start bootstrapping a Tor process wrapped with an instance of the class
+ decorator :func:`ooni.utils.timer.deferred_timeout` and complete callbacks
+ to either :func:`setup_done` or :func:`setup_fail`. Then, filter our list
+ of bridges to remove known public relays by calling back to
+ :func:`remove_public_relays`. Return a fully-setup
+ :class:`ooni.lib.txtorcon.TorState`. Then search for our bridges to test
+ in the list of known public relays,
+ :ivar:`ooni.lib.txtorcon.TorState.routers`, and remove any bridges which
+ are listed as known public relays.
+
+ :param reactor:
+ The :class:`twisted.internet.reactor`.
+ :param config:
+ An instance of :class:`ooni.lib.txtorcon.TorConfig`.
+ :param control_port:
+ The port to use for Tor's ControlPort. If already configured in
+ the TorConfig instance, this can be given as
+ TorConfig.config.ControlPort.
+ :param tor_binary:
+ The full path to the Tor binary to execute.
+ :param data_dir:
+ The full path to the directory to use as Tor's DataDirectory.
+ :param bridges:
+ A dictionary which has a key 'all' which is a list of bridges to
+ test connecting to, e.g.:
+ bridges['all'] = ['1.1.1.1:443', '22.22.22.22:9001']
+ :param timeout:
+ The number of seconds to attempt to bootstrap the Tor process before
+ raising a :class:`ooni.utils.timer.TimeoutError`.
+ :return:
+ If the timeout limit is not exceeded, return a fully initialized
+ :class:`ooni.lib.txtorcon.TorState`, else return None.
+ """
+ error_msg = "Bootstrapping has exceeded the timeout limit..."
+ with_timeout = deferred_timeout(timeout, e=error_msg)(start_tor_filter_nodes)
+ try:
+ state = yield with_timeout(reactor, config, control_port,
+ tor_binary, data_dir, bridges)
+ except TimeoutError, te:
+ log.err(te)
+ defer.returnValue(None)
+ #except Exception, e:
+ # log.err(e)
+ # defer.returnValue(None)
+ else:
+ defer.returnValue(state)
+
+class CustomCircuit(CircuitListenerMixin):
+ """
+ Utility class for controlling circuit building. See
+ 'attach_streams_by_country.py' in the txtorcon documentation.
+
+ :param state:
+ A fully bootstrapped instance of :class:`ooni.lib.txtorcon.TorState`.
+ :param relays:
+ A dictionary containing a key 'all', which is a list of relays to
+ test connecting to.
+ :ivar waiting_circuits:
+ The list of circuits which we are waiting to attach to. You shouldn't
+ need to touch this.
+ """
+ implements(IStreamAttacher)
+
+ def __init__(self, state, relays=None):
+ self.state = state
+ self.waiting_circuits = []
+ self.relays = relays
+
+ def waiting_on(self, circuit):
+ """
+ Whether or not we are waiting on the given circuit before attaching to
+ it.
+
+ :param circuit:
+ An item from :ivar:`ooni.lib.txtorcon.TorState.circuits`.
+ :return:
+ True if we are waiting on the circuit, False if not waiting.
+ """
+ for (circid, d) in self.waiting_circuits:
+ if circuit.id == circid:
+ return True
+ return False
+
+ def circuit_extend(self, circuit, router):
+ "ICircuitListener"
+ if circuit.purpose != 'GENERAL':
+ return
+ if self.waiting_on(circuit):
+ log.msg("Circuit %d (%s)" % (circuit.id, router.id_hex))
+
+ def circuit_built(self, circuit):
+ "ICircuitListener"
+ if circuit.purpose != 'GENERAL':
+ return
+ log.msg("Circuit %s built ..." % circuit.id)
+ log.msg("Full path of %s: %s" % (circuit.id, circuit.path))
+ for (circid, d) in self.waiting_circuits:
+ if circid == circuit.id:
+ self.waiting_circuits.remove((circid, d))
+ d.callback(circuit)
+
+ def circuit_failed(self, circuit, reason):
+ """
+ If building a circuit has failed, try to remove it from our list of
+ :ivar:`waiting_circuits`, else request to build it.
+
+ :param circuit:
+ An item from :ivar:`ooni.lib.txtorcon.TorState.circuits`.
+ :param reason:
+ A :class:`twisted.python.fail.Failure` instance.
+ :return:
+ None
+ """
+ if self.waiting_on(circuit):
+ log.msg("Circuit %s failed for reason %s" % (circuit.id, reason))
+ circid, d = None, None
+ for c in self.waiting_circuits:
+ if c[0] == circuit.id:
+ circid, d = c
+ if d is None:
+ raise Exception("Expected to find circuit.")
+
+ self.waiting_circuits.remove((circid, d))
+ log.msg("Trying to build a circuit for %s" % circid)
+ self.request_circuit_build(d)
+
+ def check_circuit_route(self, router):
+ """
+ Check if a relay is a hop in one of our already built circuits.
+
+ :param router:
+ An item from the list
+ :func:`ooni.lib.txtorcon.TorState.routers.values()`.
+ """
+ for circ in self.state.circuits.values():
+ if router in circ.path:
+ #router.update() ## XXX can i use without args? no.
+ TorInfo.dump(self)
+
+ def request_circuit_build(self, deferred, path=None):
+ """
+ Request a custom circuit.
+
+ :param deferred:
+ A :class:`twisted.internet.defer.Deferred` for this circuit.
+ :param path:
+ A list of router ids to build a circuit from. The length of this
+ list must be at least three.
+ """
+ if path is None:
+
+ pick = self.relays['all'].pop
+ n = self.state.entry_guards.values()
+ choose = random.choice
+
+ first, middle, last = (None for i in range(3))
+
+ if self.relays['remaining']() >= 3:
+ first, middle, last = (pick() for i in range(3))
+ elif self.relays['remaining']() < 3:
+ first = choose(n)
+ middle = pick()
+ if self.relays['remaining'] == 2:
+ middle, last = (pick() for i in range(2))
+ elif self.relay['remaining'] == 1:
+ middle = pick()
+ last = choose(n)
+ else:
+ log.msg("Qu'est-que fuque?")
+ else:
+ middle, last = (random.choice(self.state.routers.values())
+ for i in range(2))
+
+ path = [first, middle, last]
+
+ else:
+ assert isinstance(path, list), \
+ "Circuit path must be a list of relays!"
+ assert len(path) >= 3, \
+ "Circuit path must be at least three hops!"
+
+ log.msg("Requesting a circuit: %s"
+ % '->'.join(map(lambda node: node, path)))
+
+ class AppendWaiting:
+ def __init__(self, attacher, deferred):
+ self.attacher = attacher
+ self.d = deferred
+ def __call__(self, circ):
+ """
+ Return from build_circuit is a Circuit, however,
+ we want to wait until it is built before we can
+ issue an attach on it and callback to the Deferred
+ we issue here.
+ """
+ log.msg("Circuit %s is in progress ..." % circ.id)
+ self.attacher.waiting_circuits.append((circ.id, self.d))
+
+ return self.state.build_circuit(path).addCallback(
+ AppendWaiting(self, deferred)).addErrback(
+ log.err)
+
+class TxtorconImportError(ImportError):
+ """
+ Raised when ooni.lib.txtorcon cannot be imported from. Checks our current
+ working directory and the path given to see if txtorcon has been
+ initialized via /ooni/lib/Makefile.
+ """
+ from os import getcwd, path
+
+ cwd, tx = getcwd(), 'lib/txtorcon/torconfig.py'
+ try:
+ log.msg("Unable to import from ooni.lib.txtorcon")
+ if cwd.endswith('ooni'):
+ check = path.join(cwd, tx)
+ elif cwd.endswith('utils'):
+ check = path.join(cwd, '../'+tx)
+ else:
+ check = path.join(cwd, 'ooni/'+tx)
+ assert path.isfile(check)
+ except:
+ log.msg("Error: Some OONI libraries are missing!")
+ log.msg("Please go to /ooni/lib/ and do \"make all\"")
+
+class PTNoBridgesException(Exception):
+ """Raised when a pluggable transport is specified, but not bridges."""
+ def __init__(self):
+ log.msg("Pluggable transport requires the bridges option")
+ return sys.exit()
+
+class PTNotFoundException(Exception):
+ def __init__(self, transport_type):
+ m = "Pluggable Transport type %s was unaccounted " % transport_type
+ m += "for, please contact isis(at)torproject(dot)org and it will "
+ m += "get included."
+ log.msg("%s" % m)
+ return sys.exit()
+
+@defer.inlineCallbacks
+def __start_tor_with_timer__(reactor, config, control_port, tor_binary,
+ data_dir, bridges=None, relays=None, timeout=None,
+ retry=None):
+ """
+ A wrapper for :func:`start_tor` which wraps the bootstrapping of a Tor
+ process and its connection to a reactor with a
+ :class:`twisted.internet.defer.Deferred` class decorator utility,
+ :func:`ooni.utils.timer.deferred_timeout`, and a mechanism for resets.
+
+ ## XXX fill me in
+ """
+ raise NotImplementedError
+
+ class RetryException(Exception):
+ pass
+
+ import sys
+ from ooni.utils.timer import deferred_timeout, TimeoutError
+
+ def __make_var__(old, default, _type):
+ if old is not None:
+ assert isinstance(old, _type)
+ new = old
+ else:
+ new = default
+ return new
+
+ reactor = reactor
+ timeout = __make_var__(timeout, 120, int)
+ retry = __make_var__(retry, 1, int)
+
+ with_timeout = deferred_timeout(timeout)(start_tor)
+
+ @defer.inlineCallbacks
+ def __start_tor__(rc=reactor, cf=config, cp=control_port, tb=tor_binary,
+ dd=data_dir, br=bridges, rl=relays, cb=setup_done,
+ eb=setup_fail, af=remove_public_relays, retry=retry):
+ try:
+ setup = yield with_timeout(rc,cf,cp,tb,dd)
+ except TimeoutError:
+ retry -= 1
+ defer.returnValue(retry)
+ else:
+ if setup.callback:
+ setup = yield cb(setup)
+ elif setup.errback:
+ setup = yield eb(setup)
+ else:
+ setup = setup
+
+ if br is not None:
+ state = af(setup,br)
+ else:
+ state = setup
+ defer.returnValue(state)
+
+ @defer.inlineCallbacks
+ def __try_until__(tries):
+ result = yield __start_tor__()
+ try:
+ assert isinstance(result, int)
+ except AssertionError:
+ defer.returnValue(result)
+ else:
+ if result >= 0:
+ tried = yield __try_until__(result)
+ defer.returnValue(tried)
+ else:
+ raise RetryException
+ try:
+ tried = yield __try_until__(retry)
+ except RetryException:
+ log.msg("All retry attempts to bootstrap Tor have timed out.")
+ log.msg("Exiting ...")
+ defer.returnValue(sys.exit())
+ else:
+ defer.returnValue(tried)
diff --git a/ooni/bridget/utils/reports.py b/ooni/bridget/utils/reports.py
new file mode 100644
index 0000000..ae67b13
--- /dev/null
+++ b/ooni/bridget/utils/reports.py
@@ -0,0 +1,144 @@
+from __future__ import with_statement
+
+import os
+import yaml
+
+import itertools
+from ooni.utils import log, date, net
+
+class Report:
+ """This is the ooni-probe reporting mechanism. It allows
+ reporting to multiple destinations and file formats.
+
+ :scp the string of <host>:<port> of an ssh server
+
+ :yaml the filename of a the yaml file to write
+
+ :file the filename of a simple txt file to write
+
+ :tcp the <host>:<port> of a TCP server that will just listen for
+ inbound connection and accept a stream of data (think of it
+ as a `nc -l -p <port> > filename.txt`)
+ """
+ def __init__(self, testname=None, file="report.log",
+ scp=None,
+ tcp=None):
+
+ self.testname = testname
+ self.file = file
+ self.tcp = tcp
+ self.scp = scp
+ #self.config = ooni.config.report
+
+ #if self.config.timestamp:
+ # tmp = self.file.split('.')
+ # self.file = '.'.join(tmp[:-1]) + "-" + \
+ # datetime.now().isoformat('-') + '.' + \
+ # tmp[-1]
+ # print self.file
+
+ self.scp = None
+ self.write_header()
+
+ def write_header(self):
+ pretty_date = date.pretty_date()
+ header = "# OONI Probe Report for Test %s\n" % self.testname
+ header += "# %s\n\n" % pretty_date
+ self._write_to_report(header)
+ # XXX replace this with something proper
+ address = net.getClientAddress()
+ test_details = {'start_time': str(date.now()),
+ 'asn': address['asn'],
+ 'test_name': self.testname,
+ 'addr': address['ip']}
+ self(test_details)
+
+ def _write_to_report(self, dump):
+ reports = []
+
+ if self.file:
+ reports.append("file")
+
+ if self.tcp:
+ reports.append("tcp")
+
+ if self.scp:
+ reports.append("scp")
+
+ #XXX make this non blocking
+ for report in reports:
+ self.send_report(dump, report)
+
+ def __call__(self, data):
+ """
+ This should be invoked every time you wish to write some
+ data to the reporting system
+ """
+ dump = yaml.dump([data])
+ self._write_to_report(dump)
+
+ def file_report(self, data):
+ """
+ This reports to a file in YAML format
+ """
+ with open(self.file, 'a+') as f:
+ f.write(data)
+
+ def send_report(self, data, type):
+ """
+ This sends the report using the
+ specified type.
+ """
+ #print "Reporting %s to %s" % (data, type)
+ log.msg("Reporting to %s" % type)
+ getattr(self, type+"_report").__call__(data)
+
+class NewReport(object):
+ filename = 'report.log'
+ startTime = None
+ endTime = None
+ testName = None
+ ipAddr = None
+ asnAddr = None
+
+ def _open():
+ self.fp = open(self.filename, 'a+')
+
+ @property
+ def header():
+ pretty_date = date.pretty_date()
+ report_header = "# OONI Probe Report for Test %s\n" % self.testName
+ report_header += "# %s\n\n" % pretty_date
+ test_details = {'start_time': self.startTime,
+ 'asn': asnAddr,
+ 'test_name': self.testName,
+ 'addr': ipAddr}
+ report_header += yaml.dump([test_details])
+ return report_header
+
+ def create():
+ """
+ Create a new report by writing its header.
+ """
+ self.fp = open(self.filename, 'w+')
+ self.fp.write(self.header)
+
+ def exists():
+ """
+ Returns False if the file does not exist.
+ """
+ return os.path.exists(self.filename)
+
+ def write(data):
+ """
+ Write a report to the file.
+
+ :data: python data structure to be written to report.
+ """
+ if not self.exists():
+ self.create()
+ else:
+ self._open()
+ yaml_encoded_data = yaml.dump([data])
+ self.fp.write(yaml_encoded_data)
+ self.fp.close()
diff --git a/ooni/bridget/utils/tests.py b/ooni/bridget/utils/tests.py
new file mode 100644
index 0000000..ea4be0b
--- /dev/null
+++ b/ooni/bridget/utils/tests.py
@@ -0,0 +1,141 @@
+import os
+import yaml
+from zope.interface import Interface, Attribute
+
+import logging
+import itertools
+from twisted.internet import reactor, defer, threads
+## XXX why is this imported and not used?
+from twisted.python import failure
+
+from ooni.utils import log, date
+from ooni.plugoo import assets, work
+from ooni.plugoo.reports import Report
+from ooni.plugoo.interface import ITest
+
+class OONITest(object):
+ """
+ This is the base class for writing OONI Tests.
+
+ It should be used in conjunction with the ITest Interface. It allows the
+ developer to benefit from OONIs reporting system and command line argument
+ parsing system.
+ """
+ name = "oonitest"
+ # By default we set this to False, meaning that we don't block
+ blocking = False
+ reactor = reactor
+ tool = False
+ ended = False
+
+ def __init__(self, local_options, global_options, report, ooninet=None,
+ reactor=reactor):
+ # These are the options that are read through the tests suboptions
+ self.local_options = local_options
+ # These are the options global to all of OONI
+ self.global_options = global_options
+ self.report = report
+ #self.ooninet = ooninet
+ self.reactor = reactor
+ self.result = {}
+ self.initialize()
+ self.assets = self.load_assets()
+
+ def initialize(self):
+ """
+ Override this method if you are interested in having some extra
+ behavior when your test class is instantiated.
+ """
+ pass
+
+ def load_assets(self):
+ """
+ This method should be overridden by the test writer to provide the
+ logic for loading their assets.
+ """
+ return {}
+
+ def __repr__(self):
+ return "<OONITest %s %s %s>" % (self.local_options,
+ self.global_options,
+ self.assets)
+
+ def end(self):
+ """
+ State that the current test should finish.
+ """
+ self.ended = True
+
+ def finished(self, return_value):
+ """
+ The Test has finished running, we must now calculate the test runtime
+ and add all time data to the report.
+ """
+ #self.ooninet.report(result)
+ self.end_time = date.now()
+ result = self.result
+ result['start_time'] = str(self.start_time)
+ result['end_time'] = str(self.end_time)
+ result['run_time'] = str(self.end_time - self.start_time)
+ result['return_value'] = return_value
+ log.msg("FINISHED %s" % result)
+ self.report(result)
+ return result
+
+ def _do_experiment(self, args):
+ """
+ A wrapper around the launch of experiment.
+ If we are running a blocking test experiment will be run in a thread if
+ not we expect it to return a Deferred.
+
+ @param args: the asset line(s) that we are working on.
+
+ returns a deferred.
+ """
+ if self.blocking:
+ self.d = threads.deferToThread(self.experiment, args)
+ else:
+ self.d = self.experiment(args)
+
+ self.d.addCallback(self.control, args)
+ self.d.addCallback(self.finished)
+ self.d.addErrback(self.finished)
+ return self.d
+
+ def control(self, result, args):
+ """
+ Run the control.
+
+ @param result: what was returned by experiment.
+
+ @param args: the asset(s) lines that we are working on.
+ """
+ log.msg("Doing control")
+ return result
+
+ def experiment(self, args):
+ """
+ Run the experiment. This sample implementation returns a deferred,
+ making it a non-blocking test.
+
+ @param args: the asset(s) lines that we are working on.
+ """
+ log.msg("Doing experiment")
+ d = defer.Deferred()
+ return d
+
+ def startTest(self, args):
+ """
+ This method is invoked by the worker to start the test with one line of
+ the asset file.
+
+ @param args: the asset(s) lines that we are working on.
+ """
+ self.start_time = date.now()
+
+ if self.shortName:
+ log.msg("Starting test %s" % self.shortName)
+ else:
+ log.msg("Starting test %s" % self.__class__)
+
+ return self._do_experiment(args)
diff --git a/ooni/bridget/utils/work.py b/ooni/bridget/utils/work.py
new file mode 100644
index 0000000..c329c20
--- /dev/null
+++ b/ooni/bridget/utils/work.py
@@ -0,0 +1,147 @@
+# -*- coding: UTF-8
+"""
+ work.py
+ **********
+
+ This contains all code related to generating
+ Units of Work and processing it.
+
+ :copyright: (c) 2012 by Arturo Filastò.
+ :license: see LICENSE for more details.
+
+"""
+import itertools
+import yaml
+from datetime import datetime
+
+from zope.interface import Interface, Attribute
+
+from twisted.python import failure
+from twisted.internet import reactor, defer
+
+class Worker(object):
+ """
+ This is the core of OONI. It takes as input Work Units and
+ runs them concurrently.
+ """
+ def __init__(self, maxconcurrent=10, reactor=reactor):
+ """
+ @param maxconcurrent: how many test instances should be run
+ concurrently.
+ """
+ self.reactor = reactor
+ self.maxconcurrent = maxconcurrent
+ self._running = 0
+ self._queued = []
+
+ def _run(self, r):
+ """
+ Check if we should start another test because we are below maximum
+ concurrency.
+
+ This function is called every time a test finishes running.
+
+ @param r: the return value of a previous test.
+ """
+ if self._running > 0:
+ self._running -= 1
+
+ if self._running < self.maxconcurrent and self._queued:
+ workunit, d = self._queued.pop(0)
+ asset, test, idx = workunit
+ while test.ended and workunit:
+ try:
+ workunit, d = self._queued.pop(0)
+ asset, test, idx = workunit
+ except:
+ workunit = None
+
+ if not test.ended:
+ self._running += 1
+ actuald = test.startTest(asset).addBoth(self._run)
+
+ if isinstance(r, failure.Failure):
+ # XXX probably we should be doing something to retry test running
+ r.trap()
+
+ if self._running == 0 and not self._queued:
+ self.reactor.stop()
+
+ return r
+
+ def push(self, workunit):
+ """
+ Add a test to the test queue and run it if we are not maxed out on
+ concurrency.
+
+ @param workunit: a tuple containing the (asset, test, idx), where asset
+ is the line of the asset(s) we are working on, test
+ is an instantiated test and idx is the index we are
+ currently at.
+ """
+ if self._running < self.maxconcurrent:
+ asset, test, idx = workunit
+ if not test.ended:
+ self._running += 1
+ return test.startTest(asset).addBoth(self._run)
+
+ d = defer.Deferred()
+ self._queued.append((workunit, d))
+ return d
+
+class WorkGenerator(object):
+ """
+ Factory responsible for creating units of work.
+
+ This shall be run on the machine running OONI-cli. The returned WorkUnits
+ can either be run locally or on a remote OONI Node or Network Node.
+ """
+ size = 10
+
+ def __init__(self, test, arguments=None, start=None):
+ self.Test = test
+
+ if self.Test.assets and self.Test.assets.values()[0]:
+ self.assetGenerator = itertools.product(*self.Test.assets.values())
+ else:
+ self.assetGenerator = None
+
+ self.assetNames = self.Test.assets.keys()
+
+ self.idx = 0
+ self.end = False
+ if start:
+ self.skip(start)
+
+ def __iter__(self):
+ return self
+
+ def skip(self, start):
+ """
+ Skip the first x number of lines of the asset.
+
+ @param start: int how many items we should skip.
+ """
+ for j in xrange(0, start-1):
+ for i in xrange(0, self.size):
+ self.assetGenerator.next()
+ self.idx += 1
+
+ def next(self):
+ if self.end:
+ raise StopIteration
+
+ if not self.assetGenerator:
+ self.end = True
+ return ({}, self.Test, self.idx)
+
+ try:
+ asset = self.assetGenerator.next()
+ ret = {}
+ for i, v in enumerate(asset):
+ ret[self.assetNames[i]] = v
+ except StopIteration:
+ raise StopIteration
+
+ self.idx += 1
+ return (ret, self.Test, self.idx)
diff --git a/ooni/plugins/bridget.py b/ooni/plugins/bridget.py
deleted file mode 100644
index 5ff7b3f..0000000
--- a/ooni/plugins/bridget.py
+++ /dev/null
@@ -1,500 +0,0 @@
-#!/usr/bin/env python
-# -*- encoding: utf-8 -*-
-#
-# +-----------+
-# | BRIDGET |
-# | +--------------------------------------------+
-# +--------| Use a Tor process to test making a Tor |
-# | connection to a list of bridges or relays. |
-# +--------------------------------------------+
-#
-# :authors: Isis Lovecruft, Arturo Filasto
-# :licence: see included LICENSE
-# :version: 0.1.0-alpha
-
-from __future__ import with_statement
-from functools import partial
-from random import randint
-
-import os
-import sys
-
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from twisted.internet import defer, error, reactor
-from zope.interface import implements
-
-from ooni.utils import log, date
-from ooni.utils.config import ValueChecker
-
-from ooni.plugoo.tests import ITest, OONITest
-from ooni.plugoo.assets import Asset, MissingAssetException
-from ooni.utils.onion import TxtorconImportError
-from ooni.utils.onion import PTNoBridgesException, PTNotFoundException
-
-try:
- from ooni.utils.onion import parse_data_dir
-except:
- log.msg("Please go to /ooni/lib and do 'make txtorcon' to run this test!")
-
-class RandomPortException(Exception):
- """Raised when using a random port conflicts with configured ports."""
- def __init__(self):
- log.msg("Unable to use random and specific ports simultaneously")
- return sys.exit()
-
-class BridgetArgs(usage.Options):
- """Commandline options."""
- allowed = "Port to use for Tor's %s, must be between 1024 and 65535."
- sock_check = ValueChecker(allowed % "SocksPort").port_check
- ctrl_check = ValueChecker(allowed % "ControlPort").port_check
-
- optParameters = [
- ['bridges', 'b', None,
- 'File listing bridge IP:ORPorts to test'],
- ['relays', 'f', None,
- 'File listing relay IPs to test'],
- ['socks', 's', 9049, None, sock_check],
- ['control', 'c', 9052, None, ctrl_check],
- ['torpath', 'p', None,
- 'Path to the Tor binary to use'],
- ['datadir', 'd', None,
- 'Tor DataDirectory to use'],
- ['transport', 't', None,
- 'Tor ClientTransportPlugin'],
- ['resume', 'r', 0,
- 'Resume at this index']]
- optFlags = [['random', 'x', 'Use random ControlPort and SocksPort']]
-
- def postOptions(self):
- if not self['bridges'] and not self['relays']:
- raise MissingAssetException(
- "Bridget can't run without bridges or relays to test!")
- if self['transport']:
- ValueChecker.uid_check(
- "Can't run bridget as root with pluggable transports!")
- if not self['bridges']:
- raise PTNoBridgesException
- if self['socks'] or self['control']:
- if self['random']:
- raise RandomPortException
- if self['datadir']:
- ValueChecker.dir_check(self['datadir'])
- if self['torpath']:
- ValueChecker.file_check(self['torpath'])
-
-class BridgetAsset(Asset):
- """Class for parsing bridget Assets ignoring commented out lines."""
- def __init__(self, file=None):
- self = Asset.__init__(self, file)
-
- def parse_line(self, line):
- if line.startswith('#'):
- return
- else:
- return line.replace('\n','')
-
-class BridgetTest(OONITest):
- """
- XXX fill me in
-
- :ivar config:
- An :class:`ooni.lib.txtorcon.TorConfig` instance.
- :ivar relays:
- A list of all provided relays to test.
- :ivar bridges:
- A list of all provided bridges to test.
- :ivar socks_port:
- Integer for Tor's SocksPort.
- :ivar control_port:
- Integer for Tor's ControlPort.
- :ivar transport:
- String defining the Tor's ClientTransportPlugin, for testing
- a bridge's pluggable transport functionality.
- :ivar tor_binary:
- Path to the Tor binary to use, e.g. \'/usr/sbin/tor\'
- """
- implements(IPlugin, ITest)
-
- shortName = "bridget"
- description = "Use a Tor process to test connecting to bridges or relays"
- requirements = None
- options = BridgetArgs
- blocking = False
-
- def initialize(self):
- """
- Extra initialization steps. We only want one child Tor process
- running, so we need to deal with most of the TorConfig() only once,
- before the experiment runs.
- """
- self.socks_port = 9049
- self.control_port = 9052
- self.circuit_timeout = 90
- self.tor_binary = '/usr/sbin/tor'
- self.data_directory = None
-
- def __make_asset_list__(opt, lst):
- log.msg("Loading information from %s ..." % opt)
- with open(opt) as opt_file:
- for line in opt_file.readlines():
- if line.startswith('#'):
- continue
- else:
- lst.append(line.replace('\n',''))
-
- def __count_remaining__(which):
- total, reach, unreach = map(lambda x: which[x],
- ['all', 'reachable', 'unreachable'])
- count = len(total) - reach() - unreach()
- return count
-
- ## XXX should we do report['bridges_up'].append(self.bridges['current'])
- self.bridges = {}
- self.bridges['all'], self.bridges['up'], self.bridges['down'] = \
- ([] for i in range(3))
- self.bridges['reachable'] = lambda: len(self.bridges['up'])
- self.bridges['unreachable'] = lambda: len(self.bridges['down'])
- self.bridges['remaining'] = lambda: __count_remaining__(self.bridges)
- self.bridges['current'] = None
- self.bridges['pt_type'] = None
- self.bridges['use_pt'] = False
-
- self.relays = {}
- self.relays['all'], self.relays['up'], self.relays['down'] = \
- ([] for i in range(3))
- self.relays['reachable'] = lambda: len(self.relays['up'])
- self.relays['unreachable'] = lambda: len(self.relays['down'])
- self.relays['remaining'] = lambda: __count_remaining__(self.relays)
- self.relays['current'] = None
-
- if self.local_options:
- try:
- from ooni.lib.txtorcon import TorConfig
- except ImportError:
- raise TxtorconImportError
- else:
- self.config = TorConfig()
- finally:
- options = self.local_options
-
- if options['bridges']:
- self.config.UseBridges = 1
- __make_asset_list__(options['bridges'], self.bridges['all'])
- if options['relays']:
- ## first hop must be in TorState().guards
- self.config.EntryNodes = ','.join(relay_list)
- __make_asset_list__(options['relays'], self.relays['all'])
- if options['socks']:
- self.socks_port = options['socks']
- if options['control']:
- self.control_port = options['control']
- if options['random']:
- log.msg("Using randomized ControlPort and SocksPort ...")
- self.socks_port = randint(1024, 2**16)
- self.control_port = randint(1024, 2**16)
- if options['torpath']:
- self.tor_binary = options['torpath']
- if options['datadir']:
- self.data_directory = parse_data_dir(options['datadir'])
- if options['transport']:
- ## ClientTransportPlugin transport exec pathtobinary [options]
- ## XXX we need a better way to deal with all PTs
- log.msg("Using ClientTransportPlugin %s" % options['transport'])
- self.bridges['use_pt'] = True
- [self.bridges['pt_type'], pt_exec] = \
- options['transport'].split(' ', 1)
-
- if self.bridges['pt_type'] == "obfs2":
- self.config.ClientTransportPlugin = \
- self.bridges['pt_type'] + " " + pt_exec
- else:
- raise PTNotFoundException
-
- self.config.SocksPort = self.socks_port
- self.config.ControlPort = self.control_port
- self.config.CookieAuthentication = 1
-
- def __load_assets__(self):
- """
- Load bridges and/or relays from files given in user options. Bridges
- should be given in the form IP:ORport. We don't want to load these as
- assets, because it's inefficient to start a Tor process for each one.
-
- We cannot use the Asset model, because that model calls
- self.experiment() with the current Assets, which would be one relay
- and one bridge, then it gives the defer.Deferred returned from
- self.experiment() to self.control(), which means that, for each
- (bridge, relay) pair, experiment gets called again, which instantiates
- an additional Tor process that attempts to bind to the same
- ports. Thus, additionally instantiated Tor processes return with
- RuntimeErrors, which break the final defer.chainDeferred.callback(),
- sending it into the errback chain.
- """
- assets = {}
- if self.local_options:
- if self.local_options['bridges']:
- assets.update({'bridge':
- BridgetAsset(self.local_options['bridges'])})
- if self.local_options['relays']:
- assets.update({'relay':
- BridgetAsset(self.local_options['relays'])})
- return assets
-
- def experiment(self, args):
- """
- if bridges:
- 1. configure first bridge line
- 2a. configure data_dir, if it doesn't exist
- 2b. write torrc to a tempfile in data_dir
- 3. start tor } if any of these
- 4. remove bridges which are public relays } fail, add current
- 5. SIGHUP for each bridge } bridge to unreach-
- } able bridges.
- if relays:
- 1a. configure the data_dir, if it doesn't exist
- 1b. write torrc to a tempfile in data_dir
- 2. start tor
- 3. remove any of our relays which are already part of current
- circuits
- 4a. attach CustomCircuit() to self.state
- 4b. RELAY_EXTEND for each relay } if this fails, add
- } current relay to list
- } of unreachable relays
- 5.
- if bridges and relays:
- 1. configure first bridge line
- 2a. configure data_dir if it doesn't exist
- 2b. write torrc to a tempfile in data_dir
- 3. start tor
- 4. remove bridges which are public relays
- 5. remove any of our relays which are already part of current
- circuits
- 6a. attach CustomCircuit() to self.state
- 6b. for each bridge, build three circuits, with three
- relays each
- 6c. RELAY_EXTEND for each relay } if this fails, add
- } current relay to list
- } of unreachable relays
-
- :param args:
- The :class:`BridgetAsset` line currently being used. Except that in
- Bridget it isn't actually used, so it should be ignored and avoided.
- """
- try:
- from ooni.utils import process
- from ooni.utils.onion import remove_public_relays, start_tor
- from ooni.utils.onion import start_tor_filter_nodes
- from ooni.utils.onion import setup_fail, setup_done
- from ooni.utils.onion import CustomCircuit
- from ooni.utils.timer import deferred_timeout, TimeoutError
- from ooni.lib.txtorcon import TorConfig, TorState
- except ImportError:
- raise TxtorconImportError
- except TxtorconImportError, tie:
- log.err(tie)
- sys.exit()
-
- def reconfigure_done(state, bridges):
- """
- Append :ivar:`bridges['current']` to the list
- :ivar:`bridges['up']`.
- """
- log.msg("Reconfiguring with 'Bridge %s' successful"
- % bridges['current'])
- bridges['up'].append(bridges['current'])
- return state
-
- def reconfigure_fail(state, bridges):
- """
- Append :ivar:`bridges['current']` to the list
- :ivar:`bridges['down']`.
- """
- log.msg("Reconfiguring TorConfig with parameters %s failed"
- % state)
- bridges['down'].append(bridges['current'])
- return state
-
- @defer.inlineCallbacks
- def reconfigure_bridge(state, bridges):
- """
- Rewrite the Bridge line in our torrc. If use of pluggable
- transports was specified, rewrite the line as:
- Bridge <transport_type> <IP>:<ORPort>
- Otherwise, rewrite in the standard form:
- Bridge <IP>:<ORPort>
-
- :param state:
- A fully bootstrapped instance of
- :class:`ooni.lib.txtorcon.TorState`.
- :param bridges:
- A dictionary of bridges containing the following keys:
-
- bridges['remaining'] :: A function returning and int for the
- number of remaining bridges to test.
- bridges['current'] :: A string containing the <IP>:<ORPort>
- of the current bridge.
- bridges['use_pt'] :: A boolean, True if we're testing
- bridges with a pluggable transport;
- False otherwise.
- bridges['pt_type'] :: If :ivar:`bridges['use_pt'] is True,
- this is a string containing the type
- of pluggable transport to test.
- :return:
- :param:`state`
- """
- log.msg("Current Bridge: %s" % bridges['current'])
- log.msg("We now have %d bridges remaining to test..."
- % bridges['remaining']())
- try:
- if bridges['use_pt'] is False:
- controller_response = yield state.protocol.set_conf(
- 'Bridge', bridges['current'])
- elif bridges['use_pt'] and bridges['pt_type'] is not None:
- controller_reponse = yield state.protocol.set_conf(
- 'Bridge', bridges['pt_type'] +' '+ bridges['current'])
- else:
- raise PTNotFoundException
-
- if controller_response == 'OK':
- finish = yield reconfigure_done(state, bridges)
- else:
- log.err("SETCONF for %s responded with error:\n %s"
- % (bridges['current'], controller_response))
- finish = yield reconfigure_fail(state, bridges)
-
- defer.returnValue(finish)
-
- except Exception, e:
- log.err("Reconfiguring torrc with Bridge line %s failed:\n%s"
- % (bridges['current'], e))
- defer.returnValue(None)
-
- def attacher_extend_circuit(attacher, deferred, router):
- ## XXX todo write me
- ## state.attacher.extend_circuit
- raise NotImplemented
- #attacher.extend_circuit
-
- def state_attach(state, path):
- log.msg("Setting up custom circuit builder...")
- attacher = CustomCircuit(state)
- state.set_attacher(attacher, reactor)
- state.add_circuit_listener(attacher)
- return state
-
- ## OLD
- #for circ in state.circuits.values():
- # for relay in circ.path:
- # try:
- # relay_list.remove(relay)
- # except KeyError:
- # continue
- ## XXX how do we attach to circuits with bridges?
- d = defer.Deferred()
- attacher.request_circuit_build(d)
- return d
-
- def state_attach_fail(state):
- log.err("Attaching custom circuit builder failed: %s" % state)
-
- log.msg("Bridget: initiating test ... ") ## Start the experiment
-
- ## if we've at least one bridge, and our config has no 'Bridge' line
- if self.bridges['remaining']() >= 1 \
- and not 'Bridge' in self.config.config:
-
- ## configure our first bridge line
- self.bridges['current'] = self.bridges['all'][0]
- self.config.Bridge = self.bridges['current']
- ## avoid starting several
- self.config.save() ## processes
- assert self.config.config.has_key('Bridge'), "No Bridge Line"
-
- ## start tor and remove bridges which are public relays
- from ooni.utils.onion import start_tor_filter_nodes
- state = start_tor_filter_nodes(reactor, self.config,
- self.control_port, self.tor_binary,
- self.data_directory, self.bridges)
- #controller = defer.Deferred()
- #controller.addCallback(singleton_semaphore, tor)
- #controller.addErrback(setup_fail)
- #bootstrap = defer.gatherResults([controller, filter_bridges],
- # consumeErrors=True)
-
- if state is not None:
- log.debug("state:\n%s" % state)
- log.debug("Current callbacks on TorState():\n%s"
- % state.callbacks)
-
- ## if we've got more bridges
- if self.bridges['remaining']() >= 2:
- #all = []
- for bridge in self.bridges['all'][1:]:
- self.bridges['current'] = bridge
- #new = defer.Deferred()
- #new.addCallback(reconfigure_bridge, state, self.bridges)
- #all.append(new)
- #check_remaining = defer.DeferredList(all, consumeErrors=True)
- #state.chainDeferred(check_remaining)
- state.addCallback(reconfigure_bridge, self.bridges)
-
- if self.relays['remaining']() > 0:
- while self.relays['remaining']() >= 3:
- #path = list(self.relays.pop() for i in range(3))
- #log.msg("Trying path %s" % '->'.join(map(lambda node:
- # node, path)))
- self.relays['current'] = self.relays['all'].pop()
- for circ in state.circuits.values():
- for node in circ.path:
- if node == self.relays['current']:
- self.relays['up'].append(self.relays['current'])
- if len(circ.path) < 3:
- try:
- ext = attacher_extend_circuit(state.attacher, circ,
- self.relays['current'])
- ext.addCallback(attacher_extend_circuit_done,
- state.attacher, circ,
- self.relays['current'])
- except Exception, e:
- log.err("Extend circuit failed: %s" % e)
- else:
- continue
-
- #state.callback(all)
- #self.reactor.run()
- return state
-
- def startTest(self, args):
- """
- Local override of :meth:`OONITest.startTest` to bypass calling
- self.control.
-
- :param args:
- The current line of :class:`Asset`, not used but kept for
- compatibility reasons.
- :return:
- A fired deferred which callbacks :meth:`experiment` and
- :meth:`OONITest.finished`.
- """
- self.start_time = date.now()
- self.d = self.experiment(args)
- self.d.addErrback(log.err)
- self.d.addCallbacks(self.finished, log.err)
- return self.d
-
-## So that getPlugins() can register the Test:
-#bridget = BridgetTest(None, None, None)
-
-
-## ISIS' NOTES
-## -----------
-## TODO:
-## x cleanup documentation
-## x add DataDirectory option
-## x check if bridges are public relays
-## o take bridge_desc file as input, also be able to give same
-## format as output
-## x Add asynchronous timeout for deferred, so that we don't wait
-## o Add assychronous timout for deferred, so that we don't wait
-## forever for bridges that don't work.
1
0
[ooni-probe/master] * Keeping bridge thing in the ooni/bridget directory. Some of these are
by isis@torproject.org 03 Nov '12
by isis@torproject.org 03 Nov '12
03 Nov '12
commit d257a577cdc72967076a49784deba2468057d7fb
Author: Isis Lovecruft <isis(a)torproject.org>
Date: Fri Nov 2 10:15:05 2012 +0000
* Keeping bridge thing in the ooni/bridget directory. Some of these are
generic enough that they should be in nettests, but I remember something
about git hating on symlinks...
---
nettests/core/echo.py | 205 -----------------------------------
ooni/bridget/tests/echo.py | 205 +++++++++++++++++++++++++++++++++++
ooni/bridget/tests/tls-handshake.py | 32 ++++++
ooni/tls-handshake.py | 32 ------
4 files changed, 237 insertions(+), 237 deletions(-)
diff --git a/nettests/core/echo.py b/nettests/core/echo.py
deleted file mode 100644
index a0826b6..0000000
--- a/nettests/core/echo.py
+++ /dev/null
@@ -1,205 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-#
-# +---------+
-# | echo.py |
-# +---------+
-# A simple ICMP-8 ping test.
-#
-# :author: Isis Lovecruft
-# :version: 0.0.1-pre-alpha
-# :license: (c) 2012 Isis Lovecruft
-# see attached LICENCE file
-#
-
-import os
-import sys
-
-from pprint import pprint
-
-from twisted.internet import reactor
-from twisted.plugin import IPlugin
-from twisted.python import usage
-from ooni.nettest import TestCase
-from ooni.utils import log, Storage
-from ooni.utils.net import PermissionsError, IfaceError
-
-try:
- from scapy.all import sr1, IP, ICMP ## XXX v4/v6?
- from ooni.lib import txscapy
- from ooni.lib.txscapy import txsr, txsend
- from ooni.templates.scapyt import ScapyTest
-except:
- log.msg("This test requires scapy, see www.secdev.org/projects/scapy")
-
-## xxx TODO: move these to a utility function for determining OSes
-LINUX=sys.platform.startswith("linux")
-OPENBSD=sys.platform.startswith("openbsd")
-FREEBSD=sys.platform.startswith("freebsd")
-NETBSD=sys.platform.startswith("netbsd")
-DARWIN=sys.platform.startswith("darwin")
-SOLARIS=sys.platform.startswith("sunos")
-WINDOWS=sys.platform.startswith("win32")
-
-class EchoTest(ScapyTest):
- """
- xxx fill me in
- """
- name = 'echo'
- author = 'Isis Lovecruft <isis(a)torproject.org>'
- description = 'A simple ICMP-8 test to see if a host is reachable.'
- version = '0.0.1'
- inputFile = ['file', 'f', None, 'File of list of IPs to ping']
- requirements = None
- report = Storage()
-
- optParameters = [
- ['interface', 'i', None, 'Network interface to use'],
- ['count', 'c', 5, 'Number of packets to send', int],
- ['size', 's', 56, 'Number of bytes to send in ICMP data field', int],
- ['ttl', 'l', 25, 'Set the IP Time to Live', int],
- ['timeout', 't', 2, 'Seconds until timeout if no response', int],
- ['pcap', 'p', None, 'Save pcap to this file'],
- ['receive', 'r', True, 'Receive response packets']
- ]
-
- def setUpClass(self, *a, **kw):
- '''
- :ivar ifaces:
- Struct returned from getifaddrs(3) and turned into a tuple in the
- form (*ifa_name, AF_FAMILY, *ifa_addr)
- '''
- super(EchoTest, self).__init__(*a, **kw)
-
- ## allow subclasses which register/implement external classes
- ## to define their own reactor without overrides:
- if not hasattr(super(EchoTest, self), 'reactor'):
- log.debug("%s test: Didn't find reactor!" % self.name)
- self.reactor = reactor
-
- if self.localOptions:
- log.debug("%s localOptions found" % self.name)
- log.debug("%s test options: %s" % (self.name, self.subOptions))
- self.local_options = self.localOptions.parseOptions(self.subOptions)
- for key, value in self.local_options:
- log.debug("Set attribute %s[%s] = %s" % (self.name, key, value))
- setattr(self, key, value)
-
- ## xxx is this now .subOptions?
- #self.inputFile = self.localOptions['file']
- self.timeout *= 1000 ## convert to milliseconds
-
- if not self.interface:
- log.msg("No network interface specified!")
- log.debug("OS detected: %s" % sys.platform)
- if LINUX or OPENBSD or NETBSD or FREEBSD or DARWIN or SOLARIS:
- from twisted.internet.test import _posixifaces
- log.msg("Attempting to discover network interfaces...")
- ifaces = _posixifaces._interfaces()
- elif WINDOWS:
- from twisted.internet.test import _win32ifaces
- log.msg("Attempting to discover network interfaces...")
- ifaces = _win32ifaces._interfaces()
- else:
- log.debug("Client OS %s not accounted for!" % sys.platform)
- log.debug("Unable to discover network interfaces...")
- ifaces = [('lo', '')]
-
- ## found = {'eth0': '1.1.1.1'}
- found = [{i[0]: i[2]} for i in ifaces if i[0] != 'lo']
- log.info("Found interfaces:\n%s" % pprint(found))
- self.interfaces = self.tryInterfaces(found)
- else:
- ## xxx need a way to check that iface exists, is up, and
- ## we have permissions on it
- log.debug("Our interface has been set to %s" % self.interface)
-
- if self.pcap:
- try:
- self.pcapfile = open(self.pcap, 'a+')
- except:
- log.msg("Unable to write to pcap file %s" % self.pcap)
- self.pcapfile = None
-
- try:
- assert os.path.isfile(self.file)
- fp = open(self.file, 'r')
- except Exception, e:
- hosts = ['8.8.8.8', '38.229.72.14']
- log.err(e)
- else:
- self.inputs = self.inputProcessor(fp)
- self.removePorts(hosts)
-
- log.debug("Initialization of %s test completed with:\n%s"
- % (self.name, ''.join(self.__dict__)))
-
- @staticmethod
- def inputParser(inputs):
- log.debug("Removing possible ports from host addresses...")
- log.debug("Initial inputs:\n%s" % pprint(inputs))
-
- assert isinstance(inputs, list)
- hosts = [h.rsplit(':', 1)[0] for h in inputs]
- log.debug("Inputs converted to:\n%s" % hosts)
-
- return hosts
-
- def tryInterfaces(self, ifaces):
- try:
- from scapy.all import sr1 ## we want this check to be blocking
- except:
- log.msg("This test requires scapy: www.secdev.org/projects/scapy")
- raise SystemExit
-
- ifup = {}
- while ifaces:
- for ifname, ifaddr in ifaces:
- log.debug("Currently testing network capabilities of interface"
- + "%s by sending a packet to our address %s"
- % (ifname, ifaddr))
- try:
- pkt = IP(dst=ifaddr)/ICMP()
- ans, unans = sr(pkt, iface=ifname, timeout=self.timeout)
- except Exception, e:
- raise PermissionsError if e.find("Errno 1") else log.err(e)
- else:
- ## xxx i think this logic might be wrong
- log.debug("Interface test packet\n%s\n\n%s"
- % (pkt.summary(), pkt.show2()))
- if ans.summary():
- log.info("Received answer for test packet on interface"
- +"%s :\n%s" % (ifname, ans.summary()))
- ifup.update(ifname, ifaddr)
- else:
- log.info("Our interface test packet was unanswered:\n%s"
- % unans.summary())
-
- if len(ifup) > 0:
- log.msg("Discovered the following working network interfaces: %s"
- % ifup)
- return ifup
- else:
- raise IfaceError("Could not find a working network interface.")
-
- def buildPackets(self):
- log.debug("self.input is %s" % self.input)
- log.debug("self.hosts is %s" % self.hosts)
- for addr in self.input:
- packet = IP(dst=self.input)/ICMP()
- self.request.append(packet)
- return packet
-
- def test_icmp(self):
- if self.recieve:
- self.buildPackets()
- all = []
- for packet in self.request:
- d = self.sendReceivePackets(packets=packet)
- all.append(d)
- self.response.update({packet: d})
- d_list = defer.DeferredList(all)
- return d_list
- else:
- d = self.sendPackets()
- return d
diff --git a/ooni/bridget/tests/echo.py b/ooni/bridget/tests/echo.py
new file mode 100644
index 0000000..a0826b6
--- /dev/null
+++ b/ooni/bridget/tests/echo.py
@@ -0,0 +1,205 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# +---------+
+# | echo.py |
+# +---------+
+# A simple ICMP-8 ping test.
+#
+# :author: Isis Lovecruft
+# :version: 0.0.1-pre-alpha
+# :license: (c) 2012 Isis Lovecruft
+# see attached LICENCE file
+#
+
+import os
+import sys
+
+from pprint import pprint
+
+from twisted.internet import reactor
+from twisted.plugin import IPlugin
+from twisted.python import usage
+from ooni.nettest import TestCase
+from ooni.utils import log, Storage
+from ooni.utils.net import PermissionsError, IfaceError
+
+try:
+ from scapy.all import sr1, IP, ICMP ## XXX v4/v6?
+ from ooni.lib import txscapy
+ from ooni.lib.txscapy import txsr, txsend
+ from ooni.templates.scapyt import ScapyTest
+except:
+ log.msg("This test requires scapy, see www.secdev.org/projects/scapy")
+
+## xxx TODO: move these to a utility function for determining OSes
+LINUX=sys.platform.startswith("linux")
+OPENBSD=sys.platform.startswith("openbsd")
+FREEBSD=sys.platform.startswith("freebsd")
+NETBSD=sys.platform.startswith("netbsd")
+DARWIN=sys.platform.startswith("darwin")
+SOLARIS=sys.platform.startswith("sunos")
+WINDOWS=sys.platform.startswith("win32")
+
+class EchoTest(ScapyTest):
+ """
+ xxx fill me in
+ """
+ name = 'echo'
+ author = 'Isis Lovecruft <isis(a)torproject.org>'
+ description = 'A simple ICMP-8 test to see if a host is reachable.'
+ version = '0.0.1'
+ inputFile = ['file', 'f', None, 'File of list of IPs to ping']
+ requirements = None
+ report = Storage()
+
+ optParameters = [
+ ['interface', 'i', None, 'Network interface to use'],
+ ['count', 'c', 5, 'Number of packets to send', int],
+ ['size', 's', 56, 'Number of bytes to send in ICMP data field', int],
+ ['ttl', 'l', 25, 'Set the IP Time to Live', int],
+ ['timeout', 't', 2, 'Seconds until timeout if no response', int],
+ ['pcap', 'p', None, 'Save pcap to this file'],
+ ['receive', 'r', True, 'Receive response packets']
+ ]
+
+ def setUpClass(self, *a, **kw):
+ '''
+ :ivar ifaces:
+ Struct returned from getifaddrs(3) and turned into a tuple in the
+ form (*ifa_name, AF_FAMILY, *ifa_addr)
+ '''
+ super(EchoTest, self).__init__(*a, **kw)
+
+ ## allow subclasses which register/implement external classes
+ ## to define their own reactor without overrides:
+ if not hasattr(super(EchoTest, self), 'reactor'):
+ log.debug("%s test: Didn't find reactor!" % self.name)
+ self.reactor = reactor
+
+ if self.localOptions:
+ log.debug("%s localOptions found" % self.name)
+ log.debug("%s test options: %s" % (self.name, self.subOptions))
+ self.local_options = self.localOptions.parseOptions(self.subOptions)
+ for key, value in self.local_options:
+ log.debug("Set attribute %s[%s] = %s" % (self.name, key, value))
+ setattr(self, key, value)
+
+ ## xxx is this now .subOptions?
+ #self.inputFile = self.localOptions['file']
+ self.timeout *= 1000 ## convert to milliseconds
+
+ if not self.interface:
+ log.msg("No network interface specified!")
+ log.debug("OS detected: %s" % sys.platform)
+ if LINUX or OPENBSD or NETBSD or FREEBSD or DARWIN or SOLARIS:
+ from twisted.internet.test import _posixifaces
+ log.msg("Attempting to discover network interfaces...")
+ ifaces = _posixifaces._interfaces()
+ elif WINDOWS:
+ from twisted.internet.test import _win32ifaces
+ log.msg("Attempting to discover network interfaces...")
+ ifaces = _win32ifaces._interfaces()
+ else:
+ log.debug("Client OS %s not accounted for!" % sys.platform)
+ log.debug("Unable to discover network interfaces...")
+ ifaces = [('lo', '')]
+
+ ## found = {'eth0': '1.1.1.1'}
+ found = [{i[0]: i[2]} for i in ifaces if i[0] != 'lo']
+ log.info("Found interfaces:\n%s" % pprint(found))
+ self.interfaces = self.tryInterfaces(found)
+ else:
+ ## xxx need a way to check that iface exists, is up, and
+ ## we have permissions on it
+ log.debug("Our interface has been set to %s" % self.interface)
+
+ if self.pcap:
+ try:
+ self.pcapfile = open(self.pcap, 'a+')
+ except:
+ log.msg("Unable to write to pcap file %s" % self.pcap)
+ self.pcapfile = None
+
+ try:
+ assert os.path.isfile(self.file)
+ fp = open(self.file, 'r')
+ except Exception, e:
+ hosts = ['8.8.8.8', '38.229.72.14']
+ log.err(e)
+ else:
+ self.inputs = self.inputProcessor(fp)
+ self.removePorts(hosts)
+
+ log.debug("Initialization of %s test completed with:\n%s"
+ % (self.name, ''.join(self.__dict__)))
+
+ @staticmethod
+ def inputParser(inputs):
+ log.debug("Removing possible ports from host addresses...")
+ log.debug("Initial inputs:\n%s" % pprint(inputs))
+
+ assert isinstance(inputs, list)
+ hosts = [h.rsplit(':', 1)[0] for h in inputs]
+ log.debug("Inputs converted to:\n%s" % hosts)
+
+ return hosts
+
+ def tryInterfaces(self, ifaces):
+ try:
+ from scapy.all import sr1 ## we want this check to be blocking
+ except:
+ log.msg("This test requires scapy: www.secdev.org/projects/scapy")
+ raise SystemExit
+
+ ifup = {}
+ while ifaces:
+ for ifname, ifaddr in ifaces:
+ log.debug("Currently testing network capabilities of interface"
+ + "%s by sending a packet to our address %s"
+ % (ifname, ifaddr))
+ try:
+ pkt = IP(dst=ifaddr)/ICMP()
+ ans, unans = sr(pkt, iface=ifname, timeout=self.timeout)
+ except Exception, e:
+ raise PermissionsError if e.find("Errno 1") else log.err(e)
+ else:
+ ## xxx i think this logic might be wrong
+ log.debug("Interface test packet\n%s\n\n%s"
+ % (pkt.summary(), pkt.show2()))
+ if ans.summary():
+ log.info("Received answer for test packet on interface"
+ +"%s :\n%s" % (ifname, ans.summary()))
+ ifup.update(ifname, ifaddr)
+ else:
+ log.info("Our interface test packet was unanswered:\n%s"
+ % unans.summary())
+
+ if len(ifup) > 0:
+ log.msg("Discovered the following working network interfaces: %s"
+ % ifup)
+ return ifup
+ else:
+ raise IfaceError("Could not find a working network interface.")
+
+ def buildPackets(self):
+ log.debug("self.input is %s" % self.input)
+ log.debug("self.hosts is %s" % self.hosts)
+ for addr in self.input:
+ packet = IP(dst=self.input)/ICMP()
+ self.request.append(packet)
+ return packet
+
+ def test_icmp(self):
+ if self.recieve:
+ self.buildPackets()
+ all = []
+ for packet in self.request:
+ d = self.sendReceivePackets(packets=packet)
+ all.append(d)
+ self.response.update({packet: d})
+ d_list = defer.DeferredList(all)
+ return d_list
+ else:
+ d = self.sendPackets()
+ return d
diff --git a/ooni/bridget/tests/tls-handshake.py b/ooni/bridget/tests/tls-handshake.py
new file mode 100644
index 0000000..eba950e
--- /dev/null
+++ b/ooni/bridget/tests/tls-handshake.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+
+import subprocess
+from subprocess import PIPE
+serverport = "129.21.124.215:443"
+# a subset of those from firefox
+ciphers = [
+ "ECDHE-ECDSA-AES256-SHA",
+ "ECDHE-RSA-AES256-SHA",
+ "DHE-RSA-CAMELLIA256-SHA",
+ "DHE-DSS-CAMELLIA256-SHA",
+ "DHE-RSA-AES256-SHA",
+ "DHE-DSS-AES256-SHA",
+ "ECDH-ECDSA-AES256-CBC-SHA",
+ "ECDH-RSA-AES256-CBC-SHA",
+ "CAMELLIA256-SHA",
+ "AES256-SHA",
+ "ECDHE-ECDSA-RC4-SHA",
+ "ECDHE-ECDSA-AES128-SHA",
+ "ECDHE-RSA-RC4-SHA",
+ "ECDHE-RSA-AES128-SHA",
+ "DHE-RSA-CAMELLIA128-SHA",
+ "DHE-DSS-CAMELLIA128-SHA"
+]
+def checkBridgeConnection(host, port)
+ cipher_arg = ":".join(ciphers)
+ cmd = ["openssl", "s_client", "-connect", "%s:%s" % (host,port)]
+ cmd += ["-cipher", cipher_arg]
+ proc = subprocess.Popen(cmd, stdout=PIPE, stderr=PIPE,stdin=PIPE)
+ out, error = proc.communicate()
+ success = "Cipher is DHE-RSA-AES256-SHA" in out
+ return success
diff --git a/ooni/tls-handshake.py b/ooni/tls-handshake.py
deleted file mode 100644
index eba950e..0000000
--- a/ooni/tls-handshake.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env python
-
-import subprocess
-from subprocess import PIPE
-serverport = "129.21.124.215:443"
-# a subset of those from firefox
-ciphers = [
- "ECDHE-ECDSA-AES256-SHA",
- "ECDHE-RSA-AES256-SHA",
- "DHE-RSA-CAMELLIA256-SHA",
- "DHE-DSS-CAMELLIA256-SHA",
- "DHE-RSA-AES256-SHA",
- "DHE-DSS-AES256-SHA",
- "ECDH-ECDSA-AES256-CBC-SHA",
- "ECDH-RSA-AES256-CBC-SHA",
- "CAMELLIA256-SHA",
- "AES256-SHA",
- "ECDHE-ECDSA-RC4-SHA",
- "ECDHE-ECDSA-AES128-SHA",
- "ECDHE-RSA-RC4-SHA",
- "ECDHE-RSA-AES128-SHA",
- "DHE-RSA-CAMELLIA128-SHA",
- "DHE-DSS-CAMELLIA128-SHA"
-]
-def checkBridgeConnection(host, port)
- cipher_arg = ":".join(ciphers)
- cmd = ["openssl", "s_client", "-connect", "%s:%s" % (host,port)]
- cmd += ["-cipher", cipher_arg]
- proc = subprocess.Popen(cmd, stdout=PIPE, stderr=PIPE,stdin=PIPE)
- out, error = proc.communicate()
- success = "Cipher is DHE-RSA-AES256-SHA" in out
- return success
1
0
[ooni-probe/master] * Removing old tests which have already been ported: dnstamper, echo,
by isis@torproject.org 03 Nov '12
by isis@torproject.org 03 Nov '12
03 Nov '12
commit 77c07070e7e8575abc7e6b9fdeed4d7664736ec3
Author: Isis Lovecruft <isis(a)torproject.org>
Date: Fri Nov 2 16:47:40 2012 +0000
* Removing old tests which have already been ported: dnstamper, echo,
blocking.
---
nettests/core/dnstamper.py | 29 +++--
ooni/plugins/blocking.py | 46 ------
ooni/plugins/dnstamper.py | 338 --------------------------------------------
ooni/plugins/echo.py | 127 -----------------
4 files changed, 19 insertions(+), 521 deletions(-)
diff --git a/nettests/core/dnstamper.py b/nettests/core/dnstamper.py
index b5fcea3..aad2ef3 100644
--- a/nettests/core/dnstamper.py
+++ b/nettests/core/dnstamper.py
@@ -1,6 +1,5 @@
# -*- encoding: utf-8 -*-
#
-#
# dnstamper
# *********
#
@@ -25,16 +24,13 @@ from twisted.names.error import DNSQueryRefusedError
class DNSTamperTest(nettest.TestCase):
name = "DNS tamper"
-
description = "DNS censorship detection test"
version = "0.2"
-
lookupTimeout = [1]
-
requirements = None
+
inputFile = ['file', 'f', None,
'Input file of list of hostnames to attempt to resolve']
-
optParameters = [['controlresolver', 'c', '8.8.8.8',
'Known good DNS server'],
['testresolvers', 't', None,
@@ -43,20 +39,18 @@ class DNSTamperTest(nettest.TestCase):
def setUp(self):
self.report['test_lookups'] = {}
self.report['test_reverse'] = {}
-
self.report['control_lookup'] = []
-
self.report['a_lookups'] = {}
-
self.report['tampering'] = {}
self.test_a_lookups = {}
self.control_a_lookups = []
-
self.control_reverse = None
self.test_reverse = {}
if not self.localOptions['testresolvers']:
+ log.msg("You did not specify a file of DNS servers to test!",
+ "See the '--testresolvers' option.")
self.test_resolvers = ['8.8.8.8']
return
@@ -181,6 +175,14 @@ class DNSTamperTest(nettest.TestCase):
return r
def do_reverse_lookups(self, result):
+ """
+ Take a resolved address in the form "176.139.79.178.in-addr.arpa." and
+ attempt to reverse the domain with both the control and test DNS
+ servers to see if they match.
+
+ :param result:
+ A resolved domain name.
+ """
log.msg("Doing the reverse lookups %s" % self.input)
list_of_ds = []
@@ -209,6 +211,12 @@ class DNSTamperTest(nettest.TestCase):
return dl
def compare_results(self, *arg, **kw):
+ """
+ Take the set intersection of two test result sets. If the intersection
+ is greater than zero (there are matching addresses in both sets) then
+ the no censorship is reported. Else, if no IP addresses match other
+ addresses, then we mark it as a censorship event.
+ """
log.msg("Comparing results for %s" % self.input)
log.msg(self.test_a_lookups)
@@ -222,7 +230,8 @@ class DNSTamperTest(nettest.TestCase):
# Address has not tampered with on DNS server
self.report['tampering'][test] = False
- elif self.control_reverse and set([self.control_reverse]) & set([self.report['test_reverse'][test]]):
+ elif self.control_reverse and set([self.control_reverse]) \
+ & set([self.report['test_reverse'][test]]):
# Further testing has eliminated false positives
self.report['tampering'][test] = 'reverse-match'
diff --git a/ooni/plugins/blocking.py b/ooni/plugins/blocking.py
deleted file mode 100644
index 4dd2db1..0000000
--- a/ooni/plugins/blocking.py
+++ /dev/null
@@ -1,46 +0,0 @@
-from zope.interface import implements
-from twisted.python import usage
-from twisted.plugin import IPlugin
-
-from plugoo.assets import Asset
-from plugoo.tests import ITest, OONITest
-
-class BlockingArgs(usage.Options):
- optParameters = [['asset', 'a', None, 'Asset file'],
- ['resume', 'r', 0, 'Resume at this index'],
- ['shit', 'o', None, 'Other arguments']]
-
-class BlockingTest(OONITest):
- implements(IPlugin, ITest)
-
- shortName = "blocking"
- description = "Blocking plugin"
- requirements = None
- options = BlockingArgs
- # Tells this to be blocking.
- blocking = True
-
- def control(self, experiment_result, args):
- print "Experiment Result:", experiment_result
- print "Args", args
- return experiment_result
-
- def experiment(self, args):
- import urllib
- url = 'http://torproject.org/' if not 'asset' in args else args['asset']
- try:
- req = urllib.urlopen(url)
- except:
- return {'error': 'Connection failed!'}
-
- return {'page': req.readlines()}
-
- def load_assets(self):
- if self.local_options and self.local_options['asset']:
- return {'asset': Asset(self.local_options['asset'])}
- else:
- return {}
-
-# We need to instantiate it otherwise getPlugins does not detect it
-# XXX Find a way to load plugins without instantiating them.
-#blocking = BlockingTest(None, None, None)
diff --git a/ooni/plugins/dnstamper.py b/ooni/plugins/dnstamper.py
deleted file mode 100644
index 40df505..0000000
--- a/ooni/plugins/dnstamper.py
+++ /dev/null
@@ -1,338 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- dnstamper
- *********
-
- This test resolves DNS for a list of domain names, one per line, in the
- file specified in the ooni-config under the setting "dns_experiment". If
- the file is top-1m.txt, the test will be run using Amazon's list of top
- one million domains. The experimental dns servers to query should
- be specified one per line in assets/dns_servers.txt.
-
- The test reports censorship if the cardinality of the intersection of
- the query result set from the control server and the query result set
- from the experimental server is zero, which is to say, if the two sets
- have no matching results whatsoever.
-
- NOTE: This test frequently results in false positives due to GeoIP-based
- load balancing on major global sites such as google, facebook, and
- youtube, etc.
-
- :author: Isis Lovecruft, Arturo Filastò
- :license: see LICENSE for more details
-
- TODO:
- * Finish porting to twisted
- * Finish the client.Resolver() subclass and test it
- * Use the DNS tests from captiveportal
- * Use plugoo/reports.py for final data
-"""
-
-import os
-
-from twisted.names import client, dns
-from twisted.internet import reactor, defer
-from twisted.internet.error import CannotListenError
-from twisted.internet.protocol import Factory, Protocol
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from zope.interface import implements
-
-from ooni.plugoo.assets import Asset
-from ooni.plugoo.tests import ITest, OONITest
-from ooni.utils import log
-
-class AlexaAsset(Asset):
- """
- Class for parsing the Alexa top-1m.txt as an asset.
- """
- def __init__(self, file=None):
- self = Asset.__init__(self, file)
-
- def parse_line(self, line):
- self = Asset.parse_line(self, line)
- return line.split(',')[1].replace('\n','')
-
-class DNSTamperArgs(usage.Options):
- optParameters = [['hostnames', 'h', None,
- 'Asset file of hostnames to resolve'],
- ['controlresolver', 'c', '8.8.8.8',
- 'Known good DNS server'],
- ['testresolvers', 't', None,
- 'Asset file of DNS servers to test'],
- ['localresolvers', 'l', False,
- 'Also test local servers'],
- ['port', 'p', None,
- 'Local UDP port to send queries over'],
- ['usereverse', 'r', False,
- 'Also try reverse DNS resolves'],
- ['resume', 's', 0,
- 'Resume at this index in the asset file']]
-
-class DNSTamperResolver(client.Resolver):
- """
- Twisted by default issues DNS queries over cryptographically random
- UDP ports to mitigate the Bernstein/Kaminsky attack on limited DNS
- Transaction ID numbers.[1][2][3]
-
- This is fine, unless the client has external restrictions which require
- DNS queries to be conducted over UDP port 53. Twisted does not provide
- an easy way to change this, ergo subclassing client.Resolver.[4] It
- would perhaps be wise to patch twisted.names.client and request a merge
- into upstream.
-
- [1] https://twistedmatrix.com/trac/ticket/3342
- [2] http://blog.netherlabs.nl/articles/2008/07/09/ \
- some-thoughts-on-the-recent-dns-vulnerability
- [3] http://www.blackhat.com/presentations/bh-dc-09/Kaminsky/ \
- BlackHat-DC-09-Kaminsky-DNS-Critical-Infrastructure.pdf
- [4] http://comments.gmane.org/gmane.comp.python.twisted/22794
- """
- def __init__(self):
- super(DNSTamperResolver, self).__init__(self, resolv, servers,
- timeout, reactor)
- #client.Resolver.__init__(self)
-
- if self.local_options['port']:
- self.port = self.local_options['port']
- else:
- self.port = '53'
-
- def _connectedProtocol(self):
- """
- Return a new DNSDatagramProtocol bound to a specific port
- rather than the default cryptographically-random port.
- """
- if 'protocol' in self.__dict__:
- return self.protocol
- proto = dns.DNSDatagramProtocol(self)
-
- ## XXX We may need to remove the while loop, which was
- ## originally implemented to safeguard against attempts to
- ## bind to the same random port twice...but then the code
- ## would be blocking...
- while True:
- try:
- self._reactor.listenUDP(self.port, proto)
- except error.CannotListenError:
- pass
- else:
- return proto
-
-class DNSTamperTest(OONITest):
- """
- XXX fill me in
- """
- implements(IPlugin, ITest)
-
- shortName = "dnstamper"
- description = "DNS censorship detection test"
- requirements = None
- options = DNSTamperArgs
- blocking = False
-
- def __init__(self, local_options, global_options,
- report, ooninet=None, reactor=None):
- super(DNSTamperTest, self).__init__(local_options, global_options,
- report, ooninet, reactor)
-
- def __repr__(self):
- represent = "DNSTamperTest(OONITest): local_options=%r, " \
- "global_options=%r, assets=%r" % (self.local_options,
- self.global_options,
- self.assets)
- return represent
-
- def initialize(self):
- if self.local_options:
- ## client.createResolver() turns 'None' into '/etc/resolv.conf' on
- ## posix systems, ignored on Windows.
- if self.local_options['localresolvers']:
- self.resolvconf = None
- else:
- self.resolvconf = ''
-
- def load_assets(self):
- assets = {}
-
- #default_hostnames = ['baidu.com', 'torrentz.eu', 'twitter.com',
- # 'ooni.nu', 'google.com', 'torproject.org']
- #default_resolvers = ['209.244.0.3', '208.67.222.222']
-
- def asset_file(asset_option):
- return self.local_options[asset_option]
-
- def list_to_asset(list_):
- def next(list_):
- host = list_.pop()
- if host is not None:
- yield str(host)
- while len(list_) > 0:
- next(list_)
-
- if self.local_options:
- if asset_file('hostnames'):
- ## The default filename for the Alexa Top 1 Million:
- if asset_file('hostnames') == 'top-1m.txt':
- assets.update({'hostnames':
- AlexaAsset(asset_file('hostnames'))})
- else:
- assets.update({'hostnames':
- Asset(asset_file('hostnames'))})
- else:
- log.msg("Error! We need an asset file containing the " +
- "hostnames that we should test DNS with! Please use " +
- "the '-h' option. Using pre-defined hostnames...")
-
- if asset_file('testresolvers'):
- assets.update({'testresolvers':
- Asset(asset_file('testresolvers'))})
-
- return assets
-
- def lookup(self, hostname, resolver):
- """
- Resolves a hostname through a DNS nameserver to the corresponding IP
- addresses.
- """
- def got_result(result, hostname, resolver):
- log.msg('Resolved %s through %s to %s'
- % (hostname, resolver, result))
- report = {'resolved': True,
- 'domain': hostname,
- 'nameserver': resolver,
- 'address': result }
- log.msg(report)
- return result
-
- def got_error(err, hostname, resolver):
- log.msg(err.printTraceback())
- report = {'resolved': False,
- 'domain': hostname,
- 'nameserver': resolver,
- 'address': err }
- log.msg(report)
- return err
-
- res = client.createResolver(resolvconf=self.resolvconf,
- servers=[(resolver, 53)])
-
- ## XXX should we do self.d.addCallback(resHostByName, hostname)?
- #d = res.getHostByName(hostname)
- #d.addCallbacks(got_result, got_error)
-
- #d = defer.Deferred()
- #d.addCallback(res.getHostByName, hostname)
-
- #d = res.getHostByName(hostname)
- #d.addCallback(got_result, result, hostname, resolver)
- #d.addErrback(got_error, err, hostname, resolver)
-
- res.addCallback(getHostByName, hostname)
- res.addCallback(got_result, result, hostname, resolver)
- res.addErrback(got_error, err, hostname, resolver)
-
- if self.local_options['usereverse']:
- #d.addCallback(self.reverse_lookup, result, resolver)
- #d.addErrback(log.msg(err.printTraceback()))
-
- #d.addCallback(self.reverse_lookup, result, resolver)
- #d.addErrback(log.msg(err.printTraceback()))
-
- res.addCallback(self.reverse_lookup, result, resolver)
- res.addErraback(log.msg(err.printTraceback()))
-
- return res
-
- def reverse_lookup(self, address, resolver):
- """
- Attempt to do a reverse DNS lookup to determine if the control and exp
- sets from a positive result resolve to the same domain, in order to
- remove false positives due to GeoIP load balancing.
- """
- res = client.createResolver(resolvconf=self.resolvconf,
- servers=[(resolver, 53)])
- ptr = '.'.join(addr.split('.')[::-1]) + '.in-addr.arpa'
- reverse = res.lookupPointer(ptr)
- reverse.addCallback(lambda (address, auth, add):
- util.println(address[0].payload.name))
- reverse.addErrback(log.err)
-
- ## XXX do we need to stop the reactor?
- #d.addBoth(lambda r: reactor.stop())
-
- return reverse
-
- def experiment(self, args):
- """
- Compares the lookup() sets of the control and experiment groups.
- """
- for hostname in args:
- for testresolver in self.assets['testresolvers']:
- #addressd = defer.Deferred()
- #addressd.addCallback(self.lookup, hostname, testresolver)
- #addressd.addErrback(log.err)
-
- self.d.addCallback(self.lookup, hostname, testresolver)
- self.d.addErrback(log.err)
-
- #addressd = self.lookup(hostname, testresolver)
-
- #self.d.addCallback(self.lookup, hostname, testserver)
-
- print "%s" % type(addressd)
-
- return self.d
-
- def control(self, experiment_result, args):
- print "EXPERIMENT RESULT IS %s" % experiment_result
- (exp_address, hostname, testserver, exp_reversed) = experiment_result
- control_server = self.local_options['controlserver']
- ctrl_address = self.lookup(hostname, control_server)
-
- ## XXX getHostByName() appears to be returning only one IP...
-
- if len(set(exp_address) & set(ctrl_address)) > 0:
- log.msg("Address %s has not tampered with on DNS server %s"
- % (hostname, test_server))
- return {'hostname': hostname,
- 'test-nameserver': test_server,
- 'test-address': exp_address,
- 'control-nameserver': control_server,
- 'control-address': ctrl_address,
- 'tampering-detected': False}
- else:
- log.msg("Address %s has possibly been tampered on %s:"
- % (hostname, test_server))
- log.msg("DNS resolution through testserver %s yeilds: %s"
- % (test_server, exp_address))
- log.msg("However, DNS resolution through controlserver %s yeilds: %s"
- % (control_server, ctrl_address))
-
- if self.local_options['usereverse']:
- ctrl_reversed = self.reverse_lookup(experiment_result, control_server)
- if len(set(ctrl_reversed) & set(exp_reversed)) > 0:
- log.msg("Further testing has eliminated false positives")
- else:
- log.msg("Reverse DNS on the results returned by %s returned:"
- % (test_server))
- log.msg("%s" % exp_reversed)
- log.msg("which does not match the expected domainname: %s"
- % ctrl_reversed)
- return {'hostname': hostname,
- 'test-nameserver': test_server,
- 'test-address': exp_address,
- 'test-reversed': exp_reversed,
- 'control-nameserver': control_server,
- 'control-address': ctrl_address,
- 'control-reversed': ctrl_reversed,
- 'tampering-detected': True}
- else:
- return {'hostname': hostname,
- 'test-nameserver': test_server,
- 'test-address': exp_address,
- 'control-nameserver': control_server,
- 'control-address': ctrl_address,
- 'tampering-detected': False}
-
-#dnstamper = DNSTamperTest(None, None, None)
diff --git a/ooni/plugins/echo.py b/ooni/plugins/echo.py
deleted file mode 100644
index bc1b2a8..0000000
--- a/ooni/plugins/echo.py
+++ /dev/null
@@ -1,127 +0,0 @@
-#!/usr/bin/env python
-# -*- encoding: utf-8 -*-
-#
-# +---------+
-# | echo.py |
-# +---------+
-# A simply ICMP-8 ping test.
-#
-# :author: Isis Lovecruft
-# :version: 0.1.0-pre-alpha
-# :license: (c) 2012 Isis Lovecruft
-# see attached LICENCE file
-#
-
-import os
-import sys
-
-from twisted.plugin import IPlugin
-from twisted.python import usage
-from zope.interface import implements
-
-from lib import txscapy
-from utils import log
-from plugoo.assets import Asset
-from plugoo.interface import ITest
-from protocols.scapyproto import ScapyTest
-
-class EchoOptions(usage.Options):
- optParameters = [
- ['interface', 'i', None, 'Network interface to use'],
- ['destination', 'd', None, 'File of hosts to ping'],
- ['count', 'c', 5, 'Number of packets to send', int],
- ['size', 's', 56, 'Number of bytes to send in ICMP data field', int],
- ['ttl', 't', 25, 'Set the IP Time to Live', int],
- ]
- optFlags = []
-
-class EchoAsset(Asset):
- def __init__(self, file=None):
- self = Asset.__init__(self, file)
-
- def parse_line(self, line):
- if line.startswith('#'):
- return
- else:
- return line.replace('\n', '')
-
-class EchoTest(ScapyTest):
- implements(IPlugin, ITest)
-
- shortName = 'echo'
- description = 'A simple ICMP-8 test to check if a host is reachable'
- options = EchoOptions
- requirements = None
- blocking = False
-
- pcap_file = 'echo.pcap'
- receive = True
-
- def initialize(self):
- self.request = {}
- self.response = {}
-
- if self.local_options:
-
- options = self.local_options
-
- if options['interface']:
- self.interface = options['interface']
-
- if options['count']:
- ## there's a Counter() somewhere, use it
- self.count = options['count']
-
- if options['size']:
- self.size = options['size']
-
- if options['ttl']:
- self.ttl = options['ttl']
-
- def load_assets(self):
- assets = {}
- option = self.local_options
-
- if option and option['destination']:
-
- try:
- from scapy.all import IP
- except:
- log.err()
-
- if os.path.isfile(option['destination']):
- with open(option['destination']) as hosts:
- for line in hosts.readlines():
- assets.update({'host': EchoAsset(line)})
- else:
- while type(options['destination']) is str:
- try:
- IP(options['destination'])
- except:
- log.err()
- break
- assets.update({'host': options['destination']})
- else:
- log.msg("Couldn't understand destination option...")
- log.msg("Give one IPv4 address, or a file with one address per line.")
- return assets
-
- def experiment(self, args):
- if len(args) == 0:
- log.err("Error: We're Echo, not Narcissus!")
- log.err(" Provide a list of hosts to ping...")
- d = sys.exit(1)
- return d
-
- ## XXX v4 / v6
- from scapy.all import ICMP, IP, sr
- ping = sr(IP(dst=args)/ICMP())
- if ping:
- self.response.update(ping.show())
- else:
- log.msg('No response received from %s' % args)
-
- def control(self, *args):
- pass
-
-echo = EchoTest(None, None, None)
1
0
[ooni-probe/master] * Moving tests which are not ported to the new API to the top level
by isis@torproject.org 03 Nov '12
by isis@torproject.org 03 Nov '12
03 Nov '12
commit 8894f057967a779875c8cb5b9c00971408d08fc9
Author: Isis Lovecruft <isis(a)torproject.org>
Date: Fri Nov 2 17:03:33 2012 +0000
* Moving tests which are not ported to the new API to the top level
old-to-be-ported/ directory.
---
ooni/example_plugins/examplescapy.py | 49 --------
ooni/example_plugins/skel.py | 29 -----
ooni/hack_this/TO_BE_PORTED | 14 --
ooni/hack_this/dnstamper.py | 200 -------------------------------
ooni/hack_this/tcpscan.py | 84 -------------
ooni/hack_this/traceroute.py | 108 -----------------
ooni/plugins/TESTS_ARE_MOVING.txt | 8 --
ooni/plugins/chinatrigger.py | 140 ----------------------
ooni/plugins/daphn3.py | 152 ------------------------
ooni/plugins/domclass.py | 216 ----------------------------------
ooni/plugins/httpt.py | 94 ---------------
ooni/plugins/tcpconnect.py | 65 ----------
12 files changed, 0 insertions(+), 1159 deletions(-)
diff --git a/ooni/example_plugins/examplescapy.py b/ooni/example_plugins/examplescapy.py
deleted file mode 100644
index 21a919d..0000000
--- a/ooni/example_plugins/examplescapy.py
+++ /dev/null
@@ -1,49 +0,0 @@
-import random
-from zope.interface import implements
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from twisted.internet import protocol, defer
-from ooni.plugoo.tests import ITest, OONITest
-from ooni.plugoo.assets import Asset
-from ooni.utils import log
-from ooni.protocols.scapyproto import ScapyTest
-
-from ooni.lib.txscapy import txsr, txsend
-
-class scapyArgs(usage.Options):
- optParameters = []
-
-class ExampleScapyTest(ScapyTest):
- """
- An example of writing a scapy Test
- """
- implements(IPlugin, ITest)
-
- shortName = "example_scapy"
- description = "An example of a scapy test"
- requirements = None
- options = scapyArgs
- blocking = False
-
- receive = True
- pcapfile = 'example_scapy.pcap'
- def initialize(self, reactor=None):
- if not self.reactor:
- from twisted.internet import reactor
- self.reactor = reactor
-
- self.request = {}
- self.response = {}
-
- def build_packets(self):
- """
- Override this method to build scapy packets.
- """
- from scapy.all import IP, TCP
- return IP()/TCP()
-
- def load_assets(self):
- return {}
-
-examplescapy = ExampleScapyTest(None, None, None)
-
diff --git a/ooni/example_plugins/skel.py b/ooni/example_plugins/skel.py
deleted file mode 100644
index 5f46620..0000000
--- a/ooni/example_plugins/skel.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from zope.interface import implements
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from plugoo.tests import ITest, TwistedTest
-import log
-
-class SkelArgs(usage.Options):
- optParameters = [['asset', 'a', None, 'Asset file'],
- ['resume', 'r', 0, 'Resume at this index'],
- ['other', 'o', None, 'Other arguments']]
-
-class SkelTest(OONITest):
- implements(IPlugin, ITest)
-
- shortName = "skeleton"
- description = "Skeleton plugin"
- requirements = None
- options = SkelArgs
- blocking = False
-
- def load_assets(self):
- if self.local_options:
- return {'asset': open(self.local_options['asset'])}
- else:
- return {}
-
-# We need to instantiate it otherwise getPlugins does not detect it
-# XXX Find a way to load plugins without instantiating them.
-skel = SkelTest(None, None, None)
diff --git a/ooni/hack_this/TO_BE_PORTED b/ooni/hack_this/TO_BE_PORTED
deleted file mode 100644
index 49ce5e0..0000000
--- a/ooni/hack_this/TO_BE_PORTED
+++ /dev/null
@@ -1,14 +0,0 @@
-
-The tests in this directory are very old, and have neither been ported to
-Twisted, nor to the new twisted.trial API framework. Although, they are not
-old in the sense of the *seriously old* OONI code which was written two years
-ago.
-
-These tests should be updated at least to use Twisted.
-
-If you want to hack on something care free, feel free to mess with these files
-because it would be difficult to not improve on them.
-
-<(A)3
-isis
-0x2cdb8b35
diff --git a/ooni/hack_this/dnstamper.py b/ooni/hack_this/dnstamper.py
deleted file mode 100644
index d6f87a6..0000000
--- a/ooni/hack_this/dnstamper.py
+++ /dev/null
@@ -1,200 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- dnstamper
- *********
-
- This test resolves DNS for a list of domain names, one per line, in the
- file specified in the ooni-config under the setting "dns_experiment". If
- the file is top-1m.txt, the test will be run using Amazon's list of top
- one million domains. The experimental dns servers to query should
- be specified one per line in assets/dns_servers.txt.
-
- The test reports censorship if the cardinality of the intersection of
- the query result set from the control server and the query result set
- from the experimental server is zero, which is to say, if the two sets
- have no matching results whatsoever.
-
- NOTE: This test frequently results in false positives due to GeoIP-based
- load balancing on major global sites such as google, facebook, and
- youtube, etc.
-
- :copyright: (c) 2012 Arturo Filastò, Isis Lovecruft
- :license: see LICENSE for more details
-
- TODO:
- * Switch to using Twisted's DNS builtins instead of dnspython
- *
-"""
-
-import os
-
-from twisted.names import client
-from twisted.internet import reactor
-from twisted.internet.protocol import Factory, Protocol
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from zope.interface import implements
-
-from ooni.plugoo.assets import Asset
-from ooni.plugoo.tests import ITest, OONITest
-from ooni import log
-
-class Top1MAsset(Asset):
- """
- Class for parsing the Alexa top-1m.txt as an asset.
- """
- def __init__(self, file=None):
- self = Asset.__init__(self, file)
-
- def parse_line(self, line):
- self = Asset.parse_line(self, line)
- return line.split(',')[1].replace('\n','')
-
-class DNSTamperAsset(Asset):
- """
- Creates DNS testing specific Assets.
- """
- def __init__(self, file=None):
- self = Asset.__init__(self, file)
-
-class DNSTamperArgs(usage.Options):
- optParameters = [['asset', 'a', None, 'Asset file of hostnames to resolve'],
- ['controlserver', 'c', '8.8.8.8', 'Known good DNS server'],
- ['testservers', 't', None, 'Asset file of the DNS servers to test'],
- ['resume', 'r', 0, 'Resume at this index in the asset file']]
-'''
- def control(self, experiment_result, args):
- print "Experiment Result:", experiment_result
- print "Args", args
- return experiment_result
-
- def experiment(self, args):
-'''
-
-class DNSTamperTest(OONITest):
- implements(IPlugin, ITest)
-
- shortName = "DNSTamper"
- description = "DNS censorship detection test"
- requirements = None
- options = DNSTamperArgs
- blocking = False
-
- def load_assets(self):
- if self.local_options:
- if self.local_options['asset']:
- assetf = self.local_options['asset']
- if assetf == 'top-1m.txt':
- return {'asset': Top1MAsset(assetf)}
- else:
- return {'asset': DNSTamperAsset(assetf)}
- else:
- return {}
-
- def lookup(self, hostname, nameserver):
- """
- Resolves a hostname through a DNS nameserver to the corresponding
- IP addresses.
- """
- def got_result(result):
- #self.logger.log(result)
- print result
- reactor.stop()
-
- def got_failure(failure):
- failure.printTraceback()
- reactor.stop()
-
- res = client.createResolver(servers=[(nameserver, 53)])
- d = res.getHostByName(hostname)
- d.addCallbacks(got_result, got_failure)
-
- ## XXX MAY ALSO BE:
- #answer = res.getAddress(servers=[('nameserver', 53)])
-
- ret = []
-
- for data in answer:
- ret.append(data.address)
-
- return ret
-
- def reverse_lookup(self, ip, nameserver):
- """
- Attempt to do a reverse DNS lookup to determine if the control and exp
- sets from a positive result resolve to the same domain, in order to
- remove false positives due to GeoIP load balancing.
- """
- res = client.createResolver(servers=nameserver)
- n = reversename.from_address(ip)
- revn = res.query(n, "PTR").__iter__().next().to_text()[:-1]
-
- return revn
-
- def experiment(self, *a, **kw):
- """
- Compares the lookup() sets of the control and experiment groups.
- """
- # this is just a dirty hack
- address = kw['data'][0]
- ns = kw['data'][1]
-
- config = self.config
- ctrl_ns = config.tests.dns_control_server
-
- print "ADDRESS: %s" % address
- print "NAMESERVER: %s" % ns
-
- exp = self.lookup(address, ns)
- control = self.lookup(address, ctrl_ns)
-
- result = []
-
- if len(set(exp) & set(control)) > 0:
- print "Address %s has not tampered with on DNS server %s\n" % (address, ns)
- result = (address, ns, exp, control, False)
- return result
- else:
- print "Address %s has possibly been tampered on %s:\nDNS resolution through %s yeilds:\n%s\nAlthough the control group DNS servers resolve to:\n%s" % (address, ns, ns, exp, control)
- result = (address, ns, exp, control, True)
-
- if config.tests.dns_reverse_lookup:
-
- exprevn = [self.reverse_lookup(ip, ns) for ip in exp]
- ctrlrevn = [self.reverse_lookup(ip, ctrl_ns)
- for ip in control]
-
- if len(set(exprevn) & set(ctrlrevn)) > 0:
- print "Further testing has eliminated this as a false positive."
- else:
- print "Reverse DNS on the results returned by %s returned:\n%s\nWhich does not match the expected domainname:\n%s\n" % (ns, exprevn, ctrlrevn)
- return result
-
- else:
- print "\n"
- return result
-
-#def run(ooni):
-# """
-# Run the test.
-# """
-# config = ooni.config
-# urls = []
-#
-# if (config.tests.dns_experiment == "top-1m.txt"):
-# dns_experiment = Top1MAsset(os.path.join(config.main.assetdir,
-# config.tests.dns_experiment))
-# else:
-# dns_experiment = DNSTAsset(os.path.join(config.main.assetdir,
-# config.tests.dns_experiment))
-# dns_experiment_dns = DNSTAsset(os.path.join(config.main.assetdir,
-# config.tests.dns_experiment_dns))
-#
-# assets = [dns_experiment, dns_experiment_dns]
-#
-# dnstest = DNST(ooni)
-# ooni.logger.info("Beginning dnstamper test...")
-# dnstest.run(assets, {'index': 1})
-# ooni.logger.info("Dnstamper test completed!")
-
-dnstamper = DNSTamperTest(None, None, None)
diff --git a/ooni/hack_this/tcpscan.py b/ooni/hack_this/tcpscan.py
deleted file mode 100644
index b371c88..0000000
--- a/ooni/hack_this/tcpscan.py
+++ /dev/null
@@ -1,84 +0,0 @@
-"""
- TCP Port Scanner
- ****************
-
- Does a TCP connect scan on the IP:port pairs.
-
-"""
-import os
-from gevent import socket
-from datetime import datetime
-import socks
-
-from plugoo.assets import Asset
-from plugoo.tests import Test
-
-__plugoo__ = "TCP Port Scanner"
-__desc__ = "This a test template to be used to build your own tests"
-
-class TCPScanAsset(Asset):
- """
- This is the asset that should be used by the Test. It will
- contain all the code responsible for parsing the asset file
- and should be passed on instantiation to the test.
- """
- def __init__(self, file=None):
- self = Asset.__init__(self, file)
-
-
-class TCPScan(Test):
- """
- The main Test class
- """
-
- def experiment(self, *a, **kw):
- """
- Fill this up with the tasks that should be performed
- on the "dirty" network and should be compared with the
- control.
- """
- addr = kw['data']
- s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- res = False
- try:
- self.logger.debug('Doing a connection to %s' % addr)
- s.connect((addr.split(':')[0], int(addr.split(':')[1])))
- res = True
- except socket.error, msg:
- self.logger.debug('Connection failed to %s: %s' % (addr, msg))
-
- finally:
- s.close()
-
- return {'Time': datetime.now(),
- 'Address': addr,
- 'Status': res}
-
- def control(self):
- """
- Fill this up with the control related code.
- """
- return True
-
-def run(ooni, asset=None):
- """
- This is the function that will be called by OONI
- and it is responsible for instantiating and passing
- the arguments to the Test class.
- """
- config = ooni.config
-
- # This the assets array to be passed to the run function of
- # the test
- if asset:
- assets = [TCPScanAsset(asset)]
- else:
- assets = [TCPScanAsset(os.path.join(config.main.assetdir, \
- "tcpscan.txt"))]
-
- # Instantiate the Test
- thetest = TCPScan(ooni)
- ooni.logger.info("starting TCP Scan...")
- # Run the test with argument assets
- thetest.run(assets)
- ooni.logger.info("finished.")
diff --git a/ooni/hack_this/traceroute.py b/ooni/hack_this/traceroute.py
deleted file mode 100644
index e8252c1..0000000
--- a/ooni/hack_this/traceroute.py
+++ /dev/null
@@ -1,108 +0,0 @@
-try:
- from dns import resolver
-except:
- print "Error: dnspython is not installed (http://www.dnspython.org/)"
-import gevent
-import os
-import plugoo
-
-try:
- import scapy
-except:
- print "Error: traceroute plugin requires scapy to be installed (http://www.secdev.org/projects/scapy)"
-
-from plugoo.assets import Asset
-from plugoo.tests import Test
-
-import socket
-
-__plugoo__ = "Traceroute"
-__desc__ = "Performs TTL walking tests"
-
-class TracerouteAsset(Asset):
- def __init__(self, file=None):
- self = Asset.__init__(self, file)
-
-
-class Traceroute(Test):
- """A *very* quick and dirty traceroute implementation, UDP and TCP
- """
- def traceroute(self, dst, dst_port=3880, src_port=3000, proto="tcp", max_hops=30):
- dest_addr = socket.gethostbyname(dst)
- print "Doing traceroute on %s" % dst
-
- recv = socket.getprotobyname('icmp')
- send = socket.getprotobyname(proto)
- ttl = 1
- while True:
- recv_sock = socket.socket(socket.AF_INET, socket.SOCK_RAW, recv)
- if proto == "tcp":
- send_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, send)
- else:
- send_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, send)
- recv_sock.settimeout(10)
- send_sock.settimeout(10)
-
- send_sock.setsockopt(socket.SOL_IP, socket.IP_TTL, ttl)
- recv_sock.bind(("", src_port))
- if proto == "tcp":
- try:
- send_sock.settimeout(2)
- send_sock.connect((dst, dst_port))
- except socket.timeout:
- pass
-
- except Exception, e:
- print "Error doing connect %s" % e
- else:
- send_sock.sendto("", (dst, dst_port))
-
- curr_addr = None
- try:
- print "receiving data..."
- _, curr_addr = recv_sock.recvfrom(512)
- curr_addr = curr_addr[0]
-
- except socket.error, e:
- print "SOCKET ERROR: %s" % e
-
- except Exception, e:
- print "ERROR: %s" % e
-
- finally:
- send_sock.close()
- recv_sock.close()
-
- if curr_addr is not None:
- curr_host = "%s" % curr_addr
- else:
- curr_host = "*"
-
- print "%d\t%s" % (ttl, curr_host)
-
- if curr_addr == dest_addr or ttl > max_hops:
- break
-
- ttl += 1
-
-
- def experiment(self, *a, **kw):
- # this is just a dirty hack
- address = kw['data'][0]
-
- self.traceroute(address)
-
-def run(ooni):
- """Run the test"""
- config = ooni.config
- urls = []
-
- traceroute_experiment = TracerouteAsset(os.path.join(config.main.assetdir, \
- config.tests.traceroute))
-
- assets = [traceroute_experiment]
-
- traceroute = Traceroute(ooni)
- ooni.logger.info("starting traceroute test")
- traceroute.run(assets)
- ooni.logger.info("finished")
diff --git a/ooni/plugins/TESTS_ARE_MOVING.txt b/ooni/plugins/TESTS_ARE_MOVING.txt
deleted file mode 100644
index f4c0084..0000000
--- a/ooni/plugins/TESTS_ARE_MOVING.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-7/10/2012
-
-All new tests will be moved to the directory /nettests/.
-
-Tests that are in this directory are either here for historical reasons or have
-not yet been properly tested and fully supporting the new API.
-
-A.
diff --git a/ooni/plugins/chinatrigger.py b/ooni/plugins/chinatrigger.py
deleted file mode 100644
index cf4bcb3..0000000
--- a/ooni/plugins/chinatrigger.py
+++ /dev/null
@@ -1,140 +0,0 @@
-import random
-import string
-import struct
-import time
-
-from zope.interface import implements
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from twisted.internet import protocol, defer
-from ooni.plugoo.tests import ITest, OONITest
-from ooni.plugoo.assets import Asset
-from ooni.utils import log
-from ooni.protocols.scapyproto import ScapyTest
-
-from ooni.lib.txscapy import txsr, txsend
-
-class scapyArgs(usage.Options):
- optParameters = [['dst', 'd', None, 'Specify the target address'],
- ['port', 'p', None, 'Specify the target port'],
- ['pcap', 'f', None, 'The pcap file to write with the sent and received packets'],
- ]
-
-class ChinaTriggerTest(ScapyTest):
- """
- This test is a OONI based implementation of the C tool written
- by Philipp Winter to engage chinese probes in active scanning.
-
- Example of running it:
- ./ooni/ooniprobe.py chinatrigger -d 127.0.0.1 -p 8080 -f bla.pcap
- """
- implements(IPlugin, ITest)
-
- shortName = "chinatrigger"
- description = "Triggers the chinese probes into scanning"
- requirements = ['root']
- options = scapyArgs
- blocking = False
-
- receive = True
- pcapfile = 'example_scapy.pcap'
- timeout = 5
-
- def initialize(self, reactor=None):
- if not self.reactor:
- from twisted.internet import reactor
- self.reactor = reactor
-
- @staticmethod
- def set_random_servername(pkt):
- ret = pkt[:121]
- for i in range(16):
- ret += random.choice(string.ascii_lowercase)
- ret += pkt[121+16:]
- return ret
-
- @staticmethod
- def set_random_time(pkt):
- ret = pkt[:11]
- ret += struct.pack('!I', int(time.time()))
- ret += pkt[11+4:]
- return ret
-
- @staticmethod
- def set_random_field(pkt):
- ret = pkt[:15]
- for i in range(28):
- ret += chr(random.randint(0, 256))
- ret += pkt[15+28:]
- return ret
-
- @staticmethod
- def mutate(pkt, idx):
- """
- Slightly changed mutate function.
- """
- ret = pkt[:idx-1]
- mutation = chr(random.randint(0, 256))
- while mutation == pkt[idx]:
- mutation = chr(random.randint(0, 256))
- ret += mutation
- ret += pkt[idx:]
- return ret
-
- @staticmethod
- def set_all_random_fields(pkt):
- pkt = ChinaTriggerTest.set_random_servername(pkt)
- pkt = ChinaTriggerTest.set_random_time(pkt)
- pkt = ChinaTriggerTest.set_random_field(pkt)
- return pkt
-
- def build_packets(self, *args, **kw):
- """
- Override this method to build scapy packets.
- """
- from scapy.all import IP, TCP
- pkt = "\x16\x03\x01\x00\xcc\x01\x00\x00\xc8"\
- "\x03\x01\x4f\x12\xe5\x63\x3f\xef\x7d"\
- "\x20\xb9\x94\xaa\x04\xb0\xc1\xd4\x8c"\
- "\x50\xcd\xe2\xf9\x2f\xa9\xfb\x78\xca"\
- "\x02\xa8\x73\xe7\x0e\xa8\xf9\x00\x00"\
- "\x3a\xc0\x0a\xc0\x14\x00\x39\x00\x38"\
- "\xc0\x0f\xc0\x05\x00\x35\xc0\x07\xc0"\
- "\x09\xc0\x11\xc0\x13\x00\x33\x00\x32"\
- "\xc0\x0c\xc0\x0e\xc0\x02\xc0\x04\x00"\
- "\x04\x00\x05\x00\x2f\xc0\x08\xc0\x12"\
- "\x00\x16\x00\x13\xc0\x0d\xc0\x03\xfe"\
- "\xff\x00\x0a\x00\xff\x01\x00\x00\x65"\
- "\x00\x00\x00\x1d\x00\x1b\x00\x00\x18"\
- "\x77\x77\x77\x2e\x67\x6e\x6c\x69\x67"\
- "\x78\x7a\x70\x79\x76\x6f\x35\x66\x76"\
- "\x6b\x64\x2e\x63\x6f\x6d\x00\x0b\x00"\
- "\x04\x03\x00\x01\x02\x00\x0a\x00\x34"\
- "\x00\x32\x00\x01\x00\x02\x00\x03\x00"\
- "\x04\x00\x05\x00\x06\x00\x07\x00\x08"\
- "\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00"\
- "\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11"\
- "\x00\x12\x00\x13\x00\x14\x00\x15\x00"\
- "\x16\x00\x17\x00\x18\x00\x19\x00\x23"\
- "\x00\x00"
-
- pkt = ChinaTriggerTest.set_all_random_fields(pkt)
- pkts = [IP(dst=self.dst)/TCP(dport=self.port)/pkt]
- for x in range(len(pkt)):
- mutation = IP(dst=self.dst)/TCP(dport=self.port)/ChinaTriggerTest.mutate(pkt, x)
- pkts.append(mutation)
- return pkts
-
- def load_assets(self):
- if self.local_options:
- self.dst = self.local_options['dst']
- self.port = int(self.local_options['port'])
- if self.local_options['pcap']:
- self.pcapfile = self.local_options['pcap']
- if not self.port or not self.dst:
- pass
-
- return {}
-
-#chinatrigger = ChinaTriggerTest(None, None, None)
-
diff --git a/ooni/plugins/daphn3.py b/ooni/plugins/daphn3.py
deleted file mode 100644
index bf4d60d..0000000
--- a/ooni/plugins/daphn3.py
+++ /dev/null
@@ -1,152 +0,0 @@
-"""
-This is a self genrated test created by scaffolding.py.
-you will need to fill it up with all your necessities.
-Safe hacking :).
-"""
-from zope.interface import implements
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from twisted.internet import protocol, endpoints
-
-from ooni.plugoo import reports
-from ooni.plugoo.tests import ITest, OONITest
-from ooni.plugoo.assets import Asset
-from ooni.protocols import daphn3
-from ooni.utils import log
-
-class Daphn3ClientProtocol(daphn3.Daphn3Protocol):
- def connectionMade(self):
- self.next_state()
-
-class Daphn3ClientFactory(protocol.ClientFactory):
- protocol = Daphn3ClientProtocol
- mutator = None
- steps = None
- test = None
-
- def buildProtocol(self, addr):
- p = self.protocol()
- p.factory = self
- p.test = self.test
-
- if self.steps:
- p.steps = self.steps
-
- if not self.mutator:
- self.mutator = daphn3.Mutator(p.steps)
-
- else:
- print "Moving on to next mutation"
- self.mutator.next()
-
- p.mutator = self.mutator
- p.current_state = self.mutator.state()
- return p
-
- def clientConnectionFailed(self, reason):
- print "We failed connecting the the OONIB"
- print "Cannot perform test. Perhaps it got blocked?"
- print "Please report this to tor-assistants(a)torproject.org"
- self.test.result['error'] = ('Failed in connecting to OONIB', reason)
- self.test.end(d)
-
- def clientConnectionLost(self, reason):
- print "Connection Lost."
-
-class daphn3Args(usage.Options):
- optParameters = [['pcap', 'f', None,
- 'PCAP to read for generating the YAML output'],
-
- ['output', 'o', 'daphn3.yaml',
- 'What file should be written'],
-
- ['yaml', 'y', None,
- 'The input file to the test'],
-
- ['host', 'h', None, 'Target Hostname'],
- ['port', 'p', None, 'Target port number'],
- ['resume', 'r', 0, 'Resume at this index']]
-
-class daphn3Test(OONITest):
- implements(IPlugin, ITest)
-
- shortName = "daphn3"
- description = "daphn3"
- requirements = None
- options = daphn3Args
- blocking = False
-
- local_options = None
-
- steps = None
-
- def initialize(self):
- if not self.local_options:
- self.end()
- return
-
- self.factory = Daphn3ClientFactory()
- self.factory.test = self
-
- if self.local_options['pcap']:
- self.tool = True
-
- elif self.local_options['yaml']:
- self.steps = daphn3.read_yaml(self.local_options['yaml'])
-
- else:
- log.msg("Not enough inputs specified to the test")
- self.end()
-
- def runTool(self):
- import yaml
- pcap = daphn3.read_pcap(self.local_options['pcap'])
- f = open(self.local_options['output'], 'w')
- f.write(yaml.dump(pcap))
- f.close()
-
- def control(self, exp_res, args):
- try:
- mutation = self.factory.mutator.get(0)
- self.result['censored'] = False
- except:
- mutation = None
-
- return {'mutation_number': args['mutation'],
- 'value': mutation}
-
- def _failure(self, *argc, **kw):
- self.result['censored'] = True
- self.result['error'] = ('Failed in connecting', (argc, kw))
- self.end()
-
- def experiment(self, args):
- log.msg("Doing mutation %s" % args['mutation'])
- self.factory.steps = self.steps
- host = self.local_options['host']
- port = int(self.local_options['port'])
- log.msg("Connecting to %s:%s" % (host, port))
-
- if self.ended:
- return
-
- endpoint = endpoints.TCP4ClientEndpoint(self.reactor, host, port)
- d = endpoint.connect(self.factory)
- d.addErrback(self._failure)
- return d
-
- def load_assets(self):
- if not self.local_options:
- return {}
- if not self.steps:
- print "Error: No assets!"
- self.end()
- return {}
- mutations = 0
- for x in self.steps:
- mutations += len(x['data'])
- return {'mutation': range(mutations)}
-
-# We need to instantiate it otherwise getPlugins does not detect it
-# XXX Find a way to load plugins without instantiating them.
-#daphn3test = daphn3Test(None, None, None)
diff --git a/ooni/plugins/domclass.py b/ooni/plugins/domclass.py
deleted file mode 100644
index 3080c40..0000000
--- a/ooni/plugins/domclass.py
+++ /dev/null
@@ -1,216 +0,0 @@
-#!/usr/bin/env python
-#-*- encoding: utf-8 -*-
-#
-# domclass
-# ********
-#
-# :copyright: (c) 2012 by Arturo Filastò
-# :license: see LICENSE for more details.
-#
-# how this works
-# --------------
-#
-# This classifier uses the DOM structure of a website to determine how similar
-# the two sites are.
-# The procedure we use is the following:
-# * First we parse all the DOM tree of the web page and we build a list of
-# TAG parent child relationships (ex. <html><a><b></b></a><c></c></html> =>
-# (html, a), (a, b), (html, c)).
-#
-# * We then use this information to build a matrix (M) where m[i][j] = P(of
-# transitioning from tag[i] to tag[j]). If tag[i] does not exists P() = 0.
-# Note: M is a square matrix that is number_of_tags wide.
-#
-# * We then calculate the eigenvectors (v_i) and eigenvalues (e) of M.
-#
-# * The corelation between page A and B is given via this formula:
-# correlation = dot_product(e_A, e_B), where e_A and e_B are
-# resepectively the eigenvalues for the probability matrix A and the
-# probability matrix B.
-#
-
-try:
- import numpy
-except:
- print "Error numpy not installed!"
-
-import yaml
-from zope.interface import implements
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from ooni.plugoo.tests import ITest, OONITest
-from ooni.plugoo.assets import Asset
-from ooni.utils import log
-from ooni.protocols.http import HTTPTest
-
-class domclassArgs(usage.Options):
- optParameters = [['output', 'o', None, 'Output to write'],
- ['file', 'f', None, 'Corpus file'],
- ['fileb', 'b', None, 'Corpus file'],
- ['urls', 'u', None, 'URL List'],
- ['resume', 'r', 0, 'Resume at this index']]
-
-# All HTML4 tags
-# XXX add link to W3C page where these came from
-alltags = ['A', 'ABBR', 'ACRONYM', 'ADDRESS', 'APPLET', 'AREA', 'B', 'BASE',
- 'BASEFONT', 'BD', 'BIG', 'BLOCKQUOTE', 'BODY', 'BR', 'BUTTON', 'CAPTION',
- 'CENTER', 'CITE', 'CODE', 'COL', 'COLGROUP', 'DD', 'DEL', 'DFN', 'DIR', 'DIV',
- 'DL', 'DT', 'E M', 'FIELDSET', 'FONT', 'FORM', 'FRAME', 'FRAMESET', 'H1', 'H2',
- 'H3', 'H4', 'H5', 'H6', 'HEAD', 'HR', 'HTML', 'I', 'IFRAME ', 'IMG',
- 'INPUT', 'INS', 'ISINDEX', 'KBD', 'LABEL', 'LEGEND', 'LI', 'LINK', 'MAP',
- 'MENU', 'META', 'NOFRAMES', 'NOSCRIPT', 'OBJECT', 'OL', 'OPTGROUP', 'OPTION',
- 'P', 'PARAM', 'PRE', 'Q', 'S', 'SAMP', 'SCRIPT', 'SELECT', 'SMALL', 'SPAN',
- 'STRIKE', 'STRONG', 'STYLE', 'SUB', 'SUP', 'TABLE', 'TBODY', 'TD',
- 'TEXTAREA', 'TFOOT', 'TH', 'THEAD', 'TITLE', 'TR', 'TT', 'U', 'UL', 'VAR']
-
-# Reduced subset of only the most common tags
-commontags = ['A', 'B', 'BLOCKQUOTE', 'BODY', 'BR', 'BUTTON', 'CAPTION',
- 'CENTER', 'CITE', 'CODE', 'COL', 'DD', 'DIV',
- 'DL', 'DT', 'EM', 'FIELDSET', 'FONT', 'FORM', 'FRAME', 'FRAMESET', 'H1', 'H2',
- 'H3', 'H4', 'H5', 'H6', 'HEAD', 'HR', 'HTML', 'IFRAME ', 'IMG',
- 'INPUT', 'INS', 'LABEL', 'LEGEND', 'LI', 'LINK', 'MAP',
- 'MENU', 'META', 'NOFRAMES', 'NOSCRIPT', 'OBJECT', 'OL', 'OPTION',
- 'P', 'PRE', 'SCRIPT', 'SELECT', 'SMALL', 'SPAN',
- 'STRIKE', 'STRONG', 'STYLE', 'SUB', 'SUP', 'TABLE', 'TBODY', 'TD',
- 'TEXTAREA', 'TFOOT', 'TH', 'THEAD', 'TITLE', 'TR', 'TT', 'U', 'UL']
-
-# The tags we are intested in using for our analysis
-thetags = ['A', 'DIV', 'FRAME', 'H1', 'H2',
- 'H3', 'H4', 'IFRAME ', 'INPUT',
- 'LABEL','LI', 'P', 'SCRIPT', 'SPAN',
- 'STYLE', 'TR']
-
-def compute_probability_matrix(dataset):
- """
- Compute the probability matrix based on the input dataset.
-
- :dataset: an array of pairs representing the parent child relationships.
- """
- import itertools
- ret = {}
- matrix = numpy.zeros((len(thetags) + 1, len(thetags) + 1))
-
- for data in dataset:
- x = data[0].upper()
- y = data[1].upper()
- try:
- x = thetags.index(x)
- except:
- x = len(thetags)
-
- try:
- y = thetags.index(y)
- except:
- y = len(thetags)
-
- matrix[x,y] += 1
-
- for x in xrange(len(thetags) + 1):
- possibilities = 0
- for y in matrix[x]:
- possibilities += y
-
- for i in xrange(len(matrix[x])):
- if possibilities != 0:
- matrix[x][i] = matrix[x][i]/possibilities
-
- return matrix
-
-def compute_eigenvalues(matrix):
- """
- Returns the eigenvalues of the supplied square matrix.
-
- :matrix: must be a square matrix and diagonalizable.
- """
- return numpy.linalg.eigvals(matrix)
-
-def readDOM(content=None, filename=None):
- """
- Parses the DOM of the HTML page and returns an array of parent, child
- pairs.
-
- :content: the content of the HTML page to be read.
-
- :filename: the filename to be read from for getting the content of the
- page.
- """
- from bs4 import BeautifulSoup
-
- if filename:
- f = open(filename)
- content = ''.join(f.readlines())
- f.close()
-
- dom = BeautifulSoup(content)
- couples = []
- for x in dom.findAll():
- couples.append((str(x.parent.name), str(x.name)))
-
- return couples
-
-class domclassTest(HTTPTest):
- implements(IPlugin, ITest)
-
- shortName = "domclass"
- description = "domclass"
- requirements = None
- options = domclassArgs
- blocking = False
-
- follow_redirects = True
- #tool = True
-
- def runTool(self):
- site_a = readDOM(filename=self.local_options['file'])
- site_b = readDOM(filename=self.local_options['fileb'])
- a = {}
- a['matrix'] = compute_probability_matrix(site_a)
- a['eigen'] = compute_eigenvalues(a['matrix'])
-
- self.result['eigenvalues'] = a['eigen']
- b = {}
- b['matrix'] = compute_probability_matrix(site_b)
- b['eigen'] = compute_eigenvalues(b['matrix'])
-
- #print "A: %s" % a
- #print "B: %s" % b
- correlation = numpy.vdot(a['eigen'],b['eigen'])
- correlation /= numpy.linalg.norm(a['eigen'])*numpy.linalg.norm(b['eigen'])
- correlation = (correlation + 1)/2
- print "Corelation: %s" % correlation
- self.end()
- return a
-
- def processResponseBody(self, data):
- site_a = readDOM(data)
- #site_b = readDOM(self.local_options['fileb'])
- a = {}
- a['matrix'] = compute_probability_matrix(site_a)
- a['eigen'] = compute_eigenvalues(a['matrix'])
-
-
- if len(data) == 0:
- self.result['eigenvalues'] = None
- self.result['matrix'] = None
- else:
- self.result['eigenvalues'] = a['eigen']
- #self.result['matrix'] = a['matrix']
- #self.result['content'] = data[:200]
- #b = compute_matrix(site_b)
- print "A: %s" % a
- return a['eigen']
-
- def load_assets(self):
- if self.local_options:
- if self.local_options['file']:
- self.tool = True
- return {}
- elif self.local_options['urls']:
- return {'url': Asset(self.local_options['urls'])}
- else:
- self.end()
- return {}
- else:
- return {}
-
-#domclass = domclassTest(None, None, None)
diff --git a/ooni/plugins/httpt.py b/ooni/plugins/httpt.py
deleted file mode 100644
index 358f1ea..0000000
--- a/ooni/plugins/httpt.py
+++ /dev/null
@@ -1,94 +0,0 @@
-"""
-This is a self-generated test created by scaffolding.py.
-you will need to fill it up with all your necessities.
-Safe hacking :).
-"""
-from zope.interface import implements
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from ooni.plugoo.tests import ITest, OONITest
-from ooni.plugoo.assets import Asset
-from ooni.protocols import http
-from ooni.utils import log
-
-class httptArgs(usage.Options):
- optParameters = [['urls', 'f', None, 'Urls file'],
- ['url', 'u', 'http://torproject.org/', 'Test single site'],
- ['resume', 'r', 0, 'Resume at this index'],
- ['rules', 'y', None, 'Specify the redirect rules file']]
-
-class httptTest(http.HTTPTest):
- implements(IPlugin, ITest)
-
- shortName = "httpt"
- description = "httpt"
- requirements = None
- options = httptArgs
- blocking = False
-
-
- def testPattern(self, value, pattern, type):
- if type == 'eq':
- return value == pattern
- elif type == 're':
- import re
- if re.match(pattern, value):
- return True
- else:
- return False
- else:
- return None
-
- def testPatterns(self, patterns, location):
- test_result = False
-
- if type(patterns) == list:
- for pattern in patterns:
- test_result |= self.testPattern(location, pattern['value'], pattern['type'])
- else:
- test_result |= self.testPattern(location, patterns['value'], patterns['type'])
-
- return test_result
-
- def testRules(self, rules, location):
- result = {}
- blocked = False
- for rule, value in rules.items():
- current_rule = {}
- current_rule['name'] = value['name']
- current_rule['patterns'] = value['patterns']
- current_rule['test'] = self.testPatterns(value['patterns'], location)
- blocked |= current_rule['test']
- result[rule] = current_rule
- result['blocked'] = blocked
- return result
-
- def processRedirect(self, location):
- self.result['redirect'] = None
- try:
- rules_file = self.local_options['rules']
- import yaml
- rules = yaml.load(open(rules_file))
- log.msg("Testing rules %s" % rules)
- redirect = self.testRules(rules, location)
- self.result['redirect'] = redirect
- except TypeError:
- log.msg("No rules file. Got a redirect, but nothing to do.")
-
-
- def control(self, experiment_result, args):
- print self.response
- print self.request
- # What you return here ends up inside of the report.
- log.msg("Running control")
- return {}
-
- def load_assets(self):
- if self.local_options and self.local_options['urls']:
- return {'url': Asset(self.local_options['urls'])}
- else:
- return {}
-
-# We need to instantiate it otherwise getPlugins does not detect it
-# XXX Find a way to load plugins without instantiating them.
-#httpt = httptTest(None, None, None)
diff --git a/ooni/plugins/tcpconnect.py b/ooni/plugins/tcpconnect.py
deleted file mode 100644
index 7758a9e..0000000
--- a/ooni/plugins/tcpconnect.py
+++ /dev/null
@@ -1,65 +0,0 @@
-"""
-This is a self-generated test created by scaffolding.py.
-you will need to fill it up with all your necessities.
-Safe hacking :).
-"""
-from zope.interface import implements
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from twisted.internet.protocol import Factory, Protocol
-from twisted.internet.endpoints import TCP4ClientEndpoint
-
-from ooni.plugoo.interface import ITest
-from ooni.plugoo.tests import OONITest
-from ooni.plugoo.assets import Asset
-from ooni.utils import log
-
-class tcpconnectArgs(usage.Options):
- optParameters = [['asset', 'a', None, 'File containing IP:PORT combinations, one per line.'],
- ['resume', 'r', 0, 'Resume at this index']]
-
-class tcpconnectTest(OONITest):
- implements(IPlugin, ITest)
-
- shortName = "tcpconnect"
- description = "tcpconnect"
- requirements = None
- options = tcpconnectArgs
- blocking = False
-
- def experiment(self, args):
- try:
- host, port = args['asset'].split(':')
- except:
- raise Exception("Error in parsing asset. Wrong format?")
- class DummyFactory(Factory):
- def buildProtocol(self, addr):
- return Protocol()
-
- def gotProtocol(p):
- p.transport.loseConnection()
- log.msg("Got a connection!")
- log.msg(str(p))
- return {'result': True, 'target': [host, port]}
-
- def gotError(err):
- log.msg("Had error :(")
- log.msg(err)
- return {'result': False, 'target': [host, port]}
-
- # What you return here gets handed as input to control
- point = TCP4ClientEndpoint(self.reactor, host, int(port))
- d = point.connect(DummyFactory())
- d.addCallback(gotProtocol)
- d.addErrback(gotError)
- return d
-
- def load_assets(self):
- if self.local_options:
- return {'asset': Asset(self.local_options['asset'])}
- else:
- return {}
-
-# We need to instantiate it otherwise getPlugins does not detect it
-# XXX Find a way to load plugins without instantiating them.
-#tcpconnect = tcpconnectTest(None, None, None)
1
0
03 Nov '12
commit 3350885b7d3c24795b5ccf9e1fbeee379ebcecd0
Author: Isis Lovecruft <isis(a)torproject.org>
Date: Sat Nov 3 01:19:53 2012 +0000
* Updated the TODO file. PLEASE READ IT.
---
TODO | 110 ++++++++++++++++++++++++++++++++++++++++++++++++++++++------------
1 files changed, 90 insertions(+), 20 deletions(-)
diff --git a/TODO b/TODO
index 2686ef7..63d950c 100644
--- a/TODO
+++ b/TODO
@@ -1,26 +1,12 @@
This is a list of things to be done on ooni-probe.
-Once you have completed something you should add a
-note to this file stating what you have done under
-the item.
+Once you have completed something you should add a brief note to this file
+stating what you have done under the item. If you discover needed tasks, feel
+free to add them, but also keep in mind that OONI is mostly using the Tor Trac
+instance, and the main ticket for OONI which all tests should be organized
+under is here:
-Migrate code from old
----------------------
-
-Migrate all the interesting parts of the old code to the new.
-
-It's important to make the new code asych and based on Twisted.
-It should respect the design goals of the new ooni-probe model.
-
-New things to develop
----------------------
-
-These are either components specific to the new refactor of ooni
-or that we haven't yet figured out how they should work.
-
-* Design and implement the Node Factory
-
-* Design and implement the Network Node and the Code Exec node classes
+ https://trac.torproject.org/projects/tor/ticket/5869
New things to test
------------------
@@ -36,3 +22,87 @@ New things to test
nowhere is this presented to someone trying to run a test. So, the informing
users/testers bit can be worked on, and the testing. Obviously we're going
to want something more robust than a 20 LOC Makefile pretty fast.
+
+Finalization of API design
+--------------------------
+
+* The nettest.TestCase should have an interface.
+
+ I know that there is a push away from using zope.interfaces, but I think
+ it is actually *highly* necessary for ensuring that subclasses implement
+ the required functions, and also that they do not improperly override
+ necessary functions, for them to run.
+
+ Personally, I am quite annoyed when I subclass a class from Twisted and
+ override a public method, and it breaks things (when nothing in their
+ documentation informed me that it would break things) and I have to spend
+ half an hour digging through their code to figure out precisely what is
+ needed externally from the function I'm overriding. Others should not have
+ to do this with our code.
+
+* The nettest.TestCase should have a twisted.python.usage.Options subclass and
+ interface as well, even if the instantiation of that subclass is handled by
+ the ooni.oonicli or the ooni.runner. There is more functionality to
+ usage.Options that we should expose than merely "optParameters", for
+ instance the "coerceOptions" parameter validation methods, or the
+ "postOptions" configuration.
+
+New things to develop
+---------------------
+
+These are either components specific to the new refactor of ooni
+or that we haven't yet figured out how they should work.
+
+* Finish implementing the backend collection code.
+
+ o PCAP READER/WRITER:
+ This should be quite simple...see scapy.all.wrpcap and
+ scapy.all.rdpcap. However, we have been warned by other projects that
+ this does *not* scale well. For example, see:
+ https://github.com/isislovecruft/switzerland/blob/master/switzerland/client…
+ Which is a circular ring buffer specifically for libpcap, to avoid kernel
+ buffer overflows due to a high number of incoming packets. I expect this
+ to only be an issue on substantially high-bandwidth nodes...though that
+ is what we'll be dealing with when we deploy on Mlab.
+
+ o PCAP UPLOADER:
+ This also sounds simple, and is, until you begin to deal with things like
+ persistence. What we really need is rsync, written in python, or at least
+ some cross-platform implementation. I (Isis speaking) am the current
+ maintainer of pyrsync, BUT DO NOT USE PYRSYNC. It is only an
+ implementation of the rsync *algorithm* for diffs, it is not rsync the
+ program. Also, it is BROKEN AND I DO NOT MAINTAIN IT. If you want to
+ maintain it, please take it off my hands.
+
+* Useability:
+
+ o UNITTESTS. Pronto.
+
+ o DOCUMENTATION. If you found something that confused you, or still
+ confuses you, and you couldn't find the answer within fifteen seconds,
+ then that thing is not well documented. Make it better, or at least mark
+ it with an "XXX document me!" tag.
+
+* Persistence:
+
+ o We need some type of scheduler/cron thing which will background the tests
+ so that they don't take up a terminal, and can be configured to run
+ certain tests at timed intervals.
+
+ o The Reporter will probably need to be updated to handle knowing when *a
+ test* has completed, but that the scheduler is still running.
+
+Migrate code from old
+---------------------
+
+Migrate all the interesting parts of the old code to the new. This is mostly
+finished, but there still are things in the /old-to-be-ported directory which
+might be of use. At this point, because we have gone through several version
+of the API design, many of them are entirely unusable, and merely the general
+idea remains.
+
+It's important to make the new code async and based on Twisted. It should
+respect the design goals of the new ooni-probe model. Also, importing new,
+non-standard libraries should be discussed first, if the new test is to be
+used in the core of OONI (packaging scapy and twisted already makes our
+codebase quite large).
\ No newline at end of file
1
0
[ooni-probe/master] * Removing some of the old old old code. Everything that I've deleted is
by isis@torproject.org 03 Nov '12
by isis@torproject.org 03 Nov '12
03 Nov '12
commit 559171b5c4ac91f780d96ddbcce4664ab06a2654
Author: Isis Lovecruft <isis(a)torproject.org>
Date: Sat Nov 3 01:16:27 2012 +0000
* Removing some of the old old old code. Everything that I've deleted is
something I've read through and decided either has a replacement or else is
useless in the current context.
* These files in particular are all like 10-line bash scripts which, in most
cases, don't do anything at all because the files they modify/wget/parse
whatever do not exist.
---
old_scripts/README | 47 -------------------------------------------
old_scripts/TODO | 6 -----
old_scripts/dns-checker.sh | 7 ------
old_scripts/host-prep.sh | 20 ------------------
old_scripts/run-tests.sh | 11 ----------
old_scripts/twitter-test.sh | 33 ------------------------------
6 files changed, 0 insertions(+), 124 deletions(-)
diff --git a/old_scripts/README b/old_scripts/README
deleted file mode 100644
index 4903479..0000000
--- a/old_scripts/README
+++ /dev/null
@@ -1,47 +0,0 @@
- "Marco!"
-
- "Polo!"
-
- * * *
-
-The marco.py script tries to figure out who's out there. It does this
-by trying to do a ssl handshake with a lot of Tor servers in parallel.
-If it succeeds, it records their certificates. If it fails, it records
-why.
-
-WHAT YOU MIGHT NEED:
-
- - I tested it with Python 2.6, and I think it should work with Python 2.5.
- If your Python is older than that, it won't work.
-
-HOW TO USE IT:
-
- - Edit the top of marco.py to make sure you like the defaults. You
- can adjust the timeout, where it writes stuff, and how many servers
- it tests in parallel.
-
- - Run marco.py with one or more networkstatus files as command-line
- arguments. If an addr:port appears more than once, marco will only
- test it once.
-
-HOW TO READ THE OUTPUT:
-
- - Marco will generate a file called marco.out full of lines like:
- ADDR:PORT STATUS MESSAGE.
-
- STATUS will be one of:
- "ok" -- everything is fine
- "noconnect" -- we couldn't open a TCP socket.
- "nohandshake" -- we couldn't do a TLS handshake.
- "err" -- we got an unexpected internal error
-
- MESSAGE will say more about what went wrong.
-
- The lines will be in the order that Marco received answers. If you want
- them to be sorted by something else, you'll need to do that yourself.
-
- - If you have Python 2.6, Marco will also generate a file called
- marco_certs.out, containing every TLS cert that it got for an "ok"
- server. We can use this later to make sure identity keys were correct.
-
- If you only have Python 2.5, Marco will only get the DN for the cert.
diff --git a/old_scripts/TODO b/old_scripts/TODO
deleted file mode 100644
index c24a16f..0000000
--- a/old_scripts/TODO
+++ /dev/null
@@ -1,6 +0,0 @@
-
-- Run anywhere, even older pythons.
-
-- Wrap the ssl stuff into its own class.
-
-- Decode certificates and detect MITM.
diff --git a/old_scripts/dns-checker.sh b/old_scripts/dns-checker.sh
deleted file mode 100644
index 9096c7f..0000000
--- a/old_scripts/dns-checker.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-for host in `cat twitter-host-list.txt`
-do
-echo "Trying to resolve: $host"
-host -t any $host
-done
diff --git a/old_scripts/host-prep.sh b/old_scripts/host-prep.sh
deleted file mode 100644
index b8f62d7..0000000
--- a/old_scripts/host-prep.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/bash
-SUITE="`lsb_release -c|cut -f2`";
-apt-get -y install tcptraceroute traceroute iputils-ping wget dnsutils \
- python-openssl rsync openssl libevent-1.4-2 zlib1g openssh-server
-
-# Lets make sure we can run these programs without ever becoming root again
-chmod 4755 `which tcptraceroute`
-chmod 4755 `which traceroute`
-
-# Install Tor from the Tor repo here...
-#cp /etc/apt/sources.list /etc/apt/sources.list.bkp
-#cat << "EOF" >> /etc/apt/sources.list
-#deb http://deb.torproject.org/torproject.org $SOURCE main
-#deb http://deb.torproject.org/torproject.org experimental-$SOURCE main
-#EOF
-#
-#gpg --keyserver keys.gnupg.net --recv 886DDD89
-#gpg --export A3C4F0F979CAA22CDBA8F512EE8CBC9E886DDD89 | sudo apt-key add -
-#apt-get update
-#apt-get install tor tor-geoipdb
diff --git a/old_scripts/run-tests.sh b/old_scripts/run-tests.sh
deleted file mode 100644
index 44d1c5a..0000000
--- a/old_scripts/run-tests.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-
-DATE="`date -u`";
-cd ~/.probe/logs/;
-~/.probe/bin/marco.py ~/.probe/logs/cached-consensus 2>&1 >> ~/.probe/logs/run-tests-marco-"$DATE".log;
-~/.probe/bin/dirconntest.sh 2>&1 >> ~/.probe/logs/run-tests-dirconntest-"$DATE".log;
-
-for host in `cat ~/.probe/logs/hosts.txt`;
-do
- ~/.probe/bin/generic-host-test.sh $host > 2>&1 >> ~/.probe/logs/generic-host-test-"$DATE".log;
-done;
diff --git a/old_scripts/twitter-test.sh b/old_scripts/twitter-test.sh
deleted file mode 100644
index 5dfcb41..0000000
--- a/old_scripts/twitter-test.sh
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/bin/bash
-#
-# A quick hack to (tcp)traceroute to a list of hosts
-#
-
-echo "tcp/conntest v0.6"
-date -R
-echo
-/sbin/ifconfig -a
-echo
-/sbin/route -n
-echo
-
-echo "Testing Twitter IP addresses..."
-for ip in `cat twitter-ip-list.txt|grep 1`
-do
- echo "Testing $ip"
- tcptraceroute -m 6 -w 1 $ip 80
- tcptraceroute -m 6 -w 1 $ip 0
- tcptraceroute -m 6 -w 1 $ip 123
- tcptraceroute -m 6 -w 1 $ip 443
-done
-echo "Various traceroute attempts"
-for ip in `cat twitter-ip-list.txt|grep 1`
-do
- traceroute -A $ip
- traceroute -A -I $ip
- traceroute -A -U $ip
-done
-
-wget -q -O- https://check.torproject.org|grep "IP address"
-echo
-date -R
1
0
commit aa57669fa826c6146a8028ed571c531ab2a2bc66
Author: Isis Lovecruft <isis(a)torproject.org>
Date: Fri Nov 2 17:07:08 2012 +0000
* Moar refaktorzingz.
---
old-to-be-ported-code/TODO.plgoons | 79 ----
old-to-be-ported-code/ooni-probe.diff | 358 -------------------
old-to-be-ported-code/ooni/.DS_Store | Bin 15364 -> 0 bytes
old-to-be-ported-code/ooni/__init__.py | 12 -
old-to-be-ported-code/ooni/command.py | 250 -------------
old-to-be-ported-code/ooni/dns_poisoning.py | 43 ---
old-to-be-ported-code/ooni/dnsooni.py | 356 ------------------
old-to-be-ported-code/ooni/helpers.py | 38 --
old-to-be-ported-code/ooni/http.py | 306 ----------------
old-to-be-ported-code/ooni/input.py | 33 --
old-to-be-ported-code/ooni/namecheck.py | 39 --
.../ooni/plugins/dnstest_plgoo.py | 84 -----
old-to-be-ported-code/ooni/plugins/http_plgoo.py | 70 ----
old-to-be-ported-code/ooni/plugins/marco_plgoo.py | 377 --------------------
old-to-be-ported-code/ooni/plugins/proxy_plgoo.py | 69 ----
.../ooni/plugins/simple_dns_plgoo.py | 35 --
old-to-be-ported-code/ooni/plugins/tcpcon_plgoo.py | 278 --------------
old-to-be-ported-code/ooni/plugins/tor.py | 80 ----
old-to-be-ported-code/ooni/plugins/torrc | 9 -
old-to-be-ported-code/ooni/plugooni.py | 106 ------
old-to-be-ported-code/ooni/transparenthttp.py | 41 ---
21 files changed, 0 insertions(+), 2663 deletions(-)
diff --git a/old-to-be-ported-code/TODO.plgoons b/old-to-be-ported-code/TODO.plgoons
deleted file mode 100644
index ace2a10..0000000
--- a/old-to-be-ported-code/TODO.plgoons
+++ /dev/null
@@ -1,79 +0,0 @@
-We should implement the following as plugoons:
-
-dns_plgoo.py - Various DNS checks
-
-As a start - we should perform a known good check against a name or list of
-names. As input, we should take an ip address, a name or a list of names for
-testing; we also take dns servers for experiment or control data. For output we
-emit UDP or TCP packets - we should support proxying these requests when
-possible as is the case with TCP but probably not with UDP for certain DNS
-request types.
-
-http_plgoo.py - Various HTTP checks
-
-We should compare two pages and see if we have identical properties.
-At the very least, we should print the important differences - perhaps
-with a diff like output? We should look for fingerprints in URLS that are
-returned. We should detect 302 re-direction.
-
-As input, we should take an ip address, a name or a list of names for testing;
-we also take a list of headers such as random user agent strings and so on.
-We should emit TCP packets and ensure that we do not leak DNS for connections
-that we expect to proxy to a remote network.
-
-latency_plgoo.py - Measure latency for a host or a list of hosts
-
-As input, we should take an ip address, a name or a list of names for testing;
-We should measure the mean latency from the ooni-probe to the host with various
-traceroute tests. We should also measure the latency between the ooni-probe and
-a given server for any other protocol that is request and response oriented;
-HTTP latency may be calculated by simply tracking the delta between requests
-and responses.
-
-tcptrace_plgoo.py udptrace_plgoo.py icmptrace_plgoo.py - Traceroute suites
-
-tcptrace_plgoo.py should allow for both stray and in-connection traceroute
-modes.
-
-udptrace_plgoo.py should use UDP 53 by default; 0 and 123 are also nice options
-- it may also be nice to simply make a random A record request in a DNS packet
-and use it as the payload for a UDP traceroute.
-
-reversetrace_plgoo.py should give a remote host the client's IP and return the
-output of a traceroute to that IP from the remote host. It will need a remote
-component if run against a web server. It would not need a remote component if
-run against route-views - we can simply telnet over Tor and ask it to trace to
-our detected client IP.
-
-keyword_plgoo.py should take a keyword or a list of keywords for use as a
-payload in a variety of protocols. This should be protocol aware - dns keyword
-filtering requires a sniffer to catch stray packets after the censor wins the
-race. HTTP payloads in open connections may be similar and in practice, we'll
-have to fine-tune it.
-
-icsi_plgoo.py - The ICSI Netalyzr tests; we should act as a client for their
-servers. They have dozens of tests and to implement this plgoo, we'll need to
-add many things to ooni. More details here:
-http://netalyzr.icsi.berkeley.edu/faq.html
-http://netalyzr.icsi.berkeley.edu/json/id=example-session
-
-HTML output:
-http://n2.netalyzr.icsi.berkeley.edu/summary/id=43ca208a-3466-82f17207-9bc1-433f-9b43
-
-JSON output:
-http://n2.netalyzr.icsi.berkeley.edu/json/id=43ca208a-3466-82f17207-9bc1-433f-9b43
-
-Netalyzer log:
-http://netalyzr.icsi.berkeley.edu/restore/id=43ca208a-3466-82f17207-9bc1-433f-9b43
-http://n2.netalyzr.icsi.berkeley.edu/transcript/id=43ca208a-3466-82f17207-9bc1-433f-9b43/side=client
-http://n2.netalyzr.icsi.berkeley.edu/transcript/id=43ca208a-3466-82f17207-9bc1-433f-9b43/side=server
-
-sniffer_plgoo.py - We need a generic method for capturing packets during a full
-run - this may be better as a core ooni-probe feature but we should implement
-packet capture in a plugin if it is done nowhere else.
-
-nmap_plgoo.py - We should take a list of hosts and run nmap against each of
-these hosts; many hosts are collected during testing and they should be scanned
-with something reasonable like "-A -O -T4 -sT --top-ports=10000" or something
-more reasonable.
-
diff --git a/old-to-be-ported-code/ooni-probe.diff b/old-to-be-ported-code/ooni-probe.diff
deleted file mode 100644
index fc61d3f..0000000
--- a/old-to-be-ported-code/ooni-probe.diff
+++ /dev/null
@@ -1,358 +0,0 @@
-diff --git a/TODO b/TODO
-index c2e19af..51fa559 100644
---- a/TODO
-+++ b/TODO
-@@ -293,3 +293,142 @@ VIA Rail MITM's SSL In Ottawa:
- Jul 22 17:47:21.983 [Warning] Problem bootstrapping. Stuck at 85%: Finishing handshake with first hop. (DONE; DONE; count 13; recommendation warn)
-
- http://wireless.colubris.com:81/goform/HtmlLoginRequest?username=al1852&pas…
-+
-+VIA Rail Via header:
-+
-+HTTP/1.0 301 Moved Permanently
-+Location: http://www.google.com/
-+Content-Type: text/html; charset=UTF-8
-+Date: Sat, 23 Jul 2011 02:21:30 GMT
-+Expires: Mon, 22 Aug 2011 02:21:30 GMT
-+Cache-Control: public, max-age=2592000
-+Server: gws
-+Content-Length: 219
-+X-XSS-Protection: 1; mode=block
-+X-Cache: MISS from cache_server
-+X-Cache-Lookup: MISS from cache_server:3128
-+Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
-+Connection: close
-+
-+<HTML><HEAD><meta http-equiv="content-type" content="text/html;charset=utf-8">
-+<TITLE>301 Moved</TITLE></HEAD><BODY>
-+<H1>301 Moved</H1>
-+The document has moved
-+<A HREF="http://www.google.com/">here</A>.
-+</BODY></HTML>
-+
-+
-+blocked site:
-+
-+HTTP/1.0 302 Moved Temporarily
-+Server: squid/2.6.STABLE21
-+Date: Sat, 23 Jul 2011 02:22:17 GMT
-+Content-Length: 0
-+Location: http://10.66.66.66/denied.html
-+
-+invalid request response:
-+
-+$ nc 8.8.8.8 80
-+hjdashjkdsahjkdsa
-+HTTP/1.0 400 Bad Request
-+Server: squid/2.6.STABLE21
-+Date: Sat, 23 Jul 2011 02:22:44 GMT
-+Content-Type: text/html
-+Content-Length: 1178
-+Expires: Sat, 23 Jul 2011 02:22:44 GMT
-+X-Squid-Error: ERR_INVALID_REQ 0
-+X-Cache: MISS from cache_server
-+X-Cache-Lookup: NONE from cache_server:3128
-+Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
-+Proxy-Connection: close
-+
-+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-+<HTML><HEAD><META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
-+<TITLE>ERROR: The requested URL could not be retrieved</TITLE>
-+<STYLE type="text/css"><!--BODY{background-color:#ffffff;font-family:verdana,sans-serif}PRE{font-family:sans-serif}--></STYLE>
-+</HEAD><BODY>
-+<H1>ERROR</H1>
-+<H2>The requested URL could not be retrieved</H2>
-+<HR noshade size="1px">
-+<P>
-+While trying to process the request:
-+<PRE>
-+hjdashjkdsahjkdsa
-+
-+</PRE>
-+<P>
-+The following error was encountered:
-+<UL>
-+<LI>
-+<STRONG>
-+Invalid Request
-+</STRONG>
-+</UL>
-+
-+<P>
-+Some aspect of the HTTP Request is invalid. Possible problems:
-+<UL>
-+<LI>Missing or unknown request method
-+<LI>Missing URL
-+<LI>Missing HTTP Identifier (HTTP/1.0)
-+<LI>Request is too large
-+<LI>Content-Length missing for POST or PUT requests
-+<LI>Illegal character in hostname; underscores are not allowed
-+</UL>
-+<P>Your cache administrator is <A HREF="mailto:root">root</A>.
-+
-+<BR clear="all">
-+<HR noshade size="1px">
-+<ADDRESS>
-+Generated Sat, 23 Jul 2011 02:22:44 GMT by cache_server (squid/2.6.STABLE21)
-+</ADDRESS>
-+</BODY></HTML>
-+
-+nc 10.66.66.66 80
-+GET cache_object://localhost/info HTTP/1.0
-+HTTP/1.0 403 Forbidden
-+Server: squid/2.6.STABLE21
-+Date: Sat, 23 Jul 2011 02:25:56 GMT
-+Content-Type: text/html
-+Content-Length: 1061
-+Expires: Sat, 23 Jul 2011 02:25:56 GMT
-+X-Squid-Error: ERR_ACCESS_DENIED 0
-+X-Cache: MISS from cache_server
-+X-Cache-Lookup: NONE from cache_server:3128
-+Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
-+Proxy-Connection: close
-+
-+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-+<HTML><HEAD><META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
-+<TITLE>ERROR: The requested URL could not be retrieved</TITLE>
-+<STYLE type="text/css"><!--BODY{background-color:#ffffff;font-family:verdana,sans-serif}PRE{font-family:sans-serif}--></STYLE>
-+</HEAD><BODY>
-+<H1>ERROR</H1>
-+<H2>The requested URL could not be retrieved</H2>
-+<HR noshade size="1px">
-+<P>
-+While trying to retrieve the URL:
-+<A HREF="cache_object://localhost/info">cache_object://localhost/info</A>
-+<P>
-+The following error was encountered:
-+<UL>
-+<LI>
-+<STRONG>
-+Access Denied.
-+</STRONG>
-+<P>
-+Access control configuration prevents your request from
-+being allowed at this time. Please contact your service provider if
-+you feel this is incorrect.
-+</UL>
-+<P>Your cache administrator is <A HREF="mailto:root">root</A>.
-+
-+
-+<BR clear="all">
-+<HR noshade size="1px">
-+<ADDRESS>
-+Generated Sat, 23 Jul 2011 02:25:56 GMT by cache_server (squid/2.6.STABLE21)
-+</ADDRESS>
-+</BODY></HTML>
-+
-+
-diff --git a/ooni/command.py b/ooni/command.py
-index 361190f..df1a58c 100644
---- a/ooni/command.py
-+++ b/ooni/command.py
-@@ -13,6 +13,7 @@ import ooni.captive_portal
- import ooni.namecheck
- import ooni.dns_poisoning
- import ooni.dns_cc_check
-+import ooni.transparenthttp
-
- class Command():
- def __init__(self, args):
-@@ -48,6 +49,15 @@ class Command():
- help="run captiveportal tests"
- )
-
-+ # --transhttp
-+ def cb_transhttp(option, opt, value, oparser):
-+ self.action = opt[2:]
-+ optparser.add_option(
-+ "--transhttp",
-+ action="callback", callback=cb_transhttp,
-+ help="run Transparent HTTP tests"
-+ )
-+
- # --dns
- def cb_dnstests(option, opt, value, oparser):
- self.action = opt[2:]
-@@ -122,7 +132,7 @@ class Command():
- if (not self.action):
- raise optparse.OptionError(
- 'is required',
-- '--dns | --dnsbulk | --captiveportal | --help | --version'
-+ '--dns | --dnsbulk | --dnscccheck | [ --cc CC ] | --captiveportal | --transhttp | --help | --version'
- )
-
- except optparse.OptionError, err:
-@@ -138,6 +148,10 @@ class Command():
- captive_portal = ooni.captive_portal.CaptivePortal
- captive_portal(self).main()
-
-+ def transhttp(self):
-+ transparent_http = ooni.transparenthttp.TransparentHTTPProxy
-+ transparent_http(self).main()
-+
- def dns(self):
- dnstests = ooni.namecheck.DNS
- dnstests(self).main()
-diff --git a/ooni/dns.py b/ooni/dns.py
-index 95da6ef..90d50bd 100644
---- a/ooni/dns.py
-+++ b/ooni/dns.py
-@@ -8,7 +8,7 @@ from socket import gethostbyname
- import ooni.common
-
- # apt-get install python-dns
--import DNS
-+import dns
- import random
-
- """ Wrap gethostbyname """
-diff --git a/ooni/http.py b/ooni/http.py
-index 62365bb..bb72001 100644
---- a/ooni/http.py
-+++ b/ooni/http.py
-@@ -7,8 +7,14 @@
- from socket import gethostbyname
- import ooni.common
- import urllib2
-+import httplib
-+from urlparse import urlparse
-+from pprint import pprint
- import pycurl
-+import random
-+import string
- import re
-+from BeautifulSoup import BeautifulSoup
-
- # By default, we'll be Torbutton's UA
- default_ua = { 'User-Agent' :
-@@ -20,20 +26,8 @@ default_proxy_type = PROXYTYPE_SOCKS5
- default_proxy_host = "127.0.0.1"
- default_proxy_port = "9050"
-
--
--
--
--
--
--
--
--
--
--
--
--
--
--
-+#class HTTPResponse(object):
-+# def __init__(self):
-
-
- """A very basic HTTP fetcher that uses Tor by default and returns a curl
-@@ -51,7 +45,7 @@ def http_proxy_fetch(url, headers, proxy_type=5,
- http_code = getinfo(pycurl.HTTP_CODE)
- return response, http_code
-
--"""A very basic HTTP fetcher that returns a urllib3 response object."""
-+"""A very basic HTTP fetcher that returns a urllib2 response object."""
- def http_fetch(url,
- headers= default_ua,
- label="generic HTTP fetch"):
-@@ -136,6 +130,76 @@ def http_header_no_match(experiment_url, control_header, control_result):
- else:
- return True
-
-+def http_request(self, method, url, path=None):
-+ """Takes as argument url that is perfectly formed (http://hostname/REQUEST"""
-+ purl = urlparse(url)
-+ host = purl.netloc
-+ conn = httplib.HTTPConnection(host, 80)
-+ if path is None:
-+ path = purl.path
-+ conn.request(method, purl.path)
-+ response = conn.getresponse()
-+ headers = dict(response.getheaders())
-+ self.headers = headers
-+ self.data = response.read()
-+ return True
-+
-+def search_headers(self, s_headers, url):
-+ if http_request(self, "GET", url):
-+ headers = self.headers
-+ else:
-+ return None
-+ result = {}
-+ for h in s_headers.items():
-+ result[h[0]] = h[0] in headers
-+ return result
-+
-+def http_header_match_dict(experimental_url, dict_header):
-+ result = {}
-+ url_header = http_get_header_dict(experimental_url)
-+
-+# XXX for testing
-+# [('content-length', '9291'), ('via', '1.0 cache_server:3128 (squid/2.6.STABLE21)'), ('x-cache', 'MISS from cache_server'), ('accept-ranges', 'bytes'), ('server', 'Apache/2.2.16 (Debian)'), ('last-modified', 'Fri, 22 Jul 2011 03:00:31 GMT'), ('connection', 'close'), ('etag', '"105801a-244b-4a89fab1e51c0;49e684ba90c80"'), ('date', 'Sat, 23 Jul 2011 03:03:56 GMT'), ('content-type', 'text/html'), ('x-cache-lookup', 'MISS from cache_server:3128')]
-+
-+def search_squid_headers(self):
-+ url = "http://securityfocus.org/blabla"
-+ s_headers = {'via': '1.0 cache_server:3128 (squid/2.6.STABLE21)', 'x-cache': 'MISS from cache_server', 'x-cache-lookup':'MISS from cache_server:3128'}
-+ ret = search_headers(self, s_headers, url)
-+ for i in ret.items():
-+ if i[1] is True:
-+ return False
-+ return True
-+
-+def random_bad_request(self):
-+ url = "http://securityfocus.org/blabla"
-+ r_str = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(random.randint(5,20)))
-+ if http_request(self, r_str, url):
-+ return True
-+ else:
-+ return None
-+
-+def squid_search_bad_request(self):
-+ if random_bad_request(self):
-+ s_headers = {'X-Squid-Error' : 'ERR_INVALID_REQ 0'}
-+ for i in s_headers.items():
-+ if i[0] in self.headers:
-+ return False
-+ return True
-+ else:
-+ return None
-+
-+def squid_cacheobject_request(self):
-+ url = "http://securityfocus.org/blabla"
-+ if http_request(self, "GET", url, "cache_object://localhost/info"):
-+ soup = BeautifulSoup(self.data)
-+ if soup.find('strong') and soup.find('strong').string == "Access Denied.":
-+ return False
-+ else:
-+ return True
-+ else:
-+ return None
-+
-+
- def MSHTTP_CP_Tests(self):
- experiment_url = "http://www.msftncsi.com/ncsi.txt"
- expectedResponse = "Microsoft NCSI" # Only this - nothing more
-@@ -186,6 +250,18 @@ def WC3_CP_Tests(self):
-
- # Google ChromeOS fetches this url in guest mode
- # and they expect the user to authenticate
-- def googleChromeOSHTTPTest(self):
-- print "noop"
-- #url = "http://www.google.com/"
-+def googleChromeOSHTTPTest(self):
-+ print "noop"
-+ #url = "http://www.google.com/"
-+
-+def SquidHeader_TransparentHTTP_Tests(self):
-+ return search_squid_headers(self)
-+
-+def SquidBadRequest_TransparentHTTP_Tests(self):
-+ squid_cacheobject_request(self)
-+ return squid_search_bad_request(self)
-+
-+def SquidCacheobject_TransparentHTTP_Tests(self):
-+ return squid_cacheobject_request(self)
-+
-+
diff --git a/old-to-be-ported-code/ooni/.DS_Store b/old-to-be-ported-code/ooni/.DS_Store
deleted file mode 100644
index f5738a5..0000000
Binary files a/old-to-be-ported-code/ooni/.DS_Store and /dev/null differ
diff --git a/old-to-be-ported-code/ooni/__init__.py b/old-to-be-ported-code/ooni/__init__.py
deleted file mode 100644
index 8f1b96e..0000000
--- a/old-to-be-ported-code/ooni/__init__.py
+++ /dev/null
@@ -1,12 +0,0 @@
-"""\
-This is your package, 'ooni'.
-
-It was provided by the package, `package`.
-
-Please change this documentation, and write this module!
-"""
-
-__version__ = '0.0.1'
-
-# If you run 'make test', this is your failing test.
-# raise Exception("\n\n\tNow it's time to write your 'ooni' module!!!\n\n")
diff --git a/old-to-be-ported-code/ooni/command.py b/old-to-be-ported-code/ooni/command.py
deleted file mode 100644
index e5f8f9f..0000000
--- a/old-to-be-ported-code/ooni/command.py
+++ /dev/null
@@ -1,250 +0,0 @@
-# -*- coding: utf-8
-"""\
-Command line UI module for ooni-probe - heavily inspired by Ingy döt Net
-"""
-
-import os
-import sys
-import re
-import optparse
-
-# Only include high level ooni tests at this time
-import ooni.captive_portal
-import ooni.namecheck
-import ooni.dns_poisoning
-import ooni.dns_cc_check
-import ooni.transparenthttp
-import ooni.helpers
-import ooni.plugooni
-import ooni.input
-
-class Command():
- def __init__(self, args):
- sys.argv = sys.argv[0:1]
- sys.argv.extend(args)
- self.startup_options()
-
- def startup_options(self):
- self.action = None
- self.from_ = None
- self.to = None
- self.parser = None
- self.emitter = None
- self.emit_header = None
- self.emit_trailer = None
- self.in_ = sys.stdin
- self.out = sys.stdout
- self.debug = False
- self.randomize = True
- self.cc = None
- self.hostname = None
- self.listfile = None
- self.listplugooni = False
- self.plugin_name = "all"
- self.controlproxy = None # "socks4a://127.0.0.1:9050/"
- self.experimentproxy = None
-
- usage = """
-
- 'ooni' is the Open Observatory of Network Interference
-
- command line usage: ooni-probe [options]"""
-
- optparser = optparse.OptionParser(usage=usage)
-
- # --plugin
- def cb_plugin(option, opt, value, oparser):
- self.action = opt[2:]
- self.plugin_name = str(value)
- optparser.add_option(
- "--plugin", type="string",
- action="callback", callback=cb_plugin,
- help="run the Plugooni plgoo plugin specified"
- )
-
- # --listplugins
- def cb_list_plugins(option, opt, value, oparser):
- self.action = opt[2:]
- optparser.add_option(
- "--listplugins",
- action="callback", callback=cb_list_plugins,
- help="list available Plugooni as plgoos plugin names"
- )
-
- # --captiveportal
- def cb_captiveportal(option, opt, value, oparser):
- self.action = opt[2:]
- optparser.add_option(
- "--captiveportal",
- action="callback", callback=cb_captiveportal,
- help="run vendor emulated captiveportal tests"
- )
-
- # --transhttp
- def cb_transhttp(option, opt, value, oparser):
- self.action = opt[2:]
- optparser.add_option(
- "--transhttp",
- action="callback", callback=cb_transhttp,
- help="run Transparent HTTP tests"
- )
-
- # --dns
- def cb_dnstests(option, opt, value, oparser):
- self.action = opt[2:]
- optparser.add_option(
- "--dns",
- action="callback", callback=cb_dnstests,
- help="run fixed generic dns tests"
- )
-
- # --dnsbulk
- def cb_dnsbulktests(option, opt, value, oparser):
- self.action = opt[2:]
- optparser.add_option(
- "--dnsbulk",
- action="callback", callback=cb_dnsbulktests,
- help="run bulk DNS tests in random.shuffle() order"
- )
-
- # --dns-cc-check
- def cb_dnscccheck(option, opt, value, oparser):
- self.action = opt[2:]
- optparser.add_option(
- "--dnscccheck",
- action="callback", callback=cb_dnscccheck,
- help="run cc specific bulk DNS tests in random.shuffle() order"
- )
-
- # --cc [country code]
- def cb_cc(option, opt, value, optparser):
- # XXX: We should check this against a list of supported county codes
- # and then return the matching value from the list into self.cc
- self.cc = str(value)
- optparser.add_option(
- "--cc", type="string",
- action="callback", callback=cb_cc,
- help="set a specific county code -- default is None",
- )
-
- # --list [url/hostname/ip list in file]
- def cb_list(option, opt, value, optparser):
- self.listfile = os.path.expanduser(value)
- if not os.path.isfile(self.listfile):
- print "Wrong file '" + value + "' in --list."
- sys.exit(1)
- optparser.add_option(
- "--list", type="string",
- action="callback", callback=cb_list,
- help="file to read from -- default is None",
- )
-
- # --url [url/hostname/ip]
- def cb_host(option, opt, value, optparser):
- self.hostname = str(value)
- optparser.add_option(
- "--url", type="string",
- action="callback", callback=cb_host,
- help="set URL/hostname/IP for use in tests -- default is None",
- )
-
- # --controlproxy [scheme://host:port]
- def cb_controlproxy(option, opt, value, optparser):
- self.controlproxy = str(value)
- optparser.add_option(
- "--controlproxy", type="string",
- action="callback", callback=cb_controlproxy,
- help="proxy to be used as a control -- default is None",
- )
-
- # --experimentproxy [scheme://host:port]
- def cb_experimentproxy(option, opt, value, optparser):
- self.experimentproxy = str(value)
- optparser.add_option(
- "--experimentproxy", type="string",
- action="callback", callback=cb_experimentproxy,
- help="proxy to be used for experiments -- default is None",
- )
-
-
-
- # --randomize
- def cb_randomize(option, opt, value, optparser):
- self.randomize = bool(int(value))
- optparser.add_option(
- "--randomize", type="choice",
- choices=['0', '1'], metavar="0|1",
- action="callback", callback=cb_randomize,
- help="randomize host order -- default is on",
- )
-
- # XXX TODO:
- # pause/resume scans for dns_BULK_DNS_Tests()
- # setting of control/experiment resolver
- # setting of control/experiment proxy
- #
-
- def cb_version(option, opt, value, oparser):
- self.action = 'version'
- optparser.add_option(
- "-v", "--version",
- action="callback", callback=cb_version,
- help="print ooni-probe version"
- )
-
- # parse options
- (opts, args) = optparser.parse_args()
-
- # validate options
- try:
- if (args):
- raise optparse.OptionError('extra arguments found', args)
- if (not self.action):
- raise optparse.OptionError(
- 'RTFS', 'required arguments missing'
- )
-
- except optparse.OptionError, err:
- sys.stderr.write(str(err) + '\n\n')
- optparser.print_help()
- sys.exit(1)
-
- def version(self):
- print """
-ooni-probe pre-alpha
-Copyright (c) 2011, Jacob Appelbaum, Arturo Filastò
-See: https://www.torproject.org/ooni/
-
-"""
-
- def run(self):
- getattr(self, self.action)()
-
- def plugin(self):
- plugin_run = ooni.plugooni.Plugooni
- plugin_run(self).run(self)
-
- def listplugins(self):
- plugin_run = ooni.plugooni.Plugooni
- plugin_run(self).list_plugoons()
-
- def captiveportal(self):
- captive_portal = ooni.captive_portal.CaptivePortal
- captive_portal(self).main()
-
- def transhttp(self):
- transparent_http = ooni.transparenthttp.TransparentHTTPProxy
- transparent_http(self).main()
-
- def dns(self):
- dnstests = ooni.namecheck.DNS
- dnstests(self).main()
-
- def dnsbulk(self):
- dnstests = ooni.dns_poisoning.DNSBulk
- dnstests(self).main()
-
- def dnscccheck(self):
- dnstests = ooni.dns_cc_check.DNSBulk
- dnstests(self).main()
-
diff --git a/old-to-be-ported-code/ooni/dns_poisoning.py b/old-to-be-ported-code/ooni/dns_poisoning.py
deleted file mode 100644
index 939391e..0000000
--- a/old-to-be-ported-code/ooni/dns_poisoning.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python
-#
-# DNS tampering detection module
-# by Jacob Appelbaum <jacob(a)appelbaum.net>
-#
-# This module performs DNS queries against a known good resolver and a possible
-# bad resolver. We compare every resolved name against a list of known filters
-# - if we match, we ring a bell; otherwise, we list possible filter IP
-# addresses. There is a high false positive rate for sites that are GeoIP load
-# balanced.
-#
-
-import sys
-import ooni.dnsooni
-
-class DNSBulk():
- def __init__(self, args):
- self.in_ = sys.stdin
- self.out = sys.stdout
- self.randomize = args.randomize
- self.debug = False
-
- def DNS_Tests(self):
- print "DNS tampering detection for list of domains:"
- filter_name = "_DNS_BULK_Tests"
- tests = [ooni.dnsooni]
- for test in tests:
- for function_ptr in dir(test):
- if function_ptr.endswith(filter_name):
- filter_result = getattr(test, function_ptr)(self)
- if filter_result == True:
- print function_ptr + " thinks the network is clean"
- elif filter_result == None:
- print function_ptr + " failed"
- else:
- print function_ptr + " thinks the network is dirty"
- def main(self):
- for function_ptr in dir(self):
- if function_ptr.endswith("_Tests"):
- getattr(self, function_ptr)()
-
-if __name__ == '__main__':
- self.main()
diff --git a/old-to-be-ported-code/ooni/dnsooni.py b/old-to-be-ported-code/ooni/dnsooni.py
deleted file mode 100644
index bfdfe51..0000000
--- a/old-to-be-ported-code/ooni/dnsooni.py
+++ /dev/null
@@ -1,356 +0,0 @@
-#!/usr/bin/env python
-#
-# DNS support for ooni-probe
-# by Jacob Appelbaum <jacob(a)appelbaum.net>
-#
-
-from socket import gethostbyname
-import ooni.common
-
-# requires python-dns
-# (pydns.sourceforge.net)
-try:
- import DNS
-# Mac OS X needs this
-except:
- try:
- import dns as DNS
- except:
- pass # Never mind, let's break later.
-import random
-from pprint import pprint
-
-""" Wrap gethostbyname """
-def dns_resolve(hostname):
- try:
- resolved_host = gethostbyname(hostname)
- return resolved_host
- except:
- return False
-
-"""Perform a resolution on test_hostname and compare it with the expected
- control_resolved ip address. Optionally, a label may be set to customize
- output. If the experiment matches the control, this returns True; otherwise
- it returns False.
-"""
-def dns_resolve_match(experiment_hostname, control_resolved,
- label="generic DNS comparison"):
- experiment_resolved = dns_resolve(experiment_hostname)
- if experiment_resolved == False:
- return None
- if experiment_resolved:
- if str(experiment_resolved) != str(control_resolved):
- print label + " control " + str(control_resolved) + " data does not " \
- "match experiment response: " + str(experiment_resolved)
- return False
- return True
-
-def generic_DNS_resolve(experiment_hostname, experiment_resolver):
- if experiment_resolver == None:
- req = DNS.Request(name=experiment_hostname) # local resolver
- else:
- req = DNS.Request(name=experiment_hostname, server=experiment_resolver) #overide
- resolved_data = req.req().answers
- return resolved_data
-
-""" Return a list of all known censors. """
-def load_list_of_known_censors(known_proxy_file=None):
- proxyfile = "proxy-lists/ips.txt"
- known_proxy_file = open(proxyfile, 'r', 1)
- known_proxy_list = []
- for known_proxy in known_proxy_file.readlines():
- known_proxy_list.append(known_proxy)
- known_proxy_file.close()
- known_proxy_count = len(known_proxy_list)
- print "Loading " + str(known_proxy_count) + " known proxies..."
- return known_proxy_list, known_proxy_count
-
-def load_list_of_test_hosts(hostfile=None):
- if hostfile == None:
- hostfile="censorship-lists/norwegian-dns-blacklist.txt"
- host_list_file = open(hostfile, 'r', 1)
- host_list = []
- for host_name in host_list_file.readlines():
- if host_name.isspace():
- continue
- else:
- host_list.append(host_name)
- host_list_file.close()
- host_count = len(host_list)
- #print "Loading " + str(host_count) + " test host names..."
- return host_list, host_count
-
-""" Return True with a list of censors if we find a known censor from
- known_proxy_list in the experiment_data DNS response. Otherwise return
- False and None. """
-def contains_known_censors(known_proxy_list, experiment_data):
- match = False
- proxy_list = []
- for answer in range(len(experiment_data)):
- for known_proxy in known_proxy_list:
- if answer == known_proxy:
- print "CONFLICT: known proxy discovered: " + str(known_proxy),
- proxy_list.append(known_proxy)
- match = True
- return match, proxy_list
-
-""" Return True and the experiment response that failed to match."""
-def compare_control_with_experiment(known_proxy_list, control_data, experiment_data):
- known_proxy_found, known_proxies = contains_known_censors(known_proxy_list, experiment_data)
- conflict_list = []
- conflict = False
- if known_proxy_found:
- print "known proxy discovered: " + str(known_proxies)
- for answer in range(len(control_data)):
- if control_data[answer]['data'] == experiment_data:
- print "control_data[answer]['data'] = " + str(control_data[answer]['data']) + "and experiment_data = " + str(experiment_data)
- continue
- else:
- conflict = True
- conflict_list.append(experiment_data)
- #print "CONFLICT: control_data: " + str(control_data) + " experiment_data: " + str(experiment_data),
- return conflict, conflict_list
-
-def dns_DNS_BULK_Tests(self, hostfile=None,
- known_good_resolver="8.8.8.8", test_resolver=None):
- tampering = False # By default we'll pretend the internet is nice
- tampering_list = []
- host_list, host_count = load_list_of_test_hosts()
- known_proxies, proxy_count = load_list_of_known_censors()
- check_count = 1
- if test_resolver == None:
- DNS.ParseResolvConf() # Set the local resolver as our default
- if self.randomize:
- random.shuffle(host_list) # This makes our list non-sequential for now
- for host_name in host_list:
- host_name = host_name.strip()
- print "Total progress: " + str(check_count) + " of " + str(host_count) + " hosts to check"
- print "Resolving with control resolver..."
- print "Testing " + host_name + " with control resolver: " + str(known_good_resolver)
- print "Testing " + host_name + " with experiment resolver: " + str(test_resolver)
- # XXX TODO - we need to keep track of the status of these requests and then resume them
- while True:
- try:
- control_data = generic_DNS_resolve(host_name, known_good_resolver)
- break
- except KeyboardInterrupt:
- print "bailing out..."
- exit()
- except DNS.Base.DNSError:
- print "control resolver appears to be failing..."
- continue
- except:
- print "Timeout; looping!"
- continue
-
- print "Resolving with experiment resolver..."
- while True:
- try:
- experiment_data = generic_DNS_resolve(host_name, test_resolver)
- break
- except KeyboardInterrupt:
- print "bailing out..."
- exit()
- except DNS.Base.DNSError:
- print "experiment resolver appears to be failing..."
- continue
- except:
- print "Timeout; looping!"
- continue
-
- print "Comparing control and experiment...",
- tampering, conflicts = compare_control_with_experiment(known_proxies, control_data, experiment_data)
- if tampering:
- tampering_list.append(conflicts)
- print "Conflicts with " + str(host_name) + " : " + str(conflicts)
- check_count = check_count + 1
- host_list.close()
- return tampering
-
-""" Attempt to resolve random_hostname and return True and None if empty. If an
- address is returned we return False and the returned address.
-"""
-def dns_response_empty(random_hostname):
- response = dns_resolve(random_hostname)
- if response == False:
- return True, None
- return False, response
-
-def dns_multi_response_empty(count, size):
- for i in range(count):
- randName = ooni.common._randstring(size)
- response_empty, response_ip = dns_response_empty(randName)
- if response_empty == True and response_ip == None:
- responses_are_empty = True
- else:
- print label + " " + randName + " found with value " + str(response_ip)
- responses_are_empty = False
- return responses_are_empty
-
-""" Attempt to resolve one random host name per tld in tld_list where the
- hostnames are random strings with a length between min_length and
- max_length. Return True if list is empty, otherwise return False."""
-def dns_list_empty(tld_list, min_length, max_length,
- label="generic DNS list test"):
- for tld in tld_list:
- randName = ooni.common._randstring(min_length, max_length) + tld
- response_empty, response_ip = dns_response_empty(randName)
- return response_empty
-
-# Known bad test
-# Test for their DNS breakage and their HTTP MITM
-# "Family Shield" is 208.67.222.123 and 208.67.220.123
-# returns 67.215.65.130 for filtered sites like kink.com
-# block.opendns.com is a block page where users are redirected
-# 208.67.216.135 208.67.217.135 are the block pages currently point
-# 67.215.65.132 is returned for NXDOMAINs and a visit with HTTP to that IP
-# results in redirection to http://guide.opendns.com/main?url=sdagsad.com or
-# whatever the HOST header says
-# Amusingly - their Server header is: "OpenDNS Guide"
-""" Return True if we are not being directed as known OpenDNS block pages."""
-def OpenDNS_DNS_Tests(self):
- return OpenDNS_Censorship_DNS_TESTS(self)
- return OpenDNS_NXDomain_DNS_TESTS(self)
-
-def OpenDNS_Censorship_DNS_TESTS(self):
- known_filter = "67.215.65.130"
- randName = ooni.common._randstring(10)
- redirected = dns_resolve_match(randName, known_filter, label="OpenDNS DNS Censorship comparison")
- if redirected:
- return False
- else:
- return True
-
-def OpenDNS_NXDomain_DNS_TESTS(self):
- known_filter = "67.215.65.132"
- randName = ooni.common._randstring(10)
- redirected = dns_resolve_match(randName, known_filter, label="OpenDNS DNS NXDomain comparison")
- if redirected:
- return False
- else:
- return True
-
-"""Returns True if the experiment_url returns the well known Italian block page."""
-def cc_DNS_Tests_it(self):
- tampering = False # By default we'll pretend the internet is nice
- tampering_list = []
- conflicts = []
- known_good_resolver = "8.8.8.8"
- host_list, host_count = load_list_of_test_hosts("censorship-lists/italy-gamble-blocklist-07-22-11.txt")
- known_http_block_pages, known_block_count = load_list_of_test_hosts("proxy-lists/italy-http-ips.txt")
- known_censoring_resolvers, censoring_resolver_count = load_list_of_test_hosts("proxy-lists/italy-dns-ips.txt")
-
- check_count = 1
- DNS.ParseResolvConf()
- # Set the local resolver as our default
- if self.randomize:
- random.shuffle(host_list) # This makes our list non-sequential for now
- print "We're testing (" + str(host_count) + ") URLs"
- print "We're looking for (" + str(known_block_count) + ") block pages"
- print "We're testing against (" + str(censoring_resolver_count) + ") censoring DNS resolvers"
- for test_resolver in known_censoring_resolvers:
- test_resolver = test_resolver.strip()
- for host_name in host_list:
- host_name = host_name.strip()
- print "Total progress: " + str(check_count) + " of " + str(host_count) + " hosts to check"
- print "Testing " + host_name + " with control resolver: " + known_good_resolver
- print "Testing " + host_name + " with experiment resolver: " + test_resolver
- while True:
- try:
- control_data = generic_DNS_resolve(host_name, known_good_resolver)
- break
- except KeyboardInterrupt:
- print "bailing out..."
- exit()
- except DNS.Base.DNSError:
- print "control resolver appears to be failing..."
- break
- except:
- print "Timeout; looping!"
- continue
-
- while True:
- try:
- experiment_data = generic_DNS_resolve(host_name, test_resolver)
- break
- except KeyboardInterrupt:
- print "bailing out..."
- exit()
- except DNS.Base.DNSError:
- print "experiment resolver appears to be failing..."
- continue
- except:
- print "Timeout; looping!"
- continue
-
- print "Comparing control and experiment...",
- tampering, conflicts = compare_control_with_experiment(known_http_block_pages, control_data, experiment_data)
- if tampering:
- tampering_list.append(conflicts)
- print "Conflicts with " + str(host_name) + " : " + str(conflicts)
- check_count = check_count + 1
-
- host_list.close()
- return tampering
-
-
-## XXX TODO
-## Code up automatic tests for HTTP page checking in Italy - length + known strings, etc
-
-""" Returns True if the experiment_host returns a well known Australian filter
- IP address."""
-def Australian_DNS_Censorship(self, known_filtered_host="badhost.com"):
- # http://www.robtex.com/ip/61.88.88.88.html
- # http://requests.optus.net.au/dns/
- known_block_ip = "208.69.183.228" # http://interpol.contentkeeper.com/
- known_censoring_resolvers = ["61.88.88.88"] # Optus
- for resolver in known_censoring_resolvers:
- blocked = generic_DNS_censorship(known_filtered_host, resolver, known_block_page)
- if blocked:
- return True
-
-"""Returns True if experiment_hostname as resolved by experiment_resolver
- resolves to control_data. Returns False if there is no match or None if the
- attempt fails."""
-def generic_DNS_censorship(self, experiment_hostname, experiment_resolver,
- control_data):
- req = DNS.Request(name=experiment_hostname, server=experiment_resolver)
- resolved_data = s.req().answers
- for answer in range(len(resolved_data)):
- if resolved_data[answer]['data'] == control_data:
- return True
- return False
-
-# See dns_launch_wildcard_checks in tor/src/or/dns.c for Tor implementation
-# details
-""" Return True if Tor would consider the network fine; False if it's hostile
- and has no signs of DNS tampering. """
-def Tor_DNS_Tests(self):
- response_rfc2606_empty = RFC2606_DNS_Tests(self)
- tor_tld_list = ["", ".com", ".org", ".net"]
- response_tor_empty = ooni.dnsooni.dns_list_empty(tor_tld_list, 8, 16, "TorDNSTest")
- return response_tor_empty | response_rfc2606_empty
-
-""" Return True if RFC2606 would consider the network hostile; False if it's all
- clear and has no signs of DNS tampering. """
-def RFC2606_DNS_Tests(self):
- tld_list = [".invalid", ".test"]
- return ooni.dnsooni.dns_list_empty(tld_list, 4, 18, "RFC2606Test")
-
-""" Return True if googleChromeDNSTest would consider the network OK."""
-def googleChrome_CP_Tests(self):
- maxGoogleDNSTests = 3
- GoogleDNSTestSize = 10
- return ooni.dnsooni.dns_multi_response_empty(maxGoogleDNSTests,
- GoogleDNSTestSize)
-def googleChrome_DNS_Tests(self):
- return googleChrome_CP_Tests(self)
-
-""" Return True if MSDNSTest would consider the network OK."""
-def MSDNS_CP_Tests(self):
- experimentHostname = "dns.msftncsi.com"
- expectedResponse = "131.107.255.255"
- return ooni.dnsooni.dns_resolve_match(experimentHostname, expectedResponse, "MS DNS")
-
-def MSDNS_DNS_Tests(self):
- return MSDNS_CP_Tests(self)
diff --git a/old-to-be-ported-code/ooni/helpers.py b/old-to-be-ported-code/ooni/helpers.py
deleted file mode 100644
index 514e65f..0000000
--- a/old-to-be-ported-code/ooni/helpers.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-#
-# HTTP support for ooni-probe
-# by Jacob Appelbaum <jacob(a)appelbaum.net>
-# Arturo Filasto' <art(a)fuffa.org>
-
-import ooni.common
-import pycurl
-import random
-import zipfile
-import os
-from xml.dom import minidom
-try:
- from BeautifulSoup import BeautifulSoup
-except:
- pass # Never mind, let's break later.
-
-def get_random_url(self):
- filepath = os.getcwd() + "/test-lists/top-1m.csv.zip"
- fp = zipfile.ZipFile(filepath, "r")
- fp.open("top-1m.csv")
- content = fp.read("top-1m.csv")
- return "http://" + random.choice(content.split("\n")).split(",")[1]
-
-"""Pick a random header and use that for the request"""
-def get_random_headers(self):
- filepath = os.getcwd() + "/test-lists/whatheaders.xml"
- headers = []
- content = open(filepath, "r").read()
- soup = BeautifulSoup(content)
- measurements = soup.findAll('measurement')
- i = random.randint(0,len(measurements))
- for vals in measurements[i].findAll('header'):
- name = vals.find('name').string
- value = vals.find('value').string
- if name != "host":
- headers.append((name, value))
- return headers
diff --git a/old-to-be-ported-code/ooni/http.py b/old-to-be-ported-code/ooni/http.py
deleted file mode 100644
index 61abad4..0000000
--- a/old-to-be-ported-code/ooni/http.py
+++ /dev/null
@@ -1,306 +0,0 @@
-#!/usr/bin/env python
-#
-# HTTP support for ooni-probe
-# by Jacob Appelbaum <jacob(a)appelbaum.net>
-# Arturo Filasto' <art(a)fuffa.org>
-#
-
-from socket import gethostbyname
-import ooni.common
-import ooni.helpers
-import ooni.report
-import urllib2
-import httplib
-from urlparse import urlparse
-from pprint import pprint
-import pycurl
-import random
-import string
-import re
-from pprint import pprint
-try:
- from BeautifulSoup import BeautifulSoup
-except:
- pass # Never mind, let's break later.
-
-# By default, we'll be Torbutton's UA
-default_ua = { 'User-Agent' :
- 'Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0' }
-
-# Use pycurl to connect over a proxy
-PROXYTYPE_SOCKS5 = 5
-default_proxy_type = PROXYTYPE_SOCKS5
-default_proxy_host = "127.0.0.1"
-default_proxy_port = "9050"
-
-#class HTTPResponse(object):
-# def __init__(self):
-
-
-"""A very basic HTTP fetcher that uses Tor by default and returns a curl
- object."""
-def http_proxy_fetch(url, headers, proxy_type=5,
- proxy_host="127.0.0.1",
- proxy_port=9050):
- request = pycurl.Curl()
- request.setopt(pycurl.PROXY, proxy_host)
- request.setopt(pycurl.PROXYPORT, proxy_port)
- request.setopt(pycurl.PROXYTYPE, proxy_type)
- request.setopt(pycurl.HTTPHEADER, ["User-Agent: Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0"])
- request.setopt(pycurl.URL, url)
- response = request.perform()
- http_code = getinfo(pycurl.HTTP_CODE)
- return response, http_code
-
-"""A very basic HTTP fetcher that returns a urllib2 response object."""
-def http_fetch(url,
- headers= default_ua,
- label="generic HTTP fetch"):
- request = urllib2.Request(url, None, headers)
- response = urllib2.urlopen(request)
- return response
-
-"""Connect to test_hostname on port 80, request url and compare it with the expected
- control_result. Optionally, a label may be set to customize
- output. If the experiment matches the control, this returns True with the http
- status code; otherwise it returns False.
-"""
-def http_content_match(experimental_url, control_result,
- headers= { 'User-Agent' : default_ua },
- label="generic HTTP content comparison"):
- request = urllib2.Request(experimental_url, None, headers)
- response = urllib2.urlopen(request)
- responseContents = response.read()
- responseCode = response.code
- if responseContents != False:
- if str(responseContents) != str(control_result):
- print label + " control " + str(control_result) + " data does not " \
- "match experiment response: " + str(responseContents)
- return False, responseCode
- return True, responseCode
- else:
- print "HTTP connection appears to have failed"
- return False, False
-
-"""Connect to test_hostname on port 80, request url and compare it with the expected
- control_result as a regex. Optionally, a label may be set to customize
- output. If the experiment matches the control, this returns True with the HTTP
- status code; otherwise it returns False.
-"""
-def http_content_fuzzy_match(experimental_url, control_result,
- headers= { 'User-Agent' : default_ua },
- label="generic HTTP content comparison"):
- request = urllib2.Request(experimental_url, None, headers)
- response = urllib2.urlopen(request)
- responseContents = response.read()
- responseCode = response.code
- pattern = re.compile(control_result)
- match = pattern.search(responseContents)
- if responseContents != False:
- if not match:
- print label + " control " + str(control_result) + " data does not " \
- "match experiment response: " + str(responseContents)
- return False, responseCode
- return True, responseCode
- else:
- print "HTTP connection appears to have failed"
- return False, False
-
-"""Compare two HTTP status codes as integers and return True if they match."""
-def http_status_code_match(experiment_code, control_code):
- if int(experiment_code) != int(control_code):
- return False
- return True
-
-"""Compare two HTTP status codes as integers and return True if they don't match."""
-def http_status_code_no_match(experiment_code, control_code):
- if http_status_code_match(experiment_code, control_code):
- return False
- return True
-
-"""Connect to a URL and compare the control_header/control_result with the data
-served by the remote server. Return True if it matches, False if it does not."""
-def http_header_match(experiment_url, control_header, control_result):
- response = http_fetch(url, label=label)
- remote_header = response.get_header(control_header)
- if str(remote_header) == str(control_result):
- return True
- else:
- return False
-
-"""Connect to a URL and compare the control_header/control_result with the data
-served by the remote server. Return True if it does not matche, False if it does."""
-def http_header_no_match(experiment_url, control_header, control_result):
- match = http_header_match(experiment_url, control_header, control_result)
- if match:
- return False
- else:
- return True
-
-def send_browser_headers(self, browser, conn):
- headers = ooni.helpers.get_random_headers(self)
- for h in headers:
- conn.putheader(h[0], h[1])
- conn.endheaders()
- return True
-
-def http_request(self, method, url, path=None):
- purl = urlparse(url)
- host = purl.netloc
- conn = httplib.HTTPConnection(host, 80)
- conn.connect()
- if path is None:
- path = purl.path
- conn.putrequest(method, purl.path)
- send_browser_headers(self, None, conn)
- response = conn.getresponse()
- headers = dict(response.getheaders())
- self.headers = headers
- self.data = response.read()
- return True
-
-def search_headers(self, s_headers, url):
- if http_request(self, "GET", url):
- headers = self.headers
- else:
- return None
- result = {}
- for h in s_headers.items():
- result[h[0]] = h[0] in headers
- return result
-
-# XXX for testing
-# [('content-length', '9291'), ('via', '1.0 cache_server:3128 (squid/2.6.STABLE21)'), ('x-cache', 'MISS from cache_server'), ('accept-ranges', 'bytes'), ('server', 'Apache/2.2.16 (Debian)'), ('last-modified', 'Fri, 22 Jul 2011 03:00:31 GMT'), ('connection', 'close'), ('etag', '"105801a-244b-4a89fab1e51c0;49e684ba90c80"'), ('date', 'Sat, 23 Jul 2011 03:03:56 GMT'), ('content-type', 'text/html'), ('x-cache-lookup', 'MISS from cache_server:3128')]
-
-"""Search for squid headers by requesting a random site and checking if the headers have been rewritten (active, not fingerprintable)"""
-def search_squid_headers(self):
- test_name = "squid header"
- self.logger.info("RUNNING %s test" % test_name)
- url = ooni.helpers.get_random_url(self)
- s_headers = {'via': '1.0 cache_server:3128 (squid/2.6.STABLE21)', 'x-cache': 'MISS from cache_server', 'x-cache-lookup':'MISS from cache_server:3128'}
- ret = search_headers(self, s_headers, url)
- for i in ret.items():
- if i[1] is True:
- self.logger.info("the %s test returned False" % test_name)
- return False
- self.logger.info("the %s test returned True" % test_name)
- return True
-
-def random_bad_request(self):
- url = ooni.helpers.get_random_url(self)
- r_str = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(random.randint(5,20)))
- if http_request(self, r_str, url):
- return True
- else:
- return None
-
-"""Create a request made up of a random string of 5-20 chars (active technique, possibly fingerprintable)"""
-def squid_search_bad_request(self):
- test_name = "squid bad request"
- self.logger.info("RUNNING %s test" % test_name)
- if random_bad_request(self):
- s_headers = {'X-Squid-Error' : 'ERR_INVALID_REQ 0'}
- for i in s_headers.items():
- if i[0] in self.headers:
- self.logger.info("the %s test returned False" % test_name)
- return False
- self.logger.info("the %s test returned True" % test_name)
- return True
- else:
- self.logger.warning("the %s test returned failed" % test_name)
- return None
-
-"""Try requesting cache_object and expect as output access denied (very active technique, fingerprintable) """
-def squid_cacheobject_request(self):
- url = ooni.helpers.get_random_url(self)
- test_name = "squid cacheobject"
- self.logger.info("RUNNING %s test" % test_name)
- if http_request(self, "GET", url, "cache_object://localhost/info"):
- soup = BeautifulSoup(self.data)
- if soup.find('strong') and soup.find('strong').string == "Access Denied.":
- self.logger.info("the %s test returned False" % test_name)
- return False
- else:
- self.logger.info("the %s test returned True" % test_name)
- return True
- else:
- self.logger.warning("the %s test failed" % test_name)
- return None
-
-
-def MSHTTP_CP_Tests(self):
- test_name = "MS HTTP Captive Portal"
- self.logger.info("RUNNING %s test" % test_name)
- experiment_url = "http://www.msftncsi.com/ncsi.txt"
- expectedResponse = "Microsoft NCSI" # Only this - nothing more
- expectedResponseCode = "200" # Must be this - nothing else
- label = "MS HTTP"
- headers = { 'User-Agent' : 'Microsoft NCSI' }
- content_match, experiment_code = http_content_match(experiment_url, expectedResponse,
- headers, label)
- status_match = http_status_code_match(expectedResponseCode,
- experiment_code)
- if status_match and content_match:
- self.logger.info("the %s test returned True" % test_name)
- return True
- else:
- print label + " experiment would conclude that the network is filtered."
- self.logger.info("the %s test returned False" % test_name)
- return False
-
-def AppleHTTP_CP_Tests(self):
- test_name = "Apple HTTP Captive Portal"
- self.logger.info("RUNNING %s test" % test_name)
- experiment_url = "http://www.apple.com/library/test/success.html"
- expectedResponse = "Success" # There is HTML that contains this string
- expectedResponseCode = "200"
- label = "Apple HTTP"
- headers = { 'User-Agent' : 'Mozilla/5.0 (iPhone; U; CPU like Mac OS X; en) '
- 'AppleWebKit/420+ (KHTML, like Gecko) Version/3.0'
- ' Mobile/1A543a Safari/419.3' }
- content_match, experiment_code = http_content_fuzzy_match(
- experiment_url, expectedResponse, headers)
- status_match = http_status_code_match(expectedResponseCode,
- experiment_code)
- if status_match and content_match:
- self.logger.info("the %s test returned True" % test_name)
- return True
- else:
- print label + " experiment would conclude that the network is filtered."
- print label + "content match:" + str(content_match) + " status match:" + str(status_match)
- self.logger.info("the %s test returned False" % test_name)
- return False
-
-def WC3_CP_Tests(self):
- test_name = "W3 Captive Portal"
- self.logger.info("RUNNING %s test" % test_name)
- url = "http://tools.ietf.org/html/draft-nottingham-http-portal-02"
- draftResponseCode = "428"
- label = "WC3 draft-nottingham-http-portal"
- response = http_fetch(url, label=label)
- responseCode = response.code
- if http_status_code_no_match(responseCode, draftResponseCode):
- self.logger.info("the %s test returned True" % test_name)
- return True
- else:
- print label + " experiment would conclude that the network is filtered."
- print label + " status match:" + status_match
- self.logger.info("the %s test returned False" % test_name)
- return False
-
-# Google ChromeOS fetches this url in guest mode
-# and they expect the user to authenticate
-def googleChromeOSHTTPTest(self):
- print "noop"
- #url = "http://www.google.com/"
-
-def SquidHeader_TransparentHTTP_Tests(self):
- return search_squid_headers(self)
-
-def SquidBadRequest_TransparentHTTP_Tests(self):
- return squid_search_bad_request(self)
-
-def SquidCacheobject_TransparentHTTP_Tests(self):
- return squid_cacheobject_request(self)
-
-
diff --git a/old-to-be-ported-code/ooni/input.py b/old-to-be-ported-code/ooni/input.py
deleted file mode 100644
index c32ab48..0000000
--- a/old-to-be-ported-code/ooni/input.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/python
-
-class file:
- def __init__(self, name=None):
- if name:
- self.name = name
-
- def simple(self, name=None):
- """ Simple file parsing method:
- Read a file line by line and output an array with all it's lines, without newlines
- """
- if name:
- self.name = name
- output = []
- try:
- f = open(self.name, "r")
- for line in f.readlines():
- output.append(line.strip())
- return output
- except:
- return output
-
- def csv(self, name=None):
- if name:
- self.name = name
-
- def yaml(self, name):
- if name:
- self.name = name
-
- def consensus(self, name):
- if name:
- self.name = name
diff --git a/old-to-be-ported-code/ooni/namecheck.py b/old-to-be-ported-code/ooni/namecheck.py
deleted file mode 100644
index 1a2a3f0..0000000
--- a/old-to-be-ported-code/ooni/namecheck.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python
-#
-# DNS tampering detection module
-# by Jacob Appelbaum <jacob(a)appelbaum.net>
-#
-# This module performs multiple DNS tests.
-
-import sys
-import ooni.dnsooni
-
-class DNS():
- def __init__(self, args):
- self.in_ = sys.stdin
- self.out = sys.stdout
- self.debug = False
- self.randomize = args.randomize
-
- def DNS_Tests(self):
- print "DNS tampering detection:"
- filter_name = "_DNS_Tests"
- tests = [ooni.dnsooni]
- for test in tests:
- for function_ptr in dir(test):
- if function_ptr.endswith(filter_name):
- filter_result = getattr(test, function_ptr)(self)
- if filter_result == True:
- print function_ptr + " thinks the network is clean"
- elif filter_result == None:
- print function_ptr + " failed"
- else:
- print function_ptr + " thinks the network is dirty"
-
- def main(self):
- for function_ptr in dir(self):
- if function_ptr.endswith("_Tests"):
- getattr(self, function_ptr)()
-
-if __name__ == '__main__':
- self.main()
diff --git a/old-to-be-ported-code/ooni/plugins/__init__.py b/old-to-be-ported-code/ooni/plugins/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/old-to-be-ported-code/ooni/plugins/dnstest_plgoo.py b/old-to-be-ported-code/ooni/plugins/dnstest_plgoo.py
deleted file mode 100644
index 04782d4..0000000
--- a/old-to-be-ported-code/ooni/plugins/dnstest_plgoo.py
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/usr/bin/python
-
-import sys
-import re
-from pprint import pprint
-from twisted.internet import reactor, endpoints
-from twisted.names import client
-from ooni.plugooni import Plugoo
-from ooni.socksclient import SOCKSv4ClientProtocol, SOCKSWrapper
-
-class DNSTestPlugin(Plugoo):
- def __init__(self):
- self.name = ""
- self.type = ""
- self.paranoia = ""
- self.modules_to_import = []
- self.output_dir = ""
- self.buf = ""
- self.control_response = []
-
- def response_split(self, response):
- a = []
- b = []
- for i in response:
- a.append(i[0])
- b.append(i[1])
-
- return a,b
-
- def cb(self, type, hostname, dns_server, value):
- if self.control_response is None:
- self.control_response = []
- if type == 'control' and self.control_response != value:
- print "%s %s" % (dns_server, value)
- self.control_response.append((dns_server,value))
- pprint(self.control_response)
- if type == 'experiment':
- pprint(self.control_response)
- _, res = self.response_split(self.control_response)
- if value not in res:
- print "res (%s) : " % value
- pprint(res)
- print "---"
- print "%s appears to be censored on %s (%s != %s)" % (hostname, dns_server, res[0], value)
-
- else:
- print "%s appears to be clean on %s" % (hostname, dns_server)
- self.r2.servers = [('212.245.158.66',53)]
- print "HN: %s %s" % (hostname, value)
-
- def err(self, pck, error):
- pprint(pck)
- error.printTraceback()
- reactor.stop()
- print "error!"
- pass
-
- def ooni_main(self, args):
- self.experimentalproxy = ''
- self.test_hostnames = ['dio.it']
- self.control_dns = [('8.8.8.8',53), ('4.4.4.8',53)]
- self.experiment_dns = [('85.37.17.9',53),('212.245.158.66',53)]
-
- self.control_res = []
- self.control_response = None
-
- self.r1 = client.Resolver(None, [self.control_dns.pop()])
- self.r2 = client.Resolver(None, [self.experiment_dns.pop()])
-
- for hostname in self.test_hostnames:
- for dns_server in self.control_dns:
- self.r1.servers = [dns_server]
- f = self.r1.getHostByName(hostname)
- pck = (hostname, dns_server)
- f.addCallback(lambda x: self.cb('control', hostname, dns_server, x)).addErrback(lambda x: self.err(pck, x))
-
- for dns_server in self.experiment_dns:
- self.r2.servers = [dns_server]
- pck = (hostname, dns_server)
- f = self.r2.getHostByName(hostname)
- f.addCallback(lambda x: self.cb('experiment', hostname, dns_server, x)).addErrback(lambda x: self.err(pck, x))
-
- reactor.run()
-
diff --git a/old-to-be-ported-code/ooni/plugins/http_plgoo.py b/old-to-be-ported-code/ooni/plugins/http_plgoo.py
deleted file mode 100644
index 021e863..0000000
--- a/old-to-be-ported-code/ooni/plugins/http_plgoo.py
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/usr/bin/python
-
-import sys
-import re
-from twisted.internet import reactor, endpoints
-from twisted.web import client
-from ooni.plugooni import Plugoo
-from ooni.socksclient import SOCKSv4ClientProtocol, SOCKSWrapper
-
-class HttpPlugin(Plugoo):
- def __init__(self):
- self.name = ""
- self.type = ""
- self.paranoia = ""
- self.modules_to_import = []
- self.output_dir = ""
- self.buf = ''
-
- def cb(self, type, content):
- print "got %d bytes from %s" % (len(content), type) # DEBUG
- if not self.buf:
- self.buf = content
- else:
- if self.buf == content:
- print "SUCCESS"
- else:
- print "FAIL"
- reactor.stop()
-
- def endpoint(self, scheme, host, port):
- ep = None
- if scheme == 'http':
- ep = endpoints.TCP4ClientEndpoint(reactor, host, port)
- elif scheme == 'https':
- ep = endpoints.SSL4ClientEndpoint(reactor, host, port, context)
- return ep
-
- def ooni_main(self):
- # We don't have the Command object so cheating for now.
- url = 'http://check.torproject.org/'
- self.controlproxy = 'socks4a://127.0.0.1:9050'
- self.experimentalproxy = ''
-
- if not re.match("[a-zA-Z0-9]+\:\/\/[a-zA-Z0-9]+", url):
- return None
- scheme, host, port, path = client._parse(url)
-
- ctrl_dest = self.endpoint(scheme, host, port)
- if not ctrl_dest:
- raise Exception('unsupported scheme %s in %s' % (scheme, url))
- if self.controlproxy:
- _, proxy_host, proxy_port, _ = client._parse(self.controlproxy)
- control = SOCKSWrapper(reactor, proxy_host, proxy_port, ctrl_dest)
- else:
- control = ctrl_dest
- f = client.HTTPClientFactory(url)
- f.deferred.addCallback(lambda x: self.cb('control', x))
- control.connect(f)
-
- exp_dest = self.endpoint(scheme, host, port)
- if not exp_dest:
- raise Exception('unsupported scheme %s in %s' % (scheme, url))
- # FIXME: use the experiment proxy if there is one
- experiment = exp_dest
- f = client.HTTPClientFactory(url)
- f.deferred.addCallback(lambda x: self.cb('experiment', x))
- experiment.connect(f)
-
- reactor.run()
-
diff --git a/old-to-be-ported-code/ooni/plugins/marco_plgoo.py b/old-to-be-ported-code/ooni/plugins/marco_plgoo.py
deleted file mode 100644
index cb63df7..0000000
--- a/old-to-be-ported-code/ooni/plugins/marco_plgoo.py
+++ /dev/null
@@ -1,377 +0,0 @@
-#!/usr/bin/python
-# Copyright 2009 The Tor Project, Inc.
-# License at end of file.
-#
-# This tests connections to a list of Tor nodes in a given Tor consensus file
-# while also recording the certificates - it's not a perfect tool but complete
-# or even partial failure should raise alarms.
-#
-# This plugoo uses threads and as a result, it's not friendly to SIGINT signals.
-#
-
-import logging
-import socket
-import time
-import random
-import threading
-import sys
-import os
-try:
- from ooni.plugooni import Plugoo
-except:
- print "Error importing Plugoo"
-
-try:
- from ooni.common import Storage
-except:
- print "Error importing Storage"
-
-try:
- from ooni import output
-except:
- print "Error importing output"
-
-try:
- from ooni import input
-except:
- print "Error importing output"
-
-
-
-ssl = OpenSSL = None
-
-try:
- import ssl
-except ImportError:
- pass
-
-if ssl is None:
- try:
- import OpenSSL.SSL
- import OpenSSL.crypto
- except ImportError:
- pass
-
-if ssl is None and OpenSSL is None:
- if socket.ssl:
- print """Your Python is too old to have the ssl module, and you haven't
-installed pyOpenSSL. I'll try to work with what you've got, but I can't
-record certificates so well."""
- else:
- print """Your Python has no OpenSSL support. Upgrade to 2.6, install
-pyOpenSSL, or both."""
- sys.exit(1)
-
-################################################################
-
-# How many servers should we test in parallel?
-N_THREADS = 16
-
-# How long do we give individual socket operations to succeed or fail?
-# (Seconds)
-TIMEOUT = 10
-
-################################################################
-
-CONNECTING = "noconnect"
-HANDSHAKING = "nohandshake"
-OK = "ok"
-ERROR = "err"
-
-LOCK = threading.RLock()
-socket.setdefaulttimeout(TIMEOUT)
-
-def clean_pem_cert(cert):
- idx = cert.find('-----END')
- if idx > 1 and cert[idx-1] != '\n':
- cert = cert.replace('-----END','\n-----END')
- return cert
-
-def record((addr,port), state, extra=None, cert=None):
- LOCK.acquire()
- try:
- OUT.append({'addr' : addr,
- 'port' : port,
- 'state' : state,
- 'extra' : extra})
- if cert:
- CERT_OUT.append({'addr' : addr,
- 'port' : port,
- 'clean_cert' : clean_pem_cert(cert)})
- finally:
- LOCK.release()
-
-def probe(address,theCtx=None):
- sock = s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- logging.info("Opening socket to %s",address)
- try:
- s.connect(address)
- except IOError, e:
- logging.info("Error %s from socket connect.",e)
- record(address, CONNECTING, e)
- s.close()
- return
- logging.info("Socket to %s open. Launching SSL handshake.",address)
- if ssl:
- try:
- s = ssl.wrap_socket(s,cert_reqs=ssl.CERT_NONE,ca_certs=None)
- # "MARCO!"
- s.do_handshake()
- except IOError, e:
- logging.info("Error %s from ssl handshake",e)
- record(address, HANDSHAKING, e)
- s.close()
- sock.close()
- return
- cert = s.getpeercert(True)
- if cert != None:
- cert = ssl.DER_cert_to_PEM_cert(cert)
- elif OpenSSL:
- try:
- s = OpenSSL.SSL.Connection(theCtx, s)
- s.set_connect_state()
- s.setblocking(True)
- s.do_handshake()
- cert = s.get_peer_certificate()
- if cert != None:
- cert = OpenSSL.crypto.dump_certificate(
- OpenSSL.crypto.FILETYPE_PEM, cert)
- except IOError, e:
- logging.info("Error %s from OpenSSL handshake",e)
- record(address, HANDSHAKING, e)
- s.close()
- sock.close()
- return
- else:
- try:
- s = socket.ssl(s)
- s.write('a')
- cert = s.server()
- except IOError, e:
- logging.info("Error %s from socket.ssl handshake",e)
- record(address, HANDSHAKING, e)
- sock.close()
- return
-
- logging.info("SSL handshake with %s finished",address)
- # "POLO!"
- record(address,OK, cert=cert)
- if (ssl or OpenSSL):
- s.close()
- sock.close()
-
-def parseNetworkstatus(ns):
- for line in ns:
- if line.startswith('r '):
- r = line.split()
- yield (r[-3],int(r[-2]))
-
-def parseCachedDescs(cd):
- for line in cd:
- if line.startswith('router '):
- r = line.split()
- yield (r[2],int(r[3]))
-
-def worker(addrList, origLength):
- done = False
- logging.info("Launching thread.")
-
- if OpenSSL is not None:
- context = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_METHOD)
- else:
- context = None
-
- while True:
- LOCK.acquire()
- try:
- if addrList:
- print "Starting test %d/%d"%(
- 1+origLength-len(addrList),origLength)
- addr = addrList.pop()
- else:
- return
- finally:
- LOCK.release()
-
- try:
- logging.info("Launching probe for %s",addr)
- probe(addr, context)
- except Exception, e:
- logging.info("Unexpected error from %s",addr)
- record(addr, ERROR, e)
-
-def runThreaded(addrList, nThreads):
- ts = []
- origLen = len(addrList)
- for num in xrange(nThreads):
- t = threading.Thread(target=worker, args=(addrList,origLen))
- t.setName("Th#%s"%num)
- ts.append(t)
- t.start()
- for t in ts:
- logging.info("Joining thread %s",t.getName())
- t.join()
-
-def main(self, args):
- # BEGIN
- # This logic should be present in more or less all plugoos
- global OUT
- global CERT_OUT
- global OUT_DATA
- global CERT_OUT_DATA
- OUT_DATA = []
- CERT_OUT_DATA = []
-
- try:
- OUT = output.data(name=args.output.main) #open(args.output.main, 'w')
- except:
- print "No output file given. quitting..."
- return -1
-
- try:
- CERT_OUT = output.data(args.output.certificates) #open(args.output.certificates, 'w')
- except:
- print "No output cert file given. quitting..."
- return -1
-
- logging.basicConfig(format='%(asctime)s [%(levelname)s] [%(threadName)s] %(message)s',
- datefmt="%b %d %H:%M:%S",
- level=logging.INFO,
- filename=args.log)
- logging.info("============== STARTING NEW LOG")
- # END
-
- if ssl is not None:
- methodName = "ssl"
- elif OpenSSL is not None:
- methodName = "OpenSSL"
- else:
- methodName = "socket"
- logging.info("Running marco with method '%s'", methodName)
-
- addresses = []
-
- if args.input.ips:
- for fn in input.file(args.input.ips).simple():
- a, b = fn.split(":")
- addresses.append( (a,int(b)) )
-
- elif args.input.consensus:
- for fn in args:
- print fn
- for a,b in parseNetworkstatus(open(args.input.consensus)):
- addresses.append( (a,b) )
-
- if args.input.randomize:
- # Take a random permutation of the set the knuth way!
- for i in range(0, len(addresses)):
- j = random.randint(0, i)
- addresses[i], addresses[j] = addresses[j], addresses[i]
-
- if len(addresses) == 0:
- logging.error("No input source given, quiting...")
- return -1
-
- addresses = list(addresses)
-
- if not args.input.randomize:
- addresses.sort()
-
- runThreaded(addresses, N_THREADS)
-
-class MarcoPlugin(Plugoo):
- def __init__(self):
- self.name = ""
-
- self.modules = [ "logging", "socket", "time", "random", "threading", "sys",
- "OpenSSL.SSL", "OpenSSL.crypto", "os" ]
-
- self.input = Storage()
- self.input.ip = None
- try:
- c_file = os.path.expanduser("~/.tor/cached-consensus")
- open(c_file)
- self.input.consensus = c_file
- except:
- pass
-
- try:
- c_file = os.path.expanduser("~/tor/bundle/tor-browser_en-US/Data/Tor/cached-consensus")
- open(c_file)
- self.input.consensus = c_file
- except:
- pass
-
- if not self.input.consensus:
- print "Error importing consensus file"
- sys.exit(1)
-
- self.output = Storage()
- self.output.main = 'reports/marco-1.yamlooni'
- self.output.certificates = 'reports/marco_certs-1.out'
-
- # XXX This needs to be moved to a proper function
- # refactor, refactor and ... refactor!
- if os.path.exists(self.output.main):
- basedir = "/".join(self.output.main.split("/")[:-1])
- fn = self.output.main.split("/")[-1].split(".")
- ext = fn[1]
- name = fn[0].split("-")[0]
- i = fn[0].split("-")[1]
- i = int(i) + 1
- self.output.main = os.path.join(basedir, name + "-" + str(i) + "." + ext)
-
- if os.path.exists(self.output.certificates):
- basedir = "/".join(self.output.certificates.split("/")[:-1])
- fn = self.output.certificates.split("/")[-1].split(".")
- ext = fn[1]
- name = fn[0].split("-")[0]
- i = fn[0].split("-")[1]
- i = int(i) + 1
- self.output.certificates= os.path.join(basedir, name + "-" + str(i) + "." + ext)
-
- # We require for Tor to already be running or have recently run
- self.args = Storage()
- self.args.input = self.input
- self.args.output = self.output
- self.args.log = 'reports/marco.log'
-
- def ooni_main(self, cmd):
- self.args.input.randomize = cmd.randomize
- self.args.input.ips = cmd.listfile
- main(self, self.args)
-
-if __name__ == '__main__':
- if len(sys.argv) < 2:
- print >> sys.stderr, ("This script takes one or more networkstatus "
- "files as an argument.")
- self = None
- main(self, sys.argv[1:])
-
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#
-# * Neither the names of the copyright owners nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/old-to-be-ported-code/ooni/plugins/proxy_plgoo.py b/old-to-be-ported-code/ooni/plugins/proxy_plgoo.py
deleted file mode 100644
index b2b4d0f..0000000
--- a/old-to-be-ported-code/ooni/plugins/proxy_plgoo.py
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/usr/bin/python
-
-import sys
-from twisted.internet import reactor, endpoints
-from twisted.web import client
-from ooni.plugooni import Plugoo
-from ooni.socksclient import SOCKSv4ClientProtocol, SOCKSWrapper
-
-class HttpPlugin(Plugoo):
- def __init__(self):
- self.name = ""
- self.type = ""
- self.paranoia = ""
- self.modules_to_import = []
- self.output_dir = ""
- self.buf = ''
-
- def cb(self, type, content):
- print "got %d bytes from %s" % (len(content), type) # DEBUG
- if not self.buf:
- self.buf = content
- else:
- if self.buf == content:
- print "SUCCESS"
- else:
- print "FAIL"
- reactor.stop()
-
- def endpoint(self, scheme, host, port):
- ep = None
- if scheme == 'http':
- ep = endpoints.TCP4ClientEndpoint(reactor, host, port)
- elif scheme == 'https':
- from twisted.internet import ssl
- ep = endpoints.SSL4ClientEndpoint(reactor, host, port,
- ssl.ClientContextFactory())
- return ep
-
- def ooni_main(self, cmd):
- # We don't have the Command object so cheating for now.
- url = cmd.hostname
-
- # FIXME: validate that url is on the form scheme://host[:port]/path
- scheme, host, port, path = client._parse(url)
-
- ctrl_dest = self.endpoint(scheme, host, port)
- if not ctrl_dest:
- raise Exception('unsupported scheme %s in %s' % (scheme, url))
- if cmd.controlproxy:
- assert scheme != 'https', "no support for proxied https atm, sorry"
- _, proxy_host, proxy_port, _ = client._parse(cmd.controlproxy)
- control = SOCKSWrapper(reactor, proxy_host, proxy_port, ctrl_dest)
- print "proxy: ", proxy_host, proxy_port
- else:
- control = ctrl_dest
- f = client.HTTPClientFactory(url)
- f.deferred.addCallback(lambda x: self.cb('control', x))
- control.connect(f)
-
- exp_dest = self.endpoint(scheme, host, port)
- if not exp_dest:
- raise Exception('unsupported scheme %s in %s' % (scheme, url))
- # FIXME: use the experiment proxy if there is one
- experiment = exp_dest
- f = client.HTTPClientFactory(url)
- f.deferred.addCallback(lambda x: self.cb('experiment', x))
- experiment.connect(f)
-
- reactor.run()
diff --git a/old-to-be-ported-code/ooni/plugins/simple_dns_plgoo.py b/old-to-be-ported-code/ooni/plugins/simple_dns_plgoo.py
deleted file mode 100644
index 87d3684..0000000
--- a/old-to-be-ported-code/ooni/plugins/simple_dns_plgoo.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python
-#
-# DNS tampering detection module
-# by Jacob Appelbaum <jacob(a)appelbaum.net>
-#
-# This module performs DNS queries against a known good resolver and a possible
-# bad resolver. We compare every resolved name against a list of known filters
-# - if we match, we ring a bell; otherwise, we list possible filter IP
-# addresses. There is a high false positive rate for sites that are GeoIP load
-# balanced.
-#
-
-import sys
-import ooni.dnsooni
-
-from ooni.plugooni import Plugoo
-
-class DNSBulkPlugin(Plugoo):
- def __init__(self):
- self.in_ = sys.stdin
- self.out = sys.stdout
- self.randomize = True # Pass this down properly
- self.debug = False
-
- def DNS_Tests(self):
- print "DNS tampering detection for list of domains:"
- tests = self.get_tests_by_filter(("_DNS_BULK_Tests"), (ooni.dnsooni))
- self.run_tests(tests)
-
- def magic_main(self):
- self.run_plgoo_tests("_Tests")
-
- def ooni_main(self, args):
- self.magic_main()
-
diff --git a/old-to-be-ported-code/ooni/plugins/tcpcon_plgoo.py b/old-to-be-ported-code/ooni/plugins/tcpcon_plgoo.py
deleted file mode 100644
index 01dee81..0000000
--- a/old-to-be-ported-code/ooni/plugins/tcpcon_plgoo.py
+++ /dev/null
@@ -1,278 +0,0 @@
-#!/usr/bin/python
-# Copyright 2011 The Tor Project, Inc.
-# License at end of file.
-#
-# This is a modified version of the marco plugoo. Given a list of #
-# IP:port addresses, this plugoo will attempt a TCP connection with each
-# host and write the results to a .yamlooni file.
-#
-# This plugoo uses threads and as a result, it's not friendly to SIGINT signals.
-#
-
-import logging
-import socket
-import time
-import random
-import threading
-import sys
-import os
-try:
- from ooni.plugooni import Plugoo
-except:
- print "Error importing Plugoo"
-
-try:
- from ooni.common import Storage
-except:
- print "Error importing Storage"
-
-try:
- from ooni import output
-except:
- print "Error importing output"
-
-try:
- from ooni import input
-except:
- print "Error importing output"
-
-################################################################
-
-# How many servers should we test in parallel?
-N_THREADS = 16
-
-# How long do we give individual socket operations to succeed or fail?
-# (Seconds)
-TIMEOUT = 10
-
-################################################################
-
-CONNECTING = "noconnect"
-OK = "ok"
-ERROR = "err"
-
-LOCK = threading.RLock()
-socket.setdefaulttimeout(TIMEOUT)
-
-# We will want to log the IP address, the port and the state
-def record((addr,port), state, extra=None):
- LOCK.acquire()
- try:
- OUT.append({'addr' : addr,
- 'port' : port,
- 'state' : state,
- 'extra' : extra})
- finally:
- LOCK.release()
-
-# For each IP address in the list, open a socket, write to the log and
-# then close the socket
-def probe(address,theCtx=None):
- sock = s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- logging.info("Opening socket to %s",address)
- try:
- s.connect(address)
- except IOError, e:
- logging.info("Error %s from socket connect.",e)
- record(address, CONNECTING, e)
- s.close()
- return
- logging.info("Socket to %s open. Successfully launched TCP handshake.",address)
- record(address, OK)
- s.close()
-
-def parseNetworkstatus(ns):
- for line in ns:
- if line.startswith('r '):
- r = line.split()
- yield (r[-3],int(r[-2]))
-
-def parseCachedDescs(cd):
- for line in cd:
- if line.startswith('router '):
- r = line.split()
- yield (r[2],int(r[3]))
-
-def worker(addrList, origLength):
- done = False
- context = None
-
- while True:
- LOCK.acquire()
- try:
- if addrList:
- print "Starting test %d/%d"%(
- 1+origLength-len(addrList),origLength)
- addr = addrList.pop()
- else:
- return
- finally:
- LOCK.release()
-
- try:
- logging.info("Launching probe for %s",addr)
- probe(addr, context)
- except Exception, e:
- logging.info("Unexpected error from %s",addr)
- record(addr, ERROR, e)
-
-def runThreaded(addrList, nThreads):
- ts = []
- origLen = len(addrList)
- for num in xrange(nThreads):
- t = threading.Thread(target=worker, args=(addrList,origLen))
- t.setName("Th#%s"%num)
- ts.append(t)
- t.start()
- for t in ts:
- t.join()
-
-def main(self, args):
- # BEGIN
- # This logic should be present in more or less all plugoos
- global OUT
- global OUT_DATA
- OUT_DATA = []
-
- try:
- OUT = output.data(name=args.output.main) #open(args.output.main, 'w')
- except:
- print "No output file given. quitting..."
- return -1
-
- logging.basicConfig(format='%(asctime)s [%(levelname)s] [%(threadName)s] %(message)s',
- datefmt="%b %d %H:%M:%S",
- level=logging.INFO,
- filename=args.log)
- logging.info("============== STARTING NEW LOG")
- # END
-
- methodName = "socket"
- logging.info("Running tcpcon with method '%s'", methodName)
-
- addresses = []
-
- if args.input.ips:
- for fn in input.file(args.input.ips).simple():
- a, b = fn.split(":")
- addresses.append( (a,int(b)) )
-
- elif args.input.consensus:
- for fn in args:
- print fn
- for a,b in parseNetworkstatus(open(args.input.consensus)):
- addresses.append( (a,b) )
-
- if args.input.randomize:
- # Take a random permutation of the set the knuth way!
- for i in range(0, len(addresses)):
- j = random.randint(0, i)
- addresses[i], addresses[j] = addresses[j], addresses[i]
-
- if len(addresses) == 0:
- logging.error("No input source given, quiting...")
- return -1
-
- addresses = list(addresses)
-
- if not args.input.randomize:
- addresses.sort()
-
- runThreaded(addresses, N_THREADS)
-
-class MarcoPlugin(Plugoo):
- def __init__(self):
- self.name = ""
-
- self.modules = [ "logging", "socket", "time", "random", "threading", "sys",
- "os" ]
-
- self.input = Storage()
- self.input.ip = None
- try:
- c_file = os.path.expanduser("~/.tor/cached-consensus")
- open(c_file)
- self.input.consensus = c_file
- except:
- pass
-
- try:
- c_file = os.path.expanduser("~/tor/bundle/tor-browser_en-US/Data/Tor/cached-consensus")
- open(c_file)
- self.input.consensus = c_file
- except:
- pass
-
- if not self.input.consensus:
- print "Error importing consensus file"
- sys.exit(1)
-
- self.output = Storage()
- self.output.main = 'reports/tcpcon-1.yamlooni'
- self.output.certificates = 'reports/tcpcon_certs-1.out'
-
- # XXX This needs to be moved to a proper function
- # refactor, refactor and ... refactor!
- if os.path.exists(self.output.main):
- basedir = "/".join(self.output.main.split("/")[:-1])
- fn = self.output.main.split("/")[-1].split(".")
- ext = fn[1]
- name = fn[0].split("-")[0]
- i = fn[0].split("-")[1]
- i = int(i) + 1
- self.output.main = os.path.join(basedir, name + "-" + str(i) + "." + ext)
-
- if os.path.exists(self.output.certificates):
- basedir = "/".join(self.output.certificates.split("/")[:-1])
- fn = self.output.certificates.split("/")[-1].split(".")
- ext = fn[1]
- name = fn[0].split("-")[0]
- i = fn[0].split("-")[1]
- i = int(i) + 1
- self.output.certificates= os.path.join(basedir, name + "-" + str(i) + "." + ext)
-
- # We require for Tor to already be running or have recently run
- self.args = Storage()
- self.args.input = self.input
- self.args.output = self.output
- self.args.log = 'reports/tcpcon.log'
-
- def ooni_main(self, cmd):
- self.args.input.randomize = cmd.randomize
- self.args.input.ips = cmd.listfile
- main(self, self.args)
-
-if __name__ == '__main__':
- if len(sys.argv) < 2:
- print >> sys.stderr, ("This script takes one or more networkstatus "
- "files as an argument.")
- self = None
- main(self, sys.argv[1:])
-
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#
-# * Neither the names of the copyright owners nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/old-to-be-ported-code/ooni/plugins/tor.py b/old-to-be-ported-code/ooni/plugins/tor.py
deleted file mode 100644
index 0d95d4d..0000000
--- a/old-to-be-ported-code/ooni/plugins/tor.py
+++ /dev/null
@@ -1,80 +0,0 @@
-import re
-import os.path
-import signal
-import subprocess
-import socket
-import threading
-import time
-import logging
-
-from pytorctl import TorCtl
-
-torrc = os.path.join(os.getcwd(),'torrc') #os.path.join(projroot, 'globaleaks', 'tor', 'torrc')
-# hiddenservice = os.path.join(projroot, 'globaleaks', 'tor', 'hiddenservice')
-
-class ThreadProc(threading.Thread):
- def __init__(self, cmd):
- threading.Thread.__init__(self)
- self.cmd = cmd
- self.proc = None
-
- def run(self):
- print "running"
- try:
- self.proc = subprocess.Popen(self.cmd,
- shell = False, stdout = subprocess.PIPE,
- stderr = subprocess.PIPE)
-
- except OSError:
- logging.fatal('cannot execute command')
-
-class Tor:
- def __init__(self):
- self.start()
-
- def check(self):
- conn = TorCtl.connect()
- if conn != None:
- conn.close()
- return True
-
- return False
-
-
- def start(self):
- if not os.path.exists(torrc):
- raise OSError("torrc doesn't exist (%s)" % torrc)
-
- tor_cmd = ["tor", "-f", torrc]
-
- torproc = ThreadProc(tor_cmd)
- torproc.run()
-
- bootstrap_line = re.compile("Bootstrapped 100%: ")
-
- while True:
- if torproc.proc == None:
- time.sleep(1)
- continue
-
- init_line = torproc.proc.stdout.readline().strip()
-
- if not init_line:
- torproc.proc.kill()
- return False
-
- if bootstrap_line.search(init_line):
- break
-
- return True
-
- def stop(self):
- if not self.check():
- return
-
- conn = TorCtl.connect()
- if conn != None:
- conn.send_signal("SHUTDOWN")
- conn.close()
-
-t = Tor()
diff --git a/old-to-be-ported-code/ooni/plugins/torrc b/old-to-be-ported-code/ooni/plugins/torrc
deleted file mode 100644
index b9ffc80..0000000
--- a/old-to-be-ported-code/ooni/plugins/torrc
+++ /dev/null
@@ -1,9 +0,0 @@
-SocksPort 9050
-ControlPort 9051
-VirtualAddrNetwork 10.23.47.0/10
-AutomapHostsOnResolve 1
-TransPort 9040
-TransListenAddress 127.0.0.1
-DNSPort 5353
-DNSListenAddress 127.0.0.1
-
diff --git a/old-to-be-ported-code/ooni/plugooni.py b/old-to-be-ported-code/ooni/plugooni.py
deleted file mode 100644
index 17f17b3..0000000
--- a/old-to-be-ported-code/ooni/plugooni.py
+++ /dev/null
@@ -1,106 +0,0 @@
-#!/usr/bin/env python
-#
-# Plugooni, ooni plugin module for loading plgoo files.
-# by Jacob Appelbaum <jacob(a)appelbaum.net>
-# Arturo Filasto' <art(a)fuffa.org>
-
-import sys
-import os
-
-import imp, pkgutil, inspect
-
-class Plugoo:
- def __init__(self, name, plugin_type, paranoia, author):
- self.name = name
- self.author = author
- self.type = plugin_type
- self.paranoia = paranoia
-
- """
- Expect a tuple of strings in 'filters' and a tuple of ooni 'plugins'.
- Return a list of (plugin, function) tuples that match 'filter' in 'plugins'.
- """
- def get_tests_by_filter(self, filters, plugins):
- ret_functions = []
-
- for plugin in plugins:
- for function_ptr in dir(plugin):
- if function_ptr.endswith(filters):
- ret_functions.append((plugin,function_ptr))
- return ret_functions
-
- """
- Expect a list of (plugin, function) tuples that must be ran, and three strings 'clean'
- 'dirty' and 'failed'.
- Run the tests and print 'clean','dirty' or 'failed' according to the test result.
- """
- def run_tests(self, tests, clean="clean", dirty="dirty", failed="failed"):
- for test in tests:
- filter_result = getattr(test[0], test[1])(self)
- if filter_result == True:
- print test[1] + ": " + clean
- elif filter_result == None:
- print test[1] + ": " + failed
- else:
- print test[1] + ": " + dirty
-
- """
- Find all the tests belonging to plgoo 'self' and run them.
- We know the tests when we see them because they end in 'filter'.
- """
- def run_plgoo_tests(self, filter):
- for function_ptr in dir(self):
- if function_ptr.endswith(filter):
- getattr(self, function_ptr)()
-
-PLUGIN_PATHS = [os.path.join(os.getcwd(), "ooni", "plugins")]
-RESERVED_NAMES = [ "skel_plgoo" ]
-
-class Plugooni():
- def __init__(self, args):
- self.in_ = sys.stdin
- self.out = sys.stdout
- self.debug = False
- self.loadall = True
- self.plugin_name = args.plugin_name
- self.listfile = args.listfile
-
- self.plgoo_found = False
-
- # Print all the plugoons to stdout.
- def list_plugoons(self):
- print "Plugooni list:"
- for loader, name, ispkg in pkgutil.iter_modules(PLUGIN_PATHS):
- if name not in RESERVED_NAMES:
- print "\t%s" %(name.split("_")[0])
-
- # Return name of the plgoo class of a plugin.
- # We know because it always ends with "Plugin".
- def get_plgoo_class(self,plugin):
- for memb_name, memb in inspect.getmembers(plugin, inspect.isclass):
- if memb.__name__.endswith("Plugin"):
- return memb
-
- # This function is responsible for loading and running the plugoons
- # the user wants to run.
- def run(self, command_object):
- print "Plugooni: the ooni plgoo plugin module loader"
-
- # iterate all modules
- for loader, name, ispkg in pkgutil.iter_modules(PLUGIN_PATHS):
- # see if this module should be loaded
- if (self.plugin_name == "all") or (name == self.plugin_name+"_plgoo"):
- self.plgoo_found = True # we found at least one plgoo!
-
- file, pathname, desc = imp.find_module(name, PLUGIN_PATHS)
- # load module
- plugin = imp.load_module(name, file, pathname, desc)
- # instantiate plgoo class and call its ooni_main()
- self.get_plgoo_class(plugin)().ooni_main(command_object)
-
- # if we couldn't find the plgoo; whine to the user
- if self.plgoo_found is False:
- print "Plugooni could not find plugin '%s'!" %(self.plugin_name)
-
-if __name__ == '__main__':
- self.main()
diff --git a/old-to-be-ported-code/ooni/transparenthttp.py b/old-to-be-ported-code/ooni/transparenthttp.py
deleted file mode 100644
index 19cc5d0..0000000
--- a/old-to-be-ported-code/ooni/transparenthttp.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-#
-# Captive Portal Detection With Multi-Vendor Emulation
-# by Jacob Appelbaum <jacob(a)appelbaum.net>
-#
-# This module performs multiple tests that match specific vendor
-# mitm proxies
-
-import sys
-import ooni.http
-import ooni.report
-
-class TransparentHTTPProxy():
- def __init__(self, args):
- self.in_ = sys.stdin
- self.out = sys.stdout
- self.debug = False
- self.logger = ooni.report.Log().logger
-
- def TransparentHTTPProxy_Tests(self):
- print "Transparent HTTP Proxy:"
- filter_name = "_TransparentHTTP_Tests"
- tests = [ooni.http]
- for test in tests:
- for function_ptr in dir(test):
- if function_ptr.endswith(filter_name):
- filter_result = getattr(test, function_ptr)(self)
- if filter_result == True:
- print function_ptr + " thinks the network is clean"
- elif filter_result == None:
- print function_ptr + " failed"
- else:
- print function_ptr + " thinks the network is dirty"
-
- def main(self):
- for function_ptr in dir(self):
- if function_ptr.endswith("_Tests"):
- getattr(self, function_ptr)()
-
-if __name__ == '__main__':
- self.main()
1
0
[ooni-probe/master] * Moved the /assets directory to /lists. I find that seeing a directory named
by isis@torproject.org 03 Nov '12
by isis@torproject.org 03 Nov '12
03 Nov '12
commit 7a34cb4bb9dd2d0f6b2d6deadb459501d72cecf5
Author: Isis Lovecruft <isis(a)torproject.org>
Date: Sat Nov 3 01:20:51 2012 +0000
* Moved the /assets directory to /lists. I find that seeing a directory named
"assets" does not immediately inform me that it is full of txt files with
lists of keywords, servers, hostnames and such to use as test inputs. Not
that "lists" is a final decision, but I think it's a little better. Feel
free to come up with something better! :)
---
lists/captive_portal_tests.txt.good | 4 +
lists/cctld.txt | 511 ++++++++++++++++++++++
lists/dns_servers.txt.bak | 6 +
lists/dns_servers.txt.bak2 | 1 +
lists/example_exp_list.txt | 3 +
lists/major_isp_dns_servers.txt | 796 +++++++++++++++++++++++++++++++++++
lists/short_hostname_list.txt | 7 +
lists/tld-list-cc.txt | 511 ++++++++++++++++++++++
lists/tld-list-mozilla.txt | 5 +
lists/top-1m.txt.bak2 | 11 +
10 files changed, 1855 insertions(+), 0 deletions(-)
diff --git a/lists/captive_portal_tests.txt.good b/lists/captive_portal_tests.txt.good
new file mode 100644
index 0000000..1bd016f
--- /dev/null
+++ b/lists/captive_portal_tests.txt.good
@@ -0,0 +1,4 @@
+
+http://ooni.nu, Open Observatory of Network Interference, 200
+http://www.patternsinthevoid.net/2CDB8B35pub.asc, mQINBE5qkHABEADVnasCm9w9hUff1E4iKnzcAdp4lx6XU5USmYdwKg2RQt2VFqWQ, 200
+http://www.google.com, Search the world's information, 200
diff --git a/lists/cctld.txt b/lists/cctld.txt
new file mode 100644
index 0000000..57e0cc8
--- /dev/null
+++ b/lists/cctld.txt
@@ -0,0 +1,511 @@
+.ac = Ascension Island
+
+.ad = Andorra
+
+.ae = United Arab Emirates
+
+.af = Afghanistan
+
+.ag = Antigua and Barbuda
+
+.ai = Anguilla
+
+.al = Albania
+
+.am = Armenia
+
+.an = Netherlands Antilles
+
+.ao = Angola
+
+.aq = Antarctica - no registrar
+
+.ar = Argentina
+
+.as = American Samoa
+
+.at = Austria
+
+.au = Australia
+
+.aw = Aruba - no registrar
+
+.ax = Aland Islands
+
+.az = Azerbaijan - no registrar
+
+.ba = Bosnia and Herzegovina
+
+.bb = Barbados
+
+.bd = Bangladesh - no registrar
+
+.be = Belgium
+
+.bf = Burkina Faso - no registrar
+
+.bg = Bulgaria
+
+.bh = Bahrain
+
+.bi = Burundi
+
+.bj = Benin ... (little info) DETAILS
+
+.bm = Bermuda
+
+.bn = Brunei Darussalam
+
+.bo = Bolivia
+
+.br = Brazil
+
+.bs = Bahamas
+
+.bt = Bhutan
+
+.bv = Bouvet Island - not in use
+
+.bw = Botswana - no registrar
+
+.by = Belarus
+
+.bz = Belize
+
+.ca = Canada
+
+.cc = Cocos (Keeling) Islands
+
+.cd = The Democratic Republic of the Congo
+
+.cf = Central African Republic - no registrar
+
+.cg = Republic of Congo
+
+.ch = Switzerland
+
+.ci = Cote d'Ivoire
+
+.ck = Cook Islands
+
+.cl = Chile
+
+.cm = Cameroon - no registrar - wildcarded
+
+.cn = China
+
+.co = Colombia
+
+.cr = Costa Rica
+
+.cs = (former) Serbia and Montenegro - no registrar - see: .me
+(.cs was also formerly the ISO_3166-1 code for Czechoslovakia, now .cs is closed.)
+
+.cu = Cuba - no registrar
+
+.cv = Cape Verde - no registrar
+
+.cx = Christmas Island
+
+.cy = Cyprus
+
+.cz = Czech Republic
+
+.dd = East Germany (obsolete)
+
+.de = Germany
+
+.dj = Djibouti - no information
+
+.dk = Denmark
+
+.dm = Dominica
+
+.do = Dominican Republic
+
+.dz = Algeria - no registrar
+
+.ec = Ecuador
+
+.ee = Estonia
+
+.eg = Egypt - DETAILS
+
+.eh = Western Sahara - no registrar
+
+.er = Eritrea - no registrar
+
+.es = Spain
+
+.et = Ethiopia
+
+.eu = European Union - DETAILS
+
+.fi = Finland
+
+.fj = Fiji
+
+.fk = Falkland Islands (Malvinas)
+
+.fm = Micronesia, Federal State of
+
+.fo = Faroe Islands
+
+.fr = France
+
+.ga = Gabon - no registrar
+
+.gb = Great Britain (United Kingdom) - reserved, see .uk
+
+.gd = Grenada
+
+.ge = Georgia
+
+.gf = French Guiana
+
+.gg = Guernsey
+
+.gh = Ghana
+
+.gi = Gibraltar
+
+.gl = Greenland
+
+.gm = Gambia
+
+.gn = Guinea
+
+.gp = Guadeloupe - no information
+
+.gq = Equatorial Guinea - no information
+
+.gr = Greece
+
+.gs = South Georgia and the
+South Sandwich Islands
+
+.gt = Guatemala
+
+.gu = Guam
+
+.gw = Guinea-Bissau - no registrar
+
+.gy = Guyana - no registrar
+
+.hk = Hong Kong
+
+.hm = Heard and McDonald Islands
+
+.hn = Honduras
+
+.hr = Croatia/Hrvatska
+
+.ht = Haiti - no registrar
+
+.hu = Hungary
+
+.id = Indonesia - no information
+
+.ie = Ireland
+
+.il = Israel
+
+.im = Isle of Man
+
+.in = India
+
+.io = British Indian Ocean Territory
+
+.iq = Iraq - no registrar
+
+.ir = Islamic Republic of Iran
+
+.is = Iceland
+
+.it = Italy
+
+.je = Jersey
+
+.jm = Jamaica - no registrar
+
+.jo = Jordan
+
+.jp = Japan
+
+.ke = Kenya
+
+.kg = Kyrgyzstan - no registrar
+
+.kh = Cambodia
+
+.ki = Kiribati
+
+.km = Comoros
+
+.kn = Saint Kitts and Nevis - no registrar
+
+.kp = Democratic People's Republic of Korea
+(North) - no registrar
+
+.kr = Republic of Korea (South)
+
+.kw = Kuwait - no registrar
+
+.ky = Cayman Islands
+
+.kz = Kazakhstan
+
+.la = Lao People's Democratic Republic (Laos)
+... DETAILS
+
+.lb = Lebanon
+
+.lc = Saint Lucia
+
+.li = Liechtenstein
+
+.lk = Sri Lanka
+
+.lr = Liberia
+
+.ls = Lesotho - no registrar
+
+.lt = Lithuania
+
+.lu = Luxembourg
+
+.lv = Latvia
+
+.ly = Libyan Arab Jamahiriya (Libya)
+
+.ma = Morocco
+
+.mc = Monaco
+
+.md = Moldova
+
+.me = Montenegro
+
+.mg = Madagascar
+
+.mh = Marshall Islands
+
+.mk = Macedonia
+
+.ml = Mali - no information
+
+.mm = Myanmar (formerly Burma) - no registrar
+
+.mn = Mongolia
+
+.mo = Macau
+
+.mp = Northern Mariana Islands
+
+.mq = Martinique - no information
+
+.mr = Mauritania
+
+.ms = Montserrat
+
+.mt = Malta
+
+.mu = Mauritius
+
+.mv = Maldives - no registrar
+
+.mw = Malawi
+
+.mx = Mexico
+
+.my = Malaysia
+
+.mz = Mozambique - no registrar
+
+.na = Namibia
+
+.nc = New Caledonia
+
+.ne = Niger - no information
+
+.nf = Norfolk Island
+
+.ng = Nigeria
+
+.ni = Nicaragua
+
+.nl = Netherlands
+
+.no = Norway
+
+.np = Nepal
+
+.nr = Nauru
+
+.nu = Niue
+
+.nz = New Zealand
+
+.om = Oman - Omantel.net.om not functioning
+
+.pa = Panama
+
+.pe = Peru
+
+.pf = French Polynesia - no registrar
+
+.pg = Papua New Guinea - no registrar
+
+.ph = Philippines
+
+.pk = Pakistan
+
+.pl = Poland
+
+.pm = Saint Pierre and Miquelon - not available
+
+.pn = Pitcairn Island
+
+.pr = Puerto Rico
+
+.ps = Palestinian Territories
+
+.pt = Portugal
+
+.pw = Palau
+
+.py = Paraguay
+
+.qa = Qatar
+
+.re = Reunion Island
+
+.ro = Romania
+
+.rs = Serbia - no registrar
+
+.ru = Russian Federation
+
+.rw = Rwanda
+
+.sa = Saudi Arabia
+
+.sb = Solomon Islands
+
+.sc = Seychelles
+
+.sd = Sudan
+
+.se = Sweden
+
+.sg = Singapore
+
+.sh = Saint Helena
+
+.si = Slovenia
+
+.sj = Svalbard and Jan Mayen Islands - not in use
+
+.sk = Slovak Republic
+
+.sl = Sierra Leone
+
+.sm = San Marino
+
+.sn = Senegal - no registrar
+
+.so = Somalia - no registrar
+
+.sr = Suriname
+
+.st = Sao Tome and Principe
+
+.su = Soviet Union
+
+.sv = El Salvador
+
+.sy = Syrian Arab Republic
+
+.sz = Swaziland
+
+.tc = Turks and Caicos Islands - no registrar
+
+.td = Chad - no registrar
+
+.tf = French Southern Territories - no registrar
+
+.tg = Togo
+
+.th = Thailand
+
+.tj = Tajikistan
+
+.tk = Tokelau
+
+.tl = Timor-Leste
+
+.tm = Turkmenistan
+
+.tn = Tunisia
+
+.to = Tonga
+
+.tp = East Timor - Closed. See: Timor-Leste
+
+.tr = Turkey
+
+.tt = Trinidad and Tobago
+
+.tv = Tuvalu
+
+.tw = Taiwan
+
+.tz = Tanzania
+
+.ua = Ukraine
+
+.ug = Uganda
+
+.uk = United Kingdom
+
+.um = United States Minor Outlying Islands
+- Withdrawn, no domains exist.
+
+.us = United States (USA)
+
+.uy = Uruguay
+
+.uz = Uzbekistan
+
+.va = Holy See (Vatican City State)- no registrar
+
+.vc = Saint Vincent and the Grenadines
+
+.ve = Venezuela
+
+.vg = British Virgin Islands
+
+.vi = U.S. Virgin Islands
+
+.vn = Vietnam
+
+.vu = Vanuatu
+
+.wf = Wallis and Futuna Islands - no registrar
+
+.ws = Western Samoa
+
+.ye = Yemen - no registrar
+
+.yt = Mayotte - no registrar
+
+.yu = Yugoslavia Withdrawn in favor of .me and .rs
+
+.za = South Africa
+
+.zm = Zambia - no registrar
+
+.zr = Zaire - Obsolete
+now: The Democratic Republic of the Congo (.cd)
+
+.zw = Zimbabwe - no registrar
diff --git a/lists/dns_servers.txt.bak b/lists/dns_servers.txt.bak
new file mode 100644
index 0000000..844e8d5
--- /dev/null
+++ b/lists/dns_servers.txt.bak
@@ -0,0 +1,6 @@
+209.244.0.3
+209.244.0.4
+208.67.222.222
+208.67.220.220
+156.154.70.1
+156.154.71.1
diff --git a/lists/dns_servers.txt.bak2 b/lists/dns_servers.txt.bak2
new file mode 100644
index 0000000..0c4b6f6
--- /dev/null
+++ b/lists/dns_servers.txt.bak2
@@ -0,0 +1 @@
+192.168.1.1
diff --git a/lists/example_exp_list.txt b/lists/example_exp_list.txt
new file mode 100644
index 0000000..42ab976
--- /dev/null
+++ b/lists/example_exp_list.txt
@@ -0,0 +1,3 @@
+86.59.30.36
+38.229.72.14
+38.229.72.16
diff --git a/lists/major_isp_dns_servers.txt b/lists/major_isp_dns_servers.txt
new file mode 100644
index 0000000..36b8098
--- /dev/null
+++ b/lists/major_isp_dns_servers.txt
@@ -0,0 +1,796 @@
+######################################
+## ISP DNS SERVERS BY COUNTRY
+######################################
+## USA
+######################################
+##
+## AT&T
+68.94.156.1
+68.94.157.1
+##
+## ACS Alaska
+209.193.4.7
+209.112.160.2
+##
+## AOL
+205.188.146.145
+##
+## Century Link
+207.14.235.234
+67.238.98.162
+74.4.19.187
+##
+## Charter
+24.296.64.53
+##
+## Cincinnati Bell, ZoomTown
+216.68.4.10
+216.68.5.10
+##
+## Cincinnati Bell, Fuze
+216.68.1.100
+216.68.2.100
+##
+## Comcast, General DNS Servers
+## West Coast
+68.87.85.98
+2001:558:1004:6:68:87:85:98
+## East Coast
+68.87.64.146
+2001:558:1002:B:68:87:64:146
+##
+## Comcast, Albuquerque
+68.87.85.98
+68.87.69.146
+2001:558:1004:6:68:87:85:98
+2001:558:100C:D:68:87:69:146
+##
+## Comcast, Atlanta
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Augusta
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Battle Creek
+68.87.77.130
+68.87.72.130
+2001:558:1016:C:68:87:77:130
+2001:558:100E:4:68:87:72:130
+##
+## Comcast, Charleston
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Chattanooga
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Chesterfield
+68.87.73.242
+68.87.71.226
+2001:558:1010:8:68:87:73:242
+2001:558:1000:E:68:87:71:226
+##
+## Comcast, Chicago
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Colorado
+68.87.85.98
+68.87.69.146
+2001:558:1004:6:68:87:85:98
+2001:558:100C:D:68:87:69:146
+##
+## Comcast, Connecticut
+68.87.71.226
+68.87.73.242
+2001:558:1000:E:68:87:71:226
+2001:558:1010:8:68:87:73:242
+##
+## Comcast, Dallas
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, East Tennessee
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Elyria
+68.87.75.194
+68.87.64.146
+2001:558:1001:C:68:87:75:194
+2001:558:1002:B:68:87:64:146
+##
+## Comcast, Fort Wayne
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Fresno
+68.87.76.178
+68.87.78.130
+2001:558:1014:F:68:87:76:178
+2001:558:1018:6:68:87:78:130
+##
+## Comcast, Hattiesburg-Laurel
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Huntsville
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Illinois
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Independence
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Indianapolis
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Jacksonville
+68.87.74.162
+68.87.68.162
+2001:558:1012:6:68:87:74:162
+2001:558:100A:4:68:87:68:162
+##
+## Comcast, Knoxville
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Lake County
+68.87.74.162
+68.87.68.162
+2001:558:1012:6:68:87:74:162
+2001:558:100A:4:68:87:68:162
+##
+## Comcast, Little Rock
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Los Angeles
+68.87.76.178
+68.87.78.130
+2001:558:1014:F:68:87:76:178
+2001:558:1018:6:68:87:78:130
+##
+## Comcast, Massachusetts
+68.87.71.226
+68.87.73.242
+2001:558:1000:E:68:87:71:226
+2001:558:1010:8:68:87:73:242
+##
+## Comcast, Meridian
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+## Comcast, Miami
+68.87.74.162
+68.87.68.162
+2001:558:1012:6:68:87:74:162
+2001:558:100A:4:68:87:68:162
+##
+## Comcast, Michigan
+68.87.77.130
+68.87.72.130
+2001:558:1016:C:68:87:77:130
+2001:558:100E:4:68:87:72:130
+## Comcast, Minnesota
+68.87.77.130
+68.87.72.130
+2001:558:1016:C:68:87:77:130
+2001:558:100E:4:68:87:72:130
+##
+## Comcast, Mobile
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Muncie
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Naples
+68.87.74.162
+68.87.68.162
+2001:558:1012:6:68:87:74:162
+2001:558:100A:4:68:87:68:162
+##
+## Comcast, Nashville
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, New England
+68.87.71.226
+68.87.73.242
+2001:558:1000:E:68:87:71:226
+2001:558:1010:8:68:87:73:242
+##
+## Comcast, Olathe
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Oregon
+68.87.69.146
+68.87.85.98
+2001:558:100C:D:68:87:69:146
+2001:558:1004:6:68:87:85:98
+##
+## Comcast, Paducah
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Panama City
+68.87.74.162
+68.87.68.162
+2001:558:1012:6:68:87:74:162
+2001:558:100A:4:68:87:68:162
+##
+## Comcast, Pennsylvania
+68.87.75.194
+68.87.64.146
+2001:558:1001:C:68:87:75:194
+2001:558:1002:B:68:87:64:146
+##
+## Comcast, Philadelphia
+68.87.64.146
+68.87.75.194
+2001:558:1002:B:68:87:64:146
+2001:558:1001:C:68:87:75:194
+##
+## Comcast, Pima
+68.87.85.98
+68.87.69.146
+2001:558:1004:6:68:87:85:98
+2001:558:100C:D:68:87:69:146
+##
+## Comcast, Richmond
+68.87.73.242
+68.87.71.226
+2001:558:1010:8:68:87:73:242
+2001:558:1000:E:68:87:71:226
+##
+## Comcast, Sacramento
+68.87.76.178
+68.87.78.130
+2001:558:1014:F:68:87:76:178
+2001:558:1018:6:68:87:78:130
+##
+## Comcast, San Francisco Bay Area
+68.87.76.178
+68.87.78.130
+2001:558:1014:F:68:87:76:178
+2001:558:1018:6:68:87:78:130
+##
+## Comcast, Savannah
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, South Bend
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Spokane
+68.87.69.146
+68.87.85.98
+2001:558:100C:D:68:87:69:146
+2001:558:1004:6:68:87:85:98
+##
+## Comcast, Stockton
+68.87.76.178
+68.87.78.130
+2001:558:1014:F:68:87:76:178
+2001:558:1018:6:68:87:78:130
+##
+## Comcast, Tallahassee
+68.87.74.162
+68.87.68.162
+2001:558:1012:6:68:87:74:162
+2001:558:100A:4:68:87:68:162
+##
+## Comcast, Texas
+68.87.85.98
+68.87.69.146
+2001:558:1004:6:68:87:85:98
+2001:558:100C:D:68:87:69:146
+##
+## Comcast, Tuscaloosa
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Utah
+68.87.85.98
+68.87.69.146
+2001:558:1004:6:68:87:85:98
+2001:558:100C:D:68:87:69:146
+##
+## Comcast, Washington
+68.87.69.146
+68.87.85.98
+2001:558:100C:D:68:87:69:146
+2001:558:1004:6:68:87:85:98
+##
+## Comcast, Washington DC
+68.87.73.242
+68.87.71.226
+2001:558:1010:8:68:87:73:242
+2001:558:1000:E:68:87:71:226
+##
+## Comcast, West Florida
+68.87.74.162
+68.87.68.162
+2001:558:1012:6:68:87:74:162
+2001:558:100A:4:68:87:68:162
+##
+## Earthlink
+207.69.188.185
+207.69.188.186
+207.69.188.187
+##
+############################
+## UK
+############################
+##
+## AAISP
+217.169.20.20
+217.169.20.21
+2001:8b0::2020
+2001:8b0::2021
+##
+## AOL Broadband
+64.12.51.132
+149.174.221.8
+205.188.157.232
+205.188.146.145
+##
+## BE Unlimited
+87.194.0.51
+87.194.0.52
+87.194.0.66
+87.194.0.67
+##
+## BT Broadband
+62.6.40.178
+62.6.40.162
+194.72.9.38
+194.72.9.34
+194.72.0.98
+194.72.0.114
+194.74.65.68
+194.74.65.69
+##
+## Bulldog Broadband North
+212.158.248.5
+212.158.248.6
+##
+## Bulldog Broadband South
+83.146.21.5
+83.146.21.6
+##
+## Bytel
+80.76.204.35
+80.76.200.69
+##
+## Clara.net
+195.8.69.7
+195.8.69.12
+##
+## Datanet
+80.68.34.6
+77.241.177.2
+80.68.34.8
+##
+## Demon Internet
+158.152.1.58
+158.152.1.43
+##
+## Eclipse Internet
+212.104.130.9
+212.104.130.65
+##
+## Entanet
+195.74.102.146
+195.74.102.147
+##
+## Exa Networks
+82.219.4.24
+82.219.4.25
+##
+## Fast
+78.143.192.10
+78.143.192.20
+##
+## Freedom 2 Surf
+194.106.56.6
+194.106.33.42
+##
+## IDNet
+212.69.36.3
+212.69.36.2
+212.69.40.2
+##
+## Karoo
+212.50.160.100
+213.249.130.100
+##
+## Madasafish
+80.189.94.2
+80.189.92.2
+##
+## Merula
+217.146.97.10
+217.146.105.2
+##
+## Metronet
+213.162.97.65
+213.162.97.66
+##
+## Namesco
+195.7.224.57
+195.7.224.143
+##
+## NewNet
+212.87.64.10
+212.87.64.11
+##
+## Nildram
+213.208.106.212
+213.208.106.213
+##
+## O2
+87.194.0.51
+87.194.0.52
+87.194.0.66
+87.194.0.67
+##
+## Onetel
+212.67.96.129
+212.67.96.130
+##
+## Onyx
+194.176.65.5
+195.97.231.31
+##
+## Oosha
+213.190.161.254
+213.190.161.250
+213.190.160.9
+##
+## Orange
+195.92.195.94
+195.92.195.95
+##
+## Pipex
+62.241.160.200
+158.43.240.4
+212.74.112.66
+212.74.112.67
+##
+## PlusNet
+212.159.13.49
+212.159.13.50
+212.159.6.9
+212.159.6.10
+##
+## Powernet
+195.60.0.1
+195.60.0.5
+##
+## Prodigy
+198.83.19.241
+198.83.19.244
+207.115.59.241
+207.115.59.244
+##
+## SAQ
+195.2.130.209
+195.2.156.67
+##
+## Scotnet
+217.16.223.30
+217.16.223.31
+##
+## Sky Broadband
+87.86.189.16
+87.86.189.17
+195.40.1.36
+##
+## Skymarket
+212.84.173.66
+212.84.173.82
+##
+## Supanet
+213.40.66.126
+213.40.130.126
+##
+## TalkTalk
+62.24.199.13
+62.24.199.23
+62.24.128.18
+62.24.128.17
+##
+## Tesco
+194.168.4.100
+194.168.8.100
+##
+## Timewarp
+217.149.108.10
+217.149.108.11
+##
+## Timico
+195.54.225.10
+195.54.226.10
+##
+## Tiscali
+212.74.112.66
+212.74.112.67
+80.255.252.50
+80.255.252.58
+##
+## Topletter
+77.95.114.100
+77.95.112.1
+##
+## UK Online
+212.135.1.36
+195.40.1.36
+##
+## Utility Warehouse
+62.24.128.17
+62.24.128.18
+##
+## UTV Internet
+194.46.192.141
+194.46.192.142
+##
+## Virgin Media
+194.168.4.100
+194.168.8.100
+##
+## VISPA
+62.24.228.9
+62.24.228.10
+##
+## Zen Internet
+212.23.3.100
+212.23.6.100
+##
+####################################
+## NEW ZEALAND
+####################################
+##
+## Xtra
+202.27.158.40
+202.27.156.72
+##
+####################################
+## AUSTRALIA
+####################################
+##
+## AANet, Victoria
+203.24.100.125
+203.123.94.40
+##
+## AANet, South Australia
+203.24.100.125
+203.123.69.15
+##
+## AANet, Western Australia
+203.24.100.125
+202.76.136.40
+##
+## AANet, Queensland
+203.24.100.125
+202.76.170.40
+##
+## AANet, New South Wales
+203.24.100.125
+203.123.69.15
+##
+## AAPT, New South Wales
+192.189.54.33
+203.8.183.1
+##
+## AAPT, Victoria
+192.189.54.17
+203.8.183.1
+##
+## AAPT, Queensland
+192.189.54.33
+203.8.183.1
+##
+## AAPT, Tasmania
+192.189.54.17
+203.8.183.1
+##
+## AAPT, Australian Capital Territory
+192.189.54.33
+203.8.183.1
+##
+## AAPT, South Australia
+192.189.54.17
+203.8.183.1
+##
+## AAPT, Northern Territory
+192.189.54.17
+203.8.183.1
+##
+## AAPT, Western Australia
+192.189.54.17
+203.8.183.1
+##
+## Adam
+122.49.191.252
+122.49.191.253
+##
+## Amnet
+203.161.127.1
+203.153.224.42
+##
+## Comcen
+203.23.236.66
+203.23.236.69
+##
+## Dodo
+203.220.32.121
+203.220.32.122
+203.220.32.123
+##
+## Exetel
+220.233.0.4
+220.233.0.3
+##
+## iiNet
+203.0.178.191
+203.0.178.191
+##
+## Internode
+192.231.203.132
+192.231.203.3
+2001:44b8:1::1
+2001:44b8:2::2
+##
+## iPrimus, New South Wales
+203.134.64.66
+203.134.65.66
+##
+## iPrimus, Victoria
+203.134.24.70
+203.134.26.70
+##
+## iPrimus, Queensland
+203.134.12.90
+203.134.102.90
+##
+## iPrimus, Western Australia
+203.134.17.90
+211.26.25.90
+##
+## Netspace
+210.15.254.240
+210.15.254.241
+##
+## Optus
+211.29.132.12
+198.142.0.51
+##
+## People Telecom, New South Wales
+202.154.123.97
+218.214.227.3
+##
+## People Telecom, Northern Territory
+202.154.92.5
+218.214.228.97
+##
+## People Telecom, Queensland
+218.214.227.3
+202.154.123.97
+##
+## People Telecom, South Australia
+218.214.228.97
+218.214.17.1
+##
+## People Telecom, Victoria
+218.214.17.1
+218.214.228.97
+##
+## People Telecom, Western Australia
+202.154.92.5
+218.214.228.97
+##
+## Spin Internet
+203.23.236.66
+203.23.236.69
+##
+## Telstra BigPond, New South Wales
+61.9.194.49
+61.9.195.193
+##
+## Telstra BigPond, Victoria
+61.9.133.193
+61.9.134.49
+##
+## Telstra BigPond, Queensland
+61.9.211.33
+61.9.211.1
+##
+## Telstra BigPond, Tasmania
+61.9.188.33
+61.9.134.49
+##
+## Telstra BigPond, Australian Capital Territory
+61.9.207.1
+61.9.195.193
+##
+## Telstra BigPond, South Australia
+61.9.226.33
+61.9.194.49
+##
+## Telstra BigPond, Northern Territory
+61.9.226.33
+61.9.194.49
+##
+## Telstra BigPond, Western Australia
+61.9.242.33
+61.9.226.33
+##
+## TPG
+203.12.160.35
+203.12.160.36
+203.12.160.37
+##
+## Westnet
+203.21.20.20
+203.10.1.9
+########################################
diff --git a/lists/short_hostname_list.txt b/lists/short_hostname_list.txt
new file mode 100644
index 0000000..f13c702
--- /dev/null
+++ b/lists/short_hostname_list.txt
@@ -0,0 +1,7 @@
+torproject.org
+google.com
+ooni.nu
+torrentz.eu
+anarchyplanet.org
+riseup.net
+indymedia.org
diff --git a/lists/tld-list-cc.txt b/lists/tld-list-cc.txt
new file mode 100644
index 0000000..57e0cc8
--- /dev/null
+++ b/lists/tld-list-cc.txt
@@ -0,0 +1,511 @@
+.ac = Ascension Island
+
+.ad = Andorra
+
+.ae = United Arab Emirates
+
+.af = Afghanistan
+
+.ag = Antigua and Barbuda
+
+.ai = Anguilla
+
+.al = Albania
+
+.am = Armenia
+
+.an = Netherlands Antilles
+
+.ao = Angola
+
+.aq = Antarctica - no registrar
+
+.ar = Argentina
+
+.as = American Samoa
+
+.at = Austria
+
+.au = Australia
+
+.aw = Aruba - no registrar
+
+.ax = Aland Islands
+
+.az = Azerbaijan - no registrar
+
+.ba = Bosnia and Herzegovina
+
+.bb = Barbados
+
+.bd = Bangladesh - no registrar
+
+.be = Belgium
+
+.bf = Burkina Faso - no registrar
+
+.bg = Bulgaria
+
+.bh = Bahrain
+
+.bi = Burundi
+
+.bj = Benin ... (little info) DETAILS
+
+.bm = Bermuda
+
+.bn = Brunei Darussalam
+
+.bo = Bolivia
+
+.br = Brazil
+
+.bs = Bahamas
+
+.bt = Bhutan
+
+.bv = Bouvet Island - not in use
+
+.bw = Botswana - no registrar
+
+.by = Belarus
+
+.bz = Belize
+
+.ca = Canada
+
+.cc = Cocos (Keeling) Islands
+
+.cd = The Democratic Republic of the Congo
+
+.cf = Central African Republic - no registrar
+
+.cg = Republic of Congo
+
+.ch = Switzerland
+
+.ci = Cote d'Ivoire
+
+.ck = Cook Islands
+
+.cl = Chile
+
+.cm = Cameroon - no registrar - wildcarded
+
+.cn = China
+
+.co = Colombia
+
+.cr = Costa Rica
+
+.cs = (former) Serbia and Montenegro - no registrar - see: .me
+(.cs was also formerly the ISO_3166-1 code for Czechoslovakia, now .cs is closed.)
+
+.cu = Cuba - no registrar
+
+.cv = Cape Verde - no registrar
+
+.cx = Christmas Island
+
+.cy = Cyprus
+
+.cz = Czech Republic
+
+.dd = East Germany (obsolete)
+
+.de = Germany
+
+.dj = Djibouti - no information
+
+.dk = Denmark
+
+.dm = Dominica
+
+.do = Dominican Republic
+
+.dz = Algeria - no registrar
+
+.ec = Ecuador
+
+.ee = Estonia
+
+.eg = Egypt - DETAILS
+
+.eh = Western Sahara - no registrar
+
+.er = Eritrea - no registrar
+
+.es = Spain
+
+.et = Ethiopia
+
+.eu = European Union - DETAILS
+
+.fi = Finland
+
+.fj = Fiji
+
+.fk = Falkland Islands (Malvinas)
+
+.fm = Micronesia, Federal State of
+
+.fo = Faroe Islands
+
+.fr = France
+
+.ga = Gabon - no registrar
+
+.gb = Great Britain (United Kingdom) - reserved, see .uk
+
+.gd = Grenada
+
+.ge = Georgia
+
+.gf = French Guiana
+
+.gg = Guernsey
+
+.gh = Ghana
+
+.gi = Gibraltar
+
+.gl = Greenland
+
+.gm = Gambia
+
+.gn = Guinea
+
+.gp = Guadeloupe - no information
+
+.gq = Equatorial Guinea - no information
+
+.gr = Greece
+
+.gs = South Georgia and the
+South Sandwich Islands
+
+.gt = Guatemala
+
+.gu = Guam
+
+.gw = Guinea-Bissau - no registrar
+
+.gy = Guyana - no registrar
+
+.hk = Hong Kong
+
+.hm = Heard and McDonald Islands
+
+.hn = Honduras
+
+.hr = Croatia/Hrvatska
+
+.ht = Haiti - no registrar
+
+.hu = Hungary
+
+.id = Indonesia - no information
+
+.ie = Ireland
+
+.il = Israel
+
+.im = Isle of Man
+
+.in = India
+
+.io = British Indian Ocean Territory
+
+.iq = Iraq - no registrar
+
+.ir = Islamic Republic of Iran
+
+.is = Iceland
+
+.it = Italy
+
+.je = Jersey
+
+.jm = Jamaica - no registrar
+
+.jo = Jordan
+
+.jp = Japan
+
+.ke = Kenya
+
+.kg = Kyrgyzstan - no registrar
+
+.kh = Cambodia
+
+.ki = Kiribati
+
+.km = Comoros
+
+.kn = Saint Kitts and Nevis - no registrar
+
+.kp = Democratic People's Republic of Korea
+(North) - no registrar
+
+.kr = Republic of Korea (South)
+
+.kw = Kuwait - no registrar
+
+.ky = Cayman Islands
+
+.kz = Kazakhstan
+
+.la = Lao People's Democratic Republic (Laos)
+... DETAILS
+
+.lb = Lebanon
+
+.lc = Saint Lucia
+
+.li = Liechtenstein
+
+.lk = Sri Lanka
+
+.lr = Liberia
+
+.ls = Lesotho - no registrar
+
+.lt = Lithuania
+
+.lu = Luxembourg
+
+.lv = Latvia
+
+.ly = Libyan Arab Jamahiriya (Libya)
+
+.ma = Morocco
+
+.mc = Monaco
+
+.md = Moldova
+
+.me = Montenegro
+
+.mg = Madagascar
+
+.mh = Marshall Islands
+
+.mk = Macedonia
+
+.ml = Mali - no information
+
+.mm = Myanmar (formerly Burma) - no registrar
+
+.mn = Mongolia
+
+.mo = Macau
+
+.mp = Northern Mariana Islands
+
+.mq = Martinique - no information
+
+.mr = Mauritania
+
+.ms = Montserrat
+
+.mt = Malta
+
+.mu = Mauritius
+
+.mv = Maldives - no registrar
+
+.mw = Malawi
+
+.mx = Mexico
+
+.my = Malaysia
+
+.mz = Mozambique - no registrar
+
+.na = Namibia
+
+.nc = New Caledonia
+
+.ne = Niger - no information
+
+.nf = Norfolk Island
+
+.ng = Nigeria
+
+.ni = Nicaragua
+
+.nl = Netherlands
+
+.no = Norway
+
+.np = Nepal
+
+.nr = Nauru
+
+.nu = Niue
+
+.nz = New Zealand
+
+.om = Oman - Omantel.net.om not functioning
+
+.pa = Panama
+
+.pe = Peru
+
+.pf = French Polynesia - no registrar
+
+.pg = Papua New Guinea - no registrar
+
+.ph = Philippines
+
+.pk = Pakistan
+
+.pl = Poland
+
+.pm = Saint Pierre and Miquelon - not available
+
+.pn = Pitcairn Island
+
+.pr = Puerto Rico
+
+.ps = Palestinian Territories
+
+.pt = Portugal
+
+.pw = Palau
+
+.py = Paraguay
+
+.qa = Qatar
+
+.re = Reunion Island
+
+.ro = Romania
+
+.rs = Serbia - no registrar
+
+.ru = Russian Federation
+
+.rw = Rwanda
+
+.sa = Saudi Arabia
+
+.sb = Solomon Islands
+
+.sc = Seychelles
+
+.sd = Sudan
+
+.se = Sweden
+
+.sg = Singapore
+
+.sh = Saint Helena
+
+.si = Slovenia
+
+.sj = Svalbard and Jan Mayen Islands - not in use
+
+.sk = Slovak Republic
+
+.sl = Sierra Leone
+
+.sm = San Marino
+
+.sn = Senegal - no registrar
+
+.so = Somalia - no registrar
+
+.sr = Suriname
+
+.st = Sao Tome and Principe
+
+.su = Soviet Union
+
+.sv = El Salvador
+
+.sy = Syrian Arab Republic
+
+.sz = Swaziland
+
+.tc = Turks and Caicos Islands - no registrar
+
+.td = Chad - no registrar
+
+.tf = French Southern Territories - no registrar
+
+.tg = Togo
+
+.th = Thailand
+
+.tj = Tajikistan
+
+.tk = Tokelau
+
+.tl = Timor-Leste
+
+.tm = Turkmenistan
+
+.tn = Tunisia
+
+.to = Tonga
+
+.tp = East Timor - Closed. See: Timor-Leste
+
+.tr = Turkey
+
+.tt = Trinidad and Tobago
+
+.tv = Tuvalu
+
+.tw = Taiwan
+
+.tz = Tanzania
+
+.ua = Ukraine
+
+.ug = Uganda
+
+.uk = United Kingdom
+
+.um = United States Minor Outlying Islands
+- Withdrawn, no domains exist.
+
+.us = United States (USA)
+
+.uy = Uruguay
+
+.uz = Uzbekistan
+
+.va = Holy See (Vatican City State)- no registrar
+
+.vc = Saint Vincent and the Grenadines
+
+.ve = Venezuela
+
+.vg = British Virgin Islands
+
+.vi = U.S. Virgin Islands
+
+.vn = Vietnam
+
+.vu = Vanuatu
+
+.wf = Wallis and Futuna Islands - no registrar
+
+.ws = Western Samoa
+
+.ye = Yemen - no registrar
+
+.yt = Mayotte - no registrar
+
+.yu = Yugoslavia Withdrawn in favor of .me and .rs
+
+.za = South Africa
+
+.zm = Zambia - no registrar
+
+.zr = Zaire - Obsolete
+now: The Democratic Republic of the Congo (.cd)
+
+.zw = Zimbabwe - no registrar
diff --git a/lists/tld-list-mozilla.txt b/lists/tld-list-mozilla.txt
new file mode 100644
index 0000000..7902eee
--- /dev/null
+++ b/lists/tld-list-mozilla.txt
@@ -0,0 +1,5 @@
+--2012-05-19 13:07:53-- https://mxr.mozilla.org/mozilla-central/source/netwerk/dns/effective_tld_na…
+Resolving mxr.mozilla.org (mxr.mozilla.org) 63.245.215.42
+Connecting to mxr.mozilla.org (mxr.mozilla.org)|63.245.215.42|:443... connected.
+ERROR: The certificate of `mxr.mozilla.org' is not trusted.
+ERROR: The certificate of `mxr.mozilla.org' hasn't got a known issuer.
diff --git a/lists/top-1m.txt.bak2 b/lists/top-1m.txt.bak2
new file mode 100644
index 0000000..293e661
--- /dev/null
+++ b/lists/top-1m.txt.bak2
@@ -0,0 +1,11 @@
+1,torproject.org
+2,google.com
+3,facebook.com
+4,youtube.com
+5,yahoo.com
+6,baidu.com
+7,wikipedia.org
+8,live.com
+9,blogspot.com
+10,twitter.com
+11,qq.com
1
0
03 Nov '12
commit dc3393fef26d2d9b03e6403f46909de00f55bf17
Author: Isis Lovecruft <isis(a)torproject.org>
Date: Fri Nov 2 17:47:00 2012 +0000
* Moved old code to /old-to-be-ported code.
---
.../old-api/.ropeproject/config.py | 85 +++++
.../old-api/.ropeproject/globalnames | Bin 0 -> 108 bytes
old-to-be-ported-code/old-api/.ropeproject/history | 1 +
.../old-api/.ropeproject/objectdb | Bin 0 -> 741 bytes
old-to-be-ported-code/old-api/TESTS_ARE_MOVING.txt | 8 +
old-to-be-ported-code/old-api/chinatrigger.py | 140 ++++++++
old-to-be-ported-code/old-api/daphn3.py | 152 ++++++++
old-to-be-ported-code/old-api/domclass.py | 216 +++++++++++
old-to-be-ported-code/old-api/dropin.cache | 243 +++++++++++++
old-to-be-ported-code/old-api/httpt.py | 94 +++++
old-to-be-ported-code/old-api/tcpconnect.py | 65 ++++
old-to-be-ported-code/old-api/tcpscan.py | 84 +++++
old-to-be-ported-code/very-old/TODO.plgoons | 79 ++++
old-to-be-ported-code/very-old/TO_BE_PORTED | 14 +
old-to-be-ported-code/very-old/ooni-probe.diff | 358 +++++++++++++++++++
old-to-be-ported-code/very-old/ooni/#namecheck.py# | 39 ++
old-to-be-ported-code/very-old/ooni/.DS_Store | Bin 0 -> 15364 bytes
old-to-be-ported-code/very-old/ooni/__init__.py | 12 +
old-to-be-ported-code/very-old/ooni/command.py | 250 +++++++++++++
.../very-old/ooni/dns_poisoning.py | 43 +++
old-to-be-ported-code/very-old/ooni/dnsooni.py | 356 ++++++++++++++++++
old-to-be-ported-code/very-old/ooni/helpers.py | 38 ++
old-to-be-ported-code/very-old/ooni/http.py | 306 ++++++++++++++++
old-to-be-ported-code/very-old/ooni/input.py | 33 ++
old-to-be-ported-code/very-old/ooni/namecheck.py | 39 ++
.../very-old/ooni/plugins/dnstest_plgoo.py | 84 +++++
.../very-old/ooni/plugins/http_plgoo.py | 70 ++++
.../very-old/ooni/plugins/marco_plgoo.py | 377 ++++++++++++++++++++
.../very-old/ooni/plugins/proxy_plgoo.py | 69 ++++
.../very-old/ooni/plugins/simple_dns_plgoo.py | 35 ++
.../very-old/ooni/plugins/tcpcon_plgoo.py | 278 ++++++++++++++
old-to-be-ported-code/very-old/ooni/plugins/tor.py | 80 ++++
old-to-be-ported-code/very-old/ooni/plugins/torrc | 9 +
old-to-be-ported-code/very-old/ooni/plugooni.py | 106 ++++++
.../very-old/ooni/transparenthttp.py | 41 +++
old-to-be-ported-code/very-old/traceroute.py | 108 ++++++
36 files changed, 3912 insertions(+), 0 deletions(-)
diff --git a/old-to-be-ported-code/old-api/.ropeproject/config.py b/old-to-be-ported-code/old-api/.ropeproject/config.py
new file mode 100644
index 0000000..ffebcd4
--- /dev/null
+++ b/old-to-be-ported-code/old-api/.ropeproject/config.py
@@ -0,0 +1,85 @@
+# The default ``config.py``
+
+
+def set_prefs(prefs):
+ """This function is called before opening the project"""
+
+ # Specify which files and folders to ignore in the project.
+ # Changes to ignored resources are not added to the history and
+ # VCSs. Also they are not returned in `Project.get_files()`.
+ # Note that ``?`` and ``*`` match all characters but slashes.
+ # '*.pyc': matches 'test.pyc' and 'pkg/test.pyc'
+ # 'mod*.pyc': matches 'test/mod1.pyc' but not 'mod/1.pyc'
+ # '.svn': matches 'pkg/.svn' and all of its children
+ # 'build/*.o': matches 'build/lib.o' but not 'build/sub/lib.o'
+ # 'build//*.o': matches 'build/lib.o' and 'build/sub/lib.o'
+ prefs['ignored_resources'] = ['*.pyc', '*~', '.ropeproject',
+ '.hg', '.svn', '_svn', '.git']
+
+ # Specifies which files should be considered python files. It is
+ # useful when you have scripts inside your project. Only files
+ # ending with ``.py`` are considered to be python files by
+ # default.
+ #prefs['python_files'] = ['*.py']
+
+ # Custom source folders: By default rope searches the project
+ # for finding source folders (folders that should be searched
+ # for finding modules). You can add paths to that list. Note
+ # that rope guesses project source folders correctly most of the
+ # time; use this if you have any problems.
+ # The folders should be relative to project root and use '/' for
+ # separating folders regardless of the platform rope is running on.
+ # 'src/my_source_folder' for instance.
+ #prefs.add('source_folders', 'src')
+
+ # You can extend python path for looking up modules
+ #prefs.add('python_path', '~/python/')
+
+ # Should rope save object information or not.
+ prefs['save_objectdb'] = True
+ prefs['compress_objectdb'] = False
+
+ # If `True`, rope analyzes each module when it is being saved.
+ prefs['automatic_soa'] = True
+ # The depth of calls to follow in static object analysis
+ prefs['soa_followed_calls'] = 0
+
+ # If `False` when running modules or unit tests "dynamic object
+ # analysis" is turned off. This makes them much faster.
+ prefs['perform_doa'] = True
+
+ # Rope can check the validity of its object DB when running.
+ prefs['validate_objectdb'] = True
+
+ # How many undos to hold?
+ prefs['max_history_items'] = 32
+
+ # Shows whether to save history across sessions.
+ prefs['save_history'] = True
+ prefs['compress_history'] = False
+
+ # Set the number spaces used for indenting. According to
+ # :PEP:`8`, it is best to use 4 spaces. Since most of rope's
+ # unit-tests use 4 spaces it is more reliable, too.
+ prefs['indent_size'] = 4
+
+ # Builtin and c-extension modules that are allowed to be imported
+ # and inspected by rope.
+ prefs['extension_modules'] = []
+
+ # Add all standard c-extensions to extension_modules list.
+ prefs['import_dynload_stdmods'] = True
+
+ # If `True` modules with syntax errors are considered to be empty.
+ # The default value is `False`; When `False` syntax errors raise
+ # `rope.base.exceptions.ModuleSyntaxError` exception.
+ prefs['ignore_syntax_errors'] = False
+
+ # If `True`, rope ignores unresolvable imports. Otherwise, they
+ # appear in the importing namespace.
+ prefs['ignore_bad_imports'] = False
+
+
+def project_opened(project):
+ """This function is called after opening the project"""
+ # Do whatever you like here!
diff --git a/old-to-be-ported-code/old-api/.ropeproject/globalnames b/old-to-be-ported-code/old-api/.ropeproject/globalnames
new file mode 100644
index 0000000..2877ef5
Binary files /dev/null and b/old-to-be-ported-code/old-api/.ropeproject/globalnames differ
diff --git a/old-to-be-ported-code/old-api/.ropeproject/history b/old-to-be-ported-code/old-api/.ropeproject/history
new file mode 100644
index 0000000..fcd9c96
--- /dev/null
+++ b/old-to-be-ported-code/old-api/.ropeproject/history
@@ -0,0 +1 @@
+]q(]q]qe.
\ No newline at end of file
diff --git a/old-to-be-ported-code/old-api/.ropeproject/objectdb b/old-to-be-ported-code/old-api/.ropeproject/objectdb
new file mode 100644
index 0000000..f276839
Binary files /dev/null and b/old-to-be-ported-code/old-api/.ropeproject/objectdb differ
diff --git a/old-to-be-ported-code/old-api/TESTS_ARE_MOVING.txt b/old-to-be-ported-code/old-api/TESTS_ARE_MOVING.txt
new file mode 100644
index 0000000..f4c0084
--- /dev/null
+++ b/old-to-be-ported-code/old-api/TESTS_ARE_MOVING.txt
@@ -0,0 +1,8 @@
+7/10/2012
+
+All new tests will be moved to the directory /nettests/.
+
+Tests that are in this directory are either here for historical reasons or have
+not yet been properly tested and fully supporting the new API.
+
+A.
diff --git a/old-to-be-ported-code/old-api/chinatrigger.py b/old-to-be-ported-code/old-api/chinatrigger.py
new file mode 100644
index 0000000..cf4bcb3
--- /dev/null
+++ b/old-to-be-ported-code/old-api/chinatrigger.py
@@ -0,0 +1,140 @@
+import random
+import string
+import struct
+import time
+
+from zope.interface import implements
+from twisted.python import usage
+from twisted.plugin import IPlugin
+from twisted.internet import protocol, defer
+from ooni.plugoo.tests import ITest, OONITest
+from ooni.plugoo.assets import Asset
+from ooni.utils import log
+from ooni.protocols.scapyproto import ScapyTest
+
+from ooni.lib.txscapy import txsr, txsend
+
+class scapyArgs(usage.Options):
+ optParameters = [['dst', 'd', None, 'Specify the target address'],
+ ['port', 'p', None, 'Specify the target port'],
+ ['pcap', 'f', None, 'The pcap file to write with the sent and received packets'],
+ ]
+
+class ChinaTriggerTest(ScapyTest):
+ """
+ This test is a OONI based implementation of the C tool written
+ by Philipp Winter to engage chinese probes in active scanning.
+
+ Example of running it:
+ ./ooni/ooniprobe.py chinatrigger -d 127.0.0.1 -p 8080 -f bla.pcap
+ """
+ implements(IPlugin, ITest)
+
+ shortName = "chinatrigger"
+ description = "Triggers the chinese probes into scanning"
+ requirements = ['root']
+ options = scapyArgs
+ blocking = False
+
+ receive = True
+ pcapfile = 'example_scapy.pcap'
+ timeout = 5
+
+ def initialize(self, reactor=None):
+ if not self.reactor:
+ from twisted.internet import reactor
+ self.reactor = reactor
+
+ @staticmethod
+ def set_random_servername(pkt):
+ ret = pkt[:121]
+ for i in range(16):
+ ret += random.choice(string.ascii_lowercase)
+ ret += pkt[121+16:]
+ return ret
+
+ @staticmethod
+ def set_random_time(pkt):
+ ret = pkt[:11]
+ ret += struct.pack('!I', int(time.time()))
+ ret += pkt[11+4:]
+ return ret
+
+ @staticmethod
+ def set_random_field(pkt):
+ ret = pkt[:15]
+ for i in range(28):
+ ret += chr(random.randint(0, 256))
+ ret += pkt[15+28:]
+ return ret
+
+ @staticmethod
+ def mutate(pkt, idx):
+ """
+ Slightly changed mutate function.
+ """
+ ret = pkt[:idx-1]
+ mutation = chr(random.randint(0, 256))
+ while mutation == pkt[idx]:
+ mutation = chr(random.randint(0, 256))
+ ret += mutation
+ ret += pkt[idx:]
+ return ret
+
+ @staticmethod
+ def set_all_random_fields(pkt):
+ pkt = ChinaTriggerTest.set_random_servername(pkt)
+ pkt = ChinaTriggerTest.set_random_time(pkt)
+ pkt = ChinaTriggerTest.set_random_field(pkt)
+ return pkt
+
+ def build_packets(self, *args, **kw):
+ """
+ Override this method to build scapy packets.
+ """
+ from scapy.all import IP, TCP
+ pkt = "\x16\x03\x01\x00\xcc\x01\x00\x00\xc8"\
+ "\x03\x01\x4f\x12\xe5\x63\x3f\xef\x7d"\
+ "\x20\xb9\x94\xaa\x04\xb0\xc1\xd4\x8c"\
+ "\x50\xcd\xe2\xf9\x2f\xa9\xfb\x78\xca"\
+ "\x02\xa8\x73\xe7\x0e\xa8\xf9\x00\x00"\
+ "\x3a\xc0\x0a\xc0\x14\x00\x39\x00\x38"\
+ "\xc0\x0f\xc0\x05\x00\x35\xc0\x07\xc0"\
+ "\x09\xc0\x11\xc0\x13\x00\x33\x00\x32"\
+ "\xc0\x0c\xc0\x0e\xc0\x02\xc0\x04\x00"\
+ "\x04\x00\x05\x00\x2f\xc0\x08\xc0\x12"\
+ "\x00\x16\x00\x13\xc0\x0d\xc0\x03\xfe"\
+ "\xff\x00\x0a\x00\xff\x01\x00\x00\x65"\
+ "\x00\x00\x00\x1d\x00\x1b\x00\x00\x18"\
+ "\x77\x77\x77\x2e\x67\x6e\x6c\x69\x67"\
+ "\x78\x7a\x70\x79\x76\x6f\x35\x66\x76"\
+ "\x6b\x64\x2e\x63\x6f\x6d\x00\x0b\x00"\
+ "\x04\x03\x00\x01\x02\x00\x0a\x00\x34"\
+ "\x00\x32\x00\x01\x00\x02\x00\x03\x00"\
+ "\x04\x00\x05\x00\x06\x00\x07\x00\x08"\
+ "\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00"\
+ "\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11"\
+ "\x00\x12\x00\x13\x00\x14\x00\x15\x00"\
+ "\x16\x00\x17\x00\x18\x00\x19\x00\x23"\
+ "\x00\x00"
+
+ pkt = ChinaTriggerTest.set_all_random_fields(pkt)
+ pkts = [IP(dst=self.dst)/TCP(dport=self.port)/pkt]
+ for x in range(len(pkt)):
+ mutation = IP(dst=self.dst)/TCP(dport=self.port)/ChinaTriggerTest.mutate(pkt, x)
+ pkts.append(mutation)
+ return pkts
+
+ def load_assets(self):
+ if self.local_options:
+ self.dst = self.local_options['dst']
+ self.port = int(self.local_options['port'])
+ if self.local_options['pcap']:
+ self.pcapfile = self.local_options['pcap']
+ if not self.port or not self.dst:
+ pass
+
+ return {}
+
+#chinatrigger = ChinaTriggerTest(None, None, None)
+
diff --git a/old-to-be-ported-code/old-api/daphn3.py b/old-to-be-ported-code/old-api/daphn3.py
new file mode 100644
index 0000000..bf4d60d
--- /dev/null
+++ b/old-to-be-ported-code/old-api/daphn3.py
@@ -0,0 +1,152 @@
+"""
+This is a self genrated test created by scaffolding.py.
+you will need to fill it up with all your necessities.
+Safe hacking :).
+"""
+from zope.interface import implements
+from twisted.python import usage
+from twisted.plugin import IPlugin
+from twisted.internet import protocol, endpoints
+
+from ooni.plugoo import reports
+from ooni.plugoo.tests import ITest, OONITest
+from ooni.plugoo.assets import Asset
+from ooni.protocols import daphn3
+from ooni.utils import log
+
+class Daphn3ClientProtocol(daphn3.Daphn3Protocol):
+ def connectionMade(self):
+ self.next_state()
+
+class Daphn3ClientFactory(protocol.ClientFactory):
+ protocol = Daphn3ClientProtocol
+ mutator = None
+ steps = None
+ test = None
+
+ def buildProtocol(self, addr):
+ p = self.protocol()
+ p.factory = self
+ p.test = self.test
+
+ if self.steps:
+ p.steps = self.steps
+
+ if not self.mutator:
+ self.mutator = daphn3.Mutator(p.steps)
+
+ else:
+ print "Moving on to next mutation"
+ self.mutator.next()
+
+ p.mutator = self.mutator
+ p.current_state = self.mutator.state()
+ return p
+
+ def clientConnectionFailed(self, reason):
+ print "We failed connecting the the OONIB"
+ print "Cannot perform test. Perhaps it got blocked?"
+ print "Please report this to tor-assistants(a)torproject.org"
+ self.test.result['error'] = ('Failed in connecting to OONIB', reason)
+ self.test.end(d)
+
+ def clientConnectionLost(self, reason):
+ print "Connection Lost."
+
+class daphn3Args(usage.Options):
+ optParameters = [['pcap', 'f', None,
+ 'PCAP to read for generating the YAML output'],
+
+ ['output', 'o', 'daphn3.yaml',
+ 'What file should be written'],
+
+ ['yaml', 'y', None,
+ 'The input file to the test'],
+
+ ['host', 'h', None, 'Target Hostname'],
+ ['port', 'p', None, 'Target port number'],
+ ['resume', 'r', 0, 'Resume at this index']]
+
+class daphn3Test(OONITest):
+ implements(IPlugin, ITest)
+
+ shortName = "daphn3"
+ description = "daphn3"
+ requirements = None
+ options = daphn3Args
+ blocking = False
+
+ local_options = None
+
+ steps = None
+
+ def initialize(self):
+ if not self.local_options:
+ self.end()
+ return
+
+ self.factory = Daphn3ClientFactory()
+ self.factory.test = self
+
+ if self.local_options['pcap']:
+ self.tool = True
+
+ elif self.local_options['yaml']:
+ self.steps = daphn3.read_yaml(self.local_options['yaml'])
+
+ else:
+ log.msg("Not enough inputs specified to the test")
+ self.end()
+
+ def runTool(self):
+ import yaml
+ pcap = daphn3.read_pcap(self.local_options['pcap'])
+ f = open(self.local_options['output'], 'w')
+ f.write(yaml.dump(pcap))
+ f.close()
+
+ def control(self, exp_res, args):
+ try:
+ mutation = self.factory.mutator.get(0)
+ self.result['censored'] = False
+ except:
+ mutation = None
+
+ return {'mutation_number': args['mutation'],
+ 'value': mutation}
+
+ def _failure(self, *argc, **kw):
+ self.result['censored'] = True
+ self.result['error'] = ('Failed in connecting', (argc, kw))
+ self.end()
+
+ def experiment(self, args):
+ log.msg("Doing mutation %s" % args['mutation'])
+ self.factory.steps = self.steps
+ host = self.local_options['host']
+ port = int(self.local_options['port'])
+ log.msg("Connecting to %s:%s" % (host, port))
+
+ if self.ended:
+ return
+
+ endpoint = endpoints.TCP4ClientEndpoint(self.reactor, host, port)
+ d = endpoint.connect(self.factory)
+ d.addErrback(self._failure)
+ return d
+
+ def load_assets(self):
+ if not self.local_options:
+ return {}
+ if not self.steps:
+ print "Error: No assets!"
+ self.end()
+ return {}
+ mutations = 0
+ for x in self.steps:
+ mutations += len(x['data'])
+ return {'mutation': range(mutations)}
+
+# We need to instantiate it otherwise getPlugins does not detect it
+# XXX Find a way to load plugins without instantiating them.
+#daphn3test = daphn3Test(None, None, None)
diff --git a/old-to-be-ported-code/old-api/domclass.py b/old-to-be-ported-code/old-api/domclass.py
new file mode 100644
index 0000000..3080c40
--- /dev/null
+++ b/old-to-be-ported-code/old-api/domclass.py
@@ -0,0 +1,216 @@
+#!/usr/bin/env python
+#-*- encoding: utf-8 -*-
+#
+# domclass
+# ********
+#
+# :copyright: (c) 2012 by Arturo Filastò
+# :license: see LICENSE for more details.
+#
+# how this works
+# --------------
+#
+# This classifier uses the DOM structure of a website to determine how similar
+# the two sites are.
+# The procedure we use is the following:
+# * First we parse all the DOM tree of the web page and we build a list of
+# TAG parent child relationships (ex. <html><a><b></b></a><c></c></html> =>
+# (html, a), (a, b), (html, c)).
+#
+# * We then use this information to build a matrix (M) where m[i][j] = P(of
+# transitioning from tag[i] to tag[j]). If tag[i] does not exists P() = 0.
+# Note: M is a square matrix that is number_of_tags wide.
+#
+# * We then calculate the eigenvectors (v_i) and eigenvalues (e) of M.
+#
+# * The corelation between page A and B is given via this formula:
+# correlation = dot_product(e_A, e_B), where e_A and e_B are
+# resepectively the eigenvalues for the probability matrix A and the
+# probability matrix B.
+#
+
+try:
+ import numpy
+except:
+ print "Error numpy not installed!"
+
+import yaml
+from zope.interface import implements
+from twisted.python import usage
+from twisted.plugin import IPlugin
+from ooni.plugoo.tests import ITest, OONITest
+from ooni.plugoo.assets import Asset
+from ooni.utils import log
+from ooni.protocols.http import HTTPTest
+
+class domclassArgs(usage.Options):
+ optParameters = [['output', 'o', None, 'Output to write'],
+ ['file', 'f', None, 'Corpus file'],
+ ['fileb', 'b', None, 'Corpus file'],
+ ['urls', 'u', None, 'URL List'],
+ ['resume', 'r', 0, 'Resume at this index']]
+
+# All HTML4 tags
+# XXX add link to W3C page where these came from
+alltags = ['A', 'ABBR', 'ACRONYM', 'ADDRESS', 'APPLET', 'AREA', 'B', 'BASE',
+ 'BASEFONT', 'BD', 'BIG', 'BLOCKQUOTE', 'BODY', 'BR', 'BUTTON', 'CAPTION',
+ 'CENTER', 'CITE', 'CODE', 'COL', 'COLGROUP', 'DD', 'DEL', 'DFN', 'DIR', 'DIV',
+ 'DL', 'DT', 'E M', 'FIELDSET', 'FONT', 'FORM', 'FRAME', 'FRAMESET', 'H1', 'H2',
+ 'H3', 'H4', 'H5', 'H6', 'HEAD', 'HR', 'HTML', 'I', 'IFRAME ', 'IMG',
+ 'INPUT', 'INS', 'ISINDEX', 'KBD', 'LABEL', 'LEGEND', 'LI', 'LINK', 'MAP',
+ 'MENU', 'META', 'NOFRAMES', 'NOSCRIPT', 'OBJECT', 'OL', 'OPTGROUP', 'OPTION',
+ 'P', 'PARAM', 'PRE', 'Q', 'S', 'SAMP', 'SCRIPT', 'SELECT', 'SMALL', 'SPAN',
+ 'STRIKE', 'STRONG', 'STYLE', 'SUB', 'SUP', 'TABLE', 'TBODY', 'TD',
+ 'TEXTAREA', 'TFOOT', 'TH', 'THEAD', 'TITLE', 'TR', 'TT', 'U', 'UL', 'VAR']
+
+# Reduced subset of only the most common tags
+commontags = ['A', 'B', 'BLOCKQUOTE', 'BODY', 'BR', 'BUTTON', 'CAPTION',
+ 'CENTER', 'CITE', 'CODE', 'COL', 'DD', 'DIV',
+ 'DL', 'DT', 'EM', 'FIELDSET', 'FONT', 'FORM', 'FRAME', 'FRAMESET', 'H1', 'H2',
+ 'H3', 'H4', 'H5', 'H6', 'HEAD', 'HR', 'HTML', 'IFRAME ', 'IMG',
+ 'INPUT', 'INS', 'LABEL', 'LEGEND', 'LI', 'LINK', 'MAP',
+ 'MENU', 'META', 'NOFRAMES', 'NOSCRIPT', 'OBJECT', 'OL', 'OPTION',
+ 'P', 'PRE', 'SCRIPT', 'SELECT', 'SMALL', 'SPAN',
+ 'STRIKE', 'STRONG', 'STYLE', 'SUB', 'SUP', 'TABLE', 'TBODY', 'TD',
+ 'TEXTAREA', 'TFOOT', 'TH', 'THEAD', 'TITLE', 'TR', 'TT', 'U', 'UL']
+
+# The tags we are intested in using for our analysis
+thetags = ['A', 'DIV', 'FRAME', 'H1', 'H2',
+ 'H3', 'H4', 'IFRAME ', 'INPUT',
+ 'LABEL','LI', 'P', 'SCRIPT', 'SPAN',
+ 'STYLE', 'TR']
+
+def compute_probability_matrix(dataset):
+ """
+ Compute the probability matrix based on the input dataset.
+
+ :dataset: an array of pairs representing the parent child relationships.
+ """
+ import itertools
+ ret = {}
+ matrix = numpy.zeros((len(thetags) + 1, len(thetags) + 1))
+
+ for data in dataset:
+ x = data[0].upper()
+ y = data[1].upper()
+ try:
+ x = thetags.index(x)
+ except:
+ x = len(thetags)
+
+ try:
+ y = thetags.index(y)
+ except:
+ y = len(thetags)
+
+ matrix[x,y] += 1
+
+ for x in xrange(len(thetags) + 1):
+ possibilities = 0
+ for y in matrix[x]:
+ possibilities += y
+
+ for i in xrange(len(matrix[x])):
+ if possibilities != 0:
+ matrix[x][i] = matrix[x][i]/possibilities
+
+ return matrix
+
+def compute_eigenvalues(matrix):
+ """
+ Returns the eigenvalues of the supplied square matrix.
+
+ :matrix: must be a square matrix and diagonalizable.
+ """
+ return numpy.linalg.eigvals(matrix)
+
+def readDOM(content=None, filename=None):
+ """
+ Parses the DOM of the HTML page and returns an array of parent, child
+ pairs.
+
+ :content: the content of the HTML page to be read.
+
+ :filename: the filename to be read from for getting the content of the
+ page.
+ """
+ from bs4 import BeautifulSoup
+
+ if filename:
+ f = open(filename)
+ content = ''.join(f.readlines())
+ f.close()
+
+ dom = BeautifulSoup(content)
+ couples = []
+ for x in dom.findAll():
+ couples.append((str(x.parent.name), str(x.name)))
+
+ return couples
+
+class domclassTest(HTTPTest):
+ implements(IPlugin, ITest)
+
+ shortName = "domclass"
+ description = "domclass"
+ requirements = None
+ options = domclassArgs
+ blocking = False
+
+ follow_redirects = True
+ #tool = True
+
+ def runTool(self):
+ site_a = readDOM(filename=self.local_options['file'])
+ site_b = readDOM(filename=self.local_options['fileb'])
+ a = {}
+ a['matrix'] = compute_probability_matrix(site_a)
+ a['eigen'] = compute_eigenvalues(a['matrix'])
+
+ self.result['eigenvalues'] = a['eigen']
+ b = {}
+ b['matrix'] = compute_probability_matrix(site_b)
+ b['eigen'] = compute_eigenvalues(b['matrix'])
+
+ #print "A: %s" % a
+ #print "B: %s" % b
+ correlation = numpy.vdot(a['eigen'],b['eigen'])
+ correlation /= numpy.linalg.norm(a['eigen'])*numpy.linalg.norm(b['eigen'])
+ correlation = (correlation + 1)/2
+ print "Corelation: %s" % correlation
+ self.end()
+ return a
+
+ def processResponseBody(self, data):
+ site_a = readDOM(data)
+ #site_b = readDOM(self.local_options['fileb'])
+ a = {}
+ a['matrix'] = compute_probability_matrix(site_a)
+ a['eigen'] = compute_eigenvalues(a['matrix'])
+
+
+ if len(data) == 0:
+ self.result['eigenvalues'] = None
+ self.result['matrix'] = None
+ else:
+ self.result['eigenvalues'] = a['eigen']
+ #self.result['matrix'] = a['matrix']
+ #self.result['content'] = data[:200]
+ #b = compute_matrix(site_b)
+ print "A: %s" % a
+ return a['eigen']
+
+ def load_assets(self):
+ if self.local_options:
+ if self.local_options['file']:
+ self.tool = True
+ return {}
+ elif self.local_options['urls']:
+ return {'url': Asset(self.local_options['urls'])}
+ else:
+ self.end()
+ return {}
+ else:
+ return {}
+
+#domclass = domclassTest(None, None, None)
diff --git a/old-to-be-ported-code/old-api/dropin.cache b/old-to-be-ported-code/old-api/dropin.cache
new file mode 100755
index 0000000..65c2187
--- /dev/null
+++ b/old-to-be-ported-code/old-api/dropin.cache
@@ -0,0 +1,243 @@
+(dp1
+S'tcpconnect'
+p2
+ccopy_reg
+_reconstructor
+p3
+(ctwisted.plugin
+CachedDropin
+p4
+c__builtin__
+object
+p5
+NtRp6
+(dp7
+S'moduleName'
+p8
+S'ooni.plugins.tcpconnect'
+p9
+sS'description'
+p10
+S'\nThis is a self genrated test created by scaffolding.py.\nyou will need to fill it up with all your necessities.\nSafe hacking :).\n'
+p11
+sS'plugins'
+p12
+(lp13
+g3
+(ctwisted.plugin
+CachedPlugin
+p14
+g5
+NtRp15
+(dp16
+S'provided'
+p17
+(lp18
+ctwisted.plugin
+IPlugin
+p19
+acooni.plugoo.interface
+ITest
+p20
+asS'dropin'
+p21
+g6
+sS'name'
+p22
+S'tcpconnect'
+p23
+sg10
+NsbasbsS'domclass'
+p24
+g3
+(g4
+g5
+NtRp25
+(dp26
+g8
+S'ooni.plugins.domclass'
+p27
+sg10
+Nsg12
+(lp28
+g3
+(g14
+g5
+NtRp29
+(dp30
+g17
+(lp31
+g19
+ag20
+asg21
+g25
+sg22
+S'domclass'
+p32
+sg10
+NsbasbsS'bridget'
+p33
+g3
+(g4
+g5
+NtRp34
+(dp35
+g8
+S'ooni.plugins.bridget'
+p36
+sg10
+Nsg12
+(lp37
+g3
+(g14
+g5
+NtRp38
+(dp39
+g17
+(lp40
+g19
+ag20
+asg21
+g34
+sg22
+S'bridget'
+p41
+sg10
+S"\n XXX fill me in\n\n :ivar config:\n An :class:`ooni.lib.txtorcon.TorConfig` instance.\n :ivar relays:\n A list of all provided relays to test.\n :ivar bridges:\n A list of all provided bridges to test.\n :ivar socks_port:\n Integer for Tor's SocksPort.\n :ivar control_port:\n Integer for Tor's ControlPort.\n :ivar transport:\n String defining the Tor's ClientTransportPlugin, for testing \n a bridge's pluggable transport functionality.\n :ivar tor_binary:\n Path to the Tor binary to use, e.g. '/usr/sbin/tor'\n "
+p42
+sbasbsS'daphn3'
+p43
+g3
+(g4
+g5
+NtRp44
+(dp45
+g8
+S'plugins.daphn3'
+p46
+sg10
+S'\nThis is a self genrated test created by scaffolding.py.\nyou will need to fill it up with all your necessities.\nSafe hacking :).\n'
+p47
+sg12
+(lp48
+g3
+(g14
+g5
+NtRp49
+(dp50
+g17
+(lp51
+g19
+ag20
+asg21
+g44
+sg22
+S'daphn3test'
+p52
+sg10
+NsbasbsS'httpt'
+p53
+g3
+(g4
+g5
+NtRp54
+(dp55
+g8
+S'ooni.plugins.httpt'
+p56
+sg10
+S'\nThis is a self genrated test created by scaffolding.py.\nyou will need to fill it up with all your necessities.\nSafe hacking :).\n'
+p57
+sg12
+(lp58
+sbsS'chinatrigger'
+p59
+g3
+(g4
+g5
+NtRp60
+(dp61
+g8
+S'plugins.chinatrigger'
+p62
+sg10
+Nsg12
+(lp63
+g3
+(g14
+g5
+NtRp64
+(dp65
+g17
+(lp66
+g19
+ag20
+asg21
+g60
+sg22
+S'chinatrigger'
+p67
+sg10
+S'\n This test is a OONI based implementation of the C tool written\n by Philipp Winter to engage chinese probes in active scanning.\n\n Example of running it:\n ./ooni/ooniprobe.py chinatrigger -d 127.0.0.1 -p 8080 -f bla.pcap\n '
+p68
+sbasbsS'dnstamper'
+p69
+g3
+(g4
+g5
+NtRp70
+(dp71
+g8
+S'ooni.plugins.dnstamper'
+p72
+sg10
+S'\n dnstamper\n *********\n\n This test resolves DNS for a list of domain names, one per line, in the\n file specified in the ooni-config under the setting "dns_experiment". If\n the file is top-1m.txt, the test will be run using Amazon\'s list of top\n one million domains. The experimental dns servers to query should\n be specified one per line in assets/dns_servers.txt.\n\n The test reports censorship if the cardinality of the intersection of\n the query result set from the control server and the query result set\n from the experimental server is zero, which is to say, if the two sets\n have no matching results whatsoever.\n\n NOTE: This test frequently results in false positives due to GeoIP-based\n load balancing on major global sites such as google, facebook, and\n youtube, etc.\n\n :author: Isis Lovecruft, Arturo Filast\xc3\xb2\n :license: see LICENSE for more details\n\n TODO:\n * Finish porting to twisted\n
* Finish the client.Resolver() subclass and test it\n * Use the DNS tests from captiveportal\n * Use plugoo/reports.py for final data\n'
+p73
+sg12
+(lp74
+g3
+(g14
+g5
+NtRp75
+(dp76
+g17
+(lp77
+g19
+ag20
+asg21
+g70
+sg22
+S'dnstamper'
+p78
+sg10
+S'\n XXX fill me in\n '
+p79
+sbasbsS'blocking'
+p80
+g3
+(g4
+g5
+NtRp81
+(dp82
+g8
+S'plugins.blocking'
+p83
+sg10
+Nsg12
+(lp84
+g3
+(g14
+g5
+NtRp85
+(dp86
+g17
+(lp87
+g19
+ag20
+asg21
+g81
+sg22
+S'blocking'
+p88
+sg10
+Nsbasbs.
\ No newline at end of file
diff --git a/old-to-be-ported-code/old-api/httpt.py b/old-to-be-ported-code/old-api/httpt.py
new file mode 100644
index 0000000..358f1ea
--- /dev/null
+++ b/old-to-be-ported-code/old-api/httpt.py
@@ -0,0 +1,94 @@
+"""
+This is a self genrated test created by scaffolding.py.
+you will need to fill it up with all your necessities.
+Safe hacking :).
+"""
+from zope.interface import implements
+from twisted.python import usage
+from twisted.plugin import IPlugin
+from ooni.plugoo.tests import ITest, OONITest
+from ooni.plugoo.assets import Asset
+from ooni.protocols import http
+from ooni.utils import log
+
+class httptArgs(usage.Options):
+ optParameters = [['urls', 'f', None, 'Urls file'],
+ ['url', 'u', 'http://torproject.org/', 'Test single site'],
+ ['resume', 'r', 0, 'Resume at this index'],
+ ['rules', 'y', None, 'Specify the redirect rules file']]
+
+class httptTest(http.HTTPTest):
+ implements(IPlugin, ITest)
+
+ shortName = "httpt"
+ description = "httpt"
+ requirements = None
+ options = httptArgs
+ blocking = False
+
+
+ def testPattern(self, value, pattern, type):
+ if type == 'eq':
+ return value == pattern
+ elif type == 're':
+ import re
+ if re.match(pattern, value):
+ return True
+ else:
+ return False
+ else:
+ return None
+
+ def testPatterns(self, patterns, location):
+ test_result = False
+
+ if type(patterns) == list:
+ for pattern in patterns:
+ test_result |= self.testPattern(location, pattern['value'], pattern['type'])
+ else:
+ test_result |= self.testPattern(location, patterns['value'], patterns['type'])
+
+ return test_result
+
+ def testRules(self, rules, location):
+ result = {}
+ blocked = False
+ for rule, value in rules.items():
+ current_rule = {}
+ current_rule['name'] = value['name']
+ current_rule['patterns'] = value['patterns']
+ current_rule['test'] = self.testPatterns(value['patterns'], location)
+ blocked |= current_rule['test']
+ result[rule] = current_rule
+ result['blocked'] = blocked
+ return result
+
+ def processRedirect(self, location):
+ self.result['redirect'] = None
+ try:
+ rules_file = self.local_options['rules']
+ import yaml
+ rules = yaml.load(open(rules_file))
+ log.msg("Testing rules %s" % rules)
+ redirect = self.testRules(rules, location)
+ self.result['redirect'] = redirect
+ except TypeError:
+ log.msg("No rules file. Got a redirect, but nothing to do.")
+
+
+ def control(self, experiment_result, args):
+ print self.response
+ print self.request
+ # What you return here ends up inside of the report.
+ log.msg("Running control")
+ return {}
+
+ def load_assets(self):
+ if self.local_options and self.local_options['urls']:
+ return {'url': Asset(self.local_options['urls'])}
+ else:
+ return {}
+
+# We need to instantiate it otherwise getPlugins does not detect it
+# XXX Find a way to load plugins without instantiating them.
+#httpt = httptTest(None, None, None)
diff --git a/old-to-be-ported-code/old-api/tcpconnect.py b/old-to-be-ported-code/old-api/tcpconnect.py
new file mode 100644
index 0000000..7758a9e
--- /dev/null
+++ b/old-to-be-ported-code/old-api/tcpconnect.py
@@ -0,0 +1,65 @@
+"""
+This is a self genrated test created by scaffolding.py.
+you will need to fill it up with all your necessities.
+Safe hacking :).
+"""
+from zope.interface import implements
+from twisted.python import usage
+from twisted.plugin import IPlugin
+from twisted.internet.protocol import Factory, Protocol
+from twisted.internet.endpoints import TCP4ClientEndpoint
+
+from ooni.plugoo.interface import ITest
+from ooni.plugoo.tests import OONITest
+from ooni.plugoo.assets import Asset
+from ooni.utils import log
+
+class tcpconnectArgs(usage.Options):
+ optParameters = [['asset', 'a', None, 'File containing IP:PORT combinations, one per line.'],
+ ['resume', 'r', 0, 'Resume at this index']]
+
+class tcpconnectTest(OONITest):
+ implements(IPlugin, ITest)
+
+ shortName = "tcpconnect"
+ description = "tcpconnect"
+ requirements = None
+ options = tcpconnectArgs
+ blocking = False
+
+ def experiment(self, args):
+ try:
+ host, port = args['asset'].split(':')
+ except:
+ raise Exception("Error in parsing asset. Wrong format?")
+ class DummyFactory(Factory):
+ def buildProtocol(self, addr):
+ return Protocol()
+
+ def gotProtocol(p):
+ p.transport.loseConnection()
+ log.msg("Got a connection!")
+ log.msg(str(p))
+ return {'result': True, 'target': [host, port]}
+
+ def gotError(err):
+ log.msg("Had error :(")
+ log.msg(err)
+ return {'result': False, 'target': [host, port]}
+
+ # What you return here gets handed as input to control
+ point = TCP4ClientEndpoint(self.reactor, host, int(port))
+ d = point.connect(DummyFactory())
+ d.addCallback(gotProtocol)
+ d.addErrback(gotError)
+ return d
+
+ def load_assets(self):
+ if self.local_options:
+ return {'asset': Asset(self.local_options['asset'])}
+ else:
+ return {}
+
+# We need to instantiate it otherwise getPlugins does not detect it
+# XXX Find a way to load plugins without instantiating them.
+#tcpconnect = tcpconnectTest(None, None, None)
diff --git a/old-to-be-ported-code/old-api/tcpscan.py b/old-to-be-ported-code/old-api/tcpscan.py
new file mode 100644
index 0000000..b371c88
--- /dev/null
+++ b/old-to-be-ported-code/old-api/tcpscan.py
@@ -0,0 +1,84 @@
+"""
+ TCP Port Scanner
+ ****************
+
+ Does a TCP connect scan on the IP:port pairs.
+
+"""
+import os
+from gevent import socket
+from datetime import datetime
+import socks
+
+from plugoo.assets import Asset
+from plugoo.tests import Test
+
+__plugoo__ = "TCP Port Scanner"
+__desc__ = "This a test template to be used to build your own tests"
+
+class TCPScanAsset(Asset):
+ """
+ This is the asset that should be used by the Test. It will
+ contain all the code responsible for parsing the asset file
+ and should be passed on instantiation to the test.
+ """
+ def __init__(self, file=None):
+ self = Asset.__init__(self, file)
+
+
+class TCPScan(Test):
+ """
+ The main Test class
+ """
+
+ def experiment(self, *a, **kw):
+ """
+ Fill this up with the tasks that should be performed
+ on the "dirty" network and should be compared with the
+ control.
+ """
+ addr = kw['data']
+ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ res = False
+ try:
+ self.logger.debug('Doing a connection to %s' % addr)
+ s.connect((addr.split(':')[0], int(addr.split(':')[1])))
+ res = True
+ except socket.error, msg:
+ self.logger.debug('Connection failed to %s: %s' % (addr, msg))
+
+ finally:
+ s.close()
+
+ return {'Time': datetime.now(),
+ 'Address': addr,
+ 'Status': res}
+
+ def control(self):
+ """
+ Fill this up with the control related code.
+ """
+ return True
+
+def run(ooni, asset=None):
+ """
+ This is the function that will be called by OONI
+ and it is responsible for instantiating and passing
+ the arguments to the Test class.
+ """
+ config = ooni.config
+
+ # This the assets array to be passed to the run function of
+ # the test
+ if asset:
+ assets = [TCPScanAsset(asset)]
+ else:
+ assets = [TCPScanAsset(os.path.join(config.main.assetdir, \
+ "tcpscan.txt"))]
+
+ # Instantiate the Test
+ thetest = TCPScan(ooni)
+ ooni.logger.info("starting TCP Scan...")
+ # Run the test with argument assets
+ thetest.run(assets)
+ ooni.logger.info("finished.")
diff --git a/old-to-be-ported-code/very-old/TODO.plgoons b/old-to-be-ported-code/very-old/TODO.plgoons
new file mode 100644
index 0000000..ace2a10
--- /dev/null
+++ b/old-to-be-ported-code/very-old/TODO.plgoons
@@ -0,0 +1,79 @@
+We should implement the following as plugoons:
+
+dns_plgoo.py - Various DNS checks
+
+As a start - we should perform a known good check against a name or list of
+names. As input, we should take an ip address, a name or a list of names for
+testing; we also take dns servers for experiment or control data. For output we
+emit UDP or TCP packets - we should support proxying these requests when
+possible as is the case with TCP but probably not with UDP for certain DNS
+request types.
+
+http_plgoo.py - Various HTTP checks
+
+We should compare two pages and see if we have identical properties.
+At the very least, we should print the important differences - perhaps
+with a diff like output? We should look for fingerprints in URLS that are
+returned. We should detect 302 re-direction.
+
+As input, we should take an ip address, a name or a list of names for testing;
+we also take a list of headers such as random user agent strings and so on.
+We should emit TCP packets and ensure that we do not leak DNS for connections
+that we expect to proxy to a remote network.
+
+latency_plgoo.py - Measure latency for a host or a list of hosts
+
+As input, we should take an ip address, a name or a list of names for testing;
+We should measure the mean latency from the ooni-probe to the host with various
+traceroute tests. We should also measure the latency between the ooni-probe and
+a given server for any other protocol that is request and response oriented;
+HTTP latency may be calculated by simply tracking the delta between requests
+and responses.
+
+tcptrace_plgoo.py udptrace_plgoo.py icmptrace_plgoo.py - Traceroute suites
+
+tcptrace_plgoo.py should allow for both stray and in-connection traceroute
+modes.
+
+udptrace_plgoo.py should use UDP 53 by default; 0 and 123 are also nice options
+- it may also be nice to simply make a random A record request in a DNS packet
+and use it as the payload for a UDP traceroute.
+
+reversetrace_plgoo.py should give a remote host the client's IP and return the
+output of a traceroute to that IP from the remote host. It will need a remote
+component if run against a web server. It would not need a remote component if
+run against route-views - we can simply telnet over Tor and ask it to trace to
+our detected client IP.
+
+keyword_plgoo.py should take a keyword or a list of keywords for use as a
+payload in a variety of protocols. This should be protocol aware - dns keyword
+filtering requires a sniffer to catch stray packets after the censor wins the
+race. HTTP payloads in open connections may be similar and in practice, we'll
+have to fine-tune it.
+
+icsi_plgoo.py - The ICSI Netalyzr tests; we should act as a client for their
+servers. They have dozens of tests and to implement this plgoo, we'll need to
+add many things to ooni. More details here:
+http://netalyzr.icsi.berkeley.edu/faq.html
+http://netalyzr.icsi.berkeley.edu/json/id=example-session
+
+HTML output:
+http://n2.netalyzr.icsi.berkeley.edu/summary/id=43ca208a-3466-82f17207-9bc1-433f-9b43
+
+JSON output:
+http://n2.netalyzr.icsi.berkeley.edu/json/id=43ca208a-3466-82f17207-9bc1-433f-9b43
+
+Netalyzer log:
+http://netalyzr.icsi.berkeley.edu/restore/id=43ca208a-3466-82f17207-9bc1-433f-9b43
+http://n2.netalyzr.icsi.berkeley.edu/transcript/id=43ca208a-3466-82f17207-9bc1-433f-9b43/side=client
+http://n2.netalyzr.icsi.berkeley.edu/transcript/id=43ca208a-3466-82f17207-9bc1-433f-9b43/side=server
+
+sniffer_plgoo.py - We need a generic method for capturing packets during a full
+run - this may be better as a core ooni-probe feature but we should implement
+packet capture in a plugin if it is done nowhere else.
+
+nmap_plgoo.py - We should take a list of hosts and run nmap against each of
+these hosts; many hosts are collected during testing and they should be scanned
+with something reasonable like "-A -O -T4 -sT --top-ports=10000" or something
+more reasonable.
+
diff --git a/old-to-be-ported-code/very-old/TO_BE_PORTED b/old-to-be-ported-code/very-old/TO_BE_PORTED
new file mode 100644
index 0000000..49ce5e0
--- /dev/null
+++ b/old-to-be-ported-code/very-old/TO_BE_PORTED
@@ -0,0 +1,14 @@
+
+The tests in this directory are very old, and have neither been ported to
+Twisted, nor to the new twisted.trial API framework. Although, they are not
+old in the sense of the *seriously old* OONI code which was written two years
+ago.
+
+These tests should be updated at least to use Twisted.
+
+If you want to hack on something care free, feel free to mess with these files
+because it would be difficult to not improve on them.
+
+<(A)3
+isis
+0x2cdb8b35
diff --git a/old-to-be-ported-code/very-old/ooni-probe.diff b/old-to-be-ported-code/very-old/ooni-probe.diff
new file mode 100644
index 0000000..fc61d3f
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni-probe.diff
@@ -0,0 +1,358 @@
+diff --git a/TODO b/TODO
+index c2e19af..51fa559 100644
+--- a/TODO
++++ b/TODO
+@@ -293,3 +293,142 @@ VIA Rail MITM's SSL In Ottawa:
+ Jul 22 17:47:21.983 [Warning] Problem bootstrapping. Stuck at 85%: Finishing handshake with first hop. (DONE; DONE; count 13; recommendation warn)
+
+ http://wireless.colubris.com:81/goform/HtmlLoginRequest?username=al1852&pas…
++
++VIA Rail Via header:
++
++HTTP/1.0 301 Moved Permanently
++Location: http://www.google.com/
++Content-Type: text/html; charset=UTF-8
++Date: Sat, 23 Jul 2011 02:21:30 GMT
++Expires: Mon, 22 Aug 2011 02:21:30 GMT
++Cache-Control: public, max-age=2592000
++Server: gws
++Content-Length: 219
++X-XSS-Protection: 1; mode=block
++X-Cache: MISS from cache_server
++X-Cache-Lookup: MISS from cache_server:3128
++Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
++Connection: close
++
++<HTML><HEAD><meta http-equiv="content-type" content="text/html;charset=utf-8">
++<TITLE>301 Moved</TITLE></HEAD><BODY>
++<H1>301 Moved</H1>
++The document has moved
++<A HREF="http://www.google.com/">here</A>.
++</BODY></HTML>
++
++
++blocked site:
++
++HTTP/1.0 302 Moved Temporarily
++Server: squid/2.6.STABLE21
++Date: Sat, 23 Jul 2011 02:22:17 GMT
++Content-Length: 0
++Location: http://10.66.66.66/denied.html
++
++invalid request response:
++
++$ nc 8.8.8.8 80
++hjdashjkdsahjkdsa
++HTTP/1.0 400 Bad Request
++Server: squid/2.6.STABLE21
++Date: Sat, 23 Jul 2011 02:22:44 GMT
++Content-Type: text/html
++Content-Length: 1178
++Expires: Sat, 23 Jul 2011 02:22:44 GMT
++X-Squid-Error: ERR_INVALID_REQ 0
++X-Cache: MISS from cache_server
++X-Cache-Lookup: NONE from cache_server:3128
++Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
++Proxy-Connection: close
++
++<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
++<HTML><HEAD><META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
++<TITLE>ERROR: The requested URL could not be retrieved</TITLE>
++<STYLE type="text/css"><!--BODY{background-color:#ffffff;font-family:verdana,sans-serif}PRE{font-family:sans-serif}--></STYLE>
++</HEAD><BODY>
++<H1>ERROR</H1>
++<H2>The requested URL could not be retrieved</H2>
++<HR noshade size="1px">
++<P>
++While trying to process the request:
++<PRE>
++hjdashjkdsahjkdsa
++
++</PRE>
++<P>
++The following error was encountered:
++<UL>
++<LI>
++<STRONG>
++Invalid Request
++</STRONG>
++</UL>
++
++<P>
++Some aspect of the HTTP Request is invalid. Possible problems:
++<UL>
++<LI>Missing or unknown request method
++<LI>Missing URL
++<LI>Missing HTTP Identifier (HTTP/1.0)
++<LI>Request is too large
++<LI>Content-Length missing for POST or PUT requests
++<LI>Illegal character in hostname; underscores are not allowed
++</UL>
++<P>Your cache administrator is <A HREF="mailto:root">root</A>.
++
++<BR clear="all">
++<HR noshade size="1px">
++<ADDRESS>
++Generated Sat, 23 Jul 2011 02:22:44 GMT by cache_server (squid/2.6.STABLE21)
++</ADDRESS>
++</BODY></HTML>
++
++nc 10.66.66.66 80
++GET cache_object://localhost/info HTTP/1.0
++HTTP/1.0 403 Forbidden
++Server: squid/2.6.STABLE21
++Date: Sat, 23 Jul 2011 02:25:56 GMT
++Content-Type: text/html
++Content-Length: 1061
++Expires: Sat, 23 Jul 2011 02:25:56 GMT
++X-Squid-Error: ERR_ACCESS_DENIED 0
++X-Cache: MISS from cache_server
++X-Cache-Lookup: NONE from cache_server:3128
++Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
++Proxy-Connection: close
++
++<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
++<HTML><HEAD><META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
++<TITLE>ERROR: The requested URL could not be retrieved</TITLE>
++<STYLE type="text/css"><!--BODY{background-color:#ffffff;font-family:verdana,sans-serif}PRE{font-family:sans-serif}--></STYLE>
++</HEAD><BODY>
++<H1>ERROR</H1>
++<H2>The requested URL could not be retrieved</H2>
++<HR noshade size="1px">
++<P>
++While trying to retrieve the URL:
++<A HREF="cache_object://localhost/info">cache_object://localhost/info</A>
++<P>
++The following error was encountered:
++<UL>
++<LI>
++<STRONG>
++Access Denied.
++</STRONG>
++<P>
++Access control configuration prevents your request from
++being allowed at this time. Please contact your service provider if
++you feel this is incorrect.
++</UL>
++<P>Your cache administrator is <A HREF="mailto:root">root</A>.
++
++
++<BR clear="all">
++<HR noshade size="1px">
++<ADDRESS>
++Generated Sat, 23 Jul 2011 02:25:56 GMT by cache_server (squid/2.6.STABLE21)
++</ADDRESS>
++</BODY></HTML>
++
++
+diff --git a/ooni/command.py b/ooni/command.py
+index 361190f..df1a58c 100644
+--- a/ooni/command.py
++++ b/ooni/command.py
+@@ -13,6 +13,7 @@ import ooni.captive_portal
+ import ooni.namecheck
+ import ooni.dns_poisoning
+ import ooni.dns_cc_check
++import ooni.transparenthttp
+
+ class Command():
+ def __init__(self, args):
+@@ -48,6 +49,15 @@ class Command():
+ help="run captiveportal tests"
+ )
+
++ # --transhttp
++ def cb_transhttp(option, opt, value, oparser):
++ self.action = opt[2:]
++ optparser.add_option(
++ "--transhttp",
++ action="callback", callback=cb_transhttp,
++ help="run Transparent HTTP tests"
++ )
++
+ # --dns
+ def cb_dnstests(option, opt, value, oparser):
+ self.action = opt[2:]
+@@ -122,7 +132,7 @@ class Command():
+ if (not self.action):
+ raise optparse.OptionError(
+ 'is required',
+- '--dns | --dnsbulk | --captiveportal | --help | --version'
++ '--dns | --dnsbulk | --dnscccheck | [ --cc CC ] | --captiveportal | --transhttp | --help | --version'
+ )
+
+ except optparse.OptionError, err:
+@@ -138,6 +148,10 @@ class Command():
+ captive_portal = ooni.captive_portal.CaptivePortal
+ captive_portal(self).main()
+
++ def transhttp(self):
++ transparent_http = ooni.transparenthttp.TransparentHTTPProxy
++ transparent_http(self).main()
++
+ def dns(self):
+ dnstests = ooni.namecheck.DNS
+ dnstests(self).main()
+diff --git a/ooni/dns.py b/ooni/dns.py
+index 95da6ef..90d50bd 100644
+--- a/ooni/dns.py
++++ b/ooni/dns.py
+@@ -8,7 +8,7 @@ from socket import gethostbyname
+ import ooni.common
+
+ # apt-get install python-dns
+-import DNS
++import dns
+ import random
+
+ """ Wrap gethostbyname """
+diff --git a/ooni/http.py b/ooni/http.py
+index 62365bb..bb72001 100644
+--- a/ooni/http.py
++++ b/ooni/http.py
+@@ -7,8 +7,14 @@
+ from socket import gethostbyname
+ import ooni.common
+ import urllib2
++import httplib
++from urlparse import urlparse
++from pprint import pprint
+ import pycurl
++import random
++import string
+ import re
++from BeautifulSoup import BeautifulSoup
+
+ # By default, we'll be Torbutton's UA
+ default_ua = { 'User-Agent' :
+@@ -20,20 +26,8 @@ default_proxy_type = PROXYTYPE_SOCKS5
+ default_proxy_host = "127.0.0.1"
+ default_proxy_port = "9050"
+
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
++#class HTTPResponse(object):
++# def __init__(self):
+
+
+ """A very basic HTTP fetcher that uses Tor by default and returns a curl
+@@ -51,7 +45,7 @@ def http_proxy_fetch(url, headers, proxy_type=5,
+ http_code = getinfo(pycurl.HTTP_CODE)
+ return response, http_code
+
+-"""A very basic HTTP fetcher that returns a urllib3 response object."""
++"""A very basic HTTP fetcher that returns a urllib2 response object."""
+ def http_fetch(url,
+ headers= default_ua,
+ label="generic HTTP fetch"):
+@@ -136,6 +130,76 @@ def http_header_no_match(experiment_url, control_header, control_result):
+ else:
+ return True
+
++def http_request(self, method, url, path=None):
++ """Takes as argument url that is perfectly formed (http://hostname/REQUEST"""
++ purl = urlparse(url)
++ host = purl.netloc
++ conn = httplib.HTTPConnection(host, 80)
++ if path is None:
++ path = purl.path
++ conn.request(method, purl.path)
++ response = conn.getresponse()
++ headers = dict(response.getheaders())
++ self.headers = headers
++ self.data = response.read()
++ return True
++
++def search_headers(self, s_headers, url):
++ if http_request(self, "GET", url):
++ headers = self.headers
++ else:
++ return None
++ result = {}
++ for h in s_headers.items():
++ result[h[0]] = h[0] in headers
++ return result
++
++def http_header_match_dict(experimental_url, dict_header):
++ result = {}
++ url_header = http_get_header_dict(experimental_url)
++
++# XXX for testing
++# [('content-length', '9291'), ('via', '1.0 cache_server:3128 (squid/2.6.STABLE21)'), ('x-cache', 'MISS from cache_server'), ('accept-ranges', 'bytes'), ('server', 'Apache/2.2.16 (Debian)'), ('last-modified', 'Fri, 22 Jul 2011 03:00:31 GMT'), ('connection', 'close'), ('etag', '"105801a-244b-4a89fab1e51c0;49e684ba90c80"'), ('date', 'Sat, 23 Jul 2011 03:03:56 GMT'), ('content-type', 'text/html'), ('x-cache-lookup', 'MISS from cache_server:3128')]
++
++def search_squid_headers(self):
++ url = "http://securityfocus.org/blabla"
++ s_headers = {'via': '1.0 cache_server:3128 (squid/2.6.STABLE21)', 'x-cache': 'MISS from cache_server', 'x-cache-lookup':'MISS from cache_server:3128'}
++ ret = search_headers(self, s_headers, url)
++ for i in ret.items():
++ if i[1] is True:
++ return False
++ return True
++
++def random_bad_request(self):
++ url = "http://securityfocus.org/blabla"
++ r_str = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(random.randint(5,20)))
++ if http_request(self, r_str, url):
++ return True
++ else:
++ return None
++
++def squid_search_bad_request(self):
++ if random_bad_request(self):
++ s_headers = {'X-Squid-Error' : 'ERR_INVALID_REQ 0'}
++ for i in s_headers.items():
++ if i[0] in self.headers:
++ return False
++ return True
++ else:
++ return None
++
++def squid_cacheobject_request(self):
++ url = "http://securityfocus.org/blabla"
++ if http_request(self, "GET", url, "cache_object://localhost/info"):
++ soup = BeautifulSoup(self.data)
++ if soup.find('strong') and soup.find('strong').string == "Access Denied.":
++ return False
++ else:
++ return True
++ else:
++ return None
++
++
+ def MSHTTP_CP_Tests(self):
+ experiment_url = "http://www.msftncsi.com/ncsi.txt"
+ expectedResponse = "Microsoft NCSI" # Only this - nothing more
+@@ -186,6 +250,18 @@ def WC3_CP_Tests(self):
+
+ # Google ChromeOS fetches this url in guest mode
+ # and they expect the user to authenticate
+- def googleChromeOSHTTPTest(self):
+- print "noop"
+- #url = "http://www.google.com/"
++def googleChromeOSHTTPTest(self):
++ print "noop"
++ #url = "http://www.google.com/"
++
++def SquidHeader_TransparentHTTP_Tests(self):
++ return search_squid_headers(self)
++
++def SquidBadRequest_TransparentHTTP_Tests(self):
++ squid_cacheobject_request(self)
++ return squid_search_bad_request(self)
++
++def SquidCacheobject_TransparentHTTP_Tests(self):
++ return squid_cacheobject_request(self)
++
++
diff --git a/old-to-be-ported-code/very-old/ooni/#namecheck.py# b/old-to-be-ported-code/very-old/ooni/#namecheck.py#
new file mode 100644
index 0000000..1a2a3f0
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/#namecheck.py#
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+#
+# DNS tampering detection module
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+#
+# This module performs multiple DNS tests.
+
+import sys
+import ooni.dnsooni
+
+class DNS():
+ def __init__(self, args):
+ self.in_ = sys.stdin
+ self.out = sys.stdout
+ self.debug = False
+ self.randomize = args.randomize
+
+ def DNS_Tests(self):
+ print "DNS tampering detection:"
+ filter_name = "_DNS_Tests"
+ tests = [ooni.dnsooni]
+ for test in tests:
+ for function_ptr in dir(test):
+ if function_ptr.endswith(filter_name):
+ filter_result = getattr(test, function_ptr)(self)
+ if filter_result == True:
+ print function_ptr + " thinks the network is clean"
+ elif filter_result == None:
+ print function_ptr + " failed"
+ else:
+ print function_ptr + " thinks the network is dirty"
+
+ def main(self):
+ for function_ptr in dir(self):
+ if function_ptr.endswith("_Tests"):
+ getattr(self, function_ptr)()
+
+if __name__ == '__main__':
+ self.main()
diff --git a/old-to-be-ported-code/very-old/ooni/.DS_Store b/old-to-be-ported-code/very-old/ooni/.DS_Store
new file mode 100644
index 0000000..f5738a5
Binary files /dev/null and b/old-to-be-ported-code/very-old/ooni/.DS_Store differ
diff --git a/old-to-be-ported-code/very-old/ooni/__init__.py b/old-to-be-ported-code/very-old/ooni/__init__.py
new file mode 100644
index 0000000..8f1b96e
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/__init__.py
@@ -0,0 +1,12 @@
+"""\
+This is your package, 'ooni'.
+
+It was provided by the package, `package`.
+
+Please change this documentation, and write this module!
+"""
+
+__version__ = '0.0.1'
+
+# If you run 'make test', this is your failing test.
+# raise Exception("\n\n\tNow it's time to write your 'ooni' module!!!\n\n")
diff --git a/old-to-be-ported-code/very-old/ooni/command.py b/old-to-be-ported-code/very-old/ooni/command.py
new file mode 100644
index 0000000..e5f8f9f
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/command.py
@@ -0,0 +1,250 @@
+# -*- coding: utf-8
+"""\
+Command line UI module for ooni-probe - heavily inspired by Ingy döt Net
+"""
+
+import os
+import sys
+import re
+import optparse
+
+# Only include high level ooni tests at this time
+import ooni.captive_portal
+import ooni.namecheck
+import ooni.dns_poisoning
+import ooni.dns_cc_check
+import ooni.transparenthttp
+import ooni.helpers
+import ooni.plugooni
+import ooni.input
+
+class Command():
+ def __init__(self, args):
+ sys.argv = sys.argv[0:1]
+ sys.argv.extend(args)
+ self.startup_options()
+
+ def startup_options(self):
+ self.action = None
+ self.from_ = None
+ self.to = None
+ self.parser = None
+ self.emitter = None
+ self.emit_header = None
+ self.emit_trailer = None
+ self.in_ = sys.stdin
+ self.out = sys.stdout
+ self.debug = False
+ self.randomize = True
+ self.cc = None
+ self.hostname = None
+ self.listfile = None
+ self.listplugooni = False
+ self.plugin_name = "all"
+ self.controlproxy = None # "socks4a://127.0.0.1:9050/"
+ self.experimentproxy = None
+
+ usage = """
+
+ 'ooni' is the Open Observatory of Network Interference
+
+ command line usage: ooni-probe [options]"""
+
+ optparser = optparse.OptionParser(usage=usage)
+
+ # --plugin
+ def cb_plugin(option, opt, value, oparser):
+ self.action = opt[2:]
+ self.plugin_name = str(value)
+ optparser.add_option(
+ "--plugin", type="string",
+ action="callback", callback=cb_plugin,
+ help="run the Plugooni plgoo plugin specified"
+ )
+
+ # --listplugins
+ def cb_list_plugins(option, opt, value, oparser):
+ self.action = opt[2:]
+ optparser.add_option(
+ "--listplugins",
+ action="callback", callback=cb_list_plugins,
+ help="list available Plugooni as plgoos plugin names"
+ )
+
+ # --captiveportal
+ def cb_captiveportal(option, opt, value, oparser):
+ self.action = opt[2:]
+ optparser.add_option(
+ "--captiveportal",
+ action="callback", callback=cb_captiveportal,
+ help="run vendor emulated captiveportal tests"
+ )
+
+ # --transhttp
+ def cb_transhttp(option, opt, value, oparser):
+ self.action = opt[2:]
+ optparser.add_option(
+ "--transhttp",
+ action="callback", callback=cb_transhttp,
+ help="run Transparent HTTP tests"
+ )
+
+ # --dns
+ def cb_dnstests(option, opt, value, oparser):
+ self.action = opt[2:]
+ optparser.add_option(
+ "--dns",
+ action="callback", callback=cb_dnstests,
+ help="run fixed generic dns tests"
+ )
+
+ # --dnsbulk
+ def cb_dnsbulktests(option, opt, value, oparser):
+ self.action = opt[2:]
+ optparser.add_option(
+ "--dnsbulk",
+ action="callback", callback=cb_dnsbulktests,
+ help="run bulk DNS tests in random.shuffle() order"
+ )
+
+ # --dns-cc-check
+ def cb_dnscccheck(option, opt, value, oparser):
+ self.action = opt[2:]
+ optparser.add_option(
+ "--dnscccheck",
+ action="callback", callback=cb_dnscccheck,
+ help="run cc specific bulk DNS tests in random.shuffle() order"
+ )
+
+ # --cc [country code]
+ def cb_cc(option, opt, value, optparser):
+ # XXX: We should check this against a list of supported county codes
+ # and then return the matching value from the list into self.cc
+ self.cc = str(value)
+ optparser.add_option(
+ "--cc", type="string",
+ action="callback", callback=cb_cc,
+ help="set a specific county code -- default is None",
+ )
+
+ # --list [url/hostname/ip list in file]
+ def cb_list(option, opt, value, optparser):
+ self.listfile = os.path.expanduser(value)
+ if not os.path.isfile(self.listfile):
+ print "Wrong file '" + value + "' in --list."
+ sys.exit(1)
+ optparser.add_option(
+ "--list", type="string",
+ action="callback", callback=cb_list,
+ help="file to read from -- default is None",
+ )
+
+ # --url [url/hostname/ip]
+ def cb_host(option, opt, value, optparser):
+ self.hostname = str(value)
+ optparser.add_option(
+ "--url", type="string",
+ action="callback", callback=cb_host,
+ help="set URL/hostname/IP for use in tests -- default is None",
+ )
+
+ # --controlproxy [scheme://host:port]
+ def cb_controlproxy(option, opt, value, optparser):
+ self.controlproxy = str(value)
+ optparser.add_option(
+ "--controlproxy", type="string",
+ action="callback", callback=cb_controlproxy,
+ help="proxy to be used as a control -- default is None",
+ )
+
+ # --experimentproxy [scheme://host:port]
+ def cb_experimentproxy(option, opt, value, optparser):
+ self.experimentproxy = str(value)
+ optparser.add_option(
+ "--experimentproxy", type="string",
+ action="callback", callback=cb_experimentproxy,
+ help="proxy to be used for experiments -- default is None",
+ )
+
+
+
+ # --randomize
+ def cb_randomize(option, opt, value, optparser):
+ self.randomize = bool(int(value))
+ optparser.add_option(
+ "--randomize", type="choice",
+ choices=['0', '1'], metavar="0|1",
+ action="callback", callback=cb_randomize,
+ help="randomize host order -- default is on",
+ )
+
+ # XXX TODO:
+ # pause/resume scans for dns_BULK_DNS_Tests()
+ # setting of control/experiment resolver
+ # setting of control/experiment proxy
+ #
+
+ def cb_version(option, opt, value, oparser):
+ self.action = 'version'
+ optparser.add_option(
+ "-v", "--version",
+ action="callback", callback=cb_version,
+ help="print ooni-probe version"
+ )
+
+ # parse options
+ (opts, args) = optparser.parse_args()
+
+ # validate options
+ try:
+ if (args):
+ raise optparse.OptionError('extra arguments found', args)
+ if (not self.action):
+ raise optparse.OptionError(
+ 'RTFS', 'required arguments missing'
+ )
+
+ except optparse.OptionError, err:
+ sys.stderr.write(str(err) + '\n\n')
+ optparser.print_help()
+ sys.exit(1)
+
+ def version(self):
+ print """
+ooni-probe pre-alpha
+Copyright (c) 2011, Jacob Appelbaum, Arturo Filastò
+See: https://www.torproject.org/ooni/
+
+"""
+
+ def run(self):
+ getattr(self, self.action)()
+
+ def plugin(self):
+ plugin_run = ooni.plugooni.Plugooni
+ plugin_run(self).run(self)
+
+ def listplugins(self):
+ plugin_run = ooni.plugooni.Plugooni
+ plugin_run(self).list_plugoons()
+
+ def captiveportal(self):
+ captive_portal = ooni.captive_portal.CaptivePortal
+ captive_portal(self).main()
+
+ def transhttp(self):
+ transparent_http = ooni.transparenthttp.TransparentHTTPProxy
+ transparent_http(self).main()
+
+ def dns(self):
+ dnstests = ooni.namecheck.DNS
+ dnstests(self).main()
+
+ def dnsbulk(self):
+ dnstests = ooni.dns_poisoning.DNSBulk
+ dnstests(self).main()
+
+ def dnscccheck(self):
+ dnstests = ooni.dns_cc_check.DNSBulk
+ dnstests(self).main()
+
diff --git a/old-to-be-ported-code/very-old/ooni/dns_poisoning.py b/old-to-be-ported-code/very-old/ooni/dns_poisoning.py
new file mode 100644
index 0000000..939391e
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/dns_poisoning.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+#
+# DNS tampering detection module
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+#
+# This module performs DNS queries against a known good resolver and a possible
+# bad resolver. We compare every resolved name against a list of known filters
+# - if we match, we ring a bell; otherwise, we list possible filter IP
+# addresses. There is a high false positive rate for sites that are GeoIP load
+# balanced.
+#
+
+import sys
+import ooni.dnsooni
+
+class DNSBulk():
+ def __init__(self, args):
+ self.in_ = sys.stdin
+ self.out = sys.stdout
+ self.randomize = args.randomize
+ self.debug = False
+
+ def DNS_Tests(self):
+ print "DNS tampering detection for list of domains:"
+ filter_name = "_DNS_BULK_Tests"
+ tests = [ooni.dnsooni]
+ for test in tests:
+ for function_ptr in dir(test):
+ if function_ptr.endswith(filter_name):
+ filter_result = getattr(test, function_ptr)(self)
+ if filter_result == True:
+ print function_ptr + " thinks the network is clean"
+ elif filter_result == None:
+ print function_ptr + " failed"
+ else:
+ print function_ptr + " thinks the network is dirty"
+ def main(self):
+ for function_ptr in dir(self):
+ if function_ptr.endswith("_Tests"):
+ getattr(self, function_ptr)()
+
+if __name__ == '__main__':
+ self.main()
diff --git a/old-to-be-ported-code/very-old/ooni/dnsooni.py b/old-to-be-ported-code/very-old/ooni/dnsooni.py
new file mode 100644
index 0000000..bfdfe51
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/dnsooni.py
@@ -0,0 +1,356 @@
+#!/usr/bin/env python
+#
+# DNS support for ooni-probe
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+#
+
+from socket import gethostbyname
+import ooni.common
+
+# requires python-dns
+# (pydns.sourceforge.net)
+try:
+ import DNS
+# Mac OS X needs this
+except:
+ try:
+ import dns as DNS
+ except:
+ pass # Never mind, let's break later.
+import random
+from pprint import pprint
+
def dns_resolve(hostname):
    """Wrap gethostbyname: return the resolved IPv4 address as a string,
    or False when resolution fails.

    bugfix: the original bare 'except:' also swallowed KeyboardInterrupt
    and SystemExit; only resolver/socket errors count as lookup failure.
    """
    import socket  # local import: top of file only pulls in gethostbyname
    try:
        return gethostbyname(hostname)
    except socket.error:
        return False
+
def dns_resolve_match(experiment_hostname, control_resolved,
                      label="generic DNS comparison"):
    """Resolve experiment_hostname and compare it with the expected
    control_resolved ip address.

    Returns True when they match, False on a mismatch (with a diagnostic
    line printed), and None when resolution failed outright.
    """
    outcome = dns_resolve(experiment_hostname)
    if outcome == False:
        return None
    if not outcome:
        # mirrors the original's implicit fall-through for a falsy,
        # non-False result (not expected from dns_resolve in practice)
        return None
    if str(outcome) != str(control_resolved):
        print(label + " control " + str(control_resolved) + " data does not "
              "match experiment response: " + str(outcome))
        return False
    return True
+
def generic_DNS_resolve(experiment_hostname, experiment_resolver):
    """Issue a DNS query for experiment_hostname and return the pydns
    answers list. A resolver of None means "use the locally configured
    resolver"; otherwise the given server overrides it.
    """
    if experiment_resolver is None:
        request = DNS.Request(name=experiment_hostname)  # local resolver
    else:
        request = DNS.Request(name=experiment_hostname,
                              server=experiment_resolver)  # override
    return request.req().answers
+
def load_list_of_known_censors(known_proxy_file=None):
    """Return (known_proxy_list, known_proxy_count) read from a file of
    known censor/proxy IPs, one per line. Entries keep their trailing
    newline, matching how callers have always consumed them.

    bugfix: the original accepted a known_proxy_file argument but then
    unconditionally reopened the bundled proxy-lists/ips.txt; the argument
    is honored now (default unchanged).
    """
    if known_proxy_file is None:
        known_proxy_file = "proxy-lists/ips.txt"
    handle = open(known_proxy_file, 'r', 1)
    known_proxy_list = handle.readlines()
    handle.close()
    known_proxy_count = len(known_proxy_list)
    print("Loading " + str(known_proxy_count) + " known proxies...")
    return known_proxy_list, known_proxy_count
+
def load_list_of_test_hosts(hostfile=None):
    """Read a list of host names (one per line), skipping blank lines.

    Returns (host_list, host_count); entries keep their trailing newline,
    so callers strip() before use.
    """
    if hostfile == None:
        hostfile = "censorship-lists/norwegian-dns-blacklist.txt"
    source = open(hostfile, 'r', 1)
    hosts = [line for line in source.readlines() if not line.isspace()]
    source.close()
    #print "Loading " + str(len(hosts)) + " test host names..."
    return hosts, len(hosts)
+
def contains_known_censors(known_proxy_list, experiment_data):
    """Return (True, censors) if any known censor from known_proxy_list
    appears in the experiment_data DNS answers; otherwise (False, []).

    bugfix: the original compared the *loop index* (an int) against each
    proxy string, so it could never match. We now compare the answer's
    resolved value. Proxy-list entries keep their newline (see the loader),
    hence the strip().
    """
    match = False
    proxy_list = []
    for answer in experiment_data:
        # each answer is assumed to be a pydns answer dict whose 'data'
        # field holds the resolved address -- TODO confirm against pydns
        answer_value = answer.get('data') if isinstance(answer, dict) else answer
        for known_proxy in known_proxy_list:
            if str(answer_value) == str(known_proxy).strip():
                print("CONFLICT: known proxy discovered: " + str(known_proxy))
                proxy_list.append(known_proxy)
                match = True
    return match, proxy_list

def compare_control_with_experiment(known_proxy_list, control_data, experiment_data):
    """Return (True, conflicts) when the experiment answers disagree with
    the control answers (or a known censor was seen); otherwise (False, []).

    bugfix: the original compared one control answer's 'data' string against
    the *entire* experiment answers list, which is never equal, so every
    lookup was flagged as a conflict. We now check whether each control
    value is present among the experiment's resolved values.
    """
    known_proxy_found, known_proxies = contains_known_censors(known_proxy_list, experiment_data)
    conflict_list = []
    conflict = False
    if known_proxy_found:
        print("known proxy discovered: " + str(known_proxies))
    experiment_values = []
    for answer in experiment_data:
        experiment_values.append(answer.get('data') if isinstance(answer, dict) else answer)
    for answer in control_data:
        control_value = answer.get('data') if isinstance(answer, dict) else answer
        if control_value in experiment_values:
            continue
        conflict = True
        # keep the original shape: the whole experiment answer set is
        # recorded per conflicting control answer
        conflict_list.append(experiment_data)
    return conflict, conflict_list
+
+def dns_DNS_BULK_Tests(self, hostfile=None,
+ known_good_resolver="8.8.8.8", test_resolver=None):
+ tampering = False # By default we'll pretend the internet is nice
+ tampering_list = []
+ host_list, host_count = load_list_of_test_hosts()
+ known_proxies, proxy_count = load_list_of_known_censors()
+ check_count = 1
+ if test_resolver == None:
+ DNS.ParseResolvConf() # Set the local resolver as our default
+ if self.randomize:
+ random.shuffle(host_list) # This makes our list non-sequential for now
+ for host_name in host_list:
+ host_name = host_name.strip()
+ print "Total progress: " + str(check_count) + " of " + str(host_count) + " hosts to check"
+ print "Resolving with control resolver..."
+ print "Testing " + host_name + " with control resolver: " + str(known_good_resolver)
+ print "Testing " + host_name + " with experiment resolver: " + str(test_resolver)
+ # XXX TODO - we need to keep track of the status of these requests and then resume them
+ while True:
+ try:
+ control_data = generic_DNS_resolve(host_name, known_good_resolver)
+ break
+ except KeyboardInterrupt:
+ print "bailing out..."
+ exit()
+ except DNS.Base.DNSError:
+ print "control resolver appears to be failing..."
+ continue
+ except:
+ print "Timeout; looping!"
+ continue
+
+ print "Resolving with experiment resolver..."
+ while True:
+ try:
+ experiment_data = generic_DNS_resolve(host_name, test_resolver)
+ break
+ except KeyboardInterrupt:
+ print "bailing out..."
+ exit()
+ except DNS.Base.DNSError:
+ print "experiment resolver appears to be failing..."
+ continue
+ except:
+ print "Timeout; looping!"
+ continue
+
+ print "Comparing control and experiment...",
+ tampering, conflicts = compare_control_with_experiment(known_proxies, control_data, experiment_data)
+ if tampering:
+ tampering_list.append(conflicts)
+ print "Conflicts with " + str(host_name) + " : " + str(conflicts)
+ check_count = check_count + 1
+ host_list.close()
+ return tampering
+
def dns_response_empty(random_hostname):
    """Attempt to resolve random_hostname.

    Returns (True, None) when the name does not resolve (the "empty"
    answer a clean network gives for garbage names) and (False, address)
    when something answered.
    """
    answer = dns_resolve(random_hostname)
    return (True, None) if answer == False else (False, answer)
+
def dns_multi_response_empty(count, size):
    """Resolve `count` random hostnames of length `size`.

    Returns True only when every lookup came back empty; False as soon as
    any random name resolved (a sign of NXDOMAIN hijacking).

    bugfixes: the original referenced an undefined 'label' (NameError on
    the failure path) and kept looping so only the *last* lookup decided
    the result; we short-circuit on the first hit.
    """
    for i in range(count):
        random_name = ooni.common._randstring(size)
        response_empty, response_ip = dns_response_empty(random_name)
        if not (response_empty == True and response_ip == None):
            print("dns_multi_response_empty " + random_name +
                  " found with value " + str(response_ip))
            return False
    return True
+
def dns_list_empty(tld_list, min_length, max_length,
                   label="generic DNS list test"):
    """Resolve one random host name per tld in tld_list (random strings of
    min_length..max_length chars). Return True only if every lookup came
    back empty, False otherwise.

    bugfix: the original returned only the result of the *last* tld tried;
    any earlier non-empty answer was silently discarded.
    """
    for tld in tld_list:
        random_name = ooni.common._randstring(min_length, max_length) + tld
        response_empty, response_ip = dns_response_empty(random_name)
        if not response_empty:
            return False
    return True
+
# Known bad test
# Test for their DNS breakage and their HTTP MITM
# "Family Shield" is 208.67.222.123 and 208.67.220.123
# returns 67.215.65.130 for filtered sites like kink.com
# block.opendns.com is a block page where users are redirected
# 208.67.216.135 208.67.217.135 are the block pages currently point
# 67.215.65.132 is returned for NXDOMAINs and a visit with HTTP to that IP
# results in redirection to http://guide.opendns.com/main?url=sdagsad.com or
# whatever the HOST header says
# Amusingly - their Server header is: "OpenDNS Guide"
def OpenDNS_DNS_Tests(self):
    """Return True if we are not being directed to known OpenDNS block pages.

    bugfix: the original had two consecutive return statements, so the
    NXDomain check was unreachable. The network counts as clean only when
    both checks pass.
    """
    return (OpenDNS_Censorship_DNS_TESTS(self)
            and OpenDNS_NXDomain_DNS_TESTS(self))
+
def OpenDNS_Censorship_DNS_TESTS(self):
    """True when a random hostname is NOT redirected to the OpenDNS
    content-filter address (67.215.65.130)."""
    known_filter = "67.215.65.130"
    random_name = ooni.common._randstring(10)
    redirected = dns_resolve_match(random_name, known_filter,
                                   label="OpenDNS DNS Censorship comparison")
    # dns_resolve_match returns True/False/None; any non-True outcome
    # (mismatch or failed lookup) counts as "not redirected", as before
    return not redirected
+
def OpenDNS_NXDomain_DNS_TESTS(self):
    """True when a random hostname is NOT redirected to the OpenDNS
    NXDOMAIN-hijack address (67.215.65.132)."""
    known_filter = "67.215.65.132"
    random_name = ooni.common._randstring(10)
    redirected = dns_resolve_match(random_name, known_filter,
                                   label="OpenDNS DNS NXDomain comparison")
    return not redirected
+
+"""Returns True if the experiment_url returns the well known Italian block page."""
+def cc_DNS_Tests_it(self):
+ tampering = False # By default we'll pretend the internet is nice
+ tampering_list = []
+ conflicts = []
+ known_good_resolver = "8.8.8.8"
+ host_list, host_count = load_list_of_test_hosts("censorship-lists/italy-gamble-blocklist-07-22-11.txt")
+ known_http_block_pages, known_block_count = load_list_of_test_hosts("proxy-lists/italy-http-ips.txt")
+ known_censoring_resolvers, censoring_resolver_count = load_list_of_test_hosts("proxy-lists/italy-dns-ips.txt")
+
+ check_count = 1
+ DNS.ParseResolvConf()
+ # Set the local resolver as our default
+ if self.randomize:
+ random.shuffle(host_list) # This makes our list non-sequential for now
+ print "We're testing (" + str(host_count) + ") URLs"
+ print "We're looking for (" + str(known_block_count) + ") block pages"
+ print "We're testing against (" + str(censoring_resolver_count) + ") censoring DNS resolvers"
+ for test_resolver in known_censoring_resolvers:
+ test_resolver = test_resolver.strip()
+ for host_name in host_list:
+ host_name = host_name.strip()
+ print "Total progress: " + str(check_count) + " of " + str(host_count) + " hosts to check"
+ print "Testing " + host_name + " with control resolver: " + known_good_resolver
+ print "Testing " + host_name + " with experiment resolver: " + test_resolver
+ while True:
+ try:
+ control_data = generic_DNS_resolve(host_name, known_good_resolver)
+ break
+ except KeyboardInterrupt:
+ print "bailing out..."
+ exit()
+ except DNS.Base.DNSError:
+ print "control resolver appears to be failing..."
+ break
+ except:
+ print "Timeout; looping!"
+ continue
+
+ while True:
+ try:
+ experiment_data = generic_DNS_resolve(host_name, test_resolver)
+ break
+ except KeyboardInterrupt:
+ print "bailing out..."
+ exit()
+ except DNS.Base.DNSError:
+ print "experiment resolver appears to be failing..."
+ continue
+ except:
+ print "Timeout; looping!"
+ continue
+
+ print "Comparing control and experiment...",
+ tampering, conflicts = compare_control_with_experiment(known_http_block_pages, control_data, experiment_data)
+ if tampering:
+ tampering_list.append(conflicts)
+ print "Conflicts with " + str(host_name) + " : " + str(conflicts)
+ check_count = check_count + 1
+
+ host_list.close()
+ return tampering
+
+
## XXX TODO
## Code up automatic tests for HTTP page checking in Italy - length + known strings, etc

def Australian_DNS_Censorship(self, known_filtered_host="badhost.com"):
    """Return True if known_filtered_host resolves to the well-known
    Australian filter IP via any known censoring resolver.

    bugfixes: the original referenced an undefined 'known_block_page'
    (NameError) instead of the known_block_ip it had just defined, and
    called generic_DNS_censorship without the 'self' argument its
    signature requires.
    """
    # http://www.robtex.com/ip/61.88.88.88.html
    # http://requests.optus.net.au/dns/
    known_block_ip = "208.69.183.228" # http://interpol.contentkeeper.com/
    known_censoring_resolvers = ["61.88.88.88"] # Optus
    for resolver in known_censoring_resolvers:
        blocked = generic_DNS_censorship(self, known_filtered_host, resolver,
                                         known_block_ip)
        if blocked:
            return True
+
def generic_DNS_censorship(self, experiment_hostname, experiment_resolver,
                           control_data):
    """Return True if experiment_hostname, as resolved by
    experiment_resolver, resolves to control_data; False when no answer
    matches.

    bugfix: the original called s.req() on an undefined name 's'
    (NameError); the request object it built is 'req'.
    """
    req = DNS.Request(name=experiment_hostname, server=experiment_resolver)
    resolved_data = req.req().answers
    for answer in range(len(resolved_data)):
        if resolved_data[answer]['data'] == control_data:
            return True
    return False
+
# See dns_launch_wildcard_checks in tor/src/or/dns.c for Tor implementation
# details
def Tor_DNS_Tests(self):
    """Return True if Tor would consider the network fine; False if it's
    hostile and shows signs of DNS tampering (wildcard hijacking)."""
    rfc2606_empty = RFC2606_DNS_Tests(self)
    tor_tlds = ["", ".com", ".org", ".net"]
    tor_empty = ooni.dnsooni.dns_list_empty(tor_tlds, 8, 16, "TorDNSTest")
    # bitwise-or on the two booleans, exactly as the original combined them
    return tor_empty | rfc2606_empty
+
def RFC2606_DNS_Tests(self):
    """Probe the RFC 2606 reserved TLDs (.invalid, .test): random names
    there must never resolve on a clean network."""
    reserved_tlds = [".invalid", ".test"]
    return ooni.dnsooni.dns_list_empty(reserved_tlds, 4, 18, "RFC2606Test")
+
def googleChrome_CP_Tests(self):
    """Mimic Chrome's startup probe: several random short hostnames must
    all fail to resolve on a clean network."""
    max_google_dns_tests = 3
    google_dns_test_size = 10
    return ooni.dnsooni.dns_multi_response_empty(max_google_dns_tests,
                                                google_dns_test_size)

def googleChrome_DNS_Tests(self):
    """Alias wiring the Chrome captive-portal probe into the DNS suite."""
    return googleChrome_CP_Tests(self)
+
def MSDNS_CP_Tests(self):
    """Microsoft NCSI DNS probe: dns.msftncsi.com must resolve to the
    documented 131.107.255.255 on an unfiltered network."""
    experiment_hostname = "dns.msftncsi.com"
    expected_response = "131.107.255.255"
    return ooni.dnsooni.dns_resolve_match(experiment_hostname,
                                          expected_response, "MS DNS")

def MSDNS_DNS_Tests(self):
    """Alias wiring the NCSI probe into the DNS suite."""
    return MSDNS_CP_Tests(self)
diff --git a/old-to-be-ported-code/very-old/ooni/helpers.py b/old-to-be-ported-code/very-old/ooni/helpers.py
new file mode 100644
index 0000000..514e65f
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/helpers.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+#
+# HTTP support for ooni-probe
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+# Arturo Filasto' <art(a)fuffa.org>
+
+import ooni.common
+import pycurl
+import random
+import zipfile
+import os
+from xml.dom import minidom
+try:
+ from BeautifulSoup import BeautifulSoup
+except:
+ pass # Never mind, let's break later.
+
def get_random_url(self, filepath=None):
    """Pick a random site from the bundled Alexa top-1m list and return it
    as an http:// URL.

    filepath optionally overrides the zip location (new, backward
    compatible; defaults to the old hard-coded path).

    bugfixes: the zip handle is closed; the stray fp.open() whose result
    was discarded is gone; blank/short lines (e.g. the trailing newline)
    are filtered out, which previously could raise IndexError.
    """
    if filepath is None:
        filepath = os.getcwd() + "/test-lists/top-1m.csv.zip"
    fp = zipfile.ZipFile(filepath, "r")
    try:
        content = fp.read("top-1m.csv")
    finally:
        fp.close()
    if isinstance(content, bytes) and str is not bytes:
        # Python 3 ZipFile.read returns bytes; py2 behavior is unchanged
        content = content.decode("utf-8", "replace")
    rows = [line for line in content.split("\n") if "," in line]
    return "http://" + random.choice(rows).split(",")[1]
+
def get_random_headers(self):
    """Pick one random <measurement> from the bundled whatheaders.xml and
    return its headers as a list of (name, value) tuples, excluding the
    host header.

    bugfix: random.randint(0, len(measurements)) is inclusive at both ends
    and could index one past the end of the list; randrange excludes the
    upper bound.
    """
    filepath = os.getcwd() + "/test-lists/whatheaders.xml"
    headers = []
    content = open(filepath, "r").read()
    soup = BeautifulSoup(content)
    measurements = soup.findAll('measurement')
    i = random.randrange(len(measurements))
    for vals in measurements[i].findAll('header'):
        name = vals.find('name').string
        value = vals.find('value').string
        if name != "host":
            headers.append((name, value))
    return headers
diff --git a/old-to-be-ported-code/very-old/ooni/http.py b/old-to-be-ported-code/very-old/ooni/http.py
new file mode 100644
index 0000000..59e2abb
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/http.py
@@ -0,0 +1,306 @@
+#!/usr/bin/env python
+#
+# HTTP support for ooni-probe
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+# Arturo Filasto' <art(a)fuffa.org>
+#
+
+from socket import gethostbyname
+import ooni.common
+import ooni.helpers
+import ooni.report
+import urllib2
+import httplib
+from urlparse import urlparse
+from pprint import pprint
+import pycurl
+import random
+import string
+import re
+from pprint import pprint
+try:
+ from BeautifulSoup import BeautifulSoup
+except:
+ pass # Never mind, let's break later.
+
+# By default, we'll be Torbutton's UA
+default_ua = { 'User-Agent' :
+ 'Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0' }
+
+# Use pycurl to connect over a proxy
+PROXYTYPE_SOCKS5 = 5
+default_proxy_type = PROXYTYPE_SOCKS5
+default_proxy_host = "127.0.0.1"
+default_proxy_port = "9050"
+
+#class HTTPResponse(object):
+# def __init__(self):
+
+
def http_proxy_fetch(url, headers, proxy_type=5,
                     proxy_host="127.0.0.1",
                     proxy_port=9050):
    """A very basic HTTP fetcher that uses Tor by default; returns the raw
    perform() result and the HTTP status code.

    bugfixes: getinfo() was called unqualified (NameError) -- it is a
    method of the Curl handle; the headers parameter was silently ignored
    and is now honored when given as a pycurl-style list of
    "Name: value" strings.
    """
    request = pycurl.Curl()
    request.setopt(pycurl.PROXY, proxy_host)
    request.setopt(pycurl.PROXYPORT, proxy_port)
    request.setopt(pycurl.PROXYTYPE, proxy_type)
    if isinstance(headers, list) and headers:
        request.setopt(pycurl.HTTPHEADER, headers)
    else:
        # fall back to the Torbutton UA the original always sent
        request.setopt(pycurl.HTTPHEADER, ["User-Agent: Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0"])
    request.setopt(pycurl.URL, url)
    response = request.perform()
    http_code = request.getinfo(pycurl.HTTP_CODE)
    return response, http_code
+
def http_fetch(url,
               headers= default_ua,
               label="generic HTTP fetch"):
    """A very basic HTTP fetcher; returns the open urllib2 response object.

    NOTE: headers defaults to the shared default_ua mapping and is never
    mutated here.
    """
    return urllib2.urlopen(urllib2.Request(url, None, headers))
+
def http_content_match(experimental_url, control_result,
                       headers=None,
                       label="generic HTTP content comparison"):
    """Fetch experimental_url and compare its body with control_result.

    Returns (True, status_code) on a match, (False, status_code) on a
    mismatch, and (False, False) when no body came back.

    bugfix: the old default nested the default_ua *dict* inside another
    dict ({'User-Agent': {...}}), sending a garbage User-Agent header;
    the plain default_ua mapping is used now, consistent with http_fetch.
    """
    if headers is None:
        headers = default_ua
    request = urllib2.Request(experimental_url, None, headers)
    response = urllib2.urlopen(request)
    responseContents = response.read()
    responseCode = response.code
    if responseContents != False:
        if str(responseContents) != str(control_result):
            print(label + " control " + str(control_result) + " data does not "
                  "match experiment response: " + str(responseContents))
            return False, responseCode
        return True, responseCode
    else:
        print("HTTP connection appears to have failed")
        return False, False
+
def http_content_fuzzy_match(experimental_url, control_result,
                             headers=None,
                             label="generic HTTP content comparison"):
    """Fetch experimental_url and search its body for the control_result
    regex.

    Returns (True, status_code) on a match, (False, status_code) on a
    mismatch, and (False, False) when no body came back.

    bugfix: same nested-default_ua header bug as http_content_match; the
    plain default_ua mapping is used when headers are omitted.
    """
    if headers is None:
        headers = default_ua
    request = urllib2.Request(experimental_url, None, headers)
    response = urllib2.urlopen(request)
    responseContents = response.read()
    responseCode = response.code
    pattern = re.compile(control_result)
    match = pattern.search(responseContents)
    if responseContents != False:
        if not match:
            print(label + " control " + str(control_result) + " data does not "
                  "match experiment response: " + str(responseContents))
            return False, responseCode
        return True, responseCode
    else:
        print("HTTP connection appears to have failed")
        return False, False
+
def http_status_code_match(experiment_code, control_code):
    """True when the two HTTP status codes are numerically equal
    (inputs may be ints or numeric strings)."""
    return int(experiment_code) == int(control_code)

def http_status_code_no_match(experiment_code, control_code):
    """Negation of http_status_code_match."""
    return not http_status_code_match(experiment_code, control_code)
+
def http_header_match(experiment_url, control_header, control_result):
    """Fetch experiment_url and compare the named response header with
    control_result. Returns True on a match, False otherwise.

    bugfixes: the original fetched an undefined 'url' with an undefined
    'label' (NameError on every call), and urllib2 responses have no
    get_header() method -- headers live on response.info().
    """
    response = http_fetch(experiment_url,
                          label="header match for " + str(control_header))
    remote_header = response.info().getheader(control_header)
    if str(remote_header) == str(control_result):
        return True
    else:
        return False
+
def http_header_no_match(experiment_url, control_header, control_result):
    """Inverse of http_header_match: True when the served header does NOT
    equal control_result."""
    return not http_header_match(experiment_url, control_header,
                                 control_result)
+
def send_browser_headers(self, browser, conn):
    """Write a randomly-chosen real-browser header set onto conn and end
    the headers. 'browser' is accepted but unused (kept for callers)."""
    for name, value in ooni.helpers.get_random_headers(self):
        conn.putheader(name, value)
    conn.endheaders()
    return True
+
def http_request(self, method, url, path=None):
    """Issue a raw HTTP request to the host of url, storing the response
    headers on self.headers and the body on self.data. Returns True.

    bugfix: the original computed 'path' (falling back to the URL's path)
    but then sent purl.path anyway, so an explicit path argument -- used
    e.g. by the cache_object:// squid probe -- was silently ignored.
    """
    purl = urlparse(url)
    host = purl.netloc
    conn = httplib.HTTPConnection(host, 80)
    conn.connect()
    if path is None:
        path = purl.path
    conn.putrequest(method, path)
    send_browser_headers(self, None, conn)
    response = conn.getresponse()
    headers = dict(response.getheaders())
    self.headers = headers
    self.data = response.read()
    return True
+
def search_headers(self, s_headers, url):
    """GET url and report, for each header name in s_headers, whether the
    response carried it. Returns a dict name->bool, or None when the
    request failed."""
    if not http_request(self, "GET", url):
        return None
    received = self.headers
    return dict((name, name in received) for name in s_headers)
+
# XXX for testing
# [('content-length', '9291'), ('via', '1.0 cache_server:3128 (squid/2.6.STABLE21)'), ('x-cache', 'MISS from cache_server'), ('accept-ranges', 'bytes'), ('server', 'Apache/2.2.16 (Debian)'), ('last-modified', 'Fri, 22 Jul 2011 03:00:31 GMT'), ('connection', 'close'), ('etag', '"105801a-244b-4a89fab1e51c0;49e684ba90c80"'), ('date', 'Sat, 23 Jul 2011 03:03:56 GMT'), ('content-type', 'text/html'), ('x-cache-lookup', 'MISS from cache_server:3128')]

def search_squid_headers(self):
    """Search for squid headers by requesting a random site and checking if
    the headers have been rewritten (active, not fingerprintable).
    Returns False when any squid signature header is present, else True."""
    test_name = "squid header"
    self.logger.info("RUNNING %s test" % test_name)
    target = ooni.helpers.get_random_url(self)
    squid_signature = {'via': '1.0 cache_server:3128 (squid/2.6.STABLE21)',
                       'x-cache': 'MISS from cache_server',
                       'x-cache-lookup': 'MISS from cache_server:3128'}
    found = search_headers(self, squid_signature, target)
    for name, present in found.items():
        if present is True:
            self.logger.info("the %s test returned False" % test_name)
            return False
    self.logger.info("the %s test returned True" % test_name)
    return True
+
def random_bad_request(self):
    """Send a request whose method is a random 5-20 character string.
    Returns True on completion, None when the request failed."""
    target = ooni.helpers.get_random_url(self)
    alphabet = string.ascii_uppercase + string.digits
    verb = ''.join(random.choice(alphabet)
                   for _ in range(random.randint(5, 20)))
    if http_request(self, verb, target):
        return True
    return None
+
def squid_search_bad_request(self):
    """Create a request made up of a random string of 5-20 chars (active
    technique, possibly fingerprintable). A squid error header in the
    response means a transparent proxy is present (returns False)."""
    test_name = "squid bad request"
    self.logger.info("RUNNING %s test" % test_name)
    if not random_bad_request(self):
        self.logger.warning("the %s test returned failed" % test_name)
        return None
    squid_error_markers = {'X-Squid-Error' : 'ERR_INVALID_REQ 0'}
    for name, _expected in squid_error_markers.items():
        if name in self.headers:
            self.logger.info("the %s test returned False" % test_name)
            return False
    self.logger.info("the %s test returned True" % test_name)
    return True
+
def squid_cacheobject_request(self):
    """Try requesting cache_object and expect as output access denied
    (very active technique, fingerprintable). Returns False when squid's
    'Access Denied.' page is served, True otherwise, None on failure."""
    url = ooni.helpers.get_random_url(self)
    test_name = "squid cacheobject"
    self.logger.info("RUNNING %s test" % test_name)
    if not http_request(self, "GET", url, "cache_object://localhost/info"):
        self.logger.warning("the %s test failed" % test_name)
        return None
    soup = BeautifulSoup(self.data)
    marker = soup.find('strong')
    if marker and marker.string == "Access Denied.":
        self.logger.info("the %s test returned False" % test_name)
        return False
    self.logger.info("the %s test returned True" % test_name)
    return True
+
+
def MSHTTP_CP_Tests(self):
    """Microsoft NCSI captive-portal probe: ncsi.txt must contain exactly
    'Microsoft NCSI' with HTTP 200; anything else suggests interception."""
    test_name = "MS HTTP Captive Portal"
    self.logger.info("RUNNING %s test" % test_name)
    experiment_url = "http://www.msftncsi.com/ncsi.txt"
    expectedResponse = "Microsoft NCSI" # Only this - nothing more
    expectedResponseCode = "200" # Must be this - nothing else
    label = "MS HTTP"
    headers = { 'User-Agent' : 'Microsoft NCSI' }
    content_match, experiment_code = http_content_match(
        experiment_url, expectedResponse, headers, label)
    status_match = http_status_code_match(expectedResponseCode,
                                          experiment_code)
    if status_match and content_match:
        self.logger.info("the %s test returned True" % test_name)
        return True
    print(label + " experiment would conclude that the network is filtered.")
    self.logger.info("the %s test returned False" % test_name)
    return False
+
def AppleHTTP_CP_Tests(self):
    """Apple captive-portal probe: success.html must fuzzy-match 'Success'
    with HTTP 200 when fetched with an iPhone user agent."""
    test_name = "Apple HTTP Captive Portal"
    self.logger.info("RUNNING %s test" % test_name)
    experiment_url = "http://www.apple.com/library/test/success.html"
    expectedResponse = "Success" # There is HTML that contains this string
    expectedResponseCode = "200"
    label = "Apple HTTP"
    headers = { 'User-Agent' : 'Mozilla/5.0 (iPhone; U; CPU like Mac OS X; en) '
                'AppleWebKit/420+ (KHTML, like Gecko) Version/3.0'
                ' Mobile/1A543a Safari/419.3' }
    content_match, experiment_code = http_content_fuzzy_match(
        experiment_url, expectedResponse, headers)
    status_match = http_status_code_match(expectedResponseCode,
                                          experiment_code)
    if status_match and content_match:
        self.logger.info("the %s test returned True" % test_name)
        return True
    print(label + " experiment would conclude that the network is filtered.")
    print(label + "content match:" + str(content_match) + " status match:" + str(status_match))
    self.logger.info("the %s test returned False" % test_name)
    return False
+
def WC3_CP_Tests(self):
    """W3/IETF captive-portal draft probe: a normal network must NOT serve
    the draft's 428 status for this document.

    bugfix: the failure branch printed an undefined 'status_match'
    (NameError); it now reports the actual response code.
    """
    test_name = "W3 Captive Portal"
    self.logger.info("RUNNING %s test" % test_name)
    url = "http://tools.ietf.org/html/draft-nottingham-http-portal-02"
    draftResponseCode = "428"
    label = "WC3 draft-nottingham-http-portal"
    response = http_fetch(url, label=label)
    responseCode = response.code
    if http_status_code_no_match(responseCode, draftResponseCode):
        self.logger.info("the %s test returned True" % test_name)
        return True
    else:
        print(label + " experiment would conclude that the network is filtered.")
        print(label + " status code " + str(responseCode) +
              " matches the captive-portal draft code")
        self.logger.info("the %s test returned False" % test_name)
        return False
+
# Google ChromeOS fetches this url in guest mode
# and they expect the user to authenticate
def googleChromeOSHTTPTest(self):
    """Placeholder: the ChromeOS guest-mode probe is not implemented yet."""
    print("noop")
    #url = "http://www.google.com/"
+
def SquidHeader_TransparentHTTP_Tests(self):
    """Alias wiring the squid header check into the
    *_TransparentHTTP_Tests suite."""
    return search_squid_headers(self)

def SquidBadRequest_TransparentHTTP_Tests(self):
    """Alias for the squid invalid-request fingerprint check."""
    return squid_search_bad_request(self)

def SquidCacheobject_TransparentHTTP_Tests(self):
    """Alias for the squid cache_object:// probe."""
    return squid_cacheobject_request(self)
+
+
diff --git a/old-to-be-ported-code/very-old/ooni/input.py b/old-to-be-ported-code/very-old/ooni/input.py
new file mode 100644
index 0000000..c32ab48
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/input.py
@@ -0,0 +1,33 @@
+#!/usr/bin/python
+
class file:
    """Input parsers keyed by format; csv/yaml/consensus are stubs that
    only record the path for later.

    NOTE(review): the class name shadows the Python 2 builtin 'file'; it
    is kept unchanged for compatibility with existing callers.
    """

    def __init__(self, name=None):
        # name: path of the input file; may also be supplied per-call below
        if name:
            self.name = name

    def simple(self, name=None):
        """ Simple file parsing method:
        Read a file line by line and output an array with all its lines,
        without newlines. Returns whatever was read so far ([] on an
        unreadable or unset path), matching the original's best-effort
        contract.

        bugfix: the original leaked the open file handle and used a bare
        'except:'; the handle is now closed via 'with' and only I/O errors
        (plus AttributeError, for "no name was ever configured") are
        swallowed.
        """
        if name:
            self.name = name
        output = []
        try:
            with open(self.name, "r") as handle:
                for line in handle:
                    output.append(line.strip())
            return output
        except (IOError, OSError, AttributeError):
            return output

    def csv(self, name=None):
        # stub: only records the path for a future CSV parser
        if name:
            self.name = name

    def yaml(self, name):
        # stub: only records the path for a future YAML parser
        if name:
            self.name = name

    def consensus(self, name):
        # stub: only records the path for a future consensus parser
        if name:
            self.name = name
diff --git a/old-to-be-ported-code/very-old/ooni/namecheck.py b/old-to-be-ported-code/very-old/ooni/namecheck.py
new file mode 100644
index 0000000..1a2a3f0
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/namecheck.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+#
+# DNS tampering detection module
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+#
+# This module performs multiple DNS tests.
+
+import sys
+import ooni.dnsooni
+
class DNS():
    """DNS tampering detector: drives every function named *_DNS_Tests
    exported by ooni.dnsooni and reports each verdict."""

    def __init__(self, args):
        # stdin/stdout kept as explicit handles so a caller could redirect them
        self.in_ = sys.stdin
        self.out = sys.stdout
        self.debug = False
        # args is expected to carry a boolean 'randomize' attribute
        self.randomize = args.randomize

    def DNS_Tests(self):
        print("DNS tampering detection:")
        suffix = "_DNS_Tests"
        for module in [ooni.dnsooni]:
            for attribute in dir(module):
                if not attribute.endswith(suffix):
                    continue
                outcome = getattr(module, attribute)(self)
                if outcome == True:
                    print(attribute + " thinks the network is clean")
                elif outcome == None:
                    print(attribute + " failed")
                else:
                    print(attribute + " thinks the network is dirty")

    def main(self):
        # dispatch: run every method of this object whose name ends in "_Tests"
        for attribute in dir(self):
            if attribute.endswith("_Tests"):
                getattr(self, attribute)()
+
if __name__ == '__main__':
    # bugfix: the original executed self.main() at module scope, where
    # 'self' is undefined (NameError). Build the args object DNS expects
    # (it reads args.randomize) and run the test suite.
    import argparse
    parser = argparse.ArgumentParser(description="DNS tampering detection")
    parser.add_argument("--randomize", action="store_true", default=False,
                        help="randomize where applicable")
    DNS(parser.parse_args()).main()
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/__init__.py b/old-to-be-ported-code/very-old/ooni/plugins/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/dnstest_plgoo.py b/old-to-be-ported-code/very-old/ooni/plugins/dnstest_plgoo.py
new file mode 100644
index 0000000..0c0cfa7
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/plugins/dnstest_plgoo.py
@@ -0,0 +1,84 @@
+#!/usr/bin/python
+
+import sys
+import re
+from pprint import pprint
+from twisted.internet import reactor, endpoints
+from twisted.names import client
+from ooni.plugooni import Plugoo
+from ooni.socksclient import SOCKSv4ClientProtocol, SOCKSWrapper
+
class DNSTestPlugin(Plugoo):
    """Plugoo DNS comparison test: resolve hostnames through control and
    experiment resolvers via twisted-names and report names whose answers
    differ."""

    def __init__(self):
        self.name = ""
        self.type = ""
        self.paranoia = ""
        self.modules_to_import = []
        self.output_dir = ""
        self.buf = ""
        self.control_response = []

    def response_split(self, response):
        """Split a list of (dns_server, value) pairs into two parallel lists."""
        servers = []
        values = []
        for pair in response:
            servers.append(pair[0])
            values.append(pair[1])
        return servers, values

    def cb(self, type, hostname, dns_server, value):
        """Deferred callback: record control answers; compare experiment
        answers against the recorded control set."""
        if self.control_response is None:
            self.control_response = []
        if type == 'control' and self.control_response != value:
            print("%s %s" % (dns_server, value))
            self.control_response.append((dns_server, value))
            pprint(self.control_response)
        if type == 'experiment':
            pprint(self.control_response)
            _, res = self.response_split(self.control_response)
            if value not in res:
                print("res (%s) : " % value)
                pprint(res)
                print("---")
                print("%s appears to be censored on %s (%s != %s)" % (hostname, dns_server, res[0], value))
            else:
                print("%s appears to be clean on %s" % (hostname, dns_server))
                self.r2.servers = [('212.245.158.66',53)]
            print("HN: %s %s" % (hostname, value))

    def err(self, pck, error):
        """Deferred errback: dump the failing (hostname, server) pair,
        print the traceback and stop the reactor."""
        pprint(pck)
        error.printTraceback()
        reactor.stop()
        print("error!")
        pass

    def ooni_main(self, args):
        self.experimentalproxy = ''
        self.test_hostnames = ['dio.it']
        self.control_dns = [('8.8.8.8',53), ('4.4.4.8',53)]
        self.experiment_dns = [('85.37.17.9',53),('212.245.158.66',53)]

        self.control_res = []
        self.control_response = None

        self.r1 = client.Resolver(None, [self.control_dns.pop()])
        self.r2 = client.Resolver(None, [self.experiment_dns.pop()])

        for hostname in self.test_hostnames:
            for dns_server in self.control_dns:
                self.r1.servers = [dns_server]
                f = self.r1.getHostByName(hostname)
                pck = (hostname, dns_server)
                # bugfix: bind loop variables as lambda defaults -- the
                # original closures captured them late, so every deferred
                # callback saw only the loop's final hostname/server/pck.
                f.addCallback(
                    lambda x, h=hostname, s=dns_server: self.cb('control', h, s, x)
                ).addErrback(lambda x, p=pck: self.err(p, x))

            for dns_server in self.experiment_dns:
                self.r2.servers = [dns_server]
                pck = (hostname, dns_server)
                f = self.r2.getHostByName(hostname)
                f.addCallback(
                    lambda x, h=hostname, s=dns_server: self.cb('experiment', h, s, x)
                ).addErrback(lambda x, p=pck: self.err(p, x))

        reactor.run()
+
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/http_plgoo.py b/old-to-be-ported-code/very-old/ooni/plugins/http_plgoo.py
new file mode 100644
index 0000000..021e863
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/plugins/http_plgoo.py
@@ -0,0 +1,70 @@
+#!/usr/bin/python
+
+import sys
+import re
+from twisted.internet import reactor, endpoints
+from twisted.web import client
+from ooni.plugooni import Plugoo
+from ooni.socksclient import SOCKSv4ClientProtocol, SOCKSWrapper
+
+class HttpPlugin(Plugoo):
+ def __init__(self):
+ self.name = ""
+ self.type = ""
+ self.paranoia = ""
+ self.modules_to_import = []
+ self.output_dir = ""
+ self.buf = ''
+
+ def cb(self, type, content):
+ print "got %d bytes from %s" % (len(content), type) # DEBUG
+ if not self.buf:
+ self.buf = content
+ else:
+ if self.buf == content:
+ print "SUCCESS"
+ else:
+ print "FAIL"
+ reactor.stop()
+
+ def endpoint(self, scheme, host, port):
+ ep = None
+ if scheme == 'http':
+ ep = endpoints.TCP4ClientEndpoint(reactor, host, port)
+ elif scheme == 'https':
+ ep = endpoints.SSL4ClientEndpoint(reactor, host, port, context)
+ return ep
+
+ def ooni_main(self):
+ # We don't have the Command object so cheating for now.
+ url = 'http://check.torproject.org/'
+ self.controlproxy = 'socks4a://127.0.0.1:9050'
+ self.experimentalproxy = ''
+
+ if not re.match("[a-zA-Z0-9]+\:\/\/[a-zA-Z0-9]+", url):
+ return None
+ scheme, host, port, path = client._parse(url)
+
+ ctrl_dest = self.endpoint(scheme, host, port)
+ if not ctrl_dest:
+ raise Exception('unsupported scheme %s in %s' % (scheme, url))
+ if self.controlproxy:
+ _, proxy_host, proxy_port, _ = client._parse(self.controlproxy)
+ control = SOCKSWrapper(reactor, proxy_host, proxy_port, ctrl_dest)
+ else:
+ control = ctrl_dest
+ f = client.HTTPClientFactory(url)
+ f.deferred.addCallback(lambda x: self.cb('control', x))
+ control.connect(f)
+
+ exp_dest = self.endpoint(scheme, host, port)
+ if not exp_dest:
+ raise Exception('unsupported scheme %s in %s' % (scheme, url))
+ # FIXME: use the experiment proxy if there is one
+ experiment = exp_dest
+ f = client.HTTPClientFactory(url)
+ f.deferred.addCallback(lambda x: self.cb('experiment', x))
+ experiment.connect(f)
+
+ reactor.run()
+
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/marco_plgoo.py b/old-to-be-ported-code/very-old/ooni/plugins/marco_plgoo.py
new file mode 100644
index 0000000..cb63df7
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/plugins/marco_plgoo.py
@@ -0,0 +1,377 @@
+#!/usr/bin/python
+# Copyright 2009 The Tor Project, Inc.
+# License at end of file.
+#
+# This tests connections to a list of Tor nodes in a given Tor consensus file
+# while also recording the certificates - it's not a perfect tool but complete
+# or even partial failure should raise alarms.
+#
+# This plugoo uses threads and as a result, it's not friendly to SIGINT signals.
+#
+
+import logging
+import socket
+import time
+import random
+import threading
+import sys
+import os
+try:
+ from ooni.plugooni import Plugoo
+except:
+ print "Error importing Plugoo"
+
+try:
+ from ooni.common import Storage
+except:
+ print "Error importing Storage"
+
+try:
+ from ooni import output
+except:
+ print "Error importing output"
+
+try:
+ from ooni import input
+except:
+ print "Error importing output"
+
+
+
+ssl = OpenSSL = None
+
+try:
+ import ssl
+except ImportError:
+ pass
+
+if ssl is None:
+ try:
+ import OpenSSL.SSL
+ import OpenSSL.crypto
+ except ImportError:
+ pass
+
+if ssl is None and OpenSSL is None:
+ if socket.ssl:
+ print """Your Python is too old to have the ssl module, and you haven't
+installed pyOpenSSL. I'll try to work with what you've got, but I can't
+record certificates so well."""
+ else:
+ print """Your Python has no OpenSSL support. Upgrade to 2.6, install
+pyOpenSSL, or both."""
+ sys.exit(1)
+
+################################################################
+
+# How many servers should we test in parallel?
+N_THREADS = 16
+
+# How long do we give individual socket operations to succeed or fail?
+# (Seconds)
+TIMEOUT = 10
+
+################################################################
+
+CONNECTING = "noconnect"
+HANDSHAKING = "nohandshake"
+OK = "ok"
+ERROR = "err"
+
+LOCK = threading.RLock()
+socket.setdefaulttimeout(TIMEOUT)
+
+def clean_pem_cert(cert):
+ idx = cert.find('-----END')
+ if idx > 1 and cert[idx-1] != '\n':
+ cert = cert.replace('-----END','\n-----END')
+ return cert
+
+def record((addr,port), state, extra=None, cert=None):
+ LOCK.acquire()
+ try:
+ OUT.append({'addr' : addr,
+ 'port' : port,
+ 'state' : state,
+ 'extra' : extra})
+ if cert:
+ CERT_OUT.append({'addr' : addr,
+ 'port' : port,
+ 'clean_cert' : clean_pem_cert(cert)})
+ finally:
+ LOCK.release()
+
+def probe(address,theCtx=None):
+ sock = s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ logging.info("Opening socket to %s",address)
+ try:
+ s.connect(address)
+ except IOError, e:
+ logging.info("Error %s from socket connect.",e)
+ record(address, CONNECTING, e)
+ s.close()
+ return
+ logging.info("Socket to %s open. Launching SSL handshake.",address)
+ if ssl:
+ try:
+ s = ssl.wrap_socket(s,cert_reqs=ssl.CERT_NONE,ca_certs=None)
+ # "MARCO!"
+ s.do_handshake()
+ except IOError, e:
+ logging.info("Error %s from ssl handshake",e)
+ record(address, HANDSHAKING, e)
+ s.close()
+ sock.close()
+ return
+ cert = s.getpeercert(True)
+ if cert != None:
+ cert = ssl.DER_cert_to_PEM_cert(cert)
+ elif OpenSSL:
+ try:
+ s = OpenSSL.SSL.Connection(theCtx, s)
+ s.set_connect_state()
+ s.setblocking(True)
+ s.do_handshake()
+ cert = s.get_peer_certificate()
+ if cert != None:
+ cert = OpenSSL.crypto.dump_certificate(
+ OpenSSL.crypto.FILETYPE_PEM, cert)
+ except IOError, e:
+ logging.info("Error %s from OpenSSL handshake",e)
+ record(address, HANDSHAKING, e)
+ s.close()
+ sock.close()
+ return
+ else:
+ try:
+ s = socket.ssl(s)
+ s.write('a')
+ cert = s.server()
+ except IOError, e:
+ logging.info("Error %s from socket.ssl handshake",e)
+ record(address, HANDSHAKING, e)
+ sock.close()
+ return
+
+ logging.info("SSL handshake with %s finished",address)
+ # "POLO!"
+ record(address,OK, cert=cert)
+ if (ssl or OpenSSL):
+ s.close()
+ sock.close()
+
+def parseNetworkstatus(ns):
+ for line in ns:
+ if line.startswith('r '):
+ r = line.split()
+ yield (r[-3],int(r[-2]))
+
+def parseCachedDescs(cd):
+ for line in cd:
+ if line.startswith('router '):
+ r = line.split()
+ yield (r[2],int(r[3]))
+
+def worker(addrList, origLength):
+ done = False
+ logging.info("Launching thread.")
+
+ if OpenSSL is not None:
+ context = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_METHOD)
+ else:
+ context = None
+
+ while True:
+ LOCK.acquire()
+ try:
+ if addrList:
+ print "Starting test %d/%d"%(
+ 1+origLength-len(addrList),origLength)
+ addr = addrList.pop()
+ else:
+ return
+ finally:
+ LOCK.release()
+
+ try:
+ logging.info("Launching probe for %s",addr)
+ probe(addr, context)
+ except Exception, e:
+ logging.info("Unexpected error from %s",addr)
+ record(addr, ERROR, e)
+
+def runThreaded(addrList, nThreads):
+ ts = []
+ origLen = len(addrList)
+ for num in xrange(nThreads):
+ t = threading.Thread(target=worker, args=(addrList,origLen))
+ t.setName("Th#%s"%num)
+ ts.append(t)
+ t.start()
+ for t in ts:
+ logging.info("Joining thread %s",t.getName())
+ t.join()
+
+def main(self, args):
+ # BEGIN
+ # This logic should be present in more or less all plugoos
+ global OUT
+ global CERT_OUT
+ global OUT_DATA
+ global CERT_OUT_DATA
+ OUT_DATA = []
+ CERT_OUT_DATA = []
+
+ try:
+ OUT = output.data(name=args.output.main) #open(args.output.main, 'w')
+ except:
+ print "No output file given. quitting..."
+ return -1
+
+ try:
+ CERT_OUT = output.data(args.output.certificates) #open(args.output.certificates, 'w')
+ except:
+ print "No output cert file given. quitting..."
+ return -1
+
+ logging.basicConfig(format='%(asctime)s [%(levelname)s] [%(threadName)s] %(message)s',
+ datefmt="%b %d %H:%M:%S",
+ level=logging.INFO,
+ filename=args.log)
+ logging.info("============== STARTING NEW LOG")
+ # END
+
+ if ssl is not None:
+ methodName = "ssl"
+ elif OpenSSL is not None:
+ methodName = "OpenSSL"
+ else:
+ methodName = "socket"
+ logging.info("Running marco with method '%s'", methodName)
+
+ addresses = []
+
+ if args.input.ips:
+ for fn in input.file(args.input.ips).simple():
+ a, b = fn.split(":")
+ addresses.append( (a,int(b)) )
+
+ elif args.input.consensus:
+ for fn in args:
+ print fn
+ for a,b in parseNetworkstatus(open(args.input.consensus)):
+ addresses.append( (a,b) )
+
+ if args.input.randomize:
+ # Take a random permutation of the set the knuth way!
+ for i in range(0, len(addresses)):
+ j = random.randint(0, i)
+ addresses[i], addresses[j] = addresses[j], addresses[i]
+
+ if len(addresses) == 0:
+ logging.error("No input source given, quiting...")
+ return -1
+
+ addresses = list(addresses)
+
+ if not args.input.randomize:
+ addresses.sort()
+
+ runThreaded(addresses, N_THREADS)
+
+class MarcoPlugin(Plugoo):
+ def __init__(self):
+ self.name = ""
+
+ self.modules = [ "logging", "socket", "time", "random", "threading", "sys",
+ "OpenSSL.SSL", "OpenSSL.crypto", "os" ]
+
+ self.input = Storage()
+ self.input.ip = None
+ try:
+ c_file = os.path.expanduser("~/.tor/cached-consensus")
+ open(c_file)
+ self.input.consensus = c_file
+ except:
+ pass
+
+ try:
+ c_file = os.path.expanduser("~/tor/bundle/tor-browser_en-US/Data/Tor/cached-consensus")
+ open(c_file)
+ self.input.consensus = c_file
+ except:
+ pass
+
+ if not self.input.consensus:
+ print "Error importing consensus file"
+ sys.exit(1)
+
+ self.output = Storage()
+ self.output.main = 'reports/marco-1.yamlooni'
+ self.output.certificates = 'reports/marco_certs-1.out'
+
+ # XXX This needs to be moved to a proper function
+ # refactor, refactor and ... refactor!
+ if os.path.exists(self.output.main):
+ basedir = "/".join(self.output.main.split("/")[:-1])
+ fn = self.output.main.split("/")[-1].split(".")
+ ext = fn[1]
+ name = fn[0].split("-")[0]
+ i = fn[0].split("-")[1]
+ i = int(i) + 1
+ self.output.main = os.path.join(basedir, name + "-" + str(i) + "." + ext)
+
+ if os.path.exists(self.output.certificates):
+ basedir = "/".join(self.output.certificates.split("/")[:-1])
+ fn = self.output.certificates.split("/")[-1].split(".")
+ ext = fn[1]
+ name = fn[0].split("-")[0]
+ i = fn[0].split("-")[1]
+ i = int(i) + 1
+ self.output.certificates= os.path.join(basedir, name + "-" + str(i) + "." + ext)
+
+ # We require for Tor to already be running or have recently run
+ self.args = Storage()
+ self.args.input = self.input
+ self.args.output = self.output
+ self.args.log = 'reports/marco.log'
+
+ def ooni_main(self, cmd):
+ self.args.input.randomize = cmd.randomize
+ self.args.input.ips = cmd.listfile
+ main(self, self.args)
+
+if __name__ == '__main__':
+ if len(sys.argv) < 2:
+ print >> sys.stderr, ("This script takes one or more networkstatus "
+ "files as an argument.")
+ self = None
+ main(self, sys.argv[1:])
+
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#
+# * Neither the names of the copyright owners nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/proxy_plgoo.py b/old-to-be-ported-code/very-old/ooni/plugins/proxy_plgoo.py
new file mode 100644
index 0000000..d175c1c
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/plugins/proxy_plgoo.py
@@ -0,0 +1,69 @@
+#!/usr/bin/python
+
+import sys
+from twisted.internet import reactor, endpoints
+from twisted.web import client
+from ooni.plugooni import Plugoo
+from ooni.socksclient import SOCKSv4ClientProtocol, SOCKSWrapper
+
+class HttpPlugin(Plugoo):
+ def __init__(self):
+ self.name = ""
+ self.type = ""
+ self.paranoia = ""
+ self.modules_to_import = []
+ self.output_dir = ""
+ self.buf = ''
+
+ def cb(self, type, content):
+ print "got %d bytes from %s" % (len(content), type) # DEBUG
+ if not self.buf:
+ self.buf = content
+ else:
+ if self.buf == content:
+ print "SUCCESS"
+ else:
+ print "FAIL"
+ reactor.stop()
+
+ def endpoint(self, scheme, host, port):
+ ep = None
+ if scheme == 'http':
+ ep = endpoints.TCP4ClientEndpoint(reactor, host, port)
+ elif scheme == 'https':
+ from twisted.internet import ssl
+ ep = endpoints.SSL4ClientEndpoint(reactor, host, port,
+ ssl.ClientContextFactory())
+ return ep
+
+ def ooni_main(self, cmd):
+ # We don't have the Command object so cheating for now.
+ url = cmd.hostname
+
+ # FIXME: validate that url is on the form scheme://host[:port]/path
+ scheme, host, port, path = client._parse(url)
+
+ ctrl_dest = self.endpoint(scheme, host, port)
+ if not ctrl_dest:
+ raise Exception('unsupported scheme %s in %s' % (scheme, url))
+ if cmd.controlproxy:
+ assert scheme != 'https', "no support for proxied https atm, sorry"
+ _, proxy_host, proxy_port, _ = client._parse(cmd.controlproxy)
+ control = SOCKSWrapper(reactor, proxy_host, proxy_port, ctrl_dest)
+ print "proxy: ", proxy_host, proxy_port
+ else:
+ control = ctrl_dest
+ f = client.HTTPClientFactory(url)
+ f.deferred.addCallback(lambda x: self.cb('control', x))
+ control.connect(f)
+
+ exp_dest = self.endpoint(scheme, host, port)
+ if not exp_dest:
+ raise Exception('unsupported scheme %s in %s' % (scheme, url))
+ # FIXME: use the experiment proxy if there is one
+ experiment = exp_dest
+ f = client.HTTPClientFactory(url)
+ f.deferred.addCallback(lambda x: self.cb('experiment', x))
+ experiment.connect(f)
+
+ reactor.run()
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/simple_dns_plgoo.py b/old-to-be-ported-code/very-old/ooni/plugins/simple_dns_plgoo.py
new file mode 100644
index 0000000..87d3684
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/plugins/simple_dns_plgoo.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+#
+# DNS tampering detection module
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+#
+# This module performs DNS queries against a known good resolver and a possible
+# bad resolver. We compare every resolved name against a list of known filters
+# - if we match, we ring a bell; otherwise, we list possible filter IP
+# addresses. There is a high false positive rate for sites that are GeoIP load
+# balanced.
+#
+
+import sys
+import ooni.dnsooni
+
+from ooni.plugooni import Plugoo
+
+class DNSBulkPlugin(Plugoo):
+ def __init__(self):
+ self.in_ = sys.stdin
+ self.out = sys.stdout
+ self.randomize = True # Pass this down properly
+ self.debug = False
+
+ def DNS_Tests(self):
+ print "DNS tampering detection for list of domains:"
+ tests = self.get_tests_by_filter(("_DNS_BULK_Tests"), (ooni.dnsooni))
+ self.run_tests(tests)
+
+ def magic_main(self):
+ self.run_plgoo_tests("_Tests")
+
+ def ooni_main(self, args):
+ self.magic_main()
+
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/tcpcon_plgoo.py b/old-to-be-ported-code/very-old/ooni/plugins/tcpcon_plgoo.py
new file mode 100644
index 0000000..01dee81
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/plugins/tcpcon_plgoo.py
@@ -0,0 +1,278 @@
+#!/usr/bin/python
+# Copyright 2011 The Tor Project, Inc.
+# License at end of file.
+#
+# This is a modified version of the marco plugoo. Given a list of #
+# IP:port addresses, this plugoo will attempt a TCP connection with each
+# host and write the results to a .yamlooni file.
+#
+# This plugoo uses threads and as a result, it's not friendly to SIGINT signals.
+#
+
+import logging
+import socket
+import time
+import random
+import threading
+import sys
+import os
+try:
+ from ooni.plugooni import Plugoo
+except:
+ print "Error importing Plugoo"
+
+try:
+ from ooni.common import Storage
+except:
+ print "Error importing Storage"
+
+try:
+ from ooni import output
+except:
+ print "Error importing output"
+
+try:
+ from ooni import input
+except:
+ print "Error importing output"
+
+################################################################
+
+# How many servers should we test in parallel?
+N_THREADS = 16
+
+# How long do we give individual socket operations to succeed or fail?
+# (Seconds)
+TIMEOUT = 10
+
+################################################################
+
+CONNECTING = "noconnect"
+OK = "ok"
+ERROR = "err"
+
+LOCK = threading.RLock()
+socket.setdefaulttimeout(TIMEOUT)
+
+# We will want to log the IP address, the port and the state
+def record((addr,port), state, extra=None):
+ LOCK.acquire()
+ try:
+ OUT.append({'addr' : addr,
+ 'port' : port,
+ 'state' : state,
+ 'extra' : extra})
+ finally:
+ LOCK.release()
+
+# For each IP address in the list, open a socket, write to the log and
+# then close the socket
+def probe(address,theCtx=None):
+ sock = s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ logging.info("Opening socket to %s",address)
+ try:
+ s.connect(address)
+ except IOError, e:
+ logging.info("Error %s from socket connect.",e)
+ record(address, CONNECTING, e)
+ s.close()
+ return
+ logging.info("Socket to %s open. Successfully launched TCP handshake.",address)
+ record(address, OK)
+ s.close()
+
+def parseNetworkstatus(ns):
+ for line in ns:
+ if line.startswith('r '):
+ r = line.split()
+ yield (r[-3],int(r[-2]))
+
+def parseCachedDescs(cd):
+ for line in cd:
+ if line.startswith('router '):
+ r = line.split()
+ yield (r[2],int(r[3]))
+
+def worker(addrList, origLength):
+ done = False
+ context = None
+
+ while True:
+ LOCK.acquire()
+ try:
+ if addrList:
+ print "Starting test %d/%d"%(
+ 1+origLength-len(addrList),origLength)
+ addr = addrList.pop()
+ else:
+ return
+ finally:
+ LOCK.release()
+
+ try:
+ logging.info("Launching probe for %s",addr)
+ probe(addr, context)
+ except Exception, e:
+ logging.info("Unexpected error from %s",addr)
+ record(addr, ERROR, e)
+
+def runThreaded(addrList, nThreads):
+ ts = []
+ origLen = len(addrList)
+ for num in xrange(nThreads):
+ t = threading.Thread(target=worker, args=(addrList,origLen))
+ t.setName("Th#%s"%num)
+ ts.append(t)
+ t.start()
+ for t in ts:
+ t.join()
+
+def main(self, args):
+ # BEGIN
+ # This logic should be present in more or less all plugoos
+ global OUT
+ global OUT_DATA
+ OUT_DATA = []
+
+ try:
+ OUT = output.data(name=args.output.main) #open(args.output.main, 'w')
+ except:
+ print "No output file given. quitting..."
+ return -1
+
+ logging.basicConfig(format='%(asctime)s [%(levelname)s] [%(threadName)s] %(message)s',
+ datefmt="%b %d %H:%M:%S",
+ level=logging.INFO,
+ filename=args.log)
+ logging.info("============== STARTING NEW LOG")
+ # END
+
+ methodName = "socket"
+ logging.info("Running tcpcon with method '%s'", methodName)
+
+ addresses = []
+
+ if args.input.ips:
+ for fn in input.file(args.input.ips).simple():
+ a, b = fn.split(":")
+ addresses.append( (a,int(b)) )
+
+ elif args.input.consensus:
+ for fn in args:
+ print fn
+ for a,b in parseNetworkstatus(open(args.input.consensus)):
+ addresses.append( (a,b) )
+
+ if args.input.randomize:
+ # Take a random permutation of the set the knuth way!
+ for i in range(0, len(addresses)):
+ j = random.randint(0, i)
+ addresses[i], addresses[j] = addresses[j], addresses[i]
+
+ if len(addresses) == 0:
+ logging.error("No input source given, quiting...")
+ return -1
+
+ addresses = list(addresses)
+
+ if not args.input.randomize:
+ addresses.sort()
+
+ runThreaded(addresses, N_THREADS)
+
+class MarcoPlugin(Plugoo):
+ def __init__(self):
+ self.name = ""
+
+ self.modules = [ "logging", "socket", "time", "random", "threading", "sys",
+ "os" ]
+
+ self.input = Storage()
+ self.input.ip = None
+ try:
+ c_file = os.path.expanduser("~/.tor/cached-consensus")
+ open(c_file)
+ self.input.consensus = c_file
+ except:
+ pass
+
+ try:
+ c_file = os.path.expanduser("~/tor/bundle/tor-browser_en-US/Data/Tor/cached-consensus")
+ open(c_file)
+ self.input.consensus = c_file
+ except:
+ pass
+
+ if not self.input.consensus:
+ print "Error importing consensus file"
+ sys.exit(1)
+
+ self.output = Storage()
+ self.output.main = 'reports/tcpcon-1.yamlooni'
+ self.output.certificates = 'reports/tcpcon_certs-1.out'
+
+ # XXX This needs to be moved to a proper function
+ # refactor, refactor and ... refactor!
+ if os.path.exists(self.output.main):
+ basedir = "/".join(self.output.main.split("/")[:-1])
+ fn = self.output.main.split("/")[-1].split(".")
+ ext = fn[1]
+ name = fn[0].split("-")[0]
+ i = fn[0].split("-")[1]
+ i = int(i) + 1
+ self.output.main = os.path.join(basedir, name + "-" + str(i) + "." + ext)
+
+ if os.path.exists(self.output.certificates):
+ basedir = "/".join(self.output.certificates.split("/")[:-1])
+ fn = self.output.certificates.split("/")[-1].split(".")
+ ext = fn[1]
+ name = fn[0].split("-")[0]
+ i = fn[0].split("-")[1]
+ i = int(i) + 1
+ self.output.certificates= os.path.join(basedir, name + "-" + str(i) + "." + ext)
+
+ # We require for Tor to already be running or have recently run
+ self.args = Storage()
+ self.args.input = self.input
+ self.args.output = self.output
+ self.args.log = 'reports/tcpcon.log'
+
+ def ooni_main(self, cmd):
+ self.args.input.randomize = cmd.randomize
+ self.args.input.ips = cmd.listfile
+ main(self, self.args)
+
+if __name__ == '__main__':
+ if len(sys.argv) < 2:
+ print >> sys.stderr, ("This script takes one or more networkstatus "
+ "files as an argument.")
+ self = None
+ main(self, sys.argv[1:])
+
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#
+# * Neither the names of the copyright owners nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/tor.py b/old-to-be-ported-code/very-old/ooni/plugins/tor.py
new file mode 100644
index 0000000..0d95d4d
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/plugins/tor.py
@@ -0,0 +1,80 @@
+import re
+import os.path
+import signal
+import subprocess
+import socket
+import threading
+import time
+import logging
+
+from pytorctl import TorCtl
+
+torrc = os.path.join(os.getcwd(),'torrc') #os.path.join(projroot, 'globaleaks', 'tor', 'torrc')
+# hiddenservice = os.path.join(projroot, 'globaleaks', 'tor', 'hiddenservice')
+
+class ThreadProc(threading.Thread):
+ def __init__(self, cmd):
+ threading.Thread.__init__(self)
+ self.cmd = cmd
+ self.proc = None
+
+ def run(self):
+ print "running"
+ try:
+ self.proc = subprocess.Popen(self.cmd,
+ shell = False, stdout = subprocess.PIPE,
+ stderr = subprocess.PIPE)
+
+ except OSError:
+ logging.fatal('cannot execute command')
+
+class Tor:
+ def __init__(self):
+ self.start()
+
+ def check(self):
+ conn = TorCtl.connect()
+ if conn != None:
+ conn.close()
+ return True
+
+ return False
+
+
+ def start(self):
+ if not os.path.exists(torrc):
+ raise OSError("torrc doesn't exist (%s)" % torrc)
+
+ tor_cmd = ["tor", "-f", torrc]
+
+ torproc = ThreadProc(tor_cmd)
+ torproc.run()
+
+ bootstrap_line = re.compile("Bootstrapped 100%: ")
+
+ while True:
+ if torproc.proc == None:
+ time.sleep(1)
+ continue
+
+ init_line = torproc.proc.stdout.readline().strip()
+
+ if not init_line:
+ torproc.proc.kill()
+ return False
+
+ if bootstrap_line.search(init_line):
+ break
+
+ return True
+
+ def stop(self):
+ if not self.check():
+ return
+
+ conn = TorCtl.connect()
+ if conn != None:
+ conn.send_signal("SHUTDOWN")
+ conn.close()
+
+t = Tor()
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/torrc b/old-to-be-ported-code/very-old/ooni/plugins/torrc
new file mode 100644
index 0000000..b9ffc80
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/plugins/torrc
@@ -0,0 +1,9 @@
+SocksPort 9050
+ControlPort 9051
+VirtualAddrNetwork 10.23.47.0/10
+AutomapHostsOnResolve 1
+TransPort 9040
+TransListenAddress 127.0.0.1
+DNSPort 5353
+DNSListenAddress 127.0.0.1
+
diff --git a/old-to-be-ported-code/very-old/ooni/plugooni.py b/old-to-be-ported-code/very-old/ooni/plugooni.py
new file mode 100644
index 0000000..17f17b3
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/plugooni.py
@@ -0,0 +1,106 @@
+#!/usr/bin/env python
+#
+# Plugooni, ooni plugin module for loading plgoo files.
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+# Arturo Filasto' <art(a)fuffa.org>
+
+import sys
+import os
+
+import imp, pkgutil, inspect
+
+class Plugoo:
+ def __init__(self, name, plugin_type, paranoia, author):
+ self.name = name
+ self.author = author
+ self.type = plugin_type
+ self.paranoia = paranoia
+
+ """
+ Expect a tuple of strings in 'filters' and a tuple of ooni 'plugins'.
+ Return a list of (plugin, function) tuples that match 'filter' in 'plugins'.
+ """
+ def get_tests_by_filter(self, filters, plugins):
+ ret_functions = []
+
+ for plugin in plugins:
+ for function_ptr in dir(plugin):
+ if function_ptr.endswith(filters):
+ ret_functions.append((plugin,function_ptr))
+ return ret_functions
+
+ """
+ Expect a list of (plugin, function) tuples that must be ran, and three strings 'clean'
+ 'dirty' and 'failed'.
+ Run the tests and print 'clean','dirty' or 'failed' according to the test result.
+ """
+ def run_tests(self, tests, clean="clean", dirty="dirty", failed="failed"):
+ for test in tests:
+ filter_result = getattr(test[0], test[1])(self)
+ if filter_result == True:
+ print test[1] + ": " + clean
+ elif filter_result == None:
+ print test[1] + ": " + failed
+ else:
+ print test[1] + ": " + dirty
+
+ """
+ Find all the tests belonging to plgoo 'self' and run them.
+ We know the tests when we see them because they end in 'filter'.
+ """
+ def run_plgoo_tests(self, filter):
+ for function_ptr in dir(self):
+ if function_ptr.endswith(filter):
+ getattr(self, function_ptr)()
+
+PLUGIN_PATHS = [os.path.join(os.getcwd(), "ooni", "plugins")]
+RESERVED_NAMES = [ "skel_plgoo" ]
+
+class Plugooni():
+ def __init__(self, args):
+ self.in_ = sys.stdin
+ self.out = sys.stdout
+ self.debug = False
+ self.loadall = True
+ self.plugin_name = args.plugin_name
+ self.listfile = args.listfile
+
+ self.plgoo_found = False
+
+ # Print all the plugoons to stdout.
+ def list_plugoons(self):
+ print "Plugooni list:"
+ for loader, name, ispkg in pkgutil.iter_modules(PLUGIN_PATHS):
+ if name not in RESERVED_NAMES:
+ print "\t%s" %(name.split("_")[0])
+
+ # Return name of the plgoo class of a plugin.
+ # We know because it always ends with "Plugin".
+ def get_plgoo_class(self,plugin):
+ for memb_name, memb in inspect.getmembers(plugin, inspect.isclass):
+ if memb.__name__.endswith("Plugin"):
+ return memb
+
+ # This function is responsible for loading and running the plugoons
+ # the user wants to run.
+ def run(self, command_object):
+ print "Plugooni: the ooni plgoo plugin module loader"
+
+ # iterate all modules
+ for loader, name, ispkg in pkgutil.iter_modules(PLUGIN_PATHS):
+ # see if this module should be loaded
+ if (self.plugin_name == "all") or (name == self.plugin_name+"_plgoo"):
+ self.plgoo_found = True # we found at least one plgoo!
+
+ file, pathname, desc = imp.find_module(name, PLUGIN_PATHS)
+ # load module
+ plugin = imp.load_module(name, file, pathname, desc)
+ # instantiate plgoo class and call its ooni_main()
+ self.get_plgoo_class(plugin)().ooni_main(command_object)
+
+ # if we couldn't find the plgoo; whine to the user
+ if self.plgoo_found is False:
+ print "Plugooni could not find plugin '%s'!" %(self.plugin_name)
+
+if __name__ == '__main__':
+ self.main()
diff --git a/old-to-be-ported-code/very-old/ooni/transparenthttp.py b/old-to-be-ported-code/very-old/ooni/transparenthttp.py
new file mode 100644
index 0000000..311fb32
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/transparenthttp.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+#
+# Captive Portal Detection With Multi-Vendor Emulation
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+#
+# This module performs multiple tests that match specific vendor
+# mitm proxies
+
+import sys
+import ooni.http
+import ooni.report
+
+class TransparentHTTPProxy():
+ def __init__(self, args):
+ self.in_ = sys.stdin
+ self.out = sys.stdout
+ self.debug = False
+ self.logger = ooni.report.Log().logger
+
+ def TransparentHTTPProxy_Tests(self):
+ print "Transparent HTTP Proxy:"
+ filter_name = "_TransparentHTTP_Tests"
+ tests = [ooni.http]
+ for test in tests:
+ for function_ptr in dir(test):
+ if function_ptr.endswith(filter_name):
+ filter_result = getattr(test, function_ptr)(self)
+ if filter_result == True:
+ print function_ptr + " thinks the network is clean"
+ elif filter_result == None:
+ print function_ptr + " failed"
+ else:
+ print function_ptr + " thinks the network is dirty"
+
+ def main(self):
+ for function_ptr in dir(self):
+ if function_ptr.endswith("_Tests"):
+ getattr(self, function_ptr)()
+
+if __name__ == '__main__':
+ self.main()
diff --git a/old-to-be-ported-code/very-old/traceroute.py b/old-to-be-ported-code/very-old/traceroute.py
new file mode 100644
index 0000000..e8252c1
--- /dev/null
+++ b/old-to-be-ported-code/very-old/traceroute.py
@@ -0,0 +1,108 @@
+try:
+ from dns import resolver
+except:
+ print "Error: dnspython is not installed (http://www.dnspython.org/)"
+import gevent
+import os
+import plugoo
+
+try:
+ import scapy
+except:
+ print "Error: traceroute plugin requires scapy to be installed (http://www.secdev.org/projects/scapy)"
+
+from plugoo.assets import Asset
+from plugoo.tests import Test
+
+import socket
+
+__plugoo__ = "Traceroute"
+__desc__ = "Performs TTL walking tests"
+
class TracerouteAsset(Asset):
    """Asset listing traceroute targets, one per line."""

    def __init__(self, file=None):
        # Call the base initializer for its side effects only.
        # Asset.__init__ returns None, so the original
        # `self = Asset.__init__(self, file)` rebinding was a
        # confusing no-op and has been dropped.
        Asset.__init__(self, file)
+
+
+class Traceroute(Test):
+ """A *very* quick and dirty traceroute implementation, UDP and TCP
+ """
+ # NOTE(review): opening a SOCK_RAW ICMP socket normally requires root
+ # privileges -- confirm the test runner is expected to be privileged.
+ def traceroute(self, dst, dst_port=3880, src_port=3000, proto="tcp", max_hops=30):
+ # Resolve the hostname once so the per-hop address can be compared
+ # against the destination IP to decide when to stop.
+ dest_addr = socket.gethostbyname(dst)
+ print "Doing traceroute on %s" % dst
+
+ # Raw ICMP socket receives TTL-exceeded replies; a TCP or UDP socket
+ # (chosen via `proto`) sends the probe packets.
+ recv = socket.getprotobyname('icmp')
+ send = socket.getprotobyname(proto)
+ ttl = 1
+ while True:
+ # Fresh sockets every hop: the TTL option is set per-socket below.
+ recv_sock = socket.socket(socket.AF_INET, socket.SOCK_RAW, recv)
+ if proto == "tcp":
+ send_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, send)
+ else:
+ send_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, send)
+ recv_sock.settimeout(10)
+ send_sock.settimeout(10)
+
+ # Limit the probe's TTL so intermediate routers answer with
+ # ICMP time-exceeded, revealing one hop per iteration.
+ send_sock.setsockopt(socket.SOL_IP, socket.IP_TTL, ttl)
+ recv_sock.bind(("", src_port))
+ if proto == "tcp":
+ try:
+ # A timeout here is expected for filtered/short-TTL probes.
+ send_sock.settimeout(2)
+ send_sock.connect((dst, dst_port))
+ except socket.timeout:
+ pass
+
+ except Exception, e:
+ print "Error doing connect %s" % e
+ else:
+ # UDP probe: an empty datagram is enough to elicit a reply.
+ send_sock.sendto("", (dst, dst_port))
+
+ curr_addr = None
+ try:
+ print "receiving data..."
+ # recvfrom returns (data, (ip, port)); only the replying
+ # router's IP address is of interest here.
+ _, curr_addr = recv_sock.recvfrom(512)
+ curr_addr = curr_addr[0]
+
+ except socket.error, e:
+ print "SOCKET ERROR: %s" % e
+
+ except Exception, e:
+ print "ERROR: %s" % e
+
+ finally:
+ # Always release both sockets before the next hop.
+ send_sock.close()
+ recv_sock.close()
+
+ # "*" marks a hop that did not answer within the timeout.
+ if curr_addr is not None:
+ curr_host = "%s" % curr_addr
+ else:
+ curr_host = "*"
+
+ print "%d\t%s" % (ttl, curr_host)
+
+ # Stop when the destination itself answered or the hop budget
+ # is exhausted.
+ if curr_addr == dest_addr or ttl > max_hops:
+ break
+
+ ttl += 1
+
+
+ def experiment(self, *a, **kw):
+ # this is just a dirty hack
+ # kw['data'] holds the parsed asset entries; only the first target
+ # is traced per experiment invocation.
+ address = kw['data'][0]
+
+ self.traceroute(address)
+
def run(ooni):
    """Run the test"""
    config = ooni.config
    urls = []  # kept for parity with the original; unused here

    # Build the asset from the path configured in the main config file.
    asset_path = os.path.join(config.main.assetdir, config.tests.traceroute)
    assets = [TracerouteAsset(asset_path)]

    test = Traceroute(ooni)
    ooni.logger.info("starting traceroute test")
    test.run(assets)
    ooni.logger.info("finished")
+ ooni.logger.info("finished")
1
0
[ooni-probe/master] Added basic ICMP classes for testing reachability
by isis@torproject.org 03 Nov '12
by isis@torproject.org 03 Nov '12
03 Nov '12
commit 8d2a1919f7755c5c084576a7ac1bdd37bb4c52be
Author: Isis Lovecruft <isis(a)torproject.org>
Date: Sun Aug 19 08:34:12 2012 +0000
Added basic ICMP classes for testing reachability
---
ooni/plugins/echo.py | 127 ++++++++++++++++++++++++++++++++++++++++++++++++++
1 files changed, 127 insertions(+), 0 deletions(-)
diff --git a/ooni/plugins/echo.py b/ooni/plugins/echo.py
new file mode 100644
index 0000000..bc1b2a8
--- /dev/null
+++ b/ooni/plugins/echo.py
@@ -0,0 +1,127 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+#
+# +---------+
+# | echo.py |
+# +---------+
+# A simple ICMP-8 ping test.
+#
+# :author: Isis Lovecruft
+# :version: 0.1.0-pre-alpha
+# :license: (c) 2012 Isis Lovecruft
+# see attached LICENCE file
+#
+
+import os
+import sys
+
+from twisted.plugin import IPlugin
+from twisted.python import usage
+from zope.interface import implements
+
+from lib import txscapy
+from utils import log
+from plugoo.assets import Asset
+from plugoo.interface import ITest
+from protocols.scapyproto import ScapyTest
+
+class EchoOptions(usage.Options):
+ # twisted.python.usage option spec:
+ # [long name, short name, default, help text(, coercing callable)]
+ optParameters = [
+ ['interface', 'i', None, 'Network interface to use'],
+ ['destination', 'd', None, 'File of hosts to ping'],
+ ['count', 'c', 5, 'Number of packets to send', int],
+ ['size', 's', 56, 'Number of bytes to send in ICMP data field', int],
+ ['ttl', 't', 25, 'Set the IP Time to Live', int],
+ ]
+ # No boolean flags for this test.
+ optFlags = []
+
class EchoAsset(Asset):
    """Asset of hosts to ping, one address per line."""

    def __init__(self, file=None):
        # Asset.__init__ returns None; the original rebound `self` to
        # that None, a confusing no-op. Just initialize the base class.
        Asset.__init__(self, file)

    def parse_line(self, line):
        """Return *line* stripped of newlines; '#' comment lines yield None."""
        if line.startswith('#'):
            return
        else:
            return line.replace('\n', '')
+
class EchoTest(ScapyTest):
    """A simple ICMP-8 (echo request) reachability test.

    Targets come from --destination, either a file with one address per
    line or a single literal address on the command line.
    """
    implements(IPlugin, ITest)

    shortName = 'echo'
    description = 'A simple ICMP-8 test to check if a host is reachable'
    options = EchoOptions
    requirements = None
    blocking = False

    pcap_file = 'echo.pcap'
    receive = True

    def initialize(self):
        """Copy any command-line options onto the instance."""
        self.request = {}
        self.response = {}

        if self.local_options:

            options = self.local_options

            if options['interface']:
                self.interface = options['interface']

            if options['count']:
                ## there's a Counter() somewhere, use it
                self.count = options['count']

            if options['size']:
                self.size = options['size']

            if options['ttl']:
                self.ttl = options['ttl']

    def load_assets(self):
        """Build the ``{'host': ...}`` asset mapping from --destination.

        Returns an empty dict when no destination was supplied.
        """
        assets = {}
        option = self.local_options

        if option and option['destination']:

            try:
                from scapy.all import IP
            except ImportError:
                log.err()

            if os.path.isfile(option['destination']):
                with open(option['destination']) as hosts:
                    for line in hosts.readlines():
                        assets.update({'host': EchoAsset(line)})
            else:
                # BUGFIX: the original referenced the undefined name
                # `options` here (NameError at runtime) and used a
                # `while` loop that never terminated on a valid address.
                # Validate the single address once and record it.
                destination = option['destination']
                if type(destination) is str:
                    try:
                        IP(destination)
                    except Exception:
                        log.err()
                    else:
                        assets.update({'host': destination})
        else:
            log.msg("Couldn't understand destination option...")
            log.msg("Give one IPv4 address, or a file with one address per line.")
        return assets

    def experiment(self, args):
        """Send an ICMP echo request to *args* and record the responses."""
        if len(args) == 0:
            log.err("Error: We're Echo, not Narcissus!")
            log.err(" Provide a list of hosts to ping...")
            d = sys.exit(1)
            return d

        ## XXX v4 / v6
        from scapy.all import ICMP, IP, sr
        # NOTE(review): sr() returns an (answered, unanswered) tuple;
        # calling .show() on that tuple looks wrong -- confirm against
        # the scapy API before relying on self.response.
        ping = sr(IP(dst=args)/ICMP())
        if ping:
            self.response.update(ping.show())
        else:
            log.msg('No response received from %s' % args)

    def control(self, *args):
        pass
+
+echo = EchoTest(None, None, None)
1
0