tor-commits
November 2012
- 18 participants
- 1508 discussions

[ooni-probe/master] * Fix NetTestAdaptor constructor call.
by isis@torproject.org 04 Nov '12
04 Nov '12
commit c30c355fd32117d155055e9c0b540158e04eedf9
Author: Isis Lovecruft <isis(a)torproject.org>
Date: Sun Nov 4 05:02:16 2012 +0000
* Fix NetTestAdaptor constructor call.
---
ooni/nettest.py | 53 ++++++++++++++++++++++++++++-------------------------
1 files changed, 28 insertions(+), 25 deletions(-)
diff --git a/ooni/nettest.py b/ooni/nettest.py
index 4ec1ccb..f8f3e30 100644
--- a/ooni/nettest.py
+++ b/ooni/nettest.py
@@ -73,8 +73,8 @@ class NetTestAdaptor(unittest.TestCase):
@classmethod
def __new__(cls, *args, **kwargs):
super( NetTestAdaptor, cls ).__new__(*args, **kwargs)
- if hasattr(cls, setUpClass):
- setUpClass(cls)
+ if hasattr(cls, "setUpClass"):
+ super( NetTestAdaptor, cls ).setUpClass()
else:
log.debug("NetTestAdaptor: constructor could not find setUpClass")
@@ -191,24 +191,6 @@ class NetTestAdaptor(unittest.TestCase):
def __input_parser__(one_input): return one_input
@classmethod
- def setUpClass(cls):
- """
- Create a NetTestCase. To add futher setup steps before a set of tests
- in a TestCase instance run, create a function called 'setUp'.
-
- Class attributes, such as `report`, `optParameters`, `name`, and
- `author` should be overriden statically as class attributes in any
- subclass of :class:`ooni.nettest.NetTestCase`, so that the calling
- functions during NetTestCase class setup can handle them correctly.
- """
- cls._raw_inputs = __copyattr__(cls, "inputs")
- cls._input_file = __copyattr__(cls, "inputFile")
- cls._input_parser = __copyattr__(cls, "inputParser", alt=__input_parser__)
- cls._nettest_name = __copyattr__(cls, "name", alt="NetTestAdaptor")
- cls.parsed_inputs = __get_inputs__(cls)
- ## XXX we should handle options generation here
-
- @classmethod
def __get_inputs__(cls):
"""
I am called during class setup and you probably should not override
@@ -267,6 +249,32 @@ class NetTestAdaptor(unittest.TestCase):
return parsed
+ @classmethod
+ def __optstruct__(cls):
+
+
+ @classmethod
+ def setUpClass(cls):
+ """
+ Create a NetTestCase. To add futher setup steps before a set of tests
+ in a TestCase instance run, create a function called 'setUp'.
+
+ Class attributes, such as `report`, `optParameters`, `name`, and
+ `author` should be overriden statically as class attributes in any
+ subclass of :class:`ooni.nettest.NetTestCase`, so that the calling
+ functions during NetTestCase class setup can handle them correctly.
+ """
+ cls._raw_inputs = __copyattr__(cls, "inputs")
+ cls._input_file = __copyattr__(cls, "inputFile")
+ cls._input_parser = __copyattr__(cls, "inputParser", alt=__input_parser__)
+ cls._nettest_name = __copyattr__(cls, "name", alt="NetTestAdaptor")
+ cls.parsed_inputs = __get_inputs__(cls)
+
+ ## XXX we should handle options generation here
+ cls._opt_param = __copyattr__(cls, "optParameters")
+ cls._opt_su
+
+
class NetTestCase(NetTestAdaptor):
"""
This is the monad of the OONI nettest universe. When you write a nettest
@@ -324,10 +332,6 @@ class NetTestCase(NetTestAdaptor):
subCommands = None
requiresRoot = False
- @classmethod
- def setUpClass(cls):
- pass
-
def deferSetUp(self, ignored, result):
"""
If we have the reporterFactory set we need to write the header. If
@@ -349,7 +353,6 @@ class NetTestCase(NetTestAdaptor):
log.debug("Running custom input processor")
return inputs
-
def getOptions(self):
'''
for attr in attributes:
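
For context, the first hunk above fixes a NameError: hasattr() was being passed the bare name setUpClass instead of the string "setUpClass", so the lookup could never run. A minimal standalone sketch (not from the commit) of the corrected lookup:

    class Base(object):
        @classmethod
        def setUpClass(cls):
            print("setUpClass ran for %s" % cls.__name__)

    class Adaptor(Base):
        @classmethod
        def build(cls):
            # hasattr(cls, setUpClass) -- the pre-fix form -- raises NameError,
            # since setUpClass is not a bound name here; the attribute name
            # must be given as a string.
            if hasattr(cls, "setUpClass"):
                super(Adaptor, cls).setUpClass()
            else:
                print("constructor could not find setUpClass")

    if __name__ == "__main__":
        Adaptor.build()  # prints: setUpClass ran for Adaptor
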

[ooni-probe/master] * Still not sure what to do with the custodiet and bridget, also not sure
by isis@torproject.org 04 Nov '12
04 Nov '12
commit 0e45f922ca7293d1eceac76945385960407ad2ad
Author: Isis Lovecruft <isis(a)torproject.org>
Date: Sun Nov 4 12:24:42 2012 +0000
* Still not sure what to do with the custodiet and bridget, also not sure
about plugoo.
---
nettests/bridget.py | 499 +++++++++++++++++++++++++++++++++++++++++++++
nettests/echo.py | 196 ++++++++++++++++++
nettests/tls-handshake.py | 32 +++
ooni/custodiet.py | 421 ++++++++++++++++++++++++++++++++++++++
4 files changed, 1148 insertions(+), 0 deletions(-)
diff --git a/nettests/bridget.py b/nettests/bridget.py
new file mode 100644
index 0000000..a334747
--- /dev/null
+++ b/nettests/bridget.py
@@ -0,0 +1,499 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+#
+# +-----------+
+# | BRIDGET |
+# | +--------------------------------------------+
+# +--------| Use a Tor process to test making a Tor |
+# | connection to a list of bridges or relays. |
+# +--------------------------------------------+
+#
+# :authors: Isis Lovecruft, Arturo Filasto
+# :licence: see included LICENSE
+# :version: 0.1.0-alpha
+
+from __future__ import with_statement
+from functools import partial
+from random import randint
+
+import os
+import sys
+
+from twisted.python import usage
+from twisted.plugin import IPlugin
+from twisted.internet import defer, error, reactor
+from zope.interface import implements
+
+from ooni.utils import log, date
+from ooni.utils.config import ValueChecker
+
+from ooni.plugoo.tests import ITest, OONITest
+from ooni.plugoo.assets import Asset, MissingAssetException
+from ooni.utils.onion import TxtorconImportError
+from ooni.utils.onion import PTNoBridgesException, PTNotFoundException
+
+try:
+ from ooni.utils.onion import parse_data_dir
+except:
+ log.msg("Please go to /ooni/lib and do 'make txtorcon' to run this test!")
+
+class RandomPortException(Exception):
+ """Raised when using a random port conflicts with configured ports."""
+ def __init__(self):
+ log.msg("Unable to use random and specific ports simultaneously")
+ return sys.exit()
+
+class BridgetArgs(usage.Options):
+ """Commandline options."""
+ allowed = "Port to use for Tor's %s, must be between 1024 and 65535."
+ sock_check = ValueChecker(allowed % "SocksPort").port_check
+ ctrl_check = ValueChecker(allowed % "ControlPort").port_check
+
+ optParameters = [
+ ['bridges', 'b', None,
+ 'File listing bridge IP:ORPorts to test'],
+ ['relays', 'f', None,
+ 'File listing relay IPs to test'],
+ ['socks', 's', 9049, None, sock_check],
+ ['control', 'c', 9052, None, ctrl_check],
+ ['torpath', 'p', None,
+ 'Path to the Tor binary to use'],
+ ['datadir', 'd', None,
+ 'Tor DataDirectory to use'],
+ ['transport', 't', None,
+ 'Tor ClientTransportPlugin'],
+ ['resume', 'r', 0,
+ 'Resume at this index']]
+ optFlags = [['random', 'x', 'Use random ControlPort and SocksPort']]
+
+ def postOptions(self):
+ if not self['bridges'] and not self['relays']:
+ raise MissingAssetException(
+ "Bridget can't run without bridges or relays to test!")
+ if self['transport']:
+ ValueChecker.uid_check(
+ "Can't run bridget as root with pluggable transports!")
+ if not self['bridges']:
+ raise PTNoBridgesException
+ if self['socks'] or self['control']:
+ if self['random']:
+ raise RandomPortException
+ if self['datadir']:
+ ValueChecker.dir_check(self['datadir'])
+ if self['torpath']:
+ ValueChecker.file_check(self['torpath'])
+
+class BridgetAsset(Asset):
+ """Class for parsing bridget Assets ignoring commented out lines."""
+ def __init__(self, file=None):
+ self = Asset.__init__(self, file)
+
+ def parse_line(self, line):
+ if line.startswith('#'):
+ return
+ else:
+ return line.replace('\n','')
+
+class BridgetTest(OONITest):
+ """
+ XXX fill me in
+
+ :ivar config:
+ An :class:`ooni.lib.txtorcon.TorConfig` instance.
+ :ivar relays:
+ A list of all provided relays to test.
+ :ivar bridges:
+ A list of all provided bridges to test.
+ :ivar socks_port:
+ Integer for Tor's SocksPort.
+ :ivar control_port:
+ Integer for Tor's ControlPort.
+ :ivar transport:
+ String defining the Tor's ClientTransportPlugin, for testing
+ a bridge's pluggable transport functionality.
+ :ivar tor_binary:
+ Path to the Tor binary to use, e.g. \'/usr/sbin/tor\'
+ """
+ implements(IPlugin, ITest)
+
+ shortName = "bridget"
+ description = "Use a Tor process to test connecting to bridges or relays"
+ requirements = None
+ options = BridgetArgs
+ blocking = False
+
+ def initialize(self):
+ """
+ Extra initialization steps. We only want one child Tor process
+ running, so we need to deal with most of the TorConfig() only once,
+ before the experiment runs.
+ """
+ self.socks_port = 9049
+ self.control_port = 9052
+ self.circuit_timeout = 90
+ self.tor_binary = '/usr/sbin/tor'
+ self.data_directory = None
+
+ def __make_asset_list__(opt, lst):
+ log.msg("Loading information from %s ..." % opt)
+ with open(opt) as opt_file:
+ for line in opt_file.readlines():
+ if line.startswith('#'):
+ continue
+ else:
+ lst.append(line.replace('\n',''))
+
+ def __count_remaining__(which):
+ total, reach, unreach = map(lambda x: which[x],
+ ['all', 'reachable', 'unreachable'])
+ count = len(total) - reach() - unreach()
+ return count
+
+ ## XXX should we do report['bridges_up'].append(self.bridges['current'])
+ self.bridges = {}
+ self.bridges['all'], self.bridges['up'], self.bridges['down'] = \
+ ([] for i in range(3))
+ self.bridges['reachable'] = lambda: len(self.bridges['up'])
+ self.bridges['unreachable'] = lambda: len(self.bridges['down'])
+ self.bridges['remaining'] = lambda: __count_remaining__(self.bridges)
+ self.bridges['current'] = None
+ self.bridges['pt_type'] = None
+ self.bridges['use_pt'] = False
+
+ self.relays = {}
+ self.relays['all'], self.relays['up'], self.relays['down'] = \
+ ([] for i in range(3))
+ self.relays['reachable'] = lambda: len(self.relays['up'])
+ self.relays['unreachable'] = lambda: len(self.relays['down'])
+ self.relays['remaining'] = lambda: __count_remaining__(self.relays)
+ self.relays['current'] = None
+
+ if self.local_options:
+ try:
+ from ooni.lib.txtorcon import TorConfig
+ except ImportError:
+ raise TxtorconImportError
+ else:
+ self.config = TorConfig()
+ finally:
+ options = self.local_options
+
+ if options['bridges']:
+ self.config.UseBridges = 1
+ __make_asset_list__(options['bridges'], self.bridges['all'])
+ if options['relays']:
+ ## first hop must be in TorState().guards
+ self.config.EntryNodes = ','.join(relay_list)
+ __make_asset_list__(options['relays'], self.relays['all'])
+ if options['socks']:
+ self.socks_port = options['socks']
+ if options['control']:
+ self.control_port = options['control']
+ if options['random']:
+ log.msg("Using randomized ControlPort and SocksPort ...")
+ self.socks_port = randint(1024, 2**16)
+ self.control_port = randint(1024, 2**16)
+ if options['torpath']:
+ self.tor_binary = options['torpath']
+ if options['datadir']:
+ self.data_directory = parse_data_dir(options['datadir'])
+ if options['transport']:
+ ## ClientTransportPlugin transport exec pathtobinary [options]
+ ## XXX we need a better way to deal with all PTs
+ log.msg("Using ClientTransportPlugin %s" % options['transport'])
+ self.bridges['use_pt'] = True
+ [self.bridges['pt_type'], pt_exec] = \
+ options['transport'].split(' ', 1)
+
+ if self.bridges['pt_type'] == "obfs2":
+ self.config.ClientTransportPlugin = \
+ self.bridges['pt_type'] + " " + pt_exec
+ else:
+ raise PTNotFoundException
+
+ self.config.SocksPort = self.socks_port
+ self.config.ControlPort = self.control_port
+ self.config.CookieAuthentication = 1
+
+ def __load_assets__(self):
+ """
+ Load bridges and/or relays from files given in user options. Bridges
+ should be given in the form IP:ORport. We don't want to load these as
+ assets, because it's inefficient to start a Tor process for each one.
+
+ We cannot use the Asset model, because that model calls
+ self.experiment() with the current Assets, which would be one relay
+ and one bridge, then it gives the defer.Deferred returned from
+ self.experiment() to self.control(), which means that, for each
+ (bridge, relay) pair, experiment gets called again, which instantiates
+ an additional Tor process that attempts to bind to the same
+ ports. Thus, additionally instantiated Tor processes return with
+ RuntimeErrors, which break the final defer.chainDeferred.callback(),
+ sending it into the errback chain.
+ """
+ assets = {}
+ if self.local_options:
+ if self.local_options['bridges']:
+ assets.update({'bridge':
+ BridgetAsset(self.local_options['bridges'])})
+ if self.local_options['relays']:
+ assets.update({'relay':
+ BridgetAsset(self.local_options['relays'])})
+ return assets
+
+ def experiment(self, args):
+ """
+ if bridges:
+ 1. configure first bridge line
+ 2a. configure data_dir, if it doesn't exist
+ 2b. write torrc to a tempfile in data_dir
+ 3. start tor } if any of these
+ 4. remove bridges which are public relays } fail, add current
+ 5. SIGHUP for each bridge } bridge to unreach-
+ } able bridges.
+ if relays:
+ 1a. configure the data_dir, if it doesn't exist
+ 1b. write torrc to a tempfile in data_dir
+ 2. start tor
+ 3. remove any of our relays which are already part of current
+ circuits
+ 4a. attach CustomCircuit() to self.state
+ 4b. RELAY_EXTEND for each relay } if this fails, add
+ } current relay to list
+ } of unreachable relays
+ 5.
+ if bridges and relays:
+ 1. configure first bridge line
+ 2a. configure data_dir if it doesn't exist
+ 2b. write torrc to a tempfile in data_dir
+ 3. start tor
+ 4. remove bridges which are public relays
+ 5. remove any of our relays which are already part of current
+ circuits
+ 6a. attach CustomCircuit() to self.state
+ 6b. for each bridge, build three circuits, with three
+ relays each
+ 6c. RELAY_EXTEND for each relay } if this fails, add
+ } current relay to list
+ } of unreachable relays
+
+ :param args:
+ The :class:`BridgetAsset` line currently being used. Except that it
+ in Bridget it doesn't, so it should be ignored and avoided.
+ """
+ try:
+ from ooni.utils import process
+ from ooni.utils.onion import remove_public_relays, start_tor
+ from ooni.utils.onion import start_tor_filter_nodes
+ from ooni.utils.onion import setup_fail, setup_done
+ from ooni.utils.onion import CustomCircuit
+ from ooni.utils.timer import deferred_timeout, TimeoutError
+ from ooni.lib.txtorcon import TorConfig, TorState
+ except ImportError:
+ raise TxtorconImportError
+ except TxtorconImportError, tie:
+ log.err(tie)
+ sys.exit()
+
+ def reconfigure_done(state, bridges):
+ """
+ Append :ivar:`bridges['current']` to the list
+ :ivar:`bridges['up'].
+ """
+ log.msg("Reconfiguring with 'Bridge %s' successful"
+ % bridges['current'])
+ bridges['up'].append(bridges['current'])
+ return state
+
+ def reconfigure_fail(state, bridges):
+ """
+ Append :ivar:`bridges['current']` to the list
+ :ivar:`bridges['down'].
+ """
+ log.msg("Reconfiguring TorConfig with parameters %s failed"
+ % state)
+ bridges['down'].append(bridges['current'])
+ return state
+
+ @defer.inlineCallbacks
+ def reconfigure_bridge(state, bridges):
+ """
+ Rewrite the Bridge line in our torrc. If use of pluggable
+ transports was specified, rewrite the line as:
+ Bridge <transport_type> <IP>:<ORPort>
+ Otherwise, rewrite in the standard form:
+ Bridge <IP>:<ORPort>
+
+ :param state:
+ A fully bootstrapped instance of
+ :class:`ooni.lib.txtorcon.TorState`.
+ :param bridges:
+ A dictionary of bridges containing the following keys:
+
+ bridges['remaining'] :: A function returning and int for the
+ number of remaining bridges to test.
+ bridges['current'] :: A string containing the <IP>:<ORPort>
+ of the current bridge.
+ bridges['use_pt'] :: A boolean, True if we're testing
+ bridges with a pluggable transport;
+ False otherwise.
+ bridges['pt_type'] :: If :ivar:`bridges['use_pt'] is True,
+ this is a string containing the type
+ of pluggable transport to test.
+ :return:
+ :param:`state`
+ """
+ log.msg("Current Bridge: %s" % bridges['current'])
+ log.msg("We now have %d bridges remaining to test..."
+ % bridges['remaining']())
+ try:
+ if bridges['use_pt'] is False:
+ controller_response = yield state.protocol.set_conf(
+ 'Bridge', bridges['current'])
+ elif bridges['use_pt'] and bridges['pt_type'] is not None:
+ controller_reponse = yield state.protocol.set_conf(
+ 'Bridge', bridges['pt_type'] +' '+ bridges['current'])
+ else:
+ raise PTNotFoundException
+
+ if controller_response == 'OK':
+ finish = yield reconfigure_done(state, bridges)
+ else:
+ log.err("SETCONF for %s responded with error:\n %s"
+ % (bridges['current'], controller_response))
+ finish = yield reconfigure_fail(state, bridges)
+
+ defer.returnValue(finish)
+
+ except Exception, e:
+ log.err("Reconfiguring torrc with Bridge line %s failed:\n%s"
+ % (bridges['current'], e))
+ defer.returnValue(None)
+
+ def attacher_extend_circuit(attacher, deferred, router):
+ ## XXX todo write me
+ ## state.attacher.extend_circuit
+ raise NotImplemented
+ #attacher.extend_circuit
+
+ def state_attach(state, path):
+ log.msg("Setting up custom circuit builder...")
+ attacher = CustomCircuit(state)
+ state.set_attacher(attacher, reactor)
+ state.add_circuit_listener(attacher)
+ return state
+
+ ## OLD
+ #for circ in state.circuits.values():
+ # for relay in circ.path:
+ # try:
+ # relay_list.remove(relay)
+ # except KeyError:
+ # continue
+ ## XXX how do we attach to circuits with bridges?
+ d = defer.Deferred()
+ attacher.request_circuit_build(d)
+ return d
+
+ def state_attach_fail(state):
+ log.err("Attaching custom circuit builder failed: %s" % state)
+
+ log.msg("Bridget: initiating test ... ") ## Start the experiment
+
+ ## if we've at least one bridge, and our config has no 'Bridge' line
+ if self.bridges['remaining']() >= 1 \
+ and not 'Bridge' in self.config.config:
+
+ ## configure our first bridge line
+ self.bridges['current'] = self.bridges['all'][0]
+ self.config.Bridge = self.bridges['current']
+ ## avoid starting several
+ self.config.save() ## processes
+ assert self.config.config.has_key('Bridge'), "No Bridge Line"
+
+ ## start tor and remove bridges which are public relays
+ from ooni.utils.onion import start_tor_filter_nodes
+ state = start_tor_filter_nodes(reactor, self.config,
+ self.control_port, self.tor_binary,
+ self.data_directory, self.bridges)
+ #controller = defer.Deferred()
+ #controller.addCallback(singleton_semaphore, tor)
+ #controller.addErrback(setup_fail)
+ #bootstrap = defer.gatherResults([controller, filter_bridges],
+ # consumeErrors=True)
+
+ if state is not None:
+ log.debug("state:\n%s" % state)
+ log.debug("Current callbacks on TorState():\n%s"
+ % state.callbacks)
+
+ ## if we've got more bridges
+ if self.bridges['remaining']() >= 2:
+ #all = []
+ for bridge in self.bridges['all'][1:]:
+ self.bridges['current'] = bridge
+ #new = defer.Deferred()
+ #new.addCallback(reconfigure_bridge, state, self.bridges)
+ #all.append(new)
+ #check_remaining = defer.DeferredList(all, consumeErrors=True)
+ #state.chainDeferred(check_remaining)
+ state.addCallback(reconfigure_bridge, self.bridges)
+
+ if self.relays['remaining']() > 0:
+ while self.relays['remaining']() >= 3:
+ #path = list(self.relays.pop() for i in range(3))
+ #log.msg("Trying path %s" % '->'.join(map(lambda node:
+ # node, path)))
+ self.relays['current'] = self.relays['all'].pop()
+ for circ in state.circuits.values():
+ for node in circ.path:
+ if node == self.relays['current']:
+ self.relays['up'].append(self.relays['current'])
+ if len(circ.path) < 3:
+ try:
+ ext = attacher_extend_circuit(state.attacher, circ,
+ self.relays['current'])
+ ext.addCallback(attacher_extend_circuit_done,
+ state.attacher, circ,
+ self.relays['current'])
+ except Exception, e:
+ log.err("Extend circuit failed: %s" % e)
+ else:
+ continue
+
+ #state.callback(all)
+ #self.reactor.run()
+ return state
+
+ def startTest(self, args):
+ """
+ Local override of :meth:`OONITest.startTest` to bypass calling
+ self.control.
+
+ :param args:
+ The current line of :class:`Asset`, not used but kept for
+ compatibility reasons.
+ :return:
+ A fired deferred which callbacks :meth:`experiment` and
+ :meth:`OONITest.finished`.
+ """
+ self.start_time = date.now()
+ self.d = self.experiment(args)
+ self.d.addErrback(log.err)
+ self.d.addCallbacks(self.finished, log.err)
+ return self.d
+
+## So that getPlugins() can register the Test:
+#bridget = BridgetTest(None, None, None)
+
+## ISIS' NOTES
+## -----------
+## TODO:
+## x cleanup documentation
+## x add DataDirectory option
+## x check if bridges are public relays
+## o take bridge_desc file as input, also be able to give same
+## format as output
+## x Add asynchronous timeout for deferred, so that we don't wait
+## o Add assychronous timout for deferred, so that we don't wait
+## forever for bridges that don't work.
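
As a reading aid (not from the commit), the bridges/relays bookkeeping in initialize() above keeps lists and counter lambdas in a single dict; the same counting pattern, isolated with hypothetical sample data:

    def count_remaining(which):
        # everything we were given, minus what is already marked up or down
        return len(which['all']) - which['reachable']() - which['unreachable']()

    bridges = {'all': [], 'up': [], 'down': []}
    bridges['reachable'] = lambda: len(bridges['up'])
    bridges['unreachable'] = lambda: len(bridges['down'])
    bridges['remaining'] = lambda: count_remaining(bridges)

    # hypothetical sample data, purely for illustration
    bridges['all'].extend(['1.2.3.4:443', '5.6.7.8:9001', '9.9.9.9:443'])
    bridges['up'].append('1.2.3.4:443')
    bridges['down'].append('5.6.7.8:9001')

    print(bridges['remaining']())  # -> 1
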
diff --git a/nettests/echo.py b/nettests/echo.py
new file mode 100644
index 0000000..bc47519
--- /dev/null
+++ b/nettests/echo.py
@@ -0,0 +1,196 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# +---------+
+# | echo.py |
+# +---------+
+# A simple ICMP-8 ping test.
+#
+# :author: Isis Lovecruft
+# :version: 0.0.1-pre-alpha
+# :license: (c) 2012 Isis Lovecruft
+# see attached LICENCE file
+#
+
+import os
+import sys
+
+from pprint import pprint
+
+from twisted.internet import reactor
+from twisted.plugin import IPlugin
+from twisted.python import usage
+from ooni.nettest import NetTestCase
+from ooni.utils import log, Storage
+from ooni.utils.net import PermissionsError, IfaceError
+
+try:
+ from scapy.all import sr1, IP, ICMP ## XXX v4/v6?
+ from ooni.lib import txscapy
+ from ooni.lib.txscapy import txsr, txsend
+ from ooni.templates.scapyt import ScapyTest
+except:
+ log.msg("This test requires scapy, see www.secdev.org/projects/scapy")
+
+## xxx TODO: move these to a utility function for determining OSes
+LINUX=sys.platform.startswith("linux")
+OPENBSD=sys.platform.startswith("openbsd")
+FREEBSD=sys.platform.startswith("freebsd")
+NETBSD=sys.platform.startswith("netbsd")
+DARWIN=sys.platform.startswith("darwin")
+SOLARIS=sys.platform.startswith("sunos")
+WINDOWS=sys.platform.startswith("win32")
+
+class EchoTest(ScapyTest):
+ """
+ xxx fill me in
+ """
+ name = 'echo'
+ author = 'Isis Lovecruft <isis(a)torproject.org>'
+ description = 'A simple ICMP-8 test to see if a host is reachable.'
+ version = '0.0.1'
+ inputFile = ['file', 'f', None, 'File of list of IPs to ping']
+ requirements = None
+ #report = Storage()
+
+ optParameters = [
+ ['interface', 'i', None, 'Network interface to use'],
+ ['count', 'c', 5, 'Number of packets to send', int],
+ ['size', 's', 56, 'Number of bytes to send in ICMP data field', int],
+ ['ttl', 'l', 25, 'Set the IP Time to Live', int],
+ ['timeout', 't', 2, 'Seconds until timeout if no response', int],
+ ['pcap', 'p', None, 'Save pcap to this file'],
+ ['receive', 'r', True, 'Receive response packets']
+ ]
+
+ def setUp(self, *a, **kw):
+ '''
+ :ivar ifaces:
+ Struct returned from getifaddrs(3) and turned into a tuple in the
+ form (*ifa_name, AF_FAMILY, *ifa_addr)
+ '''
+
+ if self.local_options:
+ log.debug("%s: local_options found" % self.name)
+ for key, value in self.local_options:
+ log.debug("%s: setting self.%s = %s" % (key, value))
+ setattr(self, key, value)
+
+ ## xxx is this now .subOptions?
+ #self.inputFile = self.localOptions['file']
+ self.timeout *= 1000 ## convert to milliseconds
+
+ if not self.interface:
+ log.msg("No network interface specified!")
+ log.debug("OS detected: %s" % sys.platform)
+ if LINUX or OPENBSD or NETBSD or FREEBSD or DARWIN or SOLARIS:
+ from twisted.internet.test import _posixifaces
+ log.msg("Attempting to discover network interfaces...")
+ ifaces = _posixifaces._interfaces()
+ elif WINDOWS:
+ from twisted.internet.test import _win32ifaces
+ log.msg("Attempting to discover network interfaces...")
+ ifaces = _win32ifaces._interfaces()
+ else:
+ log.debug("Client OS %s not accounted for!" % sys.platform)
+ log.debug("Unable to discover network interfaces...")
+ ifaces = [('lo', '')]
+
+ ## found = {'eth0': '1.1.1.1'}
+ found = [{i[0]: i[2]} for i in ifaces if i[0] != 'lo']
+ log.info("Found interfaces:\n%s" % pprint(found))
+ self.interfaces = self.tryInterfaces(found)
+ else:
+ ## xxx need a way to check that iface exists, is up, and
+ ## we have permissions on it
+ log.debug("Our interface has been set to %s" % self.interface)
+
+ if self.pcap:
+ try:
+ self.pcapfile = open(self.pcap, 'a+')
+ except:
+ log.msg("Unable to write to pcap file %s" % self.pcap)
+ self.pcapfile = None
+
+ try:
+ assert os.path.isfile(self.file)
+ fp = open(self.file, 'r')
+ except Exception, e:
+ hosts = ['8.8.8.8', '38.229.72.14']
+ log.err(e)
+ else:
+ self.inputs = self.inputProcessor(fp)
+ self.removePorts(hosts)
+
+ log.debug("Initialization of %s test completed with:\n%s"
+ % (self.name, ''.join(self.__dict__)))
+
+ @staticmethod
+ def inputParser(inputs):
+ log.debug("Removing possible ports from host addresses...")
+ log.debug("Initial inputs:\n%s" % pprint(inputs))
+
+ assert isinstance(inputs, list)
+ hosts = [h.rsplit(':', 1)[0] for h in inputs]
+ log.debug("Inputs converted to:\n%s" % hosts)
+
+ return hosts
+
+ def tryInterfaces(self, ifaces):
+ try:
+ from scapy.all import sr1 ## we want this check to be blocking
+ except:
+ log.msg("This test requires scapy: www.secdev.org/projects/scapy")
+ raise SystemExit
+
+ ifup = {}
+ while ifaces:
+ for ifname, ifaddr in ifaces:
+ log.debug("Currently testing network capabilities of interface"
+ + "%s by sending a packet to our address %s"
+ % (ifname, ifaddr))
+ try:
+ pkt = IP(dst=ifaddr)/ICMP()
+ ans, unans = sr(pkt, iface=ifname, timeout=self.timeout)
+ except Exception, e:
+ raise PermissionsError if e.find("Errno 1") else log.err(e)
+ else:
+ ## xxx i think this logic might be wrong
+ log.debug("Interface test packet\n%s\n\n%s"
+ % (pkt.summary(), pkt.show2()))
+ if ans.summary():
+ log.info("Received answer for test packet on interface"
+ +"%s :\n%s" % (ifname, ans.summary()))
+ ifup.update(ifname, ifaddr)
+ else:
+ log.info("Our interface test packet was unanswered:\n%s"
+ % unans.summary())
+
+ if len(ifup) > 0:
+ log.msg("Discovered the following working network interfaces: %s"
+ % ifup)
+ return ifup
+ else:
+ raise IfaceError("Could not find a working network interface.")
+
+ def buildPackets(self):
+ log.debug("self.input is %s" % self.input)
+ log.debug("self.hosts is %s" % self.hosts)
+ for addr in self.input:
+ packet = IP(dst=self.input)/ICMP()
+ self.request.append(packet)
+ return packet
+
+ def test_icmp(self):
+ if self.recieve:
+ self.buildPackets()
+ all = []
+ for packet in self.request:
+ d = self.sendReceivePackets(packets=packet)
+ all.append(d)
+ self.response.update({packet: d})
+ d_list = defer.DeferredList(all)
+ return d_list
+ else:
+ d = self.sendPackets()
+ return d
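
A quick illustration (not from the commit) of the rsplit-based port stripping that echo.py's inputParser performs above; note that a bare IPv6 address would be mangled by this approach:

    def strip_ports(inputs):
        # keep everything before the last colon, dropping a trailing ":port"
        assert isinstance(inputs, list)
        return [h.rsplit(':', 1)[0] for h in inputs]

    print(strip_ports(['8.8.8.8:53', '38.229.72.14', 'example.org:443']))
    # -> ['8.8.8.8', '38.229.72.14', 'example.org']
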
diff --git a/nettests/tls-handshake.py b/nettests/tls-handshake.py
new file mode 100644
index 0000000..eba950e
--- /dev/null
+++ b/nettests/tls-handshake.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+
+import subprocess
+from subprocess import PIPE
+serverport = "129.21.124.215:443"
+# a subset of those from firefox
+ciphers = [
+ "ECDHE-ECDSA-AES256-SHA",
+ "ECDHE-RSA-AES256-SHA",
+ "DHE-RSA-CAMELLIA256-SHA",
+ "DHE-DSS-CAMELLIA256-SHA",
+ "DHE-RSA-AES256-SHA",
+ "DHE-DSS-AES256-SHA",
+ "ECDH-ECDSA-AES256-CBC-SHA",
+ "ECDH-RSA-AES256-CBC-SHA",
+ "CAMELLIA256-SHA",
+ "AES256-SHA",
+ "ECDHE-ECDSA-RC4-SHA",
+ "ECDHE-ECDSA-AES128-SHA",
+ "ECDHE-RSA-RC4-SHA",
+ "ECDHE-RSA-AES128-SHA",
+ "DHE-RSA-CAMELLIA128-SHA",
+ "DHE-DSS-CAMELLIA128-SHA"
+]
+def checkBridgeConnection(host, port)
+ cipher_arg = ":".join(ciphers)
+ cmd = ["openssl", "s_client", "-connect", "%s:%s" % (host,port)]
+ cmd += ["-cipher", cipher_arg]
+ proc = subprocess.Popen(cmd, stdout=PIPE, stderr=PIPE,stdin=PIPE)
+ out, error = proc.communicate()
+ success = "Cipher is DHE-RSA-AES256-SHA" in out
+ return success
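
Two small problems are visible in tls-handshake.py as committed: the def line for checkBridgeConnection is missing its trailing colon, and the serverport constant is never used. A hedged, standalone sketch of the same openssl s_client probe (cipher list abbreviated, host and port values are placeholders):

    import subprocess
    from subprocess import PIPE

    CIPHERS = ["ECDHE-RSA-AES256-SHA", "DHE-RSA-AES256-SHA", "AES256-SHA"]

    def check_bridge_connection(host, port):
        # Ask openssl s_client to handshake using only the listed ciphers and
        # inspect the negotiated-cipher line in its output.
        cmd = ["openssl", "s_client", "-connect", "%s:%s" % (host, port),
               "-cipher", ":".join(CIPHERS)]
        proc = subprocess.Popen(cmd, stdout=PIPE, stderr=PIPE, stdin=PIPE)
        out, _ = proc.communicate()
        return b"Cipher is" in out and b"Cipher is (NONE)" not in out

    if __name__ == "__main__":
        print(check_bridge_connection("129.21.124.215", 443))
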
diff --git a/ooni/custodiet.py b/ooni/custodiet.py
new file mode 100755
index 0000000..8cbcfce
--- /dev/null
+++ b/ooni/custodiet.py
@@ -0,0 +1,421 @@
+#!/usr/bin/env python
+# -*- coding: UTF-8
+#
+# custodiet
+# *********
+#
+# "...quis custodiet ipsos custodes?"
+# - Juvenal, Satires VI.347-348 (circa 2nd Century, C.E.)
+#
+# "'Hand me the Custodian,' Goodchild demands, inserting the waiflike
+# robot into Bambara's opened navel. 'Providing conscience for those who
+# have none.' Goodchild and the other Breen government agents disappear
+# into the surrounding desert in a vehicle, kicking up cloud of white dust.
+# Bambara awakens, and, patting the dust from his clothing, turns to
+# greet a one-armed child. 'Hi, my name's Bambara; I'm a
+# thirty-six-year-old Virgo and a former killer, who's hobbies include
+# performing recreational autopsies, defecating, and drinking rum. I've
+# recently been given a conscience, and would very much like to help you.'
+# Cut to Bambara and the child, now with one of Bambara's arms, leaving
+# a surgical clinic."
+# - AeonFlux, "The Purge" (sometime in the late 90s)
+#
+# :copyright: (c) 2012 Isis Lovecruft
+# :license: see LICENSE for more details.
+# :version: 0.1.0-beta
+#
+
+# ooniprobe.py imports
+import sys
+from signal import SIGTERM, signal
+from pprint import pprint
+
+from twisted.python import usage
+from twisted.internet import reactor
+from twisted.plugin import getPlugins
+
+from zope.interface.verify import verifyObject
+from zope.interface.exceptions import BrokenImplementation
+from zope.interface.exceptions import BrokenMethodImplementation
+
+from ooni.bridget.tests import bridget
+from ooni.bridget.utils import log, tests, work, reports
+from ooni.bridget.utils.interface import ITest
+from ooni.utils.logo import getlogo
+
+# runner.py imports
+import os
+import types
+import time
+import inspect
+import yaml
+
+from twisted.internet import defer, reactor
+from twisted.python import reflect, failure, usage
+from twisted.python import log as tlog
+
+from twisted.trial import unittest
+from twisted.trial.runner import TrialRunner, TestLoader
+from twisted.trial.runner import isPackage, isTestCase, ErrorHolder
+from twisted.trial.runner import filenameToModule, _importFromFile
+
+from ooni import nettest
+from ooni.inputunit import InputUnitFactory
+from ooni.nettest import InputTestSuite
+from ooni.plugoo import tests as oonitests
+from ooni.reporter import ReporterFactory
+from ooni.utils import log, geodata, date
+from ooni.utils.legacy import LegacyOONITest
+from ooni.utils.legacy import start_legacy_test, adapt_legacy_test
+
+
+__version__ = "0.1.0-beta"
+
+
+#def retrieve_plugoo():
+# """
+# Get all the plugins that implement the ITest interface and get the data
+# associated to them into a dict.
+# """
+# interface = ITest
+# d = {}
+# error = False
+# for p in getPlugins(interface, plugins):
+# try:
+# verifyObject(interface, p)
+# d[p.shortName] = p
+# except BrokenImplementation, bi:
+# print "Plugin Broken"
+# print bi
+# error = True
+# if error != False:
+# print "Plugin Loaded!"
+# return d
+#
+#plugoo = retrieve_plugoo()
+
+"""
+
+ai to watch over which tests to run - custodiet
+
+ * runTest() or getPrefixMethodNames() to run the tests in order for each
+ test (esp. the tcp and icmp parts) to be oonicompat we should use the
+ test_icmp_ping API framework for those.
+
+ * should handle calling
+
+tests to run:
+ echo
+ syn
+ fin
+ conn
+ tls
+ tor
+need fakebridge - canary
+
+"""
+
+def runTest(test, options, global_options, reactor=reactor):
+ """
+ Run an OONI probe test by name.
+
+ @param test: a string specifying the test name as specified inside of
+ shortName.
+
+ @param options: the local options to be passed to the test.
+
+ @param global_options: the global options for OONI
+ """
+ parallelism = int(global_options['parallelism'])
+ worker = work.Worker(parallelism, reactor=reactor)
+ test_class = plugoo[test].__class__
+ report = reports.Report(test, global_options['output'])
+
+ log_to_stdout = True
+ if global_options['quiet']:
+ log_to_stdout = False
+
+ log.start(log_to_stdout,
+ global_options['log'],
+ global_options['verbosity'])
+
+ resume = 0
+ if not options:
+ options = {}
+ if 'resume' in options:
+ resume = options['resume']
+
+ test = test_class(options, global_options, report, reactor=reactor)
+ if test.tool:
+ test.runTool()
+ return True
+
+ if test.ended:
+ print "Ending test"
+ return None
+
+ wgen = work.WorkGenerator(test,
+ dict(options),
+ start=resume)
+ for x in wgen:
+ worker.push(x)
+
+class MainOptions(usage.Options):
+ tests = [bridget, ]
+ subCommands = []
+ for test in tests:
+ print test
+ testopt = getattr(test, 'options')
+ subCommands.append([test, None, testopt, "Run the %s test" % test])
+
+ optFlags = [
+ ['quiet', 'q', "Don't log to stdout"]
+ ]
+
+ optParameters = [
+ ['parallelism', 'n', 10, "Specify the number of parallel tests to run"],
+ #['target-node', 't', 'localhost:31415', 'Select target node'],
+ ['output', 'o', 'bridge.log', "Specify output report file"],
+ ['reportfile', 'o', 'bridge.log', "Specify output log file"],
+ ['verbosity', 'v', 1, "Specify the logging level"],
+ ]
+
+ def opt_version(self):
+ """
+ Display OONI version and exit.
+ """
+ print "OONI version:", __version__
+ sys.exit(0)
+
+ def __str__(self):
+ """
+ Hack to get the sweet ascii art into the help output and replace the
+ strings "Commands" with "Tests".
+ """
+ return getlogo() + '\n' + self.getSynopsis() + '\n' + \
+ self.getUsage(width=None).replace("Commands:", "Tests:")
+
+
+
+def isTestCase(thing):
+ try:
+ return issubclass(thing, unittest.TestCase)
+ except TypeError:
+ return False
+
+def isLegacyTest(obj):
+ """
+ Returns True if the test in question is written using the OONITest legacy
+ class.
+ We do this for backward compatibility of the OONIProbe API.
+ """
+ try:
+ if issubclass(obj, oonitests.OONITest) and not obj == oonitests.OONITest:
+ return True
+ else:
+ return False
+ except TypeError:
+ return False
+
+def processTest(obj, config):
+ """
+ Process the parameters and :class:`twisted.python.usage.Options` of a
+ :class:`ooni.nettest.Nettest`.
+
+ :param obj:
+ An uninstantiated old test, which should be a subclass of
+ :class:`ooni.plugoo.tests.OONITest`.
+ :param config:
+ A configured and instantiated :class:`twisted.python.usage.Options`
+ class.
+ """
+
+ inputFile = obj.inputFile
+
+ if obj.optParameters or inputFile:
+ if not obj.optParameters:
+ obj.optParameters = []
+
+ if inputFile:
+ obj.optParameters.append(inputFile)
+
+ class Options(usage.Options):
+ optParameters = obj.optParameters
+
+ options = Options()
+ options.parseOptions(config['subArgs'])
+ obj.localOptions = options
+
+ if inputFile:
+ obj.inputFile = options[inputFile[0]]
+ try:
+ tmp_obj = obj()
+ tmp_obj.getOptions()
+ except usage.UsageError:
+ options.opt_help()
+
+ return obj
+
+def findTestClassesFromConfig(config):
+ """
+ Takes as input the command line config parameters and returns the test
+ case classes.
+ If it detects that a certain test class is using the old OONIProbe format,
+ then it will adapt it to the new testing system.
+
+ :param config:
+ A configured and instantiated :class:`twisted.python.usage.Options`
+ class.
+ :return:
+ A list of class objects found in a file or module given on the
+ commandline.
+ """
+
+ filename = config['test']
+ classes = []
+
+ module = filenameToModule(filename)
+ for name, val in inspect.getmembers(module):
+ if isTestCase(val):
+ classes.append(processTest(val, config))
+ elif isLegacyTest(val):
+ classes.append(adapt_legacy_test(val, config))
+ return classes
+
+def makeTestCases(klass, tests, methodPrefix):
+ """
+ Takes a class some tests and returns the test cases. methodPrefix is how
+ the test case functions should be prefixed with.
+ """
+
+ cases = []
+ for test in tests:
+ cases.append(klass(methodPrefix+test))
+ return cases
+
+def loadTestsAndOptions(classes, config):
+ """
+ Takes a list of classes and returns their testcases and options.
+ Legacy tests will be adapted.
+ """
+
+ methodPrefix = 'test'
+ suiteFactory = InputTestSuite
+ options = []
+ testCases = []
+ names = []
+
+ _old_klass_type = LegacyOONITest
+
+ for klass in classes:
+ if isinstance(klass, _old_klass_type):
+ try:
+ cases = start_legacy_test(klass)
+ #cases.callback()
+ if cases:
+ print cases
+ return [], []
+ testCases.append(cases)
+ except Exception, e:
+ log.err(e)
+ else:
+ try:
+ opts = klass.local_options
+ options.append(opts)
+ except AttributeError, ae:
+ options.append([])
+ log.err(ae)
+ elif not isinstance(klass, _old_klass_type):
+ tests = reflect.prefixedMethodNames(klass, methodPrefix)
+ if tests:
+ cases = makeTestCases(klass, tests, methodPrefix)
+ testCases.append(cases)
+ try:
+ k = klass()
+ opts = k.getOptions()
+ options.append(opts)
+ except AttributeError, ae:
+ options.append([])
+ log.err(ae)
+ else:
+ try:
+ raise RuntimeError, "Class is some strange type!"
+ except RuntimeError, re:
+ log.err(re)
+
+ return testCases, options
+
+class ORunner(object):
+ """
+ This is a specialized runner used by the ooniprobe command line tool.
+ I am responsible for reading the inputs from the test files and splitting
+ them in input units. I also create all the report instances required to run
+ the tests.
+ """
+ def __init__(self, cases, options=None, config=None, *arg, **kw):
+ self.baseSuite = InputTestSuite
+ self.cases = cases
+ self.options = options
+
+ try:
+ assert len(options) != 0, "Length of options is zero!"
+ except AssertionError, ae:
+ self.inputs = []
+ log.err(ae)
+ else:
+ try:
+ first = options.pop(0)
+ except:
+ first = {}
+ if 'inputs' in first:
+ self.inputs = options['inputs']
+ else:
+ log.msg("Could not find inputs!")
+ log.msg("options[0] = %s" % first)
+ self.inputs = [None]
+
+ try:
+ reportFile = open(config['reportfile'], 'a+')
+ except:
+ filename = 'report_'+date.timestamp()+'.yaml'
+ reportFile = open(filename, 'a+')
+ self.reporterFactory = ReporterFactory(reportFile,
+ testSuite=self.baseSuite(self.cases))
+
+ def runWithInputUnit(self, inputUnit):
+ idx = 0
+ result = self.reporterFactory.create()
+
+ for inputs in inputUnit:
+ result.reporterFactory = self.reporterFactory
+
+ suite = self.baseSuite(self.cases)
+ suite.input = inputs
+ suite(result, idx)
+
+ # XXX refactor all of this index bullshit to avoid having to pass
+ # this index around. Probably what I want to do is go and make
+ # changes to report to support the concept of having multiple runs
+ # of the same test.
+ # We currently need to do this addition in order to get the number
+ # of times the test cases that have run inside of the test suite.
+ idx += (suite._idx - idx)
+
+ result.done()
+
+ def run(self):
+ self.reporterFactory.options = self.options
+ for inputUnit in InputUnitFactory(self.inputs):
+ self.runWithInputUnit(inputUnit)
+
+if __name__ == "__main__":
+ config = Options()
+ config.parseOptions()
+
+ if not config.subCommand:
+ config.opt_help()
+ signal(SIGTERM)
+ #sys.exit(1)
+
+ runTest(config.subCommand, config.subOptions, config)
+ reactor.run()
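
As an aside (not from the commit), the control flow that ORunner.run() implements above is: slice the inputs into units via InputUnitFactory, then run every test case once per input in each unit. Stripped of the Twisted and reporting machinery, and with an arbitrary unit size of 3, the shape of that loop is roughly:

    def input_unit_factory(inputs, unit_size=3):
        # yield successive slices of the input list (unit_size is made up here)
        for i in range(0, len(inputs), unit_size):
            yield inputs[i:i + unit_size]

    def run_with_input_unit(cases, input_unit):
        # run every case once per input, as runWithInputUnit does with the
        # real suite and reporter objects
        for single_input in input_unit:
            for case in cases:
                case(single_input)

    def run(cases, inputs):
        for unit in input_unit_factory(inputs):
            run_with_input_unit(cases, unit)

    def fake_case(single_input):
        print("testing %s" % single_input)

    if __name__ == "__main__":
        run([fake_case], ["8.8.8.8", "1.1.1.1", "9.9.9.9", "4.4.4.4"])
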

[ooni-probe/master] * Moving bridget around to be better integrated with the new ooni structure.
by isis@torproject.org 04 Nov '12
04 Nov '12
commit 276aeea3a524018df7a4535add3e2ad83f755e33
Author: Isis Lovecruft <isis(a)torproject.org>
Date: Sun Nov 4 12:23:09 2012 +0000
* Moving bridget around to be better integrated with the new ooni structure.
---
ooni/bridget/__init__.py | 14 -
ooni/bridget/custodiet.py | 421 ---------------------
ooni/bridget/tests/__init__.py | 14 -
ooni/bridget/tests/bridget.py | 499 -------------------------
ooni/bridget/tests/echo.py | 205 -----------
ooni/bridget/tests/tls-handshake.py | 32 --
ooni/bridget/utils/__init__.py | 1 -
ooni/bridget/utils/inputs.py | 174 ---------
ooni/bridget/utils/interface.py | 54 ---
ooni/bridget/utils/log.py | 98 -----
ooni/bridget/utils/nodes.py | 176 ---------
ooni/bridget/utils/onion.py | 686 -----------------------------------
ooni/bridget/utils/reports.py | 144 --------
ooni/bridget/utils/tests.py | 141 -------
ooni/bridget/utils/work.py | 147 --------
15 files changed, 0 insertions(+), 2806 deletions(-)
diff --git a/ooni/bridget/__init__.py b/ooni/bridget/__init__.py
deleted file mode 100644
index 4648d77..0000000
--- a/ooni/bridget/__init__.py
+++ /dev/null
@@ -1,14 +0,0 @@
-#-*- coding: utf-8 -*-
-
-#import os, sys
-#import copy_reg
-
-## Hack to set the proper sys.path. Overcomes the export PYTHONPATH pain.
-#sys.path[:] = map(os.path.abspath, sys.path)
-#sys.path.insert(0, os.path.abspath(os.getcwd()))
-
-## This is a hack to overcome a bug in python
-#from ooni.utils.hacks import patched_reduce_ex
-#copy_reg._reduce_ex = patched_reduce_ex
-
-__all__ = ['custodiet']
diff --git a/ooni/bridget/custodiet.py b/ooni/bridget/custodiet.py
deleted file mode 100755
index 8cbcfce..0000000
--- a/ooni/bridget/custodiet.py
+++ /dev/null
@@ -1,421 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: UTF-8
-#
-# custodiet
-# *********
-#
-# "...quis custodiet ipsos custodes?"
-# - Juvenal, Satires VI.347-348 (circa 2nd Century, C.E.)
-#
-# "'Hand me the Custodian,' Goodchild demands, inserting the waiflike
-# robot into Bambara's opened navel. 'Providing conscience for those who
-# have none.' Goodchild and the other Breen government agents disappear
-# into the surrounding desert in a vehicle, kicking up cloud of white dust.
-# Bambara awakens, and, patting the dust from his clothing, turns to
-# greet a one-armed child. 'Hi, my name's Bambara; I'm a
-# thirty-six-year-old Virgo and a former killer, who's hobbies include
-# performing recreational autopsies, defecating, and drinking rum. I've
-# recently been given a conscience, and would very much like to help you.'
-# Cut to Bambara and the child, now with one of Bambara's arms, leaving
-# a surgical clinic."
-# - AeonFlux, "The Purge" (sometime in the late 90s)
-#
-# :copyright: (c) 2012 Isis Lovecruft
-# :license: see LICENSE for more details.
-# :version: 0.1.0-beta
-#
-
-# ooniprobe.py imports
-import sys
-from signal import SIGTERM, signal
-from pprint import pprint
-
-from twisted.python import usage
-from twisted.internet import reactor
-from twisted.plugin import getPlugins
-
-from zope.interface.verify import verifyObject
-from zope.interface.exceptions import BrokenImplementation
-from zope.interface.exceptions import BrokenMethodImplementation
-
-from ooni.bridget.tests import bridget
-from ooni.bridget.utils import log, tests, work, reports
-from ooni.bridget.utils.interface import ITest
-from ooni.utils.logo import getlogo
-
-# runner.py imports
-import os
-import types
-import time
-import inspect
-import yaml
-
-from twisted.internet import defer, reactor
-from twisted.python import reflect, failure, usage
-from twisted.python import log as tlog
-
-from twisted.trial import unittest
-from twisted.trial.runner import TrialRunner, TestLoader
-from twisted.trial.runner import isPackage, isTestCase, ErrorHolder
-from twisted.trial.runner import filenameToModule, _importFromFile
-
-from ooni import nettest
-from ooni.inputunit import InputUnitFactory
-from ooni.nettest import InputTestSuite
-from ooni.plugoo import tests as oonitests
-from ooni.reporter import ReporterFactory
-from ooni.utils import log, geodata, date
-from ooni.utils.legacy import LegacyOONITest
-from ooni.utils.legacy import start_legacy_test, adapt_legacy_test
-
-
-__version__ = "0.1.0-beta"
-
-
-#def retrieve_plugoo():
-# """
-# Get all the plugins that implement the ITest interface and get the data
-# associated to them into a dict.
-# """
-# interface = ITest
-# d = {}
-# error = False
-# for p in getPlugins(interface, plugins):
-# try:
-# verifyObject(interface, p)
-# d[p.shortName] = p
-# except BrokenImplementation, bi:
-# print "Plugin Broken"
-# print bi
-# error = True
-# if error != False:
-# print "Plugin Loaded!"
-# return d
-#
-#plugoo = retrieve_plugoo()
-
-"""
-
-ai to watch over which tests to run - custodiet
-
- * runTest() or getPrefixMethodNames() to run the tests in order for each
- test (esp. the tcp and icmp parts) to be oonicompat we should use the
- test_icmp_ping API framework for those.
-
- * should handle calling
-
-tests to run:
- echo
- syn
- fin
- conn
- tls
- tor
-need fakebridge - canary
-
-"""
-
-def runTest(test, options, global_options, reactor=reactor):
- """
- Run an OONI probe test by name.
-
- @param test: a string specifying the test name as specified inside of
- shortName.
-
- @param options: the local options to be passed to the test.
-
- @param global_options: the global options for OONI
- """
- parallelism = int(global_options['parallelism'])
- worker = work.Worker(parallelism, reactor=reactor)
- test_class = plugoo[test].__class__
- report = reports.Report(test, global_options['output'])
-
- log_to_stdout = True
- if global_options['quiet']:
- log_to_stdout = False
-
- log.start(log_to_stdout,
- global_options['log'],
- global_options['verbosity'])
-
- resume = 0
- if not options:
- options = {}
- if 'resume' in options:
- resume = options['resume']
-
- test = test_class(options, global_options, report, reactor=reactor)
- if test.tool:
- test.runTool()
- return True
-
- if test.ended:
- print "Ending test"
- return None
-
- wgen = work.WorkGenerator(test,
- dict(options),
- start=resume)
- for x in wgen:
- worker.push(x)
-
-class MainOptions(usage.Options):
- tests = [bridget, ]
- subCommands = []
- for test in tests:
- print test
- testopt = getattr(test, 'options')
- subCommands.append([test, None, testopt, "Run the %s test" % test])
-
- optFlags = [
- ['quiet', 'q', "Don't log to stdout"]
- ]
-
- optParameters = [
- ['parallelism', 'n', 10, "Specify the number of parallel tests to run"],
- #['target-node', 't', 'localhost:31415', 'Select target node'],
- ['output', 'o', 'bridge.log', "Specify output report file"],
- ['reportfile', 'o', 'bridge.log', "Specify output log file"],
- ['verbosity', 'v', 1, "Specify the logging level"],
- ]
-
- def opt_version(self):
- """
- Display OONI version and exit.
- """
- print "OONI version:", __version__
- sys.exit(0)
-
- def __str__(self):
- """
- Hack to get the sweet ascii art into the help output and replace the
- strings "Commands" with "Tests".
- """
- return getlogo() + '\n' + self.getSynopsis() + '\n' + \
- self.getUsage(width=None).replace("Commands:", "Tests:")
-
-
-
-def isTestCase(thing):
- try:
- return issubclass(thing, unittest.TestCase)
- except TypeError:
- return False
-
-def isLegacyTest(obj):
- """
- Returns True if the test in question is written using the OONITest legacy
- class.
- We do this for backward compatibility of the OONIProbe API.
- """
- try:
- if issubclass(obj, oonitests.OONITest) and not obj == oonitests.OONITest:
- return True
- else:
- return False
- except TypeError:
- return False
-
-def processTest(obj, config):
- """
- Process the parameters and :class:`twisted.python.usage.Options` of a
- :class:`ooni.nettest.Nettest`.
-
- :param obj:
- An uninstantiated old test, which should be a subclass of
- :class:`ooni.plugoo.tests.OONITest`.
- :param config:
- A configured and instantiated :class:`twisted.python.usage.Options`
- class.
- """
-
- inputFile = obj.inputFile
-
- if obj.optParameters or inputFile:
- if not obj.optParameters:
- obj.optParameters = []
-
- if inputFile:
- obj.optParameters.append(inputFile)
-
- class Options(usage.Options):
- optParameters = obj.optParameters
-
- options = Options()
- options.parseOptions(config['subArgs'])
- obj.localOptions = options
-
- if inputFile:
- obj.inputFile = options[inputFile[0]]
- try:
- tmp_obj = obj()
- tmp_obj.getOptions()
- except usage.UsageError:
- options.opt_help()
-
- return obj
-
-def findTestClassesFromConfig(config):
- """
- Takes as input the command line config parameters and returns the test
- case classes.
- If it detects that a certain test class is using the old OONIProbe format,
- then it will adapt it to the new testing system.
-
- :param config:
- A configured and instantiated :class:`twisted.python.usage.Options`
- class.
- :return:
- A list of class objects found in a file or module given on the
- commandline.
- """
-
- filename = config['test']
- classes = []
-
- module = filenameToModule(filename)
- for name, val in inspect.getmembers(module):
- if isTestCase(val):
- classes.append(processTest(val, config))
- elif isLegacyTest(val):
- classes.append(adapt_legacy_test(val, config))
- return classes
-
-def makeTestCases(klass, tests, methodPrefix):
- """
- Takes a class some tests and returns the test cases. methodPrefix is how
- the test case functions should be prefixed with.
- """
-
- cases = []
- for test in tests:
- cases.append(klass(methodPrefix+test))
- return cases
-
-def loadTestsAndOptions(classes, config):
- """
- Takes a list of classes and returns their testcases and options.
- Legacy tests will be adapted.
- """
-
- methodPrefix = 'test'
- suiteFactory = InputTestSuite
- options = []
- testCases = []
- names = []
-
- _old_klass_type = LegacyOONITest
-
- for klass in classes:
- if isinstance(klass, _old_klass_type):
- try:
- cases = start_legacy_test(klass)
- #cases.callback()
- if cases:
- print cases
- return [], []
- testCases.append(cases)
- except Exception, e:
- log.err(e)
- else:
- try:
- opts = klass.local_options
- options.append(opts)
- except AttributeError, ae:
- options.append([])
- log.err(ae)
- elif not isinstance(klass, _old_klass_type):
- tests = reflect.prefixedMethodNames(klass, methodPrefix)
- if tests:
- cases = makeTestCases(klass, tests, methodPrefix)
- testCases.append(cases)
- try:
- k = klass()
- opts = k.getOptions()
- options.append(opts)
- except AttributeError, ae:
- options.append([])
- log.err(ae)
- else:
- try:
- raise RuntimeError, "Class is some strange type!"
- except RuntimeError, re:
- log.err(re)
-
- return testCases, options
-
-class ORunner(object):
- """
- This is a specialized runner used by the ooniprobe command line tool.
- I am responsible for reading the inputs from the test files and splitting
- them in input units. I also create all the report instances required to run
- the tests.
- """
- def __init__(self, cases, options=None, config=None, *arg, **kw):
- self.baseSuite = InputTestSuite
- self.cases = cases
- self.options = options
-
- try:
- assert len(options) != 0, "Length of options is zero!"
- except AssertionError, ae:
- self.inputs = []
- log.err(ae)
- else:
- try:
- first = options.pop(0)
- except:
- first = {}
- if 'inputs' in first:
- self.inputs = options['inputs']
- else:
- log.msg("Could not find inputs!")
- log.msg("options[0] = %s" % first)
- self.inputs = [None]
-
- try:
- reportFile = open(config['reportfile'], 'a+')
- except:
- filename = 'report_'+date.timestamp()+'.yaml'
- reportFile = open(filename, 'a+')
- self.reporterFactory = ReporterFactory(reportFile,
- testSuite=self.baseSuite(self.cases))
-
- def runWithInputUnit(self, inputUnit):
- idx = 0
- result = self.reporterFactory.create()
-
- for inputs in inputUnit:
- result.reporterFactory = self.reporterFactory
-
- suite = self.baseSuite(self.cases)
- suite.input = inputs
- suite(result, idx)
-
- # XXX refactor all of this index bullshit to avoid having to pass
- # this index around. Probably what I want to do is go and make
- # changes to report to support the concept of having multiple runs
- # of the same test.
- # We currently need to do this addition in order to get the number
- # of times the test cases that have run inside of the test suite.
- idx += (suite._idx - idx)
-
- result.done()
-
- def run(self):
- self.reporterFactory.options = self.options
- for inputUnit in InputUnitFactory(self.inputs):
- self.runWithInputUnit(inputUnit)
-
-if __name__ == "__main__":
- config = Options()
- config.parseOptions()
-
- if not config.subCommand:
- config.opt_help()
- signal(SIGTERM)
- #sys.exit(1)
-
- runTest(config.subCommand, config.subOptions, config)
- reactor.run()
diff --git a/ooni/bridget/tests/__init__.py b/ooni/bridget/tests/__init__.py
deleted file mode 100644
index 9ecc88d..0000000
--- a/ooni/bridget/tests/__init__.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# -*- coding: UTF-8
-#
-# bridget/tests/__init__.py
-# *************************
-#
-# "...quis custodiet ipsos custodes?"
-# - Juvenal, Satires VI.347-348 (circa 2nd Century, C.E.)
-#
-# :copyright: (c) 2012 Isis Lovecruft
-# :license: see LICENSE for more details.
-# :version: 0.1.0-beta
-#
-
-all = ['bridget']
diff --git a/ooni/bridget/tests/bridget.py b/ooni/bridget/tests/bridget.py
deleted file mode 100644
index a334747..0000000
--- a/ooni/bridget/tests/bridget.py
+++ /dev/null
@@ -1,499 +0,0 @@
-#!/usr/bin/env python
-# -*- encoding: utf-8 -*-
-#
-# +-----------+
-# | BRIDGET |
-# | +--------------------------------------------+
-# +--------| Use a Tor process to test making a Tor |
-# | connection to a list of bridges or relays. |
-# +--------------------------------------------+
-#
-# :authors: Isis Lovecruft, Arturo Filasto
-# :licence: see included LICENSE
-# :version: 0.1.0-alpha
-
-from __future__ import with_statement
-from functools import partial
-from random import randint
-
-import os
-import sys
-
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from twisted.internet import defer, error, reactor
-from zope.interface import implements
-
-from ooni.utils import log, date
-from ooni.utils.config import ValueChecker
-
-from ooni.plugoo.tests import ITest, OONITest
-from ooni.plugoo.assets import Asset, MissingAssetException
-from ooni.utils.onion import TxtorconImportError
-from ooni.utils.onion import PTNoBridgesException, PTNotFoundException
-
-try:
- from ooni.utils.onion import parse_data_dir
-except:
- log.msg("Please go to /ooni/lib and do 'make txtorcon' to run this test!")
-
-class RandomPortException(Exception):
- """Raised when using a random port conflicts with configured ports."""
- def __init__(self):
- log.msg("Unable to use random and specific ports simultaneously")
- return sys.exit()
-
-class BridgetArgs(usage.Options):
- """Commandline options."""
- allowed = "Port to use for Tor's %s, must be between 1024 and 65535."
- sock_check = ValueChecker(allowed % "SocksPort").port_check
- ctrl_check = ValueChecker(allowed % "ControlPort").port_check
-
- optParameters = [
- ['bridges', 'b', None,
- 'File listing bridge IP:ORPorts to test'],
- ['relays', 'f', None,
- 'File listing relay IPs to test'],
- ['socks', 's', 9049, None, sock_check],
- ['control', 'c', 9052, None, ctrl_check],
- ['torpath', 'p', None,
- 'Path to the Tor binary to use'],
- ['datadir', 'd', None,
- 'Tor DataDirectory to use'],
- ['transport', 't', None,
- 'Tor ClientTransportPlugin'],
- ['resume', 'r', 0,
- 'Resume at this index']]
- optFlags = [['random', 'x', 'Use random ControlPort and SocksPort']]
-
- def postOptions(self):
- if not self['bridges'] and not self['relays']:
- raise MissingAssetException(
- "Bridget can't run without bridges or relays to test!")
- if self['transport']:
- ValueChecker.uid_check(
- "Can't run bridget as root with pluggable transports!")
- if not self['bridges']:
- raise PTNoBridgesException
- if self['socks'] or self['control']:
- if self['random']:
- raise RandomPortException
- if self['datadir']:
- ValueChecker.dir_check(self['datadir'])
- if self['torpath']:
- ValueChecker.file_check(self['torpath'])
-
-class BridgetAsset(Asset):
- """Class for parsing bridget Assets ignoring commented out lines."""
- def __init__(self, file=None):
-        Asset.__init__(self, file)
-
- def parse_line(self, line):
- if line.startswith('#'):
- return
- else:
- return line.replace('\n','')
-
-class BridgetTest(OONITest):
- """
- XXX fill me in
-
- :ivar config:
- An :class:`ooni.lib.txtorcon.TorConfig` instance.
- :ivar relays:
- A list of all provided relays to test.
- :ivar bridges:
- A list of all provided bridges to test.
- :ivar socks_port:
- Integer for Tor's SocksPort.
- :ivar control_port:
- Integer for Tor's ControlPort.
- :ivar transport:
- String defining the Tor's ClientTransportPlugin, for testing
- a bridge's pluggable transport functionality.
- :ivar tor_binary:
- Path to the Tor binary to use, e.g. \'/usr/sbin/tor\'
- """
- implements(IPlugin, ITest)
-
- shortName = "bridget"
- description = "Use a Tor process to test connecting to bridges or relays"
- requirements = None
- options = BridgetArgs
- blocking = False
-
- def initialize(self):
- """
- Extra initialization steps. We only want one child Tor process
- running, so we need to deal with most of the TorConfig() only once,
- before the experiment runs.
- """
- self.socks_port = 9049
- self.control_port = 9052
- self.circuit_timeout = 90
- self.tor_binary = '/usr/sbin/tor'
- self.data_directory = None
-
- def __make_asset_list__(opt, lst):
- log.msg("Loading information from %s ..." % opt)
- with open(opt) as opt_file:
- for line in opt_file.readlines():
- if line.startswith('#'):
- continue
- else:
- lst.append(line.replace('\n',''))
-
- def __count_remaining__(which):
- total, reach, unreach = map(lambda x: which[x],
- ['all', 'reachable', 'unreachable'])
- count = len(total) - reach() - unreach()
- return count
-
- ## XXX should we do report['bridges_up'].append(self.bridges['current'])
- self.bridges = {}
- self.bridges['all'], self.bridges['up'], self.bridges['down'] = \
- ([] for i in range(3))
- self.bridges['reachable'] = lambda: len(self.bridges['up'])
- self.bridges['unreachable'] = lambda: len(self.bridges['down'])
- self.bridges['remaining'] = lambda: __count_remaining__(self.bridges)
- self.bridges['current'] = None
- self.bridges['pt_type'] = None
- self.bridges['use_pt'] = False
-
- self.relays = {}
- self.relays['all'], self.relays['up'], self.relays['down'] = \
- ([] for i in range(3))
- self.relays['reachable'] = lambda: len(self.relays['up'])
- self.relays['unreachable'] = lambda: len(self.relays['down'])
- self.relays['remaining'] = lambda: __count_remaining__(self.relays)
- self.relays['current'] = None
-
- if self.local_options:
- try:
- from ooni.lib.txtorcon import TorConfig
- except ImportError:
- raise TxtorconImportError
- else:
- self.config = TorConfig()
- finally:
- options = self.local_options
-
- if options['bridges']:
- self.config.UseBridges = 1
- __make_asset_list__(options['bridges'], self.bridges['all'])
- if options['relays']:
- ## first hop must be in TorState().guards
-                __make_asset_list__(options['relays'], self.relays['all'])
-                self.config.EntryNodes = ','.join(self.relays['all'])
- if options['socks']:
- self.socks_port = options['socks']
- if options['control']:
- self.control_port = options['control']
- if options['random']:
- log.msg("Using randomized ControlPort and SocksPort ...")
-                self.socks_port = randint(1024, 65535)
-                self.control_port = randint(1024, 65535)
- if options['torpath']:
- self.tor_binary = options['torpath']
- if options['datadir']:
- self.data_directory = parse_data_dir(options['datadir'])
- if options['transport']:
- ## ClientTransportPlugin transport exec pathtobinary [options]
- ## XXX we need a better way to deal with all PTs
- log.msg("Using ClientTransportPlugin %s" % options['transport'])
- self.bridges['use_pt'] = True
- [self.bridges['pt_type'], pt_exec] = \
- options['transport'].split(' ', 1)
-
- if self.bridges['pt_type'] == "obfs2":
- self.config.ClientTransportPlugin = \
- self.bridges['pt_type'] + " " + pt_exec
- else:
- raise PTNotFoundException
-
- self.config.SocksPort = self.socks_port
- self.config.ControlPort = self.control_port
- self.config.CookieAuthentication = 1
-
- def __load_assets__(self):
- """
- Load bridges and/or relays from files given in user options. Bridges
- should be given in the form IP:ORport. We don't want to load these as
- assets, because it's inefficient to start a Tor process for each one.
-
- We cannot use the Asset model, because that model calls
- self.experiment() with the current Assets, which would be one relay
- and one bridge, then it gives the defer.Deferred returned from
- self.experiment() to self.control(), which means that, for each
- (bridge, relay) pair, experiment gets called again, which instantiates
- an additional Tor process that attempts to bind to the same
- ports. Thus, additionally instantiated Tor processes return with
- RuntimeErrors, which break the final defer.chainDeferred.callback(),
- sending it into the errback chain.
- """
- assets = {}
- if self.local_options:
- if self.local_options['bridges']:
- assets.update({'bridge':
- BridgetAsset(self.local_options['bridges'])})
- if self.local_options['relays']:
- assets.update({'relay':
- BridgetAsset(self.local_options['relays'])})
- return assets
-
- def experiment(self, args):
- """
- if bridges:
- 1. configure first bridge line
- 2a. configure data_dir, if it doesn't exist
- 2b. write torrc to a tempfile in data_dir
- 3. start tor } if any of these
- 4. remove bridges which are public relays } fail, add current
- 5. SIGHUP for each bridge } bridge to unreach-
- } able bridges.
- if relays:
- 1a. configure the data_dir, if it doesn't exist
- 1b. write torrc to a tempfile in data_dir
- 2. start tor
- 3. remove any of our relays which are already part of current
- circuits
- 4a. attach CustomCircuit() to self.state
- 4b. RELAY_EXTEND for each relay } if this fails, add
- } current relay to list
- } of unreachable relays
- 5.
- if bridges and relays:
- 1. configure first bridge line
- 2a. configure data_dir if it doesn't exist
- 2b. write torrc to a tempfile in data_dir
- 3. start tor
- 4. remove bridges which are public relays
- 5. remove any of our relays which are already part of current
- circuits
- 6a. attach CustomCircuit() to self.state
- 6b. for each bridge, build three circuits, with three
- relays each
- 6c. RELAY_EXTEND for each relay } if this fails, add
- } current relay to list
- } of unreachable relays
-
-        :param args:
-            The :class:`BridgetAsset` line currently being used. Except that in
-            Bridget it isn't actually used, so it should be ignored and avoided.
- """
- try:
- from ooni.utils import process
- from ooni.utils.onion import remove_public_relays, start_tor
- from ooni.utils.onion import start_tor_filter_nodes
- from ooni.utils.onion import setup_fail, setup_done
- from ooni.utils.onion import CustomCircuit
- from ooni.utils.timer import deferred_timeout, TimeoutError
- from ooni.lib.txtorcon import TorConfig, TorState
- except ImportError:
- raise TxtorconImportError
- except TxtorconImportError, tie:
- log.err(tie)
- sys.exit()
-
- def reconfigure_done(state, bridges):
- """
- Append :ivar:`bridges['current']` to the list
-            :ivar:`bridges['up']`.
- """
- log.msg("Reconfiguring with 'Bridge %s' successful"
- % bridges['current'])
- bridges['up'].append(bridges['current'])
- return state
-
- def reconfigure_fail(state, bridges):
- """
- Append :ivar:`bridges['current']` to the list
-            :ivar:`bridges['down']`.
- """
- log.msg("Reconfiguring TorConfig with parameters %s failed"
- % state)
- bridges['down'].append(bridges['current'])
- return state
-
- @defer.inlineCallbacks
- def reconfigure_bridge(state, bridges):
- """
- Rewrite the Bridge line in our torrc. If use of pluggable
- transports was specified, rewrite the line as:
- Bridge <transport_type> <IP>:<ORPort>
- Otherwise, rewrite in the standard form:
- Bridge <IP>:<ORPort>
-
- :param state:
- A fully bootstrapped instance of
- :class:`ooni.lib.txtorcon.TorState`.
- :param bridges:
- A dictionary of bridges containing the following keys:
-
-                bridges['remaining'] :: A function returning an int for the
- number of remaining bridges to test.
- bridges['current'] :: A string containing the <IP>:<ORPort>
- of the current bridge.
- bridges['use_pt'] :: A boolean, True if we're testing
- bridges with a pluggable transport;
- False otherwise.
-                bridges['pt_type'] :: If :ivar:`bridges['use_pt']` is True,
- this is a string containing the type
- of pluggable transport to test.
- :return:
- :param:`state`
- """
- log.msg("Current Bridge: %s" % bridges['current'])
- log.msg("We now have %d bridges remaining to test..."
- % bridges['remaining']())
- try:
- if bridges['use_pt'] is False:
- controller_response = yield state.protocol.set_conf(
- 'Bridge', bridges['current'])
- elif bridges['use_pt'] and bridges['pt_type'] is not None:
-                    controller_response = yield state.protocol.set_conf(
- 'Bridge', bridges['pt_type'] +' '+ bridges['current'])
- else:
- raise PTNotFoundException
-
- if controller_response == 'OK':
- finish = yield reconfigure_done(state, bridges)
- else:
- log.err("SETCONF for %s responded with error:\n %s"
- % (bridges['current'], controller_response))
- finish = yield reconfigure_fail(state, bridges)
-
- defer.returnValue(finish)
-
- except Exception, e:
- log.err("Reconfiguring torrc with Bridge line %s failed:\n%s"
- % (bridges['current'], e))
- defer.returnValue(None)
-
- def attacher_extend_circuit(attacher, deferred, router):
- ## XXX todo write me
- ## state.attacher.extend_circuit
-            raise NotImplementedError
- #attacher.extend_circuit
-
- def state_attach(state, path):
- log.msg("Setting up custom circuit builder...")
- attacher = CustomCircuit(state)
- state.set_attacher(attacher, reactor)
- state.add_circuit_listener(attacher)
- return state
-
- ## OLD
- #for circ in state.circuits.values():
- # for relay in circ.path:
- # try:
- # relay_list.remove(relay)
- # except KeyError:
- # continue
- ## XXX how do we attach to circuits with bridges?
- d = defer.Deferred()
- attacher.request_circuit_build(d)
- return d
-
- def state_attach_fail(state):
- log.err("Attaching custom circuit builder failed: %s" % state)
-
- log.msg("Bridget: initiating test ... ") ## Start the experiment
-
- ## if we've at least one bridge, and our config has no 'Bridge' line
- if self.bridges['remaining']() >= 1 \
- and not 'Bridge' in self.config.config:
-
- ## configure our first bridge line
- self.bridges['current'] = self.bridges['all'][0]
- self.config.Bridge = self.bridges['current']
- ## avoid starting several
- self.config.save() ## processes
- assert self.config.config.has_key('Bridge'), "No Bridge Line"
-
- ## start tor and remove bridges which are public relays
- from ooni.utils.onion import start_tor_filter_nodes
- state = start_tor_filter_nodes(reactor, self.config,
- self.control_port, self.tor_binary,
- self.data_directory, self.bridges)
- #controller = defer.Deferred()
- #controller.addCallback(singleton_semaphore, tor)
- #controller.addErrback(setup_fail)
- #bootstrap = defer.gatherResults([controller, filter_bridges],
- # consumeErrors=True)
-
- if state is not None:
- log.debug("state:\n%s" % state)
- log.debug("Current callbacks on TorState():\n%s"
- % state.callbacks)
-
- ## if we've got more bridges
- if self.bridges['remaining']() >= 2:
- #all = []
- for bridge in self.bridges['all'][1:]:
- self.bridges['current'] = bridge
- #new = defer.Deferred()
- #new.addCallback(reconfigure_bridge, state, self.bridges)
- #all.append(new)
- #check_remaining = defer.DeferredList(all, consumeErrors=True)
- #state.chainDeferred(check_remaining)
- state.addCallback(reconfigure_bridge, self.bridges)
-
- if self.relays['remaining']() > 0:
- while self.relays['remaining']() >= 3:
- #path = list(self.relays.pop() for i in range(3))
- #log.msg("Trying path %s" % '->'.join(map(lambda node:
- # node, path)))
- self.relays['current'] = self.relays['all'].pop()
- for circ in state.circuits.values():
- for node in circ.path:
- if node == self.relays['current']:
- self.relays['up'].append(self.relays['current'])
- if len(circ.path) < 3:
- try:
- ext = attacher_extend_circuit(state.attacher, circ,
- self.relays['current'])
- ext.addCallback(attacher_extend_circuit_done,
- state.attacher, circ,
- self.relays['current'])
- except Exception, e:
- log.err("Extend circuit failed: %s" % e)
- else:
- continue
-
- #state.callback(all)
- #self.reactor.run()
- return state
-
- def startTest(self, args):
- """
- Local override of :meth:`OONITest.startTest` to bypass calling
- self.control.
-
- :param args:
- The current line of :class:`Asset`, not used but kept for
- compatibility reasons.
- :return:
- A fired deferred which callbacks :meth:`experiment` and
- :meth:`OONITest.finished`.
- """
- self.start_time = date.now()
- self.d = self.experiment(args)
- self.d.addErrback(log.err)
- self.d.addCallbacks(self.finished, log.err)
- return self.d
-
-## So that getPlugins() can register the Test:
-#bridget = BridgetTest(None, None, None)
-
-## ISIS' NOTES
-## -----------
-## TODO:
-## x cleanup documentation
-## x add DataDirectory option
-## x check if bridges are public relays
-## o take bridge_desc file as input, also be able to give same
-## format as output
-## x Add asynchronous timeout for deferred, so that we don't wait
-##     o Add asynchronous timeout for deferred, so that we don't wait
-## forever for bridges that don't work.
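
The bridge bookkeeping that initialize() builds above is a dict of lists plus a
few lambda counters. A minimal standalone sketch of the same idea (the bridge
addresses are hypothetical, not taken from any real test run):

## Sketch of the bridges dict used by BridgetTest; the values are made up.
bridges = {'all': ['1.1.1.1:443', '2.2.2.2:9001', '3.3.3.3:443'],
           'up': [], 'down': []}
bridges['reachable'] = lambda: len(bridges['up'])
bridges['unreachable'] = lambda: len(bridges['down'])
bridges['remaining'] = lambda: (len(bridges['all'])
                                - bridges['reachable']()
                                - bridges['unreachable']())

bridges['up'].append(bridges['all'][0])    ## one bridge answered
bridges['down'].append(bridges['all'][1])  ## one bridge failed
print bridges['remaining']()               ## -> 1
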
diff --git a/ooni/bridget/tests/echo.py b/ooni/bridget/tests/echo.py
deleted file mode 100644
index 7f3217a..0000000
--- a/ooni/bridget/tests/echo.py
+++ /dev/null
@@ -1,205 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-#
-# +---------+
-# | echo.py |
-# +---------+
-# A simple ICMP-8 ping test.
-#
-# :author: Isis Lovecruft
-# :version: 0.0.1-pre-alpha
-# :license: (c) 2012 Isis Lovecruft
-# see attached LICENCE file
-#
-
-import os
-import sys
-
-from pprint import pformat, pprint
-
-from twisted.internet import defer, reactor
-from twisted.plugin import IPlugin
-from twisted.python import usage
-from ooni.nettest import NetTestCase
-from ooni.utils import log, Storage
-from ooni.utils.net import PermissionsError, IfaceError
-
-try:
- from scapy.all import sr1, IP, ICMP ## XXX v4/v6?
- from ooni.lib import txscapy
- from ooni.lib.txscapy import txsr, txsend
- from ooni.templates.scapyt import ScapyTest
-except:
- log.msg("This test requires scapy, see www.secdev.org/projects/scapy")
-
-## xxx TODO: move these to a utility function for determining OSes
-LINUX=sys.platform.startswith("linux")
-OPENBSD=sys.platform.startswith("openbsd")
-FREEBSD=sys.platform.startswith("freebsd")
-NETBSD=sys.platform.startswith("netbsd")
-DARWIN=sys.platform.startswith("darwin")
-SOLARIS=sys.platform.startswith("sunos")
-WINDOWS=sys.platform.startswith("win32")
-
-class EchoTest(ScapyTest):
- """
- xxx fill me in
- """
- name = 'echo'
- author = 'Isis Lovecruft <isis(a)torproject.org>'
- description = 'A simple ICMP-8 test to see if a host is reachable.'
- version = '0.0.1'
- inputFile = ['file', 'f', None, 'File of list of IPs to ping']
- requirements = None
- report = Storage()
-
- optParameters = [
- ['interface', 'i', None, 'Network interface to use'],
- ['count', 'c', 5, 'Number of packets to send', int],
- ['size', 's', 56, 'Number of bytes to send in ICMP data field', int],
- ['ttl', 'l', 25, 'Set the IP Time to Live', int],
- ['timeout', 't', 2, 'Seconds until timeout if no response', int],
- ['pcap', 'p', None, 'Save pcap to this file'],
- ['receive', 'r', True, 'Receive response packets']
- ]
-
- def setUpClass(self, *a, **kw):
- '''
- :ivar ifaces:
- Struct returned from getifaddrs(3) and turned into a tuple in the
- form (*ifa_name, AF_FAMILY, *ifa_addr)
- '''
- super(EchoTest, self).__init__(*a, **kw)
-
- ## allow subclasses which register/implement external classes
- ## to define their own reactor without overrides:
- if not hasattr(super(EchoTest, self), 'reactor'):
- log.debug("%s test: Didn't find reactor!" % self.name)
- self.reactor = reactor
-
- if self.localOptions:
- log.debug("%s localOptions found" % self.name)
- log.debug("%s test options: %s" % (self.name, self.subOptions))
- self.local_options = self.localOptions.parseOptions(self.subOptions)
- for key, value in self.local_options:
- log.debug("Set attribute %s[%s] = %s" % (self.name, key, value))
- setattr(self, key, value)
-
- ## xxx is this now .subOptions?
- #self.inputFile = self.localOptions['file']
- self.timeout *= 1000 ## convert to milliseconds
-
- if not self.interface:
- log.msg("No network interface specified!")
- log.debug("OS detected: %s" % sys.platform)
- if LINUX or OPENBSD or NETBSD or FREEBSD or DARWIN or SOLARIS:
- from twisted.internet.test import _posixifaces
- log.msg("Attempting to discover network interfaces...")
- ifaces = _posixifaces._interfaces()
- elif WINDOWS:
- from twisted.internet.test import _win32ifaces
- log.msg("Attempting to discover network interfaces...")
- ifaces = _win32ifaces._interfaces()
- else:
- log.debug("Client OS %s not accounted for!" % sys.platform)
- log.debug("Unable to discover network interfaces...")
- ifaces = [('lo', '')]
-
- ## found = {'eth0': '1.1.1.1'}
- found = [{i[0]: i[2]} for i in ifaces if i[0] != 'lo']
-            log.info("Found interfaces:\n%s" % pformat(found))
- self.interfaces = self.tryInterfaces(found)
- else:
- ## xxx need a way to check that iface exists, is up, and
- ## we have permissions on it
- log.debug("Our interface has been set to %s" % self.interface)
-
- if self.pcap:
- try:
- self.pcapfile = open(self.pcap, 'a+')
- except:
- log.msg("Unable to write to pcap file %s" % self.pcap)
- self.pcapfile = None
-
- try:
- assert os.path.isfile(self.file)
- fp = open(self.file, 'r')
- except Exception, e:
- hosts = ['8.8.8.8', '38.229.72.14']
- log.err(e)
- else:
- self.inputs = self.inputProcessor(fp)
- self.removePorts(hosts)
-
- log.debug("Initialization of %s test completed with:\n%s"
- % (self.name, ''.join(self.__dict__)))
-
- @staticmethod
- def inputParser(inputs):
- log.debug("Removing possible ports from host addresses...")
-        log.debug("Initial inputs:\n%s" % pformat(inputs))
-
- assert isinstance(inputs, list)
- hosts = [h.rsplit(':', 1)[0] for h in inputs]
- log.debug("Inputs converted to:\n%s" % hosts)
-
- return hosts
-
- def tryInterfaces(self, ifaces):
- try:
-            from scapy.all import sr, sr1 ## we want this check to be blocking
- except:
- log.msg("This test requires scapy: www.secdev.org/projects/scapy")
- raise SystemExit
-
- ifup = {}
- while ifaces:
- for ifname, ifaddr in ifaces:
- log.debug("Currently testing network capabilities of interface"
- + "%s by sending a packet to our address %s"
- % (ifname, ifaddr))
- try:
- pkt = IP(dst=ifaddr)/ICMP()
- ans, unans = sr(pkt, iface=ifname, timeout=self.timeout)
- except Exception, e:
-                log.err(e)
-                if "Errno 1" in str(e):
-                    raise PermissionsError
- else:
- ## xxx i think this logic might be wrong
- log.debug("Interface test packet\n%s\n\n%s"
- % (pkt.summary(), pkt.show2()))
- if ans.summary():
- log.info("Received answer for test packet on interface"
- +"%s :\n%s" % (ifname, ans.summary()))
-                    ifup[ifname] = ifaddr
- else:
- log.info("Our interface test packet was unanswered:\n%s"
- % unans.summary())
-
- if len(ifup) > 0:
- log.msg("Discovered the following working network interfaces: %s"
- % ifup)
- return ifup
- else:
- raise IfaceError("Could not find a working network interface.")
-
- def buildPackets(self):
- log.debug("self.input is %s" % self.input)
- log.debug("self.hosts is %s" % self.hosts)
- for addr in self.input:
-            packet = IP(dst=addr)/ICMP()
- self.request.append(packet)
- return packet
-
- def test_icmp(self):
-        if self.receive:
- self.buildPackets()
- all = []
- for packet in self.request:
- d = self.sendReceivePackets(packets=packet)
- all.append(d)
- self.response.update({packet: d})
- d_list = defer.DeferredList(all)
- return d_list
- else:
- d = self.sendPackets()
- return d
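
The echo test above reduces to building IP()/ICMP() packets with scapy and
sending them. A minimal blocking sketch of that check, assuming scapy is
installed and the process may open raw sockets (usually requires root); the
target address is only an example:

## Send one ICMP echo request and report whether an answer came back.
from scapy.all import sr1, IP, ICMP

def ping_once(host, timeout=2):
    ## sr1() returns the first answer, or None if the timeout expires.
    answer = sr1(IP(dst=host)/ICMP(), timeout=timeout, verbose=0)
    return answer is not None

print ping_once('8.8.8.8')
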
diff --git a/ooni/bridget/tests/tls-handshake.py b/ooni/bridget/tests/tls-handshake.py
deleted file mode 100644
index eba950e..0000000
--- a/ooni/bridget/tests/tls-handshake.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env python
-
-import subprocess
-from subprocess import PIPE
-serverport = "129.21.124.215:443"
-# a subset of those from firefox
-ciphers = [
- "ECDHE-ECDSA-AES256-SHA",
- "ECDHE-RSA-AES256-SHA",
- "DHE-RSA-CAMELLIA256-SHA",
- "DHE-DSS-CAMELLIA256-SHA",
- "DHE-RSA-AES256-SHA",
- "DHE-DSS-AES256-SHA",
- "ECDH-ECDSA-AES256-CBC-SHA",
- "ECDH-RSA-AES256-CBC-SHA",
- "CAMELLIA256-SHA",
- "AES256-SHA",
- "ECDHE-ECDSA-RC4-SHA",
- "ECDHE-ECDSA-AES128-SHA",
- "ECDHE-RSA-RC4-SHA",
- "ECDHE-RSA-AES128-SHA",
- "DHE-RSA-CAMELLIA128-SHA",
- "DHE-DSS-CAMELLIA128-SHA"
-]
-def checkBridgeConnection(host, port):
- cipher_arg = ":".join(ciphers)
- cmd = ["openssl", "s_client", "-connect", "%s:%s" % (host,port)]
- cmd += ["-cipher", cipher_arg]
- proc = subprocess.Popen(cmd, stdout=PIPE, stderr=PIPE,stdin=PIPE)
- out, error = proc.communicate()
- success = "Cipher is DHE-RSA-AES256-SHA" in out
- return success
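
checkBridgeConnection() above boils down to shelling out to openssl s_client
and inspecting the negotiated cipher. A small self-contained sketch of that
check, assuming the openssl binary is on PATH; the host and port are the
hard-coded test values from the file, not anything authoritative:

## Run a TLS handshake against host:port and report whether a cipher was
## actually negotiated (s_client prints "Cipher is (NONE)" on failure).
import subprocess
from subprocess import PIPE

def tls_handshake(host, port, ciphers="AES256-SHA"):
    cmd = ["openssl", "s_client", "-connect", "%s:%s" % (host, port),
           "-cipher", ciphers]
    proc = subprocess.Popen(cmd, stdout=PIPE, stderr=PIPE, stdin=PIPE)
    out, error = proc.communicate()
    return "Cipher is" in out and "Cipher is (NONE)" not in out

print tls_handshake("129.21.124.215", 443)
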
diff --git a/ooni/bridget/utils/__init__.py b/ooni/bridget/utils/__init__.py
deleted file mode 100644
index 92893d6..0000000
--- a/ooni/bridget/utils/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-all = ['inputs', 'log', 'onion', 'tests', 'interface', 'nodes', 'reports', 'work']
diff --git a/ooni/bridget/utils/inputs.py b/ooni/bridget/utils/inputs.py
deleted file mode 100644
index fe058cc..0000000
--- a/ooni/bridget/utils/inputs.py
+++ /dev/null
@@ -1,174 +0,0 @@
-#-*- coding: utf-8 -*-
-#
-# inputs.py
-# *********
-#
-# "...quis custodiet ipsos custodes?"
-# - Juvenal, Satires VI.347-348 (circa 2nd Century, C.E.)
-#
-# :copyright: (c) 2012 Isis Lovecruft
-# :license: see LICENSE for more details.
-# :version: 0.1.0-beta
-#
-
-#from types import FunctionType, FileType
-import types
-
-from ooni.bridget import log
-from ooni.utils import date, Storage
-
-class InputFile:
- """
-    This is a class describing a file used to store Tor bridge or relay
-    inputs. It is a Python iterator object, allowing it to be looped over
-    efficiently.
-
- This class should not be used directly, but rather its subclasses,
- BridgeFile and RelayFile should be used instead.
- """
-
- def __init__(self, file, **kw):
- """
- ## This is an InputAsset file, created because you tried to pass a
- ## non-existent filename to a test.
- ##
- ## To use this file, place one input to be tested per line. Each
- ## test takes different inputs. Lines which are commented out with
- ## a '#' are not used.
- """
- self.file = file
- self.eof = False
- self.all = Storage()
-
- for key, value in input_dict:
- self.all[key] = value
-
- try:
- self.handler = open(self.file, 'r')
- except IOError:
- with open(self.file, 'w') as explain:
- for line in self.__init__.__doc__:
- explain.writeline(line)
- self.handler = open(self.file, 'r')
- try:
- assert isinstance(self.handler, file), "That's not a file!"
- except AssertionError, ae:
- log.err(ae)
-
- # def __handler__(self):
- # """
- # Attempt to open InputFile.file and check that it is actually a file.
- # If it's not, create it and add an explaination for how InputFile files
- # should be used.
-
- # :return:
- # A :type:`file` which has been opened in read-only mode.
- # """
- # try:
- # handler = open(self.file, 'r')
- # except IOError, ioerror: ## not the hacker <(A)3
- # log.err(ioerror)
- # explanation = (
- # with open(self.file, 'w') as explain:
- # for line in explanation:
- # explain.writeline(line)
- # handler = open(self.file, 'r')
- # try:
- # assert isinstance(handler, file), "That's not a file!"
- # except AssertionError, ae:
- # log.err(ae)
- # else:
- # return handler
-
-    def __iter__(self):
- """
- Returns the next input from the file.
- """
- #return self.next()
- return self
-
- def len(self):
- """
-        Returns the number of lines in the InputFile.
- """
- with open(self.file, 'r') as input_file:
- lines = input_file.readlines()
- for number, line in enumerate(lines):
- self.input_dict[number] = line
- return number + 1
-
- def next(self):
- try:
- return self.next_input()
- except:
- raise StopIteration
-
- def next_input(self):
- """
- Return the next input.
- """
- line = self.handler.readline()
- if line:
- parsed_line = self.parse_line(line)
- if parsed_line:
- return parsed_line
- else:
-            self.handler.seek(0)
- raise StopIteration
-
- def default_parser(self, line):
- """
- xxx fill me in
- """
- if not line.startswith('#'):
- return line.replace('\n', '')
- else:
- return False
-
- def parse_line(self, line):
- """
- Override this method if you need line by line parsing of an Asset.
-
- The default parsing action is to ignore lines which are commented out
- with a '#', and to strip the newline character from the end of the
- line.
-
- If the line was commented out return an empty string instead.
-
- If a subclass Foo incorporates another class Bar, when Bar is not
- also a subclass of InputFile, and Bar.parse_line() exists, then
- do not overwrite Bar's parse_line method.
- """
- assert not hasattr(super(InputFile, self), 'parse_line')
-
- if self.parser is None:
- if not line.startswith('#'):
- return line.replace('\n', '')
- else:
- return ''
- else:
- try:
-                assert isinstance(self.parser, types.FunctionType), "Not a function!"
- except AssertionError, ae:
- log.err(ae)
- else:
- return self.parser(line)
-
-class BridgeFile(InputFile):
- """
- xxx fill me in
- """
- def __init__(self, **kw):
- super(BridgeFile, self).__init__(**kw)
-
-class MissingInputException(Exception):
- """
-
- Raised when an :class:`InputFile` necessary for running the Test is
- missing.
-
- """
- def __init__(self, error_message):
- print error_message
- import sys
- return sys.exit()
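
The parsing InputFile does, skipping '#'-commented lines and stripping the
trailing newline, can be sketched with a plain generator; 'bridges.txt' is a
hypothetical input file holding one IP:ORPort per line:

## Yield each usable input line from a file, ignoring '#' comments.
def parsed_inputs(filename):
    with open(filename) as handler:
        for line in handler:
            if line.startswith('#'):
                continue
            yield line.replace('\n', '')

for bridge in parsed_inputs('bridges.txt'):
    print bridge
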
diff --git a/ooni/bridget/utils/interface.py b/ooni/bridget/utils/interface.py
deleted file mode 100644
index aa55436..0000000
--- a/ooni/bridget/utils/interface.py
+++ /dev/null
@@ -1,54 +0,0 @@
-from zope.interface import implements, Interface, Attribute
-
-class ITest(Interface):
- """
- This interface represents an OONI test. It fires a deferred on completion.
- """
-
- shortName = Attribute("""A short user facing description for this test""")
- description = Attribute("""A string containing a longer description for the test""")
-
-    requirements = Attribute("""What is required to run this test, for example raw socket access or UDP or TCP""")
-
-    options = Attribute("""These are the arguments to be passed to the test for its execution""")
-
- blocking = Attribute("""True or False, stating if the test should be run in a thread or not.""")
-
- def control(experiment_result, args):
- """
- @param experiment_result: The result returned by the experiment method.
-
- @param args: the keys of this dict are the names of the assets passed in
- from load_assets. The value is one item of the asset.
-
- Must return a dict containing what should be written to the report.
- Anything returned by control ends up inside of the YAMLOONI report.
- """
-
- def experiment(args):
- """
- Perform all the operations that are necessary to running a test.
-
- @param args: the keys of this dict are the names of the assets passed in
- from load_assets. The value is one item of the asset.
-
- Must return a dict containing the values to be passed to control.
- """
-
- def load_assets():
- """
- Load the assets that should be passed to the Test. These are the inputs
- to the OONI test.
- Must return a dict that has as keys the asset names and values the
- asset contents.
- If the test does not have any assets it should return an empty dict.
- """
-
- def end():
- """
- This can be called at any time to terminate the execution of all of
- these test instances.
-
- What this means is that no more test instances with new parameters will
- be created. A report will be written.
- """
diff --git a/ooni/bridget/utils/log.py b/ooni/bridget/utils/log.py
deleted file mode 100644
index eef50d8..0000000
--- a/ooni/bridget/utils/log.py
+++ /dev/null
@@ -1,98 +0,0 @@
-"""
-OONI logging facility.
-"""
-from sys import stderr, stdout
-
-from twisted.python import log, util
-from twisted.python.failure import Failure
-
-def _get_log_level(level):
- english = ['debug', 'info', 'warn', 'err', 'crit']
-
- levels = dict(zip(range(len(english)), english))
- number = dict(zip(english, range(len(english))))
-
- if not level:
- return number['info']
- else:
- ve = "Unknown log level: %s\n" % level
- ve += "Allowed levels: %s\n" % [word for word in english]
-
- if type(level) is int:
- if 0 <= level <= 4:
- return level
- elif type(level) is str:
- if number.has_key(level.lower()):
- return number[level]
- else:
- raise ValueError, ve
- else:
- raise ValueError, ve
-
-class OONITestFailure(Failure):
- """
- For handling Exceptions asynchronously.
-
- Can be given an Exception as an argument, else will use the
- most recent Exception from the current stack frame.
- """
- def __init__(self, exception=None, _type=None,
- _traceback=None, _capture=False):
- Failure.__init__(self, exc_type=_type,
- exc_tb=_traceback, captureVars=_capture)
-
-class OONILogObserver(log.FileLogObserver):
- """
- Supports logging level verbosity.
- """
- def __init__(self, logfile, verb=None):
- log.FileLogObserver.__init__(self, logfile)
- self.level = _get_log_level(verb) if verb is not None else 1
- assert type(self.level) is int
-
- def emit(self, eventDict):
- if 'logLevel' in eventDict:
- msgLvl = _get_log_level(eventDict['logLevel'])
- assert type(msgLvl) is int
- ## only log our level and higher
- if self.level <= msgLvl:
- text = log.textFromEventDict(eventDict)
- else:
- text = None
- else:
- text = log.textFromEventDict(eventDict)
-
- if text is None:
- return
-
- timeStr = self.formatTime(eventDict['time'])
- fmtDict = {'system': eventDict['system'],
- 'text': text.replace('\n','\n\t')}
- msgStr = log._safeFormat("[%(system)s] %(text)s\n", fmtDict)
-
- util.untilConcludes(self.write, timeStr + " " + msgStr)
- util.untilConcludes(self.flush)
-
-def start(logfile=None, verbosity=None):
- if log.defaultObserver:
- verbosity = _get_log_level(verbosity)
-
- ## Always log to file, keep level at info
- file = open(logfile, 'a') if logfile else stderr
- OONILogObserver(file, "info").start()
-
- log.msg("Starting OONI...")
-
-def debug(message, level="debug", **kw):
- print "[%s] %s" % (level, message)
- ## If we want debug messages in the logfile:
- #log.msg(message, logLevel=level, **kw)
-
-def msg(message, level="info", **kw):
- log.msg(message, logLevel=level, **kw)
-
-def err(message, level="err", **kw):
-    log.err(message, logLevel=level, **kw)
-
-def fail(message, exception, level="crit", **kw):
- log.failure(message, OONITestFailure(exception, **kw), logLevel=level)
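
The level lookup in _get_log_level() above is just the five level names zipped
against 0..4 in both directions; a tiny sketch of that mapping:

## Map log level names to numbers and back.
english = ['debug', 'info', 'warn', 'err', 'crit']
levels = dict(zip(range(len(english)), english))   ## {0: 'debug', ...}
number = dict(zip(english, range(len(english))))   ## {'debug': 0, ...}

print number['warn']    ## -> 2
print levels[3]         ## -> 'err'
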
diff --git a/ooni/bridget/utils/nodes.py b/ooni/bridget/utils/nodes.py
deleted file mode 100644
index 155f183..0000000
--- a/ooni/bridget/utils/nodes.py
+++ /dev/null
@@ -1,176 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: UTF-8
-"""
- nodes
- *****
-
- This contains all the code related to Nodes
- both network and code execution.
-
- :copyright: (c) 2012 by Arturo Filastò, Isis Lovecruft
- :license: see LICENSE for more details.
-
-"""
-
-import os
-from binascii import hexlify
-
-try:
- import paramiko
-except:
- print "Error: module paramiko is not installed."
-from pprint import pprint
-import sys
-import socks
-import xmlrpclib
-
-class Node(object):
- def __init__(self, address, port):
- self.address = address
- self.port = port
-
-class LocalNode(object):
- def __init__(self):
- pass
-
-"""
-[]: node = NetworkNode("192.168.0.112", 5555, "SOCKS5")
-[]: node_socket = node.wrap_socket()
-"""
-class NetworkNode(Node):
- def __init__(self, address, port, node_type="SOCKS5", auth_creds=None):
- self.node = Node(address,port)
-
- # XXX support for multiple types
- # node type (SOCKS proxy, HTTP proxy, GRE tunnel, ...)
- self.node_type = node_type
- # type-specific authentication credentials
- self.auth_creds = auth_creds
-
-    def _get_socksipy_socket(self, proxy_type):
- import socks
- s = socks.socksocket()
- # auth_creds[0] -> username
- # auth_creds[1] -> password
- s.setproxy(proxy_type, self.node.address, self.node.port,
- self.auth_creds[0], self.auth_creds[1])
- return s
-
- def _get_socket_wrapper(self):
- if (self.node_type.startswith("SOCKS")): # SOCKS proxies
- if (self.node_type != "SOCKS5"):
- proxy_type = socks.PROXY_TYPE_SOCKS5
- elif (self.node_type != "SOCKS4"):
- proxy_type = socks.PROXY_TYPE_SOCKS4
- else:
- print "We don't know this proxy type."
- sys.exit(1)
-
- return self._get_socksipy_socket(proxy_type)
- elif (self.node_type == "HTTP"): # HTTP proxies
-            return self._get_socksipy_socket(socks.PROXY_TYPE_HTTP)
- else: # Unknown proxies
- print "We don't know this proxy type."
- sys.exit(1)
-
- def wrap_socket(self):
- return self._get_socket_wrapper()
-
-class CodeExecNode(Node):
- def __init__(self, address, port, node_type, auth_creds):
- self.node = Node(address,port)
-
- # node type (SSH proxy, etc.)
- self.node_type = node_type
- # type-specific authentication credentials
- self.auth_creds = auth_creds
-
- def add_unit(self):
- pass
-
- def get_status(self):
- pass
-
-class PlanetLab(CodeExecNode):
- def __init__(self, address, auth_creds, ooni):
- self.auth_creds = auth_creds
-
- self.config = ooni.utils.config
- self.logger = ooni.logger
- self.name = "PlanetLab"
-
- def _api_auth(self):
- api_server = xmlrpclib.ServerProxy('https://www.planet-lab.org/PLCAPI/')
- auth = {}
- ## should be changed to separate node.conf file
- auth['Username'] = self.config.main.pl_username
- auth['AuthString'] = self.config.main.pl_password
- auth['AuthMethod'] = "password"
- authorized = api_server.AuthCheck(auth)
-
- if authorized:
- print 'We are authorized!'
- return auth
- else:
- print 'Authorization failed. Please check your settings for pl_username and pl_password in the ooni-probe.conf file.'
-
- def _search_for_nodes(self, node_filter=None):
- api_server = xmlrpclib.ServerProxy('https://www.planet-lab.org/PLCAPI/', allow_none=True)
- node_filter = {'hostname': '*.cert.org.cn'}
- return_fields = ['hostname', 'site_id']
-        all_nodes = api_server.GetNodes(self._api_auth(), node_filter, return_fields)
- pprint(all_nodes)
- return all_nodes
-
- def _add_nodes_to_slice(self):
- api_server = xmlrpclib.ServerProxy('https://www.planet-lab.org/PLCAPI/', allow_none=True)
-        all_nodes = self._search_for_nodes()
-        for node in all_nodes:
-            api_server.AddNode(self._api_auth(), node['site_id'], all_nodes)
- print 'Adding nodes %s' % node['hostname']
-
- def _auth_login(slicename, machinename):
- """Attempt to authenticate to the given PL node, slicename and
- machinename, using any of the private keys in ~/.ssh/ """
-
- agent = paramiko.Agent()
- agent_keys = agent.get_keys()
- if len(agent_keys) == 0:
- return
-
- for key in agent_keys:
- print 'Trying ssh-agent key %s' % hexlify(key.get_fingerprint()),
- try:
- paramiko.transport.auth_publickey(machinename, slicename)
- print 'Public key authentication to PlanetLab node %s successful.' % machinename,
- return
- except paramiko.SSHException:
- print 'Public key authentication to PlanetLab node %s failed.' % machinename,
-
- def _get_command():
- pass
-
- def ssh_and_run_(slicename, machinename, command):
- """Attempt to make a standard OpenSSH client to PL node, and run
- commands from a .conf file."""
-
- ## needs a way to specify 'ssh -l <slicename> <machinename>'
- ## with public key authentication.
-
- command = PlanetLab.get_command()
-
- client = paramiko.SSHClient()
- client.load_system_host_keys()
- client.connect(machinename)
-
- stdin, stdout, stderr = client.exec_command(command)
-
- def send_files_to_node(directory, files):
- """Attempt to rsync a tree to the PL node."""
- pass
-
- def add_unit():
- pass
-
- def get_status():
- pass
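
The socket wrapping that NetworkNode does relies on SocksiPy. A minimal sketch,
assuming the socks module is installed and a SOCKS5 proxy is actually listening
at the address from the docstring example above (purely illustrative):

## Route a plain TCP connection through a SOCKS5 proxy with SocksiPy.
import socks

s = socks.socksocket()
s.setproxy(socks.PROXY_TYPE_SOCKS5, "192.168.0.112", 5555)
s.connect(("check.torproject.org", 80))
s.sendall("HEAD / HTTP/1.0\r\n\r\n")
print s.recv(1024)
s.close()
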
diff --git a/ooni/bridget/utils/onion.py b/ooni/bridget/utils/onion.py
deleted file mode 100644
index 9d4cae7..0000000
--- a/ooni/bridget/utils/onion.py
+++ /dev/null
@@ -1,686 +0,0 @@
-#
-# onion.py
-# ----------
-# Utilities for working with Tor.
-#
-# This code is largely taken from txtorcon and its documentation, and as such
-# any and all credit should go to Meejah. Minor adjustments have been made to
-# use OONI's logging system, and to build custom circuits without actually
-# attaching streams.
-#
-# :author: Meejah, Isis Lovecruft
-# :license: see included LICENSE file
-# :copyright: copyright (c) 2012 The Tor Project, Inc.
-# :version: 0.1.0-alpha
-#
-# XXX TODO add report keys for onion methods
-
-import random
-import sys
-
-from twisted.internet import defer
-from zope.interface import implements
-
-from ooni.lib.txtorcon import CircuitListenerMixin, IStreamAttacher
-from ooni.lib.txtorcon import TorState, TorConfig
-from ooni.utils import log
-from ooni.utils.timer import deferred_timeout, TimeoutError
-
-def parse_data_dir(data_dir):
- """
-    Parse a string that has been given as a DataDirectory and determine
- its absolute path on the filesystem.
-
- :param data_dir:
- A directory for Tor's DataDirectory, to be parsed.
- :return:
- The absolute path of :param:data_dir.
- """
- from os import path, getcwd
- import sys
-
- try:
- assert isinstance(data_dir, str), \
- "Parameter type(data_dir) must be str"
- except AssertionError, ae:
- log.err(ae)
-
- if data_dir.startswith('~'):
- data_dir = path.expanduser(data_dir)
- elif data_dir.startswith('/'):
- data_dir = path.join(getcwd(), data_dir)
- elif data_dir.startswith('./'):
- data_dir = path.abspath(data_dir)
- else:
- data_dir = path.join(getcwd(), data_dir)
-
- try:
- assert path.isdir(data_dir), "Could not find %s" % data_dir
- except AssertionError, ae:
- log.err(ae)
- sys.exit()
- else:
- return data_dir
-
-def write_torrc(conf, data_dir=None):
- """
- Create a torrc in our data_dir. If we don't yet have a data_dir, create a
- temporary one. Any temporary files or folders are added to delete_list.
-
- :param conf:
- A :class:`ooni.lib.txtorcon.TorConfig` object, with all configuration
- values saved.
- :param data_dir:
- The Tor DataDirectory to use.
- :return: torrc, data_dir, delete_list
- """
- try:
- from os import write, close
- from tempfile import mkstemp, mkdtemp
- except ImportError, ie:
- log.err(ie)
-
- delete_list = []
-
- if data_dir is None:
- data_dir = mkdtemp(prefix='bridget-tordata')
- delete_list.append(data_dir)
- conf.DataDirectory = data_dir
-
- (fd, torrc) = mkstemp(dir=data_dir)
- delete_list.append(torrc)
- write(fd, conf.create_torrc())
- close(fd)
-
- return torrc, data_dir, delete_list
-
-def delete_files_or_dirs(delete_list):
- """
- Given a list of files or directories to delete, delete all and suppress
- all errors.
-
- :param delete_list:
- A list of files or directories to delete.
- """
- try:
- from os import unlink
- from shutil import rmtree
- except ImportError, ie:
- log.err(ie)
-
- for temp in delete_list:
- try:
- unlink(temp)
- except OSError:
- rmtree(temp, ignore_errors=True)
-
-def remove_node_from_list(node, list):
- for item in list: ## bridges don't match completely
- if item.startswith(node): ## due to the :<port>.
- try:
- log.msg("Removing %s because it is a public relay" % node)
- list.remove(item)
- except ValueError, ve:
- log.err(ve)
-
-def remove_public_relays(state, bridges):
- """
- Remove bridges from our bridge list which are also listed as public
- relays. This must be called after Tor has fully bootstrapped and we have a
- :class:`ooni.lib.txtorcon.TorState` with the
- :attr:`ooni.lib.txtorcon.TorState.routers` attribute assigned.
-
- XXX Does state.router.values() have all of the relays in the consensus, or
- just the ones we know about so far?
-
- XXX FIXME: There is a problem in that Tor needs a Bridge line to already be
- configured in order to bootstrap. However, after bootstrapping, we grab the
- microdescriptors of all the relays and check if any of our bridges are
- listed as public relays. Because of this, the first bridge does not get
- checked for being a relay.
- """
- IPs = map(lambda addr: addr.split(':',1)[0], bridges['all'])
- both = set(state.routers.values()).intersection(IPs)
-
- if len(both) > 0:
- try:
-            updated = map(lambda node: remove_node_from_list(node, bridges['all']), both)
- log.debug("Bridges in both: %s" % both)
- log.debug("Updated = %s" % updated)
- #if not updated:
- # defer.returnValue(state)
- #else:
- # defer.returnValue(state)
- return state
- except Exception, e:
- log.err("Removing public relays %s from bridge list failed:\n%s"
- % (both, e))
-
-def setup_done(proto):
- log.msg("Setup Complete")
- state = TorState(proto.tor_protocol)
- state.post_bootstrap.addCallback(state_complete)
- state.post_bootstrap.addErrback(setup_fail)
-
-def setup_fail(proto):
- log.msg("Setup Failed:\n%s" % proto)
- return proto
- #reactor.stop()
-
-def state_complete(state):
- """Called when we've got a TorState."""
- log.msg("We've completely booted up a Tor version %s at PID %d"
- % (state.protocol.version, state.tor_pid))
- log.msg("This Tor has the following %d Circuits:"
- % len(state.circuits))
- for circ in state.circuits.values():
- log.msg("%s" % circ)
- return state
-
-def updates(_progress, _tag, _summary):
- """Log updates on the Tor bootstrapping process."""
- log.msg("%d%%: %s" % (_progress, _summary))
-
-def bootstrap(ctrl):
- """
- Bootstrap Tor from an instance of
- :class:`ooni.lib.txtorcon.TorControlProtocol`.
- """
- conf = TorConfig(ctrl)
- conf.post_bootstrap.addCallback(setup_done).addErrback(setup_fail)
- log.msg("Tor process connected, bootstrapping ...")
-
-def start_tor(reactor, config, control_port, tor_binary, data_dir,
- report=None, progress=updates,
- process_cb=None, process_eb=None):
- """
- Use a txtorcon.TorConfig() instance, config, to write a torrc to a
- tempfile in our DataDirectory, data_dir. If data_dir is None, a temp
- directory will be created. Finally, create a TCP4ClientEndpoint at our
- control_port, and connect it to our reactor and a spawned Tor
- process. Compare with :meth:`txtorcon.launch_tor` for differences.
-
- :param reactor:
- An instance of class:`twisted.internet.reactor`.
- :param config:
- An instance of class:`txtorcon.TorConfig` with all torrc options
- already configured. ivar:`config.ControlPort`,
- ivar:`config.SocksPort`, ivar:`config.CookieAuthentication`, should
- already be set, as well as ivar:`config.UseBridges` and
- ivar:`config.Bridge` if bridges are to be used.
- ivar:`txtorcon.DataDirectory` does not need to be set.
- :param control_port:
- The port number to use for Tor's ControlPort.
- :param tor_binary:
- The full path to the Tor binary to use.
- :param data_dir:
- The directory to use as Tor's DataDirectory.
- :param report:
- The class:`ooni.plugoo.reports.Report` instance.
- :param progress:
- A non-blocking function to handle bootstrapping updates, which takes
- three parameters: _progress, _tag, and _summary.
- :param process_cb:
- The function to callback to after
- class:`ooni.lib.txtorcon.TorProcessProtocol` returns with the fully
- bootstrapped Tor process.
- :param process_eb:
- The function to errback to if
- class:`ooni.lib.txtorcon.TorProcessProtocol` fails.
- :return:
- The result of the callback of a
- class:`ooni.lib.txtorcon.TorProcessProtocol` which callbacks with a
- class:`txtorcon.TorControlProtocol` as .protocol.
- """
- try:
- from functools import partial
- from twisted.internet.endpoints import TCP4ClientEndpoint
- from ooni.lib.txtorcon import TorProtocolFactory
- from ooni.lib.txtorcon import TorProcessProtocol
- except ImportError, ie:
- log.err(ie)
-
- ## TODO: add option to specify an already existing torrc, which
- ## will require prior parsing to enforce necessary lines
- (torrc, data_dir, to_delete) = write_torrc(config, data_dir)
-
- log.msg("Starting Tor ...")
- log.msg("Using the following as our torrc:\n%s" % config.create_torrc())
- if report is None:
- report = {'torrc': config.create_torrc()}
- else:
- report.update({'torrc': config.create_torrc()})
-
- end_point = TCP4ClientEndpoint(reactor, 'localhost', control_port)
- connection_creator = partial(end_point.connect, TorProtocolFactory())
- process_protocol = TorProcessProtocol(connection_creator, progress)
- process_protocol.to_delete = to_delete
-
- if process_cb is not None and process_eb is not None:
- process_protocol.connected_cb.addCallbacks(process_cb, process_eb)
-
- reactor.addSystemEventTrigger('before', 'shutdown',
- partial(delete_files_or_dirs, to_delete))
- try:
- transport = reactor.spawnProcess(process_protocol,
- tor_binary,
- args=(tor_binary,'-f',torrc),
- env={'HOME': data_dir},
- path=data_dir)
- transport.closeStdin()
- except RuntimeError, e:
- log.err("Starting Tor failed:")
- process_protocol.connected_cb.errback(e)
- except NotImplementedError, e:
- url = "http://starship.python.net/crew/mhammond/win32/Downloads.html"
- log.msg("Running bridget on Windows requires pywin32: %s" % url)
- process_protocol.connected_cb.errback(e)
-
- return process_protocol.connected_cb
-
-@defer.inlineCallbacks
-def start_tor_filter_nodes(reactor, config, control_port, tor_binary,
- data_dir, bridges):
- """
- Bootstrap a Tor process and return a fully-setup
- :class:`ooni.lib.txtorcon.TorState`. Then search for our bridges
- to test in the list of known public relays,
- :ivar:`ooni.lib.txtorcon.TorState.routers`, and remove any bridges
- which are known public relays.
-
- :param reactor:
- The :class:`twisted.internet.reactor`.
- :param config:
- An instance of :class:`ooni.lib.txtorcon.TorConfig`.
- :param control_port:
- The port to use for Tor's ControlPort. If already configured in
- the TorConfig instance, this can be given as
- TorConfig.config.ControlPort.
- :param tor_binary:
- The full path to the Tor binary to execute.
- :param data_dir:
- The full path to the directory to use as Tor's DataDirectory.
- :param bridges:
- A dictionary which has a key 'all' which is a list of bridges to
- test connecting to, e.g.:
- bridges['all'] = ['1.1.1.1:443', '22.22.22.22:9001']
- :return:
- A fully initialized :class:`ooni.lib.txtorcon.TorState`.
- """
- setup = yield start_tor(reactor, config, control_port,
- tor_binary, data_dir,
- process_cb=setup_done, process_eb=setup_fail)
- filter_nodes = yield remove_public_relays(setup, bridges)
- defer.returnValue(filter_nodes)
-
-@defer.inlineCallbacks
-def start_tor_with_timer(reactor, config, control_port, tor_binary, data_dir,
- bridges, timeout):
- """
- Start bootstrapping a Tor process wrapped with an instance of the class
- decorator :func:`ooni.utils.timer.deferred_timeout` and complete callbacks
- to either :func:`setup_done` or :func:`setup_fail`. Return a fully-setup
- :class:`ooni.lib.txtorcon.TorState`. Then search for our bridges to test
- in the list of known public relays,
- :ivar:`ooni.lib.txtorcon.TorState.routers`, and remove any bridges which
- are listed as known public relays.
-
- :param reactor:
- The :class:`twisted.internet.reactor`.
- :param config:
- An instance of :class:`ooni.lib.txtorcon.TorConfig`.
- :param control_port:
- The port to use for Tor's ControlPort. If already configured in
- the TorConfig instance, this can be given as
- TorConfig.config.ControlPort.
- :param tor_binary:
- The full path to the Tor binary to execute.
- :param data_dir:
- The full path to the directory to use as Tor's DataDirectory.
- :param bridges:
- A dictionary which has a key 'all' which is a list of bridges to
- test connecting to, e.g.:
- bridges['all'] = ['1.1.1.1:443', '22.22.22.22:9001']
- :param timeout:
- The number of seconds to attempt to bootstrap the Tor process before
- raising a :class:`ooni.utils.timer.TimeoutError`.
- :return:
- If the timeout limit is not exceeded, return a fully initialized
- :class:`ooni.lib.txtorcon.TorState`, else return None.
- """
- error_msg = "Bootstrapping has exceeded the timeout limit..."
- with_timeout = deferred_timeout(timeout, e=error_msg)(start_tor)
- try:
- setup = yield with_timeout(reactor, config, control_port, tor_binary,
- data_dir, process_cb=setup_done,
- process_eb=setup_fail)
- except TimeoutError, te:
- log.err(te)
- defer.returnValue(None)
- #except Exception, e:
- # log.err(e)
- # defer.returnValue(None)
- else:
- state = yield remove_public_relays(setup, bridges)
- defer.returnValue(state)
-
-@defer.inlineCallbacks
-def start_tor_filter_nodes_with_timer(reactor, config, control_port,
- tor_binary, data_dir, bridges, timeout):
- """
- Start bootstrapping a Tor process wrapped with an instance of the class
- decorator :func:`ooni.utils.timer.deferred_timeout` and complete callbacks
- to either :func:`setup_done` or :func:`setup_fail`. Then, filter our list
- of bridges to remove known public relays by calling back to
- :func:`remove_public_relays`. Return a fully-setup
- :class:`ooni.lib.txtorcon.TorState`. Then search for our bridges to test
- in the list of known public relays,
- :ivar:`ooni.lib.txtorcon.TorState.routers`, and remove any bridges which
- are listed as known public relays.
-
- :param reactor:
- The :class:`twisted.internet.reactor`.
- :param config:
- An instance of :class:`ooni.lib.txtorcon.TorConfig`.
- :param control_port:
- The port to use for Tor's ControlPort. If already configured in
- the TorConfig instance, this can be given as
- TorConfig.config.ControlPort.
- :param tor_binary:
- The full path to the Tor binary to execute.
- :param data_dir:
- The full path to the directory to use as Tor's DataDirectory.
- :param bridges:
- A dictionary which has a key 'all' which is a list of bridges to
- test connecting to, e.g.:
- bridges['all'] = ['1.1.1.1:443', '22.22.22.22:9001']
- :param timeout:
- The number of seconds to attempt to bootstrap the Tor process before
- raising a :class:`ooni.utils.timer.TimeoutError`.
- :return:
- If the timeout limit is not exceeded, return a fully initialized
- :class:`ooni.lib.txtorcon.TorState`, else return None.
- """
- error_msg = "Bootstrapping has exceeded the timeout limit..."
- with_timeout = deferred_timeout(timeout, e=error_msg)(start_tor_filter_nodes)
- try:
- state = yield with_timeout(reactor, config, control_port,
- tor_binary, data_dir, bridges)
- except TimeoutError, te:
- log.err(te)
- defer.returnValue(None)
- #except Exception, e:
- # log.err(e)
- # defer.returnValue(None)
- else:
- defer.returnValue(state)
-
-class CustomCircuit(CircuitListenerMixin):
- """
- Utility class for controlling circuit building. See
- 'attach_streams_by_country.py' in the txtorcon documentation.
-
- :param state:
- A fully bootstrapped instance of :class:`ooni.lib.txtorcon.TorState`.
- :param relays:
- A dictionary containing a key 'all', which is a list of relays to
- test connecting to.
- :ivar waiting_circuits:
- The list of circuits which we are waiting to attach to. You shouldn't
- need to touch this.
- """
- implements(IStreamAttacher)
-
- def __init__(self, state, relays=None):
- self.state = state
- self.waiting_circuits = []
- self.relays = relays
-
- def waiting_on(self, circuit):
- """
- Whether or not we are waiting on the given circuit before attaching to
- it.
-
- :param circuit:
- An item from :ivar:`ooni.lib.txtorcon.TorState.circuits`.
- :return:
- True if we are waiting on the circuit, False if not waiting.
- """
- for (circid, d) in self.waiting_circuits:
- if circuit.id == circid:
- return True
- return False
-
- def circuit_extend(self, circuit, router):
- "ICircuitListener"
- if circuit.purpose != 'GENERAL':
- return
- if self.waiting_on(circuit):
- log.msg("Circuit %d (%s)" % (circuit.id, router.id_hex))
-
- def circuit_built(self, circuit):
- "ICircuitListener"
- if circuit.purpose != 'GENERAL':
- return
- log.msg("Circuit %s built ..." % circuit.id)
- log.msg("Full path of %s: %s" % (circuit.id, circuit.path))
- for (circid, d) in self.waiting_circuits:
- if circid == circuit.id:
- self.waiting_circuits.remove((circid, d))
- d.callback(circuit)
-
- def circuit_failed(self, circuit, reason):
- """
- If building a circuit has failed, try to remove it from our list of
- :ivar:`waiting_circuits`, else request to build it.
-
- :param circuit:
- An item from :ivar:`ooni.lib.txtorcon.TorState.circuits`.
- :param reason:
- A :class:`twisted.python.fail.Failure` instance.
- :return:
- None
- """
- if self.waiting_on(circuit):
- log.msg("Circuit %s failed for reason %s" % (circuit.id, reason))
- circid, d = None, None
- for c in self.waiting_circuits:
- if c[0] == circuit.id:
- circid, d = c
- if d is None:
- raise Exception("Expected to find circuit.")
-
- self.waiting_circuits.remove((circid, d))
- log.msg("Trying to build a circuit for %s" % circid)
- self.request_circuit_build(d)
-
- def check_circuit_route(self, router):
- """
- Check if a relay is a hop in one of our already built circuits.
-
- :param router:
- An item from the list
- :func:`ooni.lib.txtorcon.TorState.routers.values()`.
- """
- for circ in self.state.circuits.values():
- if router in circ.path:
- #router.update() ## XXX can i use without args? no.
- TorInfo.dump(self)
-
- def request_circuit_build(self, deferred, path=None):
- """
- Request a custom circuit.
-
- :param deferred:
- A :class:`twisted.internet.defer.Deferred` for this circuit.
- :param path:
- A list of router ids to build a circuit from. The length of this
- list must be at least three.
- """
- if path is None:
-
- pick = self.relays['all'].pop
- n = self.state.entry_guards.values()
- choose = random.choice
-
- first, middle, last = (None for i in range(3))
-
- if self.relays['remaining']() >= 3:
- first, middle, last = (pick() for i in range(3))
- elif self.relays['remaining']() < 3:
- first = choose(n)
- middle = pick()
-                if self.relays['remaining']() == 2:
-                    middle, last = (pick() for i in range(2))
-                elif self.relays['remaining']() == 1:
- middle = pick()
- last = choose(n)
- else:
- log.msg("Qu'est-que fuque?")
- else:
- middle, last = (random.choice(self.state.routers.values())
- for i in range(2))
-
- path = [first, middle, last]
-
- else:
- assert isinstance(path, list), \
- "Circuit path must be a list of relays!"
- assert len(path) >= 3, \
- "Circuit path must be at least three hops!"
-
- log.msg("Requesting a circuit: %s"
- % '->'.join(map(lambda node: node, path)))
-
- class AppendWaiting:
- def __init__(self, attacher, deferred):
- self.attacher = attacher
- self.d = deferred
- def __call__(self, circ):
- """
- Return from build_circuit is a Circuit, however,
- we want to wait until it is built before we can
- issue an attach on it and callback to the Deferred
- we issue here.
- """
- log.msg("Circuit %s is in progress ..." % circ.id)
- self.attacher.waiting_circuits.append((circ.id, self.d))
-
- return self.state.build_circuit(path).addCallback(
- AppendWaiting(self, deferred)).addErrback(
- log.err)
-
-class TxtorconImportError(ImportError):
- """
-    Raised when ooni.lib.txtorcon cannot be imported. Checks our current
- working directory and the path given to see if txtorcon has been
- initialized via /ooni/lib/Makefile.
- """
- from os import getcwd, path
-
- cwd, tx = getcwd(), 'lib/txtorcon/torconfig.py'
- try:
- log.msg("Unable to import from ooni.lib.txtorcon")
- if cwd.endswith('ooni'):
- check = path.join(cwd, tx)
- elif cwd.endswith('utils'):
- check = path.join(cwd, '../'+tx)
- else:
- check = path.join(cwd, 'ooni/'+tx)
- assert path.isfile(check)
- except:
- log.msg("Error: Some OONI libraries are missing!")
- log.msg("Please go to /ooni/lib/ and do \"make all\"")
-
-class PTNoBridgesException(Exception):
- """Raised when a pluggable transport is specified, but not bridges."""
- def __init__(self):
- log.msg("Pluggable transport requires the bridges option")
- return sys.exit()
-
-class PTNotFoundException(Exception):
- def __init__(self, transport_type):
- m = "Pluggable Transport type %s was unaccounted " % transport_type
- m += "for, please contact isis(at)torproject(dot)org and it will "
- m += "get included."
- log.msg("%s" % m)
- return sys.exit()
-
-@defer.inlineCallbacks
-def __start_tor_with_timer__(reactor, config, control_port, tor_binary,
- data_dir, bridges=None, relays=None, timeout=None,
- retry=None):
- """
- A wrapper for :func:`start_tor` which wraps the bootstrapping of a Tor
- process and its connection to a reactor with a
- :class:`twisted.internet.defer.Deferred` class decorator utility,
- :func:`ooni.utils.timer.deferred_timeout`, and a mechanism for resets.
-
- ## XXX fill me in
- """
- raise NotImplementedError
-
- class RetryException(Exception):
- pass
-
- import sys
- from ooni.utils.timer import deferred_timeout, TimeoutError
-
- def __make_var__(old, default, _type):
- if old is not None:
- assert isinstance(old, _type)
- new = old
- else:
- new = default
- return new
-
- reactor = reactor
- timeout = __make_var__(timeout, 120, int)
- retry = __make_var__(retry, 1, int)
-
- with_timeout = deferred_timeout(timeout)(start_tor)
-
- @defer.inlineCallbacks
- def __start_tor__(rc=reactor, cf=config, cp=control_port, tb=tor_binary,
- dd=data_dir, br=bridges, rl=relays, cb=setup_done,
- eb=setup_fail, af=remove_public_relays, retry=retry):
- try:
- setup = yield with_timeout(rc,cf,cp,tb,dd)
- except TimeoutError:
- retry -= 1
- defer.returnValue(retry)
- else:
- if setup.callback:
- setup = yield cb(setup)
- elif setup.errback:
- setup = yield eb(setup)
- else:
- setup = setup
-
- if br is not None:
- state = af(setup,br)
- else:
- state = setup
- defer.returnValue(state)
-
- @defer.inlineCallbacks
- def __try_until__(tries):
- result = yield __start_tor__()
- try:
- assert isinstance(result, int)
- except AssertionError:
- defer.returnValue(result)
- else:
- if result >= 0:
- tried = yield __try_until__(result)
- defer.returnValue(tried)
- else:
- raise RetryException
- try:
- tried = yield __try_until__(retry)
- except RetryException:
- log.msg("All retry attempts to bootstrap Tor have timed out.")
- log.msg("Exiting ...")
- defer.returnValue(sys.exit())
- else:
- defer.returnValue(tried)
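The removed __start_tor_with_timer__ above wraps a single start_tor attempt in a timeout and retries the bootstrap a fixed number of times before giving up. A minimal sketch of that pattern with plain Twisted primitives, assuming start_tor() returns a Deferred; bootstrap_with_retry and DEFAULT_TIMEOUT are illustrative names, not part of ooni:

    from twisted.internet import defer, reactor

    DEFAULT_TIMEOUT = 120   # seconds; illustrative default, mirroring the removed code

    def with_timeout(deferred, seconds, clock=reactor):
        # Cancel the deferred if it has not fired within `seconds`.
        delayed = clock.callLater(seconds, deferred.cancel)
        def cleanup(result):
            if delayed.active():
                delayed.cancel()
            return result
        return deferred.addBoth(cleanup)

    @defer.inlineCallbacks
    def bootstrap_with_retry(start_tor, retries=1, timeout=DEFAULT_TIMEOUT):
        # Try the bootstrap, retrying once per timeout, as the removed helper did.
        for attempt in range(retries + 1):
            try:
                state = yield with_timeout(start_tor(), timeout)
            except defer.CancelledError:
                continue                    # timed out; try again
            defer.returnValue(state)
        raise RuntimeError("All retry attempts to bootstrap Tor have timed out.")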
diff --git a/ooni/bridget/utils/reports.py b/ooni/bridget/utils/reports.py
deleted file mode 100644
index ae67b13..0000000
--- a/ooni/bridget/utils/reports.py
+++ /dev/null
@@ -1,144 +0,0 @@
-from __future__ import with_statement
-
-import os
-import yaml
-
-import itertools
-from ooni.utils import log, date, net
-
-class Report:
- """This is the ooni-probe reporting mechanism. It allows
- reporting to multiple destinations and file formats.
-
- :scp the string of <host>:<port> of an ssh server
-
- :yaml the filename of a the yaml file to write
-
- :file the filename of a simple txt file to write
-
- :tcp the <host>:<port> of a TCP server that will just listen for
- inbound connection and accept a stream of data (think of it
- as a `nc -l -p <port> > filename.txt`)
- """
- def __init__(self, testname=None, file="report.log",
- scp=None,
- tcp=None):
-
- self.testname = testname
- self.file = file
- self.tcp = tcp
- self.scp = scp
- #self.config = ooni.config.report
-
- #if self.config.timestamp:
- # tmp = self.file.split('.')
- # self.file = '.'.join(tmp[:-1]) + "-" + \
- # datetime.now().isoformat('-') + '.' + \
- # tmp[-1]
- # print self.file
-
- self.scp = None
- self.write_header()
-
- def write_header(self):
- pretty_date = date.pretty_date()
- header = "# OONI Probe Report for Test %s\n" % self.testname
- header += "# %s\n\n" % pretty_date
- self._write_to_report(header)
- # XXX replace this with something proper
- address = net.getClientAddress()
- test_details = {'start_time': str(date.now()),
- 'asn': address['asn'],
- 'test_name': self.testname,
- 'addr': address['ip']}
- self(test_details)
-
- def _write_to_report(self, dump):
- reports = []
-
- if self.file:
- reports.append("file")
-
- if self.tcp:
- reports.append("tcp")
-
- if self.scp:
- reports.append("scp")
-
- #XXX make this non blocking
- for report in reports:
- self.send_report(dump, report)
-
- def __call__(self, data):
- """
- This should be invoked every time you wish to write some
- data to the reporting system
- """
- dump = yaml.dump([data])
- self._write_to_report(dump)
-
- def file_report(self, data):
- """
- This reports to a file in YAML format
- """
- with open(self.file, 'a+') as f:
- f.write(data)
-
- def send_report(self, data, type):
- """
- This sends the report using the
- specified type.
- """
- #print "Reporting %s to %s" % (data, type)
- log.msg("Reporting to %s" % type)
- getattr(self, type+"_report").__call__(data)
-
-class NewReport(object):
- filename = 'report.log'
- startTime = None
- endTime = None
- testName = None
- ipAddr = None
- asnAddr = None
-
- def _open():
- self.fp = open(self.filename, 'a+')
-
- @property
- def header():
- pretty_date = date.pretty_date()
- report_header = "# OONI Probe Report for Test %s\n" % self.testName
- report_header += "# %s\n\n" % pretty_date
- test_details = {'start_time': self.startTime,
- 'asn': asnAddr,
- 'test_name': self.testName,
- 'addr': ipAddr}
- report_header += yaml.dump([test_details])
- return report_header
-
- def create():
- """
- Create a new report by writing it's header.
- """
- self.fp = open(self.filename, 'w+')
- self.fp.write(self.header)
-
- def exists():
- """
- Returns False if the file does not exists.
- """
- return os.path.exists(self.filename)
-
- def write(data):
- """
- Write a report to the file.
-
- :data: python data structure to be written to report.
- """
- if not self.exists():
- self.create()
- else:
- self._open()
- yaml_encoded_data = yaml.dump([data])
- self.fp.write(yaml_encoded_data)
- self.fp.close()
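The deleted Report class is driven by calling the instance with a dict; __call__ YAML-dumps the data and send_report() fans it out to every configured destination. A minimal usage sketch under those assumptions (the test name and payload are illustrative, and the import path is the one that existed before this commit):

    from ooni.bridget.utils.reports import Report   # path prior to this removal

    report = Report(testname="example_test", file="example-report.log")

    # Each call appends one YAML document to every active destination;
    # only the file backend is configured here, so this lands in example-report.log.
    report({'input': 'http://example.com/', 'status': 200, 'body_matches': True})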
diff --git a/ooni/bridget/utils/tests.py b/ooni/bridget/utils/tests.py
deleted file mode 100644
index ea4be0b..0000000
--- a/ooni/bridget/utils/tests.py
+++ /dev/null
@@ -1,141 +0,0 @@
-import os
-import yaml
-from zope.interface import Interface, Attribute
-
-import logging
-import itertools
-from twisted.internet import reactor, defer, threads
-## XXX why is this imported and not used?
-from twisted.python import failure
-
-from ooni.utils import log, date
-from ooni.plugoo import assets, work
-from ooni.plugoo.reports import Report
-from ooni.plugoo.interface import ITest
-
-class OONITest(object):
- """
- This is the base class for writing OONI Tests.
-
- It should be used in conjunction with the ITest Interface. It allows the
- developer to benefit from OONIs reporting system and command line argument
- parsing system.
- """
- name = "oonitest"
- # By default we set this to False, meaning that we don't block
- blocking = False
- reactor = reactor
- tool = False
- ended = False
-
- def __init__(self, local_options, global_options, report, ooninet=None,
- reactor=reactor):
- # These are the options that are read through the tests suboptions
- self.local_options = local_options
- # These are the options global to all of OONI
- self.global_options = global_options
- self.report = report
- #self.ooninet = ooninet
- self.reactor = reactor
- self.result = {}
- self.initialize()
- self.assets = self.load_assets()
-
- def initialize(self):
- """
- Override this method if you are interested in having some extra
- behavior when your test class is instantiated.
- """
- pass
-
- def load_assets(self):
- """
- This method should be overriden by the test writer to provide the
- logic for loading their assets.
- """
- return {}
-
- def __repr__(self):
- return "<OONITest %s %s %s>" % (self.local_options,
- self.global_options,
- self.assets)
-
- def end(self):
- """
- State that the current test should finish.
- """
- self.ended = True
-
- def finished(self, return_value):
- """
- The Test has finished running, we must now calculate the test runtime
- and add all time data to the report.
- """
- #self.ooninet.report(result)
- self.end_time = date.now()
- result = self.result
- result['start_time'] = str(self.start_time)
- result['end_time'] = str(self.end_time)
- result['run_time'] = str(self.end_time - self.start_time)
- result['return_value'] = return_value
- log.msg("FINISHED %s" % result)
- self.report(result)
- return result
-
- def _do_experiment(self, args):
- """
- A wrapper around the launch of experiment.
- If we are running a blocking test experiment will be run in a thread if
- not we expect it to return a Deferred.
-
- @param args: the asset line(s) that we are working on.
-
- returns a deferred.
- """
- if self.blocking:
- self.d = threads.deferToThread(self.experiment, args)
- else:
- self.d = self.experiment(args)
-
- self.d.addCallback(self.control, args)
- self.d.addCallback(self.finished)
- self.d.addErrback(self.finished)
- return self.d
-
- def control(self, result, args):
- """
- Run the control.
-
- @param result: what was returned by experiment.
-
- @param args: the asset(s) lines that we are working on.
- """
- log.msg("Doing control")
- return result
-
- def experiment(self, args):
- """
- Run the experiment. This sample implementation returns a deferred,
- making it a non-blocking test.
-
- @param args: the asset(s) lines that we are working on.
- """
- log.msg("Doing experiment")
- d = defer.Deferred()
- return d
-
- def startTest(self, args):
- """
- This method is invoked by the worker to start the test with one line of
- the asset file.
-
- @param args: the asset(s) lines that we are working on.
- """
- self.start_time = date.now()
-
- if self.shortName:
- log.msg("Starting test %s" % self.shortName)
- else:
- log.msg("Starting test %s" % self.__class__)
-
- return self._do_experiment(args)
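The legacy OONITest removed here expects subclasses to override experiment() (returning a Deferred unless blocking is True) and optionally control() and load_assets(); startTest() is what the worker invokes for each asset line. A minimal sketch of such a subclass, assuming the pre-removal import path and an illustrative test name:

    from twisted.internet import defer
    from ooni.bridget.utils.tests import OONITest   # path prior to this removal

    class ExampleTest(OONITest):
        name = "example"
        shortName = "example"
        blocking = False                  # experiment() returns a Deferred, so no thread

        def load_assets(self):
            # One key per input source; the worker iterates over their product.
            return {'host': ['example.com', 'torproject.org']}

        def experiment(self, args):
            # args holds one asset line, e.g. {'host': 'example.com'}
            self.result['host'] = args.get('host')
            return defer.succeed(True)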
diff --git a/ooni/bridget/utils/work.py b/ooni/bridget/utils/work.py
deleted file mode 100644
index c329c20..0000000
--- a/ooni/bridget/utils/work.py
+++ /dev/null
@@ -1,147 +0,0 @@
-# -*- coding: UTF-8
-"""
- work.py
- **********
-
- This contains all code related to generating
- Units of Work and processing it.
-
- :copyright: (c) 2012 by Arturo Filastò.
- :license: see LICENSE for more details.
-
-"""
-import itertools
-import yaml
-from datetime import datetime
-
-from zope.interface import Interface, Attribute
-
-from twisted.python import failure
-from twisted.internet import reactor, defer
-
-class Worker(object):
- """
- This is the core of OONI. It takes as input Work Units and
- runs them concurrently.
- """
- def __init__(self, maxconcurrent=10, reactor=reactor):
- """
- @param maxconcurrent: how many test instances should be run
- concurrently.
- """
- self.reactor = reactor
- self.maxconcurrent = maxconcurrent
- self._running = 0
- self._queued = []
-
- def _run(self, r):
- """
- Check if we should start another test because we are below maximum
- concurrency.
-
- This function is called every time a test finishes running.
-
- @param r: the return value of a previous test.
- """
- if self._running > 0:
- self._running -= 1
-
- if self._running < self.maxconcurrent and self._queued:
- workunit, d = self._queued.pop(0)
- asset, test, idx = workunit
- while test.ended and workunit:
- try:
- workunit, d = self._queued.pop(0)
- asset, test, idx = workunit
- except:
- workunit = None
-
- if not test.ended:
- self._running += 1
- actuald = test.startTest(asset).addBoth(self._run)
-
- if isinstance(r, failure.Failure):
- # XXX probably we should be doing something to retry test running
- r.trap()
-
- if self._running == 0 and not self._queued:
- self.reactor.stop()
-
- return r
-
- def push(self, workunit):
- """
- Add a test to the test queue and run it if we are not maxed out on
- concurrency.
-
- @param workunit: a tuple containing the (asset, test, idx), where asset
- is the line of the asset(s) we are working on, test
- is an instantiated test and idx is the index we are
- currently at.
- """
- if self._running < self.maxconcurrent:
- asset, test, idx = workunit
- if not test.ended:
- self._running += 1
- return test.startTest(asset).addBoth(self._run)
-
- d = defer.Deferred()
- self._queued.append((workunit, d))
- return d
-
-class WorkGenerator(object):
- """
- Factory responsible for creating units of work.
-
- This shall be run on the machine running OONI-cli. The returned WorkUnits
- can either be run locally or on a remote OONI Node or Network Node.
- """
- size = 10
-
- def __init__(self, test, arguments=None, start=None):
- self.Test = test
-
- if self.Test.assets and self.Test.assets.values()[0]:
- self.assetGenerator = itertools.product(*self.Test.assets.values())
- else:
- self.assetGenerator = None
-
- self.assetNames = self.Test.assets.keys()
-
- self.idx = 0
- self.end = False
- if start:
- self.skip(start)
-
- def __iter__(self):
- return self
-
- def skip(self, start):
- """
- Skip the first x number of lines of the asset.
-
- @param start: int how many items we should skip.
- """
- for j in xrange(0, start-1):
- for i in xrange(0, self.size):
- self.assetGenerator.next()
- self.idx += 1
-
- def next(self):
- if self.end:
- raise StopIteration
-
- if not self.assetGenerator:
- self.end = True
- return ({}, self.Test, self.idx)
-
- try:
- asset = self.assetGenerator.next()
- ret = {}
- for i, v in enumerate(asset):
- ret[self.assetNames[i]] = v
- except StopIteration:
- raise StopIteration
-
- self.idx += 1
- return (ret, self.Test, self.idx)
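Taken together, WorkGenerator turns a test's assets into (asset, test, idx) work units and Worker.push() runs them with at most maxconcurrent in flight, stopping the reactor once nothing is running or queued. A rough wiring sketch under those assumptions, reusing the illustrative ExampleTest sketched after the previous file and a no-op report callable:

    from twisted.internet import reactor
    from ooni.bridget.utils.work import Worker, WorkGenerator   # paths prior to this removal

    test = ExampleTest(local_options={}, global_options={}, report=lambda data: None)
    worker = Worker(maxconcurrent=5)

    # Each unit is (asset_line, test_instance, index); push() starts it right away
    # if we are below maxconcurrent, otherwise it is queued.
    for asset, test_obj, idx in WorkGenerator(test):
        worker.push((asset, test_obj, idx))

    reactor.run()   # Worker._run() stops the reactor when the queue drains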
commit 9abf23f9fd2f8ef5b05e9c99b9213917439231ab
Author: Hackerberry Finn <hackerberry(a)fi.nn>
Date: Sat Nov 3 22:01:37 2012 -0700
Fix some things that are broken
---
ooni/bridget/tests/echo.py | 2 +-
ooni/nettest.py | 6 +++---
ooni/templates/httpt.py | 4 ++--
ooni/templates/scapyt.py | 4 ++--
4 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/ooni/bridget/tests/echo.py b/ooni/bridget/tests/echo.py
index a0826b6..7f3217a 100644
--- a/ooni/bridget/tests/echo.py
+++ b/ooni/bridget/tests/echo.py
@@ -20,7 +20,7 @@ from pprint import pprint
from twisted.internet import reactor
from twisted.plugin import IPlugin
from twisted.python import usage
-from ooni.nettest import TestCase
+from ooni.nettest import NetTestCase
from ooni.utils import log, Storage
from ooni.utils.net import PermissionsError, IfaceError
diff --git a/ooni/nettest.py b/ooni/nettest.py
index 4ec1ccb..542b777 100644
--- a/ooni/nettest.py
+++ b/ooni/nettest.py
@@ -73,8 +73,8 @@ class NetTestAdaptor(unittest.TestCase):
@classmethod
def __new__(cls, *args, **kwargs):
super( NetTestAdaptor, cls ).__new__(*args, **kwargs)
- if hasattr(cls, setUpClass):
- setUpClass(cls)
+ if hasattr(cls, "setUpClass"):
+ super( NetTestAdaptor, cls ).setUpClass()
else:
log.debug("NetTestAdaptor: constructor could not find setUpClass")
@@ -184,7 +184,7 @@ class NetTestAdaptor(unittest.TestCase):
return _copy(new=args[1], alt=args[2])
elif kwargs:
return _copy(kwargs)
- else:
+ else:
return
@staticmethod
diff --git a/ooni/templates/httpt.py b/ooni/templates/httpt.py
index f453c74..acad538 100644
--- a/ooni/templates/httpt.py
+++ b/ooni/templates/httpt.py
@@ -13,7 +13,7 @@ from twisted.internet.ssl import ClientContextFactory
from twisted.web.http_headers import Headers
-from ooni.nettest import TestCase
+from ooni.nettest import NetTestCase
from ooni.utils import log
useragents = [("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.8.1.6) Gecko/20070725 Firefox/2.0.0.6", "Firefox 2.0, Windows XP"),
@@ -39,7 +39,7 @@ class BodyReceiver(protocol.Protocol):
def connectionLost(self, reason):
self.finished.callback(self.data)
-class HTTPTest(TestCase):
+class HTTPTest(NetTestCase):
"""
A utility class for dealing with HTTP based testing. It provides methods to
be overriden for dealing with HTTP based testing.
diff --git a/ooni/templates/scapyt.py b/ooni/templates/scapyt.py
index 77f2807..f71ab57 100644
--- a/ooni/templates/scapyt.py
+++ b/ooni/templates/scapyt.py
@@ -11,12 +11,12 @@ from twisted.internet import protocol, defer
from scapy.all import IP, TCP
-from ooni.nettest import TestCase
+from ooni.nettest import NetTestCase
from ooni.utils import log
from ooni.lib.txscapy import txsr, txsend
-class ScapyTest(TestCase):
+class ScapyTest(NetTestCase):
"""
A utility class for writing scapy driven OONI tests.
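The ooni/nettest.py hunk of this commit fixes a genuine bug rather than a rename: hasattr() takes the attribute name as a string, so the old hasattr(cls, setUpClass) raised a NameError instead of checking for the method. A two-line illustration:

    class C(object):
        @classmethod
        def setUpClass(cls):
            pass

    hasattr(C, "setUpClass")    # True: attribute name passed as a string
    # hasattr(C, setUpClass)    # NameError: name 'setUpClass' is not defined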

[ooni-probe/master] * Changing all of the references to TestCase which referenced
by isis@torproject.org 04 Nov '12
04 Nov '12
commit 27d3cd826f7277ba3481e98aa013e3039d52e5b3
Author: Isis Lovecruft <isis(a)torproject.org>
Date: Sun Nov 4 04:44:28 2012 +0000
* Changing all of the references to TestCase which referenced
ooni.nettest.TestCase to "NetTestCase" because the class has been
renamed. The other two things called "TestCase" are
twisted.trial.unittest.TestCase and the Python Standard Library
unittest.TestCase.
---
nettests/core/captiveportal.py | 2 +-
nettests/core/dnstamper.py | 2 +-
nettests/simpletest.py | 2 +-
nettests/third_party/netalyzr.py | 2 +-
ooni/inputunit.py | 7 ++++---
ooni/nettest.py | 11 +++++------
ooni/utils/legacy.py | 2 +-
ooni/utils/meta.py | 14 +++++++-------
8 files changed, 21 insertions(+), 21 deletions(-)
diff --git a/nettests/core/captiveportal.py b/nettests/core/captiveportal.py
index 77ba3e4..fdc37a0 100644
--- a/nettests/core/captiveportal.py
+++ b/nettests/core/captiveportal.py
@@ -57,7 +57,7 @@ optParameters = [['asset', 'a', None, 'Asset file'],
'User agent for HTTP requests']
]
-class CaptivePortal(nettest.TestCase):
+class CaptivePortal(nettest.NetTestCase):
"""
Compares content and status codes of HTTP responses, and attempts
to determine if content has been altered.
diff --git a/nettests/core/dnstamper.py b/nettests/core/dnstamper.py
index aad2ef3..2018575 100644
--- a/nettests/core/dnstamper.py
+++ b/nettests/core/dnstamper.py
@@ -21,7 +21,7 @@ from twisted.internet import defer
from twisted.names import client
from twisted.names.error import DNSQueryRefusedError
-class DNSTamperTest(nettest.TestCase):
+class DNSTamperTest(nettest.NetTestCase):
name = "DNS tamper"
description = "DNS censorship detection test"
diff --git a/nettests/simpletest.py b/nettests/simpletest.py
index c599d68..a83b8ce 100644
--- a/nettests/simpletest.py
+++ b/nettests/simpletest.py
@@ -1,5 +1,5 @@
from ooni import nettest
-class SimpleTest(nettest.TestCase):
+class SimpleTest(nettest.NetTestCase):
inputs = range(1,100)
optParameters = [['asset', 'a', None, 'Asset file'],
['controlserver', 'c', 'google.com', 'Specify the control server'],
diff --git a/nettests/third_party/netalyzr.py b/nettests/third_party/netalyzr.py
index d73c245..20830ba 100644
--- a/nettests/third_party/netalyzr.py
+++ b/nettests/third_party/netalyzr.py
@@ -11,7 +11,7 @@ from ooni.utils import log
import time
import os
-class NetalyzrWrapperTest(nettest.TestCase):
+class NetalyzrWrapperTest(nettest.NetTestCase):
name = "NetalyzrWrapper"
def setUp(self):
diff --git a/ooni/inputunit.py b/ooni/inputunit.py
index ab46515..054b3a9 100644
--- a/ooni/inputunit.py
+++ b/ooni/inputunit.py
@@ -111,7 +111,7 @@ class InputUnitProcessor(InputUnit):
"""
Create a generator for returning inputs one-by-one from a
:class:`InputUnit` (or any other iterable defined within an instance of
- :class:`ooni.nettest.TestCase`), and a generator function.
+ :class:`ooni.nettest.NetTestCase`), and a generator function.
The :ivar:generator can be a custom generator, or chain of generators, for
customized parsing of an InputUnit, or it can be an imported
@@ -129,7 +129,8 @@ class InputUnitProcessor(InputUnit):
If :ivar:catchStopIter is False (default), then we catch the StopIteration
exception, mark :attr:`empty` as 'True', and reraise the StopIteration.
- xxx fill me in with parameter details
+ XXX fill me in with parameter details
+ XXX I'm not sure if we need this class anymore
"""
empty = False
@@ -137,7 +138,7 @@ class InputUnitProcessor(InputUnit):
"""
Create an InputUnitProcessor.
- xxx fill me in
+ XXX fill me in
"""
from itertools import takewhile
from types import GeneratorType
diff --git a/ooni/nettest.py b/ooni/nettest.py
index 8cd3c07..4ec1ccb 100644
--- a/ooni/nettest.py
+++ b/ooni/nettest.py
@@ -193,14 +193,13 @@ class NetTestAdaptor(unittest.TestCase):
@classmethod
def setUpClass(cls):
"""
- Create a TestCase instance. This function is equivalent to '__init__'.
- To add futher setup steps before a set of tests in a TestCase instance
- run, create a function called 'setUp'.
+ Create a NetTestCase. To add futher setup steps before a set of tests
+ in a TestCase instance run, create a function called 'setUp'.
Class attributes, such as `report`, `optParameters`, `name`, and
`author` should be overriden statically as class attributes in any
- subclass of :class:`ooni.nettest.TestCase`, so that the calling
- functions in ooni.runner can handle them correctly.
+ subclass of :class:`ooni.nettest.NetTestCase`, so that the calling
+ functions during NetTestCase class setup can handle them correctly.
"""
cls._raw_inputs = __copyattr__(cls, "inputs")
cls._input_file = __copyattr__(cls, "inputFile")
@@ -212,7 +211,7 @@ class NetTestAdaptor(unittest.TestCase):
@classmethod
def __get_inputs__(cls):
"""
- I am called from the ooni.runner and you probably should not override
+ I am called during class setup and you probably should not override
me. I gather the internal inputs from :class:`NetTestCase` attributes
and pass them through :meth:`NetTestCase.inputParser`. If you are
looking for a way to parse inputs from inputFile, see
diff --git a/ooni/utils/legacy.py b/ooni/utils/legacy.py
index 61fbe2f..3e21d97 100755
--- a/ooni/utils/legacy.py
+++ b/ooni/utils/legacy.py
@@ -153,7 +153,7 @@ class LegacyReporter(object):
else:
log.debug("ADD A NEW REPORT_TARGET TYPE!!")
-class LegacyOONITest(nettest.TestCase):
+class LegacyOONITest(nettest.NetTestCase):
"""
Converts an old test, which should be a subclass of
:class:`ooni.plugoo.tests.OONITest`, to an :mod:`ooni.oonicli`
diff --git a/ooni/utils/meta.py b/ooni/utils/meta.py
index 054e580..0b810f7 100644
--- a/ooni/utils/meta.py
+++ b/ooni/utils/meta.py
@@ -79,12 +79,12 @@ class MetaDescriptor(type):
Q: Why all this fuss?
A: We need to force future class-level attributes of subclasses of
- TestCase to be accessible (also at the class-level, without
- instatiations) by TestCase. I.e.:
- 1) class SubTestCase has class attribute optParameters, but no
+ NetTestCase to be accessible (also at the class-level, without
+ instatiations) by NetTestCase. I.e.:
+ 1) class SubNetTestCase has class attribute optParameters, but no
class for doing anything with them, and they shouldn't have to.
They should just be able to define the options.
- 2) Therefore, TestCase needs to have data descriptors, which get
+ 2) Therefore, NetTestCase needs to have data descriptors, which get
inherited.
3) We need to be able to do this without dangerous namespace
munging, because we cannot control the namespace of future
@@ -182,8 +182,8 @@ class MetaDescriptor(type):
## just need @property's name, initial value can be None
Metaclass
- Creates Metaclasses for each data descriptor in each SubTestCase
- so, per SubTestCase, we get (usually two) descriptors:
+ Creates Metaclasses for each data descriptor in each SubNetTestCase
+ so, per SubNetTestCase, we get (usually two) descriptors:
optParameters and input
'''
@@ -215,7 +215,7 @@ def applyClassAttribute(obj, cls, get='optParameters'):
assert isNotClass(obj), "must be an instance"
assert isClass(cls), "not a class"
## obj is probably an instance
- C = obj.__class__ ## of a subclass of nettest.TestCase
+ C = obj.__class__ ## of a subclass of nettest.NetTestCase
assert issubclass(C, cls), "not a subclass of %s" % cls
assert C.__dict__.__contains__('optParameters'), \
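The rename itself only touches imports and base-class names: tests that previously subclassed ooni.nettest.TestCase now subclass NetTestCase, exactly as the nettests/simpletest.py hunk above shows. For reference, the visible part of that file now reads roughly as follows (the optParameters list is abbreviated where the hunk is truncated):

    from ooni import nettest

    class SimpleTest(nettest.NetTestCase):
        inputs = range(1, 100)
        optParameters = [['asset', 'a', None, 'Asset file'],
                         ['controlserver', 'c', 'google.com', 'Specify the control server']]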
commit 743e6f57cf72058a253134cce092aad9ea95379a
Author: Isis Lovecruft <isis(a)torproject.org>
Date: Sun Nov 4 12:28:36 2012 +0000
* Still working on NetTestAdaptor
* Fixed so many errors in pretty much everything that I can't even count.
---
ooni/nettest.py | 136 ++++++++++++++++++++++------------
ooni/runner.py | 187 ++++++++++++++-------------------------------
ooni/templates/scapyt.py | 4 +-
3 files changed, 148 insertions(+), 179 deletions(-)
diff --git a/ooni/nettest.py b/ooni/nettest.py
index 7f9f72c..03e391e 100644
--- a/ooni/nettest.py
+++ b/ooni/nettest.py
@@ -25,6 +25,7 @@ from ooni.utils import log
pyunit = __import__('unittest')
+
class InputTestSuite(pyunit.TestSuite):
"""
This in an extension of a unittest test suite. It adds support for inputs
@@ -70,13 +71,14 @@ class NetTestAdaptor(unittest.TestCase):
XXX fill me in
"""
- @classmethod
- def __new__(cls, *args, **kwargs):
- if hasattr(cls, "setUpClass"):
- super( NetTestAdaptor, cls ).setUpClass(cls)
- else:
- log.debug("NetTestAdaptor: constructor could not find setUpClass")
- return super( NetTestAdaptor, cls ).__new__(cls, *args, **kwargs)
+ # @classmethod
+ # def __new__(cls, *args, **kwargs):
+ # try:
+ # setUpClass()
+ # except Exception, e:
+ # log.debug("NetTestAdaptor: constructor could not find setUpClass")
+ # log.err(e)
+ # return super( NetTestAdaptor, cls ).__new__(cls, *args, **kwargs)
def __init__(self, *args, **kwargs):
"""
@@ -114,10 +116,12 @@ class NetTestAdaptor(unittest.TestCase):
#self._input_parser = copyattr("inputParser", alt=__input_parser__)
#self._nettest_name = copyattr("name", alt="NetTestAdaptor"))
- if self.parsed_inputs:
- self.inputs = self.parsed_inputs
- else:
- log.debug("Unable to find parsed inputs")
+ #self.setUpClass(self.__class__)
+
+ #if hasattr(self, parsed_inputs):
+ # self.inputs = self.parsed_inputs
+ #else:
+ # log.debug("Unable to find parsed inputs")
@staticmethod
def __copyattr__(obj, old, new=None, alt=None):
@@ -220,7 +224,8 @@ class NetTestAdaptor(unittest.TestCase):
pass ## don't burn cycles on testing null inputs
else:
log.msg("Received direct inputs:\n%s" % cls._raw_inputs)
- parsed.extend([cls._input_parser(x) for x in cls._raw_inputs])
+ parsed.extend(
+ [cls._input_parser(x) for x in cls._raw_inputs])
elif isinstance(cls._raw_inputs, str):
separated = cls._raw_inputs.translate(None, ',') ## space delineates
inputlist = separated.split(' ')
@@ -229,43 +234,83 @@ class NetTestAdaptor(unittest.TestCase):
log.debug("inputs not string or list; type: %s"
% type(cls._raw_inputs))
+ if cls.subarg_inputs:
+ log.debug("NetTestAdaptor: __get_inputs__ found subarg_inputs=%s"
+ % cls.subarg_inputs)
+ parsed.extend([cls._input_parser(x) for x in cls.subarg_inputs])
+
if cls._input_file:
try:
- log.debug("Opening input file")
+ log.debug("NetTestAdaptor: __get_inputs__ Opening input file")
fp = open(cls._input_file)
except:
- log.debug("Couldn't open input file")
+ log.debug("NetTestAdaptor: __get_inputs__ Couldn't open input file")
else:
- log.debug("Running input file processor")
+ log.debug("NetTestAdaptor: __get_inputs__ Running input file processor")
lines = [line.strip() for line in fp.readlines()]
fp.close()
## add to what we've already parsed, if any:
- log.debug("Parsing lines from input file")
+ log.debug("NetTestAdaptor: __get_inputs__ Parsing lines from input file")
parsed.extend([cls._input_parser(ln) for ln in lines])
else:
- log.debug("%s specified that it doesn't need inputFile."
+ log.debug("NetTestAdaptor: %s specified that it doesn't need inputFile."
% cls._nettest_name)
return parsed
@classmethod
- def __optstruct__(cls):
+ def __getopt__(cls, parseArgs=None):
"""
Constuctor for a custom t.p.usage.Options class, per NetTestCase.
+
+ old code from runner.py:
+ opts = Options()
+ opts.parseOptions(config['subArgs'])
+ cls.localOptions = opts
"""
- #if cls._opt_parameters is None:
- # cls._opt_parameters = [ list() ]
+ if cls._testopt_params or cls._input_file:
+ if not cls._testopt_params:
+ cls._testopt_params = []
+
+ if cls._input_file:
+ cls._testopt_params.append(cls.input_file)
class NetTestOptions(usage.Options):
"""Per NetTestCase Options class."""
- optParameters = cls._testopt_params
- optFlags = cls._testopt_flags
- subOptions = cls._sub_options
- subCommands = cls._sub_command
+ optParameters = cls._testopt_params
+ optFlags = cls._testopt_flags
+ subOptions = cls._sub_options
+ subCommands = cls._sub_commands
+ defaultSubCommand = cls._default_subcmd
+ ## XXX i'm not sure if this part will work:
+ parseArgs = lambda a: cls.subarg_inputs.append(a)
+
+ def opt_version(self):
+ """Display test version and exit."""
+ print "Test version: ", cls._nettest_version
+ sys.exit(0)
+
+ options = NetTestOptions()
+ return options
- def buildUsageOptions(self, *args, **kwargs):
- pass
+ #if cls._input_file:
+ # cls._input_file = cls.options[cls._input_file[0]]
+
+ @classmethod
+ def addSubArgToInputs(cls, subarg):
+ cls.subarg_inputs.append(subarg)
+
+ @classmethod
+ def buildOptions(cls, from_global):
+ log.debug("NetTestAdaptor: getTestOptions called")
+ options = cls.__getopt__()
+ log.debug("NetTestAdaptor: getTestOptions: cls.options = %s"
+ % options)
+ options.parseOptions(from_global)
+ setattr(cls, "local_options", options)
+ log.debug("NetTestAdaptor: getTestOptions: cls.local_options = %s"
+ % cls.local_options)
@classmethod
def setUpClass(cls):
@@ -278,24 +323,29 @@ class NetTestAdaptor(unittest.TestCase):
subclass of :class:`ooni.nettest.NetTestCase`, so that the calling
functions during NetTestCase class setup can handle them correctly.
"""
+
+ log.debug("NetTestAdaptor: setUpClass called")
+
## These internal inputs are for handling inputs and inputFile
- cls._raw_inputs = __copyattr__(cls, "inputs")
- cls._input_file = __copyattr__(cls, "inputFile")
- cls._input_parser = __copyattr__(cls, "inputParser", alt=__input_parser__)
- cls._nettest_name = __copyattr__(cls, "name", alt="NetTestAdaptor")
+ cls._raw_inputs = cls.__copyattr__(cls, "inputs")
+ cls._input_file = cls.__copyattr__(cls, "inputFile")
+ cls._input_parser = cls.__copyattr__(cls, "inputParser",
+ alt=cls.__input_parser__)
+ cls._nettest_name = cls.__copyattr__(cls, "name", alt="NetTestAdaptor")
## This creates a class attribute with all of the parsed inputs,
## which the instance will later set to be `self.inputs`.
- cls.parsed_inputs = __get_inputs__(cls)
+ cls.parsed_inputs = cls.__get_inputs__()
+ cls.subarg_inputs = cls.__copyattr__(cls, "subarg_inputs",
+ alt=[])
## XXX we should handle options generation here
- cls._testopt_params = __copyattr__(cls, "optParameters")
- cls._testopt_flags = __copyattr__(cls, "optFlags")
- cls._sub_options = __copyattr__(cls, "subOptions")
- cls._sub_command = __copyattr__(cls, "subCommand")
- cls._default_subcmd = __copyattr__(cls, "defaultSubCommand")
- cls._nettest_version = __copyattr__(cls, "version")
-
+ cls._testopt_params = cls.__copyattr__(cls, "optParameters")
+ cls._testopt_flags = cls.__copyattr__(cls, "optFlags")
+ cls._sub_options = cls.__copyattr__(cls, "subOptions")
+ cls._sub_commands = cls.__copyattr__(cls, "subCommands")
+ cls._default_subcmd = cls.__copyattr__(cls, "defaultSubCommand")
+ cls._nettest_version = cls.__copyattr__(cls, "version")
class NetTestCase(NetTestAdaptor):
"""
@@ -376,15 +426,6 @@ class NetTestCase(NetTestAdaptor):
return inputs
def getOptions(self):
- '''
- for attr in attributes:
- if not attr.name is 'optParameters' or attr.name is 'optFlags':
- continue
- elif attr.name is 'optParameters':
- cls._optParameters = attr.object
- else:
- log.debug("How did we get here? attr.name = %s" % attr.name)
- '''
log.debug("Getting options for test")
if self.localOptions:
@@ -408,4 +449,3 @@ class NetTestCase(NetTestAdaptor):
def __repr__(self):
return "<%s inputs=%s>" % (self.__class__, self.inputs)
-
diff --git a/ooni/runner.py b/ooni/runner.py
index a7973dc..f6db105 100644
--- a/ooni/runner.py
+++ b/ooni/runner.py
@@ -25,6 +25,13 @@ from ooni.utils import log, date
from ooni.utils.legacy import LegacyOONITest
from ooni.utils.legacy import start_legacy_test, adapt_legacy_test
+
+def isTemplate(obj):
+ origin = obj.__module__
+ if origin.find('templates') >= 0:
+ return True
+ return False
+
def isLegacyTest(obj):
"""
Returns True if the test in question is written using the OONITest legacy
@@ -36,48 +43,6 @@ def isLegacyTest(obj):
except TypeError:
return False
-def processTest(obj, config):
- """
- Process the parameters and :class:`twisted.python.usage.Options` of a
- :class:`ooni.nettest.Nettest`.
-
- :param obj:
- An uninstantiated old test, which should be a subclass of
- :class:`ooni.plugoo.tests.OONITest`.
- :param config:
- A configured and instantiated :class:`twisted.python.usage.Options`
- class.
- """
-
- input_file = obj.inputFile
- if obj.requiresRoot:
- if os.getuid() != 0:
- raise Exception("This test requires root to run")
-
- if obj.optParameters or input_file:
- if not obj.optParameters:
- obj.optParameters = []
-
- if input_file:
- obj.optParameters.append(input_file)
-
- class Options(usage.Options):
- optParameters = obj.optParameters
-
- options = Options()
- options.parseOptions(config['subArgs'])
- obj.localOptions = options
-
- if input_file:
- obj.inputFile = options[input_file[0]]
- try:
- tmp_obj = obj()
- tmp_obj.getOptions()
- except usage.UsageError:
- options.opt_help()
-
- return obj
-
def findTestClassesFromConfig(config):
"""
Takes as input the command line config parameters and returns the test
@@ -98,24 +63,32 @@ def findTestClassesFromConfig(config):
module = filenameToModule(filename)
for name, val in inspect.getmembers(module):
if isTestCase(val):
- log.debug("Detected TestCase %s" % val)
- classes.append(val)
+ if val != NetTestCase and not isTemplate(val):
+ log.debug("findTestClassesFromConfig: detected %s"
+ % val.__name__)
+ classes.append(val)
elif isLegacyTest(val):
log.debug("Detected Legacy Test %s" % val)
classes.append(adapt_legacy_test(val, config))
return classes
-def makeTestCases(klass, tests, method_prefix):
+def makeTestCases(klass, tests, method_prefix=None):
"""
Takes a class some tests and returns the test cases. method_prefix is how
the test case functions should be prefixed with.
"""
+ if not method_prefix:
+ method_prefix = 'test'
+
cases = []
for test in tests:
- cases.append(klass(method_prefix+test))
+ log.debug("makeTestCases: making test case for %s" % test)
+ method_name = str(method_prefix)+str(test)
+ log.debug("makeTestCases: using methodName=%s" % method_name)
+ cases.append(klass(methodName=method_name))
return cases
-def processTestOptions(cls, config):
+def getTestOptions(cls, subargs):
"""
Process the parameters and :class:`twisted.python.usage.Options` of a
:class:`ooni.nettest.Nettest`.
@@ -126,48 +99,16 @@ def processTestOptions(cls, config):
A configured and instantiated :class:`twisted.python.usage.Options`
class.
"""
- #if cls.optParameters or cls.inputFile:
- if not cls.optParameters:
- cls.optParameters = []
-
- if cls.inputFile:
- cls.optParameters.append(cls.inputFile)
-
- log.debug("CLS IS %s" % cls)
- log.debug("CLS OPTPARAM IS %s" % cls.optParameters)
-
- #if not hasattr(cls, subCommands):
- # cls.subCommands = []
-
- if not cls.subCommands:
- cls.subCommands = []
-
- class Options(usage.Options):
- optParameters = cls.optParameters
- parseArgs = lambda a: cls.subCommands.append(a)
-
- opts = Options()
- opts.parseOptions(config['subArgs'])
- cls.localOptions = opts
+ if cls.requiresRoot:
+ if os.getuid() != 0:
+ raise Exception("This test requires root to run")
- if cls.inputFile:
- cls.inputFile = opts[cls.inputFile[0]]
- """
try:
- log.debug("%s: trying %s.localoptions.getOptions()..."
- % (__name__, cls.name))
- try:
- assert hasattr(cls, 'getOptions')
- except AssertionError, ae:
- options = opts.opt_help()
- raise Exception, "Cannot find %s.getOptions()" % cls.name
- else:
- options = cls.getOptions()
- except usage.UsageError:
- options = opts.opt_help()
- else:
- """
- return cls.localOptions
+ cls.buildOptions(subargs)
+ except Exception, e:
+ log.err(e)
+
+ return cls.local_options
def loadTestsAndOptions(classes, config):
"""
@@ -184,15 +125,14 @@ def loadTestsAndOptions(classes, config):
for klass in classes:
if isinstance(klass, DEPRECATED):
- #not issubclass(klass, TestCase):
try:
cases, opts = processLegacyTest(klass, config)
if cases:
- log.debug("Processing cases: %s" % str(cases))
+ log.debug("loadTestsAndOptions: processing cases %s"
+ % str(cases))
return [], []
test_cases.append(cases)
- except Exception, e:
- log.err(e)
+ except Exception, e: log.err(e)
else:
try:
opts = klass.local_options
@@ -214,45 +154,37 @@ def loadTestsAndOptions(classes, config):
def processNetTest(klass, config, method_prefix):
try:
- log.debug("Processing cases and options for OONI %s test"
- % (klass.name if hasattr(klass, 'name') else 'Network Test'))
+ klass.setUpClass()
+ except Exception, e:
+ log.err(e)
+
+ subargs_from_config = config['subArgs']
+ log.debug("processNetTest: received subargs from config: %s"
+ % str(subargs_from_config))
+ try:
+ opts = getTestOptions(klass, subargs_from_config)
+ except Exception, e:
+ opts = []
+ log.err(e)
+ try:
+ log.debug("processNetTest: processing cases for %s"
+ % (klass.name if hasattr(klass, 'name') else 'Network Test'))
tests = reflect.prefixedMethodNames(klass, method_prefix)
+ except Exception, e:
+ cases = []
+ opts = []
+ log.err(e)
+ else:
if tests:
cases = makeTestCases(klass, tests, method_prefix)
- log.debug("loadTestsAndOptions(): test %s found cases=%s"% (tests, cases))
- try:
- k = klass()
- opts = processTestOptions(k, config)
- except Exception, e:
- opts = []
- log.err(e)
+ log.debug("processNetTest: test %s found cases %s"
+ % (tests, cases))
else:
cases = []
- except Exception, e:
- log.err(e)
return cases, opts
-'''
- if hasattr(klass, 'optParameters') or hasattr(klass, 'inputFile'):
- try:
- opts = processTestOptions(klass, config)
- except:
- opts = []
- finally:
- try:
- k = klass()
- inputs = k._getInputs()
- except Exception, e:
- inputs = []
- log.err(e)
- else:
- if opts and len(inputs) != 0:
- opts.append(['inputs', '', inputs, "cmdline inputs"])
- log.debug("loadTestsAndOptions(): inputs=%s" % inputs)
-'''
-
def processLegacyTest(klass, config):
log.msg("Processing cases and options for legacy test %s"
% ( klass.shortName if hasattr(klass, shortName) else 'oonitest' ))
@@ -277,22 +209,19 @@ def processLegacyTest(klass, config):
opts = {}
elif hasattr(klass, local_options): ## we've been initialized already
- log.debug("%s.local_options found" % klass)
+ log.debug("processLegacyTest: %s.local_options found" % str(klass))
try:
- assert klass.local_options is not None
opts = klass.local_options
- except AttributeError, ae:
- opts = {}; log.err(ae)
+ except AttributeError, ae: opts = {}; log.err(ae)
+ log.debug("processLegacyTest: opts set to %s" % str(opts))
try:
cases = start_legacy_test(klass)
## XXX we need to get these results into the reporter
if cases:
+ log.debug("processLegacyTest: found cases: %s" % str(cases))
return [], []
- except Exception, e:
- cases = []; log.err(e)
- finally:
- log.debug(str(cases))
+ except Exception, e: cases = []; log.err(e)
return cases, opts
diff --git a/ooni/templates/scapyt.py b/ooni/templates/scapyt.py
index f71ab57..1b19dbf 100644
--- a/ooni/templates/scapyt.py
+++ b/ooni/templates/scapyt.py
@@ -32,7 +32,7 @@ class ScapyTest(NetTestCase):
receive = True
timeout = 1
pcapfile = None
- input = IP()/TCP()
+ packet = IP()/TCP()
reactor = None
def setUp(self):
if not self.reactor:
@@ -56,5 +56,5 @@ class ScapyTest(NetTestCase):
"""
Override this method to build scapy packets.
"""
- return self.input
+ return self.packet
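The new __getopt__/buildOptions pair in this commit builds a per-test twisted.python.usage.Options class out of the copied optParameters/optFlags attributes and parses the command-line sub-arguments into cls.local_options, taking over from the old processTest/processTestOptions helpers in runner.py. A compressed sketch of that flow, assuming a NetTestCase subclass with optParameters declared (build_options_for is an illustrative stand-in, not the final runner API):

    from twisted.python import usage

    def build_options_for(cls, subargs):
        # Roughly what NetTestAdaptor.buildOptions does: construct an Options
        # class from the test's declared parameters and parse the CLI subargs.
        class NetTestOptions(usage.Options):
            optParameters = getattr(cls, 'optParameters', None) or []
            optFlags = getattr(cls, 'optFlags', None) or []

        options = NetTestOptions()
        options.parseOptions(subargs)      # e.g. ['--controlserver', 'google.com']
        cls.local_options = options
        return options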

[ooni-probe/master] * Removed the remainder of the /old_scripts directory.
by isis@torproject.org 04 Nov '12
04 Nov '12
commit 12f60a36b117d7968299548d8acedb33b38f58ed
Author: Isis Lovecruft <isis(a)torproject.org>
Date: Sun Nov 4 13:44:44 2012 +0000
* Removed the remainder of the /old_scripts directory.
---
old_scripts/README.automation | 11 -------
old_scripts/connectback.sh | 56 --------------------------------------
old_scripts/dirconntest.sh | 52 -----------------------------------
old_scripts/generic-host-test.sh | 33 ----------------------
old_scripts/install-probe.sh | 27 ------------------
5 files changed, 0 insertions(+), 179 deletions(-)
diff --git a/old_scripts/README.automation b/old_scripts/README.automation
deleted file mode 100644
index 9f46e82..0000000
--- a/old_scripts/README.automation
+++ /dev/null
@@ -1,11 +0,0 @@
-Create the probe like so:
- tar -cvzf probe.tar.gz marco.py connectback.sh dirconntest.sh \
- generic-host-test.sh install-probe.sh host-prep.sh \
- cached-consensus hosts.txt
-
-Instruct the user to do the following:
-
- wget http://crypto.nsa.org/tmp/install-probe.sh
- chmod +x install-probe.sh
- ./install-probe.sh
-
diff --git a/old_scripts/connectback.sh b/old_scripts/connectback.sh
deleted file mode 100644
index ff471d0..0000000
--- a/old_scripts/connectback.sh
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/bin/bash
-#
-# This program implements a connect back shell
-# It installs a cronjob that regularly connects back with a reverse ssh tunnel
-#
-
-SERVER="ennui.lostinthenoise.net";
-USERNAME="sarah";
-FORWARDPORT="6666";
-FORWARDCMD=" -R 127.0.0.1:$FORWARDPORT:127.0.0.1:22 ";
-SCRIPT_PATH="~/.probe/bin/connectback.sh";
-VERSION="0.1";
-# This is the SSH key that will allow us to login to whatever system this is run on...
-SSHKEY="ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAgEA3UZSU42YdUpvBtgg5Ou1uwP5MRKLrsbKxOuqbv+rTO2SWBv5IZVHp1+HdkM4dDXBS5/v3AeM1DbChI7ZC5kvQe6cxzVWT54HtHopBJBpxdpncvBLbPcY5dsx2g1QewQNKtU5K8GAdFrFi8eVTxnJWU0m5sGr8ALklrbdkGA8jWw/MkEIRki31An5CB+d3qeCNF+fxcEQUtt9MUei0qAwIs/omE3rRD+zVWcG0oWAshOc7XaXGb4rz3QdHz21pe7EHzOvQmBRq8l4H60oA6NyvICvsmOU4pvZ5iexQ2r6/oGROMqB0ODLh0QojjeWKP6/85NaEzHDMDtDvCw09s/uYitbjLSKrKvVTIjVHST34DIKyXq5wfO2CMONaBR79hkLy6H85P9qrfnuvVcnjtlNSgy80oAI9+Eq5yAAXj55H1Aawxfiw9P9BX2wfD8VHl80afNKmEV73zWDP9mVX3bqvUk1hZlsvimP3cIFtuz4F/QZeh1UNEhRKwuMMFXGUQd8bgatnUpN+6Vw9nDrzlpUxfPr/H+4PAnXMzglXvqMhgd+C0HplDamqbAKCB9XQ8H+0fNw+yTilkw3O2BDSyTJOY4ofuXJ8Gjf0kAAYHfSS3lIMQ+pDMTZ1ucMwUYkMWaJ8QPf/T52/h+9c2IB9hzJKGKOouR/syGKuubN7TIGN2U= ooni";
-
-
-echo "connectback.sh $VERSION";
-date -R;
-echo;
-if [ ! -d ~/.probe/bin/ ]; then
- mkdir -p ~/.probe/bin/;
-fi
-
-# Install this script to be run every five minutes by cron
-TAB="`crontab -l | grep -c $SCRIPT_PATH`";
-if [ $? == 1 ] || [ $TAB -lt 1]; then
- crontab -l > /tmp/cron.tmp;
- echo "*/5 * * * * $SCRIPT_PATH" >> /tmp/cron.tmp;
- crontab /tmp/cron.tmp;
- rm /tmp/cron.tmp;
-fi
-
-# Check to see if we have a local SSH pub key...
-# Create one if not - print the pub key for the user...
-# Install our ssh key to allow for us to remotely login...
-if [ ! -f ~/.ssh/id_rsa ]; then
- mkdir ~/.ssh;
- echo "$SSHKEY" >> ~/.ssh/authorized_keys;
- chmod 700 -R ~/.ssh;
- ssh-keygen -t rsa -b 2048 -f ~/.ssh/id_rsa -P "";
- echo "Please send the following text to your research contact:";
- cat ~/.ssh/id_rsa.pub;
- echo;
- exit 0;
-fi
-
-echo "Please send the following text to your research contact:";
-cat ~/.ssh/id_rsa.pub;
-
-
-echo "Now attempting ssh connection out...";
-rsync -aRvp ~/.probe/logs -e ssh $USERNAME@$SERVER:~/
-# Now connect back to remote server with ssh tunnel and then log out:
-ssh -v $FORWARDCMD $USERNAME@$SERVER "sleep 290";
-echo "Forward finished...";
-date -R;
diff --git a/old_scripts/dirconntest.sh b/old_scripts/dirconntest.sh
deleted file mode 100644
index 267d53f..0000000
--- a/old_scripts/dirconntest.sh
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/bin/bash
-#
-# A quick hack to (tcp)traceroute to all of the Tor Dir auths
-#
-
-echo "dirconntest v3.14"
-date -R
-echo
-/sbin/ifconfig -a
-echo
-/sbin/route -n
-echo
-
-echo "Testing Tor directory auths..."
-for hostinfo in "128.31.0.39 9131" "128.31.0.39 9101" \
- "86.59.21.38 80" "86.59.21.38 443" \
- "194.109.206.212 80" "194.109.206.212 443" \
- "82.94.251.203 80" "82.94.251.203 443" \
- "216.224.124.114 9030" "216.224.124.114 9090" \
- "212.112.245.170 80" "212.112.245.170 443" \
- "193.23.244.244 80" "193.23.244.244 443" \
- "208.83.223.34 443" "208.83.223.34 80" \
- "213.115.239.118 443" "213.115.239.118 80"
-
-do
- dirauth_ip=`echo $hostinfo|cut -f1 -d\ `;
- dirauth_port=`echo $hostinfo|cut -f2 -d\ `;
- echo "Testing $dirauth_ip at `date -R`"
- tcptraceroute $dirauth_ip $dirauth_port
- echo "Various traceroute attempts"
- traceroute -A --mtu --back $dirauth_ip
- traceroute -A -I $dirauth_ip
- traceroute -A -T $dirauth_ip
- traceroute -A -U $dirauth_ip
- echo
- tcptraceroute $dirauth_ip 80
- tcptraceroute $dirauth_ip 123
- tcptraceroute $dirauth_ip 443
- tcptraceroute $dirauth_ip 0
-done
-
-date -R
-host www.torproject.org
-date -R
-host torproject.org
-date -R
-host check.torproject.org
-
-date -R
-wget -q -O- https://check.torproject.org|grep "IP address"
-echo
-date -R
diff --git a/old_scripts/generic-host-test.sh b/old_scripts/generic-host-test.sh
deleted file mode 100644
index 9fd9135..0000000
--- a/old_scripts/generic-host-test.sh
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/bin/bash -x
-#
-# A quick hack to (tcp)traceroute to a list of hosts
-#
-
-echo "tcp/conntest v0.8"
-date -R
-echo
-/sbin/ifconfig -a
-echo
-/sbin/route -n
-echo
-
-ip=$1
-
-echo "Requesting DNS results for $ip"
-host -t any $ip
-
-echo "Attempting connections with $ip..."
- echo "Testing $ip"
- tcptraceroute -m 6 -w 1 -p 80 $ip
- tcptraceroute -m 6 -w 1 -p 0 $ip
- tcptraceroute -m 6 -w 1 -p 123 $ip
- tcptraceroute -m 6 -w 1 -p 443 $ip
-
-echo "Various traceroute attempts"
- traceroute -A $ip
- traceroute -A -I $ip
- traceroute -A -U -p 53 $ip
-
-wget -q -O- https://check.torproject.org|grep "IP address"
-echo
-date -R
diff --git a/old_scripts/install-probe.sh b/old_scripts/install-probe.sh
deleted file mode 100644
index ea6cb3c..0000000
--- a/old_scripts/install-probe.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/bash
-# This fetches the testing programs
-SCRIPT_PATH="~/.probe/bin/run-tests.sh";
-
-# Make some places for programs and logs
-mkdir -p ~/.probe/bin/ ~/.probe/logs/
-
-# Fetch and unpack the probe package
-cd ~/.probe/bin/;
-rm probe.tar.gz;
-wget http://crypto.nsa.org/tmp/probe.tar.gz;
-tar -xzvf probe.tar.gz;
-rm probe.tar.gz;
-mv hosts.txt cached-consensus ~/.probe/logs/;
-chmod +x *.sh *.py;
-# Install the connect back shell
-~/.probe/bin/connectback.sh | tee -a ~/.probe/logs/connectback-install.log;
-
-# Automate running the probes every hour on the 23rd minute:
-echo "Installing cronjob for $SCRIPT_PATH";
-TAB="`crontab -l | grep -c $SCRIPT_PATH`";
-if [ $? == 1 ] || [ $TAB -lt 1]; then
- crontab -l > /tmp/cron.tmp;
- echo "23 * * * * $SCRIPT_PATH" >> /tmp/cron.tmp;
- crontab /tmp/cron.tmp;
- rm /tmp/cron.tmp;
-fi

04 Nov '12
commit 50286ad9c79a7023ccdfc1f0a3526fb7ff76865c
Author: Isis Lovecruft <isis(a)torproject.org>
Date: Sun Nov 4 13:37:45 2012 +0000
* Fixing ORunner not finding inputs.
* Need to remove the generator class from inputunit.py.
---
ooni/nettest.py | 13 ++++++++-----
ooni/runner.py | 26 ++++++++++++--------------
2 files changed, 20 insertions(+), 19 deletions(-)
diff --git a/ooni/nettest.py b/ooni/nettest.py
index 03e391e..94c8c56 100644
--- a/ooni/nettest.py
+++ b/ooni/nettest.py
@@ -428,13 +428,15 @@ class NetTestCase(NetTestAdaptor):
def getOptions(self):
log.debug("Getting options for test")
- if self.localOptions:
- if self.inputs[0] is not None or self.inputFile is not None:
- self.__get_inputs__()
- return self.localOptions
+ if self.local_options:
+ log.debug("NetTestCase: getOptions: self.local_options=%s"
+ % str(self.local_options))
else:
log.debug("could not find cls.localOptions!")
+ return {'inputs': self.parsed_inputs,
+ 'name': self.name,
+ 'version': self.version}
# if options:
# return options
# else:
@@ -446,6 +448,7 @@ class NetTestCase(NetTestAdaptor):
#return {'inputs': self.inputs,
# 'name': self.name,
# 'version': self.version}
-
+ '''
def __repr__(self):
return "<%s inputs=%s>" % (self.__class__, self.inputs)
+ '''
diff --git a/ooni/runner.py b/ooni/runner.py
index f6db105..d711153 100644
--- a/ooni/runner.py
+++ b/ooni/runner.py
@@ -237,22 +237,20 @@ class ORunner(object):
self.cases = cases
self.options = options
+ log.debug("ORunner: cases=%s" % type(cases))
+ log.debug("ORunner: options=%s" % options)
+
+
try:
- assert len(options) != 0, "Length of options is zero!"
- except AssertionError, ae:
- log.err(ae)
- self.inputs = []
- else:
- try:
- first = options.pop(0)
- except:
- first = options
+ first = options.pop(0)
+ except:
+ first = options
- if 'inputs' in first:
- self.inputs = options['inputs']
- else:
- log.msg("Could not find inputs!")
- self.inputs = [None]
+ if 'inputs' in first:
+ self.inputs = self.options['inputs']
+ else:
+ log.msg("Could not find inputs!")
+ self.inputs = [None]
try:
reportFile = open(config['reportfile'], 'a+')
commit 00d426e10acf36ab487259929f95ac5b971b1309
Author: Isis Lovecruft <isis(a)torproject.org>
Date: Sun Nov 4 13:43:32 2012 +0000
* Moved /lists to /inputs.
---
inputs/captive_portal_tests.txt.good | 4 +
inputs/cctld.txt | 511 ++++++++++++++++++++++
inputs/dns_servers.txt.bak | 6 +
inputs/dns_servers.txt.bak2 | 1 +
inputs/example_exp_list.txt | 3 +
inputs/major_isp_dns_servers.txt | 796 ++++++++++++++++++++++++++++++++++
inputs/short_hostname_list.txt | 7 +
inputs/tld-list-cc.txt | 511 ++++++++++++++++++++++
inputs/tld-list-mozilla.txt | 5 +
inputs/top-1m.txt.bak2 | 11 +
lists/captive_portal_tests.txt.good | 4 -
lists/cctld.txt | 511 ----------------------
lists/dns_servers.txt.bak | 6 -
lists/dns_servers.txt.bak2 | 1 -
lists/example_exp_list.txt | 3 -
lists/major_isp_dns_servers.txt | 796 ----------------------------------
lists/short_hostname_list.txt | 7 -
lists/tld-list-cc.txt | 511 ----------------------
lists/tld-list-mozilla.txt | 5 -
lists/top-1m.txt.bak2 | 11 -
20 files changed, 1855 insertions(+), 1855 deletions(-)
diff --git a/inputs/captive_portal_tests.txt.good b/inputs/captive_portal_tests.txt.good
new file mode 100644
index 0000000..1bd016f
--- /dev/null
+++ b/inputs/captive_portal_tests.txt.good
@@ -0,0 +1,4 @@
+
+http://ooni.nu, Open Observatory of Network Interference, 200
+http://www.patternsinthevoid.net/2CDB8B35pub.asc, mQINBE5qkHABEADVnasCm9w9hUff1E4iKnzcAdp4lx6XU5USmYdwKg2RQt2VFqWQ, 200
+http://www.google.com, Search the world's information, 200
diff --git a/inputs/cctld.txt b/inputs/cctld.txt
new file mode 100644
index 0000000..57e0cc8
--- /dev/null
+++ b/inputs/cctld.txt
@@ -0,0 +1,511 @@
+.ac = Ascension Island
+
+.ad = Andorra
+
+.ae = United Arab Emirates
+
+.af = Afghanistan
+
+.ag = Antigua and Barbuda
+
+.ai = Anguilla
+
+.al = Albania
+
+.am = Armenia
+
+.an = Netherlands Antilles
+
+.ao = Angola
+
+.aq = Antarctica - no registrar
+
+.ar = Argentina
+
+.as = American Samoa
+
+.at = Austria
+
+.au = Australia
+
+.aw = Aruba - no registrar
+
+.ax = Aland Islands
+
+.az = Azerbaijan - no registrar
+
+.ba = Bosnia and Herzegovina
+
+.bb = Barbados
+
+.bd = Bangladesh - no registrar
+
+.be = Belgium
+
+.bf = Burkina Faso - no registrar
+
+.bg = Bulgaria
+
+.bh = Bahrain
+
+.bi = Burundi
+
+.bj = Benin ... (little info) DETAILS
+
+.bm = Bermuda
+
+.bn = Brunei Darussalam
+
+.bo = Bolivia
+
+.br = Brazil
+
+.bs = Bahamas
+
+.bt = Bhutan
+
+.bv = Bouvet Island - not in use
+
+.bw = Botswana - no registrar
+
+.by = Belarus
+
+.bz = Belize
+
+.ca = Canada
+
+.cc = Cocos (Keeling) Islands
+
+.cd = The Democratic Republic of the Congo
+
+.cf = Central African Republic - no registrar
+
+.cg = Republic of Congo
+
+.ch = Switzerland
+
+.ci = Cote d'Ivoire
+
+.ck = Cook Islands
+
+.cl = Chile
+
+.cm = Cameroon - no registrar - wildcarded
+
+.cn = China
+
+.co = Colombia
+
+.cr = Costa Rica
+
+.cs = (former) Serbia and Montenegro - no registrar - see: .me
+(.cs was also formerly the ISO_3166-1 code for Czechoslovakia, now .cs is closed.)
+
+.cu = Cuba - no registrar
+
+.cv = Cape Verde - no registrar
+
+.cx = Christmas Island
+
+.cy = Cyprus
+
+.cz = Czech Republic
+
+.dd = East Germany (obsolete)
+
+.de = Germany
+
+.dj = Djibouti - no information
+
+.dk = Denmark
+
+.dm = Dominica
+
+.do = Dominican Republic
+
+.dz = Algeria - no registrar
+
+.ec = Ecuador
+
+.ee = Estonia
+
+.eg = Egypt - DETAILS
+
+.eh = Western Sahara - no registrar
+
+.er = Eritrea - no registrar
+
+.es = Spain
+
+.et = Ethiopia
+
+.eu = European Union - DETAILS
+
+.fi = Finland
+
+.fj = Fiji
+
+.fk = Falkland Islands (Malvinas)
+
+.fm = Micronesia, Federal State of
+
+.fo = Faroe Islands
+
+.fr = France
+
+.ga = Gabon - no registrar
+
+.gb = Great Britain (United Kingdom) - reserved, see .uk
+
+.gd = Grenada
+
+.ge = Georgia
+
+.gf = French Guiana
+
+.gg = Guernsey
+
+.gh = Ghana
+
+.gi = Gibraltar
+
+.gl = Greenland
+
+.gm = Gambia
+
+.gn = Guinea
+
+.gp = Guadeloupe - no information
+
+.gq = Equatorial Guinea - no information
+
+.gr = Greece
+
+.gs = South Georgia and the
+South Sandwich Islands
+
+.gt = Guatemala
+
+.gu = Guam
+
+.gw = Guinea-Bissau - no registrar
+
+.gy = Guyana - no registrar
+
+.hk = Hong Kong
+
+.hm = Heard and McDonald Islands
+
+.hn = Honduras
+
+.hr = Croatia/Hrvatska
+
+.ht = Haiti - no registrar
+
+.hu = Hungary
+
+.id = Indonesia - no information
+
+.ie = Ireland
+
+.il = Israel
+
+.im = Isle of Man
+
+.in = India
+
+.io = British Indian Ocean Territory
+
+.iq = Iraq - no registrar
+
+.ir = Islamic Republic of Iran
+
+.is = Iceland
+
+.it = Italy
+
+.je = Jersey
+
+.jm = Jamaica - no registrar
+
+.jo = Jordan
+
+.jp = Japan
+
+.ke = Kenya
+
+.kg = Kyrgyzstan - no registrar
+
+.kh = Cambodia
+
+.ki = Kiribati
+
+.km = Comoros
+
+.kn = Saint Kitts and Nevis - no registrar
+
+.kp = Democratic People's Republic of Korea
+(North) - no registrar
+
+.kr = Republic of Korea (South)
+
+.kw = Kuwait - no registrar
+
+.ky = Cayman Islands
+
+.kz = Kazakhstan
+
+.la = Lao People's Democratic Republic (Laos)
+... DETAILS
+
+.lb = Lebanon
+
+.lc = Saint Lucia
+
+.li = Liechtenstein
+
+.lk = Sri Lanka
+
+.lr = Liberia
+
+.ls = Lesotho - no registrar
+
+.lt = Lithuania
+
+.lu = Luxembourg
+
+.lv = Latvia
+
+.ly = Libyan Arab Jamahiriya (Libya)
+
+.ma = Morocco
+
+.mc = Monaco
+
+.md = Moldova
+
+.me = Montenegro
+
+.mg = Madagascar
+
+.mh = Marshall Islands
+
+.mk = Macedonia
+
+.ml = Mali - no information
+
+.mm = Myanmar (formerly Burma) - no registrar
+
+.mn = Mongolia
+
+.mo = Macau
+
+.mp = Northern Mariana Islands
+
+.mq = Martinique - no information
+
+.mr = Mauritania
+
+.ms = Montserrat
+
+.mt = Malta
+
+.mu = Mauritius
+
+.mv = Maldives - no registrar
+
+.mw = Malawi
+
+.mx = Mexico
+
+.my = Malaysia
+
+.mz = Mozambique - no registrar
+
+.na = Namibia
+
+.nc = New Caledonia
+
+.ne = Niger - no information
+
+.nf = Norfolk Island
+
+.ng = Nigeria
+
+.ni = Nicaragua
+
+.nl = Netherlands
+
+.no = Norway
+
+.np = Nepal
+
+.nr = Nauru
+
+.nu = Niue
+
+.nz = New Zealand
+
+.om = Oman - Omantel.net.om not functioning
+
+.pa = Panama
+
+.pe = Peru
+
+.pf = French Polynesia - no registrar
+
+.pg = Papua New Guinea - no registrar
+
+.ph = Philippines
+
+.pk = Pakistan
+
+.pl = Poland
+
+.pm = Saint Pierre and Miquelon - not available
+
+.pn = Pitcairn Island
+
+.pr = Puerto Rico
+
+.ps = Palestinian Territories
+
+.pt = Portugal
+
+.pw = Palau
+
+.py = Paraguay
+
+.qa = Qatar
+
+.re = Reunion Island
+
+.ro = Romania
+
+.rs = Serbia - no registrar
+
+.ru = Russian Federation
+
+.rw = Rwanda
+
+.sa = Saudi Arabia
+
+.sb = Solomon Islands
+
+.sc = Seychelles
+
+.sd = Sudan
+
+.se = Sweden
+
+.sg = Singapore
+
+.sh = Saint Helena
+
+.si = Slovenia
+
+.sj = Svalbard and Jan Mayen Islands - not in use
+
+.sk = Slovak Republic
+
+.sl = Sierra Leone
+
+.sm = San Marino
+
+.sn = Senegal - no registrar
+
+.so = Somalia - no registrar
+
+.sr = Suriname
+
+.st = Sao Tome and Principe
+
+.su = Soviet Union
+
+.sv = El Salvador
+
+.sy = Syrian Arab Republic
+
+.sz = Swaziland
+
+.tc = Turks and Caicos Islands - no registrar
+
+.td = Chad - no registrar
+
+.tf = French Southern Territories - no registrar
+
+.tg = Togo
+
+.th = Thailand
+
+.tj = Tajikistan
+
+.tk = Tokelau
+
+.tl = Timor-Leste
+
+.tm = Turkmenistan
+
+.tn = Tunisia
+
+.to = Tonga
+
+.tp = East Timor - Closed. See: Timor-Leste
+
+.tr = Turkey
+
+.tt = Trinidad and Tobago
+
+.tv = Tuvalu
+
+.tw = Taiwan
+
+.tz = Tanzania
+
+.ua = Ukraine
+
+.ug = Uganda
+
+.uk = United Kingdom
+
+.um = United States Minor Outlying Islands
+- Withdrawn, no domains exist.
+
+.us = United States (USA)
+
+.uy = Uruguay
+
+.uz = Uzbekistan
+
+.va = Holy See (Vatican City State)- no registrar
+
+.vc = Saint Vincent and the Grenadines
+
+.ve = Venezuela
+
+.vg = British Virgin Islands
+
+.vi = U.S. Virgin Islands
+
+.vn = Vietnam
+
+.vu = Vanuatu
+
+.wf = Wallis and Futuna Islands - no registrar
+
+.ws = Western Samoa
+
+.ye = Yemen - no registrar
+
+.yt = Mayotte - no registrar
+
+.yu = Yugoslavia Withdrawn in favor of .me and .rs
+
+.za = South Africa
+
+.zm = Zambia - no registrar
+
+.zr = Zaire - Obsolete
+now: The Democratic Republic of the Congo (.cd)
+
+.zw = Zimbabwe - no registrar
diff --git a/inputs/dns_servers.txt.bak b/inputs/dns_servers.txt.bak
new file mode 100644
index 0000000..844e8d5
--- /dev/null
+++ b/inputs/dns_servers.txt.bak
@@ -0,0 +1,6 @@
+209.244.0.3
+209.244.0.4
+208.67.222.222
+208.67.220.220
+156.154.70.1
+156.154.71.1
diff --git a/inputs/dns_servers.txt.bak2 b/inputs/dns_servers.txt.bak2
new file mode 100644
index 0000000..0c4b6f6
--- /dev/null
+++ b/inputs/dns_servers.txt.bak2
@@ -0,0 +1 @@
+192.168.1.1
diff --git a/inputs/example_exp_list.txt b/inputs/example_exp_list.txt
new file mode 100644
index 0000000..42ab976
--- /dev/null
+++ b/inputs/example_exp_list.txt
@@ -0,0 +1,3 @@
+86.59.30.36
+38.229.72.14
+38.229.72.16
diff --git a/inputs/major_isp_dns_servers.txt b/inputs/major_isp_dns_servers.txt
new file mode 100644
index 0000000..36b8098
--- /dev/null
+++ b/inputs/major_isp_dns_servers.txt
@@ -0,0 +1,796 @@
+######################################
+## ISP DNS SERVERS BY COUNTRY
+######################################
+## USA
+######################################
+##
+## AT&T
+68.94.156.1
+68.94.157.1
+##
+## ACS Alaska
+209.193.4.7
+209.112.160.2
+##
+## AOL
+205.188.146.145
+##
+## Century Link
+207.14.235.234
+67.238.98.162
+74.4.19.187
+##
+## Charter
+24.296.64.53
+##
+## Cincinnati Bell, ZoomTown
+216.68.4.10
+216.68.5.10
+##
+## Cincinnati Bell, Fuze
+216.68.1.100
+216.68.2.100
+##
+## Comcast, General DNS Servers
+## West Coast
+68.87.85.98
+2001:558:1004:6:68:87:85:98
+## East Coast
+68.87.64.146
+2001:558:1002:B:68:87:64:146
+##
+## Comcast, Albuquerque
+68.87.85.98
+68.87.69.146
+2001:558:1004:6:68:87:85:98
+2001:558:100C:D:68:87:69:146
+##
+## Comcast, Atlanta
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Augusta
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Battle Creek
+68.87.77.130
+68.87.72.130
+2001:558:1016:C:68:87:77:130
+2001:558:100E:4:68:87:72:130
+##
+## Comcast, Charleston
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Chattanooga
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Chesterfield
+68.87.73.242
+68.87.71.226
+2001:558:1010:8:68:87:73:242
+2001:558:1000:E:68:87:71:226
+##
+## Comcast, Chicago
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Colorado
+68.87.85.98
+68.87.69.146
+2001:558:1004:6:68:87:85:98
+2001:558:100C:D:68:87:69:146
+##
+## Comcast, Connecticut
+68.87.71.226
+68.87.73.242
+2001:558:1000:E:68:87:71:226
+2001:558:1010:8:68:87:73:242
+##
+## Comcast, Dallas
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, East Tennessee
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Elyria
+68.87.75.194
+68.87.64.146
+2001:558:1001:C:68:87:75:194
+2001:558:1002:B:68:87:64:146
+##
+## Comcast, Fort Wayne
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Fresno
+68.87.76.178
+68.87.78.130
+2001:558:1014:F:68:87:76:178
+2001:558:1018:6:68:87:78:130
+##
+## Comcast, Hattiesburg-Laurel
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Huntsville
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Illinois
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Independence
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Indianapolis
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Jacksonville
+68.87.74.162
+68.87.68.162
+2001:558:1012:6:68:87:74:162
+2001:558:100A:4:68:87:68:162
+##
+## Comcast, Knoxville
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Lake County
+68.87.74.162
+68.87.68.162
+2001:558:1012:6:68:87:74:162
+2001:558:100A:4:68:87:68:162
+##
+## Comcast, Little Rock
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Los Angeles
+68.87.76.178
+68.87.78.130
+2001:558:1014:F:68:87:76:178
+2001:558:1018:6:68:87:78:130
+##
+## Comcast, Massachusetts
+68.87.71.226
+68.87.73.242
+2001:558:1000:E:68:87:71:226
+2001:558:1010:8:68:87:73:242
+##
+## Comcast, Meridian
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+## Comcast, Miami
+68.87.74.162
+68.87.68.162
+2001:558:1012:6:68:87:74:162
+2001:558:100A:4:68:87:68:162
+##
+## Comcast, Michigan
+68.87.77.130
+68.87.72.130
+2001:558:1016:C:68:87:77:130
+2001:558:100E:4:68:87:72:130
+## Comcast, Minnesota
+68.87.77.130
+68.87.72.130
+2001:558:1016:C:68:87:77:130
+2001:558:100E:4:68:87:72:130
+##
+## Comcast, Mobile
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Muncie
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Naples
+68.87.74.162
+68.87.68.162
+2001:558:1012:6:68:87:74:162
+2001:558:100A:4:68:87:68:162
+##
+## Comcast, Nashville
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, New England
+68.87.71.226
+68.87.73.242
+2001:558:1000:E:68:87:71:226
+2001:558:1010:8:68:87:73:242
+##
+## Comcast, Olathe
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Oregon
+68.87.69.146
+68.87.85.98
+2001:558:100C:D:68:87:69:146
+2001:558:1004:6:68:87:85:98
+##
+## Comcast, Paducah
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Panama City
+68.87.74.162
+68.87.68.162
+2001:558:1012:6:68:87:74:162
+2001:558:100A:4:68:87:68:162
+##
+## Comcast, Pennsylvania
+68.87.75.194
+68.87.64.146
+2001:558:1001:C:68:87:75:194
+2001:558:1002:B:68:87:64:146
+##
+## Comcast, Philadelphia
+68.87.64.146
+68.87.75.194
+2001:558:1002:B:68:87:64:146
+2001:558:1001:C:68:87:75:194
+##
+## Comcast, Pima
+68.87.85.98
+68.87.69.146
+2001:558:1004:6:68:87:85:98
+2001:558:100C:D:68:87:69:146
+##
+## Comcast, Richmond
+68.87.73.242
+68.87.71.226
+2001:558:1010:8:68:87:73:242
+2001:558:1000:E:68:87:71:226
+##
+## Comcast, Sacramento
+68.87.76.178
+68.87.78.130
+2001:558:1014:F:68:87:76:178
+2001:558:1018:6:68:87:78:130
+##
+## Comcast, San Francisco Bay Area
+68.87.76.178
+68.87.78.130
+2001:558:1014:F:68:87:76:178
+2001:558:1018:6:68:87:78:130
+##
+## Comcast, Savannah
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, South Bend
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Spokane
+68.87.69.146
+68.87.85.98
+2001:558:100C:D:68:87:69:146
+2001:558:1004:6:68:87:85:98
+##
+## Comcast, Stockton
+68.87.76.178
+68.87.78.130
+2001:558:1014:F:68:87:76:178
+2001:558:1018:6:68:87:78:130
+##
+## Comcast, Tallahassee
+68.87.74.162
+68.87.68.162
+2001:558:1012:6:68:87:74:162
+2001:558:100A:4:68:87:68:162
+##
+## Comcast, Texas
+68.87.85.98
+68.87.69.146
+2001:558:1004:6:68:87:85:98
+2001:558:100C:D:68:87:69:146
+##
+## Comcast, Tuscaloosa
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Utah
+68.87.85.98
+68.87.69.146
+2001:558:1004:6:68:87:85:98
+2001:558:100C:D:68:87:69:146
+##
+## Comcast, Washington
+68.87.69.146
+68.87.85.98
+2001:558:100C:D:68:87:69:146
+2001:558:1004:6:68:87:85:98
+##
+## Comcast, Washington DC
+68.87.73.242
+68.87.71.226
+2001:558:1010:8:68:87:73:242
+2001:558:1000:E:68:87:71:226
+##
+## Comcast, West Florida
+68.87.74.162
+68.87.68.162
+2001:558:1012:6:68:87:74:162
+2001:558:100A:4:68:87:68:162
+##
+## Earthlink
+207.69.188.185
+207.69.188.186
+207.69.188.187
+##
+############################
+## UK
+############################
+##
+## AAISP
+217.169.20.20
+217.169.20.21
+2001:8b0::2020
+2001:8b0::2021
+##
+## AOL Broadband
+64.12.51.132
+149.174.221.8
+205.188.157.232
+205.188.146.145
+##
+## BE Unlimited
+87.194.0.51
+87.194.0.52
+87.194.0.66
+87.194.0.67
+##
+## BT Broadband
+62.6.40.178
+62.6.40.162
+194.72.9.38
+194.72.9.34
+194.72.0.98
+194.72.0.114
+194.74.65.68
+194.74.65.69
+##
+## Bulldog Broadband North
+212.158.248.5
+212.158.248.6
+##
+## Bulldog Broadband South
+83.146.21.5
+83.146.21.6
+##
+## Bytel
+80.76.204.35
+80.76.200.69
+##
+## Clara.net
+195.8.69.7
+195.8.69.12
+##
+## Datanet
+80.68.34.6
+77.241.177.2
+80.68.34.8
+##
+## Demon Internet
+158.152.1.58
+158.152.1.43
+##
+## Eclipse Internet
+212.104.130.9
+212.104.130.65
+##
+## Entanet
+195.74.102.146
+195.74.102.147
+##
+## Exa Networks
+82.219.4.24
+82.219.4.25
+##
+## Fast
+78.143.192.10
+78.143.192.20
+##
+## Freedom 2 Surf
+194.106.56.6
+194.106.33.42
+##
+## IDNet
+212.69.36.3
+212.69.36.2
+212.69.40.2
+##
+## Karoo
+212.50.160.100
+213.249.130.100
+##
+## Madasafish
+80.189.94.2
+80.189.92.2
+##
+## Merula
+217.146.97.10
+217.146.105.2
+##
+## Metronet
+213.162.97.65
+213.162.97.66
+##
+## Namesco
+195.7.224.57
+195.7.224.143
+##
+## NewNet
+212.87.64.10
+212.87.64.11
+##
+## Nildram
+213.208.106.212
+213.208.106.213
+##
+## O2
+87.194.0.51
+87.194.0.52
+87.194.0.66
+87.194.0.67
+##
+## Onetel
+212.67.96.129
+212.67.96.130
+##
+## Onyx
+194.176.65.5
+195.97.231.31
+##
+## Oosha
+213.190.161.254
+213.190.161.250
+213.190.160.9
+##
+## Orange
+195.92.195.94
+195.92.195.95
+##
+## Pipex
+62.241.160.200
+158.43.240.4
+212.74.112.66
+212.74.112.67
+##
+## PlusNet
+212.159.13.49
+212.159.13.50
+212.159.6.9
+212.159.6.10
+##
+## Powernet
+195.60.0.1
+195.60.0.5
+##
+## Prodigy
+198.83.19.241
+198.83.19.244
+207.115.59.241
+207.115.59.244
+##
+## SAQ
+195.2.130.209
+195.2.156.67
+##
+## Scotnet
+217.16.223.30
+217.16.223.31
+##
+## Sky Broadband
+87.86.189.16
+87.86.189.17
+195.40.1.36
+##
+## Skymarket
+212.84.173.66
+212.84.173.82
+##
+## Supanet
+213.40.66.126
+213.40.130.126
+##
+## TalkTalk
+62.24.199.13
+62.24.199.23
+62.24.128.18
+62.24.128.17
+##
+## Tesco
+194.168.4.100
+194.168.8.100
+##
+## Timewarp
+217.149.108.10
+217.149.108.11
+##
+## Timico
+195.54.225.10
+195.54.226.10
+##
+## Tiscali
+212.74.112.66
+212.74.112.67
+80.255.252.50
+80.255.252.58
+##
+## Topletter
+77.95.114.100
+77.95.112.1
+##
+## UK Online
+212.135.1.36
+195.40.1.36
+##
+## Utility Warehouse
+62.24.128.17
+62.24.128.18
+##
+## UTV Internet
+194.46.192.141
+194.46.192.142
+##
+## Virgin Media
+194.168.4.100
+194.168.8.100
+##
+## VISPA
+62.24.228.9
+62.24.228.10
+##
+## Zen Internet
+212.23.3.100
+212.23.6.100
+##
+####################################
+## NEW ZEALAND
+####################################
+##
+## Xtra
+202.27.158.40
+202.27.156.72
+##
+####################################
+## AUSTRALIA
+####################################
+##
+## AANet, Victoria
+203.24.100.125
+203.123.94.40
+##
+## AANet, South Australia
+203.24.100.125
+203.123.69.15
+##
+## AANet, Western Australia
+203.24.100.125
+202.76.136.40
+##
+## AANet, Queensland
+203.24.100.125
+202.76.170.40
+##
+## AANet, New South Wales
+203.24.100.125
+203.123.69.15
+##
+## AAPT, New South Wales
+192.189.54.33
+203.8.183.1
+##
+## AAPT, Victoria
+192.189.54.17
+203.8.183.1
+##
+## AAPT, Queensland
+192.189.54.33
+203.8.183.1
+##
+## AAPT, Tasmania
+192.189.54.17
+203.8.183.1
+##
+## AAPT, Australian Capital Territory
+192.189.54.33
+203.8.183.1
+##
+## AAPT, South Australia
+192.189.54.17
+203.8.183.1
+##
+## AAPT, Northern Territory
+192.189.54.17
+203.8.183.1
+##
+## AAPT, Western Australia
+192.189.54.17
+203.8.183.1
+##
+## Adam
+122.49.191.252
+122.49.191.253
+##
+## Amnet
+203.161.127.1
+203.153.224.42
+##
+## Comcen
+203.23.236.66
+203.23.236.69
+##
+## Dodo
+203.220.32.121
+203.220.32.122
+203.220.32.123
+##
+## Exetel
+220.233.0.4
+220.233.0.3
+##
+## iiNet
+203.0.178.191
+203.0.178.191
+##
+## Internode
+192.231.203.132
+192.231.203.3
+2001:44b8:1::1
+2001:44b8:2::2
+##
+## iPrimus, New South Wales
+203.134.64.66
+203.134.65.66
+##
+## iPrimus, Victoria
+203.134.24.70
+203.134.26.70
+##
+## iPrimus, Queensland
+203.134.12.90
+203.134.102.90
+##
+## iPrimus, Western Australia
+203.134.17.90
+211.26.25.90
+##
+## Netspace
+210.15.254.240
+210.15.254.241
+##
+## Optus
+211.29.132.12
+198.142.0.51
+##
+## People Telecom, New South Wales
+202.154.123.97
+218.214.227.3
+##
+## People Telecom, Northern Territory
+202.154.92.5
+218.214.228.97
+##
+## People Telecom, Queensland
+218.214.227.3
+202.154.123.97
+##
+## People Telecom, South Australia
+218.214.228.97
+218.214.17.1
+##
+## People Telecom, Victoria
+218.214.17.1
+218.214.228.97
+##
+## People Telecom, Western Australia
+202.154.92.5
+218.214.228.97
+##
+## Spin Internet
+203.23.236.66
+203.23.236.69
+##
+## Telstra BigPond, New South Wales
+61.9.194.49
+61.9.195.193
+##
+## Telstra BigPond, Victoria
+61.9.133.193
+61.9.134.49
+##
+## Telstra BigPond, Queensland
+61.9.211.33
+61.9.211.1
+##
+## Telstra BigPond, Tasmania
+61.9.188.33
+61.9.134.49
+##
+## Telstra BigPond, Australian Capital Territory
+61.9.207.1
+61.9.195.193
+##
+## Telstra BigPond, South Australia
+61.9.226.33
+61.9.194.49
+##
+## Telstra BigPond, Northern Territory
+61.9.226.33
+61.9.194.49
+##
+## Telstra BigPond, Western Australia
+61.9.242.33
+61.9.226.33
+##
+## TPG
+203.12.160.35
+203.12.160.36
+203.12.160.37
+##
+## Westnet
+203.21.20.20
+203.10.1.9
+########################################
diff --git a/inputs/short_hostname_list.txt b/inputs/short_hostname_list.txt
new file mode 100644
index 0000000..f13c702
--- /dev/null
+++ b/inputs/short_hostname_list.txt
@@ -0,0 +1,7 @@
+torproject.org
+google.com
+ooni.nu
+torrentz.eu
+anarchyplanet.org
+riseup.net
+indymedia.org
diff --git a/inputs/tld-list-cc.txt b/inputs/tld-list-cc.txt
new file mode 100644
index 0000000..57e0cc8
--- /dev/null
+++ b/inputs/tld-list-cc.txt
@@ -0,0 +1,511 @@
+.ac = Ascension Island
+
+.ad = Andorra
+
+.ae = United Arab Emirates
+
+.af = Afghanistan
+
+.ag = Antigua and Barbuda
+
+.ai = Anguilla
+
+.al = Albania
+
+.am = Armenia
+
+.an = Netherlands Antilles
+
+.ao = Angola
+
+.aq = Antarctica - no registrar
+
+.ar = Argentina
+
+.as = American Samoa
+
+.at = Austria
+
+.au = Australia
+
+.aw = Aruba - no registrar
+
+.ax = Aland Islands
+
+.az = Azerbaijan - no registrar
+
+.ba = Bosnia and Herzegovina
+
+.bb = Barbados
+
+.bd = Bangladesh - no registrar
+
+.be = Belgium
+
+.bf = Burkina Faso - no registrar
+
+.bg = Bulgaria
+
+.bh = Bahrain
+
+.bi = Burundi
+
+.bj = Benin ... (little info) DETAILS
+
+.bm = Bermuda
+
+.bn = Brunei Darussalam
+
+.bo = Bolivia
+
+.br = Brazil
+
+.bs = Bahamas
+
+.bt = Bhutan
+
+.bv = Bouvet Island - not in use
+
+.bw = Botswana - no registrar
+
+.by = Belarus
+
+.bz = Belize
+
+.ca = Canada
+
+.cc = Cocos (Keeling) Islands
+
+.cd = The Democratic Republic of the Congo
+
+.cf = Central African Republic - no registrar
+
+.cg = Republic of Congo
+
+.ch = Switzerland
+
+.ci = Cote d'Ivoire
+
+.ck = Cook Islands
+
+.cl = Chile
+
+.cm = Cameroon - no registrar - wildcarded
+
+.cn = China
+
+.co = Colombia
+
+.cr = Costa Rica
+
+.cs = (former) Serbia and Montenegro - no registrar - see: .me
+(.cs was also formerly the ISO_3166-1 code for Czechoslovakia, now .cs is closed.)
+
+.cu = Cuba - no registrar
+
+.cv = Cape Verde - no registrar
+
+.cx = Christmas Island
+
+.cy = Cyprus
+
+.cz = Czech Republic
+
+.dd = East Germany (obsolete)
+
+.de = Germany
+
+.dj = Djibouti - no information
+
+.dk = Denmark
+
+.dm = Dominica
+
+.do = Dominican Republic
+
+.dz = Algeria - no registrar
+
+.ec = Ecuador
+
+.ee = Estonia
+
+.eg = Egypt - DETAILS
+
+.eh = Western Sahara - no registrar
+
+.er = Eritrea - no registrar
+
+.es = Spain
+
+.et = Ethiopia
+
+.eu = European Union - DETAILS
+
+.fi = Finland
+
+.fj = Fiji
+
+.fk = Falkland Islands (Malvinas)
+
+.fm = Micronesia, Federal State of
+
+.fo = Faroe Islands
+
+.fr = France
+
+.ga = Gabon - no registrar
+
+.gb = Great Britain (United Kingdom) - reserved, see .uk
+
+.gd = Grenada
+
+.ge = Georgia
+
+.gf = French Guiana
+
+.gg = Guernsey
+
+.gh = Ghana
+
+.gi = Gibraltar
+
+.gl = Greenland
+
+.gm = Gambia
+
+.gn = Guinea
+
+.gp = Guadeloupe - no information
+
+.gq = Equatorial Guinea - no information
+
+.gr = Greece
+
+.gs = South Georgia and the
+South Sandwich Islands
+
+.gt = Guatemala
+
+.gu = Guam
+
+.gw = Guinea-Bissau - no registrar
+
+.gy = Guyana - no registrar
+
+.hk = Hong Kong
+
+.hm = Heard and McDonald Islands
+
+.hn = Honduras
+
+.hr = Croatia/Hrvatska
+
+.ht = Haiti - no registrar
+
+.hu = Hungary
+
+.id = Indonesia - no information
+
+.ie = Ireland
+
+.il = Israel
+
+.im = Isle of Man
+
+.in = India
+
+.io = British Indian Ocean Territory
+
+.iq = Iraq - no registrar
+
+.ir = Islamic Republic of Iran
+
+.is = Iceland
+
+.it = Italy
+
+.je = Jersey
+
+.jm = Jamaica - no registrar
+
+.jo = Jordan
+
+.jp = Japan
+
+.ke = Kenya
+
+.kg = Kyrgyzstan - no registrar
+
+.kh = Cambodia
+
+.ki = Kiribati
+
+.km = Comoros
+
+.kn = Saint Kitts and Nevis - no registrar
+
+.kp = Democratic People's Republic of Korea
+(North) - no registrar
+
+.kr = Republic of Korea (South)
+
+.kw = Kuwait - no registrar
+
+.ky = Cayman Islands
+
+.kz = Kazakhstan
+
+.la = Lao People's Democratic Republic (Laos)
+... DETAILS
+
+.lb = Lebanon
+
+.lc = Saint Lucia
+
+.li = Liechtenstein
+
+.lk = Sri Lanka
+
+.lr = Liberia
+
+.ls = Lesotho - no registrar
+
+.lt = Lithuania
+
+.lu = Luxembourg
+
+.lv = Latvia
+
+.ly = Libyan Arab Jamahiriya (Libya)
+
+.ma = Morocco
+
+.mc = Monaco
+
+.md = Moldova
+
+.me = Montenegro
+
+.mg = Madagascar
+
+.mh = Marshall Islands
+
+.mk = Macedonia
+
+.ml = Mali - no information
+
+.mm = Myanmar (formerly Burma) - no registrar
+
+.mn = Mongolia
+
+.mo = Macau
+
+.mp = Northern Mariana Islands
+
+.mq = Martinique - no information
+
+.mr = Mauritania
+
+.ms = Montserrat
+
+.mt = Malta
+
+.mu = Mauritius
+
+.mv = Maldives - no registrar
+
+.mw = Malawi
+
+.mx = Mexico
+
+.my = Malaysia
+
+.mz = Mozambique - no registrar
+
+.na = Namibia
+
+.nc = New Caledonia
+
+.ne = Niger - no information
+
+.nf = Norfolk Island
+
+.ng = Nigeria
+
+.ni = Nicaragua
+
+.nl = Netherlands
+
+.no = Norway
+
+.np = Nepal
+
+.nr = Nauru
+
+.nu = Niue
+
+.nz = New Zealand
+
+.om = Oman - Omantel.net.om not functioning
+
+.pa = Panama
+
+.pe = Peru
+
+.pf = French Polynesia - no registrar
+
+.pg = Papua New Guinea - no registrar
+
+.ph = Philippines
+
+.pk = Pakistan
+
+.pl = Poland
+
+.pm = Saint Pierre and Miquelon - not available
+
+.pn = Pitcairn Island
+
+.pr = Puerto Rico
+
+.ps = Palestinian Territories
+
+.pt = Portugal
+
+.pw = Palau
+
+.py = Paraguay
+
+.qa = Qatar
+
+.re = Reunion Island
+
+.ro = Romania
+
+.rs = Serbia - no registrar
+
+.ru = Russian Federation
+
+.rw = Rwanda
+
+.sa = Saudi Arabia
+
+.sb = Solomon Islands
+
+.sc = Seychelles
+
+.sd = Sudan
+
+.se = Sweden
+
+.sg = Singapore
+
+.sh = Saint Helena
+
+.si = Slovenia
+
+.sj = Svalbard and Jan Mayen Islands - not in use
+
+.sk = Slovak Republic
+
+.sl = Sierra Leone
+
+.sm = San Marino
+
+.sn = Senegal - no registrar
+
+.so = Somalia - no registrar
+
+.sr = Suriname
+
+.st = Sao Tome and Principe
+
+.su = Soviet Union
+
+.sv = El Salvador
+
+.sy = Syrian Arab Republic
+
+.sz = Swaziland
+
+.tc = Turks and Caicos Islands - no registrar
+
+.td = Chad - no registrar
+
+.tf = French Southern Territories - no registrar
+
+.tg = Togo
+
+.th = Thailand
+
+.tj = Tajikistan
+
+.tk = Tokelau
+
+.tl = Timor-Leste
+
+.tm = Turkmenistan
+
+.tn = Tunisia
+
+.to = Tonga
+
+.tp = East Timor - Closed. See: Timor-Leste
+
+.tr = Turkey
+
+.tt = Trinidad and Tobago
+
+.tv = Tuvalu
+
+.tw = Taiwan
+
+.tz = Tanzania
+
+.ua = Ukraine
+
+.ug = Uganda
+
+.uk = United Kingdom
+
+.um = United States Minor Outlying Islands
+- Withdrawn, no domains exist.
+
+.us = United States (USA)
+
+.uy = Uruguay
+
+.uz = Uzbekistan
+
+.va = Holy See (Vatican City State)- no registrar
+
+.vc = Saint Vincent and the Grenadines
+
+.ve = Venezuela
+
+.vg = British Virgin Islands
+
+.vi = U.S. Virgin Islands
+
+.vn = Vietnam
+
+.vu = Vanuatu
+
+.wf = Wallis and Futuna Islands - no registrar
+
+.ws = Western Samoa
+
+.ye = Yemen - no registrar
+
+.yt = Mayotte - no registrar
+
+.yu = Yugoslavia Withdrawn in favor of .me and .rs
+
+.za = South Africa
+
+.zm = Zambia - no registrar
+
+.zr = Zaire - Obsolete
+now: The Democratic Republic of the Congo (.cd)
+
+.zw = Zimbabwe - no registrar
diff --git a/inputs/tld-list-mozilla.txt b/inputs/tld-list-mozilla.txt
new file mode 100644
index 0000000..7902eee
--- /dev/null
+++ b/inputs/tld-list-mozilla.txt
@@ -0,0 +1,5 @@
+--2012-05-19 13:07:53-- https://mxr.mozilla.org/mozilla-central/source/netwerk/dns/effective_tld_na…
+Resolving mxr.mozilla.org (mxr.mozilla.org) 63.245.215.42
+Connecting to mxr.mozilla.org (mxr.mozilla.org)|63.245.215.42|:443... connected.
+ERROR: The certificate of `mxr.mozilla.org' is not trusted.
+ERROR: The certificate of `mxr.mozilla.org' hasn't got a known issuer.
diff --git a/inputs/top-1m.txt.bak2 b/inputs/top-1m.txt.bak2
new file mode 100644
index 0000000..293e661
--- /dev/null
+++ b/inputs/top-1m.txt.bak2
@@ -0,0 +1,11 @@
+1,torproject.org
+2,google.com
+3,facebook.com
+4,youtube.com
+5,yahoo.com
+6,baidu.com
+7,wikipedia.org
+8,live.com
+9,blogspot.com
+10,twitter.com
+11,qq.com
diff --git a/lists/captive_portal_tests.txt.good b/lists/captive_portal_tests.txt.good
deleted file mode 100644
index 1bd016f..0000000
--- a/lists/captive_portal_tests.txt.good
+++ /dev/null
@@ -1,4 +0,0 @@
-
-http://ooni.nu, Open Observatory of Network Interference, 200
-http://www.patternsinthevoid.net/2CDB8B35pub.asc, mQINBE5qkHABEADVnasCm9w9hUff1E4iKnzcAdp4lx6XU5USmYdwKg2RQt2VFqWQ, 200
-http://www.google.com, Search the world's information, 200
diff --git a/lists/cctld.txt b/lists/cctld.txt
deleted file mode 100644
index 57e0cc8..0000000
--- a/lists/cctld.txt
+++ /dev/null
@@ -1,511 +0,0 @@
-.ac = Ascension Island
-
-.ad = Andorra
-
-.ae = United Arab Emirates
-
-.af = Afghanistan
-
-.ag = Antigua and Barbuda
-
-.ai = Anguilla
-
-.al = Albania
-
-.am = Armenia
-
-.an = Netherlands Antilles
-
-.ao = Angola
-
-.aq = Antarctica - no registrar
-
-.ar = Argentina
-
-.as = American Samoa
-
-.at = Austria
-
-.au = Australia
-
-.aw = Aruba - no registrar
-
-.ax = Aland Islands
-
-.az = Azerbaijan - no registrar
-
-.ba = Bosnia and Herzegovina
-
-.bb = Barbados
-
-.bd = Bangladesh - no registrar
-
-.be = Belgium
-
-.bf = Burkina Faso - no registrar
-
-.bg = Bulgaria
-
-.bh = Bahrain
-
-.bi = Burundi
-
-.bj = Benin ... (little info) DETAILS
-
-.bm = Bermuda
-
-.bn = Brunei Darussalam
-
-.bo = Bolivia
-
-.br = Brazil
-
-.bs = Bahamas
-
-.bt = Bhutan
-
-.bv = Bouvet Island - not in use
-
-.bw = Botswana - no registrar
-
-.by = Belarus
-
-.bz = Belize
-
-.ca = Canada
-
-.cc = Cocos (Keeling) Islands
-
-.cd = The Democratic Republic of the Congo
-
-.cf = Central African Republic - no registrar
-
-.cg = Republic of Congo
-
-.ch = Switzerland
-
-.ci = Cote d'Ivoire
-
-.ck = Cook Islands
-
-.cl = Chile
-
-.cm = Cameroon - no registrar - wildcarded
-
-.cn = China
-
-.co = Colombia
-
-.cr = Costa Rica
-
-.cs = (former) Serbia and Montenegro - no registrar - see: .me
-(.cs was also formerly the ISO_3166-1 code for Czechoslovakia, now .cs is closed.)
-
-.cu = Cuba - no registrar
-
-.cv = Cape Verde - no registrar
-
-.cx = Christmas Island
-
-.cy = Cyprus
-
-.cz = Czech Republic
-
-.dd = East Germany (obsolete)
-
-.de = Germany
-
-.dj = Djibouti - no information
-
-.dk = Denmark
-
-.dm = Dominica
-
-.do = Dominican Republic
-
-.dz = Algeria - no registrar
-
-.ec = Ecuador
-
-.ee = Estonia
-
-.eg = Egypt - DETAILS
-
-.eh = Western Sahara - no registrar
-
-.er = Eritrea - no registrar
-
-.es = Spain
-
-.et = Ethiopia
-
-.eu = European Union - DETAILS
-
-.fi = Finland
-
-.fj = Fiji
-
-.fk = Falkland Islands (Malvinas)
-
-.fm = Micronesia, Federal State of
-
-.fo = Faroe Islands
-
-.fr = France
-
-.ga = Gabon - no registrar
-
-.gb = Great Britain (United Kingdom) - reserved, see .uk
-
-.gd = Grenada
-
-.ge = Georgia
-
-.gf = French Guiana
-
-.gg = Guernsey
-
-.gh = Ghana
-
-.gi = Gibraltar
-
-.gl = Greenland
-
-.gm = Gambia
-
-.gn = Guinea
-
-.gp = Guadeloupe - no information
-
-.gq = Equatorial Guinea - no information
-
-.gr = Greece
-
-.gs = South Georgia and the
-South Sandwich Islands
-
-.gt = Guatemala
-
-.gu = Guam
-
-.gw = Guinea-Bissau - no registrar
-
-.gy = Guyana - no registrar
-
-.hk = Hong Kong
-
-.hm = Heard and McDonald Islands
-
-.hn = Honduras
-
-.hr = Croatia/Hrvatska
-
-.ht = Haiti - no registrar
-
-.hu = Hungary
-
-.id = Indonesia - no information
-
-.ie = Ireland
-
-.il = Israel
-
-.im = Isle of Man
-
-.in = India
-
-.io = British Indian Ocean Territory
-
-.iq = Iraq - no registrar
-
-.ir = Islamic Republic of Iran
-
-.is = Iceland
-
-.it = Italy
-
-.je = Jersey
-
-.jm = Jamaica - no registrar
-
-.jo = Jordan
-
-.jp = Japan
-
-.ke = Kenya
-
-.kg = Kyrgyzstan - no registrar
-
-.kh = Cambodia
-
-.ki = Kiribati
-
-.km = Comoros
-
-.kn = Saint Kitts and Nevis - no registrar
-
-.kp = Democratic People's Republic of Korea
-(North) - no registrar
-
-.kr = Republic of Korea (South)
-
-.kw = Kuwait - no registrar
-
-.ky = Cayman Islands
-
-.kz = Kazakhstan
-
-.la = Lao People's Democratic Republic (Laos)
-... DETAILS
-
-.lb = Lebanon
-
-.lc = Saint Lucia
-
-.li = Liechtenstein
-
-.lk = Sri Lanka
-
-.lr = Liberia
-
-.ls = Lesotho - no registrar
-
-.lt = Lithuania
-
-.lu = Luxembourg
-
-.lv = Latvia
-
-.ly = Libyan Arab Jamahiriya (Libya)
-
-.ma = Morocco
-
-.mc = Monaco
-
-.md = Moldova
-
-.me = Montenegro
-
-.mg = Madagascar
-
-.mh = Marshall Islands
-
-.mk = Macedonia
-
-.ml = Mali - no information
-
-.mm = Myanmar (formerly Burma) - no registrar
-
-.mn = Mongolia
-
-.mo = Macau
-
-.mp = Northern Mariana Islands
-
-.mq = Martinique - no information
-
-.mr = Mauritania
-
-.ms = Montserrat
-
-.mt = Malta
-
-.mu = Mauritius
-
-.mv = Maldives - no registrar
-
-.mw = Malawi
-
-.mx = Mexico
-
-.my = Malaysia
-
-.mz = Mozambique - no registrar
-
-.na = Namibia
-
-.nc = New Caledonia
-
-.ne = Niger - no information
-
-.nf = Norfolk Island
-
-.ng = Nigeria
-
-.ni = Nicaragua
-
-.nl = Netherlands
-
-.no = Norway
-
-.np = Nepal
-
-.nr = Nauru
-
-.nu = Niue
-
-.nz = New Zealand
-
-.om = Oman - Omantel.net.om not functioning
-
-.pa = Panama
-
-.pe = Peru
-
-.pf = French Polynesia - no registrar
-
-.pg = Papua New Guinea - no registrar
-
-.ph = Philippines
-
-.pk = Pakistan
-
-.pl = Poland
-
-.pm = Saint Pierre and Miquelon - not available
-
-.pn = Pitcairn Island
-
-.pr = Puerto Rico
-
-.ps = Palestinian Territories
-
-.pt = Portugal
-
-.pw = Palau
-
-.py = Paraguay
-
-.qa = Qatar
-
-.re = Reunion Island
-
-.ro = Romania
-
-.rs = Serbia - no registrar
-
-.ru = Russian Federation
-
-.rw = Rwanda
-
-.sa = Saudi Arabia
-
-.sb = Solomon Islands
-
-.sc = Seychelles
-
-.sd = Sudan
-
-.se = Sweden
-
-.sg = Singapore
-
-.sh = Saint Helena
-
-.si = Slovenia
-
-.sj = Svalbard and Jan Mayen Islands - not in use
-
-.sk = Slovak Republic
-
-.sl = Sierra Leone
-
-.sm = San Marino
-
-.sn = Senegal - no registrar
-
-.so = Somalia - no registrar
-
-.sr = Suriname
-
-.st = Sao Tome and Principe
-
-.su = Soviet Union
-
-.sv = El Salvador
-
-.sy = Syrian Arab Republic
-
-.sz = Swaziland
-
-.tc = Turks and Caicos Islands - no registrar
-
-.td = Chad - no registrar
-
-.tf = French Southern Territories - no registrar
-
-.tg = Togo
-
-.th = Thailand
-
-.tj = Tajikistan
-
-.tk = Tokelau
-
-.tl = Timor-Leste
-
-.tm = Turkmenistan
-
-.tn = Tunisia
-
-.to = Tonga
-
-.tp = East Timor - Closed. See: Timor-Leste
-
-.tr = Turkey
-
-.tt = Trinidad and Tobago
-
-.tv = Tuvalu
-
-.tw = Taiwan
-
-.tz = Tanzania
-
-.ua = Ukraine
-
-.ug = Uganda
-
-.uk = United Kingdom
-
-.um = United States Minor Outlying Islands
-- Withdrawn, no domains exist.
-
-.us = United States (USA)
-
-.uy = Uruguay
-
-.uz = Uzbekistan
-
-.va = Holy See (Vatican City State)- no registrar
-
-.vc = Saint Vincent and the Grenadines
-
-.ve = Venezuela
-
-.vg = British Virgin Islands
-
-.vi = U.S. Virgin Islands
-
-.vn = Vietnam
-
-.vu = Vanuatu
-
-.wf = Wallis and Futuna Islands - no registrar
-
-.ws = Western Samoa
-
-.ye = Yemen - no registrar
-
-.yt = Mayotte - no registrar
-
-.yu = Yugoslavia Withdrawn in favor of .me and .rs
-
-.za = South Africa
-
-.zm = Zambia - no registrar
-
-.zr = Zaire - Obsolete
-now: The Democratic Republic of the Congo (.cd)
-
-.zw = Zimbabwe - no registrar
diff --git a/lists/dns_servers.txt.bak b/lists/dns_servers.txt.bak
deleted file mode 100644
index 844e8d5..0000000
--- a/lists/dns_servers.txt.bak
+++ /dev/null
@@ -1,6 +0,0 @@
-209.244.0.3
-209.244.0.4
-208.67.222.222
-208.67.220.220
-156.154.70.1
-156.154.71.1
diff --git a/lists/dns_servers.txt.bak2 b/lists/dns_servers.txt.bak2
deleted file mode 100644
index 0c4b6f6..0000000
--- a/lists/dns_servers.txt.bak2
+++ /dev/null
@@ -1 +0,0 @@
-192.168.1.1
diff --git a/lists/example_exp_list.txt b/lists/example_exp_list.txt
deleted file mode 100644
index 42ab976..0000000
--- a/lists/example_exp_list.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-86.59.30.36
-38.229.72.14
-38.229.72.16
diff --git a/lists/major_isp_dns_servers.txt b/lists/major_isp_dns_servers.txt
deleted file mode 100644
index 36b8098..0000000
--- a/lists/major_isp_dns_servers.txt
+++ /dev/null
@@ -1,796 +0,0 @@
-######################################
-## ISP DNS SERVERS BY COUNTRY
-######################################
-## USA
-######################################
-##
-## AT&T
-68.94.156.1
-68.94.157.1
-##
-## ACS Alaska
-209.193.4.7
-209.112.160.2
-##
-## AOL
-205.188.146.145
-##
-## Century Link
-207.14.235.234
-67.238.98.162
-74.4.19.187
-##
-## Charter
-24.296.64.53
-##
-## Cincinnati Bell, ZoomTown
-216.68.4.10
-216.68.5.10
-##
-## Cincinnati Bell, Fuze
-216.68.1.100
-216.68.2.100
-##
-## Comcast, General DNS Servers
-## West Coast
-68.87.85.98
-2001:558:1004:6:68:87:85:98
-## East Coast
-68.87.64.146
-2001:558:1002:B:68:87:64:146
-##
-## Comcast, Albuquerque
-68.87.85.98
-68.87.69.146
-2001:558:1004:6:68:87:85:98
-2001:558:100C:D:68:87:69:146
-##
-## Comcast, Atlanta
-68.87.68.162
-68.87.74.162
-2001:558:100A:4:68:87:68:162
-2001:558:1012:6:68:87:74:162
-##
-## Comcast, Augusta
-68.87.68.162
-68.87.74.162
-2001:558:100A:4:68:87:68:162
-2001:558:1012:6:68:87:74:162
-##
-## Comcast, Battle Creek
-68.87.77.130
-68.87.72.130
-2001:558:1016:C:68:87:77:130
-2001:558:100E:4:68:87:72:130
-##
-## Comcast, Charleston
-68.87.68.162
-68.87.74.162
-2001:558:100A:4:68:87:68:162
-2001:558:1012:6:68:87:74:162
-##
-## Comcast, Chattanooga
-68.87.68.162
-68.87.74.162
-2001:558:100A:4:68:87:68:162
-2001:558:1012:6:68:87:74:162
-##
-## Comcast, Chesterfield
-68.87.73.242
-68.87.71.226
-2001:558:1010:8:68:87:73:242
-2001:558:1000:E:68:87:71:226
-##
-## Comcast, Chicago
-68.87.72.130
-68.87.77.130
-2001:558:100E:4:68:87:72:130
-2001:558:1016:C:68:87:77:130
-##
-## Comcast, Colorado
-68.87.85.98
-68.87.69.146
-2001:558:1004:6:68:87:85:98
-2001:558:100C:D:68:87:69:146
-##
-## Comcast, Connecticut
-68.87.71.226
-68.87.73.242
-2001:558:1000:E:68:87:71:226
-2001:558:1010:8:68:87:73:242
-##
-## Comcast, Dallas
-68.87.68.162
-68.87.74.162
-2001:558:100A:4:68:87:68:162
-2001:558:1012:6:68:87:74:162
-##
-## Comcast, East Tennessee
-68.87.68.162
-68.87.74.162
-2001:558:100A:4:68:87:68:162
-2001:558:1012:6:68:87:74:162
-##
-## Comcast, Elyria
-68.87.75.194
-68.87.64.146
-2001:558:1001:C:68:87:75:194
-2001:558:1002:B:68:87:64:146
-##
-## Comcast, Fort Wayne
-68.87.72.130
-68.87.77.130
-2001:558:100E:4:68:87:72:130
-2001:558:1016:C:68:87:77:130
-##
-## Comcast, Fresno
-68.87.76.178
-68.87.78.130
-2001:558:1014:F:68:87:76:178
-2001:558:1018:6:68:87:78:130
-##
-## Comcast, Hattiesburg-Laurel
-68.87.68.162
-68.87.74.162
-2001:558:100A:4:68:87:68:162
-2001:558:1012:6:68:87:74:162
-##
-## Comcast, Huntsville
-68.87.68.162
-68.87.74.162
-2001:558:100A:4:68:87:68:162
-2001:558:1012:6:68:87:74:162
-##
-## Comcast, Illinois
-68.87.72.130
-68.87.77.130
-2001:558:100E:4:68:87:72:130
-2001:558:1016:C:68:87:77:130
-##
-## Comcast, Independence
-68.87.72.130
-68.87.77.130
-2001:558:100E:4:68:87:72:130
-2001:558:1016:C:68:87:77:130
-##
-## Comcast, Indianapolis
-68.87.72.130
-68.87.77.130
-2001:558:100E:4:68:87:72:130
-2001:558:1016:C:68:87:77:130
-##
-## Comcast, Jacksonville
-68.87.74.162
-68.87.68.162
-2001:558:1012:6:68:87:74:162
-2001:558:100A:4:68:87:68:162
-##
-## Comcast, Knoxville
-68.87.68.162
-68.87.74.162
-2001:558:100A:4:68:87:68:162
-2001:558:1012:6:68:87:74:162
-##
-## Comcast, Lake County
-68.87.74.162
-68.87.68.162
-2001:558:1012:6:68:87:74:162
-2001:558:100A:4:68:87:68:162
-##
-## Comcast, Little Rock
-68.87.68.162
-68.87.74.162
-2001:558:100A:4:68:87:68:162
-2001:558:1012:6:68:87:74:162
-##
-## Comcast, Los Angeles
-68.87.76.178
-68.87.78.130
-2001:558:1014:F:68:87:76:178
-2001:558:1018:6:68:87:78:130
-##
-## Comcast, Massachusetts
-68.87.71.226
-68.87.73.242
-2001:558:1000:E:68:87:71:226
-2001:558:1010:8:68:87:73:242
-##
-## Comcast, Meridian
-68.87.68.162
-68.87.74.162
-2001:558:100A:4:68:87:68:162
-2001:558:1012:6:68:87:74:162
-## Comcast, Miami
-68.87.74.162
-68.87.68.162
-2001:558:1012:6:68:87:74:162
-2001:558:100A:4:68:87:68:162
-##
-## Comcast, Michigan
-68.87.77.130
-68.87.72.130
-2001:558:1016:C:68:87:77:130
-2001:558:100E:4:68:87:72:130
-## Comcast, Minnesota
-68.87.77.130
-68.87.72.130
-2001:558:1016:C:68:87:77:130
-2001:558:100E:4:68:87:72:130
-##
-## Comcast, Mobile
-68.87.68.162
-68.87.74.162
-2001:558:100A:4:68:87:68:162
-2001:558:1012:6:68:87:74:162
-##
-## Comcast, Muncie
-68.87.72.130
-68.87.77.130
-2001:558:100E:4:68:87:72:130
-2001:558:1016:C:68:87:77:130
-##
-## Comcast, Naples
-68.87.74.162
-68.87.68.162
-2001:558:1012:6:68:87:74:162
-2001:558:100A:4:68:87:68:162
-##
-## Comcast, Nashville
-68.87.68.162
-68.87.74.162
-2001:558:100A:4:68:87:68:162
-2001:558:1012:6:68:87:74:162
-##
-## Comcast, New England
-68.87.71.226
-68.87.73.242
-2001:558:1000:E:68:87:71:226
-2001:558:1010:8:68:87:73:242
-##
-## Comcast, Olathe
-68.87.72.130
-68.87.77.130
-2001:558:100E:4:68:87:72:130
-2001:558:1016:C:68:87:77:130
-##
-## Comcast, Oregon
-68.87.69.146
-68.87.85.98
-2001:558:100C:D:68:87:69:146
-2001:558:1004:6:68:87:85:98
-##
-## Comcast, Paducah
-68.87.72.130
-68.87.77.130
-2001:558:100E:4:68:87:72:130
-2001:558:1016:C:68:87:77:130
-##
-## Comcast, Panama City
-68.87.74.162
-68.87.68.162
-2001:558:1012:6:68:87:74:162
-2001:558:100A:4:68:87:68:162
-##
-## Comcast, Pennsylvania
-68.87.75.194
-68.87.64.146
-2001:558:1001:C:68:87:75:194
-2001:558:1002:B:68:87:64:146
-##
-## Comcast, Philadelphia
-68.87.64.146
-68.87.75.194
-2001:558:1002:B:68:87:64:146
-2001:558:1001:C:68:87:75:194
-##
-## Comcast, Pima
-68.87.85.98
-68.87.69.146
-2001:558:1004:6:68:87:85:98
-2001:558:100C:D:68:87:69:146
-##
-## Comcast, Richmond
-68.87.73.242
-68.87.71.226
-2001:558:1010:8:68:87:73:242
-2001:558:1000:E:68:87:71:226
-##
-## Comcast, Sacramento
-68.87.76.178
-68.87.78.130
-2001:558:1014:F:68:87:76:178
-2001:558:1018:6:68:87:78:130
-##
-## Comcast, San Francisco Bay Area
-68.87.76.178
-68.87.78.130
-2001:558:1014:F:68:87:76:178
-2001:558:1018:6:68:87:78:130
-##
-## Comcast, Savannah
-68.87.68.162
-68.87.74.162
-2001:558:100A:4:68:87:68:162
-2001:558:1012:6:68:87:74:162
-##
-## Comcast, South Bend
-68.87.72.130
-68.87.77.130
-2001:558:100E:4:68:87:72:130
-2001:558:1016:C:68:87:77:130
-##
-## Comcast, Spokane
-68.87.69.146
-68.87.85.98
-2001:558:100C:D:68:87:69:146
-2001:558:1004:6:68:87:85:98
-##
-## Comcast, Stockton
-68.87.76.178
-68.87.78.130
-2001:558:1014:F:68:87:76:178
-2001:558:1018:6:68:87:78:130
-##
-## Comcast, Tallahassee
-68.87.74.162
-68.87.68.162
-2001:558:1012:6:68:87:74:162
-2001:558:100A:4:68:87:68:162
-##
-## Comcast, Texas
-68.87.85.98
-68.87.69.146
-2001:558:1004:6:68:87:85:98
-2001:558:100C:D:68:87:69:146
-##
-## Comcast, Tuscaloosa
-68.87.68.162
-68.87.74.162
-2001:558:100A:4:68:87:68:162
-2001:558:1012:6:68:87:74:162
-##
-## Comcast, Utah
-68.87.85.98
-68.87.69.146
-2001:558:1004:6:68:87:85:98
-2001:558:100C:D:68:87:69:146
-##
-## Comcast, Washington
-68.87.69.146
-68.87.85.98
-2001:558:100C:D:68:87:69:146
-2001:558:1004:6:68:87:85:98
-##
-## Comcast, Washington DC
-68.87.73.242
-68.87.71.226
-2001:558:1010:8:68:87:73:242
-2001:558:1000:E:68:87:71:226
-##
-## Comcast, West Florida
-68.87.74.162
-68.87.68.162
-2001:558:1012:6:68:87:74:162
-2001:558:100A:4:68:87:68:162
-##
-## Earthlink
-207.69.188.185
-207.69.188.186
-207.69.188.187
-##
-############################
-## UK
-############################
-##
-## AAISP
-217.169.20.20
-217.169.20.21
-2001:8b0::2020
-2001:8b0::2021
-##
-## AOL Broadband
-64.12.51.132
-149.174.221.8
-205.188.157.232
-205.188.146.145
-##
-## BE Unlimited
-87.194.0.51
-87.194.0.52
-87.194.0.66
-87.194.0.67
-##
-## BT Broadband
-62.6.40.178
-62.6.40.162
-194.72.9.38
-194.72.9.34
-194.72.0.98
-194.72.0.114
-194.74.65.68
-194.74.65.69
-##
-## Bulldog Broadband North
-212.158.248.5
-212.158.248.6
-##
-## Bulldog Broadband South
-83.146.21.5
-83.146.21.6
-##
-## Bytel
-80.76.204.35
-80.76.200.69
-##
-## Clara.net
-195.8.69.7
-195.8.69.12
-##
-## Datanet
-80.68.34.6
-77.241.177.2
-80.68.34.8
-##
-## Demon Internet
-158.152.1.58
-158.152.1.43
-##
-## Eclipse Internet
-212.104.130.9
-212.104.130.65
-##
-## Entanet
-195.74.102.146
-195.74.102.147
-##
-## Exa Networks
-82.219.4.24
-82.219.4.25
-##
-## Fast
-78.143.192.10
-78.143.192.20
-##
-## Freedom 2 Surf
-194.106.56.6
-194.106.33.42
-##
-## IDNet
-212.69.36.3
-212.69.36.2
-212.69.40.2
-##
-## Karoo
-212.50.160.100
-213.249.130.100
-##
-## Madasafish
-80.189.94.2
-80.189.92.2
-##
-## Merula
-217.146.97.10
-217.146.105.2
-##
-## Metronet
-213.162.97.65
-213.162.97.66
-##
-## Namesco
-195.7.224.57
-195.7.224.143
-##
-## NewNet
-212.87.64.10
-212.87.64.11
-##
-## Nildram
-213.208.106.212
-213.208.106.213
-##
-## O2
-87.194.0.51
-87.194.0.52
-87.194.0.66
-87.194.0.67
-##
-## Onetel
-212.67.96.129
-212.67.96.130
-##
-## Onyx
-194.176.65.5
-195.97.231.31
-##
-## Oosha
-213.190.161.254
-213.190.161.250
-213.190.160.9
-##
-## Orange
-195.92.195.94
-195.92.195.95
-##
-## Pipex
-62.241.160.200
-158.43.240.4
-212.74.112.66
-212.74.112.67
-##
-## PlusNet
-212.159.13.49
-212.159.13.50
-212.159.6.9
-212.159.6.10
-##
-## Powernet
-195.60.0.1
-195.60.0.5
-##
-## Prodigy
-198.83.19.241
-198.83.19.244
-207.115.59.241
-207.115.59.244
-##
-## SAQ
-195.2.130.209
-195.2.156.67
-##
-## Scotnet
-217.16.223.30
-217.16.223.31
-##
-## Sky Broadband
-87.86.189.16
-87.86.189.17
-195.40.1.36
-##
-## Skymarket
-212.84.173.66
-212.84.173.82
-##
-## Supanet
-213.40.66.126
-213.40.130.126
-##
-## TalkTalk
-62.24.199.13
-62.24.199.23
-62.24.128.18
-62.24.128.17
-##
-## Tesco
-194.168.4.100
-194.168.8.100
-##
-## Timewarp
-217.149.108.10
-217.149.108.11
-##
-## Timico
-195.54.225.10
-195.54.226.10
-##
-## Tiscali
-212.74.112.66
-212.74.112.67
-80.255.252.50
-80.255.252.58
-##
-## Topletter
-77.95.114.100
-77.95.112.1
-##
-## UK Online
-212.135.1.36
-195.40.1.36
-##
-## Utility Warehouse
-62.24.128.17
-62.24.128.18
-##
-## UTV Internet
-194.46.192.141
-194.46.192.142
-##
-## Virgin Media
-194.168.4.100
-194.168.8.100
-##
-## VISPA
-62.24.228.9
-62.24.228.10
-##
-## Zen Internet
-212.23.3.100
-212.23.6.100
-##
-####################################
-## NEW ZEALAND
-####################################
-##
-## Xtra
-202.27.158.40
-202.27.156.72
-##
-####################################
-## AUSTRALIA
-####################################
-##
-## AANet, Victoria
-203.24.100.125
-203.123.94.40
-##
-## AANet, South Australia
-203.24.100.125
-203.123.69.15
-##
-## AANet, Western Australia
-203.24.100.125
-202.76.136.40
-##
-## AANet, Queensland
-203.24.100.125
-202.76.170.40
-##
-## AANet, New South Wales
-203.24.100.125
-203.123.69.15
-##
-## AAPT, New South Wales
-192.189.54.33
-203.8.183.1
-##
-## AAPT, Victoria
-192.189.54.17
-203.8.183.1
-##
-## AAPT, Queensland
-192.189.54.33
-203.8.183.1
-##
-## AAPT, Tasmania
-192.189.54.17
-203.8.183.1
-##
-## AAPT, Australian Capital Territory
-192.189.54.33
-203.8.183.1
-##
-## AAPT, South Australia
-192.189.54.17
-203.8.183.1
-##
-## AAPT, Northern Territory
-192.189.54.17
-203.8.183.1
-##
-## AAPT, Western Australia
-192.189.54.17
-203.8.183.1
-##
-## Adam
-122.49.191.252
-122.49.191.253
-##
-## Amnet
-203.161.127.1
-203.153.224.42
-##
-## Comcen
-203.23.236.66
-203.23.236.69
-##
-## Dodo
-203.220.32.121
-203.220.32.122
-203.220.32.123
-##
-## Exetel
-220.233.0.4
-220.233.0.3
-##
-## iiNet
-203.0.178.191
-203.0.178.191
-##
-## Internode
-192.231.203.132
-192.231.203.3
-2001:44b8:1::1
-2001:44b8:2::2
-##
-## iPrimus, New South Wales
-203.134.64.66
-203.134.65.66
-##
-## iPrimus, Victoria
-203.134.24.70
-203.134.26.70
-##
-## iPrimus, Queensland
-203.134.12.90
-203.134.102.90
-##
-## iPrimus, Western Australia
-203.134.17.90
-211.26.25.90
-##
-## Netspace
-210.15.254.240
-210.15.254.241
-##
-## Optus
-211.29.132.12
-198.142.0.51
-##
-## People Telecom, New South Wales
-202.154.123.97
-218.214.227.3
-##
-## People Telecom, Northern Territory
-202.154.92.5
-218.214.228.97
-##
-## People Telecom, Queensland
-218.214.227.3
-202.154.123.97
-##
-## People Telecom, South Australia
-218.214.228.97
-218.214.17.1
-##
-## People Telecom, Victoria
-218.214.17.1
-218.214.228.97
-##
-## People Telecom, Western Australia
-202.154.92.5
-218.214.228.97
-##
-## Spin Internet
-203.23.236.66
-203.23.236.69
-##
-## Telstra BigPond, New South Wales
-61.9.194.49
-61.9.195.193
-##
-## Telstra BigPond, Victoria
-61.9.133.193
-61.9.134.49
-##
-## Telstra BigPond, Queensland
-61.9.211.33
-61.9.211.1
-##
-## Telstra BigPond, Tasmania
-61.9.188.33
-61.9.134.49
-##
-## Telstra BigPond, Australian Capital Territory
-61.9.207.1
-61.9.195.193
-##
-## Telstra BigPond, South Australia
-61.9.226.33
-61.9.194.49
-##
-## Telstra BigPond, Northern Territory
-61.9.226.33
-61.9.194.49
-##
-## Telstra BigPond, Western Australia
-61.9.242.33
-61.9.226.33
-##
-## TPG
-203.12.160.35
-203.12.160.36
-203.12.160.37
-##
-## Westnet
-203.21.20.20
-203.10.1.9
-########################################
diff --git a/lists/short_hostname_list.txt b/lists/short_hostname_list.txt
deleted file mode 100644
index f13c702..0000000
--- a/lists/short_hostname_list.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-torproject.org
-google.com
-ooni.nu
-torrentz.eu
-anarchyplanet.org
-riseup.net
-indymedia.org
diff --git a/lists/tld-list-cc.txt b/lists/tld-list-cc.txt
deleted file mode 100644
index 57e0cc8..0000000
--- a/lists/tld-list-cc.txt
+++ /dev/null
@@ -1,511 +0,0 @@
-.ac = Ascension Island
-
-.ad = Andorra
-
-.ae = United Arab Emirates
-
-.af = Afghanistan
-
-.ag = Antigua and Barbuda
-
-.ai = Anguilla
-
-.al = Albania
-
-.am = Armenia
-
-.an = Netherlands Antilles
-
-.ao = Angola
-
-.aq = Antarctica - no registrar
-
-.ar = Argentina
-
-.as = American Samoa
-
-.at = Austria
-
-.au = Australia
-
-.aw = Aruba - no registrar
-
-.ax = Aland Islands
-
-.az = Azerbaijan - no registrar
-
-.ba = Bosnia and Herzegovina
-
-.bb = Barbados
-
-.bd = Bangladesh - no registrar
-
-.be = Belgium
-
-.bf = Burkina Faso - no registrar
-
-.bg = Bulgaria
-
-.bh = Bahrain
-
-.bi = Burundi
-
-.bj = Benin ... (little info) DETAILS
-
-.bm = Bermuda
-
-.bn = Brunei Darussalam
-
-.bo = Bolivia
-
-.br = Brazil
-
-.bs = Bahamas
-
-.bt = Bhutan
-
-.bv = Bouvet Island - not in use
-
-.bw = Botswana - no registrar
-
-.by = Belarus
-
-.bz = Belize
-
-.ca = Canada
-
-.cc = Cocos (Keeling) Islands
-
-.cd = The Democratic Republic of the Congo
-
-.cf = Central African Republic - no registrar
-
-.cg = Republic of Congo
-
-.ch = Switzerland
-
-.ci = Cote d'Ivoire
-
-.ck = Cook Islands
-
-.cl = Chile
-
-.cm = Cameroon - no registrar - wildcarded
-
-.cn = China
-
-.co = Colombia
-
-.cr = Costa Rica
-
-.cs = (former) Serbia and Montenegro - no registrar - see: .me
-(.cs was also formerly the ISO_3166-1 code for Czechoslovakia, now .cs is closed.)
-
-.cu = Cuba - no registrar
-
-.cv = Cape Verde - no registrar
-
-.cx = Christmas Island
-
-.cy = Cyprus
-
-.cz = Czech Republic
-
-.dd = East Germany (obsolete)
-
-.de = Germany
-
-.dj = Djibouti - no information
-
-.dk = Denmark
-
-.dm = Dominica
-
-.do = Dominican Republic
-
-.dz = Algeria - no registrar
-
-.ec = Ecuador
-
-.ee = Estonia
-
-.eg = Egypt - DETAILS
-
-.eh = Western Sahara - no registrar
-
-.er = Eritrea - no registrar
-
-.es = Spain
-
-.et = Ethiopia
-
-.eu = European Union - DETAILS
-
-.fi = Finland
-
-.fj = Fiji
-
-.fk = Falkland Islands (Malvinas)
-
-.fm = Micronesia, Federal State of
-
-.fo = Faroe Islands
-
-.fr = France
-
-.ga = Gabon - no registrar
-
-.gb = Great Britain (United Kingdom) - reserved, see .uk
-
-.gd = Grenada
-
-.ge = Georgia
-
-.gf = French Guiana
-
-.gg = Guernsey
-
-.gh = Ghana
-
-.gi = Gibraltar
-
-.gl = Greenland
-
-.gm = Gambia
-
-.gn = Guinea
-
-.gp = Guadeloupe - no information
-
-.gq = Equatorial Guinea - no information
-
-.gr = Greece
-
-.gs = South Georgia and the
-South Sandwich Islands
-
-.gt = Guatemala
-
-.gu = Guam
-
-.gw = Guinea-Bissau - no registrar
-
-.gy = Guyana - no registrar
-
-.hk = Hong Kong
-
-.hm = Heard and McDonald Islands
-
-.hn = Honduras
-
-.hr = Croatia/Hrvatska
-
-.ht = Haiti - no registrar
-
-.hu = Hungary
-
-.id = Indonesia - no information
-
-.ie = Ireland
-
-.il = Israel
-
-.im = Isle of Man
-
-.in = India
-
-.io = British Indian Ocean Territory
-
-.iq = Iraq - no registrar
-
-.ir = Islamic Republic of Iran
-
-.is = Iceland
-
-.it = Italy
-
-.je = Jersey
-
-.jm = Jamaica - no registrar
-
-.jo = Jordan
-
-.jp = Japan
-
-.ke = Kenya
-
-.kg = Kyrgyzstan - no registrar
-
-.kh = Cambodia
-
-.ki = Kiribati
-
-.km = Comoros
-
-.kn = Saint Kitts and Nevis - no registrar
-
-.kp = Democratic People's Republic of Korea
-(North) - no registrar
-
-.kr = Republic of Korea (South)
-
-.kw = Kuwait - no registrar
-
-.ky = Cayman Islands
-
-.kz = Kazakhstan
-
-.la = Lao People's Democratic Republic (Laos)
-... DETAILS
-
-.lb = Lebanon
-
-.lc = Saint Lucia
-
-.li = Liechtenstein
-
-.lk = Sri Lanka
-
-.lr = Liberia
-
-.ls = Lesotho - no registrar
-
-.lt = Lithuania
-
-.lu = Luxembourg
-
-.lv = Latvia
-
-.ly = Libyan Arab Jamahiriya (Libya)
-
-.ma = Morocco
-
-.mc = Monaco
-
-.md = Moldova
-
-.me = Montenegro
-
-.mg = Madagascar
-
-.mh = Marshall Islands
-
-.mk = Macedonia
-
-.ml = Mali - no information
-
-.mm = Myanmar (formerly Burma) - no registrar
-
-.mn = Mongolia
-
-.mo = Macau
-
-.mp = Northern Mariana Islands
-
-.mq = Martinique - no information
-
-.mr = Mauritania
-
-.ms = Montserrat
-
-.mt = Malta
-
-.mu = Mauritius
-
-.mv = Maldives - no registrar
-
-.mw = Malawi
-
-.mx = Mexico
-
-.my = Malaysia
-
-.mz = Mozambique - no registrar
-
-.na = Namibia
-
-.nc = New Caledonia
-
-.ne = Niger - no information
-
-.nf = Norfolk Island
-
-.ng = Nigeria
-
-.ni = Nicaragua
-
-.nl = Netherlands
-
-.no = Norway
-
-.np = Nepal
-
-.nr = Nauru
-
-.nu = Niue
-
-.nz = New Zealand
-
-.om = Oman - Omantel.net.om not functioning
-
-.pa = Panama
-
-.pe = Peru
-
-.pf = French Polynesia - no registrar
-
-.pg = Papua New Guinea - no registrar
-
-.ph = Philippines
-
-.pk = Pakistan
-
-.pl = Poland
-
-.pm = Saint Pierre and Miquelon - not available
-
-.pn = Pitcairn Island
-
-.pr = Puerto Rico
-
-.ps = Palestinian Territories
-
-.pt = Portugal
-
-.pw = Palau
-
-.py = Paraguay
-
-.qa = Qatar
-
-.re = Reunion Island
-
-.ro = Romania
-
-.rs = Serbia - no registrar
-
-.ru = Russian Federation
-
-.rw = Rwanda
-
-.sa = Saudi Arabia
-
-.sb = Solomon Islands
-
-.sc = Seychelles
-
-.sd = Sudan
-
-.se = Sweden
-
-.sg = Singapore
-
-.sh = Saint Helena
-
-.si = Slovenia
-
-.sj = Svalbard and Jan Mayen Islands - not in use
-
-.sk = Slovak Republic
-
-.sl = Sierra Leone
-
-.sm = San Marino
-
-.sn = Senegal - no registrar
-
-.so = Somalia - no registrar
-
-.sr = Suriname
-
-.st = Sao Tome and Principe
-
-.su = Soviet Union
-
-.sv = El Salvador
-
-.sy = Syrian Arab Republic
-
-.sz = Swaziland
-
-.tc = Turks and Caicos Islands - no registrar
-
-.td = Chad - no registrar
-
-.tf = French Southern Territories - no registrar
-
-.tg = Togo
-
-.th = Thailand
-
-.tj = Tajikistan
-
-.tk = Tokelau
-
-.tl = Timor-Leste
-
-.tm = Turkmenistan
-
-.tn = Tunisia
-
-.to = Tonga
-
-.tp = East Timor - Closed. See: Timor-Leste
-
-.tr = Turkey
-
-.tt = Trinidad and Tobago
-
-.tv = Tuvalu
-
-.tw = Taiwan
-
-.tz = Tanzania
-
-.ua = Ukraine
-
-.ug = Uganda
-
-.uk = United Kingdom
-
-.um = United States Minor Outlying Islands
-- Withdrawn, no domains exist.
-
-.us = United States (USA)
-
-.uy = Uruguay
-
-.uz = Uzbekistan
-
-.va = Holy See (Vatican City State)- no registrar
-
-.vc = Saint Vincent and the Grenadines
-
-.ve = Venezuela
-
-.vg = British Virgin Islands
-
-.vi = U.S. Virgin Islands
-
-.vn = Vietnam
-
-.vu = Vanuatu
-
-.wf = Wallis and Futuna Islands - no registrar
-
-.ws = Western Samoa
-
-.ye = Yemen - no registrar
-
-.yt = Mayotte - no registrar
-
-.yu = Yugoslavia Withdrawn in favor of .me and .rs
-
-.za = South Africa
-
-.zm = Zambia - no registrar
-
-.zr = Zaire - Obsolete
-now: The Democratic Republic of the Congo (.cd)
-
-.zw = Zimbabwe - no registrar
diff --git a/lists/tld-list-mozilla.txt b/lists/tld-list-mozilla.txt
deleted file mode 100644
index 7902eee..0000000
--- a/lists/tld-list-mozilla.txt
+++ /dev/null
@@ -1,5 +0,0 @@
---2012-05-19 13:07:53-- https://mxr.mozilla.org/mozilla-central/source/netwerk/dns/effective_tld_na…
-Resolving mxr.mozilla.org (mxr.mozilla.org) 63.245.215.42
-Connecting to mxr.mozilla.org (mxr.mozilla.org)|63.245.215.42|:443... connected.
-ERROR: The certificate of `mxr.mozilla.org' is not trusted.
-ERROR: The certificate of `mxr.mozilla.org' hasn't got a known issuer.
diff --git a/lists/top-1m.txt.bak2 b/lists/top-1m.txt.bak2
deleted file mode 100644
index 293e661..0000000
--- a/lists/top-1m.txt.bak2
+++ /dev/null
@@ -1,11 +0,0 @@
-1,torproject.org
-2,google.com
-3,facebook.com
-4,youtube.com
-5,yahoo.com
-6,baidu.com
-7,wikipedia.org
-8,live.com
-9,blogspot.com
-10,twitter.com
-11,qq.com

04 Nov '12
commit ed7cb1a39289d18eb869151dfa376d9b73be6c1c
Author: Isis Lovecruft <isis(a)torproject.org>
Date: Sun Nov 4 13:49:36 2012 +0000
* Moved /old-to-be-ported to /to-be-ported.
---
old-to-be-ported-code/TODO | 418 --------------------
.../old-api/.ropeproject/config.py | 85 ----
.../old-api/.ropeproject/globalnames | Bin 108 -> 0 bytes
old-to-be-ported-code/old-api/.ropeproject/history | 1 -
.../old-api/.ropeproject/objectdb | Bin 741 -> 0 bytes
old-to-be-ported-code/old-api/TESTS_ARE_MOVING.txt | 8 -
old-to-be-ported-code/old-api/chinatrigger.py | 140 -------
old-to-be-ported-code/old-api/daphn3.py | 152 -------
old-to-be-ported-code/old-api/domclass.py | 216 ----------
old-to-be-ported-code/old-api/dropin.cache | 243 ------------
old-to-be-ported-code/old-api/httpt.py | 94 -----
old-to-be-ported-code/old-api/tcpconnect.py | 65 ---
old-to-be-ported-code/old-api/tcpscan.py | 84 ----
old-to-be-ported-code/spec/proxooni-spec.txt | 65 ---
old-to-be-ported-code/very-old/TODO.plgoons | 79 ----
old-to-be-ported-code/very-old/TO_BE_PORTED | 14 -
old-to-be-ported-code/very-old/ooni-probe.diff | 358 -----------------
old-to-be-ported-code/very-old/ooni/#namecheck.py# | 39 --
old-to-be-ported-code/very-old/ooni/.DS_Store | Bin 15364 -> 0 bytes
old-to-be-ported-code/very-old/ooni/__init__.py | 12 -
old-to-be-ported-code/very-old/ooni/command.py | 250 ------------
.../very-old/ooni/dns_poisoning.py | 43 --
old-to-be-ported-code/very-old/ooni/dnsooni.py | 356 -----------------
old-to-be-ported-code/very-old/ooni/helpers.py | 38 --
old-to-be-ported-code/very-old/ooni/http.py | 306 --------------
old-to-be-ported-code/very-old/ooni/input.py | 33 --
old-to-be-ported-code/very-old/ooni/namecheck.py | 39 --
.../very-old/ooni/plugins/dnstest_plgoo.py | 84 ----
.../very-old/ooni/plugins/http_plgoo.py | 70 ----
.../very-old/ooni/plugins/marco_plgoo.py | 377 ------------------
.../very-old/ooni/plugins/proxy_plgoo.py | 69 ----
.../very-old/ooni/plugins/simple_dns_plgoo.py | 35 --
.../very-old/ooni/plugins/tcpcon_plgoo.py | 278 -------------
old-to-be-ported-code/very-old/ooni/plugins/tor.py | 80 ----
old-to-be-ported-code/very-old/ooni/plugins/torrc | 9 -
old-to-be-ported-code/very-old/ooni/plugooni.py | 106 -----
.../very-old/ooni/transparenthttp.py | 41 --
old-to-be-ported-code/very-old/traceroute.py | 108 -----
to-be-ported/TODO | 418 ++++++++++++++++++++
to-be-ported/old-api/.ropeproject/config.py | 85 ++++
to-be-ported/old-api/.ropeproject/globalnames | Bin 0 -> 108 bytes
to-be-ported/old-api/.ropeproject/history | 1 +
to-be-ported/old-api/.ropeproject/objectdb | Bin 0 -> 741 bytes
to-be-ported/old-api/TESTS_ARE_MOVING.txt | 8 +
to-be-ported/old-api/chinatrigger.py | 140 +++++++
to-be-ported/old-api/daphn3.py | 152 +++++++
to-be-ported/old-api/domclass.py | 216 ++++++++++
to-be-ported/old-api/dropin.cache | 243 ++++++++++++
to-be-ported/old-api/httpt.py | 94 +++++
to-be-ported/old-api/tcpconnect.py | 65 +++
to-be-ported/old-api/tcpscan.py | 84 ++++
to-be-ported/spec/proxooni-spec.txt | 65 +++
to-be-ported/very-old/TODO.plgoons | 79 ++++
to-be-ported/very-old/TO_BE_PORTED | 14 +
to-be-ported/very-old/ooni-probe.diff | 358 +++++++++++++++++
to-be-ported/very-old/ooni/#namecheck.py# | 39 ++
to-be-ported/very-old/ooni/.DS_Store | Bin 0 -> 15364 bytes
to-be-ported/very-old/ooni/__init__.py | 12 +
to-be-ported/very-old/ooni/command.py | 250 ++++++++++++
to-be-ported/very-old/ooni/dns_poisoning.py | 43 ++
to-be-ported/very-old/ooni/dnsooni.py | 356 +++++++++++++++++
to-be-ported/very-old/ooni/helpers.py | 38 ++
to-be-ported/very-old/ooni/http.py | 306 ++++++++++++++
to-be-ported/very-old/ooni/input.py | 33 ++
to-be-ported/very-old/ooni/namecheck.py | 39 ++
.../very-old/ooni/plugins/dnstest_plgoo.py | 84 ++++
to-be-ported/very-old/ooni/plugins/http_plgoo.py | 70 ++++
to-be-ported/very-old/ooni/plugins/marco_plgoo.py | 377 ++++++++++++++++++
to-be-ported/very-old/ooni/plugins/proxy_plgoo.py | 69 ++++
.../very-old/ooni/plugins/simple_dns_plgoo.py | 35 ++
to-be-ported/very-old/ooni/plugins/tcpcon_plgoo.py | 278 +++++++++++++
to-be-ported/very-old/ooni/plugins/tor.py | 80 ++++
to-be-ported/very-old/ooni/plugins/torrc | 9 +
to-be-ported/very-old/ooni/plugooni.py | 106 +++++
to-be-ported/very-old/ooni/transparenthttp.py | 41 ++
to-be-ported/very-old/traceroute.py | 108 +++++
76 files changed, 4395 insertions(+), 4395 deletions(-)
diff --git a/old-to-be-ported-code/TODO b/old-to-be-ported-code/TODO
deleted file mode 100644
index 81d834f..0000000
--- a/old-to-be-ported-code/TODO
+++ /dev/null
@@ -1,418 +0,0 @@
-This is a list of techniques that should be added as plugins or hooks or yamlooni
-
-Implement Plugooni - our plugin framework
-Implement Yamlooni - our output format
-Implement Proxooni - our proxy spec and program
-
-We should launch our own Tor on a special port (say, 127.0.0.1:9066)
-We should act as a controller with TorCtl to do this, etc
-We should take the Tor consensus file and pass it to plugins such as marco
-
-HTTP Host header comparsion of a vs b
-HTTP Content length header comparision of a vs b
-
-GET request splitting
- "G E T "
- Used in Iran
-
-General Malformed HTTP requests
- Error pages are fingerprintable
-
-traceroute
- icmp/udp/tcp
- each network link is an edge, each hop is a vertex in a network graph
-
-traceroute hop count
- "TTL walking"
-
-Latency measurement
-TCP reset detection
-Forged DNS spoofing detection
-
-DNS oracle query tool
- given DNS server foo - test resolve and look for known block pages
-
-Test HTTP header order - do they get reordered?
-
-Look for these filter fingerprints:
-X-Squid-Error: ERR_SCC_SMARTFILTER_DENIED 0
-X-Squid-Error: ERR_ACCESS_DENIED 0
-X-Cache: MISS from SmartFilter
-
-
-WWW-Authenticate: Basic realm="SmartFilter Control List HTTP Download"
-
-
-Via: 1.1 WEBFILTER.CONSERVESCHOOL.ORG:8080
-
-X-Cache: MISS from webfilter.whiteschneider.com
-X-Cache: MISS from webfilter.whiteschneider.com
-X-Cache: MISS from webfilter.whiteschneider.com
-
-Location: http://192.168.0.244/webfilter/blockpage?nonce=7d2b7e500e99a0fe&tid=3
-
-
-X-Cache: MISS from webfilter.imscs.local
-X-Cache: MISS from webfilter.tjs.at
-
-
-Via: 1.1 webwasher (Webwasher 6.8.7.9396)
-
-Websense:
-HTTP/1.0 301 Moved Permanently -> Location: http://www.websense.com/
-
-Via: HTTP/1.1 localhost.localdomain (Websense-Content_Gateway/7.1.4 [c s f ]), HTTP/1.0 localhost.localdomain (Websense-Content_Gateway/7.1.4 [cMsSf ])
-
-
-BlueCoat:
-
-Via: 1.1 testrating.dc5.es.bluecoat.com
-403 ->
-Set-Cookie: BIGipServerpool_bluecoat=1185677834.20480.0000; expires=Fri, 15-Apr-2011 10:13:21 GMT; path=/
-
-HTTP/1.0 407 Proxy Authentication Required ( The ISA Server requires authorization to fulfill the request. Access to the Web Proxy filter is denied. ) -> Via: 1.1 WEBSENSE
-
-HTTP/1.0 302 Found -> Location: http://bluecoat/?cfru=aHR0cDovLzIwMC4yNy4xMjMuMTc4Lw==
-
-HTTP/1.0 403 Forbidden
-Server: squid/3.0.STABLE8
-
-X-Squid-Error: ERR_ACCESS_DENIED 0
-X-Cache: MISS from Bluecoat
-X-Cache-Lookup: NONE from Bluecoat:3128
-Via: 1.0 Bluecoat (squid/3.0.STABLE8)
-
-ISA server:
-HTTP/1.0 403 Forbidden ( ISA Server is configured to block HTTP requests that require authentication. )
-
-
-Unknown:
-X-XSS-Protection: 1; mode=block
-
-Rimon filter:
-
-Rimon: RWC_BLOCK
-HTTP/1.1 Rimon header
-Rimon header is only sent by lighttpd
-http://www.ynetnews.com/articles/0,7340,L-3446129,00.html
-http://btya.org/pdfs/rvienerbrochure.pdf
-
-Korea filtering:
-HTTP/1.0 302 Object Moved -> Location: http://www.willtechnology.co.kr/eng/BlockingMSGew.htm
-Redirects to Korean filter:
-http://www.willtechnology.co.kr/eng/BlockingMSGew.htm
-
-UA filtering:
-HTTP/1.0 307 Temporary Redirect
-https://my.best.net.ua/login/blocked/
-
-netsweeper:
-HTTP/1.0 302 Moved
-Location: http://netsweeper1.gaggle.net:8080/webadmin/deny/index.php?dpid=53&dpruleid…
-
-Set-cookie: RT_SID_netsweeper.com.80=68a6f5c564a9db297e8feb2bff69d73f; path=/
-X-Cache: MISS from netsweeper.irishbroadband.ie
-X-Cache-Lookup: NONE from netsweeper.irishbroadband.ie:80
-Via: 1.0 netsweeper.irishbroadband.ie:80 (squid/2.6.STABLE21)
-
-Nokia:
-Via: 1.1 saec-nokiaq05ca (NetCache NetApp/6.0.7)
-Server: "Nokia"
-
-CensorNet:
-HTTP/1.0 401 Authorization Required
-WWW-Authenticate: Basic realm="CensorNet Administration Area"
-Server: CensorNet/4.0
-
-http://www.itcensor.com/censor
-
-
-Server: ZyWALL Content Filter
-
-Apache/1.3.34 (Unix) filter/1.0
-
-HTTP/1.0 502 infiniteproxyloop
-Via: 1.0 218.102.20.37 (McAfee Web Gateway 7.0.1.5.0.8505)
-
-
-Set-Cookie: McAfee-SCM-URL-Filter-Coach="dD4OzXciEcp8Ihf1dD4ZzHM5FMZ2PSvRTllOnSR4RZkqfkmEIGgb3hZlVJsEaFaXNmNS3mgsdZAxaVOKIGgrrSx4Rb8hekmNKn4g02VZToogf1SbIQcVz3Q8G/U="; Comment="McAfee URL access coaching"; Version=1; Path=/; Max-Age=900; expires=Sat, 18 Dec 2010 06:47:11 GMT;
-
-
-WWW-Authenticate: Basic realm="(Nancy McAfee)"
-
-
-No known fingerprints for:
-NetNanny
-WebChaver
-accountable2you.com
-http://www.shodanhq.com/?q=barracuda
-http://www.shodanhq.com/?q=untangle
-http://www.shodanhq.com/?q=Lightspeed
-
-Server: Smarthouse Lightspeed
-Server: Smarthouse Lightspeed2
-Server: Smarthouse Lightspeed 3
-
-Server: EdgePrism/3.8.1.1
-
-
-X-Cache: MISS from Barracuda-WebFilter.jmpsecurities.com
-Via: 1.0 Barracuda-WebFilter.jmpsecurities.com:8080 (http_scan/4.0.2.6.19)
-
-HTTP/1.0 302 Redirected by M86 Web Filter
-http://www.m86security.com/products/web_security/m86-web-filter.asp
-
-Location: http://10.1.61.37:81/cgi/block.cgi?URL=http://70.182.111.99/&IP=96.9.174.54…
-
-
-Via: 1.1 WEBSENSE
-
-
-Via: 1.1 192.168.1.251 (McAfee Web Gateway 7.1.0.1.0.10541)
-Via: 1.1 McAfeeSA3000.cbcl.lan
-
-
-X-Squid-Error: ERR_CONNECT_FAIL 111
-X-Cache: MISS from CudaWebFilter.poten.com
-
-http://212.50.251.82/ -iran squid
-
-HTTP/1.0 403 Forbidden ( Forefront TMG denied the specified Uniform Resource Locator (URL). )
-Via: 1.1 TMG
-
-
-Server: NetCache appliance (NetApp/6.0.2)
-
-
-Server: EdgePrism/3.8.1.1
-
-
-Server: Mikrotik HttpProxy
-
-
-Via: 1.1 TMG-04, 1.1 TMG-03
-
-
-X-Squid-Error: ERR_INVALID_REQ 0
-X-Cache: MISS from uspa150.trustedproxies.com
-X-Cache-Lookup: NONE from uspa150.trustedproxies.com:80
-
-http://www.shodanhq.com/host/view/93.125.95.177
-
-
-Server: SarfX WEB: Self Automation Redirect & Filter Expernet.Ltd Security Web Server
-http://203.229.245.100/ <- korea block page
-
-
-
-Server: Asroc Intelligent Security Filter 4.1.8
-
-
-
-Server: tinyproxy/1.8.2
-
-http://www.shodanhq.com/host/view/64.104.95.251
-
-
-
-Server: Asroc Intelligent Security Filter 4.1.8
-
-http://www.shodanhq.com/host/view/67.220.92.62
-
-
-Server: SarfX WEB: Self Automation Redirect & Filter Expernet.Ltd Security Web Server
-http://www.shodanhq.com/host/view/203.229.245.100
-Location: http://192.168.3.20/redirect.cgi?Time=05%2FJul%2F2011%3A21%3A29%3A32%20%2B0…
-
-
-http://www.shodanhq.com/?q=%22content+filter%22+-squid+-apache+-ZyWall&page=4
-http://www.shodanhq.com/host/view/72.5.92.51
-http://www.microsoft.com/forefront/threat-management-gateway/en/us/pricing-licensing.aspx
-
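As an illustration of how the filter fingerprints collected above could be matched
automatically (an editor's sketch, not part of this commit; the table holds only a
few of the entries and relies on simple substring matching):

    FILTER_FINGERPRINTS = {
        'SmartFilter': [('X-Squid-Error', 'ERR_SCC_SMARTFILTER_DENIED')],
        'Websense':    [('Via', 'websense')],
        'BlueCoat':    [('X-Cache', 'bluecoat')],
        'Netsweeper':  [('Via', 'netsweeper')],
    }

    def match_filter_fingerprints(headers):
        """headers: dict mapping response header names to their values."""
        hits = []
        for vendor, patterns in FILTER_FINGERPRINTS.items():
            for name, needle in patterns:
                if needle.lower() in headers.get(name, '').lower():
                    hits.append(vendor)
                    break
        return hits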
-http://meta.wikimedia.org/wiki/Talk:XFF_project
-
-% dig nats.epiccash.com
-
-; <<>> DiG 9.7.3 <<>> nats.epiccash.com
-;; global options: +cmd
-;; Got answer:
-;; ->>HEADER<<- opcode: QUERY, status: NOERROR, id: 14920
-;; flags: qr rd ra; QUERY: 1, ANSWER: 1, AUTHORITY: 2, ADDITIONAL: 0
-
-;; QUESTION SECTION:
-;nats.epiccash.com. IN A
-
-;; ANSWER SECTION:
-nats.epiccash.com. 5 IN A 172.27.0.1
-
-;; AUTHORITY SECTION:
-epiccash.com. 5 IN NS ns0.example.net.
-epiccash.com. 5 IN NS ns1.example.net.
-
-;; Query time: 81 msec
-;; SERVER: 172.16.42.2#53(172.16.42.2)
-;; WHEN: Sat Jul 16 16:14:11 2011
-;; MSG SIZE rcvd: 98
-
-If we think it's squid, we can perhaps confirm it:
-echo -e "GET cache_object://localhost/info HTTP/1.0\r\n" | nc en.wikipedia.com 80
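The same check could be automated roughly as follows (editor's sketch; the function
name is made up, and the heuristic just looks for squid markers in the raw reply):

    import socket

    def looks_like_squid(host, port=80, timeout=5):
        s = socket.create_connection((host, port), timeout)
        s.sendall("GET cache_object://localhost/info HTTP/1.0\r\n\r\n")
        reply = s.recv(4096)
        s.close()
        return 'squid' in reply.lower() or 'X-Squid-Error' in reply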
-Harvest urls from:
-http://urlblacklist.com/?sec=download
-
-https://secure.wikimedia.org/wikipedia/simple/wiki/User_talk:62.30.249.131
-
-mention WCCPv2 filters (http://www.cl.cam.ac.uk/~rnc1/talks/090528-uknof13.pdf)
-
-Cite a bunch of Richard's work:
-http://www.cl.cam.ac.uk/~rnc1/ignoring.pdf
-
-http://www.contentkeeper.com/products/web
-
-We should detect HTTP re-directs to rfc-1918 addresses; they're almost always captive portals.
-We should also detect HTTP MITM served from rfc-1918 addresses for the same reason.
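A sketch of that rfc-1918 check (editor's illustration; the regexes and helper name
are assumptions, and it only catches literal IP addresses in the Location header):

    import re
    from urlparse import urlparse  # Python 2, as used elsewhere in this tree

    RFC1918_PATTERNS = (
        re.compile(r'^10\.'),
        re.compile(r'^192\.168\.'),
        re.compile(r'^172\.(1[6-9]|2[0-9]|3[01])\.'),
    )

    def redirects_to_rfc1918(location_header):
        host = urlparse(location_header).hostname or ''
        return any(p.match(host) for p in RFC1918_PATTERNS)

    # e.g. redirects_to_rfc1918("http://10.66.66.66/denied.html") -> True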
-
-We should take a page from sshshuttle and run without touching the disk
-
-VIA Rail MITM's SSL In Ottawa:
-Jul 22 17:47:21.983 [Warning] Problem bootstrapping. Stuck at 85%: Finishing handshake with first hop. (DONE; DONE; count 13; recommendation warn)
-
-http://wireless.colubris.com:81/goform/HtmlLoginRequest?username=al1852&password=al1852
-
-VIA Rail Via header (DONE):
-
-HTTP/1.0 301 Moved Permanently
-Location: http://www.google.com/
-Content-Type: text/html; charset=UTF-8
-Date: Sat, 23 Jul 2011 02:21:30 GMT
-Expires: Mon, 22 Aug 2011 02:21:30 GMT
-Cache-Control: public, max-age=2592000
-Server: gws
-Content-Length: 219
-X-XSS-Protection: 1; mode=block
-X-Cache: MISS from cache_server
-X-Cache-Lookup: MISS from cache_server:3128
-Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
-Connection: close
-
-<HTML><HEAD><meta http-equiv="content-type" content="text/html;charset=utf-8">
-<TITLE>301 Moved</TITLE></HEAD><BODY>
-<H1>301 Moved</H1>
-The document has moved
-<A HREF="http://www.google.com/">here</A>.
-</BODY></HTML>
-
-
-blocked site (DONE):
-
-HTTP/1.0 302 Moved Temporarily
-Server: squid/2.6.STABLE21
-Date: Sat, 23 Jul 2011 02:22:17 GMT
-Content-Length: 0
-Location: http://10.66.66.66/denied.html
-
-invalid request response:
-
-$ nc 8.8.8.8 80 (DONE)
-hjdashjkdsahjkdsa
-HTTP/1.0 400 Bad Request
-Server: squid/2.6.STABLE21
-Date: Sat, 23 Jul 2011 02:22:44 GMT
-Content-Type: text/html
-Content-Length: 1178
-Expires: Sat, 23 Jul 2011 02:22:44 GMT
-X-Squid-Error: ERR_INVALID_REQ 0
-X-Cache: MISS from cache_server
-X-Cache-Lookup: NONE from cache_server:3128
-Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
-Proxy-Connection: close
-
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-<HTML><HEAD><META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
-<TITLE>ERROR: The requested URL could not be retrieved</TITLE>
-<STYLE type="text/css"><!--BODY{background-color:#ffffff;font-family:verdana,sans-serif}PRE{font-family:sans-serif}--></STYLE>
-</HEAD><BODY>
-<H1>ERROR</H1>
-<H2>The requested URL could not be retrieved</H2>
-<HR noshade size="1px">
-<P>
-While trying to process the request:
-<PRE>
-hjdashjkdsahjkdsa
-
-</PRE>
-<P>
-The following error was encountered:
-<UL>
-<LI>
-<STRONG>
-Invalid Request
-</STRONG>
-</UL>
-
-<P>
-Some aspect of the HTTP Request is invalid. Possible problems:
-<UL>
-<LI>Missing or unknown request method
-<LI>Missing URL
-<LI>Missing HTTP Identifier (HTTP/1.0)
-<LI>Request is too large
-<LI>Content-Length missing for POST or PUT requests
-<LI>Illegal character in hostname; underscores are not allowed
-</UL>
-<P>Your cache administrator is <A HREF="mailto:root">root</A>.
-
-<BR clear="all">
-<HR noshade size="1px">
-<ADDRESS>
-Generated Sat, 23 Jul 2011 02:22:44 GMT by cache_server (squid/2.6.STABLE21)
-</ADDRESS>
-</BODY></HTML>
-
-nc 10.66.66.66 80
-GET cache_object://localhost/info HTTP/1.0
-HTTP/1.0 403 Forbidden
-Server: squid/2.6.STABLE21
-Date: Sat, 23 Jul 2011 02:25:56 GMT
-Content-Type: text/html
-Content-Length: 1061
-Expires: Sat, 23 Jul 2011 02:25:56 GMT
-X-Squid-Error: ERR_ACCESS_DENIED 0
-X-Cache: MISS from cache_server
-X-Cache-Lookup: NONE from cache_server:3128
-Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
-Proxy-Connection: close
-
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-<HTML><HEAD><META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
-<TITLE>ERROR: The requested URL could not be retrieved</TITLE>
-<STYLE type="text/css"><!--BODY{background-color:#ffffff;font-family:verdana,sans-serif}PRE{font-family:sans-serif}--></STYLE>
-</HEAD><BODY>
-<H1>ERROR</H1>
-<H2>The requested URL could not be retrieved</H2>
-<HR noshade size="1px">
-<P>
-While trying to retrieve the URL:
-<A HREF="cache_object://localhost/info">cache_object://localhost/info</A>
-<P>
-The following error was encountered:
-<UL>
-<LI>
-<STRONG>
-Access Denied.
-</STRONG>
-<P>
-Access control configuration prevents your request from
-being allowed at this time. Please contact your service provider if
-you feel this is incorrect.
-</UL>
-<P>Your cache administrator is <A HREF="mailto:root">root</A>.
-
-
-<BR clear="all">
-<HR noshade size="1px">
-<ADDRESS>
-Generated Sat, 23 Jul 2011 02:25:56 GMT by cache_server (squid/2.6.STABLE21)
-</ADDRESS>
-</BODY></HTML>
-
-
diff --git a/old-to-be-ported-code/old-api/.ropeproject/config.py b/old-to-be-ported-code/old-api/.ropeproject/config.py
deleted file mode 100644
index ffebcd4..0000000
--- a/old-to-be-ported-code/old-api/.ropeproject/config.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# The default ``config.py``
-
-
-def set_prefs(prefs):
- """This function is called before opening the project"""
-
- # Specify which files and folders to ignore in the project.
- # Changes to ignored resources are not added to the history and
- # VCSs. Also they are not returned in `Project.get_files()`.
- # Note that ``?`` and ``*`` match all characters but slashes.
- # '*.pyc': matches 'test.pyc' and 'pkg/test.pyc'
- # 'mod*.pyc': matches 'test/mod1.pyc' but not 'mod/1.pyc'
- # '.svn': matches 'pkg/.svn' and all of its children
- # 'build/*.o': matches 'build/lib.o' but not 'build/sub/lib.o'
- # 'build//*.o': matches 'build/lib.o' and 'build/sub/lib.o'
- prefs['ignored_resources'] = ['*.pyc', '*~', '.ropeproject',
- '.hg', '.svn', '_svn', '.git']
-
- # Specifies which files should be considered python files. It is
- # useful when you have scripts inside your project. Only files
- # ending with ``.py`` are considered to be python files by
- # default.
- #prefs['python_files'] = ['*.py']
-
- # Custom source folders: By default rope searches the project
- # for finding source folders (folders that should be searched
- # for finding modules). You can add paths to that list. Note
- # that rope guesses project source folders correctly most of the
- # time; use this if you have any problems.
- # The folders should be relative to project root and use '/' for
- # separating folders regardless of the platform rope is running on.
- # 'src/my_source_folder' for instance.
- #prefs.add('source_folders', 'src')
-
- # You can extend python path for looking up modules
- #prefs.add('python_path', '~/python/')
-
- # Should rope save object information or not.
- prefs['save_objectdb'] = True
- prefs['compress_objectdb'] = False
-
- # If `True`, rope analyzes each module when it is being saved.
- prefs['automatic_soa'] = True
- # The depth of calls to follow in static object analysis
- prefs['soa_followed_calls'] = 0
-
- # If `False` when running modules or unit tests "dynamic object
- # analysis" is turned off. This makes them much faster.
- prefs['perform_doa'] = True
-
- # Rope can check the validity of its object DB when running.
- prefs['validate_objectdb'] = True
-
- # How many undos to hold?
- prefs['max_history_items'] = 32
-
- # Shows whether to save history across sessions.
- prefs['save_history'] = True
- prefs['compress_history'] = False
-
- # Set the number spaces used for indenting. According to
- # :PEP:`8`, it is best to use 4 spaces. Since most of rope's
- # unit-tests use 4 spaces it is more reliable, too.
- prefs['indent_size'] = 4
-
- # Builtin and c-extension modules that are allowed to be imported
- # and inspected by rope.
- prefs['extension_modules'] = []
-
- # Add all standard c-extensions to extension_modules list.
- prefs['import_dynload_stdmods'] = True
-
- # If `True` modules with syntax errors are considered to be empty.
- # The default value is `False`; When `False` syntax errors raise
- # `rope.base.exceptions.ModuleSyntaxError` exception.
- prefs['ignore_syntax_errors'] = False
-
- # If `True`, rope ignores unresolvable imports. Otherwise, they
- # appear in the importing namespace.
- prefs['ignore_bad_imports'] = False
-
-
-def project_opened(project):
- """This function is called after opening the project"""
- # Do whatever you like here!
diff --git a/old-to-be-ported-code/old-api/.ropeproject/globalnames b/old-to-be-ported-code/old-api/.ropeproject/globalnames
deleted file mode 100644
index 2877ef5..0000000
Binary files a/old-to-be-ported-code/old-api/.ropeproject/globalnames and /dev/null differ
diff --git a/old-to-be-ported-code/old-api/.ropeproject/history b/old-to-be-ported-code/old-api/.ropeproject/history
deleted file mode 100644
index fcd9c96..0000000
--- a/old-to-be-ported-code/old-api/.ropeproject/history
+++ /dev/null
@@ -1 +0,0 @@
-]q(]q]qe.
\ No newline at end of file
diff --git a/old-to-be-ported-code/old-api/.ropeproject/objectdb b/old-to-be-ported-code/old-api/.ropeproject/objectdb
deleted file mode 100644
index f276839..0000000
Binary files a/old-to-be-ported-code/old-api/.ropeproject/objectdb and /dev/null differ
diff --git a/old-to-be-ported-code/old-api/TESTS_ARE_MOVING.txt b/old-to-be-ported-code/old-api/TESTS_ARE_MOVING.txt
deleted file mode 100644
index f4c0084..0000000
--- a/old-to-be-ported-code/old-api/TESTS_ARE_MOVING.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-7/10/2012
-
-All new tests will be moved to the directory /nettests/.
-
-Tests that are in this directory are either here for historical reasons or have
-not yet been properly tested and do not yet fully support the new API.
-
-A.
diff --git a/old-to-be-ported-code/old-api/chinatrigger.py b/old-to-be-ported-code/old-api/chinatrigger.py
deleted file mode 100644
index cf4bcb3..0000000
--- a/old-to-be-ported-code/old-api/chinatrigger.py
+++ /dev/null
@@ -1,140 +0,0 @@
-import random
-import string
-import struct
-import time
-
-from zope.interface import implements
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from twisted.internet import protocol, defer
-from ooni.plugoo.tests import ITest, OONITest
-from ooni.plugoo.assets import Asset
-from ooni.utils import log
-from ooni.protocols.scapyproto import ScapyTest
-
-from ooni.lib.txscapy import txsr, txsend
-
-class scapyArgs(usage.Options):
- optParameters = [['dst', 'd', None, 'Specify the target address'],
- ['port', 'p', None, 'Specify the target port'],
- ['pcap', 'f', None, 'The pcap file to write with the sent and received packets'],
- ]
-
-class ChinaTriggerTest(ScapyTest):
- """
- This test is an OONI-based implementation of the C tool written
- by Philipp Winter to engage Chinese probes in active scanning.
-
- Example of running it:
- ./ooni/ooniprobe.py chinatrigger -d 127.0.0.1 -p 8080 -f bla.pcap
- """
- implements(IPlugin, ITest)
-
- shortName = "chinatrigger"
- description = "Triggers the chinese probes into scanning"
- requirements = ['root']
- options = scapyArgs
- blocking = False
-
- receive = True
- pcapfile = 'example_scapy.pcap'
- timeout = 5
-
- def initialize(self, reactor=None):
- if not self.reactor:
- from twisted.internet import reactor
- self.reactor = reactor
-
- @staticmethod
- def set_random_servername(pkt):
- ret = pkt[:121]
- for i in range(16):
- ret += random.choice(string.ascii_lowercase)
- ret += pkt[121+16:]
- return ret
-
- @staticmethod
- def set_random_time(pkt):
- ret = pkt[:11]
- ret += struct.pack('!I', int(time.time()))
- ret += pkt[11+4:]
- return ret
-
- @staticmethod
- def set_random_field(pkt):
- ret = pkt[:15]
- for i in range(28):
- ret += chr(random.randint(0, 255))
- ret += pkt[15+28:]
- return ret
-
- @staticmethod
- def mutate(pkt, idx):
- """
- Slightly changed mutate function.
- """
- ret = pkt[:idx]
- mutation = chr(random.randint(0, 255))
- while mutation == pkt[idx]:
- mutation = chr(random.randint(0, 255))
- ret += mutation
- ret += pkt[idx+1:]
- return ret
-
- @staticmethod
- def set_all_random_fields(pkt):
- pkt = ChinaTriggerTest.set_random_servername(pkt)
- pkt = ChinaTriggerTest.set_random_time(pkt)
- pkt = ChinaTriggerTest.set_random_field(pkt)
- return pkt
-
- def build_packets(self, *args, **kw):
- """
- Override this method to build scapy packets.
- """
- from scapy.all import IP, TCP
- pkt = "\x16\x03\x01\x00\xcc\x01\x00\x00\xc8"\
- "\x03\x01\x4f\x12\xe5\x63\x3f\xef\x7d"\
- "\x20\xb9\x94\xaa\x04\xb0\xc1\xd4\x8c"\
- "\x50\xcd\xe2\xf9\x2f\xa9\xfb\x78\xca"\
- "\x02\xa8\x73\xe7\x0e\xa8\xf9\x00\x00"\
- "\x3a\xc0\x0a\xc0\x14\x00\x39\x00\x38"\
- "\xc0\x0f\xc0\x05\x00\x35\xc0\x07\xc0"\
- "\x09\xc0\x11\xc0\x13\x00\x33\x00\x32"\
- "\xc0\x0c\xc0\x0e\xc0\x02\xc0\x04\x00"\
- "\x04\x00\x05\x00\x2f\xc0\x08\xc0\x12"\
- "\x00\x16\x00\x13\xc0\x0d\xc0\x03\xfe"\
- "\xff\x00\x0a\x00\xff\x01\x00\x00\x65"\
- "\x00\x00\x00\x1d\x00\x1b\x00\x00\x18"\
- "\x77\x77\x77\x2e\x67\x6e\x6c\x69\x67"\
- "\x78\x7a\x70\x79\x76\x6f\x35\x66\x76"\
- "\x6b\x64\x2e\x63\x6f\x6d\x00\x0b\x00"\
- "\x04\x03\x00\x01\x02\x00\x0a\x00\x34"\
- "\x00\x32\x00\x01\x00\x02\x00\x03\x00"\
- "\x04\x00\x05\x00\x06\x00\x07\x00\x08"\
- "\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00"\
- "\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11"\
- "\x00\x12\x00\x13\x00\x14\x00\x15\x00"\
- "\x16\x00\x17\x00\x18\x00\x19\x00\x23"\
- "\x00\x00"
-
- pkt = ChinaTriggerTest.set_all_random_fields(pkt)
- pkts = [IP(dst=self.dst)/TCP(dport=self.port)/pkt]
- for x in range(len(pkt)):
- mutation = IP(dst=self.dst)/TCP(dport=self.port)/ChinaTriggerTest.mutate(pkt, x)
- pkts.append(mutation)
- return pkts
-
- def load_assets(self):
- if self.local_options:
- self.dst = self.local_options['dst']
- self.port = int(self.local_options['port'])
- if self.local_options['pcap']:
- self.pcapfile = self.local_options['pcap']
- if not self.port or not self.dst:
- pass
-
- return {}
-
-#chinatrigger = ChinaTriggerTest(None, None, None)
-
diff --git a/old-to-be-ported-code/old-api/daphn3.py b/old-to-be-ported-code/old-api/daphn3.py
deleted file mode 100644
index bf4d60d..0000000
--- a/old-to-be-ported-code/old-api/daphn3.py
+++ /dev/null
@@ -1,152 +0,0 @@
-"""
-This is a self-generated test created by scaffolding.py.
-You will need to fill it up with all your necessities.
-Safe hacking :).
-"""
-from zope.interface import implements
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from twisted.internet import protocol, endpoints
-
-from ooni.plugoo import reports
-from ooni.plugoo.tests import ITest, OONITest
-from ooni.plugoo.assets import Asset
-from ooni.protocols import daphn3
-from ooni.utils import log
-
-class Daphn3ClientProtocol(daphn3.Daphn3Protocol):
- def connectionMade(self):
- self.next_state()
-
-class Daphn3ClientFactory(protocol.ClientFactory):
- protocol = Daphn3ClientProtocol
- mutator = None
- steps = None
- test = None
-
- def buildProtocol(self, addr):
- p = self.protocol()
- p.factory = self
- p.test = self.test
-
- if self.steps:
- p.steps = self.steps
-
- if not self.mutator:
- self.mutator = daphn3.Mutator(p.steps)
-
- else:
- print "Moving on to next mutation"
- self.mutator.next()
-
- p.mutator = self.mutator
- p.current_state = self.mutator.state()
- return p
-
- def clientConnectionFailed(self, connector, reason):
- print "We failed connecting to the OONIB"
- print "Cannot perform test. Perhaps it got blocked?"
- print "Please report this to tor-assistants(a)torproject.org"
- self.test.result['error'] = ('Failed in connecting to OONIB', reason)
- self.test.end()
-
- def clientConnectionLost(self, connector, reason):
- print "Connection Lost."
-
-class daphn3Args(usage.Options):
- optParameters = [['pcap', 'f', None,
- 'PCAP to read for generating the YAML output'],
-
- ['output', 'o', 'daphn3.yaml',
- 'What file should be written'],
-
- ['yaml', 'y', None,
- 'The input file to the test'],
-
- ['host', 'h', None, 'Target Hostname'],
- ['port', 'p', None, 'Target port number'],
- ['resume', 'r', 0, 'Resume at this index']]
-
-class daphn3Test(OONITest):
- implements(IPlugin, ITest)
-
- shortName = "daphn3"
- description = "daphn3"
- requirements = None
- options = daphn3Args
- blocking = False
-
- local_options = None
-
- steps = None
-
- def initialize(self):
- if not self.local_options:
- self.end()
- return
-
- self.factory = Daphn3ClientFactory()
- self.factory.test = self
-
- if self.local_options['pcap']:
- self.tool = True
-
- elif self.local_options['yaml']:
- self.steps = daphn3.read_yaml(self.local_options['yaml'])
-
- else:
- log.msg("Not enough inputs specified to the test")
- self.end()
-
- def runTool(self):
- import yaml
- pcap = daphn3.read_pcap(self.local_options['pcap'])
- f = open(self.local_options['output'], 'w')
- f.write(yaml.dump(pcap))
- f.close()
-
- def control(self, exp_res, args):
- try:
- mutation = self.factory.mutator.get(0)
- self.result['censored'] = False
- except:
- mutation = None
-
- return {'mutation_number': args['mutation'],
- 'value': mutation}
-
- def _failure(self, *argc, **kw):
- self.result['censored'] = True
- self.result['error'] = ('Failed in connecting', (argc, kw))
- self.end()
-
- def experiment(self, args):
- log.msg("Doing mutation %s" % args['mutation'])
- self.factory.steps = self.steps
- host = self.local_options['host']
- port = int(self.local_options['port'])
- log.msg("Connecting to %s:%s" % (host, port))
-
- if self.ended:
- return
-
- endpoint = endpoints.TCP4ClientEndpoint(self.reactor, host, port)
- d = endpoint.connect(self.factory)
- d.addErrback(self._failure)
- return d
-
- def load_assets(self):
- if not self.local_options:
- return {}
- if not self.steps:
- print "Error: No assets!"
- self.end()
- return {}
- mutations = 0
- for x in self.steps:
- mutations += len(x['data'])
- return {'mutation': range(mutations)}
-
-# We need to instantiate it otherwise getPlugins does not detect it
-# XXX Find a way to load plugins without instantiating them.
-#daphn3test = daphn3Test(None, None, None)
diff --git a/old-to-be-ported-code/old-api/domclass.py b/old-to-be-ported-code/old-api/domclass.py
deleted file mode 100644
index 3080c40..0000000
--- a/old-to-be-ported-code/old-api/domclass.py
+++ /dev/null
@@ -1,216 +0,0 @@
-#!/usr/bin/env python
-#-*- encoding: utf-8 -*-
-#
-# domclass
-# ********
-#
-# :copyright: (c) 2012 by Arturo Filastò
-# :license: see LICENSE for more details.
-#
-# how this works
-# --------------
-#
-# This classifier uses the DOM structure of a website to determine how similar
-# the two sites are.
-# The procedure we use is the following:
-# * First we parse all the DOM tree of the web page and we build a list of
-# TAG parent child relationships (ex. <html><a><b></b></a><c></c></html> =>
-# (html, a), (a, b), (html, c)).
-#
-# * We then use this information to build a matrix (M) where m[i][j] = P(of
-# transitioning from tag[i] to tag[j]). If tag[i] does not exist, P() = 0.
-# Note: M is a square matrix that is number_of_tags wide.
-#
-# * We then calculate the eigenvectors (v_i) and eigenvalues (e) of M.
-#
-# * The correlation between page A and B is given via this formula:
-# correlation = dot(e_A, e_B) / (|e_A| * |e_B|), rescaled to [0, 1],
-# where e_A and e_B are respectively the eigenvalues of the probability
-# matrix of A and the probability matrix of B.
-#
-
-try:
- import numpy
-except ImportError:
- print "Error: numpy is not installed!"
-
-import yaml
-from zope.interface import implements
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from ooni.plugoo.tests import ITest, OONITest
-from ooni.plugoo.assets import Asset
-from ooni.utils import log
-from ooni.protocols.http import HTTPTest
-
-class domclassArgs(usage.Options):
- optParameters = [['output', 'o', None, 'Output to write'],
- ['file', 'f', None, 'Corpus file'],
- ['fileb', 'b', None, 'Corpus file'],
- ['urls', 'u', None, 'URL List'],
- ['resume', 'r', 0, 'Resume at this index']]
-
-# All HTML4 tags
-# XXX add link to W3C page where these came from
-alltags = ['A', 'ABBR', 'ACRONYM', 'ADDRESS', 'APPLET', 'AREA', 'B', 'BASE',
- 'BASEFONT', 'BD', 'BIG', 'BLOCKQUOTE', 'BODY', 'BR', 'BUTTON', 'CAPTION',
- 'CENTER', 'CITE', 'CODE', 'COL', 'COLGROUP', 'DD', 'DEL', 'DFN', 'DIR', 'DIV',
- 'DL', 'DT', 'EM', 'FIELDSET', 'FONT', 'FORM', 'FRAME', 'FRAMESET', 'H1', 'H2',
- 'H3', 'H4', 'H5', 'H6', 'HEAD', 'HR', 'HTML', 'I', 'IFRAME ', 'IMG',
- 'INPUT', 'INS', 'ISINDEX', 'KBD', 'LABEL', 'LEGEND', 'LI', 'LINK', 'MAP',
- 'MENU', 'META', 'NOFRAMES', 'NOSCRIPT', 'OBJECT', 'OL', 'OPTGROUP', 'OPTION',
- 'P', 'PARAM', 'PRE', 'Q', 'S', 'SAMP', 'SCRIPT', 'SELECT', 'SMALL', 'SPAN',
- 'STRIKE', 'STRONG', 'STYLE', 'SUB', 'SUP', 'TABLE', 'TBODY', 'TD',
- 'TEXTAREA', 'TFOOT', 'TH', 'THEAD', 'TITLE', 'TR', 'TT', 'U', 'UL', 'VAR']
-
-# Reduced subset of only the most common tags
-commontags = ['A', 'B', 'BLOCKQUOTE', 'BODY', 'BR', 'BUTTON', 'CAPTION',
- 'CENTER', 'CITE', 'CODE', 'COL', 'DD', 'DIV',
- 'DL', 'DT', 'EM', 'FIELDSET', 'FONT', 'FORM', 'FRAME', 'FRAMESET', 'H1', 'H2',
- 'H3', 'H4', 'H5', 'H6', 'HEAD', 'HR', 'HTML', 'IFRAME ', 'IMG',
- 'INPUT', 'INS', 'LABEL', 'LEGEND', 'LI', 'LINK', 'MAP',
- 'MENU', 'META', 'NOFRAMES', 'NOSCRIPT', 'OBJECT', 'OL', 'OPTION',
- 'P', 'PRE', 'SCRIPT', 'SELECT', 'SMALL', 'SPAN',
- 'STRIKE', 'STRONG', 'STYLE', 'SUB', 'SUP', 'TABLE', 'TBODY', 'TD',
- 'TEXTAREA', 'TFOOT', 'TH', 'THEAD', 'TITLE', 'TR', 'TT', 'U', 'UL']
-
-# The tags we are interested in using for our analysis
-thetags = ['A', 'DIV', 'FRAME', 'H1', 'H2',
- 'H3', 'H4', 'IFRAME', 'INPUT',
- 'LABEL','LI', 'P', 'SCRIPT', 'SPAN',
- 'STYLE', 'TR']
-
-def compute_probability_matrix(dataset):
- """
- Compute the probability matrix based on the input dataset.
-
- :dataset: an array of pairs representing the parent child relationships.
- """
- import itertools
- ret = {}
- matrix = numpy.zeros((len(thetags) + 1, len(thetags) + 1))
-
- for data in dataset:
- x = data[0].upper()
- y = data[1].upper()
- try:
- x = thetags.index(x)
- except ValueError:
- x = len(thetags)
-
- try:
- y = thetags.index(y)
- except ValueError:
- y = len(thetags)
-
- matrix[x,y] += 1
-
- for x in xrange(len(thetags) + 1):
- possibilities = 0
- for y in matrix[x]:
- possibilities += y
-
- for i in xrange(len(matrix[x])):
- if possibilities != 0:
- matrix[x][i] = matrix[x][i]/possibilities
-
- return matrix
-
-def compute_eigenvalues(matrix):
- """
- Returns the eigenvalues of the supplied square matrix.
-
- :matrix: must be a square matrix and diagonalizable.
- """
- return numpy.linalg.eigvals(matrix)
-
-def readDOM(content=None, filename=None):
- """
- Parses the DOM of the HTML page and returns an array of parent, child
- pairs.
-
- :content: the content of the HTML page to be read.
-
- :filename: the filename to be read from for getting the content of the
- page.
- """
- from bs4 import BeautifulSoup
-
- if filename:
- f = open(filename)
- content = ''.join(f.readlines())
- f.close()
-
- dom = BeautifulSoup(content)
- couples = []
- for x in dom.findAll():
- couples.append((str(x.parent.name), str(x.name)))
-
- return couples
-
-class domclassTest(HTTPTest):
- implements(IPlugin, ITest)
-
- shortName = "domclass"
- description = "domclass"
- requirements = None
- options = domclassArgs
- blocking = False
-
- follow_redirects = True
- #tool = True
-
- def runTool(self):
- site_a = readDOM(filename=self.local_options['file'])
- site_b = readDOM(filename=self.local_options['fileb'])
- a = {}
- a['matrix'] = compute_probability_matrix(site_a)
- a['eigen'] = compute_eigenvalues(a['matrix'])
-
- self.result['eigenvalues'] = a['eigen']
- b = {}
- b['matrix'] = compute_probability_matrix(site_b)
- b['eigen'] = compute_eigenvalues(b['matrix'])
-
- #print "A: %s" % a
- #print "B: %s" % b
- correlation = numpy.vdot(a['eigen'],b['eigen'])
- correlation /= numpy.linalg.norm(a['eigen'])*numpy.linalg.norm(b['eigen'])
- correlation = (correlation + 1)/2
- print "Corelation: %s" % correlation
- self.end()
- return a
-
- def processResponseBody(self, data):
- site_a = readDOM(data)
- #site_b = readDOM(self.local_options['fileb'])
- a = {}
- a['matrix'] = compute_probability_matrix(site_a)
- a['eigen'] = compute_eigenvalues(a['matrix'])
-
-
- if len(data) == 0:
- self.result['eigenvalues'] = None
- self.result['matrix'] = None
- else:
- self.result['eigenvalues'] = a['eigen']
- #self.result['matrix'] = a['matrix']
- #self.result['content'] = data[:200]
- #b = compute_matrix(site_b)
- print "A: %s" % a
- return a['eigen']
-
- def load_assets(self):
- if self.local_options:
- if self.local_options['file']:
- self.tool = True
- return {}
- elif self.local_options['urls']:
- return {'url': Asset(self.local_options['urls'])}
- else:
- self.end()
- return {}
- else:
- return {}
-
-#domclass = domclassTest(None, None, None)
diff --git a/old-to-be-ported-code/old-api/dropin.cache b/old-to-be-ported-code/old-api/dropin.cache
deleted file mode 100755
index 65c2187..0000000
--- a/old-to-be-ported-code/old-api/dropin.cache
+++ /dev/null
@@ -1,243 +0,0 @@
-(dp1
-S'tcpconnect'
-p2
-ccopy_reg
-_reconstructor
-p3
-(ctwisted.plugin
-CachedDropin
-p4
-c__builtin__
-object
-p5
-NtRp6
-(dp7
-S'moduleName'
-p8
-S'ooni.plugins.tcpconnect'
-p9
-sS'description'
-p10
-S'\nThis is a self genrated test created by scaffolding.py.\nyou will need to fill it up with all your necessities.\nSafe hacking :).\n'
-p11
-sS'plugins'
-p12
-(lp13
-g3
-(ctwisted.plugin
-CachedPlugin
-p14
-g5
-NtRp15
-(dp16
-S'provided'
-p17
-(lp18
-ctwisted.plugin
-IPlugin
-p19
-acooni.plugoo.interface
-ITest
-p20
-asS'dropin'
-p21
-g6
-sS'name'
-p22
-S'tcpconnect'
-p23
-sg10
-NsbasbsS'domclass'
-p24
-g3
-(g4
-g5
-NtRp25
-(dp26
-g8
-S'ooni.plugins.domclass'
-p27
-sg10
-Nsg12
-(lp28
-g3
-(g14
-g5
-NtRp29
-(dp30
-g17
-(lp31
-g19
-ag20
-asg21
-g25
-sg22
-S'domclass'
-p32
-sg10
-NsbasbsS'bridget'
-p33
-g3
-(g4
-g5
-NtRp34
-(dp35
-g8
-S'ooni.plugins.bridget'
-p36
-sg10
-Nsg12
-(lp37
-g3
-(g14
-g5
-NtRp38
-(dp39
-g17
-(lp40
-g19
-ag20
-asg21
-g34
-sg22
-S'bridget'
-p41
-sg10
-S"\n XXX fill me in\n\n :ivar config:\n An :class:`ooni.lib.txtorcon.TorConfig` instance.\n :ivar relays:\n A list of all provided relays to test.\n :ivar bridges:\n A list of all provided bridges to test.\n :ivar socks_port:\n Integer for Tor's SocksPort.\n :ivar control_port:\n Integer for Tor's ControlPort.\n :ivar transport:\n String defining the Tor's ClientTransportPlugin, for testing \n a bridge's pluggable transport functionality.\n :ivar tor_binary:\n Path to the Tor binary to use, e.g. '/usr/sbin/tor'\n "
-p42
-sbasbsS'daphn3'
-p43
-g3
-(g4
-g5
-NtRp44
-(dp45
-g8
-S'plugins.daphn3'
-p46
-sg10
-S'\nThis is a self genrated test created by scaffolding.py.\nyou will need to fill it up with all your necessities.\nSafe hacking :).\n'
-p47
-sg12
-(lp48
-g3
-(g14
-g5
-NtRp49
-(dp50
-g17
-(lp51
-g19
-ag20
-asg21
-g44
-sg22
-S'daphn3test'
-p52
-sg10
-NsbasbsS'httpt'
-p53
-g3
-(g4
-g5
-NtRp54
-(dp55
-g8
-S'ooni.plugins.httpt'
-p56
-sg10
-S'\nThis is a self genrated test created by scaffolding.py.\nyou will need to fill it up with all your necessities.\nSafe hacking :).\n'
-p57
-sg12
-(lp58
-sbsS'chinatrigger'
-p59
-g3
-(g4
-g5
-NtRp60
-(dp61
-g8
-S'plugins.chinatrigger'
-p62
-sg10
-Nsg12
-(lp63
-g3
-(g14
-g5
-NtRp64
-(dp65
-g17
-(lp66
-g19
-ag20
-asg21
-g60
-sg22
-S'chinatrigger'
-p67
-sg10
-S'\n This test is a OONI based implementation of the C tool written\n by Philipp Winter to engage chinese probes in active scanning.\n\n Example of running it:\n ./ooni/ooniprobe.py chinatrigger -d 127.0.0.1 -p 8080 -f bla.pcap\n '
-p68
-sbasbsS'dnstamper'
-p69
-g3
-(g4
-g5
-NtRp70
-(dp71
-g8
-S'ooni.plugins.dnstamper'
-p72
-sg10
-S'\n dnstamper\n *********\n\n This test resolves DNS for a list of domain names, one per line, in the\n file specified in the ooni-config under the setting "dns_experiment". If\n the file is top-1m.txt, the test will be run using Amazon\'s list of top\n one million domains. The experimental dns servers to query should\n be specified one per line in assets/dns_servers.txt.\n\n The test reports censorship if the cardinality of the intersection of\n the query result set from the control server and the query result set\n from the experimental server is zero, which is to say, if the two sets\n have no matching results whatsoever.\n\n NOTE: This test frequently results in false positives due to GeoIP-based\n load balancing on major global sites such as google, facebook, and\n youtube, etc.\n\n :author: Isis Lovecruft, Arturo Filast\xc3\xb2\n :license: see LICENSE for more details\n\n TODO:\n * Finish porting to twisted\n
* Finish the client.Resolver() subclass and test it\n * Use the DNS tests from captiveportal\n * Use plugoo/reports.py for final data\n'
-p73
-sg12
-(lp74
-g3
-(g14
-g5
-NtRp75
-(dp76
-g17
-(lp77
-g19
-ag20
-asg21
-g70
-sg22
-S'dnstamper'
-p78
-sg10
-S'\n XXX fill me in\n '
-p79
-sbasbsS'blocking'
-p80
-g3
-(g4
-g5
-NtRp81
-(dp82
-g8
-S'plugins.blocking'
-p83
-sg10
-Nsg12
-(lp84
-g3
-(g14
-g5
-NtRp85
-(dp86
-g17
-(lp87
-g19
-ag20
-asg21
-g81
-sg22
-S'blocking'
-p88
-sg10
-Nsbasbs.
\ No newline at end of file
diff --git a/old-to-be-ported-code/old-api/httpt.py b/old-to-be-ported-code/old-api/httpt.py
deleted file mode 100644
index 358f1ea..0000000
--- a/old-to-be-ported-code/old-api/httpt.py
+++ /dev/null
@@ -1,94 +0,0 @@
-"""
-This is a self-generated test created by scaffolding.py.
-You will need to fill it up with all your necessities.
-Safe hacking :).
-"""
-from zope.interface import implements
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from ooni.plugoo.tests import ITest, OONITest
-from ooni.plugoo.assets import Asset
-from ooni.protocols import http
-from ooni.utils import log
-
-class httptArgs(usage.Options):
- optParameters = [['urls', 'f', None, 'Urls file'],
- ['url', 'u', 'http://torproject.org/', 'Test single site'],
- ['resume', 'r', 0, 'Resume at this index'],
- ['rules', 'y', None, 'Specify the redirect rules file']]
-
-class httptTest(http.HTTPTest):
- implements(IPlugin, ITest)
-
- shortName = "httpt"
- description = "httpt"
- requirements = None
- options = httptArgs
- blocking = False
-
-
- def testPattern(self, value, pattern, type):
- if type == 'eq':
- return value == pattern
- elif type == 're':
- import re
- if re.match(pattern, value):
- return True
- else:
- return False
- else:
- return None
-
- def testPatterns(self, patterns, location):
- test_result = False
-
- if type(patterns) == list:
- for pattern in patterns:
- test_result |= self.testPattern(location, pattern['value'], pattern['type'])
- else:
- test_result |= self.testPattern(location, patterns['value'], patterns['type'])
-
- return test_result
-
- def testRules(self, rules, location):
- result = {}
- blocked = False
- for rule, value in rules.items():
- current_rule = {}
- current_rule['name'] = value['name']
- current_rule['patterns'] = value['patterns']
- current_rule['test'] = self.testPatterns(value['patterns'], location)
- blocked |= current_rule['test']
- result[rule] = current_rule
- result['blocked'] = blocked
- return result
-
- def processRedirect(self, location):
- self.result['redirect'] = None
- try:
- rules_file = self.local_options['rules']
- import yaml
- rules = yaml.load(open(rules_file))
- log.msg("Testing rules %s" % rules)
- redirect = self.testRules(rules, location)
- self.result['redirect'] = redirect
- except TypeError:
- log.msg("No rules file. Got a redirect, but nothing to do.")
-
-
- def control(self, experiment_result, args):
- print self.response
- print self.request
- # What you return here ends up inside of the report.
- log.msg("Running control")
- return {}
-
- def load_assets(self):
- if self.local_options and self.local_options['urls']:
- return {'url': Asset(self.local_options['urls'])}
- else:
- return {}
-
-# We need to instantiate it otherwise getPlugins does not detect it
-# XXX Find a way to load plugins without instantiating them.
-#httpt = httptTest(None, None, None)
diff --git a/old-to-be-ported-code/old-api/tcpconnect.py b/old-to-be-ported-code/old-api/tcpconnect.py
deleted file mode 100644
index 7758a9e..0000000
--- a/old-to-be-ported-code/old-api/tcpconnect.py
+++ /dev/null
@@ -1,65 +0,0 @@
-"""
-This is a self-generated test created by scaffolding.py.
-You will need to fill it up with all your necessities.
-Safe hacking :).
-"""
-from zope.interface import implements
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from twisted.internet.protocol import Factory, Protocol
-from twisted.internet.endpoints import TCP4ClientEndpoint
-
-from ooni.plugoo.interface import ITest
-from ooni.plugoo.tests import OONITest
-from ooni.plugoo.assets import Asset
-from ooni.utils import log
-
-class tcpconnectArgs(usage.Options):
- optParameters = [['asset', 'a', None, 'File containing IP:PORT combinations, one per line.'],
- ['resume', 'r', 0, 'Resume at this index']]
-
-class tcpconnectTest(OONITest):
- implements(IPlugin, ITest)
-
- shortName = "tcpconnect"
- description = "tcpconnect"
- requirements = None
- options = tcpconnectArgs
- blocking = False
-
- def experiment(self, args):
- try:
- host, port = args['asset'].split(':')
- except:
- raise Exception("Error in parsing asset. Wrong format?")
- class DummyFactory(Factory):
- def buildProtocol(self, addr):
- return Protocol()
-
- def gotProtocol(p):
- p.transport.loseConnection()
- log.msg("Got a connection!")
- log.msg(str(p))
- return {'result': True, 'target': [host, port]}
-
- def gotError(err):
- log.msg("Had error :(")
- log.msg(err)
- return {'result': False, 'target': [host, port]}
-
- # What you return here gets handed as input to control
- point = TCP4ClientEndpoint(self.reactor, host, int(port))
- d = point.connect(DummyFactory())
- d.addCallback(gotProtocol)
- d.addErrback(gotError)
- return d
-
- def load_assets(self):
- if self.local_options:
- return {'asset': Asset(self.local_options['asset'])}
- else:
- return {}
-
-# We need to instantiate it otherwise getPlugins does not detect it
-# XXX Find a way to load plugins without instantiating them.
-#tcpconnect = tcpconnectTest(None, None, None)
diff --git a/old-to-be-ported-code/old-api/tcpscan.py b/old-to-be-ported-code/old-api/tcpscan.py
deleted file mode 100644
index b371c88..0000000
--- a/old-to-be-ported-code/old-api/tcpscan.py
+++ /dev/null
@@ -1,84 +0,0 @@
-"""
- TCP Port Scanner
- ****************
-
- Does a TCP connect scan on the IP:port pairs.
-
-"""
-import os
-from gevent import socket
-from datetime import datetime
-import socks
-
-from plugoo.assets import Asset
-from plugoo.tests import Test
-
-__plugoo__ = "TCP Port Scanner"
-__desc__ = "This a test template to be used to build your own tests"
-
-class TCPScanAsset(Asset):
- """
- This is the asset that should be used by the Test. It will
- contain all the code responsible for parsing the asset file
- and should be passed on instantiation to the test.
- """
- def __init__(self, file=None):
- self = Asset.__init__(self, file)
-
-
-class TCPScan(Test):
- """
- The main Test class
- """
-
- def experiment(self, *a, **kw):
- """
- Fill this up with the tasks that should be performed
- on the "dirty" network and should be compared with the
- control.
- """
- addr = kw['data']
- s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- res = False
- try:
- self.logger.debug('Doing a connection to %s' % addr)
- s.connect((addr.split(':')[0], int(addr.split(':')[1])))
- res = True
- except socket.error, msg:
- self.logger.debug('Connection failed to %s: %s' % (addr, msg))
-
- finally:
- s.close()
-
- return {'Time': datetime.now(),
- 'Address': addr,
- 'Status': res}
-
- def control(self):
- """
- Fill this up with the control related code.
- """
- return True
-
-def run(ooni, asset=None):
- """
- This is the function that will be called by OONI
- and it is responsible for instantiating and passing
- the arguments to the Test class.
- """
- config = ooni.config
-
- # This the assets array to be passed to the run function of
- # the test
- if asset:
- assets = [TCPScanAsset(asset)]
- else:
- assets = [TCPScanAsset(os.path.join(config.main.assetdir, \
- "tcpscan.txt"))]
-
- # Instantiate the Test
- thetest = TCPScan(ooni)
- ooni.logger.info("starting TCP Scan...")
- # Run the test with argument assets
- thetest.run(assets)
- ooni.logger.info("finished.")
diff --git a/old-to-be-ported-code/spec/proxooni-spec.txt b/old-to-be-ported-code/spec/proxooni-spec.txt
deleted file mode 100644
index 7cc476f..0000000
--- a/old-to-be-ported-code/spec/proxooni-spec.txt
+++ /dev/null
@@ -1,65 +0,0 @@
-
- Proxyooni specification
- version 0.0
- Jacob Appelbaum
-
-0. Preface
-
- This document describes a new proxy that is required to support ooni-probe.
-
-1. Overview
-
- There is no common proxy type that thwarts even the most basic traffic
- monitoring. The Proxyooni specification aims to provide a proxy that is
- encrypted by default, optionally authenticated, and will provide a way to run
- specific ooni-probe tests natively on the system where the proxy is running.
-
-2. Implementation
-
- Proxyooni may be written in any language; the reference implementation will be
- written in Python. The program shall be called ooni-proxy, and it will handle
- running as a privileged or unprivileged user on supported systems. We
- aim to support ooni-proxy on Debian GNU/Linux as the reference platform.
-
-2.1 Connections
-
- When ooni-proxy runs, it should open a single port and it will allow TLS 1.0
- clients to connect with a cipher suite that provides perfect forward secrecy.
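A minimal sketch of such a listener using Twisted (an editor's illustration, not
part of the spec; the port and certificate paths are placeholders, and cipher-suite
selection for forward secrecy is omitted here):

    from twisted.internet import protocol, reactor, ssl

    class OoniProxyProtocol(protocol.Protocol):
        def dataReceived(self, data):
            pass  # SOCKS4A handling would go here

    factory = protocol.Factory()
    factory.protocol = OoniProxyProtocol
    context = ssl.DefaultOpenSSLContextFactory('ooni-proxy.key', 'ooni-proxy.crt')
    reactor.listenSSL(9099, factory, context)
    reactor.run()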
-
-2.2 Certificates
-
- ooni-proxy should use a certificate if supplied or dynamically generate a
- certificate on startup; any connecting client should bootstrap trust with a
- TOFU model, a client may ignore the
-
-2.3 Authentication
-
- ooni-proxy should provide open access by default with no authentication.
- It should support TLS-PSK[0] if authentication is desired. Key distribution is
- explicitly an out-of-scope problem.
-
-3.0 Services offered
-
- Post authentication, a remote client should treat ooni-proxy as a SOCKS4A[1]
- proxy. It should be possible to chain as many Proxyooni proxies as desired.
-
-3.1 Additional services offered
-
- ooni-proxy should allow for the sending of raw socket data - this is currently
- left unspecified. This should be specified in the next revision of the
- specification.
-
-3.2 Advanced meta-services
-
- It may be desired to load code on the ooni-proxy from a client with newer
- tests. This should be specified in the next revision of the specification.
-
-4. Security Concerns
-
- It is probably not a good idea to run ooni-proxy unless you have permission to
- do so. Consider your network context carefully; if it is dangerous to run a test
- ensure that you do not run the test.
-
-[0] http://en.wikipedia.org/wiki/TLS-PSK
-[1] http://en.wikipedia.org/wiki/SOCKS#SOCKS_4a
-
diff --git a/old-to-be-ported-code/very-old/TODO.plgoons b/old-to-be-ported-code/very-old/TODO.plgoons
deleted file mode 100644
index ace2a10..0000000
--- a/old-to-be-ported-code/very-old/TODO.plgoons
+++ /dev/null
@@ -1,79 +0,0 @@
-We should implement the following as plugoons:
-
-dns_plgoo.py - Various DNS checks
-
-As a start - we should perform a known good check against a name or list of
-names. As input, we should take an ip address, a name or a list of names for
-testing; we also take dns servers for experiment or control data. For output we
-emit UDP or TCP packets - we should support proxying these requests when
-possible as is the case with TCP but probably not with UDP for certain DNS
-request types.
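A rough sketch of that known-good comparison (editor's illustration; it reuses the
hypothetical oracle_query() helper sketched earlier and leaves out the proxying
concerns):

    def compare_resolvers(names, control_server, experiment_server):
        results = {}
        for name in names:
            control = set(oracle_query(control_server, name)['answers'])
            experiment = set(oracle_query(experiment_server, name)['answers'])
            results[name] = {'control': sorted(control),
                             'experiment': sorted(experiment),
                             'intersection_empty': not (control & experiment)}
        return results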
-
-http_plgoo.py - Various HTTP checks
-
-We should compare two pages and see if we have identical properties.
-At the very least, we should print the important differences - perhaps
-with a diff-like output? We should look for fingerprints in URLs that are
-returned. We should detect 302 redirection.
-
-As input, we should take an ip address, a name or a list of names for testing;
-we also take a list of headers such as random user agent strings and so on.
-We should emit TCP packets and ensure that we do not leak DNS for connections
-that we expect to proxy to a remote network.
-
-latency_plgoo.py - Measure latency for a host or a list of hosts
-
-As input, we should take an ip address, a name or a list of names for testing;
-We should measure the mean latency from the ooni-probe to the host with various
-traceroute tests. We should also measure the latency between the ooni-probe and
-a given server for any other protocol that is request and response oriented;
-HTTP latency may be calculated by simply tracking the delta between requests
-and responses.
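For the HTTP case, the request/response delta can be measured with something as
small as this (editor's sketch; urllib2 is the Python 2 module already used
elsewhere in this tree):

    import time
    import urllib2

    def http_latency(url, tries=3):
        samples = []
        for _ in range(tries):
            start = time.time()
            urllib2.urlopen(url).read()
            samples.append(time.time() - start)
        return sum(samples) / len(samples)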
-
-tcptrace_plgoo.py udptrace_plgoo.py icmptrace_plgoo.py - Traceroute suites
-
-tcptrace_plgoo.py should allow for both stray and in-connection traceroute
-modes.
-
-udptrace_plgoo.py should use UDP 53 by default; 0 and 123 are also nice options
-- it may also be nice to simply make a random A record request in a DNS packet
-and use it as the payload for a UDP traceroute.
-
-reversetrace_plgoo.py should give a remote host the client's IP and return the
-output of a traceroute to that IP from the remote host. It will need a remote
-component if run against a web server. It would not need a remote component if
-run against route-views - we can simply telnet over Tor and ask it to trace to
-our detected client IP.
-
-keyword_plgoo.py should take a keyword or a list of keywords for use as a
-payload in a variety of protocols. This should be protocol-aware - DNS keyword
-filtering requires a sniffer to catch stray packets after the censor wins the
-race. HTTP payloads in open connections may be similar and, in practice, we'll
-have to fine-tune it.
-
-icsi_plgoo.py - The ICSI Netalyzr tests; we should act as a client for their
-servers. They have dozens of tests and to implement this plgoo, we'll need to
-add many things to ooni. More details here:
-http://netalyzr.icsi.berkeley.edu/faq.html
-http://netalyzr.icsi.berkeley.edu/json/id=example-session
-
-HTML output:
-http://n2.netalyzr.icsi.berkeley.edu/summary/id=43ca208a-3466-82f17207-9bc1-433f-9b43
-
-JSON output:
-http://n2.netalyzr.icsi.berkeley.edu/json/id=43ca208a-3466-82f17207-9bc1-433f-9b43
-
-Netalyzer log:
-http://netalyzr.icsi.berkeley.edu/restore/id=43ca208a-3466-82f17207-9bc1-433f-9b43
-http://n2.netalyzr.icsi.berkeley.edu/transcript/id=43ca208a-3466-82f17207-9bc1-433f-9b43/side=client
-http://n2.netalyzr.icsi.berkeley.edu/transcript/id=43ca208a-3466-82f17207-9bc1-433f-9b43/side=server
-
-sniffer_plgoo.py - We need a generic method for capturing packets during a full
-run - this may be better as a core ooni-probe feature, but we should implement
-packet capture in a plugin if it is done nowhere else.
-
-nmap_plgoo.py - We should take a list of hosts and run nmap against each of
-these hosts; many hosts are collected during testing and they should be scanned
-with a reasonable set of options such as "-A -O -T4 -sT --top-ports=10000".
-
diff --git a/old-to-be-ported-code/very-old/TO_BE_PORTED b/old-to-be-ported-code/very-old/TO_BE_PORTED
deleted file mode 100644
index 49ce5e0..0000000
--- a/old-to-be-ported-code/very-old/TO_BE_PORTED
+++ /dev/null
@@ -1,14 +0,0 @@
-
-The tests in this directory are very old, and have been ported neither to
-Twisted nor to the new twisted.trial API framework. They are not, however, as
-old as the *seriously old* OONI code, which was written two years ago.
-
-These tests should be updated at least to use Twisted.
-
-If you want to hack on something carefree, feel free to mess with these files,
-because it would be difficult not to improve on them.
-
-<(A)3
-isis
-0x2cdb8b35
diff --git a/old-to-be-ported-code/very-old/ooni-probe.diff b/old-to-be-ported-code/very-old/ooni-probe.diff
deleted file mode 100644
index fc61d3f..0000000
--- a/old-to-be-ported-code/very-old/ooni-probe.diff
+++ /dev/null
@@ -1,358 +0,0 @@
-diff --git a/TODO b/TODO
-index c2e19af..51fa559 100644
---- a/TODO
-+++ b/TODO
-@@ -293,3 +293,142 @@ VIA Rail MITM's SSL In Ottawa:
- Jul 22 17:47:21.983 [Warning] Problem bootstrapping. Stuck at 85%: Finishing handshake with first hop. (DONE; DONE; count 13; recommendation warn)
-
- http://wireless.colubris.com:81/goform/HtmlLoginRequest?username=al1852&pas…
-+
-+VIA Rail Via header:
-+
-+HTTP/1.0 301 Moved Permanently
-+Location: http://www.google.com/
-+Content-Type: text/html; charset=UTF-8
-+Date: Sat, 23 Jul 2011 02:21:30 GMT
-+Expires: Mon, 22 Aug 2011 02:21:30 GMT
-+Cache-Control: public, max-age=2592000
-+Server: gws
-+Content-Length: 219
-+X-XSS-Protection: 1; mode=block
-+X-Cache: MISS from cache_server
-+X-Cache-Lookup: MISS from cache_server:3128
-+Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
-+Connection: close
-+
-+<HTML><HEAD><meta http-equiv="content-type" content="text/html;charset=utf-8">
-+<TITLE>301 Moved</TITLE></HEAD><BODY>
-+<H1>301 Moved</H1>
-+The document has moved
-+<A HREF="http://www.google.com/">here</A>.
-+</BODY></HTML>
-+
-+
-+blocked site:
-+
-+HTTP/1.0 302 Moved Temporarily
-+Server: squid/2.6.STABLE21
-+Date: Sat, 23 Jul 2011 02:22:17 GMT
-+Content-Length: 0
-+Location: http://10.66.66.66/denied.html
-+
-+invalid request response:
-+
-+$ nc 8.8.8.8 80
-+hjdashjkdsahjkdsa
-+HTTP/1.0 400 Bad Request
-+Server: squid/2.6.STABLE21
-+Date: Sat, 23 Jul 2011 02:22:44 GMT
-+Content-Type: text/html
-+Content-Length: 1178
-+Expires: Sat, 23 Jul 2011 02:22:44 GMT
-+X-Squid-Error: ERR_INVALID_REQ 0
-+X-Cache: MISS from cache_server
-+X-Cache-Lookup: NONE from cache_server:3128
-+Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
-+Proxy-Connection: close
-+
-+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-+<HTML><HEAD><META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
-+<TITLE>ERROR: The requested URL could not be retrieved</TITLE>
-+<STYLE type="text/css"><!--BODY{background-color:#ffffff;font-family:verdana,sans-serif}PRE{font-family:sans-serif}--></STYLE>
-+</HEAD><BODY>
-+<H1>ERROR</H1>
-+<H2>The requested URL could not be retrieved</H2>
-+<HR noshade size="1px">
-+<P>
-+While trying to process the request:
-+<PRE>
-+hjdashjkdsahjkdsa
-+
-+</PRE>
-+<P>
-+The following error was encountered:
-+<UL>
-+<LI>
-+<STRONG>
-+Invalid Request
-+</STRONG>
-+</UL>
-+
-+<P>
-+Some aspect of the HTTP Request is invalid. Possible problems:
-+<UL>
-+<LI>Missing or unknown request method
-+<LI>Missing URL
-+<LI>Missing HTTP Identifier (HTTP/1.0)
-+<LI>Request is too large
-+<LI>Content-Length missing for POST or PUT requests
-+<LI>Illegal character in hostname; underscores are not allowed
-+</UL>
-+<P>Your cache administrator is <A HREF="mailto:root">root</A>.
-+
-+<BR clear="all">
-+<HR noshade size="1px">
-+<ADDRESS>
-+Generated Sat, 23 Jul 2011 02:22:44 GMT by cache_server (squid/2.6.STABLE21)
-+</ADDRESS>
-+</BODY></HTML>
-+
-+nc 10.66.66.66 80
-+GET cache_object://localhost/info HTTP/1.0
-+HTTP/1.0 403 Forbidden
-+Server: squid/2.6.STABLE21
-+Date: Sat, 23 Jul 2011 02:25:56 GMT
-+Content-Type: text/html
-+Content-Length: 1061
-+Expires: Sat, 23 Jul 2011 02:25:56 GMT
-+X-Squid-Error: ERR_ACCESS_DENIED 0
-+X-Cache: MISS from cache_server
-+X-Cache-Lookup: NONE from cache_server:3128
-+Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
-+Proxy-Connection: close
-+
-+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-+<HTML><HEAD><META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
-+<TITLE>ERROR: The requested URL could not be retrieved</TITLE>
-+<STYLE type="text/css"><!--BODY{background-color:#ffffff;font-family:verdana,sans-serif}PRE{font-family:sans-serif}--></STYLE>
-+</HEAD><BODY>
-+<H1>ERROR</H1>
-+<H2>The requested URL could not be retrieved</H2>
-+<HR noshade size="1px">
-+<P>
-+While trying to retrieve the URL:
-+<A HREF="cache_object://localhost/info">cache_object://localhost/info</A>
-+<P>
-+The following error was encountered:
-+<UL>
-+<LI>
-+<STRONG>
-+Access Denied.
-+</STRONG>
-+<P>
-+Access control configuration prevents your request from
-+being allowed at this time. Please contact your service provider if
-+you feel this is incorrect.
-+</UL>
-+<P>Your cache administrator is <A HREF="mailto:root">root</A>.
-+
-+
-+<BR clear="all">
-+<HR noshade size="1px">
-+<ADDRESS>
-+Generated Sat, 23 Jul 2011 02:25:56 GMT by cache_server (squid/2.6.STABLE21)
-+</ADDRESS>
-+</BODY></HTML>
-+
-+
-diff --git a/ooni/command.py b/ooni/command.py
-index 361190f..df1a58c 100644
---- a/ooni/command.py
-+++ b/ooni/command.py
-@@ -13,6 +13,7 @@ import ooni.captive_portal
- import ooni.namecheck
- import ooni.dns_poisoning
- import ooni.dns_cc_check
-+import ooni.transparenthttp
-
- class Command():
- def __init__(self, args):
-@@ -48,6 +49,15 @@ class Command():
- help="run captiveportal tests"
- )
-
-+ # --transhttp
-+ def cb_transhttp(option, opt, value, oparser):
-+ self.action = opt[2:]
-+ optparser.add_option(
-+ "--transhttp",
-+ action="callback", callback=cb_transhttp,
-+ help="run Transparent HTTP tests"
-+ )
-+
- # --dns
- def cb_dnstests(option, opt, value, oparser):
- self.action = opt[2:]
-@@ -122,7 +132,7 @@ class Command():
- if (not self.action):
- raise optparse.OptionError(
- 'is required',
-- '--dns | --dnsbulk | --captiveportal | --help | --version'
-+ '--dns | --dnsbulk | --dnscccheck | [ --cc CC ] | --captiveportal | --transhttp | --help | --version'
- )
-
- except optparse.OptionError, err:
-@@ -138,6 +148,10 @@ class Command():
- captive_portal = ooni.captive_portal.CaptivePortal
- captive_portal(self).main()
-
-+ def transhttp(self):
-+ transparent_http = ooni.transparenthttp.TransparentHTTPProxy
-+ transparent_http(self).main()
-+
- def dns(self):
- dnstests = ooni.namecheck.DNS
- dnstests(self).main()
-diff --git a/ooni/dns.py b/ooni/dns.py
-index 95da6ef..90d50bd 100644
---- a/ooni/dns.py
-+++ b/ooni/dns.py
-@@ -8,7 +8,7 @@ from socket import gethostbyname
- import ooni.common
-
- # apt-get install python-dns
--import DNS
-+import dns
- import random
-
- """ Wrap gethostbyname """
-diff --git a/ooni/http.py b/ooni/http.py
-index 62365bb..bb72001 100644
---- a/ooni/http.py
-+++ b/ooni/http.py
-@@ -7,8 +7,14 @@
- from socket import gethostbyname
- import ooni.common
- import urllib2
-+import httplib
-+from urlparse import urlparse
-+from pprint import pprint
- import pycurl
-+import random
-+import string
- import re
-+from BeautifulSoup import BeautifulSoup
-
- # By default, we'll be Torbutton's UA
- default_ua = { 'User-Agent' :
-@@ -20,20 +26,8 @@ default_proxy_type = PROXYTYPE_SOCKS5
- default_proxy_host = "127.0.0.1"
- default_proxy_port = "9050"
-
--
--
--
--
--
--
--
--
--
--
--
--
--
--
-+#class HTTPResponse(object):
-+# def __init__(self):
-
-
- """A very basic HTTP fetcher that uses Tor by default and returns a curl
-@@ -51,7 +45,7 @@ def http_proxy_fetch(url, headers, proxy_type=5,
- http_code = getinfo(pycurl.HTTP_CODE)
- return response, http_code
-
--"""A very basic HTTP fetcher that returns a urllib3 response object."""
-+"""A very basic HTTP fetcher that returns a urllib2 response object."""
- def http_fetch(url,
- headers= default_ua,
- label="generic HTTP fetch"):
-@@ -136,6 +130,76 @@ def http_header_no_match(experiment_url, control_header, control_result):
- else:
- return True
-
-+def http_request(self, method, url, path=None):
-+ """Takes as argument url that is perfectly formed (http://hostname/REQUEST"""
-+ purl = urlparse(url)
-+ host = purl.netloc
-+ conn = httplib.HTTPConnection(host, 80)
-+ if path is None:
-+ path = purl.path
-+ conn.request(method, purl.path)
-+ response = conn.getresponse()
-+ headers = dict(response.getheaders())
-+ self.headers = headers
-+ self.data = response.read()
-+ return True
-+
-+def search_headers(self, s_headers, url):
-+ if http_request(self, "GET", url):
-+ headers = self.headers
-+ else:
-+ return None
-+ result = {}
-+ for h in s_headers.items():
-+ result[h[0]] = h[0] in headers
-+ return result
-+
-+def http_header_match_dict(experimental_url, dict_header):
-+ result = {}
-+ url_header = http_get_header_dict(experimental_url)
-+
-+# XXX for testing
-+# [('content-length', '9291'), ('via', '1.0 cache_server:3128 (squid/2.6.STABLE21)'), ('x-cache', 'MISS from cache_server'), ('accept-ranges', 'bytes'), ('server', 'Apache/2.2.16 (Debian)'), ('last-modified', 'Fri, 22 Jul 2011 03:00:31 GMT'), ('connection', 'close'), ('etag', '"105801a-244b-4a89fab1e51c0;49e684ba90c80"'), ('date', 'Sat, 23 Jul 2011 03:03:56 GMT'), ('content-type', 'text/html'), ('x-cache-lookup', 'MISS from cache_server:3128')]
-+
-+def search_squid_headers(self):
-+ url = "http://securityfocus.org/blabla"
-+ s_headers = {'via': '1.0 cache_server:3128 (squid/2.6.STABLE21)', 'x-cache': 'MISS from cache_server', 'x-cache-lookup':'MISS from cache_server:3128'}
-+ ret = search_headers(self, s_headers, url)
-+ for i in ret.items():
-+ if i[1] is True:
-+ return False
-+ return True
-+
-+def random_bad_request(self):
-+ url = "http://securityfocus.org/blabla"
-+ r_str = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(random.randint(5,20)))
-+ if http_request(self, r_str, url):
-+ return True
-+ else:
-+ return None
-+
-+def squid_search_bad_request(self):
-+ if random_bad_request(self):
-+ s_headers = {'X-Squid-Error' : 'ERR_INVALID_REQ 0'}
-+ for i in s_headers.items():
-+ if i[0] in self.headers:
-+ return False
-+ return True
-+ else:
-+ return None
-+
-+def squid_cacheobject_request(self):
-+ url = "http://securityfocus.org/blabla"
-+ if http_request(self, "GET", url, "cache_object://localhost/info"):
-+ soup = BeautifulSoup(self.data)
-+ if soup.find('strong') and soup.find('strong').string == "Access Denied.":
-+ return False
-+ else:
-+ return True
-+ else:
-+ return None
-+
-+
- def MSHTTP_CP_Tests(self):
- experiment_url = "http://www.msftncsi.com/ncsi.txt"
- expectedResponse = "Microsoft NCSI" # Only this - nothing more
-@@ -186,6 +250,18 @@ def WC3_CP_Tests(self):
-
- # Google ChromeOS fetches this url in guest mode
- # and they expect the user to authenticate
-- def googleChromeOSHTTPTest(self):
-- print "noop"
-- #url = "http://www.google.com/"
-+def googleChromeOSHTTPTest(self):
-+ print "noop"
-+ #url = "http://www.google.com/"
-+
-+def SquidHeader_TransparentHTTP_Tests(self):
-+ return search_squid_headers(self)
-+
-+def SquidBadRequest_TransparentHTTP_Tests(self):
-+ squid_cacheobject_request(self)
-+ return squid_search_bad_request(self)
-+
-+def SquidCacheobject_TransparentHTTP_Tests(self):
-+ return squid_cacheobject_request(self)
-+
-+
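
One quirk in the hunk above: http_request() computes a fallback for its optional path argument but then calls conn.request(method, purl.path), so the explicit path that squid_cacheobject_request() passes ("cache_object://localhost/info") never reaches the wire. A minimal sketch of what the helper appears to intend, using only the Python 2 standard library (names are illustrative, not part of the patch):

    import httplib
    from urlparse import urlparse

    def http_request_sketch(method, url, path=None):
        """Issue `method` for `url`; `path`, when given, overrides the URL path."""
        purl = urlparse(url)
        conn = httplib.HTTPConnection(purl.netloc)
        if path is None:
            path = purl.path or "/"
        conn.request(method, path)  # use the override, not purl.path
        response = conn.getresponse()
        headers = dict(response.getheaders())
        body = response.read()
        conn.close()
        return headers, body

Whether the original behaviour is intentional is not clear from the patch; the sketch simply honours the documented parameter.
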
diff --git a/old-to-be-ported-code/very-old/ooni/#namecheck.py# b/old-to-be-ported-code/very-old/ooni/#namecheck.py#
deleted file mode 100644
index 1a2a3f0..0000000
--- a/old-to-be-ported-code/very-old/ooni/#namecheck.py#
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python
-#
-# DNS tampering detection module
-# by Jacob Appelbaum <jacob(a)appelbaum.net>
-#
-# This module performs multiple DNS tests.
-
-import sys
-import ooni.dnsooni
-
-class DNS():
- def __init__(self, args):
- self.in_ = sys.stdin
- self.out = sys.stdout
- self.debug = False
- self.randomize = args.randomize
-
- def DNS_Tests(self):
- print "DNS tampering detection:"
- filter_name = "_DNS_Tests"
- tests = [ooni.dnsooni]
- for test in tests:
- for function_ptr in dir(test):
- if function_ptr.endswith(filter_name):
- filter_result = getattr(test, function_ptr)(self)
- if filter_result == True:
- print function_ptr + " thinks the network is clean"
- elif filter_result == None:
- print function_ptr + " failed"
- else:
- print function_ptr + " thinks the network is dirty"
-
- def main(self):
- for function_ptr in dir(self):
- if function_ptr.endswith("_Tests"):
- getattr(self, function_ptr)()
-
-if __name__ == '__main__':
- self.main()
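
The DNS driver above, like the other deleted drivers further down, relies on one reflection idiom: enumerate a test module with dir(), keep the callables whose names end in a filter suffix, call each with the driver instance, and map True/None/anything-else onto clean/failed/dirty. A condensed sketch of that idiom (module and suffix are placeholders):

    def run_suffix_tests(module, suffix, context):
        """Run every callable in `module` whose name ends in `suffix` against `context`."""
        for name in dir(module):
            if not name.endswith(suffix):
                continue
            result = getattr(module, name)(context)
            if result is True:
                print name + " thinks the network is clean"
            elif result is None:
                print name + " failed"
            else:
                print name + " thinks the network is dirty"

For example, run_suffix_tests(ooni.dnsooni, "_DNS_Tests", self) reproduces the DNS_Tests() loop above.
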
diff --git a/old-to-be-ported-code/very-old/ooni/.DS_Store b/old-to-be-ported-code/very-old/ooni/.DS_Store
deleted file mode 100644
index f5738a5..0000000
Binary files a/old-to-be-ported-code/very-old/ooni/.DS_Store and /dev/null differ
diff --git a/old-to-be-ported-code/very-old/ooni/__init__.py b/old-to-be-ported-code/very-old/ooni/__init__.py
deleted file mode 100644
index 8f1b96e..0000000
--- a/old-to-be-ported-code/very-old/ooni/__init__.py
+++ /dev/null
@@ -1,12 +0,0 @@
-"""\
-This is your package, 'ooni'.
-
-It was provided by the package, `package`.
-
-Please change this documentation, and write this module!
-"""
-
-__version__ = '0.0.1'
-
-# If you run 'make test', this is your failing test.
-# raise Exception("\n\n\tNow it's time to write your 'ooni' module!!!\n\n")
diff --git a/old-to-be-ported-code/very-old/ooni/command.py b/old-to-be-ported-code/very-old/ooni/command.py
deleted file mode 100644
index e5f8f9f..0000000
--- a/old-to-be-ported-code/very-old/ooni/command.py
+++ /dev/null
@@ -1,250 +0,0 @@
-# -*- coding: utf-8
-"""\
-Command line UI module for ooni-probe - heavily inspired by Ingy döt Net
-"""
-
-import os
-import sys
-import re
-import optparse
-
-# Only include high level ooni tests at this time
-import ooni.captive_portal
-import ooni.namecheck
-import ooni.dns_poisoning
-import ooni.dns_cc_check
-import ooni.transparenthttp
-import ooni.helpers
-import ooni.plugooni
-import ooni.input
-
-class Command():
- def __init__(self, args):
- sys.argv = sys.argv[0:1]
- sys.argv.extend(args)
- self.startup_options()
-
- def startup_options(self):
- self.action = None
- self.from_ = None
- self.to = None
- self.parser = None
- self.emitter = None
- self.emit_header = None
- self.emit_trailer = None
- self.in_ = sys.stdin
- self.out = sys.stdout
- self.debug = False
- self.randomize = True
- self.cc = None
- self.hostname = None
- self.listfile = None
- self.listplugooni = False
- self.plugin_name = "all"
- self.controlproxy = None # "socks4a://127.0.0.1:9050/"
- self.experimentproxy = None
-
- usage = """
-
- 'ooni' is the Open Observatory of Network Interference
-
- command line usage: ooni-probe [options]"""
-
- optparser = optparse.OptionParser(usage=usage)
-
- # --plugin
- def cb_plugin(option, opt, value, oparser):
- self.action = opt[2:]
- self.plugin_name = str(value)
- optparser.add_option(
- "--plugin", type="string",
- action="callback", callback=cb_plugin,
- help="run the Plugooni plgoo plugin specified"
- )
-
- # --listplugins
- def cb_list_plugins(option, opt, value, oparser):
- self.action = opt[2:]
- optparser.add_option(
- "--listplugins",
- action="callback", callback=cb_list_plugins,
- help="list available Plugooni as plgoos plugin names"
- )
-
- # --captiveportal
- def cb_captiveportal(option, opt, value, oparser):
- self.action = opt[2:]
- optparser.add_option(
- "--captiveportal",
- action="callback", callback=cb_captiveportal,
- help="run vendor emulated captiveportal tests"
- )
-
- # --transhttp
- def cb_transhttp(option, opt, value, oparser):
- self.action = opt[2:]
- optparser.add_option(
- "--transhttp",
- action="callback", callback=cb_transhttp,
- help="run Transparent HTTP tests"
- )
-
- # --dns
- def cb_dnstests(option, opt, value, oparser):
- self.action = opt[2:]
- optparser.add_option(
- "--dns",
- action="callback", callback=cb_dnstests,
- help="run fixed generic dns tests"
- )
-
- # --dnsbulk
- def cb_dnsbulktests(option, opt, value, oparser):
- self.action = opt[2:]
- optparser.add_option(
- "--dnsbulk",
- action="callback", callback=cb_dnsbulktests,
- help="run bulk DNS tests in random.shuffle() order"
- )
-
- # --dns-cc-check
- def cb_dnscccheck(option, opt, value, oparser):
- self.action = opt[2:]
- optparser.add_option(
- "--dnscccheck",
- action="callback", callback=cb_dnscccheck,
- help="run cc specific bulk DNS tests in random.shuffle() order"
- )
-
- # --cc [country code]
- def cb_cc(option, opt, value, optparser):
- # XXX: We should check this against a list of supported country codes
- # and then return the matching value from the list into self.cc
- self.cc = str(value)
- optparser.add_option(
- "--cc", type="string",
- action="callback", callback=cb_cc,
- help="set a specific county code -- default is None",
- )
-
- # --list [url/hostname/ip list in file]
- def cb_list(option, opt, value, optparser):
- self.listfile = os.path.expanduser(value)
- if not os.path.isfile(self.listfile):
- print "Wrong file '" + value + "' in --list."
- sys.exit(1)
- optparser.add_option(
- "--list", type="string",
- action="callback", callback=cb_list,
- help="file to read from -- default is None",
- )
-
- # --url [url/hostname/ip]
- def cb_host(option, opt, value, optparser):
- self.hostname = str(value)
- optparser.add_option(
- "--url", type="string",
- action="callback", callback=cb_host,
- help="set URL/hostname/IP for use in tests -- default is None",
- )
-
- # --controlproxy [scheme://host:port]
- def cb_controlproxy(option, opt, value, optparser):
- self.controlproxy = str(value)
- optparser.add_option(
- "--controlproxy", type="string",
- action="callback", callback=cb_controlproxy,
- help="proxy to be used as a control -- default is None",
- )
-
- # --experimentproxy [scheme://host:port]
- def cb_experimentproxy(option, opt, value, optparser):
- self.experimentproxy = str(value)
- optparser.add_option(
- "--experimentproxy", type="string",
- action="callback", callback=cb_experimentproxy,
- help="proxy to be used for experiments -- default is None",
- )
-
-
-
- # --randomize
- def cb_randomize(option, opt, value, optparser):
- self.randomize = bool(int(value))
- optparser.add_option(
- "--randomize", type="choice",
- choices=['0', '1'], metavar="0|1",
- action="callback", callback=cb_randomize,
- help="randomize host order -- default is on",
- )
-
- # XXX TODO:
- # pause/resume scans for dns_BULK_DNS_Tests()
- # setting of control/experiment resolver
- # setting of control/experiment proxy
- #
-
- def cb_version(option, opt, value, oparser):
- self.action = 'version'
- optparser.add_option(
- "-v", "--version",
- action="callback", callback=cb_version,
- help="print ooni-probe version"
- )
-
- # parse options
- (opts, args) = optparser.parse_args()
-
- # validate options
- try:
- if (args):
- raise optparse.OptionError('extra arguments found', args)
- if (not self.action):
- raise optparse.OptionError(
- 'RTFS', 'required arguments missing'
- )
-
- except optparse.OptionError, err:
- sys.stderr.write(str(err) + '\n\n')
- optparser.print_help()
- sys.exit(1)
-
- def version(self):
- print """
-ooni-probe pre-alpha
-Copyright (c) 2011, Jacob Appelbaum, Arturo Filastò
-See: https://www.torproject.org/ooni/
-
-"""
-
- def run(self):
- getattr(self, self.action)()
-
- def plugin(self):
- plugin_run = ooni.plugooni.Plugooni
- plugin_run(self).run(self)
-
- def listplugins(self):
- plugin_run = ooni.plugooni.Plugooni
- plugin_run(self).list_plugoons()
-
- def captiveportal(self):
- captive_portal = ooni.captive_portal.CaptivePortal
- captive_portal(self).main()
-
- def transhttp(self):
- transparent_http = ooni.transparenthttp.TransparentHTTPProxy
- transparent_http(self).main()
-
- def dns(self):
- dnstests = ooni.namecheck.DNS
- dnstests(self).main()
-
- def dnsbulk(self):
- dnstests = ooni.dns_poisoning.DNSBulk
- dnstests(self).main()
-
- def dnscccheck(self):
- dnstests = ooni.dns_cc_check.DNSBulk
- dnstests(self).main()
-
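
command.py wires every flag to the same pattern: an optparse callback stores the option name as self.action, and run() later dispatches with getattr(self, self.action)(). A trimmed, hypothetical sketch of that dispatch with only two actions:

    import optparse

    class MiniCommand(object):
        def __init__(self, argv):
            self.action = None
            parser = optparse.OptionParser()

            def cb(option, opt, value, oparser):
                self.action = opt[2:]  # "--dns" -> "dns"

            parser.add_option("--dns", action="callback", callback=cb,
                              help="run the dns action")
            parser.add_option("--captiveportal", action="callback", callback=cb,
                              help="run the captiveportal action")
            parser.parse_args(argv)

        def run(self):
            getattr(self, self.action)()  # resolves to self.dns(), etc.

        def dns(self):
            print "running dns tests"

        def captiveportal(self):
            print "running captive portal tests"

MiniCommand(["--dns"]).run() then prints "running dns tests".
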
diff --git a/old-to-be-ported-code/very-old/ooni/dns_poisoning.py b/old-to-be-ported-code/very-old/ooni/dns_poisoning.py
deleted file mode 100644
index 939391e..0000000
--- a/old-to-be-ported-code/very-old/ooni/dns_poisoning.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python
-#
-# DNS tampering detection module
-# by Jacob Appelbaum <jacob(a)appelbaum.net>
-#
-# This module performs DNS queries against a known good resolver and a possible
-# bad resolver. We compare every resolved name against a list of known filters
-# - if we match, we ring a bell; otherwise, we list possible filter IP
-# addresses. There is a high false positive rate for sites that are GeoIP load
-# balanced.
-#
-
-import sys
-import ooni.dnsooni
-
-class DNSBulk():
- def __init__(self, args):
- self.in_ = sys.stdin
- self.out = sys.stdout
- self.randomize = args.randomize
- self.debug = False
-
- def DNS_Tests(self):
- print "DNS tampering detection for list of domains:"
- filter_name = "_DNS_BULK_Tests"
- tests = [ooni.dnsooni]
- for test in tests:
- for function_ptr in dir(test):
- if function_ptr.endswith(filter_name):
- filter_result = getattr(test, function_ptr)(self)
- if filter_result == True:
- print function_ptr + " thinks the network is clean"
- elif filter_result == None:
- print function_ptr + " failed"
- else:
- print function_ptr + " thinks the network is dirty"
- def main(self):
- for function_ptr in dir(self):
- if function_ptr.endswith("_Tests"):
- getattr(self, function_ptr)()
-
-if __name__ == '__main__':
- self.main()
diff --git a/old-to-be-ported-code/very-old/ooni/dnsooni.py b/old-to-be-ported-code/very-old/ooni/dnsooni.py
deleted file mode 100644
index bfdfe51..0000000
--- a/old-to-be-ported-code/very-old/ooni/dnsooni.py
+++ /dev/null
@@ -1,356 +0,0 @@
-#!/usr/bin/env python
-#
-# DNS support for ooni-probe
-# by Jacob Appelbaum <jacob(a)appelbaum.net>
-#
-
-from socket import gethostbyname
-import ooni.common
-
-# requires python-dns
-# (pydns.sourceforge.net)
-try:
- import DNS
-# Mac OS X needs this
-except:
- try:
- import dns as DNS
- except:
- pass # Never mind, let's break later.
-import random
-from pprint import pprint
-
-""" Wrap gethostbyname """
-def dns_resolve(hostname):
- try:
- resolved_host = gethostbyname(hostname)
- return resolved_host
- except:
- return False
-
-"""Perform a resolution on test_hostname and compare it with the expected
- control_resolved ip address. Optionally, a label may be set to customize
- output. If the experiment matches the control, this returns True; otherwise
- it returns False.
-"""
-def dns_resolve_match(experiment_hostname, control_resolved,
- label="generic DNS comparison"):
- experiment_resolved = dns_resolve(experiment_hostname)
- if experiment_resolved == False:
- return None
- if experiment_resolved:
- if str(experiment_resolved) != str(control_resolved):
- print label + " control " + str(control_resolved) + " data does not " \
- "match experiment response: " + str(experiment_resolved)
- return False
- return True
-
-def generic_DNS_resolve(experiment_hostname, experiment_resolver):
- if experiment_resolver == None:
- req = DNS.Request(name=experiment_hostname) # local resolver
- else:
- req = DNS.Request(name=experiment_hostname, server=experiment_resolver) #overide
- resolved_data = req.req().answers
- return resolved_data
-
-""" Return a list of all known censors. """
-def load_list_of_known_censors(known_proxy_file=None):
- proxyfile = "proxy-lists/ips.txt"
- known_proxy_file = open(proxyfile, 'r', 1)
- known_proxy_list = []
- for known_proxy in known_proxy_file.readlines():
- known_proxy_list.append(known_proxy)
- known_proxy_file.close()
- known_proxy_count = len(known_proxy_list)
- print "Loading " + str(known_proxy_count) + " known proxies..."
- return known_proxy_list, known_proxy_count
-
-def load_list_of_test_hosts(hostfile=None):
- if hostfile == None:
- hostfile="censorship-lists/norwegian-dns-blacklist.txt"
- host_list_file = open(hostfile, 'r', 1)
- host_list = []
- for host_name in host_list_file.readlines():
- if host_name.isspace():
- continue
- else:
- host_list.append(host_name)
- host_list_file.close()
- host_count = len(host_list)
- #print "Loading " + str(host_count) + " test host names..."
- return host_list, host_count
-
-""" Return True with a list of censors if we find a known censor from
- known_proxy_list in the experiment_data DNS response. Otherwise return
- False and None. """
-def contains_known_censors(known_proxy_list, experiment_data):
- match = False
- proxy_list = []
- for answer in range(len(experiment_data)):
- for known_proxy in known_proxy_list:
- if answer == known_proxy:
- print "CONFLICT: known proxy discovered: " + str(known_proxy),
- proxy_list.append(known_proxy)
- match = True
- return match, proxy_list
-
-""" Return True and the experiment response that failed to match."""
-def compare_control_with_experiment(known_proxy_list, control_data, experiment_data):
- known_proxy_found, known_proxies = contains_known_censors(known_proxy_list, experiment_data)
- conflict_list = []
- conflict = False
- if known_proxy_found:
- print "known proxy discovered: " + str(known_proxies)
- for answer in range(len(control_data)):
- if control_data[answer]['data'] == experiment_data:
- print "control_data[answer]['data'] = " + str(control_data[answer]['data']) + "and experiment_data = " + str(experiment_data)
- continue
- else:
- conflict = True
- conflict_list.append(experiment_data)
- #print "CONFLICT: control_data: " + str(control_data) + " experiment_data: " + str(experiment_data),
- return conflict, conflict_list
-
-def dns_DNS_BULK_Tests(self, hostfile=None,
- known_good_resolver="8.8.8.8", test_resolver=None):
- tampering = False # By default we'll pretend the internet is nice
- tampering_list = []
- host_list, host_count = load_list_of_test_hosts()
- known_proxies, proxy_count = load_list_of_known_censors()
- check_count = 1
- if test_resolver == None:
- DNS.ParseResolvConf() # Set the local resolver as our default
- if self.randomize:
- random.shuffle(host_list) # This makes our list non-sequential for now
- for host_name in host_list:
- host_name = host_name.strip()
- print "Total progress: " + str(check_count) + " of " + str(host_count) + " hosts to check"
- print "Resolving with control resolver..."
- print "Testing " + host_name + " with control resolver: " + str(known_good_resolver)
- print "Testing " + host_name + " with experiment resolver: " + str(test_resolver)
- # XXX TODO - we need to keep track of the status of these requests and then resume them
- while True:
- try:
- control_data = generic_DNS_resolve(host_name, known_good_resolver)
- break
- except KeyboardInterrupt:
- print "bailing out..."
- exit()
- except DNS.Base.DNSError:
- print "control resolver appears to be failing..."
- continue
- except:
- print "Timeout; looping!"
- continue
-
- print "Resolving with experiment resolver..."
- while True:
- try:
- experiment_data = generic_DNS_resolve(host_name, test_resolver)
- break
- except KeyboardInterrupt:
- print "bailing out..."
- exit()
- except DNS.Base.DNSError:
- print "experiment resolver appears to be failing..."
- continue
- except:
- print "Timeout; looping!"
- continue
-
- print "Comparing control and experiment...",
- tampering, conflicts = compare_control_with_experiment(known_proxies, control_data, experiment_data)
- if tampering:
- tampering_list.append(conflicts)
- print "Conflicts with " + str(host_name) + " : " + str(conflicts)
- check_count = check_count + 1
- host_list.close()
- return tampering
-
-""" Attempt to resolve random_hostname and return True and None if empty. If an
- address is returned we return False and the returned address.
-"""
-def dns_response_empty(random_hostname):
- response = dns_resolve(random_hostname)
- if response == False:
- return True, None
- return False, response
-
-def dns_multi_response_empty(count, size):
- for i in range(count):
- randName = ooni.common._randstring(size)
- response_empty, response_ip = dns_response_empty(randName)
- if response_empty == True and response_ip == None:
- responses_are_empty = True
- else:
- print label + " " + randName + " found with value " + str(response_ip)
- responses_are_empty = False
- return responses_are_empty
-
-""" Attempt to resolve one random host name per tld in tld_list where the
- hostnames are random strings with a length between min_length and
- max_length. Return True if list is empty, otherwise return False."""
-def dns_list_empty(tld_list, min_length, max_length,
- label="generic DNS list test"):
- for tld in tld_list:
- randName = ooni.common._randstring(min_length, max_length) + tld
- response_empty, response_ip = dns_response_empty(randName)
- return response_empty
-
-# Known bad test
-# Test for their DNS breakage and their HTTP MITM
-# "Family Shield" is 208.67.222.123 and 208.67.220.123
-# returns 67.215.65.130 for filtered sites like kink.com
-# block.opendns.com is a block page where users are redirected
-# 208.67.216.135 208.67.217.135 are the block pages currently point
-# 67.215.65.132 is returned for NXDOMAINs and a visit with HTTP to that IP
-# results in redirection to http://guide.opendns.com/main?url=sdagsad.com or
-# whatever the HOST header says
-# Amusingly - their Server header is: "OpenDNS Guide"
-""" Return True if we are not being directed as known OpenDNS block pages."""
-def OpenDNS_DNS_Tests(self):
- return OpenDNS_Censorship_DNS_TESTS(self)
- return OpenDNS_NXDomain_DNS_TESTS(self)
-
-def OpenDNS_Censorship_DNS_TESTS(self):
- known_filter = "67.215.65.130"
- randName = ooni.common._randstring(10)
- redirected = dns_resolve_match(randName, known_filter, label="OpenDNS DNS Censorship comparison")
- if redirected:
- return False
- else:
- return True
-
-def OpenDNS_NXDomain_DNS_TESTS(self):
- known_filter = "67.215.65.132"
- randName = ooni.common._randstring(10)
- redirected = dns_resolve_match(randName, known_filter, label="OpenDNS DNS NXDomain comparison")
- if redirected:
- return False
- else:
- return True
-
-"""Returns True if the experiment_url returns the well known Italian block page."""
-def cc_DNS_Tests_it(self):
- tampering = False # By default we'll pretend the internet is nice
- tampering_list = []
- conflicts = []
- known_good_resolver = "8.8.8.8"
- host_list, host_count = load_list_of_test_hosts("censorship-lists/italy-gamble-blocklist-07-22-11.txt")
- known_http_block_pages, known_block_count = load_list_of_test_hosts("proxy-lists/italy-http-ips.txt")
- known_censoring_resolvers, censoring_resolver_count = load_list_of_test_hosts("proxy-lists/italy-dns-ips.txt")
-
- check_count = 1
- DNS.ParseResolvConf()
- # Set the local resolver as our default
- if self.randomize:
- random.shuffle(host_list) # This makes our list non-sequential for now
- print "We're testing (" + str(host_count) + ") URLs"
- print "We're looking for (" + str(known_block_count) + ") block pages"
- print "We're testing against (" + str(censoring_resolver_count) + ") censoring DNS resolvers"
- for test_resolver in known_censoring_resolvers:
- test_resolver = test_resolver.strip()
- for host_name in host_list:
- host_name = host_name.strip()
- print "Total progress: " + str(check_count) + " of " + str(host_count) + " hosts to check"
- print "Testing " + host_name + " with control resolver: " + known_good_resolver
- print "Testing " + host_name + " with experiment resolver: " + test_resolver
- while True:
- try:
- control_data = generic_DNS_resolve(host_name, known_good_resolver)
- break
- except KeyboardInterrupt:
- print "bailing out..."
- exit()
- except DNS.Base.DNSError:
- print "control resolver appears to be failing..."
- break
- except:
- print "Timeout; looping!"
- continue
-
- while True:
- try:
- experiment_data = generic_DNS_resolve(host_name, test_resolver)
- break
- except KeyboardInterrupt:
- print "bailing out..."
- exit()
- except DNS.Base.DNSError:
- print "experiment resolver appears to be failing..."
- continue
- except:
- print "Timeout; looping!"
- continue
-
- print "Comparing control and experiment...",
- tampering, conflicts = compare_control_with_experiment(known_http_block_pages, control_data, experiment_data)
- if tampering:
- tampering_list.append(conflicts)
- print "Conflicts with " + str(host_name) + " : " + str(conflicts)
- check_count = check_count + 1
-
- host_list.close()
- return tampering
-
-
-## XXX TODO
-## Code up automatic tests for HTTP page checking in Italy - length + known strings, etc
-
-""" Returns True if the experiment_host returns a well known Australian filter
- IP address."""
-def Australian_DNS_Censorship(self, known_filtered_host="badhost.com"):
- # http://www.robtex.com/ip/61.88.88.88.html
- # http://requests.optus.net.au/dns/
- known_block_ip = "208.69.183.228" # http://interpol.contentkeeper.com/
- known_censoring_resolvers = ["61.88.88.88"] # Optus
- for resolver in known_censoring_resolvers:
- blocked = generic_DNS_censorship(known_filtered_host, resolver, known_block_page)
- if blocked:
- return True
-
-"""Returns True if experiment_hostname as resolved by experiment_resolver
- resolves to control_data. Returns False if there is no match or None if the
- attempt fails."""
-def generic_DNS_censorship(self, experiment_hostname, experiment_resolver,
- control_data):
- req = DNS.Request(name=experiment_hostname, server=experiment_resolver)
- resolved_data = s.req().answers
- for answer in range(len(resolved_data)):
- if resolved_data[answer]['data'] == control_data:
- return True
- return False
-
-# See dns_launch_wildcard_checks in tor/src/or/dns.c for Tor implementation
-# details
-""" Return True if Tor would consider the network fine; False if it's hostile
- and has no signs of DNS tampering. """
-def Tor_DNS_Tests(self):
- response_rfc2606_empty = RFC2606_DNS_Tests(self)
- tor_tld_list = ["", ".com", ".org", ".net"]
- response_tor_empty = ooni.dnsooni.dns_list_empty(tor_tld_list, 8, 16, "TorDNSTest")
- return response_tor_empty | response_rfc2606_empty
-
-""" Return True if RFC2606 would consider the network hostile; False if it's all
- clear and has no signs of DNS tampering. """
-def RFC2606_DNS_Tests(self):
- tld_list = [".invalid", ".test"]
- return ooni.dnsooni.dns_list_empty(tld_list, 4, 18, "RFC2606Test")
-
-""" Return True if googleChromeDNSTest would consider the network OK."""
-def googleChrome_CP_Tests(self):
- maxGoogleDNSTests = 3
- GoogleDNSTestSize = 10
- return ooni.dnsooni.dns_multi_response_empty(maxGoogleDNSTests,
- GoogleDNSTestSize)
-def googleChrome_DNS_Tests(self):
- return googleChrome_CP_Tests(self)
-
-""" Return True if MSDNSTest would consider the network OK."""
-def MSDNS_CP_Tests(self):
- experimentHostname = "dns.msftncsi.com"
- expectedResponse = "131.107.255.255"
- return ooni.dnsooni.dns_resolve_match(experimentHostname, expectedResponse, "MS DNS")
-
-def MSDNS_DNS_Tests(self):
- return MSDNS_CP_Tests(self)
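
The heart of dnsooni.py is the control-versus-experiment comparison: resolve the same name against a trusted resolver and a suspect one with PyDNS, then check whether the suspect answers fall outside the trusted set. A reduced sketch using the same DNS.Request() calls as above (resolver addresses are placeholders; 192.0.2.1 is a documentation address, not a real resolver):

    import DNS  # python-dns / PyDNS, as used by the deleted module

    def answers_for(hostname, resolver=None):
        """Return the answer 'data' fields for hostname from one resolver."""
        if resolver is None:
            DNS.ParseResolvConf()  # fall back to the system resolver
            req = DNS.Request(name=hostname)
        else:
            req = DNS.Request(name=hostname, server=resolver)
        return [answer['data'] for answer in req.req().answers]

    def resolver_disagrees(hostname, control="8.8.8.8", experiment="192.0.2.1"):
        """True when the experiment resolver returns an address the control never saw."""
        control_ips = set(answers_for(hostname, control))
        experiment_ips = set(answers_for(hostname, experiment))
        return not experiment_ips.issubset(control_ips)

This sidesteps the per-answer loop of compare_control_with_experiment() but keeps the same caveat the module headers state: GeoIP load-balanced sites will disagree legitimately.
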
diff --git a/old-to-be-ported-code/very-old/ooni/helpers.py b/old-to-be-ported-code/very-old/ooni/helpers.py
deleted file mode 100644
index 514e65f..0000000
--- a/old-to-be-ported-code/very-old/ooni/helpers.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-#
-# HTTP support for ooni-probe
-# by Jacob Appelbaum <jacob(a)appelbaum.net>
-# Arturo Filasto' <art(a)fuffa.org>
-
-import ooni.common
-import pycurl
-import random
-import zipfile
-import os
-from xml.dom import minidom
-try:
- from BeautifulSoup import BeautifulSoup
-except:
- pass # Never mind, let's break later.
-
-def get_random_url(self):
- filepath = os.getcwd() + "/test-lists/top-1m.csv.zip"
- fp = zipfile.ZipFile(filepath, "r")
- fp.open("top-1m.csv")
- content = fp.read("top-1m.csv")
- return "http://" + random.choice(content.split("\n")).split(",")[1]
-
-"""Pick a random header and use that for the request"""
-def get_random_headers(self):
- filepath = os.getcwd() + "/test-lists/whatheaders.xml"
- headers = []
- content = open(filepath, "r").read()
- soup = BeautifulSoup(content)
- measurements = soup.findAll('measurement')
- i = random.randint(0,len(measurements))
- for vals in measurements[i].findAll('header'):
- name = vals.find('name').string
- value = vals.find('value').string
- if name != "host":
- headers.append((name, value))
- return headers
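
get_random_url() above reads an Alexa-style top-1m.csv straight out of a zip archive and picks one rank,domain row at random. A sketch of the same routine with blank lines skipped (same test-lists path assumed):

    import os
    import random
    import zipfile

    def random_url_from_ziplist(filepath=None):
        """Pick a random http:// URL from a zipped rank,domain CSV."""
        if filepath is None:
            filepath = os.path.join(os.getcwd(), "test-lists", "top-1m.csv.zip")
        zf = zipfile.ZipFile(filepath, "r")
        rows = [line for line in zf.read("top-1m.csv").split("\n") if line.strip()]
        rank, host = random.choice(rows).split(",", 1)
        return "http://" + host.strip()
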
diff --git a/old-to-be-ported-code/very-old/ooni/http.py b/old-to-be-ported-code/very-old/ooni/http.py
deleted file mode 100644
index 59e2abb..0000000
--- a/old-to-be-ported-code/very-old/ooni/http.py
+++ /dev/null
@@ -1,306 +0,0 @@
-#!/usr/bin/env python
-#
-# HTTP support for ooni-probe
-# by Jacob Appelbaum <jacob(a)appelbaum.net>
-# Arturo Filasto' <art(a)fuffa.org>
-#
-
-from socket import gethostbyname
-import ooni.common
-import ooni.helpers
-import ooni.report
-import urllib2
-import httplib
-from urlparse import urlparse
-from pprint import pprint
-import pycurl
-import random
-import string
-import re
-from pprint import pprint
-try:
- from BeautifulSoup import BeautifulSoup
-except:
- pass # Never mind, let's break later.
-
-# By default, we'll be Torbutton's UA
-default_ua = { 'User-Agent' :
- 'Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0' }
-
-# Use pycurl to connect over a proxy
-PROXYTYPE_SOCKS5 = 5
-default_proxy_type = PROXYTYPE_SOCKS5
-default_proxy_host = "127.0.0.1"
-default_proxy_port = "9050"
-
-#class HTTPResponse(object):
-# def __init__(self):
-
-
-"""A very basic HTTP fetcher that uses Tor by default and returns a curl
- object."""
-def http_proxy_fetch(url, headers, proxy_type=5,
- proxy_host="127.0.0.1",
- proxy_port=9050):
- request = pycurl.Curl()
- request.setopt(pycurl.PROXY, proxy_host)
- request.setopt(pycurl.PROXYPORT, proxy_port)
- request.setopt(pycurl.PROXYTYPE, proxy_type)
- request.setopt(pycurl.HTTPHEADER, ["User-Agent: Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0"])
- request.setopt(pycurl.URL, url)
- response = request.perform()
- http_code = getinfo(pycurl.HTTP_CODE)
- return response, http_code
-
-"""A very basic HTTP fetcher that returns a urllib2 response object."""
-def http_fetch(url,
- headers= default_ua,
- label="generic HTTP fetch"):
- request = urllib2.Request(url, None, headers)
- response = urllib2.urlopen(request)
- return response
-
-"""Connect to test_hostname on port 80, request url and compare it with the expected
- control_result. Optionally, a label may be set to customize
- output. If the experiment matches the control, this returns True with the http
- status code; otherwise it returns False.
-"""
-def http_content_match(experimental_url, control_result,
- headers= { 'User-Agent' : default_ua },
- label="generic HTTP content comparison"):
- request = urllib2.Request(experimental_url, None, headers)
- response = urllib2.urlopen(request)
- responseContents = response.read()
- responseCode = response.code
- if responseContents != False:
- if str(responseContents) != str(control_result):
- print label + " control " + str(control_result) + " data does not " \
- "match experiment response: " + str(responseContents)
- return False, responseCode
- return True, responseCode
- else:
- print "HTTP connection appears to have failed"
- return False, False
-
-"""Connect to test_hostname on port 80, request url and compare it with the expected
- control_result as a regex. Optionally, a label may be set to customize
- output. If the experiment matches the control, this returns True with the HTTP
- status code; otherwise it returns False.
-"""
-def http_content_fuzzy_match(experimental_url, control_result,
- headers= { 'User-Agent' : default_ua },
- label="generic HTTP content comparison"):
- request = urllib2.Request(experimental_url, None, headers)
- response = urllib2.urlopen(request)
- responseContents = response.read()
- responseCode = response.code
- pattern = re.compile(control_result)
- match = pattern.search(responseContents)
- if responseContents != False:
- if not match:
- print label + " control " + str(control_result) + " data does not " \
- "match experiment response: " + str(responseContents)
- return False, responseCode
- return True, responseCode
- else:
- print "HTTP connection appears to have failed"
- return False, False
-
-"""Compare two HTTP status codes as integers and return True if they match."""
-def http_status_code_match(experiment_code, control_code):
- if int(experiment_code) != int(control_code):
- return False
- return True
-
-"""Compare two HTTP status codes as integers and return True if they don't match."""
-def http_status_code_no_match(experiment_code, control_code):
- if http_status_code_match(experiment_code, control_code):
- return False
- return True
-
-"""Connect to a URL and compare the control_header/control_result with the data
-served by the remote server. Return True if it matches, False if it does not."""
-def http_header_match(experiment_url, control_header, control_result):
- response = http_fetch(url, label=label)
- remote_header = response.get_header(control_header)
- if str(remote_header) == str(control_result):
- return True
- else:
- return False
-
-"""Connect to a URL and compare the control_header/control_result with the data
-served by the remote server. Return True if it does not match, False if it does."""
-def http_header_no_match(experiment_url, control_header, control_result):
- match = http_header_match(experiment_url, control_header, control_result)
- if match:
- return False
- else:
- return True
-
-def send_browser_headers(self, browser, conn):
- headers = ooni.helpers.get_random_headers(self)
- for h in headers:
- conn.putheader(h[0], h[1])
- conn.endheaders()
- return True
-
-def http_request(self, method, url, path=None):
- purl = urlparse(url)
- host = purl.netloc
- conn = httplib.HTTPConnection(host, 80)
- conn.connect()
- if path is None:
- path = purl.path
- conn.putrequest(method, purl.path)
- send_browser_headers(self, None, conn)
- response = conn.getresponse()
- headers = dict(response.getheaders())
- self.headers = headers
- self.data = response.read()
- return True
-
-def search_headers(self, s_headers, url):
- if http_request(self, "GET", url):
- headers = self.headers
- else:
- return None
- result = {}
- for h in s_headers.items():
- result[h[0]] = h[0] in headers
- return result
-
-# XXX for testing
-# [('content-length', '9291'), ('via', '1.0 cache_server:3128 (squid/2.6.STABLE21)'), ('x-cache', 'MISS from cache_server'), ('accept-ranges', 'bytes'), ('server', 'Apache/2.2.16 (Debian)'), ('last-modified', 'Fri, 22 Jul 2011 03:00:31 GMT'), ('connection', 'close'), ('etag', '"105801a-244b-4a89fab1e51c0;49e684ba90c80"'), ('date', 'Sat, 23 Jul 2011 03:03:56 GMT'), ('content-type', 'text/html'), ('x-cache-lookup', 'MISS from cache_server:3128')]
-
-"""Search for squid headers by requesting a random site and checking if the headers have been rewritten (active, not fingerprintable)"""
-def search_squid_headers(self):
- test_name = "squid header"
- self.logger.info("RUNNING %s test" % test_name)
- url = ooni.helpers.get_random_url(self)
- s_headers = {'via': '1.0 cache_server:3128 (squid/2.6.STABLE21)', 'x-cache': 'MISS from cache_server', 'x-cache-lookup':'MISS from cache_server:3128'}
- ret = search_headers(self, s_headers, url)
- for i in ret.items():
- if i[1] is True:
- self.logger.info("the %s test returned False" % test_name)
- return False
- self.logger.info("the %s test returned True" % test_name)
- return True
-
-def random_bad_request(self):
- url = ooni.helpers.get_random_url(self)
- r_str = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(random.randint(5,20)))
- if http_request(self, r_str, url):
- return True
- else:
- return None
-
-"""Create a request made up of a random string of 5-20 chars (active technique, possibly fingerprintable)"""
-def squid_search_bad_request(self):
- test_name = "squid bad request"
- self.logger.info("RUNNING %s test" % test_name)
- if random_bad_request(self):
- s_headers = {'X-Squid-Error' : 'ERR_INVALID_REQ 0'}
- for i in s_headers.items():
- if i[0] in self.headers:
- self.logger.info("the %s test returned False" % test_name)
- return False
- self.logger.info("the %s test returned True" % test_name)
- return True
- else:
- self.logger.warning("the %s test returned failed" % test_name)
- return None
-
-"""Try requesting cache_object and expect as output access denied (very active technique, fingerprintable) """
-def squid_cacheobject_request(self):
- url = ooni.helpers.get_random_url(self)
- test_name = "squid cacheobject"
- self.logger.info("RUNNING %s test" % test_name)
- if http_request(self, "GET", url, "cache_object://localhost/info"):
- soup = BeautifulSoup(self.data)
- if soup.find('strong') and soup.find('strong').string == "Access Denied.":
- self.logger.info("the %s test returned False" % test_name)
- return False
- else:
- self.logger.info("the %s test returned True" % test_name)
- return True
- else:
- self.logger.warning("the %s test failed" % test_name)
- return None
-
-
-def MSHTTP_CP_Tests(self):
- test_name = "MS HTTP Captive Portal"
- self.logger.info("RUNNING %s test" % test_name)
- experiment_url = "http://www.msftncsi.com/ncsi.txt"
- expectedResponse = "Microsoft NCSI" # Only this - nothing more
- expectedResponseCode = "200" # Must be this - nothing else
- label = "MS HTTP"
- headers = { 'User-Agent' : 'Microsoft NCSI' }
- content_match, experiment_code = http_content_match(experiment_url, expectedResponse,
- headers, label)
- status_match = http_status_code_match(expectedResponseCode,
- experiment_code)
- if status_match and content_match:
- self.logger.info("the %s test returned True" % test_name)
- return True
- else:
- print label + " experiment would conclude that the network is filtered."
- self.logger.info("the %s test returned False" % test_name)
- return False
-
-def AppleHTTP_CP_Tests(self):
- test_name = "Apple HTTP Captive Portal"
- self.logger.info("RUNNING %s test" % test_name)
- experiment_url = "http://www.apple.com/library/test/success.html"
- expectedResponse = "Success" # There is HTML that contains this string
- expectedResponseCode = "200"
- label = "Apple HTTP"
- headers = { 'User-Agent' : 'Mozilla/5.0 (iPhone; U; CPU like Mac OS X; en) '
- 'AppleWebKit/420+ (KHTML, like Gecko) Version/3.0'
- ' Mobile/1A543a Safari/419.3' }
- content_match, experiment_code = http_content_fuzzy_match(
- experiment_url, expectedResponse, headers)
- status_match = http_status_code_match(expectedResponseCode,
- experiment_code)
- if status_match and content_match:
- self.logger.info("the %s test returned True" % test_name)
- return True
- else:
- print label + " experiment would conclude that the network is filtered."
- print label + "content match:" + str(content_match) + " status match:" + str(status_match)
- self.logger.info("the %s test returned False" % test_name)
- return False
-
-def WC3_CP_Tests(self):
- test_name = "W3 Captive Portal"
- self.logger.info("RUNNING %s test" % test_name)
- url = "http://tools.ietf.org/html/draft-nottingham-http-portal-02"
- draftResponseCode = "428"
- label = "WC3 draft-nottingham-http-portal"
- response = http_fetch(url, label=label)
- responseCode = response.code
- if http_status_code_no_match(responseCode, draftResponseCode):
- self.logger.info("the %s test returned True" % test_name)
- return True
- else:
- print label + " experiment would conclude that the network is filtered."
- print label + " status match:" + status_match
- self.logger.info("the %s test returned False" % test_name)
- return False
-
-# Google ChromeOS fetches this url in guest mode
-# and they expect the user to authenticate
-def googleChromeOSHTTPTest(self):
- print "noop"
- #url = "http://www.google.com/"
-
-def SquidHeader_TransparentHTTP_Tests(self):
- return search_squid_headers(self)
-
-def SquidBadRequest_TransparentHTTP_Tests(self):
- return squid_search_bad_request(self)
-
-def SquidCacheobject_TransparentHTTP_Tests(self):
- return squid_cacheobject_request(self)
-
-
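
The three squid probes in this file come down to: (1) fetch an ordinary URL and look for proxy-injected headers such as Via and X-Cache, (2) send a random garbage method and look for an X-Squid-Error header, and (3) request cache_object://localhost/info and expect squid's "Access Denied." page. A minimal sketch of the first, passive check (hypothetical helper, urllib2 only; header names taken from the signatures above):

    import urllib2

    SQUID_HEADERS = ("via", "x-cache", "x-cache-lookup", "x-squid-error")

    def proxy_headers_in_response(url, user_agent="Mozilla/5.0"):
        """Return whichever squid-style headers appear in the response."""
        request = urllib2.Request(url, None, {"User-Agent": user_agent})
        response = urllib2.urlopen(request)
        info = response.info()
        present = [name for name in SQUID_HEADERS if info.getheader(name)]
        response.close()
        return present

An empty list is the "clean" outcome; any hit mirrors search_squid_headers() returning False above.
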
diff --git a/old-to-be-ported-code/very-old/ooni/input.py b/old-to-be-ported-code/very-old/ooni/input.py
deleted file mode 100644
index c32ab48..0000000
--- a/old-to-be-ported-code/very-old/ooni/input.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/python
-
-class file:
- def __init__(self, name=None):
- if name:
- self.name = name
-
- def simple(self, name=None):
- """ Simple file parsing method:
- Read a file line by line and output an array with all its lines, without newlines
- """
- if name:
- self.name = name
- output = []
- try:
- f = open(self.name, "r")
- for line in f.readlines():
- output.append(line.strip())
- return output
- except:
- return output
-
- def csv(self, name=None):
- if name:
- self.name = name
-
- def yaml(self, name):
- if name:
- self.name = name
-
- def consensus(self, name):
- if name:
- self.name = name
diff --git a/old-to-be-ported-code/very-old/ooni/namecheck.py b/old-to-be-ported-code/very-old/ooni/namecheck.py
deleted file mode 100644
index 1a2a3f0..0000000
--- a/old-to-be-ported-code/very-old/ooni/namecheck.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python
-#
-# DNS tampering detection module
-# by Jacob Appelbaum <jacob(a)appelbaum.net>
-#
-# This module performs multiple DNS tests.
-
-import sys
-import ooni.dnsooni
-
-class DNS():
- def __init__(self, args):
- self.in_ = sys.stdin
- self.out = sys.stdout
- self.debug = False
- self.randomize = args.randomize
-
- def DNS_Tests(self):
- print "DNS tampering detection:"
- filter_name = "_DNS_Tests"
- tests = [ooni.dnsooni]
- for test in tests:
- for function_ptr in dir(test):
- if function_ptr.endswith(filter_name):
- filter_result = getattr(test, function_ptr)(self)
- if filter_result == True:
- print function_ptr + " thinks the network is clean"
- elif filter_result == None:
- print function_ptr + " failed"
- else:
- print function_ptr + " thinks the network is dirty"
-
- def main(self):
- for function_ptr in dir(self):
- if function_ptr.endswith("_Tests"):
- getattr(self, function_ptr)()
-
-if __name__ == '__main__':
- self.main()
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/__init__.py b/old-to-be-ported-code/very-old/ooni/plugins/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/dnstest_plgoo.py b/old-to-be-ported-code/very-old/ooni/plugins/dnstest_plgoo.py
deleted file mode 100644
index 0c0cfa7..0000000
--- a/old-to-be-ported-code/very-old/ooni/plugins/dnstest_plgoo.py
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/usr/bin/python
-
-import sys
-import re
-from pprint import pprint
-from twisted.internet import reactor, endpoints
-from twisted.names import client
-from ooni.plugooni import Plugoo
-from ooni.socksclient import SOCKSv4ClientProtocol, SOCKSWrapper
-
-class DNSTestPlugin(Plugoo):
- def __init__(self):
- self.name = ""
- self.type = ""
- self.paranoia = ""
- self.modules_to_import = []
- self.output_dir = ""
- self.buf = ""
- self.control_response = []
-
- def response_split(self, response):
- a = []
- b = []
- for i in response:
- a.append(i[0])
- b.append(i[1])
-
- return a,b
-
- def cb(self, type, hostname, dns_server, value):
- if self.control_response is None:
- self.control_response = []
- if type == 'control' and self.control_response != value:
- print "%s %s" % (dns_server, value)
- self.control_response.append((dns_server,value))
- pprint(self.control_response)
- if type == 'experiment':
- pprint(self.control_response)
- _, res = self.response_split(self.control_response)
- if value not in res:
- print "res (%s) : " % value
- pprint(res)
- print "---"
- print "%s appears to be censored on %s (%s != %s)" % (hostname, dns_server, res[0], value)
-
- else:
- print "%s appears to be clean on %s" % (hostname, dns_server)
- self.r2.servers = [('212.245.158.66',53)]
- print "HN: %s %s" % (hostname, value)
-
- def err(self, pck, error):
- pprint(pck)
- error.printTraceback()
- reactor.stop()
- print "error!"
- pass
-
- def ooni_main(self, args):
- self.experimentalproxy = ''
- self.test_hostnames = ['dio.it']
- self.control_dns = [('8.8.8.8',53), ('4.4.4.8',53)]
- self.experiment_dns = [('85.37.17.9',53),('212.245.158.66',53)]
-
- self.control_res = []
- self.control_response = None
-
- self.r1 = client.Resolver(None, [self.control_dns.pop()])
- self.r2 = client.Resolver(None, [self.experiment_dns.pop()])
-
- for hostname in self.test_hostnames:
- for dns_server in self.control_dns:
- self.r1.servers = [dns_server]
- f = self.r1.getHostByName(hostname)
- pck = (hostname, dns_server)
- f.addCallback(lambda x: self.cb('control', hostname, dns_server, x)).addErrback(lambda x: self.err(pck, x))
-
- for dns_server in self.experiment_dns:
- self.r2.servers = [dns_server]
- pck = (hostname, dns_server)
- f = self.r2.getHostByName(hostname)
- f.addCallback(lambda x: self.cb('experiment', hostname, dns_server, x)).addErrback(lambda x: self.err(pck, x))
-
- reactor.run()
-
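
dnstest_plgoo drives the same control/experiment comparison asynchronously with twisted.names. A reduced sketch for a single hostname, gathering both lookups before comparing (resolver IPs are placeholders):

    from twisted.internet import reactor, defer
    from twisted.names import client

    def compare_resolvers(hostname, control_ip, experiment_ip):
        """Resolve hostname against two servers and report whether they agree."""
        control = client.Resolver(servers=[(control_ip, 53)])
        experiment = client.Resolver(servers=[(experiment_ip, 53)])
        d = defer.gatherResults([control.getHostByName(hostname),
                                 experiment.getHostByName(hostname)])

        def report(addresses):
            control_addr, experiment_addr = addresses
            if control_addr == experiment_addr:
                print "%s looks clean (%s)" % (hostname, control_addr)
            else:
                print "%s differs: control=%s experiment=%s" % (
                    hostname, control_addr, experiment_addr)

        d.addCallback(report)
        d.addBoth(lambda _: reactor.stop())
        reactor.run()

compare_resolvers("dio.it", "8.8.8.8", "85.37.17.9") mirrors one iteration of ooni_main() above.
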
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/http_plgoo.py b/old-to-be-ported-code/very-old/ooni/plugins/http_plgoo.py
deleted file mode 100644
index 021e863..0000000
--- a/old-to-be-ported-code/very-old/ooni/plugins/http_plgoo.py
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/usr/bin/python
-
-import sys
-import re
-from twisted.internet import reactor, endpoints
-from twisted.web import client
-from ooni.plugooni import Plugoo
-from ooni.socksclient import SOCKSv4ClientProtocol, SOCKSWrapper
-
-class HttpPlugin(Plugoo):
- def __init__(self):
- self.name = ""
- self.type = ""
- self.paranoia = ""
- self.modules_to_import = []
- self.output_dir = ""
- self.buf = ''
-
- def cb(self, type, content):
- print "got %d bytes from %s" % (len(content), type) # DEBUG
- if not self.buf:
- self.buf = content
- else:
- if self.buf == content:
- print "SUCCESS"
- else:
- print "FAIL"
- reactor.stop()
-
- def endpoint(self, scheme, host, port):
- ep = None
- if scheme == 'http':
- ep = endpoints.TCP4ClientEndpoint(reactor, host, port)
- elif scheme == 'https':
- ep = endpoints.SSL4ClientEndpoint(reactor, host, port, context)
- return ep
-
- def ooni_main(self):
- # We don't have the Command object so cheating for now.
- url = 'http://check.torproject.org/'
- self.controlproxy = 'socks4a://127.0.0.1:9050'
- self.experimentalproxy = ''
-
- if not re.match("[a-zA-Z0-9]+\:\/\/[a-zA-Z0-9]+", url):
- return None
- scheme, host, port, path = client._parse(url)
-
- ctrl_dest = self.endpoint(scheme, host, port)
- if not ctrl_dest:
- raise Exception('unsupported scheme %s in %s' % (scheme, url))
- if self.controlproxy:
- _, proxy_host, proxy_port, _ = client._parse(self.controlproxy)
- control = SOCKSWrapper(reactor, proxy_host, proxy_port, ctrl_dest)
- else:
- control = ctrl_dest
- f = client.HTTPClientFactory(url)
- f.deferred.addCallback(lambda x: self.cb('control', x))
- control.connect(f)
-
- exp_dest = self.endpoint(scheme, host, port)
- if not exp_dest:
- raise Exception('unsupported scheme %s in %s' % (scheme, url))
- # FIXME: use the experiment proxy if there is one
- experiment = exp_dest
- f = client.HTTPClientFactory(url)
- f.deferred.addCallback(lambda x: self.cb('experiment', x))
- experiment.connect(f)
-
- reactor.run()
-
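
A small inconsistency between the two HTTP plgoos: the endpoint() helper here passes an undefined `context` to SSL4ClientEndpoint for https, while the proxy_plgoo version further down constructs ssl.ClientContextFactory(). A sketch of the shared helper with that gap filled (illustrative only):

    from twisted.internet import reactor, endpoints, ssl

    def endpoint_for(scheme, host, port):
        """Map a URL scheme onto a Twisted client endpoint."""
        if scheme == 'http':
            return endpoints.TCP4ClientEndpoint(reactor, host, port)
        if scheme == 'https':
            return endpoints.SSL4ClientEndpoint(reactor, host, port,
                                                ssl.ClientContextFactory())
        return None
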
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/marco_plgoo.py b/old-to-be-ported-code/very-old/ooni/plugins/marco_plgoo.py
deleted file mode 100644
index cb63df7..0000000
--- a/old-to-be-ported-code/very-old/ooni/plugins/marco_plgoo.py
+++ /dev/null
@@ -1,377 +0,0 @@
-#!/usr/bin/python
-# Copyright 2009 The Tor Project, Inc.
-# License at end of file.
-#
-# This tests connections to a list of Tor nodes in a given Tor consensus file
-# while also recording the certificates - it's not a perfect tool but complete
-# or even partial failure should raise alarms.
-#
-# This plugoo uses threads and as a result, it's not friendly to SIGINT signals.
-#
-
-import logging
-import socket
-import time
-import random
-import threading
-import sys
-import os
-try:
- from ooni.plugooni import Plugoo
-except:
- print "Error importing Plugoo"
-
-try:
- from ooni.common import Storage
-except:
- print "Error importing Storage"
-
-try:
- from ooni import output
-except:
- print "Error importing output"
-
-try:
- from ooni import input
-except:
- print "Error importing output"
-
-
-
-ssl = OpenSSL = None
-
-try:
- import ssl
-except ImportError:
- pass
-
-if ssl is None:
- try:
- import OpenSSL.SSL
- import OpenSSL.crypto
- except ImportError:
- pass
-
-if ssl is None and OpenSSL is None:
- if socket.ssl:
- print """Your Python is too old to have the ssl module, and you haven't
-installed pyOpenSSL. I'll try to work with what you've got, but I can't
-record certificates so well."""
- else:
- print """Your Python has no OpenSSL support. Upgrade to 2.6, install
-pyOpenSSL, or both."""
- sys.exit(1)
-
-################################################################
-
-# How many servers should we test in parallel?
-N_THREADS = 16
-
-# How long do we give individual socket operations to succeed or fail?
-# (Seconds)
-TIMEOUT = 10
-
-################################################################
-
-CONNECTING = "noconnect"
-HANDSHAKING = "nohandshake"
-OK = "ok"
-ERROR = "err"
-
-LOCK = threading.RLock()
-socket.setdefaulttimeout(TIMEOUT)
-
-def clean_pem_cert(cert):
- idx = cert.find('-----END')
- if idx > 1 and cert[idx-1] != '\n':
- cert = cert.replace('-----END','\n-----END')
- return cert
-
-def record((addr,port), state, extra=None, cert=None):
- LOCK.acquire()
- try:
- OUT.append({'addr' : addr,
- 'port' : port,
- 'state' : state,
- 'extra' : extra})
- if cert:
- CERT_OUT.append({'addr' : addr,
- 'port' : port,
- 'clean_cert' : clean_pem_cert(cert)})
- finally:
- LOCK.release()
-
-def probe(address,theCtx=None):
- sock = s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- logging.info("Opening socket to %s",address)
- try:
- s.connect(address)
- except IOError, e:
- logging.info("Error %s from socket connect.",e)
- record(address, CONNECTING, e)
- s.close()
- return
- logging.info("Socket to %s open. Launching SSL handshake.",address)
- if ssl:
- try:
- s = ssl.wrap_socket(s,cert_reqs=ssl.CERT_NONE,ca_certs=None)
- # "MARCO!"
- s.do_handshake()
- except IOError, e:
- logging.info("Error %s from ssl handshake",e)
- record(address, HANDSHAKING, e)
- s.close()
- sock.close()
- return
- cert = s.getpeercert(True)
- if cert != None:
- cert = ssl.DER_cert_to_PEM_cert(cert)
- elif OpenSSL:
- try:
- s = OpenSSL.SSL.Connection(theCtx, s)
- s.set_connect_state()
- s.setblocking(True)
- s.do_handshake()
- cert = s.get_peer_certificate()
- if cert != None:
- cert = OpenSSL.crypto.dump_certificate(
- OpenSSL.crypto.FILETYPE_PEM, cert)
- except IOError, e:
- logging.info("Error %s from OpenSSL handshake",e)
- record(address, HANDSHAKING, e)
- s.close()
- sock.close()
- return
- else:
- try:
- s = socket.ssl(s)
- s.write('a')
- cert = s.server()
- except IOError, e:
- logging.info("Error %s from socket.ssl handshake",e)
- record(address, HANDSHAKING, e)
- sock.close()
- return
-
- logging.info("SSL handshake with %s finished",address)
- # "POLO!"
- record(address,OK, cert=cert)
- if (ssl or OpenSSL):
- s.close()
- sock.close()
-
-def parseNetworkstatus(ns):
- for line in ns:
- if line.startswith('r '):
- r = line.split()
- yield (r[-3],int(r[-2]))
-
-def parseCachedDescs(cd):
- for line in cd:
- if line.startswith('router '):
- r = line.split()
- yield (r[2],int(r[3]))
-
-def worker(addrList, origLength):
- done = False
- logging.info("Launching thread.")
-
- if OpenSSL is not None:
- context = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_METHOD)
- else:
- context = None
-
- while True:
- LOCK.acquire()
- try:
- if addrList:
- print "Starting test %d/%d"%(
- 1+origLength-len(addrList),origLength)
- addr = addrList.pop()
- else:
- return
- finally:
- LOCK.release()
-
- try:
- logging.info("Launching probe for %s",addr)
- probe(addr, context)
- except Exception, e:
- logging.info("Unexpected error from %s",addr)
- record(addr, ERROR, e)
-
-def runThreaded(addrList, nThreads):
- ts = []
- origLen = len(addrList)
- for num in xrange(nThreads):
- t = threading.Thread(target=worker, args=(addrList,origLen))
- t.setName("Th#%s"%num)
- ts.append(t)
- t.start()
- for t in ts:
- logging.info("Joining thread %s",t.getName())
- t.join()
-
-def main(self, args):
- # BEGIN
- # This logic should be present in more or less all plugoos
- global OUT
- global CERT_OUT
- global OUT_DATA
- global CERT_OUT_DATA
- OUT_DATA = []
- CERT_OUT_DATA = []
-
- try:
- OUT = output.data(name=args.output.main) #open(args.output.main, 'w')
- except:
- print "No output file given. quitting..."
- return -1
-
- try:
- CERT_OUT = output.data(args.output.certificates) #open(args.output.certificates, 'w')
- except:
- print "No output cert file given. quitting..."
- return -1
-
- logging.basicConfig(format='%(asctime)s [%(levelname)s] [%(threadName)s] %(message)s',
- datefmt="%b %d %H:%M:%S",
- level=logging.INFO,
- filename=args.log)
- logging.info("============== STARTING NEW LOG")
- # END
-
- if ssl is not None:
- methodName = "ssl"
- elif OpenSSL is not None:
- methodName = "OpenSSL"
- else:
- methodName = "socket"
- logging.info("Running marco with method '%s'", methodName)
-
- addresses = []
-
- if args.input.ips:
- for fn in input.file(args.input.ips).simple():
- a, b = fn.split(":")
- addresses.append( (a,int(b)) )
-
- elif args.input.consensus:
- for fn in args:
- print fn
- for a,b in parseNetworkstatus(open(args.input.consensus)):
- addresses.append( (a,b) )
-
- if args.input.randomize:
- # Take a random permutation of the set the knuth way!
- for i in range(0, len(addresses)):
- j = random.randint(0, i)
- addresses[i], addresses[j] = addresses[j], addresses[i]
-
- if len(addresses) == 0:
- logging.error("No input source given, quiting...")
- return -1
-
- addresses = list(addresses)
-
- if not args.input.randomize:
- addresses.sort()
-
- runThreaded(addresses, N_THREADS)
-
-class MarcoPlugin(Plugoo):
- def __init__(self):
- self.name = ""
-
- self.modules = [ "logging", "socket", "time", "random", "threading", "sys",
- "OpenSSL.SSL", "OpenSSL.crypto", "os" ]
-
- self.input = Storage()
- self.input.ip = None
- try:
- c_file = os.path.expanduser("~/.tor/cached-consensus")
- open(c_file)
- self.input.consensus = c_file
- except:
- pass
-
- try:
- c_file = os.path.expanduser("~/tor/bundle/tor-browser_en-US/Data/Tor/cached-consensus")
- open(c_file)
- self.input.consensus = c_file
- except:
- pass
-
- if not self.input.consensus:
- print "Error importing consensus file"
- sys.exit(1)
-
- self.output = Storage()
- self.output.main = 'reports/marco-1.yamlooni'
- self.output.certificates = 'reports/marco_certs-1.out'
-
- # XXX This needs to be moved to a proper function
- # refactor, refactor and ... refactor!
- if os.path.exists(self.output.main):
- basedir = "/".join(self.output.main.split("/")[:-1])
- fn = self.output.main.split("/")[-1].split(".")
- ext = fn[1]
- name = fn[0].split("-")[0]
- i = fn[0].split("-")[1]
- i = int(i) + 1
- self.output.main = os.path.join(basedir, name + "-" + str(i) + "." + ext)
-
- if os.path.exists(self.output.certificates):
- basedir = "/".join(self.output.certificates.split("/")[:-1])
- fn = self.output.certificates.split("/")[-1].split(".")
- ext = fn[1]
- name = fn[0].split("-")[0]
- i = fn[0].split("-")[1]
- i = int(i) + 1
- self.output.certificates= os.path.join(basedir, name + "-" + str(i) + "." + ext)
-
- # We require for Tor to already be running or have recently run
- self.args = Storage()
- self.args.input = self.input
- self.args.output = self.output
- self.args.log = 'reports/marco.log'
-
- def ooni_main(self, cmd):
- self.args.input.randomize = cmd.randomize
- self.args.input.ips = cmd.listfile
- main(self, self.args)
-
-if __name__ == '__main__':
- if len(sys.argv) < 2:
- print >> sys.stderr, ("This script takes one or more networkstatus "
- "files as an argument.")
- self = None
- main(self, sys.argv[1:])
-
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#
-# * Neither the names of the copyright owners nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
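
Stripped of the TLS and certificate handling, marco's concurrency model is a fixed pool of worker threads popping addresses from one shared list and appending results under a single lock. A bare-bones sketch of that pattern (probe reduced to a plain TCP connect; thread count illustrative):

    import socket
    import threading

    LOCK = threading.RLock()
    RESULTS = []

    def probe(addr):
        """Try a plain TCP connect and record the outcome."""
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            s.connect(addr)
            state = "ok"
        except IOError:
            state = "noconnect"
        s.close()
        with LOCK:
            RESULTS.append({'addr': addr[0], 'port': addr[1], 'state': state})

    def worker(addr_list):
        """Pop addresses until the shared list is exhausted."""
        while True:
            with LOCK:
                if not addr_list:
                    return
                addr = addr_list.pop()
            probe(addr)

    def run_threaded(addr_list, n_threads=16):
        threads = [threading.Thread(target=worker, args=(addr_list,))
                   for _ in xrange(n_threads)]
        for t in threads:
            t.start()
        for t in threads:
            t.join()
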
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/proxy_plgoo.py b/old-to-be-ported-code/very-old/ooni/plugins/proxy_plgoo.py
deleted file mode 100644
index d175c1c..0000000
--- a/old-to-be-ported-code/very-old/ooni/plugins/proxy_plgoo.py
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/usr/bin/python
-
-import sys
-from twisted.internet import reactor, endpoints
-from twisted.web import client
-from ooni.plugooni import Plugoo
-from ooni.socksclient import SOCKSv4ClientProtocol, SOCKSWrapper
-
-class HttpPlugin(Plugoo):
- def __init__(self):
- self.name = ""
- self.type = ""
- self.paranoia = ""
- self.modules_to_import = []
- self.output_dir = ""
- self.buf = ''
-
- def cb(self, type, content):
- print "got %d bytes from %s" % (len(content), type) # DEBUG
- if not self.buf:
- self.buf = content
- else:
- if self.buf == content:
- print "SUCCESS"
- else:
- print "FAIL"
- reactor.stop()
-
- def endpoint(self, scheme, host, port):
- ep = None
- if scheme == 'http':
- ep = endpoints.TCP4ClientEndpoint(reactor, host, port)
- elif scheme == 'https':
- from twisted.internet import ssl
- ep = endpoints.SSL4ClientEndpoint(reactor, host, port,
- ssl.ClientContextFactory())
- return ep
-
- def ooni_main(self, cmd):
- # We don't have the Command object so cheating for now.
- url = cmd.hostname
-
- # FIXME: validate that url is on the form scheme://host[:port]/path
- scheme, host, port, path = client._parse(url)
-
- ctrl_dest = self.endpoint(scheme, host, port)
- if not ctrl_dest:
- raise Exception('unsupported scheme %s in %s' % (scheme, url))
- if cmd.controlproxy:
- assert scheme != 'https', "no support for proxied https atm, sorry"
- _, proxy_host, proxy_port, _ = client._parse(cmd.controlproxy)
- control = SOCKSWrapper(reactor, proxy_host, proxy_port, ctrl_dest)
- print "proxy: ", proxy_host, proxy_port
- else:
- control = ctrl_dest
- f = client.HTTPClientFactory(url)
- f.deferred.addCallback(lambda x: self.cb('control', x))
- control.connect(f)
-
- exp_dest = self.endpoint(scheme, host, port)
- if not exp_dest:
- raise Exception('unsupported scheme %s in %s' % (scheme, url))
- # FIXME: use the experiment proxy if there is one
- experiment = exp_dest
- f = client.HTTPClientFactory(url)
- f.deferred.addCallback(lambda x: self.cb('experiment', x))
- experiment.connect(f)
-
- reactor.run()
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/simple_dns_plgoo.py b/old-to-be-ported-code/very-old/ooni/plugins/simple_dns_plgoo.py
deleted file mode 100644
index 87d3684..0000000
--- a/old-to-be-ported-code/very-old/ooni/plugins/simple_dns_plgoo.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python
-#
-# DNS tampering detection module
-# by Jacob Appelbaum <jacob(a)appelbaum.net>
-#
-# This module performs DNS queries against a known good resolver and a possible
-# bad resolver. We compare every resolved name against a list of known filters
-# - if we match, we ring a bell; otherwise, we list possible filter IP
-# addresses. There is a high false positive rate for sites that are GeoIP load
-# balanced.
-#
-
-import sys
-import ooni.dnsooni
-
-from ooni.plugooni import Plugoo
-
-class DNSBulkPlugin(Plugoo):
- def __init__(self):
- self.in_ = sys.stdin
- self.out = sys.stdout
- self.randomize = True # Pass this down properly
- self.debug = False
-
- def DNS_Tests(self):
- print "DNS tampering detection for list of domains:"
- tests = self.get_tests_by_filter(("_DNS_BULK_Tests"), (ooni.dnsooni))
- self.run_tests(tests)
-
- def magic_main(self):
- self.run_plgoo_tests("_Tests")
-
- def ooni_main(self, args):
- self.magic_main()
-
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/tcpcon_plgoo.py b/old-to-be-ported-code/very-old/ooni/plugins/tcpcon_plgoo.py
deleted file mode 100644
index 01dee81..0000000
--- a/old-to-be-ported-code/very-old/ooni/plugins/tcpcon_plgoo.py
+++ /dev/null
@@ -1,278 +0,0 @@
-#!/usr/bin/python
-# Copyright 2011 The Tor Project, Inc.
-# License at end of file.
-#
-# This is a modified version of the marco plugoo. Given a list of #
-# IP:port addresses, this plugoo will attempt a TCP connection with each
-# host and write the results to a .yamlooni file.
-#
-# This plugoo uses threads and as a result, it's not friendly to SIGINT signals.
-#
-
-import logging
-import socket
-import time
-import random
-import threading
-import sys
-import os
-try:
- from ooni.plugooni import Plugoo
-except:
- print "Error importing Plugoo"
-
-try:
- from ooni.common import Storage
-except:
- print "Error importing Storage"
-
-try:
- from ooni import output
-except:
- print "Error importing output"
-
-try:
- from ooni import input
-except:
- print "Error importing output"
-
-################################################################
-
-# How many servers should we test in parallel?
-N_THREADS = 16
-
-# How long do we give individual socket operations to succeed or fail?
-# (Seconds)
-TIMEOUT = 10
-
-################################################################
-
-CONNECTING = "noconnect"
-OK = "ok"
-ERROR = "err"
-
-LOCK = threading.RLock()
-socket.setdefaulttimeout(TIMEOUT)
-
-# We will want to log the IP address, the port and the state
-def record((addr,port), state, extra=None):
- LOCK.acquire()
- try:
- OUT.append({'addr' : addr,
- 'port' : port,
- 'state' : state,
- 'extra' : extra})
- finally:
- LOCK.release()
-
-# For each IP address in the list, open a socket, write to the log and
-# then close the socket
-def probe(address,theCtx=None):
- sock = s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- logging.info("Opening socket to %s",address)
- try:
- s.connect(address)
- except IOError, e:
- logging.info("Error %s from socket connect.",e)
- record(address, CONNECTING, e)
- s.close()
- return
- logging.info("Socket to %s open. Successfully launched TCP handshake.",address)
- record(address, OK)
- s.close()
-
-def parseNetworkstatus(ns):
- for line in ns:
- if line.startswith('r '):
- r = line.split()
- yield (r[-3],int(r[-2]))
-
-def parseCachedDescs(cd):
- for line in cd:
- if line.startswith('router '):
- r = line.split()
- yield (r[2],int(r[3]))
-
-def worker(addrList, origLength):
- done = False
- context = None
-
- while True:
- LOCK.acquire()
- try:
- if addrList:
- print "Starting test %d/%d"%(
- 1+origLength-len(addrList),origLength)
- addr = addrList.pop()
- else:
- return
- finally:
- LOCK.release()
-
- try:
- logging.info("Launching probe for %s",addr)
- probe(addr, context)
- except Exception, e:
- logging.info("Unexpected error from %s",addr)
- record(addr, ERROR, e)
-
-def runThreaded(addrList, nThreads):
- ts = []
- origLen = len(addrList)
- for num in xrange(nThreads):
- t = threading.Thread(target=worker, args=(addrList,origLen))
- t.setName("Th#%s"%num)
- ts.append(t)
- t.start()
- for t in ts:
- t.join()
-
-def main(self, args):
- # BEGIN
- # This logic should be present in more or less all plugoos
- global OUT
- global OUT_DATA
- OUT_DATA = []
-
- try:
- OUT = output.data(name=args.output.main) #open(args.output.main, 'w')
- except:
- print "No output file given. quitting..."
- return -1
-
- logging.basicConfig(format='%(asctime)s [%(levelname)s] [%(threadName)s] %(message)s',
- datefmt="%b %d %H:%M:%S",
- level=logging.INFO,
- filename=args.log)
- logging.info("============== STARTING NEW LOG")
- # END
-
- methodName = "socket"
- logging.info("Running tcpcon with method '%s'", methodName)
-
- addresses = []
-
- if args.input.ips:
- for fn in input.file(args.input.ips).simple():
- a, b = fn.split(":")
- addresses.append( (a,int(b)) )
-
- elif args.input.consensus:
- for fn in args:
- print fn
- for a,b in parseNetworkstatus(open(args.input.consensus)):
- addresses.append( (a,b) )
-
- if args.input.randomize:
- # Take a random permutation of the set the knuth way!
- for i in range(0, len(addresses)):
- j = random.randint(0, i)
- addresses[i], addresses[j] = addresses[j], addresses[i]
-
- if len(addresses) == 0:
- logging.error("No input source given, quiting...")
- return -1
-
- addresses = list(addresses)
-
- if not args.input.randomize:
- addresses.sort()
-
- runThreaded(addresses, N_THREADS)
-
-class MarcoPlugin(Plugoo):
- def __init__(self):
- self.name = ""
-
- self.modules = [ "logging", "socket", "time", "random", "threading", "sys",
- "os" ]
-
- self.input = Storage()
- self.input.ip = None
- try:
- c_file = os.path.expanduser("~/.tor/cached-consensus")
- open(c_file)
- self.input.consensus = c_file
- except:
- pass
-
- try:
- c_file = os.path.expanduser("~/tor/bundle/tor-browser_en-US/Data/Tor/cached-consensus")
- open(c_file)
- self.input.consensus = c_file
- except:
- pass
-
- if not self.input.consensus:
- print "Error importing consensus file"
- sys.exit(1)
-
- self.output = Storage()
- self.output.main = 'reports/tcpcon-1.yamlooni'
- self.output.certificates = 'reports/tcpcon_certs-1.out'
-
- # XXX This needs to be moved to a proper function
- # refactor, refactor and ... refactor!
- if os.path.exists(self.output.main):
- basedir = "/".join(self.output.main.split("/")[:-1])
- fn = self.output.main.split("/")[-1].split(".")
- ext = fn[1]
- name = fn[0].split("-")[0]
- i = fn[0].split("-")[1]
- i = int(i) + 1
- self.output.main = os.path.join(basedir, name + "-" + str(i) + "." + ext)
-
- if os.path.exists(self.output.certificates):
- basedir = "/".join(self.output.certificates.split("/")[:-1])
- fn = self.output.certificates.split("/")[-1].split(".")
- ext = fn[1]
- name = fn[0].split("-")[0]
- i = fn[0].split("-")[1]
- i = int(i) + 1
- self.output.certificates= os.path.join(basedir, name + "-" + str(i) + "." + ext)
-
- # We require for Tor to already be running or have recently run
- self.args = Storage()
- self.args.input = self.input
- self.args.output = self.output
- self.args.log = 'reports/tcpcon.log'
-
- def ooni_main(self, cmd):
- self.args.input.randomize = cmd.randomize
- self.args.input.ips = cmd.listfile
- main(self, self.args)
-
-if __name__ == '__main__':
- if len(sys.argv) < 2:
- print >> sys.stderr, ("This script takes one or more networkstatus "
- "files as an argument.")
- self = None
- main(self, sys.argv[1:])
-
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#
-# * Neither the names of the copyright owners nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/tor.py b/old-to-be-ported-code/very-old/ooni/plugins/tor.py
deleted file mode 100644
index 0d95d4d..0000000
--- a/old-to-be-ported-code/very-old/ooni/plugins/tor.py
+++ /dev/null
@@ -1,80 +0,0 @@
-import re
-import os.path
-import signal
-import subprocess
-import socket
-import threading
-import time
-import logging
-
-from pytorctl import TorCtl
-
-torrc = os.path.join(os.getcwd(),'torrc') #os.path.join(projroot, 'globaleaks', 'tor', 'torrc')
-# hiddenservice = os.path.join(projroot, 'globaleaks', 'tor', 'hiddenservice')
-
-class ThreadProc(threading.Thread):
- def __init__(self, cmd):
- threading.Thread.__init__(self)
- self.cmd = cmd
- self.proc = None
-
- def run(self):
- print "running"
- try:
- self.proc = subprocess.Popen(self.cmd,
- shell = False, stdout = subprocess.PIPE,
- stderr = subprocess.PIPE)
-
- except OSError:
- logging.fatal('cannot execute command')
-
-class Tor:
- def __init__(self):
- self.start()
-
- def check(self):
- conn = TorCtl.connect()
- if conn != None:
- conn.close()
- return True
-
- return False
-
-
- def start(self):
- if not os.path.exists(torrc):
- raise OSError("torrc doesn't exist (%s)" % torrc)
-
- tor_cmd = ["tor", "-f", torrc]
-
- torproc = ThreadProc(tor_cmd)
- torproc.run()
-
- bootstrap_line = re.compile("Bootstrapped 100%: ")
-
- while True:
- if torproc.proc == None:
- time.sleep(1)
- continue
-
- init_line = torproc.proc.stdout.readline().strip()
-
- if not init_line:
- torproc.proc.kill()
- return False
-
- if bootstrap_line.search(init_line):
- break
-
- return True
-
- def stop(self):
- if not self.check():
- return
-
- conn = TorCtl.connect()
- if conn != None:
- conn.send_signal("SHUTDOWN")
- conn.close()
-
-t = Tor()
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/torrc b/old-to-be-ported-code/very-old/ooni/plugins/torrc
deleted file mode 100644
index b9ffc80..0000000
--- a/old-to-be-ported-code/very-old/ooni/plugins/torrc
+++ /dev/null
@@ -1,9 +0,0 @@
-SocksPort 9050
-ControlPort 9051
-VirtualAddrNetwork 10.23.47.0/10
-AutomapHostsOnResolve 1
-TransPort 9040
-TransListenAddress 127.0.0.1
-DNSPort 5353
-DNSListenAddress 127.0.0.1
-
diff --git a/old-to-be-ported-code/very-old/ooni/plugooni.py b/old-to-be-ported-code/very-old/ooni/plugooni.py
deleted file mode 100644
index 17f17b3..0000000
--- a/old-to-be-ported-code/very-old/ooni/plugooni.py
+++ /dev/null
@@ -1,106 +0,0 @@
-#!/usr/bin/env python
-#
-# Plugooni, ooni plugin module for loading plgoo files.
-# by Jacob Appelbaum <jacob(a)appelbaum.net>
-# Arturo Filasto' <art(a)fuffa.org>
-
-import sys
-import os
-
-import imp, pkgutil, inspect
-
-class Plugoo:
- def __init__(self, name, plugin_type, paranoia, author):
- self.name = name
- self.author = author
- self.type = plugin_type
- self.paranoia = paranoia
-
- """
- Expect a tuple of strings in 'filters' and a tuple of ooni 'plugins'.
- Return a list of (plugin, function) tuples that match 'filter' in 'plugins'.
- """
- def get_tests_by_filter(self, filters, plugins):
- ret_functions = []
-
- for plugin in plugins:
- for function_ptr in dir(plugin):
- if function_ptr.endswith(filters):
- ret_functions.append((plugin,function_ptr))
- return ret_functions
-
- """
- Expect a list of (plugin, function) tuples that must be ran, and three strings 'clean'
- 'dirty' and 'failed'.
- Run the tests and print 'clean','dirty' or 'failed' according to the test result.
- """
- def run_tests(self, tests, clean="clean", dirty="dirty", failed="failed"):
- for test in tests:
- filter_result = getattr(test[0], test[1])(self)
- if filter_result == True:
- print test[1] + ": " + clean
- elif filter_result == None:
- print test[1] + ": " + failed
- else:
- print test[1] + ": " + dirty
-
- """
- Find all the tests belonging to plgoo 'self' and run them.
- We know the tests when we see them because they end in 'filter'.
- """
- def run_plgoo_tests(self, filter):
- for function_ptr in dir(self):
- if function_ptr.endswith(filter):
- getattr(self, function_ptr)()
-
-PLUGIN_PATHS = [os.path.join(os.getcwd(), "ooni", "plugins")]
-RESERVED_NAMES = [ "skel_plgoo" ]
-
-class Plugooni():
- def __init__(self, args):
- self.in_ = sys.stdin
- self.out = sys.stdout
- self.debug = False
- self.loadall = True
- self.plugin_name = args.plugin_name
- self.listfile = args.listfile
-
- self.plgoo_found = False
-
- # Print all the plugoons to stdout.
- def list_plugoons(self):
- print "Plugooni list:"
- for loader, name, ispkg in pkgutil.iter_modules(PLUGIN_PATHS):
- if name not in RESERVED_NAMES:
- print "\t%s" %(name.split("_")[0])
-
- # Return name of the plgoo class of a plugin.
- # We know because it always ends with "Plugin".
- def get_plgoo_class(self,plugin):
- for memb_name, memb in inspect.getmembers(plugin, inspect.isclass):
- if memb.__name__.endswith("Plugin"):
- return memb
-
- # This function is responsible for loading and running the plugoons
- # the user wants to run.
- def run(self, command_object):
- print "Plugooni: the ooni plgoo plugin module loader"
-
- # iterate all modules
- for loader, name, ispkg in pkgutil.iter_modules(PLUGIN_PATHS):
- # see if this module should be loaded
- if (self.plugin_name == "all") or (name == self.plugin_name+"_plgoo"):
- self.plgoo_found = True # we found at least one plgoo!
-
- file, pathname, desc = imp.find_module(name, PLUGIN_PATHS)
- # load module
- plugin = imp.load_module(name, file, pathname, desc)
- # instantiate plgoo class and call its ooni_main()
- self.get_plgoo_class(plugin)().ooni_main(command_object)
-
- # if we couldn't find the plgoo; whine to the user
- if self.plgoo_found is False:
- print "Plugooni could not find plugin '%s'!" %(self.plugin_name)
-
-if __name__ == '__main__':
- self.main()
diff --git a/old-to-be-ported-code/very-old/ooni/transparenthttp.py b/old-to-be-ported-code/very-old/ooni/transparenthttp.py
deleted file mode 100644
index 311fb32..0000000
--- a/old-to-be-ported-code/very-old/ooni/transparenthttp.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-#
-# Captive Portal Detection With Multi-Vendor Emulation
-# by Jacob Appelbaum <jacob(a)appelbaum.net>
-#
-# This module performs multiple tests that match specific vendor
-# mitm proxies
-
-import sys
-import ooni.http
-import ooni.report
-
-class TransparentHTTPProxy():
- def __init__(self, args):
- self.in_ = sys.stdin
- self.out = sys.stdout
- self.debug = False
- self.logger = ooni.report.Log().logger
-
- def TransparentHTTPProxy_Tests(self):
- print "Transparent HTTP Proxy:"
- filter_name = "_TransparentHTTP_Tests"
- tests = [ooni.http]
- for test in tests:
- for function_ptr in dir(test):
- if function_ptr.endswith(filter_name):
- filter_result = getattr(test, function_ptr)(self)
- if filter_result == True:
- print function_ptr + " thinks the network is clean"
- elif filter_result == None:
- print function_ptr + " failed"
- else:
- print function_ptr + " thinks the network is dirty"
-
- def main(self):
- for function_ptr in dir(self):
- if function_ptr.endswith("_Tests"):
- getattr(self, function_ptr)()
-
-if __name__ == '__main__':
- self.main()
diff --git a/old-to-be-ported-code/very-old/traceroute.py b/old-to-be-ported-code/very-old/traceroute.py
deleted file mode 100644
index e8252c1..0000000
--- a/old-to-be-ported-code/very-old/traceroute.py
+++ /dev/null
@@ -1,108 +0,0 @@
-try:
- from dns import resolver
-except:
- print "Error: dnspython is not installed (http://www.dnspython.org/)"
-import gevent
-import os
-import plugoo
-
-try:
- import scapy
-except:
- print "Error: traceroute plugin requires scapy to be installed (http://www.secdev.org/projects/scapy)"
-
-from plugoo.assets import Asset
-from plugoo.tests import Test
-
-import socket
-
-__plugoo__ = "Traceroute"
-__desc__ = "Performs TTL walking tests"
-
-class TracerouteAsset(Asset):
- def __init__(self, file=None):
- self = Asset.__init__(self, file)
-
-
-class Traceroute(Test):
- """A *very* quick and dirty traceroute implementation, UDP and TCP
- """
- def traceroute(self, dst, dst_port=3880, src_port=3000, proto="tcp", max_hops=30):
- dest_addr = socket.gethostbyname(dst)
- print "Doing traceroute on %s" % dst
-
- recv = socket.getprotobyname('icmp')
- send = socket.getprotobyname(proto)
- ttl = 1
- while True:
- recv_sock = socket.socket(socket.AF_INET, socket.SOCK_RAW, recv)
- if proto == "tcp":
- send_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, send)
- else:
- send_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, send)
- recv_sock.settimeout(10)
- send_sock.settimeout(10)
-
- send_sock.setsockopt(socket.SOL_IP, socket.IP_TTL, ttl)
- recv_sock.bind(("", src_port))
- if proto == "tcp":
- try:
- send_sock.settimeout(2)
- send_sock.connect((dst, dst_port))
- except socket.timeout:
- pass
-
- except Exception, e:
- print "Error doing connect %s" % e
- else:
- send_sock.sendto("", (dst, dst_port))
-
- curr_addr = None
- try:
- print "receiving data..."
- _, curr_addr = recv_sock.recvfrom(512)
- curr_addr = curr_addr[0]
-
- except socket.error, e:
- print "SOCKET ERROR: %s" % e
-
- except Exception, e:
- print "ERROR: %s" % e
-
- finally:
- send_sock.close()
- recv_sock.close()
-
- if curr_addr is not None:
- curr_host = "%s" % curr_addr
- else:
- curr_host = "*"
-
- print "%d\t%s" % (ttl, curr_host)
-
- if curr_addr == dest_addr or ttl > max_hops:
- break
-
- ttl += 1
-
-
- def experiment(self, *a, **kw):
- # this is just a dirty hack
- address = kw['data'][0]
-
- self.traceroute(address)
-
-def run(ooni):
- """Run the test"""
- config = ooni.config
- urls = []
-
- traceroute_experiment = TracerouteAsset(os.path.join(config.main.assetdir, \
- config.tests.traceroute))
-
- assets = [traceroute_experiment]
-
- traceroute = Traceroute(ooni)
- ooni.logger.info("starting traceroute test")
- traceroute.run(assets)
- ooni.logger.info("finished")
diff --git a/to-be-ported/TODO b/to-be-ported/TODO
new file mode 100644
index 0000000..81d834f
--- /dev/null
+++ b/to-be-ported/TODO
@@ -0,0 +1,418 @@
+This is a list of techniques that should be added as plugins or hooks or yamlooni
+
+Implement Plugooni - our plugin framework
+Implement Yamlooni - our output format
+Implement Proxooni - our proxy spec and program
+
+We should launch our own Tor on a special port (say, 127.0.0.1:9066)
+We should act as a controller with TorCtl to do this, etc
+We should take the Tor consensus file and pass it to plugins such as marco
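+ (Sketch of the consensus parsing this implies, mirroring the old marco/tcpcon
+ parseNetworkstatus helper; "r" lines end with the IP, ORPort and DirPort fields):
+
+   def router_addresses(consensus_path):
+       # Yield (IP, ORPort) pairs from the "r" lines of a cached consensus.
+       for line in open(consensus_path):
+           if line.startswith('r '):
+               parts = line.split()
+               yield parts[-3], int(parts[-2])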
+
+HTTP Host header comparison of a vs b
+HTTP Content-Length header comparison of a vs b
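+ (Sketch of the comparison between the control (a) and experiment (b)
+ responses; the header dicts are a hypothetical shape):
+
+   def header_differs(resp_a, resp_b, name):
+       # True when the two responses disagree on the given header.
+       return resp_a.get(name) != resp_b.get(name)
+
+   # e.g. header_differs(a_headers, b_headers, 'Host')
+   #      header_differs(a_headers, b_headers, 'Content-Length')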
+
+GET request splitting
+ "G E T "
+ Used in Iran
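+ (Rough probe sketch for this; the host and path are placeholders):
+
+   import socket
+
+   def split_get(host, path="/"):
+       # Send a request with a space-split method and return the raw reply.
+       s = socket.create_connection((host, 80), timeout=10)
+       s.send("G E T %s HTTP/1.0\r\nHost: %s\r\n\r\n" % (path, host))
+       data = s.recv(4096)
+       s.close()
+       return data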
+
+General Malformed HTTP requests
+ Error pages are fingerprintable
+
+traceroute
+ icmp/udp/tcp
+ each network link is an edge, each hop is a vertex in a network graph
+
+traceroute hop count
+ "TTL walking"
+
+Latency measurement
+TCP reset detection
+Forged DNS spoofing detection
+
+DNS oracle query tool
+ given DNS server foo - test resolve and look for known block pages
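+ (Minimal sketch with dnspython, comparing an experiment resolver against a
+ known-good control; the resolver addresses and domain are placeholders):
+
+   from dns import resolver
+
+   def compare_resolvers(domain, control_ip, experiment_ip):
+       control = resolver.Resolver()
+       control.nameservers = [control_ip]
+       experiment = resolver.Resolver()
+       experiment.nameservers = [experiment_ip]
+       control_ips = set(r.address for r in control.query(domain, 'A'))
+       experiment_ips = set(r.address for r in experiment.query(domain, 'A'))
+       # An empty intersection hints at tampering (beware GeoIP load balancing).
+       return control_ips & experiment_ips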
+
+Test HTTP header order - do they get reordered?
+
+Look for these filter fingerprints:
+X-Squid-Error: ERR_SCC_SMARTFILTER_DENIED 0
+X-Squid-Error: ERR_ACCESS_DENIED 0
+X-Cache: MISS from SmartFilter
+
+
+WWW-Authenticate: Basic realm="SmartFilter Control List HTTP Download"
+
+
+Via: 1.1 WEBFILTER.CONSERVESCHOOL.ORG:8080
+
+X-Cache: MISS from webfilter.whiteschneider.com
+X-Cache: MISS from webfilter.whiteschneider.com
+X-Cache: MISS from webfilter.whiteschneider.com
+
+Location: http://192.168.0.244/webfilter/blockpage?nonce=7d2b7e500e99a0fe&tid=3
+
+
+X-Cache: MISS from webfilter.imscs.local
+X-Cache: MISS from webfilter.tjs.at
+
+
+Via: 1.1 webwasher (Webwasher 6.8.7.9396)
+
+Websense:
+HTTP/1.0 301 Moved Permanently -> Location: http://www.websense.com/
+
+Via: HTTP/1.1 localhost.localdomain (Websense-Content_Gateway/7.1.4 [c s f ]), HTTP/1.0 localhost.localdomain (Websense-Content_Gateway/7.1.4 [cMsSf ])
+
+
+BlueCoat:
+
+Via: 1.1 testrating.dc5.es.bluecoat.com
+403 ->
+Set-Cookie: BIGipServerpool_bluecoat=1185677834.20480.0000; expires=Fri, 15-Apr-2011 10:13:21 GMT; path=/
+
+HTTP/1.0 407 Proxy Authentication Required ( The ISA Server requires authorization to fulfill the request. Access to the Web Proxy filter is denied. ) -> Via: 1.1 WEBSENSE
+
+HTTP/1.0 302 Found -> Location: http://bluecoat/?cfru=aHR0cDovLzIwMC4yNy4xMjMuMTc4Lw==
+
+HTTP/1.0 403 Forbidden
+Server: squid/3.0.STABLE8
+
+X-Squid-Error: ERR_ACCESS_DENIED 0
+X-Cache: MISS from Bluecoat
+X-Cache-Lookup: NONE from Bluecoat:3128
+Via: 1.0 Bluecoat (squid/3.0.STABLE8)
+
+ISA server:
+HTTP/1.0 403 Forbidden ( ISA Server is configured to block HTTP requests that require authentication. )
+
+
+Unknown:
+X-XSS-Protection: 1; mode=block
+
+Rimon filter:
+
+Rimon: RWC_BLOCK
+HTTP/1.1 Rimon header
+Rimon header is only sent by lighttpd
+http://www.ynetnews.com/articles/0,7340,L-3446129,00.html
+http://btya.org/pdfs/rvienerbrochure.pdf
+
+Korea filtering:
+HTTP/1.0 302 Object Moved -> Location: http://www.willtechnology.co.kr/eng/BlockingMSGew.htm
+Redirects to Korean filter:
+http://www.willtechnology.co.kr/eng/BlockingMSGew.htm
+
+UA filtering:
+HTTP/1.0 307 Temporary Redirect
+https://my.best.net.ua/login/blocked/
+
+netsweeper:
+HTTP/1.0 302 Moved
+Location: http://netsweeper1.gaggle.net:8080/webadmin/deny/index.php?dpid=53&dpruleid…
+
+Set-cookie: RT_SID_netsweeper.com.80=68a6f5c564a9db297e8feb2bff69d73f; path=/
+X-Cache: MISS from netsweeper.irishbroadband.ie
+X-Cache-Lookup: NONE from netsweeper.irishbroadband.ie:80
+Via: 1.0 netsweeper.irishbroadband.ie:80 (squid/2.6.STABLE21)
+
+Nokia:
+Via: 1.1 saec-nokiaq05ca (NetCache NetApp/6.0.7)
+Server: "Nokia"
+
+CensorNet:
+HTTP/1.0 401 Authorization Required
+WWW-Authenticate: Basic realm="CensorNet Administration Area"
+Server: CensorNet/4.0
+
+http://www.itcensor.com/censor
+
+
+Server: ZyWALL Content Filter
+
+Apache/1.3.34 (Unix) filter/1.0
+
+HTTP/1.0 502 infiniteproxyloop
+Via: 1.0 218.102.20.37 (McAfee Web Gateway 7.0.1.5.0.8505)
+
+
+Set-Cookie: McAfee-SCM-URL-Filter-Coach="dD4OzXciEcp8Ihf1dD4ZzHM5FMZ2PSvRTllOnSR4RZkqfkmEIGgb3hZlVJsEaFaXNmNS3mgsdZAxaVOKIGgrrSx4Rb8hekmNKn4g02VZToogf1SbIQcVz3Q8G/U="; Comment="McAfee URL access coaching"; Version=1; Path=/; Max-Age=900; expires=Sat, 18 Dec 2010 06:47:11 GMT;
+
+
+WWW-Authenticate: Basic realm="(Nancy McAfee)"
+
+
+No known fingerprints for:
+NetNanny
+WebChaver
+accountable2you.com
+http://www.shodanhq.com/?q=barracuda
+http://www.shodanhq.com/?q=untangle
+http://www.shodanhq.com/?q=Lightspeed
+
+Server: Smarthouse Lightspeed
+Server: Smarthouse Lightspeed2
+Server: Smarthouse Lightspeed 3
+
+Server: EdgePrism/3.8.1.1
+
+
+X-Cache: MISS from Barracuda-WebFilter.jmpsecurities.com
+Via: 1.0 Barracuda-WebFilter.jmpsecurities.com:8080 (http_scan/4.0.2.6.19)
+
+HTTP/1.0 302 Redirected by M86 Web Filter
+http://www.m86security.com/products/web_security/m86-web-filter.asp
+
+Location: http://10.1.61.37:81/cgi/block.cgi?URL=http://70.182.111.99/&IP=96.9.174.54…
+
+
+Via: 1.1 WEBSENSE
+
+
+Via: 1.1 192.168.1.251 (McAfee Web Gateway 7.1.0.1.0.10541)
+Via: 1.1 McAfeeSA3000.cbcl.lan
+
+
+X-Squid-Error: ERR_CONNECT_FAIL 111
+X-Cache: MISS from CudaWebFilter.poten.com
+
+http://212.50.251.82/ -iran squid
+
+HTTP/1.0 403 Forbidden ( Forefront TMG denied the specified Uniform Resource Locator (URL). )
+Via: 1.1 TMG
+
+
+Server: NetCache appliance (NetApp/6.0.2)
+
+
+Server: EdgePrism/3.8.1.1
+
+
+Server: Mikrotik HttpProxy
+
+
+Via: 1.1 TMG-04, 1.1 TMG-03
+
+
+X-Squid-Error: ERR_INVALID_REQ 0
+X-Cache: MISS from uspa150.trustedproxies.com
+X-Cache-Lookup: NONE from uspa150.trustedproxies.com:80
+
+http://www.shodanhq.com/host/view/93.125.95.177
+
+
+Server: SarfX WEB: Self Automation Redirect & Filter Expernet.Ltd Security Web Server
+http://203.229.245.100/ <- korea block page
+
+
+
+Server: Asroc Intelligent Security Filter 4.1.8
+
+
+
+Server: tinyproxy/1.8.2
+
+http://www.shodanhq.com/host/view/64.104.95.251
+
+
+
+Server: Asroc Intelligent Security Filter 4.1.8
+
+http://www.shodanhq.com/host/view/67.220.92.62
+
+
+Server: SarfX WEB: Self Automation Redirect & Filter Expernet.Ltd Security Web Server
+http://www.shodanhq.com/host/view/203.229.245.100
+Location: http://192.168.3.20/redirect.cgi?Time=05%2FJul%2F2011%3A21%3A29%3A32%20%2B0…
+
+
+http://www.shodanhq.com/?q=%22content+filter%22+-squid+-apache+-ZyWall&page=4
+http://www.shodanhq.com/host/view/72.5.92.51
+http://www.microsoft.com/forefront/threat-management-gateway/en/us/pricing-licensing.aspx
+
+http://meta.wikimedia.org/wiki/Talk:XFF_project
+
+% dig nats.epiccash.com
+
+; <<>> DiG 9.7.3 <<>> nats.epiccash.com
+;; global options: +cmd
+;; Got answer:
+;; ->>HEADER<<- opcode: QUERY, status: NOERROR, id: 14920
+;; flags: qr rd ra; QUERY: 1, ANSWER: 1, AUTHORITY: 2, ADDITIONAL: 0
+
+;; QUESTION SECTION:
+;nats.epiccash.com. IN A
+
+;; ANSWER SECTION:
+nats.epiccash.com. 5 IN A 172.27.0.1
+
+;; AUTHORITY SECTION:
+epiccash.com. 5 IN NS ns0.example.net.
+epiccash.com. 5 IN NS ns1.example.net.
+
+;; Query time: 81 msec
+;; SERVER: 172.16.42.2#53(172.16.42.2)
+;; WHEN: Sat Jul 16 16:14:11 2011
+;; MSG SIZE rcvd: 98
+
+If we think it's squid, we can perhaps confirm it:
+echo -e "GET cache_object://localhost/info HTTP/1.0\r\n" | nc en.wikipedia.com 80
+Harvest urls from:
+http://urlblacklist.com/?sec=download
+
+https://secure.wikimedia.org/wikipedia/simple/wiki/User_talk:62.30.249.131
+
+mention WCCPv2 filters (http://www.cl.cam.ac.uk/~rnc1/talks/090528-uknof13.pdf)
+
+Cite a bunch of Richard's work:
+http://www.cl.cam.ac.uk/~rnc1/ignoring.pdf
+
+http://www.contentkeeper.com/products/web
+
+We should detect HTTP re-directs to rfc-1918 addresses; they're almost always captive portals.
+We should also detect HTTP MITM served from rfc-1918 addresses for the same reason.
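+(A rough helper both checks could share; pure standard library):
+
+   import socket, struct
+
+   RFC1918 = [("10.0.0.0", 8), ("172.16.0.0", 12), ("192.168.0.0", 16)]
+
+   def is_rfc1918(ip):
+       # True if the dotted-quad ip falls inside a private (RFC 1918) block.
+       n = struct.unpack("!I", socket.inet_aton(ip))[0]
+       for net, bits in RFC1918:
+           base = struct.unpack("!I", socket.inet_aton(net))[0]
+           mask = (0xffffffff << (32 - bits)) & 0xffffffff
+           if (n & mask) == base:
+               return True
+       return False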
+
+We should take a page from sshshuttle and run without touching the disk
+
+VIA Rail MITM's SSL In Ottawa:
+Jul 22 17:47:21.983 [Warning] Problem bootstrapping. Stuck at 85%: Finishing handshake with first hop. (DONE; DONE; count 13; recommendation warn)
+
+http://wireless.colubris.com:81/goform/HtmlLoginRequest?username=al1852&password=al1852
+
+VIA Rail Via header (DONE):
+
+HTTP/1.0 301 Moved Permanently
+Location: http://www.google.com/
+Content-Type: text/html; charset=UTF-8
+Date: Sat, 23 Jul 2011 02:21:30 GMT
+Expires: Mon, 22 Aug 2011 02:21:30 GMT
+Cache-Control: public, max-age=2592000
+Server: gws
+Content-Length: 219
+X-XSS-Protection: 1; mode=block
+X-Cache: MISS from cache_server
+X-Cache-Lookup: MISS from cache_server:3128
+Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
+Connection: close
+
+<HTML><HEAD><meta http-equiv="content-type" content="text/html;charset=utf-8">
+<TITLE>301 Moved</TITLE></HEAD><BODY>
+<H1>301 Moved</H1>
+The document has moved
+<A HREF="http://www.google.com/">here</A>.
+</BODY></HTML>
+
+
+blocked site (DONE):
+
+HTTP/1.0 302 Moved Temporarily
+Server: squid/2.6.STABLE21
+Date: Sat, 23 Jul 2011 02:22:17 GMT
+Content-Length: 0
+Location: http://10.66.66.66/denied.html
+
+invalid request response:
+
+$ nc 8.8.8.8 80 (DONE)
+hjdashjkdsahjkdsa
+HTTP/1.0 400 Bad Request
+Server: squid/2.6.STABLE21
+Date: Sat, 23 Jul 2011 02:22:44 GMT
+Content-Type: text/html
+Content-Length: 1178
+Expires: Sat, 23 Jul 2011 02:22:44 GMT
+X-Squid-Error: ERR_INVALID_REQ 0
+X-Cache: MISS from cache_server
+X-Cache-Lookup: NONE from cache_server:3128
+Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
+Proxy-Connection: close
+
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<HTML><HEAD><META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
+<TITLE>ERROR: The requested URL could not be retrieved</TITLE>
+<STYLE type="text/css"><!--BODY{background-color:#ffffff;font-family:verdana,sans-serif}PRE{font-family:sans-serif}--></STYLE>
+</HEAD><BODY>
+<H1>ERROR</H1>
+<H2>The requested URL could not be retrieved</H2>
+<HR noshade size="1px">
+<P>
+While trying to process the request:
+<PRE>
+hjdashjkdsahjkdsa
+
+</PRE>
+<P>
+The following error was encountered:
+<UL>
+<LI>
+<STRONG>
+Invalid Request
+</STRONG>
+</UL>
+
+<P>
+Some aspect of the HTTP Request is invalid. Possible problems:
+<UL>
+<LI>Missing or unknown request method
+<LI>Missing URL
+<LI>Missing HTTP Identifier (HTTP/1.0)
+<LI>Request is too large
+<LI>Content-Length missing for POST or PUT requests
+<LI>Illegal character in hostname; underscores are not allowed
+</UL>
+<P>Your cache administrator is <A HREF="mailto:root">root</A>.
+
+<BR clear="all">
+<HR noshade size="1px">
+<ADDRESS>
+Generated Sat, 23 Jul 2011 02:22:44 GMT by cache_server (squid/2.6.STABLE21)
+</ADDRESS>
+</BODY></HTML>
+
+nc 10.66.66.66 80
+GET cache_object://localhost/info HTTP/1.0
+HTTP/1.0 403 Forbidden
+Server: squid/2.6.STABLE21
+Date: Sat, 23 Jul 2011 02:25:56 GMT
+Content-Type: text/html
+Content-Length: 1061
+Expires: Sat, 23 Jul 2011 02:25:56 GMT
+X-Squid-Error: ERR_ACCESS_DENIED 0
+X-Cache: MISS from cache_server
+X-Cache-Lookup: NONE from cache_server:3128
+Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
+Proxy-Connection: close
+
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<HTML><HEAD><META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
+<TITLE>ERROR: The requested URL could not be retrieved</TITLE>
+<STYLE type="text/css"><!--BODY{background-color:#ffffff;font-family:verdana,sans-serif}PRE{font-family:sans-serif}--></STYLE>
+</HEAD><BODY>
+<H1>ERROR</H1>
+<H2>The requested URL could not be retrieved</H2>
+<HR noshade size="1px">
+<P>
+While trying to retrieve the URL:
+<A HREF="cache_object://localhost/info">cache_object://localhost/info</A>
+<P>
+The following error was encountered:
+<UL>
+<LI>
+<STRONG>
+Access Denied.
+</STRONG>
+<P>
+Access control configuration prevents your request from
+being allowed at this time. Please contact your service provider if
+you feel this is incorrect.
+</UL>
+<P>Your cache administrator is <A HREF="mailto:root">root</A>.
+
+
+<BR clear="all">
+<HR noshade size="1px">
+<ADDRESS>
+Generated Sat, 23 Jul 2011 02:25:56 GMT by cache_server (squid/2.6.STABLE21)
+</ADDRESS>
+</BODY></HTML>
+
+
diff --git a/to-be-ported/old-api/.ropeproject/config.py b/to-be-ported/old-api/.ropeproject/config.py
new file mode 100644
index 0000000..ffebcd4
--- /dev/null
+++ b/to-be-ported/old-api/.ropeproject/config.py
@@ -0,0 +1,85 @@
+# The default ``config.py``
+
+
+def set_prefs(prefs):
+ """This function is called before opening the project"""
+
+ # Specify which files and folders to ignore in the project.
+ # Changes to ignored resources are not added to the history and
+ # VCSs. Also they are not returned in `Project.get_files()`.
+ # Note that ``?`` and ``*`` match all characters but slashes.
+ # '*.pyc': matches 'test.pyc' and 'pkg/test.pyc'
+ # 'mod*.pyc': matches 'test/mod1.pyc' but not 'mod/1.pyc'
+ # '.svn': matches 'pkg/.svn' and all of its children
+ # 'build/*.o': matches 'build/lib.o' but not 'build/sub/lib.o'
+ # 'build//*.o': matches 'build/lib.o' and 'build/sub/lib.o'
+ prefs['ignored_resources'] = ['*.pyc', '*~', '.ropeproject',
+ '.hg', '.svn', '_svn', '.git']
+
+ # Specifies which files should be considered python files. It is
+ # useful when you have scripts inside your project. Only files
+ # ending with ``.py`` are considered to be python files by
+ # default.
+ #prefs['python_files'] = ['*.py']
+
+ # Custom source folders: By default rope searches the project
+ # for finding source folders (folders that should be searched
+ # for finding modules). You can add paths to that list. Note
+ # that rope guesses project source folders correctly most of the
+ # time; use this if you have any problems.
+ # The folders should be relative to project root and use '/' for
+ # separating folders regardless of the platform rope is running on.
+ # 'src/my_source_folder' for instance.
+ #prefs.add('source_folders', 'src')
+
+ # You can extend python path for looking up modules
+ #prefs.add('python_path', '~/python/')
+
+ # Should rope save object information or not.
+ prefs['save_objectdb'] = True
+ prefs['compress_objectdb'] = False
+
+ # If `True`, rope analyzes each module when it is being saved.
+ prefs['automatic_soa'] = True
+ # The depth of calls to follow in static object analysis
+ prefs['soa_followed_calls'] = 0
+
+ # If `False` when running modules or unit tests "dynamic object
+ # analysis" is turned off. This makes them much faster.
+ prefs['perform_doa'] = True
+
+ # Rope can check the validity of its object DB when running.
+ prefs['validate_objectdb'] = True
+
+ # How many undos to hold?
+ prefs['max_history_items'] = 32
+
+ # Shows whether to save history across sessions.
+ prefs['save_history'] = True
+ prefs['compress_history'] = False
+
+ # Set the number spaces used for indenting. According to
+ # :PEP:`8`, it is best to use 4 spaces. Since most of rope's
+ # unit-tests use 4 spaces it is more reliable, too.
+ prefs['indent_size'] = 4
+
+ # Builtin and c-extension modules that are allowed to be imported
+ # and inspected by rope.
+ prefs['extension_modules'] = []
+
+ # Add all standard c-extensions to extension_modules list.
+ prefs['import_dynload_stdmods'] = True
+
+ # If `True` modules with syntax errors are considered to be empty.
+ # The default value is `False`; When `False` syntax errors raise
+ # `rope.base.exceptions.ModuleSyntaxError` exception.
+ prefs['ignore_syntax_errors'] = False
+
+ # If `True`, rope ignores unresolvable imports. Otherwise, they
+ # appear in the importing namespace.
+ prefs['ignore_bad_imports'] = False
+
+
+def project_opened(project):
+ """This function is called after opening the project"""
+ # Do whatever you like here!
diff --git a/to-be-ported/old-api/.ropeproject/globalnames b/to-be-ported/old-api/.ropeproject/globalnames
new file mode 100644
index 0000000..2877ef5
Binary files /dev/null and b/to-be-ported/old-api/.ropeproject/globalnames differ
diff --git a/to-be-ported/old-api/.ropeproject/history b/to-be-ported/old-api/.ropeproject/history
new file mode 100644
index 0000000..fcd9c96
--- /dev/null
+++ b/to-be-ported/old-api/.ropeproject/history
@@ -0,0 +1 @@
+]q(]q]qe.
\ No newline at end of file
diff --git a/to-be-ported/old-api/.ropeproject/objectdb b/to-be-ported/old-api/.ropeproject/objectdb
new file mode 100644
index 0000000..f276839
Binary files /dev/null and b/to-be-ported/old-api/.ropeproject/objectdb differ
diff --git a/to-be-ported/old-api/TESTS_ARE_MOVING.txt b/to-be-ported/old-api/TESTS_ARE_MOVING.txt
new file mode 100644
index 0000000..f4c0084
--- /dev/null
+++ b/to-be-ported/old-api/TESTS_ARE_MOVING.txt
@@ -0,0 +1,8 @@
+7/10/2012
+
+All new tests will be moved to the directory /nettests/.
+
+Tests that are in this directory are either here for historical reasons or have
+not yet been properly tested and do not yet fully support the new API.
+
+A.
diff --git a/to-be-ported/old-api/chinatrigger.py b/to-be-ported/old-api/chinatrigger.py
new file mode 100644
index 0000000..cf4bcb3
--- /dev/null
+++ b/to-be-ported/old-api/chinatrigger.py
@@ -0,0 +1,140 @@
+import random
+import string
+import struct
+import time
+
+from zope.interface import implements
+from twisted.python import usage
+from twisted.plugin import IPlugin
+from twisted.internet import protocol, defer
+from ooni.plugoo.tests import ITest, OONITest
+from ooni.plugoo.assets import Asset
+from ooni.utils import log
+from ooni.protocols.scapyproto import ScapyTest
+
+from ooni.lib.txscapy import txsr, txsend
+
+class scapyArgs(usage.Options):
+ optParameters = [['dst', 'd', None, 'Specify the target address'],
+ ['port', 'p', None, 'Specify the target port'],
+ ['pcap', 'f', None, 'The pcap file to write with the sent and received packets'],
+ ]
+
+class ChinaTriggerTest(ScapyTest):
+ """
+ This test is an OONI-based implementation of the C tool written
+ by Philipp Winter to engage Chinese probes in active scanning.
+
+ Example of running it:
+ ./ooni/ooniprobe.py chinatrigger -d 127.0.0.1 -p 8080 -f bla.pcap
+ """
+ implements(IPlugin, ITest)
+
+ shortName = "chinatrigger"
+ description = "Triggers the Chinese probes into scanning"
+ requirements = ['root']
+ options = scapyArgs
+ blocking = False
+
+ receive = True
+ pcapfile = 'example_scapy.pcap'
+ timeout = 5
+
+ def initialize(self, reactor=None):
+ if not self.reactor:
+ from twisted.internet import reactor
+ self.reactor = reactor
+
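+ # The hard-coded offsets in the three helpers below index into the static
+ # TLS ClientHello assembled in build_packets(): bytes 11-14 hold the
+ # gmt_unix_time field, bytes 15-42 the 28 random bytes, and bytes 121-136
+ # the 16-character random label of the SNI hostname (www.<label>.com).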
+ @staticmethod
+ def set_random_servername(pkt):
+ ret = pkt[:121]
+ for i in range(16):
+ ret += random.choice(string.ascii_lowercase)
+ ret += pkt[121+16:]
+ return ret
+
+ @staticmethod
+ def set_random_time(pkt):
+ ret = pkt[:11]
+ ret += struct.pack('!I', int(time.time()))
+ ret += pkt[11+4:]
+ return ret
+
+ @staticmethod
+ def set_random_field(pkt):
+ ret = pkt[:15]
+ for i in range(28):
+ ret += chr(random.randint(0, 255))
+ ret += pkt[15+28:]
+ return ret
+
+ @staticmethod
+ def mutate(pkt, idx):
+ """
+ Slightly changed mutate function: return pkt with the byte at index
+ idx replaced by a different random byte.
+ """
+ ret = pkt[:idx]
+ mutation = chr(random.randint(0, 255))
+ while mutation == pkt[idx]:
+ mutation = chr(random.randint(0, 255))
+ ret += mutation
+ ret += pkt[idx+1:]
+ return ret
+
+ @staticmethod
+ def set_all_random_fields(pkt):
+ pkt = ChinaTriggerTest.set_random_servername(pkt)
+ pkt = ChinaTriggerTest.set_random_time(pkt)
+ pkt = ChinaTriggerTest.set_random_field(pkt)
+ return pkt
+
+ def build_packets(self, *args, **kw):
+ """
+ Override this method to build scapy packets.
+ """
+ from scapy.all import IP, TCP
+ pkt = "\x16\x03\x01\x00\xcc\x01\x00\x00\xc8"\
+ "\x03\x01\x4f\x12\xe5\x63\x3f\xef\x7d"\
+ "\x20\xb9\x94\xaa\x04\xb0\xc1\xd4\x8c"\
+ "\x50\xcd\xe2\xf9\x2f\xa9\xfb\x78\xca"\
+ "\x02\xa8\x73\xe7\x0e\xa8\xf9\x00\x00"\
+ "\x3a\xc0\x0a\xc0\x14\x00\x39\x00\x38"\
+ "\xc0\x0f\xc0\x05\x00\x35\xc0\x07\xc0"\
+ "\x09\xc0\x11\xc0\x13\x00\x33\x00\x32"\
+ "\xc0\x0c\xc0\x0e\xc0\x02\xc0\x04\x00"\
+ "\x04\x00\x05\x00\x2f\xc0\x08\xc0\x12"\
+ "\x00\x16\x00\x13\xc0\x0d\xc0\x03\xfe"\
+ "\xff\x00\x0a\x00\xff\x01\x00\x00\x65"\
+ "\x00\x00\x00\x1d\x00\x1b\x00\x00\x18"\
+ "\x77\x77\x77\x2e\x67\x6e\x6c\x69\x67"\
+ "\x78\x7a\x70\x79\x76\x6f\x35\x66\x76"\
+ "\x6b\x64\x2e\x63\x6f\x6d\x00\x0b\x00"\
+ "\x04\x03\x00\x01\x02\x00\x0a\x00\x34"\
+ "\x00\x32\x00\x01\x00\x02\x00\x03\x00"\
+ "\x04\x00\x05\x00\x06\x00\x07\x00\x08"\
+ "\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00"\
+ "\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11"\
+ "\x00\x12\x00\x13\x00\x14\x00\x15\x00"\
+ "\x16\x00\x17\x00\x18\x00\x19\x00\x23"\
+ "\x00\x00"
+
+ pkt = ChinaTriggerTest.set_all_random_fields(pkt)
+ pkts = [IP(dst=self.dst)/TCP(dport=self.port)/pkt]
+ for x in range(len(pkt)):
+ mutation = IP(dst=self.dst)/TCP(dport=self.port)/ChinaTriggerTest.mutate(pkt, x)
+ pkts.append(mutation)
+ return pkts
+
+ def load_assets(self):
+ if self.local_options:
+ self.dst = self.local_options['dst']
+ self.port = int(self.local_options['port'])
+ if self.local_options['pcap']:
+ self.pcapfile = self.local_options['pcap']
+ if not self.port or not self.dst:
+ pass
+
+ return {}
+
+#chinatrigger = ChinaTriggerTest(None, None, None)
+
diff --git a/to-be-ported/old-api/daphn3.py b/to-be-ported/old-api/daphn3.py
new file mode 100644
index 0000000..bf4d60d
--- /dev/null
+++ b/to-be-ported/old-api/daphn3.py
@@ -0,0 +1,152 @@
+"""
+This is a self-generated test created by scaffolding.py.
+You will need to fill it in with all your necessities.
+Safe hacking :).
+"""
+from zope.interface import implements
+from twisted.python import usage
+from twisted.plugin import IPlugin
+from twisted.internet import protocol, endpoints
+
+from ooni.plugoo import reports
+from ooni.plugoo.tests import ITest, OONITest
+from ooni.plugoo.assets import Asset
+from ooni.protocols import daphn3
+from ooni.utils import log
+
+class Daphn3ClientProtocol(daphn3.Daphn3Protocol):
+ def connectionMade(self):
+ self.next_state()
+
+class Daphn3ClientFactory(protocol.ClientFactory):
+ protocol = Daphn3ClientProtocol
+ mutator = None
+ steps = None
+ test = None
+
+ def buildProtocol(self, addr):
+ p = self.protocol()
+ p.factory = self
+ p.test = self.test
+
+ if self.steps:
+ p.steps = self.steps
+
+ if not self.mutator:
+ self.mutator = daphn3.Mutator(p.steps)
+
+ else:
+ print "Moving on to next mutation"
+ self.mutator.next()
+
+ p.mutator = self.mutator
+ p.current_state = self.mutator.state()
+ return p
+
+ def clientConnectionFailed(self, reason):
+ print "We failed connecting to the OONIB"
+ print "Cannot perform test. Perhaps it got blocked?"
+ print "Please report this to tor-assistants(a)torproject.org"
+ self.test.result['error'] = ('Failed in connecting to OONIB', reason)
+ self.test.end()
+
+ def clientConnectionLost(self, reason):
+ print "Connection Lost."
+
+class daphn3Args(usage.Options):
+ optParameters = [['pcap', 'f', None,
+ 'PCAP to read for generating the YAML output'],
+
+ ['output', 'o', 'daphn3.yaml',
+ 'What file should be written'],
+
+ ['yaml', 'y', None,
+ 'The input file to the test'],
+
+ ['host', 'h', None, 'Target Hostname'],
+ ['port', 'p', None, 'Target port number'],
+ ['resume', 'r', 0, 'Resume at this index']]
+
+class daphn3Test(OONITest):
+ implements(IPlugin, ITest)
+
+ shortName = "daphn3"
+ description = "daphn3"
+ requirements = None
+ options = daphn3Args
+ blocking = False
+
+ local_options = None
+
+ steps = None
+
+ def initialize(self):
+ if not self.local_options:
+ self.end()
+ return
+
+ self.factory = Daphn3ClientFactory()
+ self.factory.test = self
+
+ if self.local_options['pcap']:
+ self.tool = True
+
+ elif self.local_options['yaml']:
+ self.steps = daphn3.read_yaml(self.local_options['yaml'])
+
+ else:
+ log.msg("Not enough inputs specified to the test")
+ self.end()
+
+ def runTool(self):
+ import yaml
+ pcap = daphn3.read_pcap(self.local_options['pcap'])
+ f = open(self.local_options['output'], 'w')
+ f.write(yaml.dump(pcap))
+ f.close()
+
+ def control(self, exp_res, args):
+ try:
+ mutation = self.factory.mutator.get(0)
+ self.result['censored'] = False
+ except:
+ mutation = None
+
+ return {'mutation_number': args['mutation'],
+ 'value': mutation}
+
+ def _failure(self, *argc, **kw):
+ self.result['censored'] = True
+ self.result['error'] = ('Failed in connecting', (argc, kw))
+ self.end()
+
+ def experiment(self, args):
+ log.msg("Doing mutation %s" % args['mutation'])
+ self.factory.steps = self.steps
+ host = self.local_options['host']
+ port = int(self.local_options['port'])
+ log.msg("Connecting to %s:%s" % (host, port))
+
+ if self.ended:
+ return
+
+ endpoint = endpoints.TCP4ClientEndpoint(self.reactor, host, port)
+ d = endpoint.connect(self.factory)
+ d.addErrback(self._failure)
+ return d
+
+ def load_assets(self):
+ if not self.local_options:
+ return {}
+ if not self.steps:
+ print "Error: No assets!"
+ self.end()
+ return {}
+ mutations = 0
+ for x in self.steps:
+ mutations += len(x['data'])
+ return {'mutation': range(mutations)}
+
+# We need to instantiate it otherwise getPlugins does not detect it
+# XXX Find a way to load plugins without instantiating them.
+#daphn3test = daphn3Test(None, None, None)
diff --git a/to-be-ported/old-api/domclass.py b/to-be-ported/old-api/domclass.py
new file mode 100644
index 0000000..3080c40
--- /dev/null
+++ b/to-be-ported/old-api/domclass.py
@@ -0,0 +1,216 @@
+#!/usr/bin/env python
+#-*- encoding: utf-8 -*-
+#
+# domclass
+# ********
+#
+# :copyright: (c) 2012 by Arturo Filastò
+# :license: see LICENSE for more details.
+#
+# how this works
+# --------------
+#
+# This classifier uses the DOM structure of a website to determine how similar
+# the two sites are.
+# The procedure we use is the following:
+# * First we parse the whole DOM tree of the web page and build a list of
+# TAG parent child relationships (ex. <html><a><b></b></a><c></c></html> =>
+# (html, a), (a, b), (html, c)).
+#
+# * We then use this information to build a matrix (M) where m[i][j] = P(of
+# transitioning from tag[i] to tag[j]). If tag[i] does not exist, P() = 0.
+# Note: M is a square matrix that is number_of_tags wide.
+#
+# * We then calculate the eigenvectors (v_i) and eigenvalues (e) of M.
+#
+# * The correlation between page A and B is given via this formula:
+# correlation = dot_product(e_A, e_B), where e_A and e_B are
+# respectively the eigenvalues for the probability matrix A and the
+# probability matrix B.
+#
+
+try:
+ import numpy
+except:
+ print "Error numpy not installed!"
+
+import yaml
+from zope.interface import implements
+from twisted.python import usage
+from twisted.plugin import IPlugin
+from ooni.plugoo.tests import ITest, OONITest
+from ooni.plugoo.assets import Asset
+from ooni.utils import log
+from ooni.protocols.http import HTTPTest
+
+class domclassArgs(usage.Options):
+ optParameters = [['output', 'o', None, 'Output to write'],
+ ['file', 'f', None, 'Corpus file'],
+ ['fileb', 'b', None, 'Corpus file'],
+ ['urls', 'u', None, 'URL List'],
+ ['resume', 'r', 0, 'Resume at this index']]
+
+# All HTML4 tags
+# XXX add link to W3C page where these came from
+alltags = ['A', 'ABBR', 'ACRONYM', 'ADDRESS', 'APPLET', 'AREA', 'B', 'BASE',
+ 'BASEFONT', 'BD', 'BIG', 'BLOCKQUOTE', 'BODY', 'BR', 'BUTTON', 'CAPTION',
+ 'CENTER', 'CITE', 'CODE', 'COL', 'COLGROUP', 'DD', 'DEL', 'DFN', 'DIR', 'DIV',
+ 'DL', 'DT', 'E M', 'FIELDSET', 'FONT', 'FORM', 'FRAME', 'FRAMESET', 'H1', 'H2',
+ 'H3', 'H4', 'H5', 'H6', 'HEAD', 'HR', 'HTML', 'I', 'IFRAME ', 'IMG',
+ 'INPUT', 'INS', 'ISINDEX', 'KBD', 'LABEL', 'LEGEND', 'LI', 'LINK', 'MAP',
+ 'MENU', 'META', 'NOFRAMES', 'NOSCRIPT', 'OBJECT', 'OL', 'OPTGROUP', 'OPTION',
+ 'P', 'PARAM', 'PRE', 'Q', 'S', 'SAMP', 'SCRIPT', 'SELECT', 'SMALL', 'SPAN',
+ 'STRIKE', 'STRONG', 'STYLE', 'SUB', 'SUP', 'TABLE', 'TBODY', 'TD',
+ 'TEXTAREA', 'TFOOT', 'TH', 'THEAD', 'TITLE', 'TR', 'TT', 'U', 'UL', 'VAR']
+
+# Reduced subset of only the most common tags
+commontags = ['A', 'B', 'BLOCKQUOTE', 'BODY', 'BR', 'BUTTON', 'CAPTION',
+ 'CENTER', 'CITE', 'CODE', 'COL', 'DD', 'DIV',
+ 'DL', 'DT', 'EM', 'FIELDSET', 'FONT', 'FORM', 'FRAME', 'FRAMESET', 'H1', 'H2',
+ 'H3', 'H4', 'H5', 'H6', 'HEAD', 'HR', 'HTML', 'IFRAME ', 'IMG',
+ 'INPUT', 'INS', 'LABEL', 'LEGEND', 'LI', 'LINK', 'MAP',
+ 'MENU', 'META', 'NOFRAMES', 'NOSCRIPT', 'OBJECT', 'OL', 'OPTION',
+ 'P', 'PRE', 'SCRIPT', 'SELECT', 'SMALL', 'SPAN',
+ 'STRIKE', 'STRONG', 'STYLE', 'SUB', 'SUP', 'TABLE', 'TBODY', 'TD',
+ 'TEXTAREA', 'TFOOT', 'TH', 'THEAD', 'TITLE', 'TR', 'TT', 'U', 'UL']
+
+# The tags we are interested in using for our analysis
+thetags = ['A', 'DIV', 'FRAME', 'H1', 'H2',
+ 'H3', 'H4', 'IFRAME ', 'INPUT',
+ 'LABEL','LI', 'P', 'SCRIPT', 'SPAN',
+ 'STYLE', 'TR']
+
+def compute_probability_matrix(dataset):
+ """
+ Compute the probability matrix based on the input dataset.
+
+ :dataset: an array of pairs representing the parent child relationships.
+ """
+ import itertools
+ ret = {}
+ matrix = numpy.zeros((len(thetags) + 1, len(thetags) + 1))
+
+ for data in dataset:
+ x = data[0].upper()
+ y = data[1].upper()
+ try:
+ x = thetags.index(x)
+ except:
+ x = len(thetags)
+
+ try:
+ y = thetags.index(y)
+ except:
+ y = len(thetags)
+
+ matrix[x,y] += 1
+
+ for x in xrange(len(thetags) + 1):
+ possibilities = 0
+ for y in matrix[x]:
+ possibilities += y
+
+ for i in xrange(len(matrix[x])):
+ if possibilities != 0:
+ matrix[x][i] = matrix[x][i]/possibilities
+
+ return matrix
+
+def compute_eigenvalues(matrix):
+ """
+ Returns the eigenvalues of the supplied square matrix.
+
+ :matrix: must be a square matrix and diagonalizable.
+ """
+ return numpy.linalg.eigvals(matrix)
+
+def readDOM(content=None, filename=None):
+ """
+ Parses the DOM of the HTML page and returns an array of parent, child
+ pairs.
+
+ :content: the content of the HTML page to be read.
+
+ :filename: the filename to be read from for getting the content of the
+ page.
+ """
+ from bs4 import BeautifulSoup
+
+ if filename:
+ f = open(filename)
+ content = ''.join(f.readlines())
+ f.close()
+
+ dom = BeautifulSoup(content)
+ couples = []
+ for x in dom.findAll():
+ couples.append((str(x.parent.name), str(x.name)))
+
+ return couples
+
+class domclassTest(HTTPTest):
+ implements(IPlugin, ITest)
+
+ shortName = "domclass"
+ description = "domclass"
+ requirements = None
+ options = domclassArgs
+ blocking = False
+
+ follow_redirects = True
+ #tool = True
+
+ def runTool(self):
+ site_a = readDOM(filename=self.local_options['file'])
+ site_b = readDOM(filename=self.local_options['fileb'])
+ a = {}
+ a['matrix'] = compute_probability_matrix(site_a)
+ a['eigen'] = compute_eigenvalues(a['matrix'])
+
+ self.result['eigenvalues'] = a['eigen']
+ b = {}
+ b['matrix'] = compute_probability_matrix(site_b)
+ b['eigen'] = compute_eigenvalues(b['matrix'])
+
+ #print "A: %s" % a
+ #print "B: %s" % b
+ correlation = numpy.vdot(a['eigen'],b['eigen'])
+ correlation /= numpy.linalg.norm(a['eigen'])*numpy.linalg.norm(b['eigen'])
+ correlation = (correlation + 1)/2
+ print "Correlation: %s" % correlation
+ self.end()
+ return a
+
+ def processResponseBody(self, data):
+ site_a = readDOM(data)
+ #site_b = readDOM(self.local_options['fileb'])
+ a = {}
+ a['matrix'] = compute_probability_matrix(site_a)
+ a['eigen'] = compute_eigenvalues(a['matrix'])
+
+
+ if len(data) == 0:
+ self.result['eigenvalues'] = None
+ self.result['matrix'] = None
+ else:
+ self.result['eigenvalues'] = a['eigen']
+ #self.result['matrix'] = a['matrix']
+ #self.result['content'] = data[:200]
+ #b = compute_matrix(site_b)
+ print "A: %s" % a
+ return a['eigen']
+
+ def load_assets(self):
+ if self.local_options:
+ if self.local_options['file']:
+ self.tool = True
+ return {}
+ elif self.local_options['urls']:
+ return {'url': Asset(self.local_options['urls'])}
+ else:
+ self.end()
+ return {}
+ else:
+ return {}
+
+#domclass = domclassTest(None, None, None)
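+
+# Rough usage sketch of the helpers above (the filenames are placeholders):
+#
+#   site_a = readDOM(filename='page_a.html')
+#   site_b = readDOM(filename='page_b.html')
+#   eigen_a = compute_eigenvalues(compute_probability_matrix(site_a))
+#   eigen_b = compute_eigenvalues(compute_probability_matrix(site_b))
+#   correlation = numpy.vdot(eigen_a, eigen_b)
+#   correlation /= numpy.linalg.norm(eigen_a) * numpy.linalg.norm(eigen_b)
+#   correlation = (correlation + 1) / 2   # rescaled into [0, 1]; higher is more similar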
diff --git a/to-be-ported/old-api/dropin.cache b/to-be-ported/old-api/dropin.cache
new file mode 100755
index 0000000..65c2187
--- /dev/null
+++ b/to-be-ported/old-api/dropin.cache
@@ -0,0 +1,243 @@
+(dp1
+S'tcpconnect'
+p2
+ccopy_reg
+_reconstructor
+p3
+(ctwisted.plugin
+CachedDropin
+p4
+c__builtin__
+object
+p5
+NtRp6
+(dp7
+S'moduleName'
+p8
+S'ooni.plugins.tcpconnect'
+p9
+sS'description'
+p10
+S'\nThis is a self genrated test created by scaffolding.py.\nyou will need to fill it up with all your necessities.\nSafe hacking :).\n'
+p11
+sS'plugins'
+p12
+(lp13
+g3
+(ctwisted.plugin
+CachedPlugin
+p14
+g5
+NtRp15
+(dp16
+S'provided'
+p17
+(lp18
+ctwisted.plugin
+IPlugin
+p19
+acooni.plugoo.interface
+ITest
+p20
+asS'dropin'
+p21
+g6
+sS'name'
+p22
+S'tcpconnect'
+p23
+sg10
+NsbasbsS'domclass'
+p24
+g3
+(g4
+g5
+NtRp25
+(dp26
+g8
+S'ooni.plugins.domclass'
+p27
+sg10
+Nsg12
+(lp28
+g3
+(g14
+g5
+NtRp29
+(dp30
+g17
+(lp31
+g19
+ag20
+asg21
+g25
+sg22
+S'domclass'
+p32
+sg10
+NsbasbsS'bridget'
+p33
+g3
+(g4
+g5
+NtRp34
+(dp35
+g8
+S'ooni.plugins.bridget'
+p36
+sg10
+Nsg12
+(lp37
+g3
+(g14
+g5
+NtRp38
+(dp39
+g17
+(lp40
+g19
+ag20
+asg21
+g34
+sg22
+S'bridget'
+p41
+sg10
+S"\n XXX fill me in\n\n :ivar config:\n An :class:`ooni.lib.txtorcon.TorConfig` instance.\n :ivar relays:\n A list of all provided relays to test.\n :ivar bridges:\n A list of all provided bridges to test.\n :ivar socks_port:\n Integer for Tor's SocksPort.\n :ivar control_port:\n Integer for Tor's ControlPort.\n :ivar transport:\n String defining the Tor's ClientTransportPlugin, for testing \n a bridge's pluggable transport functionality.\n :ivar tor_binary:\n Path to the Tor binary to use, e.g. '/usr/sbin/tor'\n "
+p42
+sbasbsS'daphn3'
+p43
+g3
+(g4
+g5
+NtRp44
+(dp45
+g8
+S'plugins.daphn3'
+p46
+sg10
+S'\nThis is a self genrated test created by scaffolding.py.\nyou will need to fill it up with all your necessities.\nSafe hacking :).\n'
+p47
+sg12
+(lp48
+g3
+(g14
+g5
+NtRp49
+(dp50
+g17
+(lp51
+g19
+ag20
+asg21
+g44
+sg22
+S'daphn3test'
+p52
+sg10
+NsbasbsS'httpt'
+p53
+g3
+(g4
+g5
+NtRp54
+(dp55
+g8
+S'ooni.plugins.httpt'
+p56
+sg10
+S'\nThis is a self genrated test created by scaffolding.py.\nyou will need to fill it up with all your necessities.\nSafe hacking :).\n'
+p57
+sg12
+(lp58
+sbsS'chinatrigger'
+p59
+g3
+(g4
+g5
+NtRp60
+(dp61
+g8
+S'plugins.chinatrigger'
+p62
+sg10
+Nsg12
+(lp63
+g3
+(g14
+g5
+NtRp64
+(dp65
+g17
+(lp66
+g19
+ag20
+asg21
+g60
+sg22
+S'chinatrigger'
+p67
+sg10
+S'\n This test is a OONI based implementation of the C tool written\n by Philipp Winter to engage chinese probes in active scanning.\n\n Example of running it:\n ./ooni/ooniprobe.py chinatrigger -d 127.0.0.1 -p 8080 -f bla.pcap\n '
+p68
+sbasbsS'dnstamper'
+p69
+g3
+(g4
+g5
+NtRp70
+(dp71
+g8
+S'ooni.plugins.dnstamper'
+p72
+sg10
+S'\n dnstamper\n *********\n\n This test resolves DNS for a list of domain names, one per line, in the\n file specified in the ooni-config under the setting "dns_experiment". If\n the file is top-1m.txt, the test will be run using Amazon\'s list of top\n one million domains. The experimental dns servers to query should\n be specified one per line in assets/dns_servers.txt.\n\n The test reports censorship if the cardinality of the intersection of\n the query result set from the control server and the query result set\n from the experimental server is zero, which is to say, if the two sets\n have no matching results whatsoever.\n\n NOTE: This test frequently results in false positives due to GeoIP-based\n load balancing on major global sites such as google, facebook, and\n youtube, etc.\n\n :author: Isis Lovecruft, Arturo Filast\xc3\xb2\n :license: see LICENSE for more details\n\n TODO:\n * Finish porting to twisted\n
* Finish the client.Resolver() subclass and test it\n * Use the DNS tests from captiveportal\n * Use plugoo/reports.py for final data\n'
+p73
+sg12
+(lp74
+g3
+(g14
+g5
+NtRp75
+(dp76
+g17
+(lp77
+g19
+ag20
+asg21
+g70
+sg22
+S'dnstamper'
+p78
+sg10
+S'\n XXX fill me in\n '
+p79
+sbasbsS'blocking'
+p80
+g3
+(g4
+g5
+NtRp81
+(dp82
+g8
+S'plugins.blocking'
+p83
+sg10
+Nsg12
+(lp84
+g3
+(g14
+g5
+NtRp85
+(dp86
+g17
+(lp87
+g19
+ag20
+asg21
+g81
+sg22
+S'blocking'
+p88
+sg10
+Nsbasbs.
\ No newline at end of file
diff --git a/to-be-ported/old-api/httpt.py b/to-be-ported/old-api/httpt.py
new file mode 100644
index 0000000..358f1ea
--- /dev/null
+++ b/to-be-ported/old-api/httpt.py
@@ -0,0 +1,94 @@
+"""
+This is a self-generated test created by scaffolding.py.
+You will need to fill it in with all your necessities.
+Safe hacking :).
+"""
+from zope.interface import implements
+from twisted.python import usage
+from twisted.plugin import IPlugin
+from ooni.plugoo.tests import ITest, OONITest
+from ooni.plugoo.assets import Asset
+from ooni.protocols import http
+from ooni.utils import log
+
+class httptArgs(usage.Options):
+ optParameters = [['urls', 'f', None, 'Urls file'],
+ ['url', 'u', 'http://torproject.org/', 'Test single site'],
+ ['resume', 'r', 0, 'Resume at this index'],
+ ['rules', 'y', None, 'Specify the redirect rules file']]
+
+class httptTest(http.HTTPTest):
+ implements(IPlugin, ITest)
+
+ shortName = "httpt"
+ description = "httpt"
+ requirements = None
+ options = httptArgs
+ blocking = False
+
+
+ def testPattern(self, value, pattern, type):
+ if type == 'eq':
+ return value == pattern
+ elif type == 're':
+ import re
+ if re.match(pattern, value):
+ return True
+ else:
+ return False
+ else:
+ return None
+
+ def testPatterns(self, patterns, location):
+ test_result = False
+
+ if type(patterns) == list:
+ for pattern in patterns:
+ test_result |= self.testPattern(location, pattern['value'], pattern['type'])
+ else:
+ test_result |= self.testPattern(location, patterns['value'], patterns['type'])
+
+ return test_result
+
+ def testRules(self, rules, location):
+ result = {}
+ blocked = False
+ for rule, value in rules.items():
+ current_rule = {}
+ current_rule['name'] = value['name']
+ current_rule['patterns'] = value['patterns']
+ current_rule['test'] = self.testPatterns(value['patterns'], location)
+ blocked |= current_rule['test']
+ result[rule] = current_rule
+ result['blocked'] = blocked
+ return result
+
+ def processRedirect(self, location):
+ self.result['redirect'] = None
+ try:
+ rules_file = self.local_options['rules']
+ import yaml
+ rules = yaml.load(open(rules_file))
+ log.msg("Testing rules %s" % rules)
+ redirect = self.testRules(rules, location)
+ self.result['redirect'] = redirect
+ except TypeError:
+ log.msg("No rules file. Got a redirect, but nothing to do.")
+
+
+ def control(self, experiment_result, args):
+ print self.response
+ print self.request
+ # What you return here ends up inside of the report.
+ log.msg("Running control")
+ return {}
+
+ def load_assets(self):
+ if self.local_options and self.local_options['urls']:
+ return {'url': Asset(self.local_options['urls'])}
+ else:
+ return {}
+
+# We need to instantiate it otherwise getPlugins does not detect it
+# XXX Find a way to load plugins without instantiating them.
+#httpt = httptTest(None, None, None)
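
The redirect-rule matching above can be tried outside the plugin. Below is a minimal, standalone sketch (Python 3) of the same testPattern/testPatterns logic; the `rules` dictionary is a hypothetical example of the shape the YAML file passed with `--rules` is assumed to deserialize into; it is not a format documented by this commit.

    import re

    # Hypothetical rules, shaped like the dict testRules() iterates over:
    # each rule has a name and a list of patterns with a match type.
    rules = {
        'blockpage': {
            'name': 'Generic block page',
            'patterns': [
                {'type': 'eq', 'value': 'http://10.66.66.66/denied.html'},
                {'type': 're', 'value': r'https?://.*denied.*'},
            ],
        },
    }

    def test_pattern(value, pattern, kind):
        # 'eq' compares literally, 're' treats the pattern as a regex,
        # mirroring httptTest.testPattern() above.
        if kind == 'eq':
            return value == pattern
        if kind == 're':
            return re.match(pattern, value) is not None
        return False

    def test_rules(location):
        result = {'blocked': False}
        for rule_id, rule in rules.items():
            hit = any(test_pattern(location, p['value'], p['type'])
                      for p in rule['patterns'])
            result[rule_id] = {'name': rule['name'], 'test': hit}
            result['blocked'] = result['blocked'] or hit
        return result

    print(test_rules('http://10.66.66.66/denied.html'))
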
diff --git a/to-be-ported/old-api/tcpconnect.py b/to-be-ported/old-api/tcpconnect.py
new file mode 100644
index 0000000..7758a9e
--- /dev/null
+++ b/to-be-ported/old-api/tcpconnect.py
@@ -0,0 +1,65 @@
+"""
+This is a self-generated test created by scaffolding.py.
+You will need to fill it in with everything your test needs.
+Safe hacking :).
+"""
+from zope.interface import implements
+from twisted.python import usage
+from twisted.plugin import IPlugin
+from twisted.internet.protocol import Factory, Protocol
+from twisted.internet.endpoints import TCP4ClientEndpoint
+
+from ooni.plugoo.interface import ITest
+from ooni.plugoo.tests import OONITest
+from ooni.plugoo.assets import Asset
+from ooni.utils import log
+
+class tcpconnectArgs(usage.Options):
+ optParameters = [['asset', 'a', None, 'File containing IP:PORT combinations, one per line.'],
+ ['resume', 'r', 0, 'Resume at this index']]
+
+class tcpconnectTest(OONITest):
+ implements(IPlugin, ITest)
+
+ shortName = "tcpconnect"
+ description = "tcpconnect"
+ requirements = None
+ options = tcpconnectArgs
+ blocking = False
+
+ def experiment(self, args):
+ try:
+ host, port = args['asset'].split(':')
+ except:
+ raise Exception("Error in parsing asset. Wrong format?")
+ class DummyFactory(Factory):
+ def buildProtocol(self, addr):
+ return Protocol()
+
+ def gotProtocol(p):
+ p.transport.loseConnection()
+ log.msg("Got a connection!")
+ log.msg(str(p))
+ return {'result': True, 'target': [host, port]}
+
+ def gotError(err):
+ log.msg("Had error :(")
+ log.msg(err)
+ return {'result': False, 'target': [host, port]}
+
+ # What you return here gets handed as input to control
+ point = TCP4ClientEndpoint(self.reactor, host, int(port))
+ d = point.connect(DummyFactory())
+ d.addCallback(gotProtocol)
+ d.addErrback(gotError)
+ return d
+
+ def load_assets(self):
+ if self.local_options:
+ return {'asset': Asset(self.local_options['asset'])}
+ else:
+ return {}
+
+# We need to instantiate it otherwise getPlugins does not detect it
+# XXX Find a way to load plugins without instantiating them.
+#tcpconnect = tcpconnectTest(None, None, None)
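
The endpoint pattern used by tcpconnectTest.experiment() can be exercised without the OONITest machinery. A minimal sketch (Python 3, assuming Twisted is installed; the host and port are placeholders):

    from twisted.internet import reactor
    from twisted.internet.endpoints import TCP4ClientEndpoint
    from twisted.internet.protocol import Factory, Protocol

    def check(host, port):
        # A bare Protocol is enough; we only care whether the TCP
        # handshake completes, exactly as in the plugin above.
        endpoint = TCP4ClientEndpoint(reactor, host, port)
        d = endpoint.connect(Factory.forProtocol(Protocol))

        def connected(proto):
            proto.transport.loseConnection()
            return {'result': True, 'target': [host, port]}

        def failed(err):
            return {'result': False, 'target': [host, port]}

        return d.addCallbacks(connected, failed)

    if __name__ == '__main__':
        d = check('127.0.0.1', 80)      # placeholder target
        d.addCallback(print)
        d.addBoth(lambda _: reactor.stop())
        reactor.run()
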
diff --git a/to-be-ported/old-api/tcpscan.py b/to-be-ported/old-api/tcpscan.py
new file mode 100644
index 0000000..b371c88
--- /dev/null
+++ b/to-be-ported/old-api/tcpscan.py
@@ -0,0 +1,84 @@
+"""
+ TCP Port Scanner
+ ****************
+
+ Does a TCP connect scan on the IP:port pairs.
+
+"""
+import os
+from gevent import socket
+from datetime import datetime
+import socks
+
+from plugoo.assets import Asset
+from plugoo.tests import Test
+
+__plugoo__ = "TCP Port Scanner"
+__desc__ = "This is a test template to be used to build your own tests"
+
+class TCPScanAsset(Asset):
+ """
+ This is the asset that should be used by the Test. It will
+ contain all the code responsible for parsing the asset file
+ and should be passed on instantiation to the test.
+ """
+ def __init__(self, file=None):
+ Asset.__init__(self, file)
+
+
+class TCPScan(Test):
+ """
+ The main Test class
+ """
+
+ def experiment(self, *a, **kw):
+ """
+ Fill this up with the tasks that should be performed
+ on the "dirty" network and should be compared with the
+ control.
+ """
+ addr = kw['data']
+ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ res = False
+ try:
+ self.logger.debug('Doing a connection to %s' % addr)
+ s.connect((addr.split(':')[0], int(addr.split(':')[1])))
+ res = True
+ except socket.error, msg:
+ self.logger.debug('Connection failed to %s: %s' % (addr, msg))
+
+ finally:
+ s.close()
+
+ return {'Time': datetime.now(),
+ 'Address': addr,
+ 'Status': res}
+
+ def control(self):
+ """
+ Fill this up with the control related code.
+ """
+ return True
+
+def run(ooni, asset=None):
+ """
+ This is the function that will be called by OONI
+ and it is responsible for instantiating and passing
+ the arguments to the Test class.
+ """
+ config = ooni.config
+
+ # This the assets array to be passed to the run function of
+ # the test
+ if asset:
+ assets = [TCPScanAsset(asset)]
+ else:
+ assets = [TCPScanAsset(os.path.join(config.main.assetdir, \
+ "tcpscan.txt"))]
+
+ # Instantiate the Test
+ thetest = TCPScan(ooni)
+ ooni.logger.info("starting TCP Scan...")
+ # Run the test with argument assets
+ thetest.run(assets)
+ ooni.logger.info("finished.")
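
The experiment above reduces to a TCP connect with a timestamped success flag. The same check with only the standard library (Python 3 sketch; the IP:PORT string follows the format read from the asset file, and the target is a placeholder):

    import socket
    from datetime import datetime

    def connect_scan(addr, timeout=5.0):
        # addr is an "IP:PORT" string, as in the asset file.
        host, port = addr.split(':')
        status = False
        try:
            # If connect() succeeds the port is open; any socket error
            # means closed, filtered, or unreachable.
            with socket.create_connection((host, int(port)), timeout=timeout):
                status = True
        except OSError as exc:
            print('Connection failed to %s: %s' % (addr, exc))
        return {'Time': datetime.now(), 'Address': addr, 'Status': status}

    print(connect_scan('127.0.0.1:22'))     # placeholder target
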
diff --git a/to-be-ported/spec/proxooni-spec.txt b/to-be-ported/spec/proxooni-spec.txt
new file mode 100644
index 0000000..7cc476f
--- /dev/null
+++ b/to-be-ported/spec/proxooni-spec.txt
@@ -0,0 +1,65 @@
+
+ Proxyooni specification
+ version 0.0
+ Jacob Appelbaum
+
+0. Preface
+
+ This document describes a new proxy that is required to support ooni-probe.
+
+1. Overview
+
+ There is no common proxy type that thwarts even the most basic traffic
+ monitoring. The Proxyooni specification aims to provide a proxy that is
+ encrypted by default, optionally authenticated, and will provide a way to run
+ specific ooni-probe tests natively on the system where the proxy is running.
+
+2. Implementation
+
+ Proxyooni may be written in any language; the reference implementation will be
+ written in Python. The program shall be called ooni-proxy and it will handle
+ running as either a privileged or an unprivileged user on supported systems. We
+ aim to support ooni-proxy on Debian GNU/Linux as the reference platform.
+
+2.1 Connections
+
+ When ooni-proxy runs, it should open a single port and it will allow TLS 1.0
+ clients to connect with a cipher suite that provides perfect forward secrecy.
+
+2.2 Certificates
+
+ ooni-proxy should use a certificate if supplied or dynamically generate a
+ certificate on startup; any connecting client should bootstrap trust with a
+ TOFU model, a client may ignore the
+
+2.3 Authentication
+
+ ooni-proxy should provide open access by default with no authentication.
+ It should support TLS-PSK[0] if authentication is desired. Key distribution is
+ explicitly an out-of-scope problem.
+
+3.0 Services offered
+
+ Post authentication, a remote client should treat ooni-proxy as a SOCKS4A[1]
+ proxy. It should be possible to chain as many Proxyooni proxies as desired.
+
+3.1 Additional services offered
+
+ ooni-proxy should allow for the sending of raw socket data - this is currently
+ left unspecified. This should be specified in the next revision of the
+ specification.
+
+3.2 Advanced meta-services
+
+ It may be desired to load code on the ooni-proxy from a client with newer
+ tests. This should be specified in the next revision of the specification.
+
+4. Security Concerns
+
+ It is probably not a good idea to run ooni-proxy unless you have permission to
+ do so. Consider your network context carefully; if it is dangerous to run a test,
+ ensure that you do not run it.
+
+[0] http://en.wikipedia.org/wiki/TLS-PSK
+[1] http://en.wikipedia.org/wiki/SOCKS#SOCKS_4a
+
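
Section 2.2's trust-on-first-use bootstrapping can be sketched from the client side with nothing but the standard library. This is an illustration, not part of the specification; the host, port, and pin file name are placeholders, and CA validation is disabled on purpose because the proxy's certificate may be generated at startup:

    import hashlib
    import os
    import socket
    import ssl

    def tofu_connect(host, port, pin_file='ooni-proxy.pin'):
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE     # no CA check: we pin instead
        with socket.create_connection((host, port)) as sock:
            with ctx.wrap_socket(sock, server_hostname=host) as tls:
                der = tls.getpeercert(binary_form=True)
        fingerprint = hashlib.sha256(der).hexdigest()
        if not os.path.exists(pin_file):
            with open(pin_file, 'w') as f:
                f.write(fingerprint)        # first use: remember the certificate
            return True
        with open(pin_file) as f:
            return f.read().strip() == fingerprint   # later uses: must match
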
diff --git a/to-be-ported/very-old/TODO.plgoons b/to-be-ported/very-old/TODO.plgoons
new file mode 100644
index 0000000..ace2a10
--- /dev/null
+++ b/to-be-ported/very-old/TODO.plgoons
@@ -0,0 +1,79 @@
+We should implement the following as plugoons:
+
+dns_plgoo.py - Various DNS checks
+
+As a start - we should perform a known good check against a name or list of
+names. As input, we should take an ip address, a name or a list of names for
+testing; we also take dns servers for experiment or control data. For output we
+emit UDP or TCP packets - we should support proxying these requests when
+possible as is the case with TCP but probably not with UDP for certain DNS
+request types.
+
+http_plgoo.py - Various HTTP checks
+
+We should compare two pages and see if they have identical properties.
+At the very least, we should print the important differences - perhaps
+with diff-like output? We should look for fingerprints in URLs that are
+returned. We should detect 302 redirection.
+
+As input, we should take an ip address, a name or a list of names for testing;
+we also take a list of headers such as random user agent strings and so on.
+We should emit TCP packets and ensure that we do not leak DNS for connections
+that we expect to proxy to a remote network.
+
+latency_plgoo.py - Measure latency for a host or a list of hosts
+
+As input, we should take an ip address, a name or a list of names for testing;
+we should measure the mean latency from the ooni-probe to the host with various
+traceroute tests. We should also measure the latency between the ooni-probe and
+a given server for any other protocol that is request and response oriented;
+HTTP latency may be calculated by simply tracking the delta between requests
+and responses.
+
+tcptrace_plgoo.py udptrace_plgoo.py icmptrace_plgoo.py - Traceroute suites
+
+tcptrace_plgoo.py should allow for both stray and in-connection traceroute
+modes.
+
+udptrace_plgoo.py should use UDP 53 by default; 0 and 123 are also nice options
+- it may also be nice to simply make a random A record request in a DNS packet
+and use it as the payload for a UDP traceroute.
+
+reversetrace_plgoo.py should give a remote host the client's IP and return the
+output of a traceroute to that IP from the remote host. It will need a remote
+component if run against a web server. It would not need a remote component if
+run against route-views - we can simply telnet over Tor and ask it to trace to
+our detected client IP.
+
+keyword_plgoo.py should take a keyword or a list of keywords for use as a
+payload in a variety of protocols. This should be protocol-aware - DNS keyword
+filtering requires a sniffer to catch stray packets after the censor wins the
+race. HTTP payloads in open connections may be similar and, in practice, we'll
+have to fine-tune it.
+
+icsi_plgoo.py - The ICSI Netalyzr tests; we should act as a client for their
+servers. They have dozens of tests and to implement this plgoo, we'll need to
+add many things to ooni. More details here:
+http://netalyzr.icsi.berkeley.edu/faq.html
+http://netalyzr.icsi.berkeley.edu/json/id=example-session
+
+HTML output:
+http://n2.netalyzr.icsi.berkeley.edu/summary/id=43ca208a-3466-82f17207-9bc1-433f-9b43
+
+JSON output:
+http://n2.netalyzr.icsi.berkeley.edu/json/id=43ca208a-3466-82f17207-9bc1-433f-9b43
+
+Netalyzr log:
+http://netalyzr.icsi.berkeley.edu/restore/id=43ca208a-3466-82f17207-9bc1-433f-9b43
+http://n2.netalyzr.icsi.berkeley.edu/transcript/id=43ca208a-3466-82f17207-9bc1-433f-9b43/side=client
+http://n2.netalyzr.icsi.berkeley.edu/transcript/id=43ca208a-3466-82f17207-9bc1-433f-9b43/side=server
+
+sniffer_plgoo.py - We need a generic method for capturing packets during a full
+run - this may be better as a core ooni-probe feature but we should implement
+packet capture in a plugin if it is done nowhere else.
+
+nmap_plgoo.py - We should take a list of hosts and run nmap against each of
+these hosts; many hosts are collected during testing and they should be scanned
+with a reasonable set of options such as
+"-A -O -T4 -sT --top-ports=10000".
+
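
Most of these plgoons boil down to a small request/response loop plus a comparison. As one concrete illustration, the HTTP-latency idea from the latency_plgoo.py entry above (Python 3 sketch, standard library only; the URL is a placeholder):

    import time
    import urllib.request

    def http_latency(url, samples=3):
        # Mean of (first byte received - request sent), i.e. the delta
        # between requests and responses described above.
        deltas = []
        for _ in range(samples):
            start = time.monotonic()
            with urllib.request.urlopen(url, timeout=10) as resp:
                resp.read(1)
            deltas.append(time.monotonic() - start)
        return sum(deltas) / len(deltas)

    print(http_latency('http://example.org/'))
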
diff --git a/to-be-ported/very-old/TO_BE_PORTED b/to-be-ported/very-old/TO_BE_PORTED
new file mode 100644
index 0000000..49ce5e0
--- /dev/null
+++ b/to-be-ported/very-old/TO_BE_PORTED
@@ -0,0 +1,14 @@
+
+The tests in this directory are very old, and have been ported neither to
+Twisted nor to the new twisted.trial API framework. They are, however, not as
+old as the *seriously old* OONI code, which was written two years
+ago.
+
+These tests should be updated at least to use Twisted.
+
+If you want to hack on something carefree, feel free to mess with these files
+because it would be difficult not to improve on them.
+
+<(A)3
+isis
+0x2cdb8b35
diff --git a/to-be-ported/very-old/ooni-probe.diff b/to-be-ported/very-old/ooni-probe.diff
new file mode 100644
index 0000000..fc61d3f
--- /dev/null
+++ b/to-be-ported/very-old/ooni-probe.diff
@@ -0,0 +1,358 @@
+diff --git a/TODO b/TODO
+index c2e19af..51fa559 100644
+--- a/TODO
++++ b/TODO
+@@ -293,3 +293,142 @@ VIA Rail MITM's SSL In Ottawa:
+ Jul 22 17:47:21.983 [Warning] Problem bootstrapping. Stuck at 85%: Finishing handshake with first hop. (DONE; DONE; count 13; recommendation warn)
+
+ http://wireless.colubris.com:81/goform/HtmlLoginRequest?username=al1852&pas…
++
++VIA Rail Via header:
++
++HTTP/1.0 301 Moved Permanently
++Location: http://www.google.com/
++Content-Type: text/html; charset=UTF-8
++Date: Sat, 23 Jul 2011 02:21:30 GMT
++Expires: Mon, 22 Aug 2011 02:21:30 GMT
++Cache-Control: public, max-age=2592000
++Server: gws
++Content-Length: 219
++X-XSS-Protection: 1; mode=block
++X-Cache: MISS from cache_server
++X-Cache-Lookup: MISS from cache_server:3128
++Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
++Connection: close
++
++<HTML><HEAD><meta http-equiv="content-type" content="text/html;charset=utf-8">
++<TITLE>301 Moved</TITLE></HEAD><BODY>
++<H1>301 Moved</H1>
++The document has moved
++<A HREF="http://www.google.com/">here</A>.
++</BODY></HTML>
++
++
++blocked site:
++
++HTTP/1.0 302 Moved Temporarily
++Server: squid/2.6.STABLE21
++Date: Sat, 23 Jul 2011 02:22:17 GMT
++Content-Length: 0
++Location: http://10.66.66.66/denied.html
++
++invalid request response:
++
++$ nc 8.8.8.8 80
++hjdashjkdsahjkdsa
++HTTP/1.0 400 Bad Request
++Server: squid/2.6.STABLE21
++Date: Sat, 23 Jul 2011 02:22:44 GMT
++Content-Type: text/html
++Content-Length: 1178
++Expires: Sat, 23 Jul 2011 02:22:44 GMT
++X-Squid-Error: ERR_INVALID_REQ 0
++X-Cache: MISS from cache_server
++X-Cache-Lookup: NONE from cache_server:3128
++Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
++Proxy-Connection: close
++
++<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
++<HTML><HEAD><META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
++<TITLE>ERROR: The requested URL could not be retrieved</TITLE>
++<STYLE type="text/css"><!--BODY{background-color:#ffffff;font-family:verdana,sans-serif}PRE{font-family:sans-serif}--></STYLE>
++</HEAD><BODY>
++<H1>ERROR</H1>
++<H2>The requested URL could not be retrieved</H2>
++<HR noshade size="1px">
++<P>
++While trying to process the request:
++<PRE>
++hjdashjkdsahjkdsa
++
++</PRE>
++<P>
++The following error was encountered:
++<UL>
++<LI>
++<STRONG>
++Invalid Request
++</STRONG>
++</UL>
++
++<P>
++Some aspect of the HTTP Request is invalid. Possible problems:
++<UL>
++<LI>Missing or unknown request method
++<LI>Missing URL
++<LI>Missing HTTP Identifier (HTTP/1.0)
++<LI>Request is too large
++<LI>Content-Length missing for POST or PUT requests
++<LI>Illegal character in hostname; underscores are not allowed
++</UL>
++<P>Your cache administrator is <A HREF="mailto:root">root</A>.
++
++<BR clear="all">
++<HR noshade size="1px">
++<ADDRESS>
++Generated Sat, 23 Jul 2011 02:22:44 GMT by cache_server (squid/2.6.STABLE21)
++</ADDRESS>
++</BODY></HTML>
++
++nc 10.66.66.66 80
++GET cache_object://localhost/info HTTP/1.0
++HTTP/1.0 403 Forbidden
++Server: squid/2.6.STABLE21
++Date: Sat, 23 Jul 2011 02:25:56 GMT
++Content-Type: text/html
++Content-Length: 1061
++Expires: Sat, 23 Jul 2011 02:25:56 GMT
++X-Squid-Error: ERR_ACCESS_DENIED 0
++X-Cache: MISS from cache_server
++X-Cache-Lookup: NONE from cache_server:3128
++Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
++Proxy-Connection: close
++
++<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
++<HTML><HEAD><META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
++<TITLE>ERROR: The requested URL could not be retrieved</TITLE>
++<STYLE type="text/css"><!--BODY{background-color:#ffffff;font-family:verdana,sans-serif}PRE{font-family:sans-serif}--></STYLE>
++</HEAD><BODY>
++<H1>ERROR</H1>
++<H2>The requested URL could not be retrieved</H2>
++<HR noshade size="1px">
++<P>
++While trying to retrieve the URL:
++<A HREF="cache_object://localhost/info">cache_object://localhost/info</A>
++<P>
++The following error was encountered:
++<UL>
++<LI>
++<STRONG>
++Access Denied.
++</STRONG>
++<P>
++Access control configuration prevents your request from
++being allowed at this time. Please contact your service provider if
++you feel this is incorrect.
++</UL>
++<P>Your cache administrator is <A HREF="mailto:root">root</A>.
++
++
++<BR clear="all">
++<HR noshade size="1px">
++<ADDRESS>
++Generated Sat, 23 Jul 2011 02:25:56 GMT by cache_server (squid/2.6.STABLE21)
++</ADDRESS>
++</BODY></HTML>
++
++
+diff --git a/ooni/command.py b/ooni/command.py
+index 361190f..df1a58c 100644
+--- a/ooni/command.py
++++ b/ooni/command.py
+@@ -13,6 +13,7 @@ import ooni.captive_portal
+ import ooni.namecheck
+ import ooni.dns_poisoning
+ import ooni.dns_cc_check
++import ooni.transparenthttp
+
+ class Command():
+ def __init__(self, args):
+@@ -48,6 +49,15 @@ class Command():
+ help="run captiveportal tests"
+ )
+
++ # --transhttp
++ def cb_transhttp(option, opt, value, oparser):
++ self.action = opt[2:]
++ optparser.add_option(
++ "--transhttp",
++ action="callback", callback=cb_transhttp,
++ help="run Transparent HTTP tests"
++ )
++
+ # --dns
+ def cb_dnstests(option, opt, value, oparser):
+ self.action = opt[2:]
+@@ -122,7 +132,7 @@ class Command():
+ if (not self.action):
+ raise optparse.OptionError(
+ 'is required',
+- '--dns | --dnsbulk | --captiveportal | --help | --version'
++ '--dns | --dnsbulk | --dnscccheck | [ --cc CC ] | --captiveportal | --transhttp | --help | --version'
+ )
+
+ except optparse.OptionError, err:
+@@ -138,6 +148,10 @@ class Command():
+ captive_portal = ooni.captive_portal.CaptivePortal
+ captive_portal(self).main()
+
++ def transhttp(self):
++ transparent_http = ooni.transparenthttp.TransparentHTTPProxy
++ transparent_http(self).main()
++
+ def dns(self):
+ dnstests = ooni.namecheck.DNS
+ dnstests(self).main()
+diff --git a/ooni/dns.py b/ooni/dns.py
+index 95da6ef..90d50bd 100644
+--- a/ooni/dns.py
++++ b/ooni/dns.py
+@@ -8,7 +8,7 @@ from socket import gethostbyname
+ import ooni.common
+
+ # apt-get install python-dns
+-import DNS
++import dns
+ import random
+
+ """ Wrap gethostbyname """
+diff --git a/ooni/http.py b/ooni/http.py
+index 62365bb..bb72001 100644
+--- a/ooni/http.py
++++ b/ooni/http.py
+@@ -7,8 +7,14 @@
+ from socket import gethostbyname
+ import ooni.common
+ import urllib2
++import httplib
++from urlparse import urlparse
++from pprint import pprint
+ import pycurl
++import random
++import string
+ import re
++from BeautifulSoup import BeautifulSoup
+
+ # By default, we'll be Torbutton's UA
+ default_ua = { 'User-Agent' :
+@@ -20,20 +26,8 @@ default_proxy_type = PROXYTYPE_SOCKS5
+ default_proxy_host = "127.0.0.1"
+ default_proxy_port = "9050"
+
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
++#class HTTPResponse(object):
++# def __init__(self):
+
+
+ """A very basic HTTP fetcher that uses Tor by default and returns a curl
+@@ -51,7 +45,7 @@ def http_proxy_fetch(url, headers, proxy_type=5,
+ http_code = getinfo(pycurl.HTTP_CODE)
+ return response, http_code
+
+-"""A very basic HTTP fetcher that returns a urllib3 response object."""
++"""A very basic HTTP fetcher that returns a urllib2 response object."""
+ def http_fetch(url,
+ headers= default_ua,
+ label="generic HTTP fetch"):
+@@ -136,6 +130,76 @@ def http_header_no_match(experiment_url, control_header, control_result):
+ else:
+ return True
+
++def http_request(self, method, url, path=None):
++ """Takes as argument url that is perfectly formed (http://hostname/REQUEST"""
++ purl = urlparse(url)
++ host = purl.netloc
++ conn = httplib.HTTPConnection(host, 80)
++ if path is None:
++ path = purl.path
++ conn.request(method, purl.path)
++ response = conn.getresponse()
++ headers = dict(response.getheaders())
++ self.headers = headers
++ self.data = response.read()
++ return True
++
++def search_headers(self, s_headers, url):
++ if http_request(self, "GET", url):
++ headers = self.headers
++ else:
++ return None
++ result = {}
++ for h in s_headers.items():
++ result[h[0]] = h[0] in headers
++ return result
++
++def http_header_match_dict(experimental_url, dict_header):
++ result = {}
++ url_header = http_get_header_dict(experimental_url)
++
++# XXX for testing
++# [('content-length', '9291'), ('via', '1.0 cache_server:3128 (squid/2.6.STABLE21)'), ('x-cache', 'MISS from cache_server'), ('accept-ranges', 'bytes'), ('server', 'Apache/2.2.16 (Debian)'), ('last-modified', 'Fri, 22 Jul 2011 03:00:31 GMT'), ('connection', 'close'), ('etag', '"105801a-244b-4a89fab1e51c0;49e684ba90c80"'), ('date', 'Sat, 23 Jul 2011 03:03:56 GMT'), ('content-type', 'text/html'), ('x-cache-lookup', 'MISS from cache_server:3128')]
++
++def search_squid_headers(self):
++ url = "http://securityfocus.org/blabla"
++ s_headers = {'via': '1.0 cache_server:3128 (squid/2.6.STABLE21)', 'x-cache': 'MISS from cache_server', 'x-cache-lookup':'MISS from cache_server:3128'}
++ ret = search_headers(self, s_headers, url)
++ for i in ret.items():
++ if i[1] is True:
++ return False
++ return True
++
++def random_bad_request(self):
++ url = "http://securityfocus.org/blabla"
++ r_str = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(random.randint(5,20)))
++ if http_request(self, r_str, url):
++ return True
++ else:
++ return None
++
++def squid_search_bad_request(self):
++ if random_bad_request(self):
++ s_headers = {'X-Squid-Error' : 'ERR_INVALID_REQ 0'}
++ for i in s_headers.items():
++ if i[0] in self.headers:
++ return False
++ return True
++ else:
++ return None
++
++def squid_cacheobject_request(self):
++ url = "http://securityfocus.org/blabla"
++ if http_request(self, "GET", url, "cache_object://localhost/info"):
++ soup = BeautifulSoup(self.data)
++ if soup.find('strong') and soup.find('strong').string == "Access Denied.":
++ return False
++ else:
++ return True
++ else:
++ return None
++
++
+ def MSHTTP_CP_Tests(self):
+ experiment_url = "http://www.msftncsi.com/ncsi.txt"
+ expectedResponse = "Microsoft NCSI" # Only this - nothing more
+@@ -186,6 +250,18 @@ def WC3_CP_Tests(self):
+
+ # Google ChromeOS fetches this url in guest mode
+ # and they expect the user to authenticate
+- def googleChromeOSHTTPTest(self):
+- print "noop"
+- #url = "http://www.google.com/"
++def googleChromeOSHTTPTest(self):
++ print "noop"
++ #url = "http://www.google.com/"
++
++def SquidHeader_TransparentHTTP_Tests(self):
++ return search_squid_headers(self)
++
++def SquidBadRequest_TransparentHTTP_Tests(self):
++ squid_cacheobject_request(self)
++ return squid_search_bad_request(self)
++
++def SquidCacheobject_TransparentHTTP_Tests(self):
++ return squid_cacheobject_request(self)
++
++
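
The responses captured in the TODO hunk above are what the new Squid checks look for. A standalone restatement of that header fingerprinting (Python 3; the values are copied from the VIA Rail capture, and matching on exact values is slightly stricter than the name-presence test in search_headers()):

    # Header fingerprints taken from the squid responses recorded above.
    SQUID_FINGERPRINTS = {
        'via': '1.0 cache_server:3128 (squid/2.6.STABLE21)',
        'x-cache': 'MISS from cache_server',
        'x-cache-lookup': 'MISS from cache_server:3128',
    }

    def looks_like_squid(headers):
        # headers: a dict of response headers, names in any case.
        lowered = {k.lower(): v for k, v in headers.items()}
        return any(lowered.get(name) == value
                   for name, value in SQUID_FINGERPRINTS.items())

    print(looks_like_squid({'Via': '1.0 cache_server:3128 (squid/2.6.STABLE21)'}))
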
diff --git a/to-be-ported/very-old/ooni/#namecheck.py# b/to-be-ported/very-old/ooni/#namecheck.py#
new file mode 100644
index 0000000..1a2a3f0
--- /dev/null
+++ b/to-be-ported/very-old/ooni/#namecheck.py#
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+#
+# DNS tampering detection module
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+#
+# This module performs multiple DNS tests.
+
+import sys
+import ooni.dnsooni
+
+class DNS():
+ def __init__(self, args):
+ self.in_ = sys.stdin
+ self.out = sys.stdout
+ self.debug = False
+ self.randomize = args.randomize
+
+ def DNS_Tests(self):
+ print "DNS tampering detection:"
+ filter_name = "_DNS_Tests"
+ tests = [ooni.dnsooni]
+ for test in tests:
+ for function_ptr in dir(test):
+ if function_ptr.endswith(filter_name):
+ filter_result = getattr(test, function_ptr)(self)
+ if filter_result == True:
+ print function_ptr + " thinks the network is clean"
+ elif filter_result == None:
+ print function_ptr + " failed"
+ else:
+ print function_ptr + " thinks the network is dirty"
+
+ def main(self):
+ for function_ptr in dir(self):
+ if function_ptr.endswith("_Tests"):
+ getattr(self, function_ptr)()
+
+if __name__ == '__main__':
+ self.main()
diff --git a/to-be-ported/very-old/ooni/.DS_Store b/to-be-ported/very-old/ooni/.DS_Store
new file mode 100644
index 0000000..f5738a5
Binary files /dev/null and b/to-be-ported/very-old/ooni/.DS_Store differ
diff --git a/to-be-ported/very-old/ooni/__init__.py b/to-be-ported/very-old/ooni/__init__.py
new file mode 100644
index 0000000..8f1b96e
--- /dev/null
+++ b/to-be-ported/very-old/ooni/__init__.py
@@ -0,0 +1,12 @@
+"""\
+This is your package, 'ooni'.
+
+It was provided by the package, `package`.
+
+Please change this documentation, and write this module!
+"""
+
+__version__ = '0.0.1'
+
+# If you run 'make test', this is your failing test.
+# raise Exception("\n\n\tNow it's time to write your 'ooni' module!!!\n\n")
diff --git a/to-be-ported/very-old/ooni/command.py b/to-be-ported/very-old/ooni/command.py
new file mode 100644
index 0000000..e5f8f9f
--- /dev/null
+++ b/to-be-ported/very-old/ooni/command.py
@@ -0,0 +1,250 @@
+# -*- coding: utf-8
+"""\
+Command line UI module for ooni-probe - heavily inspired by Ingy döt Net
+"""
+
+import os
+import sys
+import re
+import optparse
+
+# Only include high level ooni tests at this time
+import ooni.captive_portal
+import ooni.namecheck
+import ooni.dns_poisoning
+import ooni.dns_cc_check
+import ooni.transparenthttp
+import ooni.helpers
+import ooni.plugooni
+import ooni.input
+
+class Command():
+ def __init__(self, args):
+ sys.argv = sys.argv[0:1]
+ sys.argv.extend(args)
+ self.startup_options()
+
+ def startup_options(self):
+ self.action = None
+ self.from_ = None
+ self.to = None
+ self.parser = None
+ self.emitter = None
+ self.emit_header = None
+ self.emit_trailer = None
+ self.in_ = sys.stdin
+ self.out = sys.stdout
+ self.debug = False
+ self.randomize = True
+ self.cc = None
+ self.hostname = None
+ self.listfile = None
+ self.listplugooni = False
+ self.plugin_name = "all"
+ self.controlproxy = None # "socks4a://127.0.0.1:9050/"
+ self.experimentproxy = None
+
+ usage = """
+
+ 'ooni' is the Open Observatory of Network Interference
+
+ command line usage: ooni-probe [options]"""
+
+ optparser = optparse.OptionParser(usage=usage)
+
+ # --plugin
+ def cb_plugin(option, opt, value, oparser):
+ self.action = opt[2:]
+ self.plugin_name = str(value)
+ optparser.add_option(
+ "--plugin", type="string",
+ action="callback", callback=cb_plugin,
+ help="run the Plugooni plgoo plugin specified"
+ )
+
+ # --listplugins
+ def cb_list_plugins(option, opt, value, oparser):
+ self.action = opt[2:]
+ optparser.add_option(
+ "--listplugins",
+ action="callback", callback=cb_list_plugins,
+ help="list available Plugooni as plgoos plugin names"
+ )
+
+ # --captiveportal
+ def cb_captiveportal(option, opt, value, oparser):
+ self.action = opt[2:]
+ optparser.add_option(
+ "--captiveportal",
+ action="callback", callback=cb_captiveportal,
+ help="run vendor emulated captiveportal tests"
+ )
+
+ # --transhttp
+ def cb_transhttp(option, opt, value, oparser):
+ self.action = opt[2:]
+ optparser.add_option(
+ "--transhttp",
+ action="callback", callback=cb_transhttp,
+ help="run Transparent HTTP tests"
+ )
+
+ # --dns
+ def cb_dnstests(option, opt, value, oparser):
+ self.action = opt[2:]
+ optparser.add_option(
+ "--dns",
+ action="callback", callback=cb_dnstests,
+ help="run fixed generic dns tests"
+ )
+
+ # --dnsbulk
+ def cb_dnsbulktests(option, opt, value, oparser):
+ self.action = opt[2:]
+ optparser.add_option(
+ "--dnsbulk",
+ action="callback", callback=cb_dnsbulktests,
+ help="run bulk DNS tests in random.shuffle() order"
+ )
+
+ # --dns-cc-check
+ def cb_dnscccheck(option, opt, value, oparser):
+ self.action = opt[2:]
+ optparser.add_option(
+ "--dnscccheck",
+ action="callback", callback=cb_dnscccheck,
+ help="run cc specific bulk DNS tests in random.shuffle() order"
+ )
+
+ # --cc [country code]
+ def cb_cc(option, opt, value, optparser):
+ # XXX: We should check this against a list of supported country codes
+ # and then return the matching value from the list into self.cc
+ self.cc = str(value)
+ optparser.add_option(
+ "--cc", type="string",
+ action="callback", callback=cb_cc,
+ help="set a specific country code -- default is None",
+ )
+
+ # --list [url/hostname/ip list in file]
+ def cb_list(option, opt, value, optparser):
+ self.listfile = os.path.expanduser(value)
+ if not os.path.isfile(self.listfile):
+ print "Wrong file '" + value + "' in --list."
+ sys.exit(1)
+ optparser.add_option(
+ "--list", type="string",
+ action="callback", callback=cb_list,
+ help="file to read from -- default is None",
+ )
+
+ # --url [url/hostname/ip]
+ def cb_host(option, opt, value, optparser):
+ self.hostname = str(value)
+ optparser.add_option(
+ "--url", type="string",
+ action="callback", callback=cb_host,
+ help="set URL/hostname/IP for use in tests -- default is None",
+ )
+
+ # --controlproxy [scheme://host:port]
+ def cb_controlproxy(option, opt, value, optparser):
+ self.controlproxy = str(value)
+ optparser.add_option(
+ "--controlproxy", type="string",
+ action="callback", callback=cb_controlproxy,
+ help="proxy to be used as a control -- default is None",
+ )
+
+ # --experimentproxy [scheme://host:port]
+ def cb_experimentproxy(option, opt, value, optparser):
+ self.experimentproxy = str(value)
+ optparser.add_option(
+ "--experimentproxy", type="string",
+ action="callback", callback=cb_experimentproxy,
+ help="proxy to be used for experiments -- default is None",
+ )
+
+
+
+ # --randomize
+ def cb_randomize(option, opt, value, optparser):
+ self.randomize = bool(int(value))
+ optparser.add_option(
+ "--randomize", type="choice",
+ choices=['0', '1'], metavar="0|1",
+ action="callback", callback=cb_randomize,
+ help="randomize host order -- default is on",
+ )
+
+ # XXX TODO:
+ # pause/resume scans for dns_BULK_DNS_Tests()
+ # setting of control/experiment resolver
+ # setting of control/experiment proxy
+ #
+
+ def cb_version(option, opt, value, oparser):
+ self.action = 'version'
+ optparser.add_option(
+ "-v", "--version",
+ action="callback", callback=cb_version,
+ help="print ooni-probe version"
+ )
+
+ # parse options
+ (opts, args) = optparser.parse_args()
+
+ # validate options
+ try:
+ if (args):
+ raise optparse.OptionError('extra arguments found', args)
+ if (not self.action):
+ raise optparse.OptionError(
+ 'RTFS', 'required arguments missing'
+ )
+
+ except optparse.OptionError, err:
+ sys.stderr.write(str(err) + '\n\n')
+ optparser.print_help()
+ sys.exit(1)
+
+ def version(self):
+ print """
+ooni-probe pre-alpha
+Copyright (c) 2011, Jacob Appelbaum, Arturo Filastò
+See: https://www.torproject.org/ooni/
+
+"""
+
+ def run(self):
+ getattr(self, self.action)()
+
+ def plugin(self):
+ plugin_run = ooni.plugooni.Plugooni
+ plugin_run(self).run(self)
+
+ def listplugins(self):
+ plugin_run = ooni.plugooni.Plugooni
+ plugin_run(self).list_plugoons()
+
+ def captiveportal(self):
+ captive_portal = ooni.captive_portal.CaptivePortal
+ captive_portal(self).main()
+
+ def transhttp(self):
+ transparent_http = ooni.transparenthttp.TransparentHTTPProxy
+ transparent_http(self).main()
+
+ def dns(self):
+ dnstests = ooni.namecheck.DNS
+ dnstests(self).main()
+
+ def dnsbulk(self):
+ dnstests = ooni.dns_poisoning.DNSBulk
+ dnstests(self).main()
+
+ def dnscccheck(self):
+ dnstests = ooni.dns_cc_check.DNSBulk
+ dnstests(self).main()
+
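
Every flag in command.py is wired to an optparse callback that records the flag name and dispatches on it later. The pattern in miniature (Python 3 sketch; optparse still ships in the standard library, although argparse has long since superseded it):

    import optparse

    class Command:
        def __init__(self, argv):
            self.action = None
            parser = optparse.OptionParser()

            def remember(option, opt, value, parser_):
                # As in cb_captiveportal and friends: strip the leading
                # "--" and keep the rest as the method name to run.
                self.action = opt[2:]

            parser.add_option('--captiveportal', action='callback',
                              callback=remember, help='run captiveportal tests')
            parser.add_option('--dns', action='callback',
                              callback=remember, help='run generic dns tests')
            parser.parse_args(argv)

        def captiveportal(self):
            print('would run the captive portal suite')

        def dns(self):
            print('would run the DNS suite')

        def run(self):
            getattr(self, self.action)()

    Command(['--dns']).run()
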
diff --git a/to-be-ported/very-old/ooni/dns_poisoning.py b/to-be-ported/very-old/ooni/dns_poisoning.py
new file mode 100644
index 0000000..939391e
--- /dev/null
+++ b/to-be-ported/very-old/ooni/dns_poisoning.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+#
+# DNS tampering detection module
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+#
+# This module performs DNS queries against a known good resolver and a possible
+# bad resolver. We compare every resolved name against a list of known filters
+# - if we match, we ring a bell; otherwise, we list possible filter IP
+# addresses. There is a high false positive rate for sites that are GeoIP load
+# balanced.
+#
+
+import sys
+import ooni.dnsooni
+
+class DNSBulk():
+ def __init__(self, args):
+ self.in_ = sys.stdin
+ self.out = sys.stdout
+ self.randomize = args.randomize
+ self.debug = False
+
+ def DNS_Tests(self):
+ print "DNS tampering detection for list of domains:"
+ filter_name = "_DNS_BULK_Tests"
+ tests = [ooni.dnsooni]
+ for test in tests:
+ for function_ptr in dir(test):
+ if function_ptr.endswith(filter_name):
+ filter_result = getattr(test, function_ptr)(self)
+ if filter_result == True:
+ print function_ptr + " thinks the network is clean"
+ elif filter_result == None:
+ print function_ptr + " failed"
+ else:
+ print function_ptr + " thinks the network is dirty"
+ def main(self):
+ for function_ptr in dir(self):
+ if function_ptr.endswith("_Tests"):
+ getattr(self, function_ptr)()
+
+if __name__ == '__main__':
+ self.main()
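
Both DNS drivers discover their checks by suffix-matching attribute names with dir() and dispatching through getattr(). That reflection pattern, stripped of the DNS specifics (Python 3 sketch):

    class Suite:
        def known_good_DNS_Tests(self):
            return True          # pretend this check saw a clean network

        def bulk_DNS_Tests(self):
            return False         # pretend this check saw tampering

        def run(self, suffix='_Tests'):
            for name in dir(self):
                if name.endswith(suffix):
                    result = getattr(self, name)()
                    verdict = 'clean' if result else 'dirty'
                    print('%s thinks the network is %s' % (name, verdict))

    Suite().run()
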
diff --git a/to-be-ported/very-old/ooni/dnsooni.py b/to-be-ported/very-old/ooni/dnsooni.py
new file mode 100644
index 0000000..bfdfe51
--- /dev/null
+++ b/to-be-ported/very-old/ooni/dnsooni.py
@@ -0,0 +1,356 @@
+#!/usr/bin/env python
+#
+# DNS support for ooni-probe
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+#
+
+from socket import gethostbyname
+import ooni.common
+
+# requires python-dns
+# (pydns.sourceforge.net)
+try:
+ import DNS
+# Mac OS X needs this
+except:
+ try:
+ import dns as DNS
+ except:
+ pass # Never mind, let's break later.
+import random
+from pprint import pprint
+
+""" Wrap gethostbyname """
+def dns_resolve(hostname):
+ try:
+ resolved_host = gethostbyname(hostname)
+ return resolved_host
+ except:
+ return False
+
+"""Perform a resolution on test_hostname and compare it with the expected
+ control_resolved ip address. Optionally, a label may be set to customize
+ output. If the experiment matches the control, this returns True; otherwise
+ it returns False.
+"""
+def dns_resolve_match(experiment_hostname, control_resolved,
+ label="generic DNS comparison"):
+ experiment_resolved = dns_resolve(experiment_hostname)
+ if experiment_resolved == False:
+ return None
+ if experiment_resolved:
+ if str(experiment_resolved) != str(control_resolved):
+ print label + " control " + str(control_resolved) + " data does not " \
+ "match experiment response: " + str(experiment_resolved)
+ return False
+ return True
+
+def generic_DNS_resolve(experiment_hostname, experiment_resolver):
+ if experiment_resolver == None:
+ req = DNS.Request(name=experiment_hostname) # local resolver
+ else:
+ req = DNS.Request(name=experiment_hostname, server=experiment_resolver) # override
+ resolved_data = req.req().answers
+ return resolved_data
+
+""" Return a list of all known censors. """
+def load_list_of_known_censors(known_proxy_file=None):
+ proxyfile = "proxy-lists/ips.txt"
+ known_proxy_file = open(proxyfile, 'r', 1)
+ known_proxy_list = []
+ for known_proxy in known_proxy_file.readlines():
+ known_proxy_list.append(known_proxy)
+ known_proxy_file.close()
+ known_proxy_count = len(known_proxy_list)
+ print "Loading " + str(known_proxy_count) + " known proxies..."
+ return known_proxy_list, known_proxy_count
+
+def load_list_of_test_hosts(hostfile=None):
+ if hostfile == None:
+ hostfile="censorship-lists/norwegian-dns-blacklist.txt"
+ host_list_file = open(hostfile, 'r', 1)
+ host_list = []
+ for host_name in host_list_file.readlines():
+ if host_name.isspace():
+ continue
+ else:
+ host_list.append(host_name)
+ host_list_file.close()
+ host_count = len(host_list)
+ #print "Loading " + str(host_count) + " test host names..."
+ return host_list, host_count
+
+""" Return True with a list of censors if we find a known censor from
+ known_proxy_list in the experiment_data DNS response. Otherwise return
+ False and None. """
+def contains_known_censors(known_proxy_list, experiment_data):
+ match = False
+ proxy_list = []
+ for answer in range(len(experiment_data)):
+ for known_proxy in known_proxy_list:
+ if answer == known_proxy:
+ print "CONFLICT: known proxy discovered: " + str(known_proxy),
+ proxy_list.append(known_proxy)
+ match = True
+ return match, proxy_list
+
+""" Return True and the experiment response that failed to match."""
+def compare_control_with_experiment(known_proxy_list, control_data, experiment_data):
+ known_proxy_found, known_proxies = contains_known_censors(known_proxy_list, experiment_data)
+ conflict_list = []
+ conflict = False
+ if known_proxy_found:
+ print "known proxy discovered: " + str(known_proxies)
+ for answer in range(len(control_data)):
+ if control_data[answer]['data'] == experiment_data:
+ print "control_data[answer]['data'] = " + str(control_data[answer]['data']) + "and experiment_data = " + str(experiment_data)
+ continue
+ else:
+ conflict = True
+ conflict_list.append(experiment_data)
+ #print "CONFLICT: control_data: " + str(control_data) + " experiment_data: " + str(experiment_data),
+ return conflict, conflict_list
+
+def dns_DNS_BULK_Tests(self, hostfile=None,
+ known_good_resolver="8.8.8.8", test_resolver=None):
+ tampering = False # By default we'll pretend the internet is nice
+ tampering_list = []
+ host_list, host_count = load_list_of_test_hosts()
+ known_proxies, proxy_count = load_list_of_known_censors()
+ check_count = 1
+ if test_resolver == None:
+ DNS.ParseResolvConf() # Set the local resolver as our default
+ if self.randomize:
+ random.shuffle(host_list) # This makes our list non-sequential for now
+ for host_name in host_list:
+ host_name = host_name.strip()
+ print "Total progress: " + str(check_count) + " of " + str(host_count) + " hosts to check"
+ print "Resolving with control resolver..."
+ print "Testing " + host_name + " with control resolver: " + str(known_good_resolver)
+ print "Testing " + host_name + " with experiment resolver: " + str(test_resolver)
+ # XXX TODO - we need to keep track of the status of these requests and then resume them
+ while True:
+ try:
+ control_data = generic_DNS_resolve(host_name, known_good_resolver)
+ break
+ except KeyboardInterrupt:
+ print "bailing out..."
+ exit()
+ except DNS.Base.DNSError:
+ print "control resolver appears to be failing..."
+ continue
+ except:
+ print "Timeout; looping!"
+ continue
+
+ print "Resolving with experiment resolver..."
+ while True:
+ try:
+ experiment_data = generic_DNS_resolve(host_name, test_resolver)
+ break
+ except KeyboardInterrupt:
+ print "bailing out..."
+ exit()
+ except DNS.Base.DNSError:
+ print "experiment resolver appears to be failing..."
+ continue
+ except:
+ print "Timeout; looping!"
+ continue
+
+ print "Comparing control and experiment...",
+ tampering, conflicts = compare_control_with_experiment(known_proxies, control_data, experiment_data)
+ if tampering:
+ tampering_list.append(conflicts)
+ print "Conflicts with " + str(host_name) + " : " + str(conflicts)
+ check_count = check_count + 1
+ host_list.close()
+ return tampering
+
+""" Attempt to resolve random_hostname and return True and None if empty. If an
+ address is returned we return False and the returned address.
+"""
+def dns_response_empty(random_hostname):
+ response = dns_resolve(random_hostname)
+ if response == False:
+ return True, None
+ return False, response
+
+def dns_multi_response_empty(count, size):
+ for i in range(count):
+ randName = ooni.common._randstring(size)
+ response_empty, response_ip = dns_response_empty(randName)
+ if response_empty == True and response_ip == None:
+ responses_are_empty = True
+ else:
+ print label + " " + randName + " found with value " + str(response_ip)
+ responses_are_empty = False
+ return responses_are_empty
+
+""" Attempt to resolve one random host name per tld in tld_list where the
+ hostnames are random strings with a length between min_length and
+ max_length. Return True if list is empty, otherwise return False."""
+def dns_list_empty(tld_list, min_length, max_length,
+ label="generic DNS list test"):
+ for tld in tld_list:
+ randName = ooni.common._randstring(min_length, max_length) + tld
+ response_empty, response_ip = dns_response_empty(randName)
+ return response_empty
+
+# Known bad test
+# Test for their DNS breakage and their HTTP MITM
+# "Family Shield" is 208.67.222.123 and 208.67.220.123
+# returns 67.215.65.130 for filtered sites like kink.com
+# block.opendns.com is a block page where users are redirected
+# 208.67.216.135 208.67.217.135 are the block pages currently point
+# 67.215.65.132 is returned for NXDOMAINs and a visit with HTTP to that IP
+# results in redirection to http://guide.opendns.com/main?url=sdagsad.com or
+# whatever the HOST header says
+# Amusingly - their Server header is: "OpenDNS Guide"
+""" Return True if we are not being directed as known OpenDNS block pages."""
+def OpenDNS_DNS_Tests(self):
+ return (OpenDNS_Censorship_DNS_TESTS(self) and
+ OpenDNS_NXDomain_DNS_TESTS(self))
+
+def OpenDNS_Censorship_DNS_TESTS(self):
+ known_filter = "67.215.65.130"
+ randName = ooni.common._randstring(10)
+ redirected = dns_resolve_match(randName, known_filter, label="OpenDNS DNS Censorship comparison")
+ if redirected:
+ return False
+ else:
+ return True
+
+def OpenDNS_NXDomain_DNS_TESTS(self):
+ known_filter = "67.215.65.132"
+ randName = ooni.common._randstring(10)
+ redirected = dns_resolve_match(randName, known_filter, label="OpenDNS DNS NXDomain comparison")
+ if redirected:
+ return False
+ else:
+ return True
+
+"""Returns True if the experiment_url returns the well known Italian block page."""
+def cc_DNS_Tests_it(self):
+ tampering = False # By default we'll pretend the internet is nice
+ tampering_list = []
+ conflicts = []
+ known_good_resolver = "8.8.8.8"
+ host_list, host_count = load_list_of_test_hosts("censorship-lists/italy-gamble-blocklist-07-22-11.txt")
+ known_http_block_pages, known_block_count = load_list_of_test_hosts("proxy-lists/italy-http-ips.txt")
+ known_censoring_resolvers, censoring_resolver_count = load_list_of_test_hosts("proxy-lists/italy-dns-ips.txt")
+
+ check_count = 1
+ DNS.ParseResolvConf()
+ # Set the local resolver as our default
+ if self.randomize:
+ random.shuffle(host_list) # This makes our list non-sequential for now
+ print "We're testing (" + str(host_count) + ") URLs"
+ print "We're looking for (" + str(known_block_count) + ") block pages"
+ print "We're testing against (" + str(censoring_resolver_count) + ") censoring DNS resolvers"
+ for test_resolver in known_censoring_resolvers:
+ test_resolver = test_resolver.strip()
+ for host_name in host_list:
+ host_name = host_name.strip()
+ print "Total progress: " + str(check_count) + " of " + str(host_count) + " hosts to check"
+ print "Testing " + host_name + " with control resolver: " + known_good_resolver
+ print "Testing " + host_name + " with experiment resolver: " + test_resolver
+ while True:
+ try:
+ control_data = generic_DNS_resolve(host_name, known_good_resolver)
+ break
+ except KeyboardInterrupt:
+ print "bailing out..."
+ exit()
+ except DNS.Base.DNSError:
+ print "control resolver appears to be failing..."
+ break
+ except:
+ print "Timeout; looping!"
+ continue
+
+ while True:
+ try:
+ experiment_data = generic_DNS_resolve(host_name, test_resolver)
+ break
+ except KeyboardInterrupt:
+ print "bailing out..."
+ exit()
+ except DNS.Base.DNSError:
+ print "experiment resolver appears to be failing..."
+ continue
+ except:
+ print "Timeout; looping!"
+ continue
+
+ print "Comparing control and experiment...",
+ tampering, conflicts = compare_control_with_experiment(known_http_block_pages, control_data, experiment_data)
+ if tampering:
+ tampering_list.append(conflicts)
+ print "Conflicts with " + str(host_name) + " : " + str(conflicts)
+ check_count = check_count + 1
+
+ host_list.close()
+ return tampering
+
+
+## XXX TODO
+## Code up automatic tests for HTTP page checking in Italy - length + known strings, etc
+
+""" Returns True if the experiment_host returns a well known Australian filter
+ IP address."""
+def Australian_DNS_Censorship(self, known_filtered_host="badhost.com"):
+ # http://www.robtex.com/ip/61.88.88.88.html
+ # http://requests.optus.net.au/dns/
+ known_block_ip = "208.69.183.228" # http://interpol.contentkeeper.com/
+ known_censoring_resolvers = ["61.88.88.88"] # Optus
+ for resolver in known_censoring_resolvers:
+ blocked = generic_DNS_censorship(known_filtered_host, resolver, known_block_ip)
+ if blocked:
+ return True
+
+"""Returns True if experiment_hostname as resolved by experiment_resolver
+ resolves to control_data. Returns False if there is no match or None if the
+ attempt fails."""
+def generic_DNS_censorship(self, experiment_hostname, experiment_resolver,
+ control_data):
+ req = DNS.Request(name=experiment_hostname, server=experiment_resolver)
+ resolved_data = req.req().answers
+ for answer in range(len(resolved_data)):
+ if resolved_data[answer]['data'] == control_data:
+ return True
+ return False
+
+# See dns_launch_wildcard_checks in tor/src/or/dns.c for Tor implementation
+# details
+""" Return True if Tor would consider the network fine; False if it's hostile
+ and has no signs of DNS tampering. """
+def Tor_DNS_Tests(self):
+ response_rfc2606_empty = RFC2606_DNS_Tests(self)
+ tor_tld_list = ["", ".com", ".org", ".net"]
+ response_tor_empty = ooni.dnsooni.dns_list_empty(tor_tld_list, 8, 16, "TorDNSTest")
+ return response_tor_empty | response_rfc2606_empty
+
+""" Return True if RFC2606 would consider the network hostile; False if it's all
+ clear and has no signs of DNS tampering. """
+def RFC2606_DNS_Tests(self):
+ tld_list = [".invalid", ".test"]
+ return ooni.dnsooni.dns_list_empty(tld_list, 4, 18, "RFC2606Test")
+
+""" Return True if googleChromeDNSTest would consider the network OK."""
+def googleChrome_CP_Tests(self):
+ maxGoogleDNSTests = 3
+ GoogleDNSTestSize = 10
+ return ooni.dnsooni.dns_multi_response_empty(maxGoogleDNSTests,
+ GoogleDNSTestSize)
+def googleChrome_DNS_Tests(self):
+ return googleChrome_CP_Tests(self)
+
+""" Return True if MSDNSTest would consider the network OK."""
+def MSDNS_CP_Tests(self):
+ experimentHostname = "dns.msftncsi.com"
+ expectedResponse = "131.107.255.255"
+ return ooni.dnsooni.dns_resolve_match(experimentHostname, expectedResponse, "MS DNS")
+
+def MSDNS_DNS_Tests(self):
+ return MSDNS_CP_Tests(self)
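
The OpenDNS checks above rely on one observation: a hostname that cannot exist should not resolve, so if a known guide/block address comes back, the resolver is rewriting NXDOMAIN. A modern sketch of the same probe (assumes the third-party dnspython package; the filter addresses and the FamilyShield resolver IP are the ones documented in the comments above):

    import random
    import string

    import dns.exception
    import dns.resolver            # third-party: dnspython

    OPENDNS_FILTER_IPS = {'67.215.65.130', '67.215.65.132'}

    def opendns_rewrites_nxdomain(resolver_ip='208.67.222.123'):
        # A random label should be NXDOMAIN; a known filter IP instead
        # means the resolver is censoring or rewriting answers.
        name = ''.join(random.choice(string.ascii_lowercase)
                       for _ in range(10)) + '.com'
        resolver = dns.resolver.Resolver(configure=False)
        resolver.nameservers = [resolver_ip]
        try:
            answers = resolver.resolve(name, 'A')
        except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer,
                dns.exception.Timeout):
            return False
        return any(rr.address in OPENDNS_FILTER_IPS for rr in answers)
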
diff --git a/to-be-ported/very-old/ooni/helpers.py b/to-be-ported/very-old/ooni/helpers.py
new file mode 100644
index 0000000..514e65f
--- /dev/null
+++ b/to-be-ported/very-old/ooni/helpers.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+#
+# HTTP support for ooni-probe
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+# Arturo Filasto' <art(a)fuffa.org>
+
+import ooni.common
+import pycurl
+import random
+import zipfile
+import os
+from xml.dom import minidom
+try:
+ from BeautifulSoup import BeautifulSoup
+except:
+ pass # Never mind, let's break later.
+
+def get_random_url(self):
+ filepath = os.getcwd() + "/test-lists/top-1m.csv.zip"
+ fp = zipfile.ZipFile(filepath, "r")
+ fp.open("top-1m.csv")
+ content = fp.read("top-1m.csv")
+ return "http://" + random.choice(content.split("\n")).split(",")[1]
+
+"""Pick a random header and use that for the request"""
+def get_random_headers(self):
+ filepath = os.getcwd() + "/test-lists/whatheaders.xml"
+ headers = []
+ content = open(filepath, "r").read()
+ soup = BeautifulSoup(content)
+ measurements = soup.findAll('measurement')
+ i = random.randint(0, len(measurements) - 1)
+ for vals in measurements[i].findAll('header'):
+ name = vals.find('name').string
+ value = vals.find('value').string
+ if name != "host":
+ headers.append((name, value))
+ return headers
diff --git a/to-be-ported/very-old/ooni/http.py b/to-be-ported/very-old/ooni/http.py
new file mode 100644
index 0000000..59e2abb
--- /dev/null
+++ b/to-be-ported/very-old/ooni/http.py
@@ -0,0 +1,306 @@
+#!/usr/bin/env python
+#
+# HTTP support for ooni-probe
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+# Arturo Filasto' <art(a)fuffa.org>
+#
+
+from socket import gethostbyname
+import ooni.common
+import ooni.helpers
+import ooni.report
+import urllib2
+import httplib
+from urlparse import urlparse
+from pprint import pprint
+import pycurl
+import random
+import string
+import re
+from pprint import pprint
+try:
+ from BeautifulSoup import BeautifulSoup
+except:
+ pass # Never mind, let's break later.
+
+# By default, we'll be Torbutton's UA
+default_ua = { 'User-Agent' :
+ 'Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0' }
+
+# Use pycurl to connect over a proxy
+PROXYTYPE_SOCKS5 = 5
+default_proxy_type = PROXYTYPE_SOCKS5
+default_proxy_host = "127.0.0.1"
+default_proxy_port = "9050"
+
+#class HTTPResponse(object):
+# def __init__(self):
+
+
+"""A very basic HTTP fetcher that uses Tor by default and returns a curl
+ object."""
+def http_proxy_fetch(url, headers, proxy_type=5,
+ proxy_host="127.0.0.1",
+ proxy_port=9050):
+ request = pycurl.Curl()
+ request.setopt(pycurl.PROXY, proxy_host)
+ request.setopt(pycurl.PROXYPORT, proxy_port)
+ request.setopt(pycurl.PROXYTYPE, proxy_type)
+ request.setopt(pycurl.HTTPHEADER, ["User-Agent: Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0"])
+ request.setopt(pycurl.URL, url)
+ response = request.perform()
+ http_code = getinfo(pycurl.HTTP_CODE)
+ return response, http_code
+
+"""A very basic HTTP fetcher that returns a urllib2 response object."""
+def http_fetch(url,
+ headers= default_ua,
+ label="generic HTTP fetch"):
+ request = urllib2.Request(url, None, headers)
+ response = urllib2.urlopen(request)
+ return response
+
+"""Connect to test_hostname on port 80, request url and compare it with the expected
+ control_result. Optionally, a label may be set to customize
+ output. If the experiment matches the control, this returns True with the http
+ status code; otherwise it returns False.
+"""
+def http_content_match(experimental_url, control_result,
+ headers= { 'User-Agent' : default_ua },
+ label="generic HTTP content comparison"):
+ request = urllib2.Request(experimental_url, None, headers)
+ response = urllib2.urlopen(request)
+ responseContents = response.read()
+ responseCode = response.code
+ if responseContents != False:
+ if str(responseContents) != str(control_result):
+ print label + " control " + str(control_result) + " data does not " \
+ "match experiment response: " + str(responseContents)
+ return False, responseCode
+ return True, responseCode
+ else:
+ print "HTTP connection appears to have failed"
+ return False, False
+
+"""Connect to test_hostname on port 80, request url and compare it with the expected
+ control_result as a regex. Optionally, a label may be set to customize
+ output. If the experiment matches the control, this returns True with the HTTP
+ status code; otherwise it returns False.
+"""
+def http_content_fuzzy_match(experimental_url, control_result,
+ headers= { 'User-Agent' : default_ua },
+ label="generic HTTP content comparison"):
+ request = urllib2.Request(experimental_url, None, headers)
+ response = urllib2.urlopen(request)
+ responseContents = response.read()
+ responseCode = response.code
+ pattern = re.compile(control_result)
+ match = pattern.search(responseContents)
+ if responseContents != False:
+ if not match:
+ print label + " control " + str(control_result) + " data does not " \
+ "match experiment response: " + str(responseContents)
+ return False, responseCode
+ return True, responseCode
+ else:
+ print "HTTP connection appears to have failed"
+ return False, False
+
+"""Compare two HTTP status codes as integers and return True if they match."""
+def http_status_code_match(experiment_code, control_code):
+ if int(experiment_code) != int(control_code):
+ return False
+ return True
+
+"""Compare two HTTP status codes as integers and return True if they don't match."""
+def http_status_code_no_match(experiment_code, control_code):
+ if http_status_code_match(experiment_code, control_code):
+ return False
+ return True
+
+"""Connect to a URL and compare the control_header/control_result with the data
+served by the remote server. Return True if it matches, False if it does not."""
+def http_header_match(experiment_url, control_header, control_result):
+ response = http_fetch(experiment_url)
+ remote_header = response.get_header(control_header)
+ if str(remote_header) == str(control_result):
+ return True
+ else:
+ return False
+
+"""Connect to a URL and compare the control_header/control_result with the data
+served by the remote server. Return True if it does not match, False if it does."""
+def http_header_no_match(experiment_url, control_header, control_result):
+ match = http_header_match(experiment_url, control_header, control_result)
+ if match:
+ return False
+ else:
+ return True
+
+def send_browser_headers(self, browser, conn):
+ headers = ooni.helpers.get_random_headers(self)
+ for h in headers:
+ conn.putheader(h[0], h[1])
+ conn.endheaders()
+ return True
+
+def http_request(self, method, url, path=None):
+ purl = urlparse(url)
+ host = purl.netloc
+ conn = httplib.HTTPConnection(host, 80)
+ conn.connect()
+ if path is None:
+ path = purl.path
+ conn.putrequest(method, purl.path)
+ send_browser_headers(self, None, conn)
+ response = conn.getresponse()
+ headers = dict(response.getheaders())
+ self.headers = headers
+ self.data = response.read()
+ return True
+
+def search_headers(self, s_headers, url):
+ if http_request(self, "GET", url):
+ headers = self.headers
+ else:
+ return None
+ result = {}
+ for h in s_headers.items():
+ result[h[0]] = h[0] in headers
+ return result
+
+# XXX for testing
+# [('content-length', '9291'), ('via', '1.0 cache_server:3128 (squid/2.6.STABLE21)'), ('x-cache', 'MISS from cache_server'), ('accept-ranges', 'bytes'), ('server', 'Apache/2.2.16 (Debian)'), ('last-modified', 'Fri, 22 Jul 2011 03:00:31 GMT'), ('connection', 'close'), ('etag', '"105801a-244b-4a89fab1e51c0;49e684ba90c80"'), ('date', 'Sat, 23 Jul 2011 03:03:56 GMT'), ('content-type', 'text/html'), ('x-cache-lookup', 'MISS from cache_server:3128')]
+
+"""Search for squid headers by requesting a random site and checking if the headers have been rewritten (active, not fingerprintable)"""
+def search_squid_headers(self):
+ test_name = "squid header"
+ self.logger.info("RUNNING %s test" % test_name)
+ url = ooni.helpers.get_random_url(self)
+ s_headers = {'via': '1.0 cache_server:3128 (squid/2.6.STABLE21)', 'x-cache': 'MISS from cache_server', 'x-cache-lookup':'MISS from cache_server:3128'}
+ ret = search_headers(self, s_headers, url)
+ for i in ret.items():
+ if i[1] is True:
+ self.logger.info("the %s test returned False" % test_name)
+ return False
+ self.logger.info("the %s test returned True" % test_name)
+ return True
+
+def random_bad_request(self):
+ url = ooni.helpers.get_random_url(self)
+ r_str = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(random.randint(5,20)))
+ if http_request(self, r_str, url):
+ return True
+ else:
+ return None
+
+"""Create a request made up of a random string of 5-20 chars (active technique, possibly fingerprintable)"""
+def squid_search_bad_request(self):
+ test_name = "squid bad request"
+ self.logger.info("RUNNING %s test" % test_name)
+ if random_bad_request(self):
+ s_headers = {'X-Squid-Error' : 'ERR_INVALID_REQ 0'}
+ for i in s_headers.items():
+ if i[0] in self.headers:
+ self.logger.info("the %s test returned False" % test_name)
+ return False
+ self.logger.info("the %s test returned True" % test_name)
+ return True
+ else:
+ self.logger.warning("the %s test failed" % test_name)
+ return None
+
+"""Try requesting cache_object and expect as output access denied (very active technique, fingerprintable) """
+def squid_cacheobject_request(self):
+ url = ooni.helpers.get_random_url(self)
+ test_name = "squid cacheobject"
+ self.logger.info("RUNNING %s test" % test_name)
+ if http_request(self, "GET", url, "cache_object://localhost/info"):
+ soup = BeautifulSoup(self.data)
+ if soup.find('strong') and soup.find('strong').string == "Access Denied.":
+ self.logger.info("the %s test returned False" % test_name)
+ return False
+ else:
+ self.logger.info("the %s test returned True" % test_name)
+ return True
+ else:
+ self.logger.warning("the %s test failed" % test_name)
+ return None
+
+
+def MSHTTP_CP_Tests(self):
+ test_name = "MS HTTP Captive Portal"
+ self.logger.info("RUNNING %s test" % test_name)
+ experiment_url = "http://www.msftncsi.com/ncsi.txt"
+ expectedResponse = "Microsoft NCSI" # Only this - nothing more
+ expectedResponseCode = "200" # Must be this - nothing else
+ label = "MS HTTP"
+ headers = { 'User-Agent' : 'Microsoft NCSI' }
+ content_match, experiment_code = http_content_match(experiment_url, expectedResponse,
+ headers, label)
+ status_match = http_status_code_match(expectedResponseCode,
+ experiment_code)
+ if status_match and content_match:
+ self.logger.info("the %s test returned True" % test_name)
+ return True
+ else:
+ print label + " experiment would conclude that the network is filtered."
+ self.logger.info("the %s test returned False" % test_name)
+ return False
+
+def AppleHTTP_CP_Tests(self):
+ test_name = "Apple HTTP Captive Portal"
+ self.logger.info("RUNNING %s test" % test_name)
+ experiment_url = "http://www.apple.com/library/test/success.html"
+ expectedResponse = "Success" # There is HTML that contains this string
+ expectedResponseCode = "200"
+ label = "Apple HTTP"
+ headers = { 'User-Agent' : 'Mozilla/5.0 (iPhone; U; CPU like Mac OS X; en) '
+ 'AppleWebKit/420+ (KHTML, like Gecko) Version/3.0'
+ ' Mobile/1A543a Safari/419.3' }
+ content_match, experiment_code = http_content_fuzzy_match(
+ experiment_url, expectedResponse, headers)
+ status_match = http_status_code_match(expectedResponseCode,
+ experiment_code)
+ if status_match and content_match:
+ self.logger.info("the %s test returned True" % test_name)
+ return True
+ else:
+ print label + " experiment would conclude that the network is filtered."
+        print label + " content match: " + str(content_match) + " status match: " + str(status_match)
+ self.logger.info("the %s test returned False" % test_name)
+ return False
+
+def WC3_CP_Tests(self):
+ test_name = "W3 Captive Portal"
+ self.logger.info("RUNNING %s test" % test_name)
+ url = "http://tools.ietf.org/html/draft-nottingham-http-portal-02"
+ draftResponseCode = "428"
+ label = "WC3 draft-nottingham-http-portal"
+ response = http_fetch(url, label=label)
+ responseCode = response.code
+    status_match = http_status_code_no_match(responseCode, draftResponseCode)
+    if status_match:
+        self.logger.info("the %s test returned True" % test_name)
+        return True
+    else:
+        print label + " experiment would conclude that the network is filtered."
+        print label + " status match: " + str(status_match)
+ self.logger.info("the %s test returned False" % test_name)
+ return False
+
+# Google ChromeOS fetches this URL in guest mode
+# and expects the user to authenticate.
+def googleChromeOSHTTPTest(self):
+ print "noop"
+ #url = "http://www.google.com/"
+
+def SquidHeader_TransparentHTTP_Tests(self):
+ return search_squid_headers(self)
+
+def SquidBadRequest_TransparentHTTP_Tests(self):
+ return squid_search_bad_request(self)
+
+def SquidCacheobject_TransparentHTTP_Tests(self):
+ return squid_cacheobject_request(self)
+
+
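The googleChromeOSHTTPTest stub above only records the probe URL. A minimal sketch of how such a check could be fleshed out, reusing http_fetch and http_status_code_match from this file (the function name, label, and expected status code here are assumptions, not part of the original patch):

    def googleChromeOS_CP_Tests(self):
        test_name = "Google ChromeOS Captive Portal"
        self.logger.info("RUNNING %s test" % test_name)
        experiment_url = "http://www.google.com/"
        label = "Google ChromeOS"
        # A captive portal typically intercepts the request and serves its own
        # login page; this sketch only checks that the origin answers with a
        # plain 200, which is a much weaker signal than the content checks used
        # by the MS and Apple tests above.
        response = http_fetch(experiment_url, label=label)
        if http_status_code_match("200", response.code):
            self.logger.info("the %s test returned True" % test_name)
            return True
        print label + " experiment would conclude that the network is filtered."
        self.logger.info("the %s test returned False" % test_name)
        return False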
diff --git a/to-be-ported/very-old/ooni/input.py b/to-be-ported/very-old/ooni/input.py
new file mode 100644
index 0000000..c32ab48
--- /dev/null
+++ b/to-be-ported/very-old/ooni/input.py
@@ -0,0 +1,33 @@
+#!/usr/bin/python
+
+class file:
+ def __init__(self, name=None):
+ if name:
+ self.name = name
+
+ def simple(self, name=None):
+ """ Simple file parsing method:
+        Read a file line by line and output an array with all its lines, without newlines
+ """
+ if name:
+ self.name = name
+ output = []
+ try:
+ f = open(self.name, "r")
+ for line in f.readlines():
+ output.append(line.strip())
+ return output
+ except:
+ return output
+
+ def csv(self, name=None):
+ if name:
+ self.name = name
+
+ def yaml(self, name):
+ if name:
+ self.name = name
+
+ def consensus(self, name):
+ if name:
+ self.name = name
diff --git a/to-be-ported/very-old/ooni/namecheck.py b/to-be-ported/very-old/ooni/namecheck.py
new file mode 100644
index 0000000..1a2a3f0
--- /dev/null
+++ b/to-be-ported/very-old/ooni/namecheck.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+#
+# DNS tampering detection module
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+#
+# This module performs multiple DNS tests.
+
+import sys
+import ooni.dnsooni
+
+class DNS():
+ def __init__(self, args):
+ self.in_ = sys.stdin
+ self.out = sys.stdout
+ self.debug = False
+ self.randomize = args.randomize
+
+ def DNS_Tests(self):
+ print "DNS tampering detection:"
+ filter_name = "_DNS_Tests"
+ tests = [ooni.dnsooni]
+ for test in tests:
+ for function_ptr in dir(test):
+ if function_ptr.endswith(filter_name):
+ filter_result = getattr(test, function_ptr)(self)
+ if filter_result == True:
+ print function_ptr + " thinks the network is clean"
+ elif filter_result == None:
+ print function_ptr + " failed"
+ else:
+ print function_ptr + " thinks the network is dirty"
+
+ def main(self):
+ for function_ptr in dir(self):
+ if function_ptr.endswith("_Tests"):
+ getattr(self, function_ptr)()
+
+if __name__ == '__main__':
+ self.main()
diff --git a/to-be-ported/very-old/ooni/plugins/__init__.py b/to-be-ported/very-old/ooni/plugins/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/to-be-ported/very-old/ooni/plugins/dnstest_plgoo.py b/to-be-ported/very-old/ooni/plugins/dnstest_plgoo.py
new file mode 100644
index 0000000..0c0cfa7
--- /dev/null
+++ b/to-be-ported/very-old/ooni/plugins/dnstest_plgoo.py
@@ -0,0 +1,84 @@
+#!/usr/bin/python
+
+import sys
+import re
+from pprint import pprint
+from twisted.internet import reactor, endpoints
+from twisted.names import client
+from ooni.plugooni import Plugoo
+from ooni.socksclient import SOCKSv4ClientProtocol, SOCKSWrapper
+
+class DNSTestPlugin(Plugoo):
+ def __init__(self):
+ self.name = ""
+ self.type = ""
+ self.paranoia = ""
+ self.modules_to_import = []
+ self.output_dir = ""
+ self.buf = ""
+ self.control_response = []
+
+ def response_split(self, response):
+ a = []
+ b = []
+ for i in response:
+ a.append(i[0])
+ b.append(i[1])
+
+ return a,b
+
+ def cb(self, type, hostname, dns_server, value):
+ if self.control_response is None:
+ self.control_response = []
+ if type == 'control' and self.control_response != value:
+ print "%s %s" % (dns_server, value)
+ self.control_response.append((dns_server,value))
+ pprint(self.control_response)
+ if type == 'experiment':
+ pprint(self.control_response)
+ _, res = self.response_split(self.control_response)
+ if value not in res:
+ print "res (%s) : " % value
+ pprint(res)
+ print "---"
+ print "%s appears to be censored on %s (%s != %s)" % (hostname, dns_server, res[0], value)
+
+ else:
+ print "%s appears to be clean on %s" % (hostname, dns_server)
+ self.r2.servers = [('212.245.158.66',53)]
+ print "HN: %s %s" % (hostname, value)
+
+ def err(self, pck, error):
+ pprint(pck)
+ error.printTraceback()
+ reactor.stop()
+ print "error!"
+ pass
+
+ def ooni_main(self, args):
+ self.experimentalproxy = ''
+ self.test_hostnames = ['dio.it']
+ self.control_dns = [('8.8.8.8',53), ('4.4.4.8',53)]
+ self.experiment_dns = [('85.37.17.9',53),('212.245.158.66',53)]
+
+ self.control_res = []
+ self.control_response = None
+
+ self.r1 = client.Resolver(None, [self.control_dns.pop()])
+ self.r2 = client.Resolver(None, [self.experiment_dns.pop()])
+
+ for hostname in self.test_hostnames:
+ for dns_server in self.control_dns:
+ self.r1.servers = [dns_server]
+ f = self.r1.getHostByName(hostname)
+ pck = (hostname, dns_server)
+ f.addCallback(lambda x: self.cb('control', hostname, dns_server, x)).addErrback(lambda x: self.err(pck, x))
+
+ for dns_server in self.experiment_dns:
+ self.r2.servers = [dns_server]
+ pck = (hostname, dns_server)
+ f = self.r2.getHostByName(hostname)
+ f.addCallback(lambda x: self.cb('experiment', hostname, dns_server, x)).addErrback(lambda x: self.err(pck, x))
+
+ reactor.run()
+
diff --git a/to-be-ported/very-old/ooni/plugins/http_plgoo.py b/to-be-ported/very-old/ooni/plugins/http_plgoo.py
new file mode 100644
index 0000000..021e863
--- /dev/null
+++ b/to-be-ported/very-old/ooni/plugins/http_plgoo.py
@@ -0,0 +1,70 @@
+#!/usr/bin/python
+
+import sys
+import re
+from twisted.internet import reactor, endpoints
+from twisted.web import client
+from ooni.plugooni import Plugoo
+from ooni.socksclient import SOCKSv4ClientProtocol, SOCKSWrapper
+
+class HttpPlugin(Plugoo):
+ def __init__(self):
+ self.name = ""
+ self.type = ""
+ self.paranoia = ""
+ self.modules_to_import = []
+ self.output_dir = ""
+ self.buf = ''
+
+ def cb(self, type, content):
+ print "got %d bytes from %s" % (len(content), type) # DEBUG
+ if not self.buf:
+ self.buf = content
+ else:
+ if self.buf == content:
+ print "SUCCESS"
+ else:
+ print "FAIL"
+ reactor.stop()
+
+ def endpoint(self, scheme, host, port):
+ ep = None
+ if scheme == 'http':
+ ep = endpoints.TCP4ClientEndpoint(reactor, host, port)
+        elif scheme == 'https':
+            from twisted.internet import ssl
+            ep = endpoints.SSL4ClientEndpoint(reactor, host, port,
+                                              ssl.ClientContextFactory())
+ return ep
+
+ def ooni_main(self):
+ # We don't have the Command object so cheating for now.
+ url = 'http://check.torproject.org/'
+ self.controlproxy = 'socks4a://127.0.0.1:9050'
+ self.experimentalproxy = ''
+
+ if not re.match("[a-zA-Z0-9]+\:\/\/[a-zA-Z0-9]+", url):
+ return None
+ scheme, host, port, path = client._parse(url)
+
+ ctrl_dest = self.endpoint(scheme, host, port)
+ if not ctrl_dest:
+ raise Exception('unsupported scheme %s in %s' % (scheme, url))
+ if self.controlproxy:
+ _, proxy_host, proxy_port, _ = client._parse(self.controlproxy)
+ control = SOCKSWrapper(reactor, proxy_host, proxy_port, ctrl_dest)
+ else:
+ control = ctrl_dest
+ f = client.HTTPClientFactory(url)
+ f.deferred.addCallback(lambda x: self.cb('control', x))
+ control.connect(f)
+
+ exp_dest = self.endpoint(scheme, host, port)
+ if not exp_dest:
+ raise Exception('unsupported scheme %s in %s' % (scheme, url))
+ # FIXME: use the experiment proxy if there is one
+ experiment = exp_dest
+ f = client.HTTPClientFactory(url)
+ f.deferred.addCallback(lambda x: self.cb('experiment', x))
+ experiment.connect(f)
+
+ reactor.run()
+
diff --git a/to-be-ported/very-old/ooni/plugins/marco_plgoo.py b/to-be-ported/very-old/ooni/plugins/marco_plgoo.py
new file mode 100644
index 0000000..cb63df7
--- /dev/null
+++ b/to-be-ported/very-old/ooni/plugins/marco_plgoo.py
@@ -0,0 +1,377 @@
+#!/usr/bin/python
+# Copyright 2009 The Tor Project, Inc.
+# License at end of file.
+#
+# This tests connections to a list of Tor nodes in a given Tor consensus file
+# while also recording the certificates - it's not a perfect tool but complete
+# or even partial failure should raise alarms.
+#
+# This plugoo uses threads and as a result, it's not friendly to SIGINT signals.
+#
+
+import logging
+import socket
+import time
+import random
+import threading
+import sys
+import os
+try:
+ from ooni.plugooni import Plugoo
+except:
+ print "Error importing Plugoo"
+
+try:
+ from ooni.common import Storage
+except:
+ print "Error importing Storage"
+
+try:
+ from ooni import output
+except:
+ print "Error importing output"
+
+try:
+ from ooni import input
+except:
+    print "Error importing input"
+
+
+
+ssl = OpenSSL = None
+
+try:
+ import ssl
+except ImportError:
+ pass
+
+if ssl is None:
+ try:
+ import OpenSSL.SSL
+ import OpenSSL.crypto
+ except ImportError:
+ pass
+
+if ssl is None and OpenSSL is None:
+    if hasattr(socket, "ssl"):
+ print """Your Python is too old to have the ssl module, and you haven't
+installed pyOpenSSL. I'll try to work with what you've got, but I can't
+record certificates so well."""
+ else:
+ print """Your Python has no OpenSSL support. Upgrade to 2.6, install
+pyOpenSSL, or both."""
+ sys.exit(1)
+
+################################################################
+
+# How many servers should we test in parallel?
+N_THREADS = 16
+
+# How long do we give individual socket operations to succeed or fail?
+# (Seconds)
+TIMEOUT = 10
+
+################################################################
+
+CONNECTING = "noconnect"
+HANDSHAKING = "nohandshake"
+OK = "ok"
+ERROR = "err"
+
+LOCK = threading.RLock()
+socket.setdefaulttimeout(TIMEOUT)
+
+def clean_pem_cert(cert):
+ idx = cert.find('-----END')
+ if idx > 1 and cert[idx-1] != '\n':
+ cert = cert.replace('-----END','\n-----END')
+ return cert
+
+def record((addr,port), state, extra=None, cert=None):
+ LOCK.acquire()
+ try:
+ OUT.append({'addr' : addr,
+ 'port' : port,
+ 'state' : state,
+ 'extra' : extra})
+ if cert:
+ CERT_OUT.append({'addr' : addr,
+ 'port' : port,
+ 'clean_cert' : clean_pem_cert(cert)})
+ finally:
+ LOCK.release()
+
+def probe(address,theCtx=None):
+ sock = s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ logging.info("Opening socket to %s",address)
+ try:
+ s.connect(address)
+ except IOError, e:
+ logging.info("Error %s from socket connect.",e)
+ record(address, CONNECTING, e)
+ s.close()
+ return
+ logging.info("Socket to %s open. Launching SSL handshake.",address)
+ if ssl:
+ try:
+ s = ssl.wrap_socket(s,cert_reqs=ssl.CERT_NONE,ca_certs=None)
+ # "MARCO!"
+ s.do_handshake()
+ except IOError, e:
+ logging.info("Error %s from ssl handshake",e)
+ record(address, HANDSHAKING, e)
+ s.close()
+ sock.close()
+ return
+ cert = s.getpeercert(True)
+ if cert != None:
+ cert = ssl.DER_cert_to_PEM_cert(cert)
+ elif OpenSSL:
+ try:
+ s = OpenSSL.SSL.Connection(theCtx, s)
+ s.set_connect_state()
+ s.setblocking(True)
+ s.do_handshake()
+ cert = s.get_peer_certificate()
+ if cert != None:
+ cert = OpenSSL.crypto.dump_certificate(
+ OpenSSL.crypto.FILETYPE_PEM, cert)
+ except IOError, e:
+ logging.info("Error %s from OpenSSL handshake",e)
+ record(address, HANDSHAKING, e)
+ s.close()
+ sock.close()
+ return
+ else:
+ try:
+ s = socket.ssl(s)
+ s.write('a')
+ cert = s.server()
+ except IOError, e:
+ logging.info("Error %s from socket.ssl handshake",e)
+ record(address, HANDSHAKING, e)
+ sock.close()
+ return
+
+ logging.info("SSL handshake with %s finished",address)
+ # "POLO!"
+ record(address,OK, cert=cert)
+ if (ssl or OpenSSL):
+ s.close()
+ sock.close()
+
+def parseNetworkstatus(ns):
+ for line in ns:
+ if line.startswith('r '):
+ r = line.split()
+ yield (r[-3],int(r[-2]))
+
+def parseCachedDescs(cd):
+ for line in cd:
+ if line.startswith('router '):
+ r = line.split()
+ yield (r[2],int(r[3]))
+
+def worker(addrList, origLength):
+ done = False
+ logging.info("Launching thread.")
+
+ if OpenSSL is not None:
+ context = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_METHOD)
+ else:
+ context = None
+
+ while True:
+ LOCK.acquire()
+ try:
+ if addrList:
+ print "Starting test %d/%d"%(
+ 1+origLength-len(addrList),origLength)
+ addr = addrList.pop()
+ else:
+ return
+ finally:
+ LOCK.release()
+
+ try:
+ logging.info("Launching probe for %s",addr)
+ probe(addr, context)
+ except Exception, e:
+ logging.info("Unexpected error from %s",addr)
+ record(addr, ERROR, e)
+
+def runThreaded(addrList, nThreads):
+ ts = []
+ origLen = len(addrList)
+ for num in xrange(nThreads):
+ t = threading.Thread(target=worker, args=(addrList,origLen))
+ t.setName("Th#%s"%num)
+ ts.append(t)
+ t.start()
+ for t in ts:
+ logging.info("Joining thread %s",t.getName())
+ t.join()
+
+def main(self, args):
+ # BEGIN
+ # This logic should be present in more or less all plugoos
+ global OUT
+ global CERT_OUT
+ global OUT_DATA
+ global CERT_OUT_DATA
+ OUT_DATA = []
+ CERT_OUT_DATA = []
+
+ try:
+ OUT = output.data(name=args.output.main) #open(args.output.main, 'w')
+ except:
+        print "No output file given, quitting..."
+ return -1
+
+ try:
+ CERT_OUT = output.data(args.output.certificates) #open(args.output.certificates, 'w')
+ except:
+        print "No output cert file given, quitting..."
+ return -1
+
+ logging.basicConfig(format='%(asctime)s [%(levelname)s] [%(threadName)s] %(message)s',
+ datefmt="%b %d %H:%M:%S",
+ level=logging.INFO,
+ filename=args.log)
+ logging.info("============== STARTING NEW LOG")
+ # END
+
+ if ssl is not None:
+ methodName = "ssl"
+ elif OpenSSL is not None:
+ methodName = "OpenSSL"
+ else:
+ methodName = "socket"
+ logging.info("Running marco with method '%s'", methodName)
+
+ addresses = []
+
+ if args.input.ips:
+ for fn in input.file(args.input.ips).simple():
+ a, b = fn.split(":")
+ addresses.append( (a,int(b)) )
+
+ elif args.input.consensus:
+ for fn in args:
+ print fn
+ for a,b in parseNetworkstatus(open(args.input.consensus)):
+ addresses.append( (a,b) )
+
+ if args.input.randomize:
+ # Take a random permutation of the set the knuth way!
+ for i in range(0, len(addresses)):
+ j = random.randint(0, i)
+ addresses[i], addresses[j] = addresses[j], addresses[i]
+
+ if len(addresses) == 0:
+        logging.error("No input source given, quitting...")
+ return -1
+
+ addresses = list(addresses)
+
+ if not args.input.randomize:
+ addresses.sort()
+
+ runThreaded(addresses, N_THREADS)
+
+class MarcoPlugin(Plugoo):
+ def __init__(self):
+ self.name = ""
+
+ self.modules = [ "logging", "socket", "time", "random", "threading", "sys",
+ "OpenSSL.SSL", "OpenSSL.crypto", "os" ]
+
+ self.input = Storage()
+ self.input.ip = None
+ try:
+ c_file = os.path.expanduser("~/.tor/cached-consensus")
+ open(c_file)
+ self.input.consensus = c_file
+ except:
+ pass
+
+ try:
+ c_file = os.path.expanduser("~/tor/bundle/tor-browser_en-US/Data/Tor/cached-consensus")
+ open(c_file)
+ self.input.consensus = c_file
+ except:
+ pass
+
+ if not self.input.consensus:
+ print "Error importing consensus file"
+ sys.exit(1)
+
+ self.output = Storage()
+ self.output.main = 'reports/marco-1.yamlooni'
+ self.output.certificates = 'reports/marco_certs-1.out'
+
+ # XXX This needs to be moved to a proper function
+ # refactor, refactor and ... refactor!
+ if os.path.exists(self.output.main):
+ basedir = "/".join(self.output.main.split("/")[:-1])
+ fn = self.output.main.split("/")[-1].split(".")
+ ext = fn[1]
+ name = fn[0].split("-")[0]
+ i = fn[0].split("-")[1]
+ i = int(i) + 1
+ self.output.main = os.path.join(basedir, name + "-" + str(i) + "." + ext)
+
+ if os.path.exists(self.output.certificates):
+ basedir = "/".join(self.output.certificates.split("/")[:-1])
+ fn = self.output.certificates.split("/")[-1].split(".")
+ ext = fn[1]
+ name = fn[0].split("-")[0]
+ i = fn[0].split("-")[1]
+ i = int(i) + 1
+ self.output.certificates= os.path.join(basedir, name + "-" + str(i) + "." + ext)
+
+ # We require for Tor to already be running or have recently run
+ self.args = Storage()
+ self.args.input = self.input
+ self.args.output = self.output
+ self.args.log = 'reports/marco.log'
+
+ def ooni_main(self, cmd):
+ self.args.input.randomize = cmd.randomize
+ self.args.input.ips = cmd.listfile
+ main(self, self.args)
+
+if __name__ == '__main__':
+ if len(sys.argv) < 2:
+ print >> sys.stderr, ("This script takes one or more networkstatus "
+ "files as an argument.")
+ self = None
+ main(self, sys.argv[1:])
+
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#
+# * Neither the names of the copyright owners nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/to-be-ported/very-old/ooni/plugins/proxy_plgoo.py b/to-be-ported/very-old/ooni/plugins/proxy_plgoo.py
new file mode 100644
index 0000000..d175c1c
--- /dev/null
+++ b/to-be-ported/very-old/ooni/plugins/proxy_plgoo.py
@@ -0,0 +1,69 @@
+#!/usr/bin/python
+
+import sys
+from twisted.internet import reactor, endpoints
+from twisted.web import client
+from ooni.plugooni import Plugoo
+from ooni.socksclient import SOCKSv4ClientProtocol, SOCKSWrapper
+
+class HttpPlugin(Plugoo):
+ def __init__(self):
+ self.name = ""
+ self.type = ""
+ self.paranoia = ""
+ self.modules_to_import = []
+ self.output_dir = ""
+ self.buf = ''
+
+ def cb(self, type, content):
+ print "got %d bytes from %s" % (len(content), type) # DEBUG
+ if not self.buf:
+ self.buf = content
+ else:
+ if self.buf == content:
+ print "SUCCESS"
+ else:
+ print "FAIL"
+ reactor.stop()
+
+ def endpoint(self, scheme, host, port):
+ ep = None
+ if scheme == 'http':
+ ep = endpoints.TCP4ClientEndpoint(reactor, host, port)
+ elif scheme == 'https':
+ from twisted.internet import ssl
+ ep = endpoints.SSL4ClientEndpoint(reactor, host, port,
+ ssl.ClientContextFactory())
+ return ep
+
+ def ooni_main(self, cmd):
+ # We don't have the Command object so cheating for now.
+ url = cmd.hostname
+
+ # FIXME: validate that url is on the form scheme://host[:port]/path
+ scheme, host, port, path = client._parse(url)
+
+ ctrl_dest = self.endpoint(scheme, host, port)
+ if not ctrl_dest:
+ raise Exception('unsupported scheme %s in %s' % (scheme, url))
+ if cmd.controlproxy:
+ assert scheme != 'https', "no support for proxied https atm, sorry"
+ _, proxy_host, proxy_port, _ = client._parse(cmd.controlproxy)
+ control = SOCKSWrapper(reactor, proxy_host, proxy_port, ctrl_dest)
+ print "proxy: ", proxy_host, proxy_port
+ else:
+ control = ctrl_dest
+ f = client.HTTPClientFactory(url)
+ f.deferred.addCallback(lambda x: self.cb('control', x))
+ control.connect(f)
+
+ exp_dest = self.endpoint(scheme, host, port)
+ if not exp_dest:
+ raise Exception('unsupported scheme %s in %s' % (scheme, url))
+ # FIXME: use the experiment proxy if there is one
+ experiment = exp_dest
+ f = client.HTTPClientFactory(url)
+ f.deferred.addCallback(lambda x: self.cb('experiment', x))
+ experiment.connect(f)
+
+ reactor.run()
diff --git a/to-be-ported/very-old/ooni/plugins/simple_dns_plgoo.py b/to-be-ported/very-old/ooni/plugins/simple_dns_plgoo.py
new file mode 100644
index 0000000..87d3684
--- /dev/null
+++ b/to-be-ported/very-old/ooni/plugins/simple_dns_plgoo.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+#
+# DNS tampering detection module
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+#
+# This module performs DNS queries against a known good resolver and a possible
+# bad resolver. We compare every resolved name against a list of known filters
+# - if we match, we ring a bell; otherwise, we list possible filter IP
+# addresses. There is a high false positive rate for sites that are GeoIP load
+# balanced.
+#
+
+import sys
+import ooni.dnsooni
+
+from ooni.plugooni import Plugoo
+
+class DNSBulkPlugin(Plugoo):
+ def __init__(self):
+ self.in_ = sys.stdin
+ self.out = sys.stdout
+ self.randomize = True # Pass this down properly
+ self.debug = False
+
+ def DNS_Tests(self):
+ print "DNS tampering detection for list of domains:"
+        tests = self.get_tests_by_filter(("_DNS_BULK_Tests",), (ooni.dnsooni,))
+ self.run_tests(tests)
+
+ def magic_main(self):
+ self.run_plgoo_tests("_Tests")
+
+ def ooni_main(self, args):
+ self.magic_main()
+
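The header comment of simple_dns_plgoo.py describes the core comparison: resolve each name with a known-good resolver and with the resolver under test, and flag differing answers. A minimal illustration using dnspython (which the old traceroute plugin also relies on); the resolver addresses and function names here are assumptions, not part of the original patch:

    import dns.resolver  # dnspython

    def resolve_with(server, hostname):
        r = dns.resolver.Resolver(configure=False)
        r.nameservers = [server]
        return sorted(rr.address for rr in r.query(hostname, "A"))

    def looks_tampered(hostname, good_server, test_server):
        # Differing answer sets are only a hint: GeoIP load balancing causes
        # false positives, as the module's own header warns.
        return resolve_with(good_server, hostname) != resolve_with(test_server, hostname)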
diff --git a/to-be-ported/very-old/ooni/plugins/tcpcon_plgoo.py b/to-be-ported/very-old/ooni/plugins/tcpcon_plgoo.py
new file mode 100644
index 0000000..01dee81
--- /dev/null
+++ b/to-be-ported/very-old/ooni/plugins/tcpcon_plgoo.py
@@ -0,0 +1,278 @@
+#!/usr/bin/python
+# Copyright 2011 The Tor Project, Inc.
+# License at end of file.
+#
+# This is a modified version of the marco plugoo. Given a list of #
+# IP:port addresses, this plugoo will attempt a TCP connection with each
+# host and write the results to a .yamlooni file.
+#
+# This plugoo uses threads and as a result, it's not friendly to SIGINT signals.
+#
+
+import logging
+import socket
+import time
+import random
+import threading
+import sys
+import os
+try:
+ from ooni.plugooni import Plugoo
+except:
+ print "Error importing Plugoo"
+
+try:
+ from ooni.common import Storage
+except:
+ print "Error importing Storage"
+
+try:
+ from ooni import output
+except:
+ print "Error importing output"
+
+try:
+ from ooni import input
+except:
+    print "Error importing input"
+
+################################################################
+
+# How many servers should we test in parallel?
+N_THREADS = 16
+
+# How long do we give individual socket operations to succeed or fail?
+# (Seconds)
+TIMEOUT = 10
+
+################################################################
+
+CONNECTING = "noconnect"
+OK = "ok"
+ERROR = "err"
+
+LOCK = threading.RLock()
+socket.setdefaulttimeout(TIMEOUT)
+
+# We will want to log the IP address, the port and the state
+def record((addr,port), state, extra=None):
+ LOCK.acquire()
+ try:
+ OUT.append({'addr' : addr,
+ 'port' : port,
+ 'state' : state,
+ 'extra' : extra})
+ finally:
+ LOCK.release()
+
+# For each IP address in the list, open a socket, write to the log and
+# then close the socket
+def probe(address,theCtx=None):
+ sock = s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ logging.info("Opening socket to %s",address)
+ try:
+ s.connect(address)
+ except IOError, e:
+ logging.info("Error %s from socket connect.",e)
+ record(address, CONNECTING, e)
+ s.close()
+ return
+ logging.info("Socket to %s open. Successfully launched TCP handshake.",address)
+ record(address, OK)
+ s.close()
+
+def parseNetworkstatus(ns):
+ for line in ns:
+ if line.startswith('r '):
+ r = line.split()
+ yield (r[-3],int(r[-2]))
+
+def parseCachedDescs(cd):
+ for line in cd:
+ if line.startswith('router '):
+ r = line.split()
+ yield (r[2],int(r[3]))
+
+def worker(addrList, origLength):
+ done = False
+ context = None
+
+ while True:
+ LOCK.acquire()
+ try:
+ if addrList:
+ print "Starting test %d/%d"%(
+ 1+origLength-len(addrList),origLength)
+ addr = addrList.pop()
+ else:
+ return
+ finally:
+ LOCK.release()
+
+ try:
+ logging.info("Launching probe for %s",addr)
+ probe(addr, context)
+ except Exception, e:
+ logging.info("Unexpected error from %s",addr)
+ record(addr, ERROR, e)
+
+def runThreaded(addrList, nThreads):
+ ts = []
+ origLen = len(addrList)
+ for num in xrange(nThreads):
+ t = threading.Thread(target=worker, args=(addrList,origLen))
+ t.setName("Th#%s"%num)
+ ts.append(t)
+ t.start()
+ for t in ts:
+ t.join()
+
+def main(self, args):
+ # BEGIN
+ # This logic should be present in more or less all plugoos
+ global OUT
+ global OUT_DATA
+ OUT_DATA = []
+
+ try:
+ OUT = output.data(name=args.output.main) #open(args.output.main, 'w')
+ except:
+        print "No output file given, quitting..."
+ return -1
+
+ logging.basicConfig(format='%(asctime)s [%(levelname)s] [%(threadName)s] %(message)s',
+ datefmt="%b %d %H:%M:%S",
+ level=logging.INFO,
+ filename=args.log)
+ logging.info("============== STARTING NEW LOG")
+ # END
+
+ methodName = "socket"
+ logging.info("Running tcpcon with method '%s'", methodName)
+
+ addresses = []
+
+ if args.input.ips:
+ for fn in input.file(args.input.ips).simple():
+ a, b = fn.split(":")
+ addresses.append( (a,int(b)) )
+
+ elif args.input.consensus:
+ for fn in args:
+ print fn
+ for a,b in parseNetworkstatus(open(args.input.consensus)):
+ addresses.append( (a,b) )
+
+ if args.input.randomize:
+ # Take a random permutation of the set the knuth way!
+ for i in range(0, len(addresses)):
+ j = random.randint(0, i)
+ addresses[i], addresses[j] = addresses[j], addresses[i]
+
+ if len(addresses) == 0:
+        logging.error("No input source given, quitting...")
+ return -1
+
+ addresses = list(addresses)
+
+ if not args.input.randomize:
+ addresses.sort()
+
+ runThreaded(addresses, N_THREADS)
+
+class MarcoPlugin(Plugoo):
+ def __init__(self):
+ self.name = ""
+
+ self.modules = [ "logging", "socket", "time", "random", "threading", "sys",
+ "os" ]
+
+ self.input = Storage()
+ self.input.ip = None
+ try:
+ c_file = os.path.expanduser("~/.tor/cached-consensus")
+ open(c_file)
+ self.input.consensus = c_file
+ except:
+ pass
+
+ try:
+ c_file = os.path.expanduser("~/tor/bundle/tor-browser_en-US/Data/Tor/cached-consensus")
+ open(c_file)
+ self.input.consensus = c_file
+ except:
+ pass
+
+ if not self.input.consensus:
+ print "Error importing consensus file"
+ sys.exit(1)
+
+ self.output = Storage()
+ self.output.main = 'reports/tcpcon-1.yamlooni'
+ self.output.certificates = 'reports/tcpcon_certs-1.out'
+
+ # XXX This needs to be moved to a proper function
+ # refactor, refactor and ... refactor!
+ if os.path.exists(self.output.main):
+ basedir = "/".join(self.output.main.split("/")[:-1])
+ fn = self.output.main.split("/")[-1].split(".")
+ ext = fn[1]
+ name = fn[0].split("-")[0]
+ i = fn[0].split("-")[1]
+ i = int(i) + 1
+ self.output.main = os.path.join(basedir, name + "-" + str(i) + "." + ext)
+
+ if os.path.exists(self.output.certificates):
+ basedir = "/".join(self.output.certificates.split("/")[:-1])
+ fn = self.output.certificates.split("/")[-1].split(".")
+ ext = fn[1]
+ name = fn[0].split("-")[0]
+ i = fn[0].split("-")[1]
+ i = int(i) + 1
+ self.output.certificates= os.path.join(basedir, name + "-" + str(i) + "." + ext)
+
+ # We require for Tor to already be running or have recently run
+ self.args = Storage()
+ self.args.input = self.input
+ self.args.output = self.output
+ self.args.log = 'reports/tcpcon.log'
+
+ def ooni_main(self, cmd):
+ self.args.input.randomize = cmd.randomize
+ self.args.input.ips = cmd.listfile
+ main(self, self.args)
+
+if __name__ == '__main__':
+ if len(sys.argv) < 2:
+ print >> sys.stderr, ("This script takes one or more networkstatus "
+ "files as an argument.")
+ self = None
+ main(self, sys.argv[1:])
+
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#
+# * Neither the names of the copyright owners nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/to-be-ported/very-old/ooni/plugins/tor.py b/to-be-ported/very-old/ooni/plugins/tor.py
new file mode 100644
index 0000000..0d95d4d
--- /dev/null
+++ b/to-be-ported/very-old/ooni/plugins/tor.py
@@ -0,0 +1,80 @@
+import re
+import os.path
+import signal
+import subprocess
+import socket
+import threading
+import time
+import logging
+
+from pytorctl import TorCtl
+
+torrc = os.path.join(os.getcwd(),'torrc') #os.path.join(projroot, 'globaleaks', 'tor', 'torrc')
+# hiddenservice = os.path.join(projroot, 'globaleaks', 'tor', 'hiddenservice')
+
+class ThreadProc(threading.Thread):
+ def __init__(self, cmd):
+ threading.Thread.__init__(self)
+ self.cmd = cmd
+ self.proc = None
+
+ def run(self):
+ print "running"
+ try:
+ self.proc = subprocess.Popen(self.cmd,
+ shell = False, stdout = subprocess.PIPE,
+ stderr = subprocess.PIPE)
+
+ except OSError:
+ logging.fatal('cannot execute command')
+
+class Tor:
+ def __init__(self):
+ self.start()
+
+ def check(self):
+ conn = TorCtl.connect()
+ if conn != None:
+ conn.close()
+ return True
+
+ return False
+
+
+ def start(self):
+ if not os.path.exists(torrc):
+ raise OSError("torrc doesn't exist (%s)" % torrc)
+
+ tor_cmd = ["tor", "-f", torrc]
+
+        torproc = ThreadProc(tor_cmd)
+        torproc.start()  # run in its own thread; the loop below polls for the child process
+
+ bootstrap_line = re.compile("Bootstrapped 100%: ")
+
+ while True:
+ if torproc.proc == None:
+ time.sleep(1)
+ continue
+
+ init_line = torproc.proc.stdout.readline().strip()
+
+ if not init_line:
+ torproc.proc.kill()
+ return False
+
+ if bootstrap_line.search(init_line):
+ break
+
+ return True
+
+ def stop(self):
+ if not self.check():
+ return
+
+ conn = TorCtl.connect()
+ if conn != None:
+ conn.send_signal("SHUTDOWN")
+ conn.close()
+
+t = Tor()
diff --git a/to-be-ported/very-old/ooni/plugins/torrc b/to-be-ported/very-old/ooni/plugins/torrc
new file mode 100644
index 0000000..b9ffc80
--- /dev/null
+++ b/to-be-ported/very-old/ooni/plugins/torrc
@@ -0,0 +1,9 @@
+SocksPort 9050
+ControlPort 9051
+VirtualAddrNetwork 10.23.47.0/10
+AutomapHostsOnResolve 1
+TransPort 9040
+TransListenAddress 127.0.0.1
+DNSPort 5353
+DNSListenAddress 127.0.0.1
+
diff --git a/to-be-ported/very-old/ooni/plugooni.py b/to-be-ported/very-old/ooni/plugooni.py
new file mode 100644
index 0000000..17f17b3
--- /dev/null
+++ b/to-be-ported/very-old/ooni/plugooni.py
@@ -0,0 +1,106 @@
+#!/usr/bin/env python
+#
+# Plugooni, ooni plugin module for loading plgoo files.
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+# Arturo Filasto' <art(a)fuffa.org>
+
+import sys
+import os
+
+import imp, pkgutil, inspect
+
+class Plugoo:
+ def __init__(self, name, plugin_type, paranoia, author):
+ self.name = name
+ self.author = author
+ self.type = plugin_type
+ self.paranoia = paranoia
+
+ """
+ Expect a tuple of strings in 'filters' and a tuple of ooni 'plugins'.
+ Return a list of (plugin, function) tuples that match 'filter' in 'plugins'.
+ """
+ def get_tests_by_filter(self, filters, plugins):
+ ret_functions = []
+
+ for plugin in plugins:
+ for function_ptr in dir(plugin):
+ if function_ptr.endswith(filters):
+ ret_functions.append((plugin,function_ptr))
+ return ret_functions
+
+ """
+ Expect a list of (plugin, function) tuples that must be ran, and three strings 'clean'
+ 'dirty' and 'failed'.
+ Run the tests and print 'clean','dirty' or 'failed' according to the test result.
+ """
+ def run_tests(self, tests, clean="clean", dirty="dirty", failed="failed"):
+ for test in tests:
+ filter_result = getattr(test[0], test[1])(self)
+ if filter_result == True:
+ print test[1] + ": " + clean
+ elif filter_result == None:
+ print test[1] + ": " + failed
+ else:
+ print test[1] + ": " + dirty
+
+ """
+ Find all the tests belonging to plgoo 'self' and run them.
+ We know the tests when we see them because they end in 'filter'.
+ """
+ def run_plgoo_tests(self, filter):
+ for function_ptr in dir(self):
+ if function_ptr.endswith(filter):
+ getattr(self, function_ptr)()
+
+PLUGIN_PATHS = [os.path.join(os.getcwd(), "ooni", "plugins")]
+RESERVED_NAMES = [ "skel_plgoo" ]
+
+class Plugooni():
+ def __init__(self, args):
+ self.in_ = sys.stdin
+ self.out = sys.stdout
+ self.debug = False
+ self.loadall = True
+ self.plugin_name = args.plugin_name
+ self.listfile = args.listfile
+
+ self.plgoo_found = False
+
+ # Print all the plugoons to stdout.
+ def list_plugoons(self):
+ print "Plugooni list:"
+ for loader, name, ispkg in pkgutil.iter_modules(PLUGIN_PATHS):
+ if name not in RESERVED_NAMES:
+ print "\t%s" %(name.split("_")[0])
+
+ # Return name of the plgoo class of a plugin.
+ # We know because it always ends with "Plugin".
+ def get_plgoo_class(self,plugin):
+ for memb_name, memb in inspect.getmembers(plugin, inspect.isclass):
+ if memb.__name__.endswith("Plugin"):
+ return memb
+
+ # This function is responsible for loading and running the plugoons
+ # the user wants to run.
+ def run(self, command_object):
+ print "Plugooni: the ooni plgoo plugin module loader"
+
+ # iterate all modules
+ for loader, name, ispkg in pkgutil.iter_modules(PLUGIN_PATHS):
+ # see if this module should be loaded
+ if (self.plugin_name == "all") or (name == self.plugin_name+"_plgoo"):
+ self.plgoo_found = True # we found at least one plgoo!
+
+ file, pathname, desc = imp.find_module(name, PLUGIN_PATHS)
+ # load module
+ plugin = imp.load_module(name, file, pathname, desc)
+ # instantiate plgoo class and call its ooni_main()
+ self.get_plgoo_class(plugin)().ooni_main(command_object)
+
+ # if we couldn't find the plgoo; whine to the user
+ if self.plgoo_found is False:
+ print "Plugooni could not find plugin '%s'!" %(self.plugin_name)
+
+if __name__ == '__main__':
+ self.main()
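For reference, the loader above is driven by a command object exposing at least plugin_name and listfile; a minimal sketch of invoking it (the Cmd class and its attribute values are assumptions, not part of the original patch):

    class Cmd(object):
        plugin_name = "dnstest"   # would load ooni/plugins/dnstest_plgoo.py
        listfile = None
        randomize = False

    cmd = Cmd()
    plugooni = Plugooni(cmd)
    plugooni.list_plugoons()   # print the available plgoos
    plugooni.run(cmd)          # load the plgoo and call its ooni_main(cmd)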
diff --git a/to-be-ported/very-old/ooni/transparenthttp.py b/to-be-ported/very-old/ooni/transparenthttp.py
new file mode 100644
index 0000000..311fb32
--- /dev/null
+++ b/to-be-ported/very-old/ooni/transparenthttp.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+#
+# Captive Portal Detection With Multi-Vendor Emulation
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+#
+# This module performs multiple tests that match specific vendor
+# mitm proxies
+
+import sys
+import ooni.http
+import ooni.report
+
+class TransparentHTTPProxy():
+ def __init__(self, args):
+ self.in_ = sys.stdin
+ self.out = sys.stdout
+ self.debug = False
+ self.logger = ooni.report.Log().logger
+
+ def TransparentHTTPProxy_Tests(self):
+ print "Transparent HTTP Proxy:"
+ filter_name = "_TransparentHTTP_Tests"
+ tests = [ooni.http]
+ for test in tests:
+ for function_ptr in dir(test):
+ if function_ptr.endswith(filter_name):
+ filter_result = getattr(test, function_ptr)(self)
+ if filter_result == True:
+ print function_ptr + " thinks the network is clean"
+ elif filter_result == None:
+ print function_ptr + " failed"
+ else:
+ print function_ptr + " thinks the network is dirty"
+
+ def main(self):
+ for function_ptr in dir(self):
+ if function_ptr.endswith("_Tests"):
+ getattr(self, function_ptr)()
+
+if __name__ == '__main__':
+ self.main()
diff --git a/to-be-ported/very-old/traceroute.py b/to-be-ported/very-old/traceroute.py
new file mode 100644
index 0000000..e8252c1
--- /dev/null
+++ b/to-be-ported/very-old/traceroute.py
@@ -0,0 +1,108 @@
+try:
+ from dns import resolver
+except:
+ print "Error: dnspython is not installed (http://www.dnspython.org/)"
+import gevent
+import os
+import plugoo
+
+try:
+ import scapy
+except:
+ print "Error: traceroute plugin requires scapy to be installed (http://www.secdev.org/projects/scapy)"
+
+from plugoo.assets import Asset
+from plugoo.tests import Test
+
+import socket
+
+__plugoo__ = "Traceroute"
+__desc__ = "Performs TTL walking tests"
+
+class TracerouteAsset(Asset):
+ def __init__(self, file=None):
+        Asset.__init__(self, file)
+
+
+class Traceroute(Test):
+ """A *very* quick and dirty traceroute implementation, UDP and TCP
+ """
+ def traceroute(self, dst, dst_port=3880, src_port=3000, proto="tcp", max_hops=30):
+ dest_addr = socket.gethostbyname(dst)
+ print "Doing traceroute on %s" % dst
+
+ recv = socket.getprotobyname('icmp')
+ send = socket.getprotobyname(proto)
+ ttl = 1
+ while True:
+ recv_sock = socket.socket(socket.AF_INET, socket.SOCK_RAW, recv)
+ if proto == "tcp":
+ send_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, send)
+ else:
+ send_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, send)
+ recv_sock.settimeout(10)
+ send_sock.settimeout(10)
+
+ send_sock.setsockopt(socket.SOL_IP, socket.IP_TTL, ttl)
+ recv_sock.bind(("", src_port))
+ if proto == "tcp":
+ try:
+ send_sock.settimeout(2)
+ send_sock.connect((dst, dst_port))
+ except socket.timeout:
+ pass
+
+ except Exception, e:
+ print "Error doing connect %s" % e
+ else:
+ send_sock.sendto("", (dst, dst_port))
+
+ curr_addr = None
+ try:
+ print "receiving data..."
+ _, curr_addr = recv_sock.recvfrom(512)
+ curr_addr = curr_addr[0]
+
+ except socket.error, e:
+ print "SOCKET ERROR: %s" % e
+
+ except Exception, e:
+ print "ERROR: %s" % e
+
+ finally:
+ send_sock.close()
+ recv_sock.close()
+
+ if curr_addr is not None:
+ curr_host = "%s" % curr_addr
+ else:
+ curr_host = "*"
+
+ print "%d\t%s" % (ttl, curr_host)
+
+ if curr_addr == dest_addr or ttl > max_hops:
+ break
+
+ ttl += 1
+
+
+ def experiment(self, *a, **kw):
+ # this is just a dirty hack
+ address = kw['data'][0]
+
+ self.traceroute(address)
+
+def run(ooni):
+ """Run the test"""
+ config = ooni.config
+ urls = []
+
+ traceroute_experiment = TracerouteAsset(os.path.join(config.main.assetdir, \
+ config.tests.traceroute))
+
+ assets = [traceroute_experiment]
+
+ traceroute = Traceroute(ooni)
+ ooni.logger.info("starting traceroute test")
+ traceroute.run(assets)
+ ooni.logger.info("finished")