tor-commits
November 2012
- 18 participants
- 1508 discussions

[ooni-probe/master] * Keeping bridge thing in the ooni/bridget directory. Some of these are
by isis@torproject.org 03 Nov '12
commit d257a577cdc72967076a49784deba2468057d7fb
Author: Isis Lovecruft <isis@torproject.org>
Date: Fri Nov 2 10:15:05 2012 +0000
* Keeping bridge thing in the ooni/bridget directory. Some of these are
generic enough that they should be in nettests, but I remember something
about git hating on symlinks...
---
nettests/core/echo.py | 205 -----------------------------------
ooni/bridget/tests/echo.py | 205 +++++++++++++++++++++++++++++++++++
ooni/bridget/tests/tls-handshake.py | 32 ++++++
ooni/tls-handshake.py | 32 ------
4 files changed, 237 insertions(+), 237 deletions(-)
diff --git a/nettests/core/echo.py b/nettests/core/echo.py
deleted file mode 100644
index a0826b6..0000000
--- a/nettests/core/echo.py
+++ /dev/null
@@ -1,205 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-#
-# +---------+
-# | echo.py |
-# +---------+
-# A simple ICMP-8 ping test.
-#
-# :author: Isis Lovecruft
-# :version: 0.0.1-pre-alpha
-# :license: (c) 2012 Isis Lovecruft
-# see attached LICENCE file
-#
-
-import os
-import sys
-
-from pprint import pprint
-
-from twisted.internet import reactor
-from twisted.plugin import IPlugin
-from twisted.python import usage
-from ooni.nettest import TestCase
-from ooni.utils import log, Storage
-from ooni.utils.net import PermissionsError, IfaceError
-
-try:
- from scapy.all import sr1, IP, ICMP ## XXX v4/v6?
- from ooni.lib import txscapy
- from ooni.lib.txscapy import txsr, txsend
- from ooni.templates.scapyt import ScapyTest
-except:
- log.msg("This test requires scapy, see www.secdev.org/projects/scapy")
-
-## xxx TODO: move these to a utility function for determining OSes
-LINUX=sys.platform.startswith("linux")
-OPENBSD=sys.platform.startswith("openbsd")
-FREEBSD=sys.platform.startswith("freebsd")
-NETBSD=sys.platform.startswith("netbsd")
-DARWIN=sys.platform.startswith("darwin")
-SOLARIS=sys.platform.startswith("sunos")
-WINDOWS=sys.platform.startswith("win32")
-
-class EchoTest(ScapyTest):
- """
- xxx fill me in
- """
- name = 'echo'
- author = 'Isis Lovecruft <isis@torproject.org>'
- description = 'A simple ICMP-8 test to see if a host is reachable.'
- version = '0.0.1'
- inputFile = ['file', 'f', None, 'File of list of IPs to ping']
- requirements = None
- report = Storage()
-
- optParameters = [
- ['interface', 'i', None, 'Network interface to use'],
- ['count', 'c', 5, 'Number of packets to send', int],
- ['size', 's', 56, 'Number of bytes to send in ICMP data field', int],
- ['ttl', 'l', 25, 'Set the IP Time to Live', int],
- ['timeout', 't', 2, 'Seconds until timeout if no response', int],
- ['pcap', 'p', None, 'Save pcap to this file'],
- ['receive', 'r', True, 'Receive response packets']
- ]
-
- def setUpClass(self, *a, **kw):
- '''
- :ivar ifaces:
- Struct returned from getifaddrs(3) and turned into a tuple in the
- form (*ifa_name, AF_FAMILY, *ifa_addr)
- '''
- super(EchoTest, self).__init__(*a, **kw)
-
- ## allow subclasses which register/implement external classes
- ## to define their own reactor without overrides:
- if not hasattr(super(EchoTest, self), 'reactor'):
- log.debug("%s test: Didn't find reactor!" % self.name)
- self.reactor = reactor
-
- if self.localOptions:
- log.debug("%s localOptions found" % self.name)
- log.debug("%s test options: %s" % (self.name, self.subOptions))
- self.local_options = self.localOptions.parseOptions(self.subOptions)
- for key, value in self.local_options:
- log.debug("Set attribute %s[%s] = %s" % (self.name, key, value))
- setattr(self, key, value)
-
- ## xxx is this now .subOptions?
- #self.inputFile = self.localOptions['file']
- self.timeout *= 1000 ## convert to milliseconds
-
- if not self.interface:
- log.msg("No network interface specified!")
- log.debug("OS detected: %s" % sys.platform)
- if LINUX or OPENBSD or NETBSD or FREEBSD or DARWIN or SOLARIS:
- from twisted.internet.test import _posixifaces
- log.msg("Attempting to discover network interfaces...")
- ifaces = _posixifaces._interfaces()
- elif WINDOWS:
- from twisted.internet.test import _win32ifaces
- log.msg("Attempting to discover network interfaces...")
- ifaces = _win32ifaces._interfaces()
- else:
- log.debug("Client OS %s not accounted for!" % sys.platform)
- log.debug("Unable to discover network interfaces...")
- ifaces = [('lo', '')]
-
- ## found = {'eth0': '1.1.1.1'}
- found = [{i[0]: i[2]} for i in ifaces if i[0] != 'lo']
- log.info("Found interfaces:\n%s" % pprint(found))
- self.interfaces = self.tryInterfaces(found)
- else:
- ## xxx need a way to check that iface exists, is up, and
- ## we have permissions on it
- log.debug("Our interface has been set to %s" % self.interface)
-
- if self.pcap:
- try:
- self.pcapfile = open(self.pcap, 'a+')
- except:
- log.msg("Unable to write to pcap file %s" % self.pcap)
- self.pcapfile = None
-
- try:
- assert os.path.isfile(self.file)
- fp = open(self.file, 'r')
- except Exception, e:
- hosts = ['8.8.8.8', '38.229.72.14']
- log.err(e)
- else:
- self.inputs = self.inputProcessor(fp)
- self.removePorts(hosts)
-
- log.debug("Initialization of %s test completed with:\n%s"
- % (self.name, ''.join(self.__dict__)))
-
- @staticmethod
- def inputParser(inputs):
- log.debug("Removing possible ports from host addresses...")
- log.debug("Initial inputs:\n%s" % pprint(inputs))
-
- assert isinstance(inputs, list)
- hosts = [h.rsplit(':', 1)[0] for h in inputs]
- log.debug("Inputs converted to:\n%s" % hosts)
-
- return hosts
-
- def tryInterfaces(self, ifaces):
- try:
- from scapy.all import sr1 ## we want this check to be blocking
- except:
- log.msg("This test requires scapy: www.secdev.org/projects/scapy")
- raise SystemExit
-
- ifup = {}
- while ifaces:
- for ifname, ifaddr in ifaces:
- log.debug("Currently testing network capabilities of interface"
- + "%s by sending a packet to our address %s"
- % (ifname, ifaddr))
- try:
- pkt = IP(dst=ifaddr)/ICMP()
- ans, unans = sr(pkt, iface=ifname, timeout=self.timeout)
- except Exception, e:
- raise PermissionsError if e.find("Errno 1") else log.err(e)
- else:
- ## xxx i think this logic might be wrong
- log.debug("Interface test packet\n%s\n\n%s"
- % (pkt.summary(), pkt.show2()))
- if ans.summary():
- log.info("Received answer for test packet on interface"
- +"%s :\n%s" % (ifname, ans.summary()))
- ifup.update(ifname, ifaddr)
- else:
- log.info("Our interface test packet was unanswered:\n%s"
- % unans.summary())
-
- if len(ifup) > 0:
- log.msg("Discovered the following working network interfaces: %s"
- % ifup)
- return ifup
- else:
- raise IfaceError("Could not find a working network interface.")
-
- def buildPackets(self):
- log.debug("self.input is %s" % self.input)
- log.debug("self.hosts is %s" % self.hosts)
- for addr in self.input:
- packet = IP(dst=self.input)/ICMP()
- self.request.append(packet)
- return packet
-
- def test_icmp(self):
- if self.recieve:
- self.buildPackets()
- all = []
- for packet in self.request:
- d = self.sendReceivePackets(packets=packet)
- all.append(d)
- self.response.update({packet: d})
- d_list = defer.DeferredList(all)
- return d_list
- else:
- d = self.sendPackets()
- return d
diff --git a/ooni/bridget/tests/echo.py b/ooni/bridget/tests/echo.py
new file mode 100644
index 0000000..a0826b6
--- /dev/null
+++ b/ooni/bridget/tests/echo.py
@@ -0,0 +1,205 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# +---------+
+# | echo.py |
+# +---------+
+# A simple ICMP-8 ping test.
+#
+# :author: Isis Lovecruft
+# :version: 0.0.1-pre-alpha
+# :license: (c) 2012 Isis Lovecruft
+# see attached LICENCE file
+#
+
+import os
+import sys
+
+from pprint import pprint
+
+from twisted.internet import reactor
+from twisted.plugin import IPlugin
+from twisted.python import usage
+from ooni.nettest import TestCase
+from ooni.utils import log, Storage
+from ooni.utils.net import PermissionsError, IfaceError
+
+try:
+ from scapy.all import sr1, IP, ICMP ## XXX v4/v6?
+ from ooni.lib import txscapy
+ from ooni.lib.txscapy import txsr, txsend
+ from ooni.templates.scapyt import ScapyTest
+except:
+ log.msg("This test requires scapy, see www.secdev.org/projects/scapy")
+
+## xxx TODO: move these to a utility function for determining OSes
+LINUX=sys.platform.startswith("linux")
+OPENBSD=sys.platform.startswith("openbsd")
+FREEBSD=sys.platform.startswith("freebsd")
+NETBSD=sys.platform.startswith("netbsd")
+DARWIN=sys.platform.startswith("darwin")
+SOLARIS=sys.platform.startswith("sunos")
+WINDOWS=sys.platform.startswith("win32")
+
+class EchoTest(ScapyTest):
+ """
+ xxx fill me in
+ """
+ name = 'echo'
+ author = 'Isis Lovecruft <isis@torproject.org>'
+ description = 'A simple ICMP-8 test to see if a host is reachable.'
+ version = '0.0.1'
+ inputFile = ['file', 'f', None, 'File of list of IPs to ping']
+ requirements = None
+ report = Storage()
+
+ optParameters = [
+ ['interface', 'i', None, 'Network interface to use'],
+ ['count', 'c', 5, 'Number of packets to send', int],
+ ['size', 's', 56, 'Number of bytes to send in ICMP data field', int],
+ ['ttl', 'l', 25, 'Set the IP Time to Live', int],
+ ['timeout', 't', 2, 'Seconds until timeout if no response', int],
+ ['pcap', 'p', None, 'Save pcap to this file'],
+ ['receive', 'r', True, 'Receive response packets']
+ ]
+
+ def setUpClass(self, *a, **kw):
+ '''
+ :ivar ifaces:
+ Struct returned from getifaddrs(3) and turned into a tuple in the
+ form (*ifa_name, AF_FAMILY, *ifa_addr)
+ '''
+ super(EchoTest, self).__init__(*a, **kw)
+
+ ## allow subclasses which register/implement external classes
+ ## to define their own reactor without overrides:
+ if not hasattr(super(EchoTest, self), 'reactor'):
+ log.debug("%s test: Didn't find reactor!" % self.name)
+ self.reactor = reactor
+
+ if self.localOptions:
+ log.debug("%s localOptions found" % self.name)
+ log.debug("%s test options: %s" % (self.name, self.subOptions))
+ self.local_options = self.localOptions.parseOptions(self.subOptions)
+ for key, value in self.local_options:
+ log.debug("Set attribute %s[%s] = %s" % (self.name, key, value))
+ setattr(self, key, value)
+
+ ## xxx is this now .subOptions?
+ #self.inputFile = self.localOptions['file']
+ self.timeout *= 1000 ## convert to milliseconds
+
+ if not self.interface:
+ log.msg("No network interface specified!")
+ log.debug("OS detected: %s" % sys.platform)
+ if LINUX or OPENBSD or NETBSD or FREEBSD or DARWIN or SOLARIS:
+ from twisted.internet.test import _posixifaces
+ log.msg("Attempting to discover network interfaces...")
+ ifaces = _posixifaces._interfaces()
+ elif WINDOWS:
+ from twisted.internet.test import _win32ifaces
+ log.msg("Attempting to discover network interfaces...")
+ ifaces = _win32ifaces._interfaces()
+ else:
+ log.debug("Client OS %s not accounted for!" % sys.platform)
+ log.debug("Unable to discover network interfaces...")
+ ifaces = [('lo', '')]
+
+ ## found = {'eth0': '1.1.1.1'}
+ found = [{i[0]: i[2]} for i in ifaces if i[0] != 'lo']
+ log.info("Found interfaces:\n%s" % pprint(found))
+ self.interfaces = self.tryInterfaces(found)
+ else:
+ ## xxx need a way to check that iface exists, is up, and
+ ## we have permissions on it
+ log.debug("Our interface has been set to %s" % self.interface)
+
+ if self.pcap:
+ try:
+ self.pcapfile = open(self.pcap, 'a+')
+ except:
+ log.msg("Unable to write to pcap file %s" % self.pcap)
+ self.pcapfile = None
+
+ try:
+ assert os.path.isfile(self.file)
+ fp = open(self.file, 'r')
+ except Exception, e:
+ hosts = ['8.8.8.8', '38.229.72.14']
+ log.err(e)
+ else:
+ self.inputs = self.inputProcessor(fp)
+ self.removePorts(hosts)
+
+ log.debug("Initialization of %s test completed with:\n%s"
+ % (self.name, ''.join(self.__dict__)))
+
+ @staticmethod
+ def inputParser(inputs):
+ log.debug("Removing possible ports from host addresses...")
+ log.debug("Initial inputs:\n%s" % pprint(inputs))
+
+ assert isinstance(inputs, list)
+ hosts = [h.rsplit(':', 1)[0] for h in inputs]
+ log.debug("Inputs converted to:\n%s" % hosts)
+
+ return hosts
+
+ def tryInterfaces(self, ifaces):
+ try:
+ from scapy.all import sr1 ## we want this check to be blocking
+ except:
+ log.msg("This test requires scapy: www.secdev.org/projects/scapy")
+ raise SystemExit
+
+ ifup = {}
+ while ifaces:
+ for ifname, ifaddr in ifaces:
+ log.debug("Currently testing network capabilities of interface"
+ + "%s by sending a packet to our address %s"
+ % (ifname, ifaddr))
+ try:
+ pkt = IP(dst=ifaddr)/ICMP()
+ ans, unans = sr(pkt, iface=ifname, timeout=self.timeout)
+ except Exception, e:
+ raise PermissionsError if e.find("Errno 1") else log.err(e)
+ else:
+ ## xxx i think this logic might be wrong
+ log.debug("Interface test packet\n%s\n\n%s"
+ % (pkt.summary(), pkt.show2()))
+ if ans.summary():
+ log.info("Received answer for test packet on interface"
+ +"%s :\n%s" % (ifname, ans.summary()))
+ ifup.update(ifname, ifaddr)
+ else:
+ log.info("Our interface test packet was unanswered:\n%s"
+ % unans.summary())
+
+ if len(ifup) > 0:
+ log.msg("Discovered the following working network interfaces: %s"
+ % ifup)
+ return ifup
+ else:
+ raise IfaceError("Could not find a working network interface.")
+
+ def buildPackets(self):
+ log.debug("self.input is %s" % self.input)
+ log.debug("self.hosts is %s" % self.hosts)
+ for addr in self.input:
+ packet = IP(dst=self.input)/ICMP()
+ self.request.append(packet)
+ return packet
+
+ def test_icmp(self):
+ if self.recieve:
+ self.buildPackets()
+ all = []
+ for packet in self.request:
+ d = self.sendReceivePackets(packets=packet)
+ all.append(d)
+ self.response.update({packet: d})
+ d_list = defer.DeferredList(all)
+ return d_list
+ else:
+ d = self.sendPackets()
+ return d
diff --git a/ooni/bridget/tests/tls-handshake.py b/ooni/bridget/tests/tls-handshake.py
new file mode 100644
index 0000000..eba950e
--- /dev/null
+++ b/ooni/bridget/tests/tls-handshake.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+
+import subprocess
+from subprocess import PIPE
+serverport = "129.21.124.215:443"
+# a subset of those from firefox
+ciphers = [
+ "ECDHE-ECDSA-AES256-SHA",
+ "ECDHE-RSA-AES256-SHA",
+ "DHE-RSA-CAMELLIA256-SHA",
+ "DHE-DSS-CAMELLIA256-SHA",
+ "DHE-RSA-AES256-SHA",
+ "DHE-DSS-AES256-SHA",
+ "ECDH-ECDSA-AES256-CBC-SHA",
+ "ECDH-RSA-AES256-CBC-SHA",
+ "CAMELLIA256-SHA",
+ "AES256-SHA",
+ "ECDHE-ECDSA-RC4-SHA",
+ "ECDHE-ECDSA-AES128-SHA",
+ "ECDHE-RSA-RC4-SHA",
+ "ECDHE-RSA-AES128-SHA",
+ "DHE-RSA-CAMELLIA128-SHA",
+ "DHE-DSS-CAMELLIA128-SHA"
+]
+def checkBridgeConnection(host, port)
+ cipher_arg = ":".join(ciphers)
+ cmd = ["openssl", "s_client", "-connect", "%s:%s" % (host,port)]
+ cmd += ["-cipher", cipher_arg]
+ proc = subprocess.Popen(cmd, stdout=PIPE, stderr=PIPE,stdin=PIPE)
+ out, error = proc.communicate()
+ success = "Cipher is DHE-RSA-AES256-SHA" in out
+ return success
diff --git a/ooni/tls-handshake.py b/ooni/tls-handshake.py
deleted file mode 100644
index eba950e..0000000
--- a/ooni/tls-handshake.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env python
-
-import subprocess
-from subprocess import PIPE
-serverport = "129.21.124.215:443"
-# a subset of those from firefox
-ciphers = [
- "ECDHE-ECDSA-AES256-SHA",
- "ECDHE-RSA-AES256-SHA",
- "DHE-RSA-CAMELLIA256-SHA",
- "DHE-DSS-CAMELLIA256-SHA",
- "DHE-RSA-AES256-SHA",
- "DHE-DSS-AES256-SHA",
- "ECDH-ECDSA-AES256-CBC-SHA",
- "ECDH-RSA-AES256-CBC-SHA",
- "CAMELLIA256-SHA",
- "AES256-SHA",
- "ECDHE-ECDSA-RC4-SHA",
- "ECDHE-ECDSA-AES128-SHA",
- "ECDHE-RSA-RC4-SHA",
- "ECDHE-RSA-AES128-SHA",
- "DHE-RSA-CAMELLIA128-SHA",
- "DHE-DSS-CAMELLIA128-SHA"
-]
-def checkBridgeConnection(host, port)
- cipher_arg = ":".join(ciphers)
- cmd = ["openssl", "s_client", "-connect", "%s:%s" % (host,port)]
- cmd += ["-cipher", cipher_arg]
- proc = subprocess.Popen(cmd, stdout=PIPE, stderr=PIPE,stdin=PIPE)
- out, error = proc.communicate()
- success = "Cipher is DHE-RSA-AES256-SHA" in out
- return success
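
The tls-handshake.py helper moved in this commit shells out to `openssl s_client` with a fixed cipher list and greps the output for one specific negotiated cipher; as committed, its `def checkBridgeConnection(host, port)` line is also missing its trailing colon. Below is a minimal, hedged sketch of the same idea in modern Python, not the project's code: the bridge host, port, and cipher subset are placeholders, and the success check is a best-effort heuristic because `s_client`'s text output and exit codes differ between OpenSSL versions.

#!/usr/bin/env python
# Hedged sketch of the tls-handshake check shown in the diff above: run
# `openssl s_client` with a restricted cipher list and decide whether a
# TLS handshake completed. Host, port, and the cipher subset below are
# placeholders, not values taken from the commit.

import subprocess

CIPHERS = [  # short placeholder subset of the Firefox-derived list above
    "ECDHE-ECDSA-AES256-SHA",
    "ECDHE-RSA-AES256-SHA",
    "DHE-RSA-AES256-SHA",
    "AES256-SHA",
]

def check_bridge_connection(host, port, timeout=15):
    cmd = [
        "openssl", "s_client",
        "-connect", f"{host}:{port}",
        "-cipher", ":".join(CIPHERS),
    ]
    try:
        # Closing stdin (input="") makes s_client exit once the handshake
        # is done, mirroring the original's proc.communicate() on a piped
        # stdin.
        proc = subprocess.run(cmd, input="", capture_output=True,
                              text=True, timeout=timeout)
    except (OSError, subprocess.TimeoutExpired):
        return False
    # s_client prints e.g. "New, TLSv1.2, Cipher is DHE-RSA-AES256-SHA" on
    # success and "Cipher is (NONE)" when nothing was negotiated; treat the
    # combined check below as a heuristic only.
    return proc.returncode == 0 and "Cipher is (NONE)" not in proc.stdout

if __name__ == "__main__":
    print(check_bridge_connection("bridge.example.org", 443))  # placeholder target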

[ooni-probe/master] * Removing old tests which have already been ported: dnstamper, echo,
by isis@torproject.org 03 Nov '12
commit 77c07070e7e8575abc7e6b9fdeed4d7664736ec3
Author: Isis Lovecruft <isis@torproject.org>
Date: Fri Nov 2 16:47:40 2012 +0000
* Removing old tests which have already been ported: dnstamper, echo,
blocking.
---
nettests/core/dnstamper.py | 29 +++--
ooni/plugins/blocking.py | 46 ------
ooni/plugins/dnstamper.py | 338 --------------------------------------------
ooni/plugins/echo.py | 127 -----------------
4 files changed, 19 insertions(+), 521 deletions(-)
diff --git a/nettests/core/dnstamper.py b/nettests/core/dnstamper.py
index b5fcea3..aad2ef3 100644
--- a/nettests/core/dnstamper.py
+++ b/nettests/core/dnstamper.py
@@ -1,6 +1,5 @@
# -*- encoding: utf-8 -*-
#
-#
# dnstamper
# *********
#
@@ -25,16 +24,13 @@ from twisted.names.error import DNSQueryRefusedError
class DNSTamperTest(nettest.TestCase):
name = "DNS tamper"
-
description = "DNS censorship detection test"
version = "0.2"
-
lookupTimeout = [1]
-
requirements = None
+
inputFile = ['file', 'f', None,
'Input file of list of hostnames to attempt to resolve']
-
optParameters = [['controlresolver', 'c', '8.8.8.8',
'Known good DNS server'],
['testresolvers', 't', None,
@@ -43,20 +39,18 @@ class DNSTamperTest(nettest.TestCase):
def setUp(self):
self.report['test_lookups'] = {}
self.report['test_reverse'] = {}
-
self.report['control_lookup'] = []
-
self.report['a_lookups'] = {}
-
self.report['tampering'] = {}
self.test_a_lookups = {}
self.control_a_lookups = []
-
self.control_reverse = None
self.test_reverse = {}
if not self.localOptions['testresolvers']:
+ log.msg("You did not specify a file of DNS servers to test!",
+ "See the '--testresolvers' option.")
self.test_resolvers = ['8.8.8.8']
return
@@ -181,6 +175,14 @@ class DNSTamperTest(nettest.TestCase):
return r
def do_reverse_lookups(self, result):
+ """
+ Take a resolved address in the form "176.139.79.178.in-addr.arpa." and
+ attempt to reverse the domain with both the control and test DNS
+ servers to see if they match.
+
+ :param result:
+ A resolved domain name.
+ """
log.msg("Doing the reverse lookups %s" % self.input)
list_of_ds = []
@@ -209,6 +211,12 @@ class DNSTamperTest(nettest.TestCase):
return dl
def compare_results(self, *arg, **kw):
+ """
+ Take the set intersection of two test result sets. If the intersection
+ is greater than zero (there are matching addresses in both sets) then
+ the no censorship is reported. Else, if no IP addresses match other
+ addresses, then we mark it as a censorship event.
+ """
log.msg("Comparing results for %s" % self.input)
log.msg(self.test_a_lookups)
@@ -222,7 +230,8 @@ class DNSTamperTest(nettest.TestCase):
# Address has not tampered with on DNS server
self.report['tampering'][test] = False
- elif self.control_reverse and set([self.control_reverse]) & set([self.report['test_reverse'][test]]):
+ elif self.control_reverse and set([self.control_reverse]) \
+ & set([self.report['test_reverse'][test]]):
# Further testing has eliminated false positives
self.report['tampering'][test] = 'reverse-match'
diff --git a/ooni/plugins/blocking.py b/ooni/plugins/blocking.py
deleted file mode 100644
index 4dd2db1..0000000
--- a/ooni/plugins/blocking.py
+++ /dev/null
@@ -1,46 +0,0 @@
-from zope.interface import implements
-from twisted.python import usage
-from twisted.plugin import IPlugin
-
-from plugoo.assets import Asset
-from plugoo.tests import ITest, OONITest
-
-class BlockingArgs(usage.Options):
- optParameters = [['asset', 'a', None, 'Asset file'],
- ['resume', 'r', 0, 'Resume at this index'],
- ['shit', 'o', None, 'Other arguments']]
-
-class BlockingTest(OONITest):
- implements(IPlugin, ITest)
-
- shortName = "blocking"
- description = "Blocking plugin"
- requirements = None
- options = BlockingArgs
- # Tells this to be blocking.
- blocking = True
-
- def control(self, experiment_result, args):
- print "Experiment Result:", experiment_result
- print "Args", args
- return experiment_result
-
- def experiment(self, args):
- import urllib
- url = 'http://torproject.org/' if not 'asset' in args else args['asset']
- try:
- req = urllib.urlopen(url)
- except:
- return {'error': 'Connection failed!'}
-
- return {'page': req.readlines()}
-
- def load_assets(self):
- if self.local_options and self.local_options['asset']:
- return {'asset': Asset(self.local_options['asset'])}
- else:
- return {}
-
-# We need to instantiate it otherwise getPlugins does not detect it
-# XXX Find a way to load plugins without instantiating them.
-#blocking = BlockingTest(None, None, None)
diff --git a/ooni/plugins/dnstamper.py b/ooni/plugins/dnstamper.py
deleted file mode 100644
index 40df505..0000000
--- a/ooni/plugins/dnstamper.py
+++ /dev/null
@@ -1,338 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- dnstamper
- *********
-
- This test resolves DNS for a list of domain names, one per line, in the
- file specified in the ooni-config under the setting "dns_experiment". If
- the file is top-1m.txt, the test will be run using Amazon's list of top
- one million domains. The experimental dns servers to query should
- be specified one per line in assets/dns_servers.txt.
-
- The test reports censorship if the cardinality of the intersection of
- the query result set from the control server and the query result set
- from the experimental server is zero, which is to say, if the two sets
- have no matching results whatsoever.
-
- NOTE: This test frequently results in false positives due to GeoIP-based
- load balancing on major global sites such as google, facebook, and
- youtube, etc.
-
- :author: Isis Lovecruft, Arturo Filastò
- :license: see LICENSE for more details
-
- TODO:
- * Finish porting to twisted
- * Finish the client.Resolver() subclass and test it
- * Use the DNS tests from captiveportal
- * Use plugoo/reports.py for final data
-"""
-
-import os
-
-from twisted.names import client, dns
-from twisted.internet import reactor, defer
-from twisted.internet.error import CannotListenError
-from twisted.internet.protocol import Factory, Protocol
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from zope.interface import implements
-
-from ooni.plugoo.assets import Asset
-from ooni.plugoo.tests import ITest, OONITest
-from ooni.utils import log
-
-class AlexaAsset(Asset):
- """
- Class for parsing the Alexa top-1m.txt as an asset.
- """
- def __init__(self, file=None):
- self = Asset.__init__(self, file)
-
- def parse_line(self, line):
- self = Asset.parse_line(self, line)
- return line.split(',')[1].replace('\n','')
-
-class DNSTamperArgs(usage.Options):
- optParameters = [['hostnames', 'h', None,
- 'Asset file of hostnames to resolve'],
- ['controlresolver', 'c', '8.8.8.8',
- 'Known good DNS server'],
- ['testresolvers', 't', None,
- 'Asset file of DNS servers to test'],
- ['localresolvers', 'l', False,
- 'Also test local servers'],
- ['port', 'p', None,
- 'Local UDP port to send queries over'],
- ['usereverse', 'r', False,
- 'Also try reverse DNS resolves'],
- ['resume', 's', 0,
- 'Resume at this index in the asset file']]
-
-class DNSTamperResolver(client.Resolver):
- """
- Twisted by default issues DNS queries over cryptographically random
- UDP ports to mitigate the Berstein/Kaminsky attack on limited DNS
- Transaction ID numbers.[1][2][3]
-
- This is fine, unless the client has external restrictions which require
- DNS queries to be conducted over UDP port 53. Twisted does not provide
- an easy way to change this, ergo subclassing client.Resolver.[4] It
- would perhaps be wise to patch twisted.names.client and request a merge
- into upstream.
-
- [1] https://twistedmatrix.com/trac/ticket/3342
- [2] http://blog.netherlabs.nl/articles/2008/07/09/ \
- some-thoughts-on-the-recent-dns-vulnerability
- [3] http://www.blackhat.com/presentations/bh-dc-09/Kaminsky/ \
- BlackHat-DC-09-Kaminsky-DNS-Critical-Infrastructure.pdf
- [4] http://comments.gmane.org/gmane.comp.python.twisted/22794
- """
- def __init__(self):
- super(DNSTamperResolver, self).__init__(self, resolv, servers,
- timeout, reactor)
- #client.Resolver.__init__(self)
-
- if self.local_options['port']:
- self.port = self.local_options['port']
- else:
- self.port = '53'
-
- def _connectedProtocol(self):
- """
- Return a new DNSDatagramProtocol bound to a specific port
- rather than the default cryptographically-random port.
- """
- if 'protocol' in self.__dict__:
- return self.protocol
- proto = dns.DNSDatagramProtocol(self)
-
- ## XXX We may need to remove the while loop, which was
- ## originally implemented to safeguard against attempts to
- ## bind to the same random port twice...but then the code
- ## would be blocking...
- while True:
- try:
- self._reactor.listenUDP(self.port, proto)
- except error.CannotListenError:
- pass
- else:
- return proto
-
-class DNSTamperTest(OONITest):
- """
- XXX fill me in
- """
- implements(IPlugin, ITest)
-
- shortName = "dnstamper"
- description = "DNS censorship detection test"
- requirements = None
- options = DNSTamperArgs
- blocking = False
-
- def __init__(self, local_options, global_options,
- report, ooninet=None, reactor=None):
- super(DNSTamperTest, self).__init__(local_options, global_options,
- report, ooninet, reactor)
-
- def __repr__(self):
- represent = "DNSTamperTest(OONITest): local_options=%r, " \
- "global_options=%r, assets=%r" % (self.local_options,
- self.global_options,
- self.assets)
- return represent
-
- def initialize(self):
- if self.local_options:
- ## client.createResolver() turns 'None' into '/etc/resolv.conf' on
- ## posix systems, ignored on Windows.
- if self.local_options['localresolvers']:
- self.resolvconf = None
- else:
- self.resolvconf = ''
-
- def load_assets(self):
- assets = {}
-
- #default_hostnames = ['baidu.com', 'torrentz.eu', 'twitter.com',
- # 'ooni.nu', 'google.com', 'torproject.org']
- #default_resolvers = ['209.244.0.3', '208.67.222.222']
-
- def asset_file(asset_option):
- return self.local_options[asset_option]
-
- def list_to_asset(list_):
- def next(list_):
- host = list_.pop()
- if host is not None:
- yield str(host)
- while len(list_) > 0:
- next(list_)
-
- if self.local_options:
- if asset_file('hostnames'):
- ## The default filename for the Alexa Top 1 Million:
- if asset_file('hostnames') == 'top-1m.txt':
- assets.update({'hostnames':
- AlexaAsset(asset_file('hostnames'))})
- else:
- assets.update({'hostnames':
- Asset(asset_file('hostnames'))})
- else:
- log.msg("Error! We need an asset file containing the " +
- "hostnames that we should test DNS with! Please use " +
- "the '-h' option. Using pre-defined hostnames...")
-
- if asset_file('testresolvers'):
- assets.update({'testresolvers':
- Asset(asset_file('testresolvers'))})
-
- return assets
-
- def lookup(self, hostname, resolver):
- """
- Resolves a hostname through a DNS nameserver to the corresponding IP
- addresses.
- """
- def got_result(result, hostname, resolver):
- log.msg('Resolved %s through %s to %s'
- % (hostname, resolver, result))
- report = {'resolved': True,
- 'domain': hostname,
- 'nameserver': resolver,
- 'address': result }
- log.msg(report)
- return result
-
- def got_error(err, hostname, resolver):
- log.msg(err.printTraceback())
- report = {'resolved': False,
- 'domain': hostname,
- 'nameserver': resolver,
- 'address': err }
- log.msg(report)
- return err
-
- res = client.createResolver(resolvconf=self.resolvconf,
- servers=[(resolver, 53)])
-
- ## XXX should we do self.d.addCallback(resHostByName, hostname)?
- #d = res.getHostByName(hostname)
- #d.addCallbacks(got_result, got_error)
-
- #d = defer.Deferred()
- #d.addCallback(res.getHostByName, hostname)
-
- #d = res.getHostByName(hostname)
- #d.addCallback(got_result, result, hostname, resolver)
- #d.addErrback(got_error, err, hostname, resolver)
-
- res.addCallback(getHostByName, hostname)
- res.addCallback(got_result, result, hostname, resolver)
- res.addErrback(got_error, err, hostname, resolver)
-
- if self.local_options['usereverse']:
- #d.addCallback(self.reverse_lookup, result, resolver)
- #d.addErrback(log.msg(err.printTraceback()))
-
- #d.addCallback(self.reverse_lookup, result, resolver)
- #d.addErrback(log.msg(err.printTraceback()))
-
- res.addCallback(self.reverse_lookup, result, resolver)
- res.addErraback(log.msg(err.printTraceback()))
-
- return res
-
- def reverse_lookup(self, address, resolver):
- """
- Attempt to do a reverse DNS lookup to determine if the control and exp
- sets from a positive result resolve to the same domain, in order to
- remove false positives due to GeoIP load balancing.
- """
- res = client.createResolver(resolvconf=self.resolvconf,
- servers=[(resolver, 53)])
- ptr = '.'.join(addr.split('.')[::-1]) + '.in-addr.arpa'
- reverse = res.lookupPointer(ptr)
- reverse.addCallback(lambda (address, auth, add):
- util.println(address[0].payload.name))
- reverse.addErrback(log.err)
-
- ## XXX do we need to stop the reactor?
- #d.addBoth(lambda r: reactor.stop())
-
- return reverse
-
- def experiment(self, args):
- """
- Compares the lookup() sets of the control and experiment groups.
- """
- for hostname in args:
- for testresolver in self.assets['testresolvers']:
- #addressd = defer.Deferred()
- #addressd.addCallback(self.lookup, hostname, testresolver)
- #addressd.addErrback(log.err)
-
- self.d.addCallback(self.lookup, hostname, testresolver)
- self.d.addErrback(log.err)
-
- #addressd = self.lookup(hostname, testresolver)
-
- #self.d.addCallback(self.lookup, hostname, testserver)
-
- print "%s" % type(addressd)
-
- return self.d
-
- def control(self, experiment_result, args):
- print "EXPERIMENT RESULT IS %s" % experiment_result
- (exp_address, hostname, testserver, exp_reversed) = experiment_result
- control_server = self.local_options['controlserver']
- ctrl_address = self.lookup(hostname, control_server)
-
- ## XXX getHostByName() appears to be returning only one IP...
-
- if len(set(exp_address) & set(ctrl_address)) > 0:
- log.msg("Address %s has not tampered with on DNS server %s"
- % (hostname, test_server))
- return {'hostname': hostname,
- 'test-nameserver': test_server,
- 'test-address': exp_address,
- 'control-nameserver': control_server,
- 'control-address': ctrl_address,
- 'tampering-detected': False}
- else:
- log.msg("Address %s has possibly been tampered on %s:"
- % (hostname, test_server))
- log.msg("DNS resolution through testserver %s yeilds: %s"
- % (test_server, exp_address))
- log.msg("However, DNS resolution through controlserver %s yeilds: %s"
- % (control_server, ctrl_address))
-
- if self.local_options['usereverse']:
- ctrl_reversed = self.reverse_lookup(experiment_result, control_server)
- if len(set(ctrl_reversed) & set(exp_reversed)) > 0:
- log.msg("Further testing has eliminated false positives")
- else:
- log.msg("Reverse DNS on the results returned by %s returned:"
- % (test_server))
- log.msg("%s" % exp_reversed)
- log.msg("which does not match the expected domainname: %s"
- % ctrl_reversed)
- return {'hostname': hostname,
- 'test-nameserver': test_server,
- 'test-address': exp_address,
- 'test-reversed': exp_reversed,
- 'control-nameserver': control_server,
- 'control-address': ctrl_address,
- 'control-reversed': ctrl_reversed,
- 'tampering-detected': True}
- else:
- return {'hostname': hostname,
- 'test-nameserver': test_server,
- 'test-address': exp_address,
- 'control-nameserver': control_server,
- 'control-address': ctrl_address,
- 'tampering-detected': False}
-
-#dnstamper = DNSTamperTest(None, None, None)
diff --git a/ooni/plugins/echo.py b/ooni/plugins/echo.py
deleted file mode 100644
index bc1b2a8..0000000
--- a/ooni/plugins/echo.py
+++ /dev/null
@@ -1,127 +0,0 @@
-#!/usr/bin/env python
-# -*- encoding: utf-8 -*-
-#
-# +---------+
-# | echo.py |
-# +---------+
-# A simply ICMP-8 ping test.
-#
-# :author: Isis Lovecruft
-# :version: 0.1.0-pre-alpha
-# :license: (c) 2012 Isis Lovecruft
-# see attached LICENCE file
-#
-
-import os
-import sys
-
-from twisted.plugin import IPlugin
-from twisted.python import usage
-from zope.interface import implements
-
-from lib import txscapy
-from utils import log
-from plugoo.assets import Asset
-from plugoo.interface import ITest
-from protocols.scapyproto import ScapyTest
-
-class EchoOptions(usage.Options):
- optParameters = [
- ['interface', 'i', None, 'Network interface to use'],
- ['destination', 'd', None, 'File of hosts to ping'],
- ['count', 'c', 5, 'Number of packets to send', int],
- ['size', 's', 56, 'Number of bytes to send in ICMP data field', int],
- ['ttl', 't', 25, 'Set the IP Time to Live', int],
- ]
- optFlags = []
-
-class EchoAsset(Asset):
- def __init__(self, file=None):
- self = Asset.__init__(self, file)
-
- def parse_line(self, line):
- if line.startswith('#'):
- return
- else:
- return line.replace('\n', '')
-
-class EchoTest(ScapyTest):
- implements(IPlugin, ITest)
-
- shortName = 'echo'
- description = 'A simple ICMP-8 test to check if a host is reachable'
- options = EchoOptions
- requirements = None
- blocking = False
-
- pcap_file = 'echo.pcap'
- receive = True
-
- def initialize(self):
- self.request = {}
- self.response = {}
-
- if self.local_options:
-
- options = self.local_options
-
- if options['interface']:
- self.interface = options['interface']
-
- if options['count']:
- ## there's a Counter() somewhere, use it
- self.count = options['count']
-
- if options['size']:
- self.size = options['size']
-
- if options['ttl']:
- self.ttl = options['ttl']
-
- def load_assets(self):
- assets = {}
- option = self.local_options
-
- if option and option['destination']:
-
- try:
- from scapy.all import IP
- except:
- log.err()
-
- if os.path.isfile(option['destination']):
- with open(option['destination']) as hosts:
- for line in hosts.readlines():
- assets.update({'host': EchoAsset(line)})
- else:
- while type(options['destination']) is str:
- try:
- IP(options['destination'])
- except:
- log.err()
- break
- assets.update({'host': options['destination']})
- else:
- log.msg("Couldn't understand destination option...")
- log.msg("Give one IPv4 address, or a file with one address per line.")
- return assets
-
- def experiment(self, args):
- if len(args) == 0:
- log.err("Error: We're Echo, not Narcissus!")
- log.err(" Provide a list of hosts to ping...")
- d = sys.exit(1)
- return d
-
- ## XXX v4 / v6
- from scapy.all import ICMP, IP, sr
- ping = sr(IP(dst=args)/ICMP())
- if ping:
- self.response.update(ping.show())
- else:
- log.msg('No response received from %s' % args)
-
- def control(self, *args):
- pass
-
-echo = EchoTest(None, None, None)
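
The dnstamper docstrings quoted above describe the core heuristic: resolve each hostname through a control resolver and each test resolver, report censorship only when the two answer sets have an empty intersection, and downgrade the verdict to 'reverse-match' when the reverse-DNS names agree, to absorb GeoIP load balancing. The following is a small, self-contained sketch of just that comparison rule; it uses plain lists and made-up addresses rather than live Twisted DNS results, so it is an illustration, not the test's implementation.

# Hedged sketch of the comparison rule described in the dnstamper
# docstrings above: flag a hostname as tampered only when the test
# resolver's answers share no address with the control resolver's answers,
# and downgrade to 'reverse-match' when the reverse-DNS names agree.
# Inputs are plain lists with made-up addresses, not live DNS results.

def classify_lookup(control_ips, test_ips, control_ptr=None, test_ptr=None):
    control, test = set(control_ips), set(test_ips)
    if control & test:
        return False            # at least one shared A record: no tampering
    if control_ptr is not None and control_ptr == test_ptr:
        return "reverse-match"  # disjoint IPs but matching reverse names
    return True                 # disjoint answers, no reverse match: flag it

if __name__ == "__main__":
    # Hypothetical results for one hostname against one test resolver.
    print(classify_lookup(["93.184.216.34"],
                          ["93.184.216.34", "198.51.100.7"]))   # False
    print(classify_lookup(["93.184.216.34"], ["198.51.100.7"],
                          control_ptr="edge.example.net.",
                          test_ptr="edge.example.net."))        # 'reverse-match'
    print(classify_lookup(["93.184.216.34"], ["198.51.100.7"])) # True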

[ooni-probe/master] * Moving tests which are not ported to the new API to the top level
by isis@torproject.org 03 Nov '12
commit 8894f057967a779875c8cb5b9c00971408d08fc9
Author: Isis Lovecruft <isis@torproject.org>
Date: Fri Nov 2 17:03:33 2012 +0000
* Moving tests which are not ported to the new API to the top level
old-to-be-ported/ directory.
---
ooni/example_plugins/examplescapy.py | 49 --------
ooni/example_plugins/skel.py | 29 -----
ooni/hack_this/TO_BE_PORTED | 14 --
ooni/hack_this/dnstamper.py | 200 -------------------------------
ooni/hack_this/tcpscan.py | 84 -------------
ooni/hack_this/traceroute.py | 108 -----------------
ooni/plugins/TESTS_ARE_MOVING.txt | 8 --
ooni/plugins/chinatrigger.py | 140 ----------------------
ooni/plugins/daphn3.py | 152 ------------------------
ooni/plugins/domclass.py | 216 ----------------------------------
ooni/plugins/httpt.py | 94 ---------------
ooni/plugins/tcpconnect.py | 65 ----------
12 files changed, 0 insertions(+), 1159 deletions(-)
diff --git a/ooni/example_plugins/examplescapy.py b/ooni/example_plugins/examplescapy.py
deleted file mode 100644
index 21a919d..0000000
--- a/ooni/example_plugins/examplescapy.py
+++ /dev/null
@@ -1,49 +0,0 @@
-import random
-from zope.interface import implements
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from twisted.internet import protocol, defer
-from ooni.plugoo.tests import ITest, OONITest
-from ooni.plugoo.assets import Asset
-from ooni.utils import log
-from ooni.protocols.scapyproto import ScapyTest
-
-from ooni.lib.txscapy import txsr, txsend
-
-class scapyArgs(usage.Options):
- optParameters = []
-
-class ExampleScapyTest(ScapyTest):
- """
- An example of writing a scapy Test
- """
- implements(IPlugin, ITest)
-
- shortName = "example_scapy"
- description = "An example of a scapy test"
- requirements = None
- options = scapyArgs
- blocking = False
-
- receive = True
- pcapfile = 'example_scapy.pcap'
- def initialize(self, reactor=None):
- if not self.reactor:
- from twisted.internet import reactor
- self.reactor = reactor
-
- self.request = {}
- self.response = {}
-
- def build_packets(self):
- """
- Override this method to build scapy packets.
- """
- from scapy.all import IP, TCP
- return IP()/TCP()
-
- def load_assets(self):
- return {}
-
-examplescapy = ExampleScapyTest(None, None, None)
-
diff --git a/ooni/example_plugins/skel.py b/ooni/example_plugins/skel.py
deleted file mode 100644
index 5f46620..0000000
--- a/ooni/example_plugins/skel.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from zope.interface import implements
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from plugoo.tests import ITest, TwistedTest
-import log
-
-class SkelArgs(usage.Options):
- optParameters = [['asset', 'a', None, 'Asset file'],
- ['resume', 'r', 0, 'Resume at this index'],
- ['other', 'o', None, 'Other arguments']]
-
-class SkelTest(OONITest):
- implements(IPlugin, ITest)
-
- shortName = "skeleton"
- description = "Skeleton plugin"
- requirements = None
- options = SkelArgs
- blocking = False
-
- def load_assets(self):
- if self.local_options:
- return {'asset': open(self.local_options['asset'])}
- else:
- return {}
-
-# We need to instantiate it otherwise getPlugins does not detect it
-# XXX Find a way to load plugins without instantiating them.
-skel = SkelTest(None, None, None)
diff --git a/ooni/hack_this/TO_BE_PORTED b/ooni/hack_this/TO_BE_PORTED
deleted file mode 100644
index 49ce5e0..0000000
--- a/ooni/hack_this/TO_BE_PORTED
+++ /dev/null
@@ -1,14 +0,0 @@
-
-The tests in this directory are very old, and have neither been ported to
-Twisted, nor to the new twisted.trial API framework. Although, they are not
-old in the sense of the *seriously old* OONI code which was written two years
-ago.
-
-These tests should be updated at least to use Twisted.
-
-If you want to hack on something care free, feel free to mess with these files
-because it would be difficult to not improve on them.
-
-<(A)3
-isis
-0x2cdb8b35
diff --git a/ooni/hack_this/dnstamper.py b/ooni/hack_this/dnstamper.py
deleted file mode 100644
index d6f87a6..0000000
--- a/ooni/hack_this/dnstamper.py
+++ /dev/null
@@ -1,200 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- dnstamper
- *********
-
- This test resolves DNS for a list of domain names, one per line, in the
- file specified in the ooni-config under the setting "dns_experiment". If
- the file is top-1m.txt, the test will be run using Amazon's list of top
- one million domains. The experimental dns servers to query should
- be specified one per line in assets/dns_servers.txt.
-
- The test reports censorship if the cardinality of the intersection of
- the query result set from the control server and the query result set
- from the experimental server is zero, which is to say, if the two sets
- have no matching results whatsoever.
-
- NOTE: This test frequently results in false positives due to GeoIP-based
- load balancing on major global sites such as google, facebook, and
- youtube, etc.
-
- :copyright: (c) 2012 Arturo Filastò, Isis Lovecruft
- :license: see LICENSE for more details
-
- TODO:
- * Switch to using Twisted's DNS builtins instead of dnspython
- *
-"""
-
-import os
-
-from twisted.names import client
-from twisted.internet import reactor
-from twisted.internet.protocol import Factory, Protocol
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from zope.interface import implements
-
-from ooni.plugoo.assets import Asset
-from ooni.plugoo.tests import ITest, OONITest
-from ooni import log
-
-class Top1MAsset(Asset):
- """
- Class for parsing the Alexa top-1m.txt as an asset.
- """
- def __init__(self, file=None):
- self = Asset.__init__(self, file)
-
- def parse_line(self, line):
- self = Asset.parse_line(self, line)
- return line.split(',')[1].replace('\n','')
-
-class DNSTamperAsset(Asset):
- """
- Creates DNS testing specific Assets.
- """
- def __init__(self, file=None):
- self = Asset.__init__(self, file)
-
-class DNSTamperArgs(usage.Options):
- optParameters = [['asset', 'a', None, 'Asset file of hostnames to resolve'],
- ['controlserver', 'c', '8.8.8.8', 'Known good DNS server'],
- ['testservers', 't', None, 'Asset file of the DNS servers to test'],
- ['resume', 'r', 0, 'Resume at this index in the asset file']]
-'''
- def control(self, experiment_result, args):
- print "Experiment Result:", experiment_result
- print "Args", args
- return experiment_result
-
- def experiment(self, args):
-'''
-
-class DNSTamperTest(OONITest):
- implements(IPlugin, ITest)
-
- shortName = "DNSTamper"
- description = "DNS censorship detection test"
- requirements = None
- options = DNSTamperArgs
- blocking = False
-
- def load_assets(self):
- if self.local_options:
- if self.local_options['asset']:
- assetf = self.local_options['asset']
- if assetf == 'top-1m.txt':
- return {'asset': Top1MAsset(assetf)}
- else:
- return {'asset': DNSTamperAsset(assetf)}
- else:
- return {}
-
- def lookup(self, hostname, nameserver):
- """
- Resolves a hostname through a DNS nameserver to the corresponding
- IP addresses.
- """
- def got_result(result):
- #self.logger.log(result)
- print result
- reactor.stop()
-
- def got_failure(failure):
- failure.printTraceback()
- reactor.stop()
-
- res = client.createResolver(servers=[(nameserver, 53)])
- d = res.getHostByName(hostname)
- d.addCallbacks(got_result, got_failure)
-
- ## XXX MAY ALSO BE:
- #answer = res.getAddress(servers=[('nameserver', 53)])
-
- ret = []
-
- for data in answer:
- ret.append(data.address)
-
- return ret
-
- def reverse_lookup(self, ip, nameserver):
- """
- Attempt to do a reverse DNS lookup to determine if the control and exp
- sets from a positive result resolve to the same domain, in order to
- remove false positives due to GeoIP load balancing.
- """
- res = client.createResolver(servers=nameserver)
- n = reversename.from_address(ip)
- revn = res.query(n, "PTR").__iter__().next().to_text()[:-1]
-
- return revn
-
- def experiment(self, *a, **kw):
- """
- Compares the lookup() sets of the control and experiment groups.
- """
- # this is just a dirty hack
- address = kw['data'][0]
- ns = kw['data'][1]
-
- config = self.config
- ctrl_ns = config.tests.dns_control_server
-
- print "ADDRESS: %s" % address
- print "NAMESERVER: %s" % ns
-
- exp = self.lookup(address, ns)
- control = self.lookup(address, ctrl_ns)
-
- result = []
-
- if len(set(exp) & set(control)) > 0:
- print "Address %s has not tampered with on DNS server %s\n" % (address, ns)
- result = (address, ns, exp, control, False)
- return result
- else:
- print "Address %s has possibly been tampered on %s:\nDNS resolution through %s yeilds:\n%s\nAlthough the control group DNS servers resolve to:\n%s" % (address, ns, ns, exp, control)
- result = (address, ns, exp, control, True)
-
- if config.tests.dns_reverse_lookup:
-
- exprevn = [self.reverse_lookup(ip, ns) for ip in exp]
- ctrlrevn = [self.reverse_lookup(ip, ctrl_ns)
- for ip in control]
-
- if len(set(exprevn) & set(ctrlrevn)) > 0:
- print "Further testing has eliminated this as a false positive."
- else:
- print "Reverse DNS on the results returned by %s returned:\n%s\nWhich does not match the expected domainname:\n%s\n" % (ns, exprevn, ctrlrevn)
- return result
-
- else:
- print "\n"
- return result
-
-#def run(ooni):
-# """
-# Run the test.
-# """
-# config = ooni.config
-# urls = []
-#
-# if (config.tests.dns_experiment == "top-1m.txt"):
-# dns_experiment = Top1MAsset(os.path.join(config.main.assetdir,
-# config.tests.dns_experiment))
-# else:
-# dns_experiment = DNSTAsset(os.path.join(config.main.assetdir,
-# config.tests.dns_experiment))
-# dns_experiment_dns = DNSTAsset(os.path.join(config.main.assetdir,
-# config.tests.dns_experiment_dns))
-#
-# assets = [dns_experiment, dns_experiment_dns]
-#
-# dnstest = DNST(ooni)
-# ooni.logger.info("Beginning dnstamper test...")
-# dnstest.run(assets, {'index': 1})
-# ooni.logger.info("Dnstamper test completed!")
-
-dnstamper = DNSTamperTest(None, None, None)
diff --git a/ooni/hack_this/tcpscan.py b/ooni/hack_this/tcpscan.py
deleted file mode 100644
index b371c88..0000000
--- a/ooni/hack_this/tcpscan.py
+++ /dev/null
@@ -1,84 +0,0 @@
-"""
- TCP Port Scanner
- ****************
-
- Does a TCP connect scan on the IP:port pairs.
-
-"""
-import os
-from gevent import socket
-from datetime import datetime
-import socks
-
-from plugoo.assets import Asset
-from plugoo.tests import Test
-
-__plugoo__ = "TCP Port Scanner"
-__desc__ = "This a test template to be used to build your own tests"
-
-class TCPScanAsset(Asset):
- """
- This is the asset that should be used by the Test. It will
- contain all the code responsible for parsing the asset file
- and should be passed on instantiation to the test.
- """
- def __init__(self, file=None):
- self = Asset.__init__(self, file)
-
-
-class TCPScan(Test):
- """
- The main Test class
- """
-
- def experiment(self, *a, **kw):
- """
- Fill this up with the tasks that should be performed
- on the "dirty" network and should be compared with the
- control.
- """
- addr = kw['data']
- s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- res = False
- try:
- self.logger.debug('Doing a connection to %s' % addr)
- s.connect((addr.split(':')[0], int(addr.split(':')[1])))
- res = True
- except socket.error, msg:
- self.logger.debug('Connection failed to %s: %s' % (addr, msg))
-
- finally:
- s.close()
-
- return {'Time': datetime.now(),
- 'Address': addr,
- 'Status': res}
-
- def control(self):
- """
- Fill this up with the control related code.
- """
- return True
-
-def run(ooni, asset=None):
- """
- This is the function that will be called by OONI
- and it is responsible for instantiating and passing
- the arguments to the Test class.
- """
- config = ooni.config
-
- # This the assets array to be passed to the run function of
- # the test
- if asset:
- assets = [TCPScanAsset(asset)]
- else:
- assets = [TCPScanAsset(os.path.join(config.main.assetdir, \
- "tcpscan.txt"))]
-
- # Instantiate the Test
- thetest = TCPScan(ooni)
- ooni.logger.info("starting TCP Scan...")
- # Run the test with argument assets
- thetest.run(assets)
- ooni.logger.info("finished.")
diff --git a/ooni/hack_this/traceroute.py b/ooni/hack_this/traceroute.py
deleted file mode 100644
index e8252c1..0000000
--- a/ooni/hack_this/traceroute.py
+++ /dev/null
@@ -1,108 +0,0 @@
-try:
- from dns import resolver
-except:
- print "Error: dnspython is not installed (http://www.dnspython.org/)"
-import gevent
-import os
-import plugoo
-
-try:
- import scapy
-except:
- print "Error: traceroute plugin requires scapy to be installed (http://www.secdev.org/projects/scapy)"
-
-from plugoo.assets import Asset
-from plugoo.tests import Test
-
-import socket
-
-__plugoo__ = "Traceroute"
-__desc__ = "Performs TTL walking tests"
-
-class TracerouteAsset(Asset):
- def __init__(self, file=None):
- self = Asset.__init__(self, file)
-
-
-class Traceroute(Test):
- """A *very* quick and dirty traceroute implementation, UDP and TCP
- """
- def traceroute(self, dst, dst_port=3880, src_port=3000, proto="tcp", max_hops=30):
- dest_addr = socket.gethostbyname(dst)
- print "Doing traceroute on %s" % dst
-
- recv = socket.getprotobyname('icmp')
- send = socket.getprotobyname(proto)
- ttl = 1
- while True:
- recv_sock = socket.socket(socket.AF_INET, socket.SOCK_RAW, recv)
- if proto == "tcp":
- send_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, send)
- else:
- send_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, send)
- recv_sock.settimeout(10)
- send_sock.settimeout(10)
-
- send_sock.setsockopt(socket.SOL_IP, socket.IP_TTL, ttl)
- recv_sock.bind(("", src_port))
- if proto == "tcp":
- try:
- send_sock.settimeout(2)
- send_sock.connect((dst, dst_port))
- except socket.timeout:
- pass
-
- except Exception, e:
- print "Error doing connect %s" % e
- else:
- send_sock.sendto("", (dst, dst_port))
-
- curr_addr = None
- try:
- print "receiving data..."
- _, curr_addr = recv_sock.recvfrom(512)
- curr_addr = curr_addr[0]
-
- except socket.error, e:
- print "SOCKET ERROR: %s" % e
-
- except Exception, e:
- print "ERROR: %s" % e
-
- finally:
- send_sock.close()
- recv_sock.close()
-
- if curr_addr is not None:
- curr_host = "%s" % curr_addr
- else:
- curr_host = "*"
-
- print "%d\t%s" % (ttl, curr_host)
-
- if curr_addr == dest_addr or ttl > max_hops:
- break
-
- ttl += 1
-
-
- def experiment(self, *a, **kw):
- # this is just a dirty hack
- address = kw['data'][0]
-
- self.traceroute(address)
-
-def run(ooni):
- """Run the test"""
- config = ooni.config
- urls = []
-
- traceroute_experiment = TracerouteAsset(os.path.join(config.main.assetdir, \
- config.tests.traceroute))
-
- assets = [traceroute_experiment]
-
- traceroute = Traceroute(ooni)
- ooni.logger.info("starting traceroute test")
- traceroute.run(assets)
- ooni.logger.info("finished")
diff --git a/ooni/plugins/TESTS_ARE_MOVING.txt b/ooni/plugins/TESTS_ARE_MOVING.txt
deleted file mode 100644
index f4c0084..0000000
--- a/ooni/plugins/TESTS_ARE_MOVING.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-7/10/2012
-
-All new tests will be moved to the directory /nettests/.
-
-Tests that are in this directory are either here for historical reasons or have
-not yet been properly tested and fully supporting the new API.
-
-A.
diff --git a/ooni/plugins/chinatrigger.py b/ooni/plugins/chinatrigger.py
deleted file mode 100644
index cf4bcb3..0000000
--- a/ooni/plugins/chinatrigger.py
+++ /dev/null
@@ -1,140 +0,0 @@
-import random
-import string
-import struct
-import time
-
-from zope.interface import implements
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from twisted.internet import protocol, defer
-from ooni.plugoo.tests import ITest, OONITest
-from ooni.plugoo.assets import Asset
-from ooni.utils import log
-from ooni.protocols.scapyproto import ScapyTest
-
-from ooni.lib.txscapy import txsr, txsend
-
-class scapyArgs(usage.Options):
- optParameters = [['dst', 'd', None, 'Specify the target address'],
- ['port', 'p', None, 'Specify the target port'],
- ['pcap', 'f', None, 'The pcap file to write with the sent and received packets'],
- ]
-
-class ChinaTriggerTest(ScapyTest):
- """
- This test is a OONI based implementation of the C tool written
- by Philipp Winter to engage chinese probes in active scanning.
-
- Example of running it:
- ./ooni/ooniprobe.py chinatrigger -d 127.0.0.1 -p 8080 -f bla.pcap
- """
- implements(IPlugin, ITest)
-
- shortName = "chinatrigger"
- description = "Triggers the chinese probes into scanning"
- requirements = ['root']
- options = scapyArgs
- blocking = False
-
- receive = True
- pcapfile = 'example_scapy.pcap'
- timeout = 5
-
- def initialize(self, reactor=None):
- if not self.reactor:
- from twisted.internet import reactor
- self.reactor = reactor
-
- @staticmethod
- def set_random_servername(pkt):
- ret = pkt[:121]
- for i in range(16):
- ret += random.choice(string.ascii_lowercase)
- ret += pkt[121+16:]
- return ret
-
- @staticmethod
- def set_random_time(pkt):
- ret = pkt[:11]
- ret += struct.pack('!I', int(time.time()))
- ret += pkt[11+4:]
- return ret
-
- @staticmethod
- def set_random_field(pkt):
- ret = pkt[:15]
- for i in range(28):
- ret += chr(random.randint(0, 256))
- ret += pkt[15+28:]
- return ret
-
- @staticmethod
- def mutate(pkt, idx):
- """
- Slightly changed mutate function.
- """
- ret = pkt[:idx-1]
- mutation = chr(random.randint(0, 256))
- while mutation == pkt[idx]:
- mutation = chr(random.randint(0, 256))
- ret += mutation
- ret += pkt[idx:]
- return ret
-
- @staticmethod
- def set_all_random_fields(pkt):
- pkt = ChinaTriggerTest.set_random_servername(pkt)
- pkt = ChinaTriggerTest.set_random_time(pkt)
- pkt = ChinaTriggerTest.set_random_field(pkt)
- return pkt
-
- def build_packets(self, *args, **kw):
- """
- Override this method to build scapy packets.
- """
- from scapy.all import IP, TCP
- pkt = "\x16\x03\x01\x00\xcc\x01\x00\x00\xc8"\
- "\x03\x01\x4f\x12\xe5\x63\x3f\xef\x7d"\
- "\x20\xb9\x94\xaa\x04\xb0\xc1\xd4\x8c"\
- "\x50\xcd\xe2\xf9\x2f\xa9\xfb\x78\xca"\
- "\x02\xa8\x73\xe7\x0e\xa8\xf9\x00\x00"\
- "\x3a\xc0\x0a\xc0\x14\x00\x39\x00\x38"\
- "\xc0\x0f\xc0\x05\x00\x35\xc0\x07\xc0"\
- "\x09\xc0\x11\xc0\x13\x00\x33\x00\x32"\
- "\xc0\x0c\xc0\x0e\xc0\x02\xc0\x04\x00"\
- "\x04\x00\x05\x00\x2f\xc0\x08\xc0\x12"\
- "\x00\x16\x00\x13\xc0\x0d\xc0\x03\xfe"\
- "\xff\x00\x0a\x00\xff\x01\x00\x00\x65"\
- "\x00\x00\x00\x1d\x00\x1b\x00\x00\x18"\
- "\x77\x77\x77\x2e\x67\x6e\x6c\x69\x67"\
- "\x78\x7a\x70\x79\x76\x6f\x35\x66\x76"\
- "\x6b\x64\x2e\x63\x6f\x6d\x00\x0b\x00"\
- "\x04\x03\x00\x01\x02\x00\x0a\x00\x34"\
- "\x00\x32\x00\x01\x00\x02\x00\x03\x00"\
- "\x04\x00\x05\x00\x06\x00\x07\x00\x08"\
- "\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00"\
- "\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11"\
- "\x00\x12\x00\x13\x00\x14\x00\x15\x00"\
- "\x16\x00\x17\x00\x18\x00\x19\x00\x23"\
- "\x00\x00"
-
- pkt = ChinaTriggerTest.set_all_random_fields(pkt)
- pkts = [IP(dst=self.dst)/TCP(dport=self.port)/pkt]
- for x in range(len(pkt)):
- mutation = IP(dst=self.dst)/TCP(dport=self.port)/ChinaTriggerTest.mutate(pkt, x)
- pkts.append(mutation)
- return pkts
-
- def load_assets(self):
- if self.local_options:
- self.dst = self.local_options['dst']
- self.port = int(self.local_options['port'])
- if self.local_options['pcap']:
- self.pcapfile = self.local_options['pcap']
- if not self.port or not self.dst:
- pass
-
- return {}
-
-#chinatrigger = ChinaTriggerTest(None, None, None)
-
diff --git a/ooni/plugins/daphn3.py b/ooni/plugins/daphn3.py
deleted file mode 100644
index bf4d60d..0000000
--- a/ooni/plugins/daphn3.py
+++ /dev/null
@@ -1,152 +0,0 @@
-"""
-This is a self genrated test created by scaffolding.py.
-you will need to fill it up with all your necessities.
-Safe hacking :).
-"""
-from zope.interface import implements
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from twisted.internet import protocol, endpoints
-
-from ooni.plugoo import reports
-from ooni.plugoo.tests import ITest, OONITest
-from ooni.plugoo.assets import Asset
-from ooni.protocols import daphn3
-from ooni.utils import log
-
-class Daphn3ClientProtocol(daphn3.Daphn3Protocol):
- def connectionMade(self):
- self.next_state()
-
-class Daphn3ClientFactory(protocol.ClientFactory):
- protocol = Daphn3ClientProtocol
- mutator = None
- steps = None
- test = None
-
- def buildProtocol(self, addr):
- p = self.protocol()
- p.factory = self
- p.test = self.test
-
- if self.steps:
- p.steps = self.steps
-
- if not self.mutator:
- self.mutator = daphn3.Mutator(p.steps)
-
- else:
- print "Moving on to next mutation"
- self.mutator.next()
-
- p.mutator = self.mutator
- p.current_state = self.mutator.state()
- return p
-
- def clientConnectionFailed(self, reason):
- print "We failed connecting the the OONIB"
- print "Cannot perform test. Perhaps it got blocked?"
- print "Please report this to tor-assistants(a)torproject.org"
- self.test.result['error'] = ('Failed in connecting to OONIB', reason)
- self.test.end(d)
-
- def clientConnectionLost(self, reason):
- print "Connection Lost."
-
-class daphn3Args(usage.Options):
- optParameters = [['pcap', 'f', None,
- 'PCAP to read for generating the YAML output'],
-
- ['output', 'o', 'daphn3.yaml',
- 'What file should be written'],
-
- ['yaml', 'y', None,
- 'The input file to the test'],
-
- ['host', 'h', None, 'Target Hostname'],
- ['port', 'p', None, 'Target port number'],
- ['resume', 'r', 0, 'Resume at this index']]
-
-class daphn3Test(OONITest):
- implements(IPlugin, ITest)
-
- shortName = "daphn3"
- description = "daphn3"
- requirements = None
- options = daphn3Args
- blocking = False
-
- local_options = None
-
- steps = None
-
- def initialize(self):
- if not self.local_options:
- self.end()
- return
-
- self.factory = Daphn3ClientFactory()
- self.factory.test = self
-
- if self.local_options['pcap']:
- self.tool = True
-
- elif self.local_options['yaml']:
- self.steps = daphn3.read_yaml(self.local_options['yaml'])
-
- else:
- log.msg("Not enough inputs specified to the test")
- self.end()
-
- def runTool(self):
- import yaml
- pcap = daphn3.read_pcap(self.local_options['pcap'])
- f = open(self.local_options['output'], 'w')
- f.write(yaml.dump(pcap))
- f.close()
-
- def control(self, exp_res, args):
- try:
- mutation = self.factory.mutator.get(0)
- self.result['censored'] = False
- except:
- mutation = None
-
- return {'mutation_number': args['mutation'],
- 'value': mutation}
-
- def _failure(self, *argc, **kw):
- self.result['censored'] = True
- self.result['error'] = ('Failed in connecting', (argc, kw))
- self.end()
-
- def experiment(self, args):
- log.msg("Doing mutation %s" % args['mutation'])
- self.factory.steps = self.steps
- host = self.local_options['host']
- port = int(self.local_options['port'])
- log.msg("Connecting to %s:%s" % (host, port))
-
- if self.ended:
- return
-
- endpoint = endpoints.TCP4ClientEndpoint(self.reactor, host, port)
- d = endpoint.connect(self.factory)
- d.addErrback(self._failure)
- return d
-
- def load_assets(self):
- if not self.local_options:
- return {}
- if not self.steps:
- print "Error: No assets!"
- self.end()
- return {}
- mutations = 0
- for x in self.steps:
- mutations += len(x['data'])
- return {'mutation': range(mutations)}
-
-# We need to instantiate it otherwise getPlugins does not detect it
-# XXX Find a way to load plugins without instantiating them.
-#daphn3test = daphn3Test(None, None, None)
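
For reference, a minimal sketch of how daphn3Test.load_assets() derives the mutation count from a list of steps, assuming each step is a dict with a 'data' payload as the code above expects (the payloads here are made up).

    # Hypothetical steps in the shape load_assets() expects: one dict per step,
    # each carrying the raw bytes to send for that step.
    steps = [
        {"data": "GET / HTTP/1.1\r\n"},
        {"data": "Host: example.com\r\n"},
    ]
    mutations = sum(len(step["data"]) for step in steps)
    print(mutations)  # one test run per mutation index
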
diff --git a/ooni/plugins/domclass.py b/ooni/plugins/domclass.py
deleted file mode 100644
index 3080c40..0000000
--- a/ooni/plugins/domclass.py
+++ /dev/null
@@ -1,216 +0,0 @@
-#!/usr/bin/env python
-#-*- encoding: utf-8 -*-
-#
-# domclass
-# ********
-#
-# :copyright: (c) 2012 by Arturo Filastò
-# :license: see LICENSE for more details.
-#
-# how this works
-# --------------
-#
-# This classifier uses the DOM structure of a website to determine how similar
-# the two sites are.
-# The procedure we use is the following:
-# * First we parse all the DOM tree of the web page and we build a list of
-# TAG parent child relationships (ex. <html><a><b></b></a><c></c></html> =>
-# (html, a), (a, b), (html, c)).
-#
-# * We then use this information to build a matrix (M) where m[i][j] = P(of
-# transitioning from tag[i] to tag[j]). If tag[i] does not exist, P() = 0.
-# Note: M is a square matrix that is number_of_tags wide.
-#
-# * We then calculate the eigenvectors (v_i) and eigenvalues (e) of M.
-#
-# * The correlation between page A and B is given via this formula:
-# correlation = dot_product(e_A, e_B), where e_A and e_B are
-# respectively the eigenvalues for the probability matrix A and the
-# probability matrix B.
-#
-
-try:
- import numpy
-except:
- print "Error numpy not installed!"
-
-import yaml
-from zope.interface import implements
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from ooni.plugoo.tests import ITest, OONITest
-from ooni.plugoo.assets import Asset
-from ooni.utils import log
-from ooni.protocols.http import HTTPTest
-
-class domclassArgs(usage.Options):
- optParameters = [['output', 'o', None, 'Output to write'],
- ['file', 'f', None, 'Corpus file'],
- ['fileb', 'b', None, 'Corpus file'],
- ['urls', 'u', None, 'URL List'],
- ['resume', 'r', 0, 'Resume at this index']]
-
-# All HTML4 tags
-# XXX add link to W3C page where these came from
-alltags = ['A', 'ABBR', 'ACRONYM', 'ADDRESS', 'APPLET', 'AREA', 'B', 'BASE',
- 'BASEFONT', 'BD', 'BIG', 'BLOCKQUOTE', 'BODY', 'BR', 'BUTTON', 'CAPTION',
- 'CENTER', 'CITE', 'CODE', 'COL', 'COLGROUP', 'DD', 'DEL', 'DFN', 'DIR', 'DIV',
- 'DL', 'DT', 'E M', 'FIELDSET', 'FONT', 'FORM', 'FRAME', 'FRAMESET', 'H1', 'H2',
- 'H3', 'H4', 'H5', 'H6', 'HEAD', 'HR', 'HTML', 'I', 'IFRAME ', 'IMG',
- 'INPUT', 'INS', 'ISINDEX', 'KBD', 'LABEL', 'LEGEND', 'LI', 'LINK', 'MAP',
- 'MENU', 'META', 'NOFRAMES', 'NOSCRIPT', 'OBJECT', 'OL', 'OPTGROUP', 'OPTION',
- 'P', 'PARAM', 'PRE', 'Q', 'S', 'SAMP', 'SCRIPT', 'SELECT', 'SMALL', 'SPAN',
- 'STRIKE', 'STRONG', 'STYLE', 'SUB', 'SUP', 'TABLE', 'TBODY', 'TD',
- 'TEXTAREA', 'TFOOT', 'TH', 'THEAD', 'TITLE', 'TR', 'TT', 'U', 'UL', 'VAR']
-
-# Reduced subset of only the most common tags
-commontags = ['A', 'B', 'BLOCKQUOTE', 'BODY', 'BR', 'BUTTON', 'CAPTION',
- 'CENTER', 'CITE', 'CODE', 'COL', 'DD', 'DIV',
- 'DL', 'DT', 'EM', 'FIELDSET', 'FONT', 'FORM', 'FRAME', 'FRAMESET', 'H1', 'H2',
- 'H3', 'H4', 'H5', 'H6', 'HEAD', 'HR', 'HTML', 'IFRAME ', 'IMG',
- 'INPUT', 'INS', 'LABEL', 'LEGEND', 'LI', 'LINK', 'MAP',
- 'MENU', 'META', 'NOFRAMES', 'NOSCRIPT', 'OBJECT', 'OL', 'OPTION',
- 'P', 'PRE', 'SCRIPT', 'SELECT', 'SMALL', 'SPAN',
- 'STRIKE', 'STRONG', 'STYLE', 'SUB', 'SUP', 'TABLE', 'TBODY', 'TD',
- 'TEXTAREA', 'TFOOT', 'TH', 'THEAD', 'TITLE', 'TR', 'TT', 'U', 'UL']
-
-# The tags we are interested in using for our analysis
-thetags = ['A', 'DIV', 'FRAME', 'H1', 'H2',
- 'H3', 'H4', 'IFRAME ', 'INPUT',
- 'LABEL','LI', 'P', 'SCRIPT', 'SPAN',
- 'STYLE', 'TR']
-
-def compute_probability_matrix(dataset):
- """
- Compute the probability matrix based on the input dataset.
-
- :dataset: an array of pairs representing the parent child relationships.
- """
- import itertools
- ret = {}
- matrix = numpy.zeros((len(thetags) + 1, len(thetags) + 1))
-
- for data in dataset:
- x = data[0].upper()
- y = data[1].upper()
- try:
- x = thetags.index(x)
- except:
- x = len(thetags)
-
- try:
- y = thetags.index(y)
- except:
- y = len(thetags)
-
- matrix[x,y] += 1
-
- for x in xrange(len(thetags) + 1):
- possibilities = 0
- for y in matrix[x]:
- possibilities += y
-
- for i in xrange(len(matrix[x])):
- if possibilities != 0:
- matrix[x][i] = matrix[x][i]/possibilities
-
- return matrix
-
-def compute_eigenvalues(matrix):
- """
- Returns the eigenvalues of the supplied square matrix.
-
- :matrix: must be a square matrix and diagonalizable.
- """
- return numpy.linalg.eigvals(matrix)
-
-def readDOM(content=None, filename=None):
- """
- Parses the DOM of the HTML page and returns an array of parent, child
- pairs.
-
- :content: the content of the HTML page to be read.
-
- :filename: the filename to be read from for getting the content of the
- page.
- """
- from bs4 import BeautifulSoup
-
- if filename:
- f = open(filename)
- content = ''.join(f.readlines())
- f.close()
-
- dom = BeautifulSoup(content)
- couples = []
- for x in dom.findAll():
- couples.append((str(x.parent.name), str(x.name)))
-
- return couples
-
-class domclassTest(HTTPTest):
- implements(IPlugin, ITest)
-
- shortName = "domclass"
- description = "domclass"
- requirements = None
- options = domclassArgs
- blocking = False
-
- follow_redirects = True
- #tool = True
-
- def runTool(self):
- site_a = readDOM(filename=self.local_options['file'])
- site_b = readDOM(filename=self.local_options['fileb'])
- a = {}
- a['matrix'] = compute_probability_matrix(site_a)
- a['eigen'] = compute_eigenvalues(a['matrix'])
-
- self.result['eigenvalues'] = a['eigen']
- b = {}
- b['matrix'] = compute_probability_matrix(site_b)
- b['eigen'] = compute_eigenvalues(b['matrix'])
-
- #print "A: %s" % a
- #print "B: %s" % b
- correlation = numpy.vdot(a['eigen'],b['eigen'])
- correlation /= numpy.linalg.norm(a['eigen'])*numpy.linalg.norm(b['eigen'])
- correlation = (correlation + 1)/2
- print "Corelation: %s" % correlation
- self.end()
- return a
-
- def processResponseBody(self, data):
- site_a = readDOM(data)
- #site_b = readDOM(self.local_options['fileb'])
- a = {}
- a['matrix'] = compute_probability_matrix(site_a)
- a['eigen'] = compute_eigenvalues(a['matrix'])
-
-
- if len(data) == 0:
- self.result['eigenvalues'] = None
- self.result['matrix'] = None
- else:
- self.result['eigenvalues'] = a['eigen']
- #self.result['matrix'] = a['matrix']
- #self.result['content'] = data[:200]
- #b = compute_matrix(site_b)
- print "A: %s" % a
- return a['eigen']
-
- def load_assets(self):
- if self.local_options:
- if self.local_options['file']:
- self.tool = True
- return {}
- elif self.local_options['urls']:
- return {'url': Asset(self.local_options['urls'])}
- else:
- self.end()
- return {}
- else:
- return {}
-
-#domclass = domclassTest(None, None, None)
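
The eigenvalue-correlation idea described in the domclass.py header can be sketched standalone as follows; the tag list and helper names are illustrative rather than the plugin's, and degenerate pages whose eigenvalues are all zero would need an extra guard.

    import numpy as np

    def transition_matrix(pairs, tags):
        # Row-normalised tag-transition counts; unknown tags share the last index.
        n = len(tags) + 1
        m = np.zeros((n, n))
        for parent, child in pairs:
            i = tags.index(parent.upper()) if parent.upper() in tags else n - 1
            j = tags.index(child.upper()) if child.upper() in tags else n - 1
            m[i, j] += 1
        sums = m.sum(axis=1, keepdims=True)
        return np.divide(m, sums, out=np.zeros_like(m), where=sums != 0)

    def similarity(pairs_a, pairs_b, tags=("A", "DIV", "P", "SPAN")):
        e_a = np.linalg.eigvals(transition_matrix(pairs_a, list(tags)))
        e_b = np.linalg.eigvals(transition_matrix(pairs_b, list(tags)))
        c = np.vdot(e_a, e_b) / (np.linalg.norm(e_a) * np.linalg.norm(e_b))
        return (c.real + 1) / 2   # map [-1, 1] onto [0, 1], as the plugin does

    page = [("html", "body"), ("body", "div"), ("div", "div"), ("div", "a")]
    print(similarity(page, page))   # ~1.0 for two pages with identical structure
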
diff --git a/ooni/plugins/httpt.py b/ooni/plugins/httpt.py
deleted file mode 100644
index 358f1ea..0000000
--- a/ooni/plugins/httpt.py
+++ /dev/null
@@ -1,94 +0,0 @@
-"""
-This is a self-generated test created by scaffolding.py.
-You will need to fill it up with all your necessities.
-Safe hacking :).
-"""
-from zope.interface import implements
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from ooni.plugoo.tests import ITest, OONITest
-from ooni.plugoo.assets import Asset
-from ooni.protocols import http
-from ooni.utils import log
-
-class httptArgs(usage.Options):
- optParameters = [['urls', 'f', None, 'Urls file'],
- ['url', 'u', 'http://torproject.org/', 'Test single site'],
- ['resume', 'r', 0, 'Resume at this index'],
- ['rules', 'y', None, 'Specify the redirect rules file']]
-
-class httptTest(http.HTTPTest):
- implements(IPlugin, ITest)
-
- shortName = "httpt"
- description = "httpt"
- requirements = None
- options = httptArgs
- blocking = False
-
-
- def testPattern(self, value, pattern, type):
- if type == 'eq':
- return value == pattern
- elif type == 're':
- import re
- if re.match(pattern, value):
- return True
- else:
- return False
- else:
- return None
-
- def testPatterns(self, patterns, location):
- test_result = False
-
- if type(patterns) == list:
- for pattern in patterns:
- test_result |= self.testPattern(location, pattern['value'], pattern['type'])
- else:
- test_result |= self.testPattern(location, patterns['value'], patterns['type'])
-
- return test_result
-
- def testRules(self, rules, location):
- result = {}
- blocked = False
- for rule, value in rules.items():
- current_rule = {}
- current_rule['name'] = value['name']
- current_rule['patterns'] = value['patterns']
- current_rule['test'] = self.testPatterns(value['patterns'], location)
- blocked |= current_rule['test']
- result[rule] = current_rule
- result['blocked'] = blocked
- return result
-
- def processRedirect(self, location):
- self.result['redirect'] = None
- try:
- rules_file = self.local_options['rules']
- import yaml
- rules = yaml.load(open(rules_file))
- log.msg("Testing rules %s" % rules)
- redirect = self.testRules(rules, location)
- self.result['redirect'] = redirect
- except TypeError:
- log.msg("No rules file. Got a redirect, but nothing to do.")
-
-
- def control(self, experiment_result, args):
- print self.response
- print self.request
- # What you return here ends up inside of the report.
- log.msg("Running control")
- return {}
-
- def load_assets(self):
- if self.local_options and self.local_options['urls']:
- return {'url': Asset(self.local_options['urls'])}
- else:
- return {}
-
-# We need to instantiate it otherwise getPlugins does not detect it
-# XXX Find a way to load plugins without instantiating them.
-#httpt = httptTest(None, None, None)
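
A minimal sketch of the redirect-rule matching that httptTest.testRules()/testPatterns() perform, assuming rules of the shape the code above reads from its YAML file ('eq' compares the Location literally, 're' treats it as a regular expression); the rule contents here are only examples.

    import re

    # Hypothetical rules in the shape testRules() expects: each rule has a name
    # and one or more patterns matched against the redirect Location.
    rules = {
        "block-page": {
            "name": "ISP block page",
            "patterns": [
                {"type": "eq", "value": "http://10.66.66.66/denied.html"},
                {"type": "re", "value": r"^http://10\.66\.66\.66/.*"},
            ],
        },
    }

    def pattern_matches(location, pattern):
        if pattern["type"] == "eq":
            return location == pattern["value"]
        if pattern["type"] == "re":
            return re.match(pattern["value"], location) is not None
        return False

    def check_redirect(location):
        result = {name: any(pattern_matches(location, p) for p in spec["patterns"])
                  for name, spec in rules.items()}
        result["blocked"] = any(result.values())
        return result

    print(check_redirect("http://10.66.66.66/denied.html"))  # blocked: True
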
diff --git a/ooni/plugins/tcpconnect.py b/ooni/plugins/tcpconnect.py
deleted file mode 100644
index 7758a9e..0000000
--- a/ooni/plugins/tcpconnect.py
+++ /dev/null
@@ -1,65 +0,0 @@
-"""
-This is a self-generated test created by scaffolding.py.
-You will need to fill it up with all your necessities.
-Safe hacking :).
-"""
-from zope.interface import implements
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from twisted.internet.protocol import Factory, Protocol
-from twisted.internet.endpoints import TCP4ClientEndpoint
-
-from ooni.plugoo.interface import ITest
-from ooni.plugoo.tests import OONITest
-from ooni.plugoo.assets import Asset
-from ooni.utils import log
-
-class tcpconnectArgs(usage.Options):
- optParameters = [['asset', 'a', None, 'File containing IP:PORT combinations, one per line.'],
- ['resume', 'r', 0, 'Resume at this index']]
-
-class tcpconnectTest(OONITest):
- implements(IPlugin, ITest)
-
- shortName = "tcpconnect"
- description = "tcpconnect"
- requirements = None
- options = tcpconnectArgs
- blocking = False
-
- def experiment(self, args):
- try:
- host, port = args['asset'].split(':')
- except:
- raise Exception("Error in parsing asset. Wrong format?")
- class DummyFactory(Factory):
- def buildProtocol(self, addr):
- return Protocol()
-
- def gotProtocol(p):
- p.transport.loseConnection()
- log.msg("Got a connection!")
- log.msg(str(p))
- return {'result': True, 'target': [host, port]}
-
- def gotError(err):
- log.msg("Had error :(")
- log.msg(err)
- return {'result': False, 'target': [host, port]}
-
- # What you return here gets handed as input to control
- point = TCP4ClientEndpoint(self.reactor, host, int(port))
- d = point.connect(DummyFactory())
- d.addCallback(gotProtocol)
- d.addErrback(gotError)
- return d
-
- def load_assets(self):
- if self.local_options:
- return {'asset': Asset(self.local_options['asset'])}
- else:
- return {}
-
-# We need to instantiate it otherwise getPlugins does not detect it
-# XXX Find a way to load plugins without instantiating them.
-#tcpconnect = tcpconnectTest(None, None, None)
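
The deleted plugin performs the reachability check asynchronously with a Twisted endpoint; for comparison, a minimal blocking sketch that produces the same result shape (the target is just an example).

    import socket

    def tcp_connect(target, timeout=5.0):
        # Can we complete a TCP handshake with host:port within the timeout?
        host, port = target.split(":")
        try:
            socket.create_connection((host, int(port)), timeout=timeout).close()
            return {"result": True, "target": [host, port]}
        except (socket.error, OSError) as err:
            return {"result": False, "target": [host, port], "error": str(err)}

    print(tcp_connect("torproject.org:443"))
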
03 Nov '12
commit 3350885b7d3c24795b5ccf9e1fbeee379ebcecd0
Author: Isis Lovecruft <isis(a)torproject.org>
Date: Sat Nov 3 01:19:53 2012 +0000
* Updated the TODO file. PLEASE READ IT.
---
TODO | 110 ++++++++++++++++++++++++++++++++++++++++++++++++++++++------------
1 files changed, 90 insertions(+), 20 deletions(-)
diff --git a/TODO b/TODO
index 2686ef7..63d950c 100644
--- a/TODO
+++ b/TODO
@@ -1,26 +1,12 @@
This is a list of things to be done on ooni-probe.
-Once you have completed something you should add a
-note to this file stating what you have done under
-the item.
+Once you have completed something, you should add a brief note to this file
+stating what you have done under the item. If you discover needed tasks, feel
+free to add them, but keep in mind that OONI mostly uses the Tor Trac
+instance, and the main ticket for OONI, under which all tests should be
+organized, is here:
-Migrate code from old
----------------------
-
-Migrate all the interesting parts of the old code to the new.
-
-It's important to make the new code asych and based on Twisted.
-It should respect the design goals of the new ooni-probe model.
-
-New things to develop
----------------------
-
-These are either components specific to the new refactor of ooni
-or that we haven't yet figured out how they should work.
-
-* Design and implement the Node Factory
-
-* Design and implement the Network Node and the Code Exec node classes
+ https://trac.torproject.org/projects/tor/ticket/5869
New things to test
------------------
@@ -36,3 +22,87 @@ New things to test
nowhere is this presented to someone trying to run a test. So, the informing
users/testers bit can be worked on, and the testing. Obviously we're going
to want something more robust than a 20 LOC Makefile pretty fast.
+
+Finalization of API design
+--------------------------
+
+* The nettest.TestCase should have an interface.
+
+ I know that there is a push away from using zope.interfaces, but I think
+ it is actually *highly* necessary for ensuring that subclasses implement
+ the required functions, and also that they do not improperly override the
+ functions necessary for them to run.
+
+ Personally, I am quite annoyed when I subclass a class from Twisted and
+ override a public method, and it breaks things (when nothing in their
+ documentation informed me that it would break things) and I have to spend
+ half an hour digging through their code to figure out precisely what is
+ needed externally from the function I'm overriding. Others should not have
+ to do this with our code.
+
+* The nettest.TestCase should have a twisted.python.usage.Options subclass and
+ interface as well, even if the instantiation of that subclass is handled by
+ the ooni.oonicli or the ooni.runner. There is more functionality to
+ usage.Options that we should expose than merely "optParameters", for
+ instance the "coerceOptions" parameter validation methods, or the
+ "postOptions" configuration.
+
+New things to develop
+---------------------
+
+These are either components specific to the new refactor of ooni
+or that we haven't yet figured out how they should work.
+
+* Finish implementing the backend collection code.
+
+ o PCAP READER/WRITER:
+ This should be quite simple...see scapy.all.wrpcap and
+ scapy.all.rdpcap. However, we have been warned by other projects that
+ this does *not* scale well. For example, see:
+ https://github.com/isislovecruft/switzerland/blob/master/switzerland/client…
+ Which is a circular ring buffer specifically for libpcap, to avoid kernel
+ buffer overflows due to a high number of incoming packets. I expect this
+ to only be an issue on substantially high-bandwidth nodes...though that
+ is what we'll be dealing with when we deploy on Mlab.
+
+ o PCAP UPLOADER:
+ This also sounds simple, and is, until you begin to deal with things like
+ persistence. What we really need is rsync, written in python, or at least
+ some cross-platform implementation. I (Isis speaking) am the current
+ maintainer of pyrsync, BUT DO NOT USE PYRSYNC. It is only an
+ implementation of the rsync *algorithm* for diffs, it is not rsync the
+ program. Also, it is BROKEN AND I DO NOT MAINTAIN IT. If you want to
+ maintain it, please take it off my hands.
+
+* Useability:
+
+ o UNITTESTS. Pronto.
+
+ o DOCUMENTATION. If you found something that confused you, or still
+ confuses you, and you couldn't find the answer within fifteen seconds,
+ then that thing is not well documented. Make it better, or at least mark
+ it with an "XXX document me!" tag.
+
+* Persistence:
+
+ o We need some type of scheduler/cron thing which will background the tests
+ so that they don't take up a terminal, and can be configured to run
+ certain tests at timed intervals.
+
+ o The Reporter will probably need to be updated to handle knowing when *a
+ test* has completed, but that the scheduler is still running.
+
+Migrate code from old
+---------------------
+
+Migrate all the interesting parts of the old code to the new. This is mostly
+finished, but there still are things in the /old-to-be-ported directory which
+might be of use. At this point, because we have gone through several version
+of the API design, many of them are entirely unusable, and merely the general
+idea remains.
+
+It's important to make the new code async and based on Twisted. It should
+respect the design goals of the new ooni-probe model. Also, importing new,
+non-standard libraries should be discussed first, if the new test is to be
+used in the core of OONI (packaging scapy and twisted already makes our
+codebase quite large).
\ No newline at end of file
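
The PCAP reader/writer item above points at scapy.all.wrpcap and scapy.all.rdpcap; a minimal sketch of those two calls (addresses and file name are placeholders).

    from scapy.all import IP, TCP, rdpcap, wrpcap

    pkts = [IP(dst="192.0.2.1") / TCP(dport=443),
            IP(dst="192.0.2.2") / TCP(dport=80)]
    wrpcap("ooniprobe-example.pcap", pkts)        # writer
    for pkt in rdpcap("ooniprobe-example.pcap"):  # reader
        print(pkt.summary())
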
[ooni-probe/master] * Removing some of the old old old code. Everything that I've deleted is
by isis@torproject.org 03 Nov '12
03 Nov '12
commit 559171b5c4ac91f780d96ddbcce4664ab06a2654
Author: Isis Lovecruft <isis(a)torproject.org>
Date: Sat Nov 3 01:16:27 2012 +0000
* Removing some of the old old old code. Everything that I've deleted is
something I've read through and decided either has a replacement or else is
useless in the current context.
* These files in particular are all roughly 10-line bash scripts which, in most
cases, don't do anything at all because the files they modify/wget/parse
do not exist.
---
old_scripts/README | 47 -------------------------------------------
old_scripts/TODO | 6 -----
old_scripts/dns-checker.sh | 7 ------
old_scripts/host-prep.sh | 20 ------------------
old_scripts/run-tests.sh | 11 ----------
old_scripts/twitter-test.sh | 33 ------------------------------
6 files changed, 0 insertions(+), 124 deletions(-)
diff --git a/old_scripts/README b/old_scripts/README
deleted file mode 100644
index 4903479..0000000
--- a/old_scripts/README
+++ /dev/null
@@ -1,47 +0,0 @@
- "Marco!"
-
- "Polo!"
-
- * * *
-
-The marco.py script tries to figure out who's out there. It does this
-by trying to do an SSL handshake with a lot of Tor servers in parallel.
-If it succeeds, it records their certificates. If it fails, it records
-why.
-
-WHAT YOU MIGHT NEED:
-
- - I tested it with Python 2.6, and I think it should work with Python 2.5.
- If your Python is older than that, it won't work.
-
-HOW TO USE IT:
-
- - Edit the top of marco.py to make sure you like the defaults. You
- can adjust the timeout, where it writes stuff, and how many servers
- it tests in parallel.
-
- - Run marco.py with one or more networkstatus files as command-line
- arguments. If an addr:port appears more than once, marco will only
- test it once.
-
-HOW TO READ THE OUTPUT:
-
- - Marco will generate a file called marco.out full of lines like:
- ADDR:PORT STATUS MESSAGE.
-
- STATUS will be one of:
- "ok" -- everything is fine
- "noconnect" -- we couldn't open a TCP socket.
- "nohandshake" -- we couldn't do a TLS handshake.
- "err" -- we got an unexpected internal error
-
- MESSAGE will say more about what went wrong.
-
- The lines will be in the order that Marco received answers. If you want
- them to be sorted by something else, you'll need to do that yourself.
-
- - If you have Python 2.6, Marco will also generate a file called
- marco_certs.out, containing every TLS cert that it got for an "ok"
- server. We can use this later to make sure identity keys were correct.
-
- If you only have Python 2.5, Marco will only get the DN for the cert.
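
A minimal sketch of consuming the marco.out format described above, one "ADDR:PORT STATUS MESSAGE" record per line (the sample lines are invented).

    from collections import Counter

    def parse_marco_line(line):
        # "ADDR:PORT STATUS MESSAGE" -> dict; MESSAGE may itself contain spaces.
        addr_port, status, message = line.rstrip("\n").split(" ", 2)
        addr, port = addr_port.rsplit(":", 1)
        return {"addr": addr, "port": int(port), "status": status, "message": message}

    sample = ["192.0.2.10:443 ok handshake completed",
              "192.0.2.11:9001 noconnect connection refused"]
    print(Counter(parse_marco_line(l)["status"] for l in sample))
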
diff --git a/old_scripts/TODO b/old_scripts/TODO
deleted file mode 100644
index c24a16f..0000000
--- a/old_scripts/TODO
+++ /dev/null
@@ -1,6 +0,0 @@
-
-- Run anywhere, even older pythons.
-
-- Wrap the ssl stuff into its own class.
-
-- Decode certificates and detect MITM.
diff --git a/old_scripts/dns-checker.sh b/old_scripts/dns-checker.sh
deleted file mode 100644
index 9096c7f..0000000
--- a/old_scripts/dns-checker.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-for host in `cat twitter-host-list.txt`
-do
-echo "Trying to resolve: $host"
-host -t any $host
-done
diff --git a/old_scripts/host-prep.sh b/old_scripts/host-prep.sh
deleted file mode 100644
index b8f62d7..0000000
--- a/old_scripts/host-prep.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/bash
-SUITE="`lsb_release -c|cut -f2`";
-apt-get -y install tcptraceroute traceroute iputils-ping wget dnsutils \
- python-openssl rsync openssl libevent-1.4-2 zlib1g openssh-server
-
-# Lets make sure we can run these programs without ever becoming root again
-chmod 4755 `which tcptraceroute`
-chmod 4755 `which traceroute`
-
-# Install Tor from the Tor repo here...
-#cp /etc/apt/sources.list /etc/apt/sources.list.bkp
-#cat << "EOF" >> /etc/apt/sources.list
-#deb http://deb.torproject.org/torproject.org $SOURCE main
-#deb http://deb.torproject.org/torproject.org experimental-$SOURCE main
-#EOF
-#
-#gpg --keyserver keys.gnupg.net --recv 886DDD89
-#gpg --export A3C4F0F979CAA22CDBA8F512EE8CBC9E886DDD89 | sudo apt-key add -
-#apt-get update
-#apt-get install tor tor-geoipdb
diff --git a/old_scripts/run-tests.sh b/old_scripts/run-tests.sh
deleted file mode 100644
index 44d1c5a..0000000
--- a/old_scripts/run-tests.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-
-DATE="`date -u`";
-cd ~/.probe/logs/;
-~/.probe/bin/marco.py ~/.probe/logs/cached-consensus 2>&1 >> ~/.probe/logs/run-tests-marco-"$DATE".log;
-~/.probe/bin/dirconntest.sh 2>&1 >> ~/.probe/logs/run-tests-dirconntest-"$DATE".log;
-
-for host in `cat ~/.probe/logs/hosts.txt`;
-do
- ~/.probe/bin/generic-host-test.sh $host > 2>&1 >> ~/.probe/logs/generic-host-test-"$DATE".log;
-done;
diff --git a/old_scripts/twitter-test.sh b/old_scripts/twitter-test.sh
deleted file mode 100644
index 5dfcb41..0000000
--- a/old_scripts/twitter-test.sh
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/bin/bash
-#
-# A quick hack to (tcp)traceroute to a list of hosts
-#
-
-echo "tcp/conntest v0.6"
-date -R
-echo
-/sbin/ifconfig -a
-echo
-/sbin/route -n
-echo
-
-echo "Testing Twitter IP addresses..."
-for ip in `cat twitter-ip-list.txt|grep 1`
-do
- echo "Testing $ip"
- tcptraceroute -m 6 -w 1 $ip 80
- tcptraceroute -m 6 -w 1 $ip 0
- tcptraceroute -m 6 -w 1 $ip 123
- tcptraceroute -m 6 -w 1 $ip 443
-done
-echo "Various traceroute attempts"
-for ip in `cat twitter-ip-list.txt|grep 1`
-do
- traceroute -A $ip
- traceroute -A -I $ip
- traceroute -A -U $ip
-done
-
-wget -q -O- https://check.torproject.org|grep "IP address"
-echo
-date -R
commit aa57669fa826c6146a8028ed571c531ab2a2bc66
Author: Isis Lovecruft <isis(a)torproject.org>
Date: Fri Nov 2 17:07:08 2012 +0000
* Moar refaktorzingz.
---
old-to-be-ported-code/TODO.plgoons | 79 ----
old-to-be-ported-code/ooni-probe.diff | 358 -------------------
old-to-be-ported-code/ooni/.DS_Store | Bin 15364 -> 0 bytes
old-to-be-ported-code/ooni/__init__.py | 12 -
old-to-be-ported-code/ooni/command.py | 250 -------------
old-to-be-ported-code/ooni/dns_poisoning.py | 43 ---
old-to-be-ported-code/ooni/dnsooni.py | 356 ------------------
old-to-be-ported-code/ooni/helpers.py | 38 --
old-to-be-ported-code/ooni/http.py | 306 ----------------
old-to-be-ported-code/ooni/input.py | 33 --
old-to-be-ported-code/ooni/namecheck.py | 39 --
.../ooni/plugins/dnstest_plgoo.py | 84 -----
old-to-be-ported-code/ooni/plugins/http_plgoo.py | 70 ----
old-to-be-ported-code/ooni/plugins/marco_plgoo.py | 377 --------------------
old-to-be-ported-code/ooni/plugins/proxy_plgoo.py | 69 ----
.../ooni/plugins/simple_dns_plgoo.py | 35 --
old-to-be-ported-code/ooni/plugins/tcpcon_plgoo.py | 278 --------------
old-to-be-ported-code/ooni/plugins/tor.py | 80 ----
old-to-be-ported-code/ooni/plugins/torrc | 9 -
old-to-be-ported-code/ooni/plugooni.py | 106 ------
old-to-be-ported-code/ooni/transparenthttp.py | 41 ---
21 files changed, 0 insertions(+), 2663 deletions(-)
diff --git a/old-to-be-ported-code/TODO.plgoons b/old-to-be-ported-code/TODO.plgoons
deleted file mode 100644
index ace2a10..0000000
--- a/old-to-be-ported-code/TODO.plgoons
+++ /dev/null
@@ -1,79 +0,0 @@
-We should implement the following as plugoons:
-
-dns_plgoo.py - Various DNS checks
-
-As a start - we should perform a known good check against a name or list of
-names. As input, we should take an ip address, a name or a list of names for
-testing; we also take dns servers for experiment or control data. For output we
-emit UDP or TCP packets - we should support proxying these requests when
-possible as is the case with TCP but probably not with UDP for certain DNS
-request types.
-
-http_plgoo.py - Various HTTP checks
-
-We should compare two pages and see if we have identical properties.
-At the very least, we should print the important differences - perhaps
-with a diff like output? We should look for fingerprints in URLS that are
-returned. We should detect 302 re-direction.
-
-As input, we should take an ip address, a name or a list of names for testing;
-we also take a list of headers such as random user agent strings and so on.
-We should emit TCP packets and ensure that we do not leak DNS for connections
-that we expect to proxy to a remote network.
-
-latency_plgoo.py - Measure latency for a host or a list of hosts
-
-As input, we should take an ip address, a name or a list of names for testing;
-We should measure the mean latency from the ooni-probe to the host with various
-traceroute tests. We should also measure the latency between the ooni-probe and
-a given server for any other protocol that is request and response oriented;
-HTTP latency may be calculated by simply tracking the delta between requests
-and responses.
-
-tcptrace_plgoo.py udptrace_plgoo.py icmptrace_plgoo.py - Traceroute suites
-
-tcptrace_plgoo.py should allow for both stray and in-connection traceroute
-modes.
-
-udptrace_plgoo.py should use UDP 53 by default; 0 and 123 are also nice options
-- it may also be nice to simply make a random A record request in a DNS packet
-and use it as the payload for a UDP traceroute.
-
-reversetrace_plgoo.py should give a remote host the client's IP and return the
-output of a traceroute to that IP from the remote host. It will need a remote
-component if run against a web server. It would not need a remote component if
-run against route-views - we can simply telnet over Tor and ask it to trace to
-our detected client IP.
-
-keyword_plgoo.py should take a keyword or a list of keywords for use as a
-payload in a variety of protocols. This should be protocol aware - dns keyword
-filtering requires a sniffer to catch stray packets after the censor wins the
-race. HTTP payloads in open connections may be similar and in practice, we'll
-have to fine-tune it.
-
-icsi_plgoo.py - The ICSI Netalyzr tests; we should act as a client for their
-servers. They have dozens of tests and to implement this plgoo, we'll need to
-add many things to ooni. More details here:
-http://netalyzr.icsi.berkeley.edu/faq.html
-http://netalyzr.icsi.berkeley.edu/json/id=example-session
-
-HTML output:
-http://n2.netalyzr.icsi.berkeley.edu/summary/id=43ca208a-3466-82f17207-9bc1-433f-9b43
-
-JSON output:
-http://n2.netalyzr.icsi.berkeley.edu/json/id=43ca208a-3466-82f17207-9bc1-433f-9b43
-
-Netalyzer log:
-http://netalyzr.icsi.berkeley.edu/restore/id=43ca208a-3466-82f17207-9bc1-433f-9b43
-http://n2.netalyzr.icsi.berkeley.edu/transcript/id=43ca208a-3466-82f17207-9bc1-433f-9b43/side=client
-http://n2.netalyzr.icsi.berkeley.edu/transcript/id=43ca208a-3466-82f17207-9bc1-433f-9b43/side=server
-
-sniffer_plgoo.py - We need a generic method for capturing packets during a full
-run - this may be better as a core ooni-probe feature but we should implement
-packet capture in a plugin if it is done no where else.
-
-nmap_plgoo.py - We should take a list of hosts and run nmap against each of
-these hosts; many hosts are collected during testing and they should be scanned
-with something reasonable like "-A -O -T4 -sT --top-ports=10000".
-
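
A minimal sketch of the request/response-delta approach that the latency_plgoo.py item above suggests for HTTP latency (blocking; the URL is just an example).

    import time
    try:
        from urllib.request import urlopen   # Python 3
    except ImportError:
        from urllib2 import urlopen          # Python 2

    def http_latency(url, samples=3):
        # Mean delta between issuing a request and finishing the response body.
        deltas = []
        for _ in range(samples):
            start = time.time()
            urlopen(url).read()
            deltas.append(time.time() - start)
        return sum(deltas) / len(deltas)

    print(http_latency("http://example.com/"))
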
diff --git a/old-to-be-ported-code/ooni-probe.diff b/old-to-be-ported-code/ooni-probe.diff
deleted file mode 100644
index fc61d3f..0000000
--- a/old-to-be-ported-code/ooni-probe.diff
+++ /dev/null
@@ -1,358 +0,0 @@
-diff --git a/TODO b/TODO
-index c2e19af..51fa559 100644
---- a/TODO
-+++ b/TODO
-@@ -293,3 +293,142 @@ VIA Rail MITM's SSL In Ottawa:
- Jul 22 17:47:21.983 [Warning] Problem bootstrapping. Stuck at 85%: Finishing handshake with first hop. (DONE; DONE; count 13; recommendation warn)
-
- http://wireless.colubris.com:81/goform/HtmlLoginRequest?username=al1852&pas…
-+
-+VIA Rail Via header:
-+
-+HTTP/1.0 301 Moved Permanently
-+Location: http://www.google.com/
-+Content-Type: text/html; charset=UTF-8
-+Date: Sat, 23 Jul 2011 02:21:30 GMT
-+Expires: Mon, 22 Aug 2011 02:21:30 GMT
-+Cache-Control: public, max-age=2592000
-+Server: gws
-+Content-Length: 219
-+X-XSS-Protection: 1; mode=block
-+X-Cache: MISS from cache_server
-+X-Cache-Lookup: MISS from cache_server:3128
-+Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
-+Connection: close
-+
-+<HTML><HEAD><meta http-equiv="content-type" content="text/html;charset=utf-8">
-+<TITLE>301 Moved</TITLE></HEAD><BODY>
-+<H1>301 Moved</H1>
-+The document has moved
-+<A HREF="http://www.google.com/">here</A>.
-+</BODY></HTML>
-+
-+
-+blocked site:
-+
-+HTTP/1.0 302 Moved Temporarily
-+Server: squid/2.6.STABLE21
-+Date: Sat, 23 Jul 2011 02:22:17 GMT
-+Content-Length: 0
-+Location: http://10.66.66.66/denied.html
-+
-+invalid request response:
-+
-+$ nc 8.8.8.8 80
-+hjdashjkdsahjkdsa
-+HTTP/1.0 400 Bad Request
-+Server: squid/2.6.STABLE21
-+Date: Sat, 23 Jul 2011 02:22:44 GMT
-+Content-Type: text/html
-+Content-Length: 1178
-+Expires: Sat, 23 Jul 2011 02:22:44 GMT
-+X-Squid-Error: ERR_INVALID_REQ 0
-+X-Cache: MISS from cache_server
-+X-Cache-Lookup: NONE from cache_server:3128
-+Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
-+Proxy-Connection: close
-+
-+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-+<HTML><HEAD><META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
-+<TITLE>ERROR: The requested URL could not be retrieved</TITLE>
-+<STYLE type="text/css"><!--BODY{background-color:#ffffff;font-family:verdana,sans-serif}PRE{font-family:sans-serif}--></STYLE>
-+</HEAD><BODY>
-+<H1>ERROR</H1>
-+<H2>The requested URL could not be retrieved</H2>
-+<HR noshade size="1px">
-+<P>
-+While trying to process the request:
-+<PRE>
-+hjdashjkdsahjkdsa
-+
-+</PRE>
-+<P>
-+The following error was encountered:
-+<UL>
-+<LI>
-+<STRONG>
-+Invalid Request
-+</STRONG>
-+</UL>
-+
-+<P>
-+Some aspect of the HTTP Request is invalid. Possible problems:
-+<UL>
-+<LI>Missing or unknown request method
-+<LI>Missing URL
-+<LI>Missing HTTP Identifier (HTTP/1.0)
-+<LI>Request is too large
-+<LI>Content-Length missing for POST or PUT requests
-+<LI>Illegal character in hostname; underscores are not allowed
-+</UL>
-+<P>Your cache administrator is <A HREF="mailto:root">root</A>.
-+
-+<BR clear="all">
-+<HR noshade size="1px">
-+<ADDRESS>
-+Generated Sat, 23 Jul 2011 02:22:44 GMT by cache_server (squid/2.6.STABLE21)
-+</ADDRESS>
-+</BODY></HTML>
-+
-+nc 10.66.66.66 80
-+GET cache_object://localhost/info HTTP/1.0
-+HTTP/1.0 403 Forbidden
-+Server: squid/2.6.STABLE21
-+Date: Sat, 23 Jul 2011 02:25:56 GMT
-+Content-Type: text/html
-+Content-Length: 1061
-+Expires: Sat, 23 Jul 2011 02:25:56 GMT
-+X-Squid-Error: ERR_ACCESS_DENIED 0
-+X-Cache: MISS from cache_server
-+X-Cache-Lookup: NONE from cache_server:3128
-+Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
-+Proxy-Connection: close
-+
-+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-+<HTML><HEAD><META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
-+<TITLE>ERROR: The requested URL could not be retrieved</TITLE>
-+<STYLE type="text/css"><!--BODY{background-color:#ffffff;font-family:verdana,sans-serif}PRE{font-family:sans-serif}--></STYLE>
-+</HEAD><BODY>
-+<H1>ERROR</H1>
-+<H2>The requested URL could not be retrieved</H2>
-+<HR noshade size="1px">
-+<P>
-+While trying to retrieve the URL:
-+<A HREF="cache_object://localhost/info">cache_object://localhost/info</A>
-+<P>
-+The following error was encountered:
-+<UL>
-+<LI>
-+<STRONG>
-+Access Denied.
-+</STRONG>
-+<P>
-+Access control configuration prevents your request from
-+being allowed at this time. Please contact your service provider if
-+you feel this is incorrect.
-+</UL>
-+<P>Your cache administrator is <A HREF="mailto:root">root</A>.
-+
-+
-+<BR clear="all">
-+<HR noshade size="1px">
-+<ADDRESS>
-+Generated Sat, 23 Jul 2011 02:25:56 GMT by cache_server (squid/2.6.STABLE21)
-+</ADDRESS>
-+</BODY></HTML>
-+
-+
-diff --git a/ooni/command.py b/ooni/command.py
-index 361190f..df1a58c 100644
---- a/ooni/command.py
-+++ b/ooni/command.py
-@@ -13,6 +13,7 @@ import ooni.captive_portal
- import ooni.namecheck
- import ooni.dns_poisoning
- import ooni.dns_cc_check
-+import ooni.transparenthttp
-
- class Command():
- def __init__(self, args):
-@@ -48,6 +49,15 @@ class Command():
- help="run captiveportal tests"
- )
-
-+ # --transhttp
-+ def cb_transhttp(option, opt, value, oparser):
-+ self.action = opt[2:]
-+ optparser.add_option(
-+ "--transhttp",
-+ action="callback", callback=cb_transhttp,
-+ help="run Transparent HTTP tests"
-+ )
-+
- # --dns
- def cb_dnstests(option, opt, value, oparser):
- self.action = opt[2:]
-@@ -122,7 +132,7 @@ class Command():
- if (not self.action):
- raise optparse.OptionError(
- 'is required',
-- '--dns | --dnsbulk | --captiveportal | --help | --version'
-+ '--dns | --dnsbulk | --dnscccheck | [ --cc CC ] | --captiveportal | --transhttp | --help | --version'
- )
-
- except optparse.OptionError, err:
-@@ -138,6 +148,10 @@ class Command():
- captive_portal = ooni.captive_portal.CaptivePortal
- captive_portal(self).main()
-
-+ def transhttp(self):
-+ transparent_http = ooni.transparenthttp.TransparentHTTPProxy
-+ transparent_http(self).main()
-+
- def dns(self):
- dnstests = ooni.namecheck.DNS
- dnstests(self).main()
-diff --git a/ooni/dns.py b/ooni/dns.py
-index 95da6ef..90d50bd 100644
---- a/ooni/dns.py
-+++ b/ooni/dns.py
-@@ -8,7 +8,7 @@ from socket import gethostbyname
- import ooni.common
-
- # apt-get install python-dns
--import DNS
-+import dns
- import random
-
- """ Wrap gethostbyname """
-diff --git a/ooni/http.py b/ooni/http.py
-index 62365bb..bb72001 100644
---- a/ooni/http.py
-+++ b/ooni/http.py
-@@ -7,8 +7,14 @@
- from socket import gethostbyname
- import ooni.common
- import urllib2
-+import httplib
-+from urlparse import urlparse
-+from pprint import pprint
- import pycurl
-+import random
-+import string
- import re
-+from BeautifulSoup import BeautifulSoup
-
- # By default, we'll be Torbutton's UA
- default_ua = { 'User-Agent' :
-@@ -20,20 +26,8 @@ default_proxy_type = PROXYTYPE_SOCKS5
- default_proxy_host = "127.0.0.1"
- default_proxy_port = "9050"
-
--
--
--
--
--
--
--
--
--
--
--
--
--
--
-+#class HTTPResponse(object):
-+# def __init__(self):
-
-
- """A very basic HTTP fetcher that uses Tor by default and returns a curl
-@@ -51,7 +45,7 @@ def http_proxy_fetch(url, headers, proxy_type=5,
- http_code = getinfo(pycurl.HTTP_CODE)
- return response, http_code
-
--"""A very basic HTTP fetcher that returns a urllib3 response object."""
-+"""A very basic HTTP fetcher that returns a urllib2 response object."""
- def http_fetch(url,
- headers= default_ua,
- label="generic HTTP fetch"):
-@@ -136,6 +130,76 @@ def http_header_no_match(experiment_url, control_header, control_result):
- else:
- return True
-
-+def http_request(self, method, url, path=None):
-+ """Takes as argument url that is perfectly formed (http://hostname/REQUEST"""
-+ purl = urlparse(url)
-+ host = purl.netloc
-+ conn = httplib.HTTPConnection(host, 80)
-+ if path is None:
-+ path = purl.path
-+ conn.request(method, purl.path)
-+ response = conn.getresponse()
-+ headers = dict(response.getheaders())
-+ self.headers = headers
-+ self.data = response.read()
-+ return True
-+
-+def search_headers(self, s_headers, url):
-+ if http_request(self, "GET", url):
-+ headers = self.headers
-+ else:
-+ return None
-+ result = {}
-+ for h in s_headers.items():
-+ result[h[0]] = h[0] in headers
-+ return result
-+
-+def http_header_match_dict(experimental_url, dict_header):
-+ result = {}
-+ url_header = http_get_header_dict(experimental_url)
-+
-+# XXX for testing
-+# [('content-length', '9291'), ('via', '1.0 cache_server:3128 (squid/2.6.STABLE21)'), ('x-cache', 'MISS from cache_server'), ('accept-ranges', 'bytes'), ('server', 'Apache/2.2.16 (Debian)'), ('last-modified', 'Fri, 22 Jul 2011 03:00:31 GMT'), ('connection', 'close'), ('etag', '"105801a-244b-4a89fab1e51c0;49e684ba90c80"'), ('date', 'Sat, 23 Jul 2011 03:03:56 GMT'), ('content-type', 'text/html'), ('x-cache-lookup', 'MISS from cache_server:3128')]
-+
-+def search_squid_headers(self):
-+ url = "http://securityfocus.org/blabla"
-+ s_headers = {'via': '1.0 cache_server:3128 (squid/2.6.STABLE21)', 'x-cache': 'MISS from cache_server', 'x-cache-lookup':'MISS from cache_server:3128'}
-+ ret = search_headers(self, s_headers, url)
-+ for i in ret.items():
-+ if i[1] is True:
-+ return False
-+ return True
-+
-+def random_bad_request(self):
-+ url = "http://securityfocus.org/blabla"
-+ r_str = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(random.randint(5,20)))
-+ if http_request(self, r_str, url):
-+ return True
-+ else:
-+ return None
-+
-+def squid_search_bad_request(self):
-+ if random_bad_request(self):
-+ s_headers = {'X-Squid-Error' : 'ERR_INVALID_REQ 0'}
-+ for i in s_headers.items():
-+ if i[0] in self.headers:
-+ return False
-+ return True
-+ else:
-+ return None
-+
-+def squid_cacheobject_request(self):
-+ url = "http://securityfocus.org/blabla"
-+ if http_request(self, "GET", url, "cache_object://localhost/info"):
-+ soup = BeautifulSoup(self.data)
-+ if soup.find('strong') and soup.find('strong').string == "Access Denied.":
-+ return False
-+ else:
-+ return True
-+ else:
-+ return None
-+
-+
- def MSHTTP_CP_Tests(self):
- experiment_url = "http://www.msftncsi.com/ncsi.txt"
- expectedResponse = "Microsoft NCSI" # Only this - nothing more
-@@ -186,6 +250,18 @@ def WC3_CP_Tests(self):
-
- # Google ChromeOS fetches this url in guest mode
- # and they expect the user to authenticate
-- def googleChromeOSHTTPTest(self):
-- print "noop"
-- #url = "http://www.google.com/"
-+def googleChromeOSHTTPTest(self):
-+ print "noop"
-+ #url = "http://www.google.com/"
-+
-+def SquidHeader_TransparentHTTP_Tests(self):
-+ return search_squid_headers(self)
-+
-+def SquidBadRequest_TransparentHTTP_Tests(self):
-+ squid_cacheobject_request(self)
-+ return squid_search_bad_request(self)
-+
-+def SquidCacheobject_TransparentHTTP_Tests(self):
-+ return squid_cacheobject_request(self)
-+
-+
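
A minimal sketch of the squid-header check that the http.py hunk above adds: fetch a URL and see whether any of the proxy-revealing headers are present (the header names are taken from the patch; the URL is a placeholder).

    try:
        from urllib.request import urlopen   # Python 3
    except ImportError:
        from urllib2 import urlopen          # Python 2

    SQUID_HEADERS = ("Via", "X-Cache", "X-Cache-Lookup", "X-Squid-Error")

    def transparent_squid_suspected(url="http://example.com/"):
        # Report which, if any, of the tell-tale proxy headers the response carries.
        headers = urlopen(url).headers
        present = [h for h in SQUID_HEADERS if headers.get(h)]
        return bool(present), present

    print(transparent_squid_suspected())
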
diff --git a/old-to-be-ported-code/ooni/.DS_Store b/old-to-be-ported-code/ooni/.DS_Store
deleted file mode 100644
index f5738a5..0000000
Binary files a/old-to-be-ported-code/ooni/.DS_Store and /dev/null differ
diff --git a/old-to-be-ported-code/ooni/__init__.py b/old-to-be-ported-code/ooni/__init__.py
deleted file mode 100644
index 8f1b96e..0000000
--- a/old-to-be-ported-code/ooni/__init__.py
+++ /dev/null
@@ -1,12 +0,0 @@
-"""\
-This is your package, 'ooni'.
-
-It was provided by the package, `package`.
-
-Please change this documentation, and write this module!
-"""
-
-__version__ = '0.0.1'
-
-# If you run 'make test', this is your failing test.
-# raise Exception("\n\n\tNow it's time to write your 'ooni' module!!!\n\n")
diff --git a/old-to-be-ported-code/ooni/command.py b/old-to-be-ported-code/ooni/command.py
deleted file mode 100644
index e5f8f9f..0000000
--- a/old-to-be-ported-code/ooni/command.py
+++ /dev/null
@@ -1,250 +0,0 @@
-# -*- coding: utf-8
-"""\
-Command line UI module for ooni-probe - heavily inspired by Ingy döt Net
-"""
-
-import os
-import sys
-import re
-import optparse
-
-# Only include high level ooni tests at this time
-import ooni.captive_portal
-import ooni.namecheck
-import ooni.dns_poisoning
-import ooni.dns_cc_check
-import ooni.transparenthttp
-import ooni.helpers
-import ooni.plugooni
-import ooni.input
-
-class Command():
- def __init__(self, args):
- sys.argv = sys.argv[0:1]
- sys.argv.extend(args)
- self.startup_options()
-
- def startup_options(self):
- self.action = None
- self.from_ = None
- self.to = None
- self.parser = None
- self.emitter = None
- self.emit_header = None
- self.emit_trailer = None
- self.in_ = sys.stdin
- self.out = sys.stdout
- self.debug = False
- self.randomize = True
- self.cc = None
- self.hostname = None
- self.listfile = None
- self.listplugooni = False
- self.plugin_name = "all"
- self.controlproxy = None # "socks4a://127.0.0.1:9050/"
- self.experimentproxy = None
-
- usage = """
-
- 'ooni' is the Open Observatory of Network Interference
-
- command line usage: ooni-probe [options]"""
-
- optparser = optparse.OptionParser(usage=usage)
-
- # --plugin
- def cb_plugin(option, opt, value, oparser):
- self.action = opt[2:]
- self.plugin_name = str(value)
- optparser.add_option(
- "--plugin", type="string",
- action="callback", callback=cb_plugin,
- help="run the Plugooni plgoo plugin specified"
- )
-
- # --listplugins
- def cb_list_plugins(option, opt, value, oparser):
- self.action = opt[2:]
- optparser.add_option(
- "--listplugins",
- action="callback", callback=cb_list_plugins,
- help="list available Plugooni as plgoos plugin names"
- )
-
- # --captiveportal
- def cb_captiveportal(option, opt, value, oparser):
- self.action = opt[2:]
- optparser.add_option(
- "--captiveportal",
- action="callback", callback=cb_captiveportal,
- help="run vendor emulated captiveportal tests"
- )
-
- # --transhttp
- def cb_transhttp(option, opt, value, oparser):
- self.action = opt[2:]
- optparser.add_option(
- "--transhttp",
- action="callback", callback=cb_transhttp,
- help="run Transparent HTTP tests"
- )
-
- # --dns
- def cb_dnstests(option, opt, value, oparser):
- self.action = opt[2:]
- optparser.add_option(
- "--dns",
- action="callback", callback=cb_dnstests,
- help="run fixed generic dns tests"
- )
-
- # --dnsbulk
- def cb_dnsbulktests(option, opt, value, oparser):
- self.action = opt[2:]
- optparser.add_option(
- "--dnsbulk",
- action="callback", callback=cb_dnsbulktests,
- help="run bulk DNS tests in random.shuffle() order"
- )
-
- # --dns-cc-check
- def cb_dnscccheck(option, opt, value, oparser):
- self.action = opt[2:]
- optparser.add_option(
- "--dnscccheck",
- action="callback", callback=cb_dnscccheck,
- help="run cc specific bulk DNS tests in random.shuffle() order"
- )
-
- # --cc [country code]
- def cb_cc(option, opt, value, optparser):
- # XXX: We should check this against a list of supported country codes
- # and then return the matching value from the list into self.cc
- self.cc = str(value)
- optparser.add_option(
- "--cc", type="string",
- action="callback", callback=cb_cc,
- help="set a specific county code -- default is None",
- )
-
- # --list [url/hostname/ip list in file]
- def cb_list(option, opt, value, optparser):
- self.listfile = os.path.expanduser(value)
- if not os.path.isfile(self.listfile):
- print "Wrong file '" + value + "' in --list."
- sys.exit(1)
- optparser.add_option(
- "--list", type="string",
- action="callback", callback=cb_list,
- help="file to read from -- default is None",
- )
-
- # --url [url/hostname/ip]
- def cb_host(option, opt, value, optparser):
- self.hostname = str(value)
- optparser.add_option(
- "--url", type="string",
- action="callback", callback=cb_host,
- help="set URL/hostname/IP for use in tests -- default is None",
- )
-
- # --controlproxy [scheme://host:port]
- def cb_controlproxy(option, opt, value, optparser):
- self.controlproxy = str(value)
- optparser.add_option(
- "--controlproxy", type="string",
- action="callback", callback=cb_controlproxy,
- help="proxy to be used as a control -- default is None",
- )
-
- # --experimentproxy [scheme://host:port]
- def cb_experimentproxy(option, opt, value, optparser):
- self.experimentproxy = str(value)
- optparser.add_option(
- "--experimentproxy", type="string",
- action="callback", callback=cb_experimentproxy,
- help="proxy to be used for experiments -- default is None",
- )
-
-
-
- # --randomize
- def cb_randomize(option, opt, value, optparser):
- self.randomize = bool(int(value))
- optparser.add_option(
- "--randomize", type="choice",
- choices=['0', '1'], metavar="0|1",
- action="callback", callback=cb_randomize,
- help="randomize host order -- default is on",
- )
-
- # XXX TODO:
- # pause/resume scans for dns_BULK_DNS_Tests()
- # setting of control/experiment resolver
- # setting of control/experiment proxy
- #
-
- def cb_version(option, opt, value, oparser):
- self.action = 'version'
- optparser.add_option(
- "-v", "--version",
- action="callback", callback=cb_version,
- help="print ooni-probe version"
- )
-
- # parse options
- (opts, args) = optparser.parse_args()
-
- # validate options
- try:
- if (args):
- raise optparse.OptionError('extra arguments found', args)
- if (not self.action):
- raise optparse.OptionError(
- 'RTFS', 'required arguments missing'
- )
-
- except optparse.OptionError, err:
- sys.stderr.write(str(err) + '\n\n')
- optparser.print_help()
- sys.exit(1)
-
- def version(self):
- print """
-ooni-probe pre-alpha
-Copyright (c) 2011, Jacob Appelbaum, Arturo Filastò
-See: https://www.torproject.org/ooni/
-
-"""
-
- def run(self):
- getattr(self, self.action)()
-
- def plugin(self):
- plugin_run = ooni.plugooni.Plugooni
- plugin_run(self).run(self)
-
- def listplugins(self):
- plugin_run = ooni.plugooni.Plugooni
- plugin_run(self).list_plugoons()
-
- def captiveportal(self):
- captive_portal = ooni.captive_portal.CaptivePortal
- captive_portal(self).main()
-
- def transhttp(self):
- transparent_http = ooni.transparenthttp.TransparentHTTPProxy
- transparent_http(self).main()
-
- def dns(self):
- dnstests = ooni.namecheck.DNS
- dnstests(self).main()
-
- def dnsbulk(self):
- dnstests = ooni.dns_poisoning.DNSBulk
- dnstests(self).main()
-
- def dnscccheck(self):
- dnstests = ooni.dns_cc_check.DNSBulk
- dnstests(self).main()
-
diff --git a/old-to-be-ported-code/ooni/dns_poisoning.py b/old-to-be-ported-code/ooni/dns_poisoning.py
deleted file mode 100644
index 939391e..0000000
--- a/old-to-be-ported-code/ooni/dns_poisoning.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python
-#
-# DNS tampering detection module
-# by Jacob Appelbaum <jacob(a)appelbaum.net>
-#
-# This module performs DNS queries against a known-good resolver and a possibly
-# bad resolver. We compare every resolved name against a list of known filters
-# - if we match, we ring a bell; otherwise, we list possible filter IP
-# addresses. There is a high false positive rate for sites that are GeoIP load
-# balanced.
-#
-
-import sys
-import ooni.dnsooni
-
-class DNSBulk():
- def __init__(self, args):
- self.in_ = sys.stdin
- self.out = sys.stdout
- self.randomize = args.randomize
- self.debug = False
-
- def DNS_Tests(self):
- print "DNS tampering detection for list of domains:"
- filter_name = "_DNS_BULK_Tests"
- tests = [ooni.dnsooni]
- for test in tests:
- for function_ptr in dir(test):
- if function_ptr.endswith(filter_name):
- filter_result = getattr(test, function_ptr)(self)
- if filter_result == True:
- print function_ptr + " thinks the network is clean"
- elif filter_result == None:
- print function_ptr + " failed"
- else:
- print function_ptr + " thinks the network is dirty"
- def main(self):
- for function_ptr in dir(self):
- if function_ptr.endswith("_Tests"):
- getattr(self, function_ptr)()
-
-if __name__ == '__main__':
- self.main()
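
A minimal sketch of the control-versus-experiment comparison this module describes, assuming dnspython 2.x; the experiment resolver address is a placeholder, and, as the header notes, GeoIP load balancing can make a plain answer mismatch a false positive.

    import dns.resolver   # dnspython 2.x

    def resolve_with(server, hostname):
        resolver = dns.resolver.Resolver(configure=False)
        resolver.nameservers = [server]
        return sorted(rr.to_text() for rr in resolver.resolve(hostname, "A"))

    def compare(hostname, control="8.8.8.8", experiment="192.0.2.53"):
        # Tampering is suspected when the two resolvers disagree on the A records.
        control_answers = resolve_with(control, hostname)
        experiment_answers = resolve_with(experiment, hostname)
        return control_answers != experiment_answers, control_answers, experiment_answers

    print(compare("torproject.org"))
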
diff --git a/old-to-be-ported-code/ooni/dnsooni.py b/old-to-be-ported-code/ooni/dnsooni.py
deleted file mode 100644
index bfdfe51..0000000
--- a/old-to-be-ported-code/ooni/dnsooni.py
+++ /dev/null
@@ -1,356 +0,0 @@
-#!/usr/bin/env python
-#
-# DNS support for ooni-probe
-# by Jacob Appelbaum <jacob(a)appelbaum.net>
-#
-
-from socket import gethostbyname
-import ooni.common
-
-# requires python-dns
-# (pydns.sourceforge.net)
-try:
- import DNS
-# Mac OS X needs this
-except:
- try:
- import dns as DNS
- except:
- pass # Never mind, let's break later.
-import random
-from pprint import pprint
-
-""" Wrap gethostbyname """
-def dns_resolve(hostname):
- try:
- resolved_host = gethostbyname(hostname)
- return resolved_host
- except:
- return False
-
-"""Perform a resolution on test_hostname and compare it with the expected
- control_resolved ip address. Optionally, a label may be set to customize
- output. If the experiment matches the control, this returns True; otherwise
- it returns False.
-"""
-def dns_resolve_match(experiment_hostname, control_resolved,
- label="generic DNS comparison"):
- experiment_resolved = dns_resolve(experiment_hostname)
- if experiment_resolved == False:
- return None
- if experiment_resolved:
- if str(experiment_resolved) != str(control_resolved):
- print label + " control " + str(control_resolved) + " data does not " \
- "match experiment response: " + str(experiment_resolved)
- return False
- return True
-
-def generic_DNS_resolve(experiment_hostname, experiment_resolver):
- if experiment_resolver == None:
- req = DNS.Request(name=experiment_hostname) # local resolver
- else:
- req = DNS.Request(name=experiment_hostname, server=experiment_resolver) #override
- resolved_data = req.req().answers
- return resolved_data
-
-""" Return a list of all known censors. """
-def load_list_of_known_censors(known_proxy_file=None):
- proxyfile = "proxy-lists/ips.txt"
- known_proxy_file = open(proxyfile, 'r', 1)
- known_proxy_list = []
- for known_proxy in known_proxy_file.readlines():
- known_proxy_list.append(known_proxy)
- known_proxy_file.close()
- known_proxy_count = len(known_proxy_list)
- print "Loading " + str(known_proxy_count) + " known proxies..."
- return known_proxy_list, known_proxy_count
-
-def load_list_of_test_hosts(hostfile=None):
- if hostfile == None:
- hostfile="censorship-lists/norwegian-dns-blacklist.txt"
- host_list_file = open(hostfile, 'r', 1)
- host_list = []
- for host_name in host_list_file.readlines():
- if host_name.isspace():
- continue
- else:
- host_list.append(host_name)
- host_list_file.close()
- host_count = len(host_list)
- #print "Loading " + str(host_count) + " test host names..."
- return host_list, host_count
-
-""" Return True with a list of censors if we find a known censor from
- known_proxy_list in the experiment_data DNS response. Otherwise return
- False and None. """
-def contains_known_censors(known_proxy_list, experiment_data):
- match = False
- proxy_list = []
- for answer in range(len(experiment_data)):
- for known_proxy in known_proxy_list:
- if answer == known_proxy:
- print "CONFLICT: known proxy discovered: " + str(known_proxy),
- proxy_list.append(known_proxy)
- match = True
- return match, proxy_list
-
-""" Return True and the experiment response that failed to match."""
-def compare_control_with_experiment(known_proxy_list, control_data, experiment_data):
- known_proxy_found, known_proxies = contains_known_censors(known_proxy_list, experiment_data)
- conflict_list = []
- conflict = False
- if known_proxy_found:
- print "known proxy discovered: " + str(known_proxies)
- for answer in range(len(control_data)):
- if control_data[answer]['data'] == experiment_data:
- print "control_data[answer]['data'] = " + str(control_data[answer]['data']) + "and experiment_data = " + str(experiment_data)
- continue
- else:
- conflict = True
- conflict_list.append(experiment_data)
- #print "CONFLICT: control_data: " + str(control_data) + " experiment_data: " + str(experiment_data),
- return conflict, conflict_list
-
-def dns_DNS_BULK_Tests(self, hostfile=None,
- known_good_resolver="8.8.8.8", test_resolver=None):
- tampering = False # By default we'll pretend the internet is nice
- tampering_list = []
- host_list, host_count = load_list_of_test_hosts()
- known_proxies, proxy_count = load_list_of_known_censors()
- check_count = 1
- if test_resolver == None:
- DNS.ParseResolvConf() # Set the local resolver as our default
- if self.randomize:
- random.shuffle(host_list) # This makes our list non-sequential for now
- for host_name in host_list:
- host_name = host_name.strip()
- print "Total progress: " + str(check_count) + " of " + str(host_count) + " hosts to check"
- print "Resolving with control resolver..."
- print "Testing " + host_name + " with control resolver: " + str(known_good_resolver)
- print "Testing " + host_name + " with experiment resolver: " + str(test_resolver)
- # XXX TODO - we need to keep track of the status of these requests and then resume them
- while True:
- try:
- control_data = generic_DNS_resolve(host_name, known_good_resolver)
- break
- except KeyboardInterrupt:
- print "bailing out..."
- exit()
- except DNS.Base.DNSError:
- print "control resolver appears to be failing..."
- continue
- except:
- print "Timeout; looping!"
- continue
-
- print "Resolving with experiment resolver..."
- while True:
- try:
- experiment_data = generic_DNS_resolve(host_name, test_resolver)
- break
- except KeyboardInterrupt:
- print "bailing out..."
- exit()
- except DNS.Base.DNSError:
- print "experiment resolver appears to be failing..."
- continue
- except:
- print "Timeout; looping!"
- continue
-
- print "Comparing control and experiment...",
- tampering, conflicts = compare_control_with_experiment(known_proxies, control_data, experiment_data)
- if tampering:
- tampering_list.append(conflicts)
- print "Conflicts with " + str(host_name) + " : " + str(conflicts)
- check_count = check_count + 1
- return tampering
-
-""" Attempt to resolve random_hostname and return True and None if empty. If an
- address is returned we return False and the returned address.
-"""
-def dns_response_empty(random_hostname):
- response = dns_resolve(random_hostname)
- if response == False:
- return True, None
- return False, response
-
-def dns_multi_response_empty(count, size):
- for i in range(count):
- randName = ooni.common._randstring(size)
- response_empty, response_ip = dns_response_empty(randName)
- if response_empty == True and response_ip == None:
- responses_are_empty = True
- else:
- print label + " " + randName + " found with value " + str(response_ip)
- responses_are_empty = False
- return responses_are_empty
-
-""" Attempt to resolve one random host name per tld in tld_list where the
- hostnames are random strings with a length between min_length and
- max_length. Return True if list is empty, otherwise return False."""
-def dns_list_empty(tld_list, min_length, max_length,
- label="generic DNS list test"):
- for tld in tld_list:
- randName = ooni.common._randstring(min_length, max_length) + tld
- response_empty, response_ip = dns_response_empty(randName)
- return response_empty
-
-# Known bad test
-# Test for their DNS breakage and their HTTP MITM
-# "Family Shield" is 208.67.222.123 and 208.67.220.123
-# returns 67.215.65.130 for filtered sites like kink.com
-# block.opendns.com is a block page where users are redirected
-# 208.67.216.135 208.67.217.135 are the block pages currently point
-# 67.215.65.132 is returned for NXDOMAINs and a visit with HTTP to that IP
-# results in redirection to http://guide.opendns.com/main?url=sdagsad.com or
-# whatever the HOST header says
-# Amusingly - their Server header is: "OpenDNS Guide"
-""" Return True if we are not being directed as known OpenDNS block pages."""
-def OpenDNS_DNS_Tests(self):
- return OpenDNS_Censorship_DNS_TESTS(self) and OpenDNS_NXDomain_DNS_TESTS(self)
-
-def OpenDNS_Censorship_DNS_TESTS(self):
- known_filter = "67.215.65.130"
- randName = ooni.common._randstring(10)
- redirected = dns_resolve_match(randName, known_filter, label="OpenDNS DNS Censorship comparison")
- if redirected:
- return False
- else:
- return True
-
-def OpenDNS_NXDomain_DNS_TESTS(self):
- known_filter = "67.215.65.132"
- randName = ooni.common._randstring(10)
- redirected = dns_resolve_match(randName, known_filter, label="OpenDNS DNS NXDomain comparison")
- if redirected:
- return False
- else:
- return True
-
-"""Returns True if the experiment_url returns the well known Italian block page."""
-def cc_DNS_Tests_it(self):
- tampering = False # By default we'll pretend the internet is nice
- tampering_list = []
- conflicts = []
- known_good_resolver = "8.8.8.8"
- host_list, host_count = load_list_of_test_hosts("censorship-lists/italy-gamble-blocklist-07-22-11.txt")
- known_http_block_pages, known_block_count = load_list_of_test_hosts("proxy-lists/italy-http-ips.txt")
- known_censoring_resolvers, censoring_resolver_count = load_list_of_test_hosts("proxy-lists/italy-dns-ips.txt")
-
- check_count = 1
- DNS.ParseResolvConf()
- # Set the local resolver as our default
- if self.randomize:
- random.shuffle(host_list) # This makes our list non-sequential for now
- print "We're testing (" + str(host_count) + ") URLs"
- print "We're looking for (" + str(known_block_count) + ") block pages"
- print "We're testing against (" + str(censoring_resolver_count) + ") censoring DNS resolvers"
- for test_resolver in known_censoring_resolvers:
- test_resolver = test_resolver.strip()
- for host_name in host_list:
- host_name = host_name.strip()
- print "Total progress: " + str(check_count) + " of " + str(host_count) + " hosts to check"
- print "Testing " + host_name + " with control resolver: " + known_good_resolver
- print "Testing " + host_name + " with experiment resolver: " + test_resolver
- while True:
- try:
- control_data = generic_DNS_resolve(host_name, known_good_resolver)
- break
- except KeyboardInterrupt:
- print "bailing out..."
- exit()
- except DNS.Base.DNSError:
- print "control resolver appears to be failing..."
- break
- except:
- print "Timeout; looping!"
- continue
-
- while True:
- try:
- experiment_data = generic_DNS_resolve(host_name, test_resolver)
- break
- except KeyboardInterrupt:
- print "bailing out..."
- exit()
- except DNS.Base.DNSError:
- print "experiment resolver appears to be failing..."
- continue
- except:
- print "Timeout; looping!"
- continue
-
- print "Comparing control and experiment...",
- tampering, conflicts = compare_control_with_experiment(known_http_block_pages, control_data, experiment_data)
- if tampering:
- tampering_list.append(conflicts)
- print "Conflicts with " + str(host_name) + " : " + str(conflicts)
- check_count = check_count + 1
-
- return tampering
-
-
-## XXX TODO
-## Code up automatic tests for HTTP page checking in Italy - length + known strings, etc
-
-""" Returns True if the experiment_host returns a well known Australian filter
- IP address."""
-def Australian_DNS_Censorship(self, known_filtered_host="badhost.com"):
- # http://www.robtex.com/ip/61.88.88.88.html
- # http://requests.optus.net.au/dns/
- known_block_ip = "208.69.183.228" # http://interpol.contentkeeper.com/
- known_censoring_resolvers = ["61.88.88.88"] # Optus
- for resolver in known_censoring_resolvers:
- blocked = generic_DNS_censorship(known_filtered_host, resolver, known_block_ip)
- if blocked:
- return True
-
-"""Returns True if experiment_hostname as resolved by experiment_resolver
- resolves to control_data. Returns False if there is no match or None if the
- attempt fails."""
-def generic_DNS_censorship(self, experiment_hostname, experiment_resolver,
- control_data):
- req = DNS.Request(name=experiment_hostname, server=experiment_resolver)
- resolved_data = req.req().answers
- for answer in range(len(resolved_data)):
- if resolved_data[answer]['data'] == control_data:
- return True
- return False
-
-# See dns_launch_wildcard_checks in tor/src/or/dns.c for Tor implementation
-# details
-""" Return True if Tor would consider the network fine; False if it's hostile
- and has no signs of DNS tampering. """
-def Tor_DNS_Tests(self):
- response_rfc2606_empty = RFC2606_DNS_Tests(self)
- tor_tld_list = ["", ".com", ".org", ".net"]
- response_tor_empty = ooni.dnsooni.dns_list_empty(tor_tld_list, 8, 16, "TorDNSTest")
- return response_tor_empty | response_rfc2606_empty
-
-""" Return True if RFC2606 would consider the network hostile; False if it's all
- clear and has no signs of DNS tampering. """
-def RFC2606_DNS_Tests(self):
- tld_list = [".invalid", ".test"]
- return ooni.dnsooni.dns_list_empty(tld_list, 4, 18, "RFC2606Test")
-
-""" Return True if googleChromeDNSTest would consider the network OK."""
-def googleChrome_CP_Tests(self):
- maxGoogleDNSTests = 3
- GoogleDNSTestSize = 10
- return ooni.dnsooni.dns_multi_response_empty(maxGoogleDNSTests,
- GoogleDNSTestSize)
-def googleChrome_DNS_Tests(self):
- return googleChrome_CP_Tests(self)
-
-""" Return True if MSDNSTest would consider the network OK."""
-def MSDNS_CP_Tests(self):
- experimentHostname = "dns.msftncsi.com"
- expectedResponse = "131.107.255.255"
- return ooni.dnsooni.dns_resolve_match(experimentHostname, expectedResponse, "MS DNS")
-
-def MSDNS_DNS_Tests(self):
- return MSDNS_CP_Tests(self)
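
For reference, a minimal Python 3 sketch of the control-versus-experiment comparison that compare_control_with_experiment() above performs. It works on plain lists of already-resolved addresses so it needs no DNS library; the sample answers and the known-proxy entry are hypothetical illustrations, not values taken from the shipped lists.

def compare_answers(control_answers, experiment_answers, known_proxies=()):
    """Return (tampering, conflicts): True plus the offending answers when the
    experiment resolver returned anything the control resolver did not, or
    anything that appears on the known-proxy list."""
    control = set(control_answers)
    conflicts = [answer for answer in experiment_answers
                 if answer not in control or answer in known_proxies]
    return bool(conflicts), conflicts

# Hypothetical data: what a control resolver (e.g. 8.8.8.8) and a suspect
# resolver returned for the same host name.
control_answers = ["93.184.216.34"]
experiment_answers = ["67.215.65.130"]        # an OpenDNS-style block-page IP
print(compare_answers(control_answers, experiment_answers,
                      known_proxies=["67.215.65.130"]))
# -> (True, ['67.215.65.130'])

Flagging an answer that either differs from the control set or appears on the known-proxy list folds together the two checks the deleted code runs separately.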
diff --git a/old-to-be-ported-code/ooni/helpers.py b/old-to-be-ported-code/ooni/helpers.py
deleted file mode 100644
index 514e65f..0000000
--- a/old-to-be-ported-code/ooni/helpers.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-#
-# HTTP support for ooni-probe
-# by Jacob Appelbaum <jacob(a)appelbaum.net>
-# Arturo Filasto' <art(a)fuffa.org>
-
-import ooni.common
-import pycurl
-import random
-import zipfile
-import os
-from xml.dom import minidom
-try:
- from BeautifulSoup import BeautifulSoup
-except:
- pass # Never mind, let's break later.
-
-def get_random_url(self):
- filepath = os.getcwd() + "/test-lists/top-1m.csv.zip"
- fp = zipfile.ZipFile(filepath, "r")
- fp.open("top-1m.csv")
- content = fp.read("top-1m.csv")
- return "http://" + random.choice(content.split("\n")).split(",")[1]
-
-"""Pick a random header and use that for the request"""
-def get_random_headers(self):
- filepath = os.getcwd() + "/test-lists/whatheaders.xml"
- headers = []
- content = open(filepath, "r").read()
- soup = BeautifulSoup(content)
- measurements = soup.findAll('measurement')
- i = random.randint(0, len(measurements) - 1)
- for vals in measurements[i].findAll('header'):
- name = vals.find('name').string
- value = vals.find('value').string
- if name != "host":
- headers.append((name, value))
- return headers
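
helpers.py above picks one random entry out of a zipped Alexa-style top-1m.csv. A short Python 3 sketch of that step, assuming the same archive layout (a single top-1m.csv member holding rank,domain rows); the path is the one the deleted code used.

import random
import zipfile

def get_random_url(path="test-lists/top-1m.csv.zip"):
    """Pick a random 'rank,domain' row from the zipped list and build a URL."""
    with zipfile.ZipFile(path) as zf:
        rows = zf.read("top-1m.csv").decode("utf-8").splitlines()
    _rank, domain = random.choice([row for row in rows if row]).split(",", 1)
    return "http://" + domain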
diff --git a/old-to-be-ported-code/ooni/http.py b/old-to-be-ported-code/ooni/http.py
deleted file mode 100644
index 61abad4..0000000
--- a/old-to-be-ported-code/ooni/http.py
+++ /dev/null
@@ -1,306 +0,0 @@
-#!/usr/bin/env python
-#
-# HTTP support for ooni-probe
-# by Jacob Appelbaum <jacob(a)appelbaum.net>
-# Arturo Filasto' <art(a)fuffa.org>
-#
-
-from socket import gethostbyname
-import ooni.common
-import ooni.helpers
-import ooni.report
-import urllib2
-import httplib
-from urlparse import urlparse
-from pprint import pprint
-import pycurl
-import random
-import string
-import re
-from pprint import pprint
-try:
- from BeautifulSoup import BeautifulSoup
-except:
- pass # Never mind, let's break later.
-
-# By default, we'll be Torbutton's UA
-default_ua = { 'User-Agent' :
- 'Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0' }
-
-# Use pycurl to connect over a proxy
-PROXYTYPE_SOCKS5 = 5
-default_proxy_type = PROXYTYPE_SOCKS5
-default_proxy_host = "127.0.0.1"
-default_proxy_port = "9050"
-
-#class HTTPResponse(object):
-# def __init__(self):
-
-
-"""A very basic HTTP fetcher that uses Tor by default and returns a curl
- object."""
-def http_proxy_fetch(url, headers, proxy_type=5,
- proxy_host="127.0.0.1",
- proxy_port=9050):
- request = pycurl.Curl()
- request.setopt(pycurl.PROXY, proxy_host)
- request.setopt(pycurl.PROXYPORT, proxy_port)
- request.setopt(pycurl.PROXYTYPE, proxy_type)
- request.setopt(pycurl.HTTPHEADER, ["User-Agent: Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0"])
- request.setopt(pycurl.URL, url)
- response = request.perform()
- http_code = request.getinfo(pycurl.HTTP_CODE)
- return response, http_code
-
-"""A very basic HTTP fetcher that returns a urllib2 response object."""
-def http_fetch(url,
- headers= default_ua,
- label="generic HTTP fetch"):
- request = urllib2.Request(url, None, headers)
- response = urllib2.urlopen(request)
- return response
-
-"""Connect to test_hostname on port 80, request url and compare it with the expected
- control_result. Optionally, a label may be set to customize
- output. If the experiment matches the control, this returns True with the http
- status code; otherwise it returns False.
-"""
-def http_content_match(experimental_url, control_result,
- headers= { 'User-Agent' : default_ua },
- label="generic HTTP content comparison"):
- request = urllib2.Request(experimental_url, None, headers)
- response = urllib2.urlopen(request)
- responseContents = response.read()
- responseCode = response.code
- if responseContents != False:
- if str(responseContents) != str(control_result):
- print label + " control " + str(control_result) + " data does not " \
- "match experiment response: " + str(responseContents)
- return False, responseCode
- return True, responseCode
- else:
- print "HTTP connection appears to have failed"
- return False, False
-
-"""Connect to test_hostname on port 80, request url and compare it with the expected
- control_result as a regex. Optionally, a label may be set to customize
- output. If the experiment matches the control, this returns True with the HTTP
- status code; otherwise it returns False.
-"""
-def http_content_fuzzy_match(experimental_url, control_result,
- headers= { 'User-Agent' : default_ua },
- label="generic HTTP content comparison"):
- request = urllib2.Request(experimental_url, None, headers)
- response = urllib2.urlopen(request)
- responseContents = response.read()
- responseCode = response.code
- pattern = re.compile(control_result)
- match = pattern.search(responseContents)
- if responseContents != False:
- if not match:
- print label + " control " + str(control_result) + " data does not " \
- "match experiment response: " + str(responseContents)
- return False, responseCode
- return True, responseCode
- else:
- print "HTTP connection appears to have failed"
- return False, False
-
-"""Compare two HTTP status codes as integers and return True if they match."""
-def http_status_code_match(experiment_code, control_code):
- if int(experiment_code) != int(control_code):
- return False
- return True
-
-"""Compare two HTTP status codes as integers and return True if they don't match."""
-def http_status_code_no_match(experiment_code, control_code):
- if http_status_code_match(experiment_code, control_code):
- return False
- return True
-
-"""Connect to a URL and compare the control_header/control_result with the data
-served by the remote server. Return True if it matches, False if it does not."""
-def http_header_match(experiment_url, control_header, control_result):
- response = http_fetch(experiment_url)
- remote_header = response.headers.get(control_header)
- if str(remote_header) == str(control_result):
- return True
- else:
- return False
-
-"""Connect to a URL and compare the control_header/control_result with the data
-served by the remote server. Return True if it does not match, False if it does."""
-def http_header_no_match(experiment_url, control_header, control_result):
- match = http_header_match(experiment_url, control_header, control_result)
- if match:
- return False
- else:
- return True
-
-def send_browser_headers(self, browser, conn):
- headers = ooni.helpers.get_random_headers(self)
- for h in headers:
- conn.putheader(h[0], h[1])
- conn.endheaders()
- return True
-
-def http_request(self, method, url, path=None):
- purl = urlparse(url)
- host = purl.netloc
- conn = httplib.HTTPConnection(host, 80)
- conn.connect()
- if path is None:
- path = purl.path
- conn.putrequest(method, purl.path)
- send_browser_headers(self, None, conn)
- response = conn.getresponse()
- headers = dict(response.getheaders())
- self.headers = headers
- self.data = response.read()
- return True
-
-def search_headers(self, s_headers, url):
- if http_request(self, "GET", url):
- headers = self.headers
- else:
- return None
- result = {}
- for h in s_headers.items():
- result[h[0]] = h[0] in headers
- return result
-
-# XXX for testing
-# [('content-length', '9291'), ('via', '1.0 cache_server:3128 (squid/2.6.STABLE21)'), ('x-cache', 'MISS from cache_server'), ('accept-ranges', 'bytes'), ('server', 'Apache/2.2.16 (Debian)'), ('last-modified', 'Fri, 22 Jul 2011 03:00:31 GMT'), ('connection', 'close'), ('etag', '"105801a-244b-4a89fab1e51c0;49e684ba90c80"'), ('date', 'Sat, 23 Jul 2011 03:03:56 GMT'), ('content-type', 'text/html'), ('x-cache-lookup', 'MISS from cache_server:3128')]
-
-"""Search for squid headers by requesting a random site and checking if the headers have been rewritten (active, not fingerprintable)"""
-def search_squid_headers(self):
- test_name = "squid header"
- self.logger.info("RUNNING %s test" % test_name)
- url = ooni.helpers.get_random_url(self)
- s_headers = {'via': '1.0 cache_server:3128 (squid/2.6.STABLE21)', 'x-cache': 'MISS from cache_server', 'x-cache-lookup':'MISS from cache_server:3128'}
- ret = search_headers(self, s_headers, url)
- for i in ret.items():
- if i[1] is True:
- self.logger.info("the %s test returned False" % test_name)
- return False
- self.logger.info("the %s test returned True" % test_name)
- return True
-
-def random_bad_request(self):
- url = ooni.helpers.get_random_url(self)
- r_str = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(random.randint(5,20)))
- if http_request(self, r_str, url):
- return True
- else:
- return None
-
-"""Create a request made up of a random string of 5-20 chars (active technique, possibly fingerprintable)"""
-def squid_search_bad_request(self):
- test_name = "squid bad request"
- self.logger.info("RUNNING %s test" % test_name)
- if random_bad_request(self):
- s_headers = {'X-Squid-Error' : 'ERR_INVALID_REQ 0'}
- for i in s_headers.items():
- if i[0] in self.headers:
- self.logger.info("the %s test returned False" % test_name)
- return False
- self.logger.info("the %s test returned True" % test_name)
- return True
- else:
- self.logger.warning("the %s test failed" % test_name)
- return None
-
-"""Try requesting cache_object and expect as output access denied (very active technique, fingerprintable) """
-def squid_cacheobject_request(self):
- url = ooni.helpers.get_random_url(self)
- test_name = "squid cacheobject"
- self.logger.info("RUNNING %s test" % test_name)
- if http_request(self, "GET", url, "cache_object://localhost/info"):
- soup = BeautifulSoup(self.data)
- if soup.find('strong') and soup.find('strong').string == "Access Denied.":
- self.logger.info("the %s test returned False" % test_name)
- return False
- else:
- self.logger.info("the %s test returned True" % test_name)
- return True
- else:
- self.logger.warning("the %s test failed" % test_name)
- return None
-
-
-def MSHTTP_CP_Tests(self):
- test_name = "MS HTTP Captive Portal"
- self.logger.info("RUNNING %s test" % test_name)
- experiment_url = "http://www.msftncsi.com/ncsi.txt"
- expectedResponse = "Microsoft NCSI" # Only this - nothing more
- expectedResponseCode = "200" # Must be this - nothing else
- label = "MS HTTP"
- headers = { 'User-Agent' : 'Microsoft NCSI' }
- content_match, experiment_code = http_content_match(experiment_url, expectedResponse,
- headers, label)
- status_match = http_status_code_match(expectedResponseCode,
- experiment_code)
- if status_match and content_match:
- self.logger.info("the %s test returned True" % test_name)
- return True
- else:
- print label + " experiment would conclude that the network is filtered."
- self.logger.info("the %s test returned False" % test_name)
- return False
-
-def AppleHTTP_CP_Tests(self):
- test_name = "Apple HTTP Captive Portal"
- self.logger.info("RUNNING %s test" % test_name)
- experiment_url = "http://www.apple.com/library/test/success.html"
- expectedResponse = "Success" # There is HTML that contains this string
- expectedResponseCode = "200"
- label = "Apple HTTP"
- headers = { 'User-Agent' : 'Mozilla/5.0 (iPhone; U; CPU like Mac OS X; en) '
- 'AppleWebKit/420+ (KHTML, like Gecko) Version/3.0'
- ' Mobile/1A543a Safari/419.3' }
- content_match, experiment_code = http_content_fuzzy_match(
- experiment_url, expectedResponse, headers)
- status_match = http_status_code_match(expectedResponseCode,
- experiment_code)
- if status_match and content_match:
- self.logger.info("the %s test returned True" % test_name)
- return True
- else:
- print label + " experiment would conclude that the network is filtered."
- print label + "content match:" + str(content_match) + " status match:" + str(status_match)
- self.logger.info("the %s test returned False" % test_name)
- return False
-
-def WC3_CP_Tests(self):
- test_name = "W3 Captive Portal"
- self.logger.info("RUNNING %s test" % test_name)
- url = "http://tools.ietf.org/html/draft-nottingham-http-portal-02"
- draftResponseCode = "428"
- label = "WC3 draft-nottingham-http-portal"
- response = http_fetch(url, label=label)
- responseCode = response.code
- if http_status_code_no_match(responseCode, draftResponseCode):
- self.logger.info("the %s test returned True" % test_name)
- return True
- else:
- print label + " experiment would conclude that the network is filtered."
- print label + " status match:" + status_match
- self.logger.info("the %s test returned False" % test_name)
- return False
-
-# Google ChromeOS fetches this url in guest mode
-# and they expect the user to authenticate
-def googleChromeOSHTTPTest(self):
- print "noop"
- #url = "http://www.google.com/"
-
-def SquidHeader_TransparentHTTP_Tests(self):
- return search_squid_headers(self)
-
-def SquidBadRequest_TransparentHTTP_Tests(self):
- return squid_search_bad_request(self)
-
-def SquidCacheobject_TransparentHTTP_Tests(self):
- return squid_cacheobject_request(self)
-
-
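
The captive-portal helpers above all reduce to one check: fetch a known URL with a vendor-specific User-Agent and compare body and status code against what an unfiltered network returns. A Python 3 sketch of that comparison with urllib; the MS NCSI URL and expected values are the ones quoted in the deleted code, but treat them as illustrative rather than guaranteed-stable endpoints.

import urllib.request

def content_and_status_match(url, expected_body, expected_code,
                             user_agent="Microsoft NCSI", timeout=10):
    """Fetch url and report whether body and status code look unfiltered."""
    req = urllib.request.Request(url, headers={"User-Agent": user_agent})
    with urllib.request.urlopen(req, timeout=timeout) as resp:
        body = resp.read().decode("utf-8", "replace").strip()
        return body == expected_body and resp.status == int(expected_code)

if __name__ == "__main__":
    ok = content_and_status_match("http://www.msftncsi.com/ncsi.txt",
                                  "Microsoft NCSI", 200)
    print("network looks unfiltered" if ok else "possible captive portal or filtering")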
diff --git a/old-to-be-ported-code/ooni/input.py b/old-to-be-ported-code/ooni/input.py
deleted file mode 100644
index c32ab48..0000000
--- a/old-to-be-ported-code/ooni/input.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/python
-
-class file:
- def __init__(self, name=None):
- if name:
- self.name = name
-
- def simple(self, name=None):
- """ Simple file parsing method:
- Read a file line by line and output an array with all its lines, without newlines
- """
- if name:
- self.name = name
- output = []
- try:
- f = open(self.name, "r")
- for line in f.readlines():
- output.append(line.strip())
- return output
- except:
- return output
-
- def csv(self, name=None):
- if name:
- self.name = name
-
- def yaml(self, name):
- if name:
- self.name = name
-
- def consensus(self, name):
- if name:
- self.name = name
diff --git a/old-to-be-ported-code/ooni/namecheck.py b/old-to-be-ported-code/ooni/namecheck.py
deleted file mode 100644
index 1a2a3f0..0000000
--- a/old-to-be-ported-code/ooni/namecheck.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python
-#
-# DNS tampering detection module
-# by Jacob Appelbaum <jacob(a)appelbaum.net>
-#
-# This module performs multiple DNS tests.
-
-import sys
-import ooni.dnsooni
-
-class DNS():
- def __init__(self, args):
- self.in_ = sys.stdin
- self.out = sys.stdout
- self.debug = False
- self.randomize = args.randomize
-
- def DNS_Tests(self):
- print "DNS tampering detection:"
- filter_name = "_DNS_Tests"
- tests = [ooni.dnsooni]
- for test in tests:
- for function_ptr in dir(test):
- if function_ptr.endswith(filter_name):
- filter_result = getattr(test, function_ptr)(self)
- if filter_result == True:
- print function_ptr + " thinks the network is clean"
- elif filter_result == None:
- print function_ptr + " failed"
- else:
- print function_ptr + " thinks the network is dirty"
-
- def main(self):
- for function_ptr in dir(self):
- if function_ptr.endswith("_Tests"):
- getattr(self, function_ptr)()
-
-if __name__ == '__main__':
- self.main()
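
namecheck.py here (and transparenthttp.py further down) share one dispatch pattern: scan a module for callables whose names end in a filter suffix, call each one, and classify the network as clean, dirty, or failed. A small Python 3 sketch of that dispatcher; the stand-in module is hypothetical and only exists to keep the snippet self-contained.

import types

def run_filtered_tests(module, suffix="_DNS_Tests"):
    """Run every callable in module whose name ends with suffix and report
    clean / dirty / failed the way namecheck.py does."""
    for name in dir(module):
        func = getattr(module, name)
        if name.endswith(suffix) and callable(func):
            result = func()
            if result is True:
                print(name, "thinks the network is clean")
            elif result is None:
                print(name, "failed")
            else:
                print(name, "thinks the network is dirty")

# Hypothetical stand-in for a real test module such as ooni.dnsooni.
fake = types.ModuleType("fake_tests")
fake.example_DNS_Tests = lambda: True
run_filtered_tests(fake)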
diff --git a/old-to-be-ported-code/ooni/plugins/__init__.py b/old-to-be-ported-code/ooni/plugins/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/old-to-be-ported-code/ooni/plugins/dnstest_plgoo.py b/old-to-be-ported-code/ooni/plugins/dnstest_plgoo.py
deleted file mode 100644
index 04782d4..0000000
--- a/old-to-be-ported-code/ooni/plugins/dnstest_plgoo.py
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/usr/bin/python
-
-import sys
-import re
-from pprint import pprint
-from twisted.internet import reactor, endpoints
-from twisted.names import client
-from ooni.plugooni import Plugoo
-from ooni.socksclient import SOCKSv4ClientProtocol, SOCKSWrapper
-
-class DNSTestPlugin(Plugoo):
- def __init__(self):
- self.name = ""
- self.type = ""
- self.paranoia = ""
- self.modules_to_import = []
- self.output_dir = ""
- self.buf = ""
- self.control_response = []
-
- def response_split(self, response):
- a = []
- b = []
- for i in response:
- a.append(i[0])
- b.append(i[1])
-
- return a,b
-
- def cb(self, type, hostname, dns_server, value):
- if self.control_response is None:
- self.control_response = []
- if type == 'control' and self.control_response != value:
- print "%s %s" % (dns_server, value)
- self.control_response.append((dns_server,value))
- pprint(self.control_response)
- if type == 'experiment':
- pprint(self.control_response)
- _, res = self.response_split(self.control_response)
- if value not in res:
- print "res (%s) : " % value
- pprint(res)
- print "---"
- print "%s appears to be censored on %s (%s != %s)" % (hostname, dns_server, res[0], value)
-
- else:
- print "%s appears to be clean on %s" % (hostname, dns_server)
- self.r2.servers = [('212.245.158.66',53)]
- print "HN: %s %s" % (hostname, value)
-
- def err(self, pck, error):
- pprint(pck)
- error.printTraceback()
- reactor.stop()
- print "error!"
- pass
-
- def ooni_main(self, args):
- self.experimentalproxy = ''
- self.test_hostnames = ['dio.it']
- self.control_dns = [('8.8.8.8',53), ('4.4.4.8',53)]
- self.experiment_dns = [('85.37.17.9',53),('212.245.158.66',53)]
-
- self.control_res = []
- self.control_response = None
-
- self.r1 = client.Resolver(None, [self.control_dns.pop()])
- self.r2 = client.Resolver(None, [self.experiment_dns.pop()])
-
- for hostname in self.test_hostnames:
- for dns_server in self.control_dns:
- self.r1.servers = [dns_server]
- f = self.r1.getHostByName(hostname)
- pck = (hostname, dns_server)
- f.addCallback(lambda x: self.cb('control', hostname, dns_server, x)).addErrback(lambda x: self.err(pck, x))
-
- for dns_server in self.experiment_dns:
- self.r2.servers = [dns_server]
- pck = (hostname, dns_server)
- f = self.r2.getHostByName(hostname)
- f.addCallback(lambda x: self.cb('experiment', hostname, dns_server, x)).addErrback(lambda x: self.err(pck, x))
-
- reactor.run()
-
diff --git a/old-to-be-ported-code/ooni/plugins/http_plgoo.py b/old-to-be-ported-code/ooni/plugins/http_plgoo.py
deleted file mode 100644
index 021e863..0000000
--- a/old-to-be-ported-code/ooni/plugins/http_plgoo.py
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/usr/bin/python
-
-import sys
-import re
-from twisted.internet import reactor, endpoints
-from twisted.web import client
-from ooni.plugooni import Plugoo
-from ooni.socksclient import SOCKSv4ClientProtocol, SOCKSWrapper
-
-class HttpPlugin(Plugoo):
- def __init__(self):
- self.name = ""
- self.type = ""
- self.paranoia = ""
- self.modules_to_import = []
- self.output_dir = ""
- self.buf = ''
-
- def cb(self, type, content):
- print "got %d bytes from %s" % (len(content), type) # DEBUG
- if not self.buf:
- self.buf = content
- else:
- if self.buf == content:
- print "SUCCESS"
- else:
- print "FAIL"
- reactor.stop()
-
- def endpoint(self, scheme, host, port):
- ep = None
- if scheme == 'http':
- ep = endpoints.TCP4ClientEndpoint(reactor, host, port)
- elif scheme == 'https':
- ep = endpoints.SSL4ClientEndpoint(reactor, host, port, context)
- return ep
-
- def ooni_main(self):
- # We don't have the Command object so cheating for now.
- url = 'http://check.torproject.org/'
- self.controlproxy = 'socks4a://127.0.0.1:9050'
- self.experimentalproxy = ''
-
- if not re.match("[a-zA-Z0-9]+\:\/\/[a-zA-Z0-9]+", url):
- return None
- scheme, host, port, path = client._parse(url)
-
- ctrl_dest = self.endpoint(scheme, host, port)
- if not ctrl_dest:
- raise Exception('unsupported scheme %s in %s' % (scheme, url))
- if self.controlproxy:
- _, proxy_host, proxy_port, _ = client._parse(self.controlproxy)
- control = SOCKSWrapper(reactor, proxy_host, proxy_port, ctrl_dest)
- else:
- control = ctrl_dest
- f = client.HTTPClientFactory(url)
- f.deferred.addCallback(lambda x: self.cb('control', x))
- control.connect(f)
-
- exp_dest = self.endpoint(scheme, host, port)
- if not exp_dest:
- raise Exception('unsupported scheme %s in %s' % (scheme, url))
- # FIXME: use the experiment proxy if there is one
- experiment = exp_dest
- f = client.HTTPClientFactory(url)
- f.deferred.addCallback(lambda x: self.cb('experiment', x))
- experiment.connect(f)
-
- reactor.run()
-
diff --git a/old-to-be-ported-code/ooni/plugins/marco_plgoo.py b/old-to-be-ported-code/ooni/plugins/marco_plgoo.py
deleted file mode 100644
index cb63df7..0000000
--- a/old-to-be-ported-code/ooni/plugins/marco_plgoo.py
+++ /dev/null
@@ -1,377 +0,0 @@
-#!/usr/bin/python
-# Copyright 2009 The Tor Project, Inc.
-# License at end of file.
-#
-# This tests connections to a list of Tor nodes in a given Tor consensus file
-# while also recording the certificates - it's not a perfect tool but complete
-# or even partial failure should raise alarms.
-#
-# This plugoo uses threads and as a result, it's not friendly to SIGINT signals.
-#
-
-import logging
-import socket
-import time
-import random
-import threading
-import sys
-import os
-try:
- from ooni.plugooni import Plugoo
-except:
- print "Error importing Plugoo"
-
-try:
- from ooni.common import Storage
-except:
- print "Error importing Storage"
-
-try:
- from ooni import output
-except:
- print "Error importing output"
-
-try:
- from ooni import input
-except:
- print "Error importing output"
-
-
-
-ssl = OpenSSL = None
-
-try:
- import ssl
-except ImportError:
- pass
-
-if ssl is None:
- try:
- import OpenSSL.SSL
- import OpenSSL.crypto
- except ImportError:
- pass
-
-if ssl is None and OpenSSL is None:
- if socket.ssl:
- print """Your Python is too old to have the ssl module, and you haven't
-installed pyOpenSSL. I'll try to work with what you've got, but I can't
-record certificates so well."""
- else:
- print """Your Python has no OpenSSL support. Upgrade to 2.6, install
-pyOpenSSL, or both."""
- sys.exit(1)
-
-################################################################
-
-# How many servers should we test in parallel?
-N_THREADS = 16
-
-# How long do we give individual socket operations to succeed or fail?
-# (Seconds)
-TIMEOUT = 10
-
-################################################################
-
-CONNECTING = "noconnect"
-HANDSHAKING = "nohandshake"
-OK = "ok"
-ERROR = "err"
-
-LOCK = threading.RLock()
-socket.setdefaulttimeout(TIMEOUT)
-
-def clean_pem_cert(cert):
- idx = cert.find('-----END')
- if idx > 1 and cert[idx-1] != '\n':
- cert = cert.replace('-----END','\n-----END')
- return cert
-
-def record((addr,port), state, extra=None, cert=None):
- LOCK.acquire()
- try:
- OUT.append({'addr' : addr,
- 'port' : port,
- 'state' : state,
- 'extra' : extra})
- if cert:
- CERT_OUT.append({'addr' : addr,
- 'port' : port,
- 'clean_cert' : clean_pem_cert(cert)})
- finally:
- LOCK.release()
-
-def probe(address,theCtx=None):
- sock = s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- logging.info("Opening socket to %s",address)
- try:
- s.connect(address)
- except IOError, e:
- logging.info("Error %s from socket connect.",e)
- record(address, CONNECTING, e)
- s.close()
- return
- logging.info("Socket to %s open. Launching SSL handshake.",address)
- if ssl:
- try:
- s = ssl.wrap_socket(s,cert_reqs=ssl.CERT_NONE,ca_certs=None)
- # "MARCO!"
- s.do_handshake()
- except IOError, e:
- logging.info("Error %s from ssl handshake",e)
- record(address, HANDSHAKING, e)
- s.close()
- sock.close()
- return
- cert = s.getpeercert(True)
- if cert != None:
- cert = ssl.DER_cert_to_PEM_cert(cert)
- elif OpenSSL:
- try:
- s = OpenSSL.SSL.Connection(theCtx, s)
- s.set_connect_state()
- s.setblocking(True)
- s.do_handshake()
- cert = s.get_peer_certificate()
- if cert != None:
- cert = OpenSSL.crypto.dump_certificate(
- OpenSSL.crypto.FILETYPE_PEM, cert)
- except IOError, e:
- logging.info("Error %s from OpenSSL handshake",e)
- record(address, HANDSHAKING, e)
- s.close()
- sock.close()
- return
- else:
- try:
- s = socket.ssl(s)
- s.write('a')
- cert = s.server()
- except IOError, e:
- logging.info("Error %s from socket.ssl handshake",e)
- record(address, HANDSHAKING, e)
- sock.close()
- return
-
- logging.info("SSL handshake with %s finished",address)
- # "POLO!"
- record(address,OK, cert=cert)
- if (ssl or OpenSSL):
- s.close()
- sock.close()
-
-def parseNetworkstatus(ns):
- for line in ns:
- if line.startswith('r '):
- r = line.split()
- yield (r[-3],int(r[-2]))
-
-def parseCachedDescs(cd):
- for line in cd:
- if line.startswith('router '):
- r = line.split()
- yield (r[2],int(r[3]))
-
-def worker(addrList, origLength):
- done = False
- logging.info("Launching thread.")
-
- if OpenSSL is not None:
- context = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_METHOD)
- else:
- context = None
-
- while True:
- LOCK.acquire()
- try:
- if addrList:
- print "Starting test %d/%d"%(
- 1+origLength-len(addrList),origLength)
- addr = addrList.pop()
- else:
- return
- finally:
- LOCK.release()
-
- try:
- logging.info("Launching probe for %s",addr)
- probe(addr, context)
- except Exception, e:
- logging.info("Unexpected error from %s",addr)
- record(addr, ERROR, e)
-
-def runThreaded(addrList, nThreads):
- ts = []
- origLen = len(addrList)
- for num in xrange(nThreads):
- t = threading.Thread(target=worker, args=(addrList,origLen))
- t.setName("Th#%s"%num)
- ts.append(t)
- t.start()
- for t in ts:
- logging.info("Joining thread %s",t.getName())
- t.join()
-
-def main(self, args):
- # BEGIN
- # This logic should be present in more or less all plugoos
- global OUT
- global CERT_OUT
- global OUT_DATA
- global CERT_OUT_DATA
- OUT_DATA = []
- CERT_OUT_DATA = []
-
- try:
- OUT = output.data(name=args.output.main) #open(args.output.main, 'w')
- except:
- print "No output file given. quitting..."
- return -1
-
- try:
- CERT_OUT = output.data(args.output.certificates) #open(args.output.certificates, 'w')
- except:
- print "No output cert file given. quitting..."
- return -1
-
- logging.basicConfig(format='%(asctime)s [%(levelname)s] [%(threadName)s] %(message)s',
- datefmt="%b %d %H:%M:%S",
- level=logging.INFO,
- filename=args.log)
- logging.info("============== STARTING NEW LOG")
- # END
-
- if ssl is not None:
- methodName = "ssl"
- elif OpenSSL is not None:
- methodName = "OpenSSL"
- else:
- methodName = "socket"
- logging.info("Running marco with method '%s'", methodName)
-
- addresses = []
-
- if args.input.ips:
- for fn in input.file(args.input.ips).simple():
- a, b = fn.split(":")
- addresses.append( (a,int(b)) )
-
- elif args.input.consensus:
- for fn in args:
- print fn
- for a,b in parseNetworkstatus(open(args.input.consensus)):
- addresses.append( (a,b) )
-
- if args.input.randomize:
- # Take a random permutation of the set the knuth way!
- for i in range(0, len(addresses)):
- j = random.randint(0, i)
- addresses[i], addresses[j] = addresses[j], addresses[i]
-
- if len(addresses) == 0:
- logging.error("No input source given, quiting...")
- return -1
-
- addresses = list(addresses)
-
- if not args.input.randomize:
- addresses.sort()
-
- runThreaded(addresses, N_THREADS)
-
-class MarcoPlugin(Plugoo):
- def __init__(self):
- self.name = ""
-
- self.modules = [ "logging", "socket", "time", "random", "threading", "sys",
- "OpenSSL.SSL", "OpenSSL.crypto", "os" ]
-
- self.input = Storage()
- self.input.ip = None
- try:
- c_file = os.path.expanduser("~/.tor/cached-consensus")
- open(c_file)
- self.input.consensus = c_file
- except:
- pass
-
- try:
- c_file = os.path.expanduser("~/tor/bundle/tor-browser_en-US/Data/Tor/cached-consensus")
- open(c_file)
- self.input.consensus = c_file
- except:
- pass
-
- if not self.input.consensus:
- print "Error importing consensus file"
- sys.exit(1)
-
- self.output = Storage()
- self.output.main = 'reports/marco-1.yamlooni'
- self.output.certificates = 'reports/marco_certs-1.out'
-
- # XXX This needs to be moved to a proper function
- # refactor, refactor and ... refactor!
- if os.path.exists(self.output.main):
- basedir = "/".join(self.output.main.split("/")[:-1])
- fn = self.output.main.split("/")[-1].split(".")
- ext = fn[1]
- name = fn[0].split("-")[0]
- i = fn[0].split("-")[1]
- i = int(i) + 1
- self.output.main = os.path.join(basedir, name + "-" + str(i) + "." + ext)
-
- if os.path.exists(self.output.certificates):
- basedir = "/".join(self.output.certificates.split("/")[:-1])
- fn = self.output.certificates.split("/")[-1].split(".")
- ext = fn[1]
- name = fn[0].split("-")[0]
- i = fn[0].split("-")[1]
- i = int(i) + 1
- self.output.certificates= os.path.join(basedir, name + "-" + str(i) + "." + ext)
-
- # We require for Tor to already be running or have recently run
- self.args = Storage()
- self.args.input = self.input
- self.args.output = self.output
- self.args.log = 'reports/marco.log'
-
- def ooni_main(self, cmd):
- self.args.input.randomize = cmd.randomize
- self.args.input.ips = cmd.listfile
- main(self, self.args)
-
-if __name__ == '__main__':
- if len(sys.argv) < 2:
- print >> sys.stderr, ("This script takes one or more networkstatus "
- "files as an argument.")
- self = None
- main(self, sys.argv[1:])
-
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#
-# * Neither the names of the copyright owners nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
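
A Python 3 sketch of the per-address probe at the heart of the marco plugoo: TCP connect, unverified TLS handshake, and the peer certificate captured as PEM. Only the standard-library ssl module is used; the commented-out address is hypothetical.

import socket
import ssl

def probe(addr, timeout=10):
    """Connect to (host, port), run a TLS handshake without verification (we
    only want to record the certificate), and return (state, pem_or_None)."""
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE
    try:
        sock = socket.create_connection(addr, timeout=timeout)
    except OSError:
        return "noconnect", None
    try:
        with ctx.wrap_socket(sock) as tls:
            der = tls.getpeercert(binary_form=True)
            pem = ssl.DER_cert_to_PEM_cert(der) if der else None
            return "ok", pem
    except (ssl.SSLError, OSError):
        return "nohandshake", None
    finally:
        sock.close()

# print(probe(("198.51.100.1", 443)))   # hypothetical relay address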
diff --git a/old-to-be-ported-code/ooni/plugins/proxy_plgoo.py b/old-to-be-ported-code/ooni/plugins/proxy_plgoo.py
deleted file mode 100644
index b2b4d0f..0000000
--- a/old-to-be-ported-code/ooni/plugins/proxy_plgoo.py
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/usr/bin/python
-
-import sys
-from twisted.internet import reactor, endpoints
-from twisted.web import client
-from ooni.plugooni import Plugoo
-from ooni.socksclient import SOCKSv4ClientProtocol, SOCKSWrapper
-
-class HttpPlugin(Plugoo):
- def __init__(self):
- self.name = ""
- self.type = ""
- self.paranoia = ""
- self.modules_to_import = []
- self.output_dir = ""
- self.buf = ''
-
- def cb(self, type, content):
- print "got %d bytes from %s" % (len(content), type) # DEBUG
- if not self.buf:
- self.buf = content
- else:
- if self.buf == content:
- print "SUCCESS"
- else:
- print "FAIL"
- reactor.stop()
-
- def endpoint(self, scheme, host, port):
- ep = None
- if scheme == 'http':
- ep = endpoints.TCP4ClientEndpoint(reactor, host, port)
- elif scheme == 'https':
- from twisted.internet import ssl
- ep = endpoints.SSL4ClientEndpoint(reactor, host, port,
- ssl.ClientContextFactory())
- return ep
-
- def ooni_main(self, cmd):
- # We don't have the Command object so cheating for now.
- url = cmd.hostname
-
- # FIXME: validate that url is on the form scheme://host[:port]/path
- scheme, host, port, path = client._parse(url)
-
- ctrl_dest = self.endpoint(scheme, host, port)
- if not ctrl_dest:
- raise Exception('unsupported scheme %s in %s' % (scheme, url))
- if cmd.controlproxy:
- assert scheme != 'https', "no support for proxied https atm, sorry"
- _, proxy_host, proxy_port, _ = client._parse(cmd.controlproxy)
- control = SOCKSWrapper(reactor, proxy_host, proxy_port, ctrl_dest)
- print "proxy: ", proxy_host, proxy_port
- else:
- control = ctrl_dest
- f = client.HTTPClientFactory(url)
- f.deferred.addCallback(lambda x: self.cb('control', x))
- control.connect(f)
-
- exp_dest = self.endpoint(scheme, host, port)
- if not exp_dest:
- raise Exception('unsupported scheme %s in %s' % (scheme, url))
- # FIXME: use the experiment proxy if there is one
- experiment = exp_dest
- f = client.HTTPClientFactory(url)
- f.deferred.addCallback(lambda x: self.cb('experiment', x))
- experiment.connect(f)
-
- reactor.run()
diff --git a/old-to-be-ported-code/ooni/plugins/simple_dns_plgoo.py b/old-to-be-ported-code/ooni/plugins/simple_dns_plgoo.py
deleted file mode 100644
index 87d3684..0000000
--- a/old-to-be-ported-code/ooni/plugins/simple_dns_plgoo.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python
-#
-# DNS tampering detection module
-# by Jacob Appelbaum <jacob(a)appelbaum.net>
-#
-# This module performs DNS queries against a known good resolver and a possible
-# bad resolver. We compare every resolved name against a list of known filters
-# - if we match, we ring a bell; otherwise, we list possible filter IP
-# addresses. There is a high false positive rate for sites that are GeoIP load
-# balanced.
-#
-
-import sys
-import ooni.dnsooni
-
-from ooni.plugooni import Plugoo
-
-class DNSBulkPlugin(Plugoo):
- def __init__(self):
- self.in_ = sys.stdin
- self.out = sys.stdout
- self.randomize = True # Pass this down properly
- self.debug = False
-
- def DNS_Tests(self):
- print "DNS tampering detection for list of domains:"
- tests = self.get_tests_by_filter(("_DNS_BULK_Tests"), (ooni.dnsooni))
- self.run_tests(tests)
-
- def magic_main(self):
- self.run_plgoo_tests("_Tests")
-
- def ooni_main(self, args):
- self.magic_main()
-
diff --git a/old-to-be-ported-code/ooni/plugins/tcpcon_plgoo.py b/old-to-be-ported-code/ooni/plugins/tcpcon_plgoo.py
deleted file mode 100644
index 01dee81..0000000
--- a/old-to-be-ported-code/ooni/plugins/tcpcon_plgoo.py
+++ /dev/null
@@ -1,278 +0,0 @@
-#!/usr/bin/python
-# Copyright 2011 The Tor Project, Inc.
-# License at end of file.
-#
-# This is a modified version of the marco plugoo. Given a list of #
-# IP:port addresses, this plugoo will attempt a TCP connection with each
-# host and write the results to a .yamlooni file.
-#
-# This plugoo uses threads and as a result, it's not friendly to SIGINT signals.
-#
-
-import logging
-import socket
-import time
-import random
-import threading
-import sys
-import os
-try:
- from ooni.plugooni import Plugoo
-except:
- print "Error importing Plugoo"
-
-try:
- from ooni.common import Storage
-except:
- print "Error importing Storage"
-
-try:
- from ooni import output
-except:
- print "Error importing output"
-
-try:
- from ooni import input
-except:
- print "Error importing output"
-
-################################################################
-
-# How many servers should we test in parallel?
-N_THREADS = 16
-
-# How long do we give individual socket operations to succeed or fail?
-# (Seconds)
-TIMEOUT = 10
-
-################################################################
-
-CONNECTING = "noconnect"
-OK = "ok"
-ERROR = "err"
-
-LOCK = threading.RLock()
-socket.setdefaulttimeout(TIMEOUT)
-
-# We will want to log the IP address, the port and the state
-def record((addr,port), state, extra=None):
- LOCK.acquire()
- try:
- OUT.append({'addr' : addr,
- 'port' : port,
- 'state' : state,
- 'extra' : extra})
- finally:
- LOCK.release()
-
-# For each IP address in the list, open a socket, write to the log and
-# then close the socket
-def probe(address,theCtx=None):
- sock = s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- logging.info("Opening socket to %s",address)
- try:
- s.connect(address)
- except IOError, e:
- logging.info("Error %s from socket connect.",e)
- record(address, CONNECTING, e)
- s.close()
- return
- logging.info("Socket to %s open. Successfully launched TCP handshake.",address)
- record(address, OK)
- s.close()
-
-def parseNetworkstatus(ns):
- for line in ns:
- if line.startswith('r '):
- r = line.split()
- yield (r[-3],int(r[-2]))
-
-def parseCachedDescs(cd):
- for line in cd:
- if line.startswith('router '):
- r = line.split()
- yield (r[2],int(r[3]))
-
-def worker(addrList, origLength):
- done = False
- context = None
-
- while True:
- LOCK.acquire()
- try:
- if addrList:
- print "Starting test %d/%d"%(
- 1+origLength-len(addrList),origLength)
- addr = addrList.pop()
- else:
- return
- finally:
- LOCK.release()
-
- try:
- logging.info("Launching probe for %s",addr)
- probe(addr, context)
- except Exception, e:
- logging.info("Unexpected error from %s",addr)
- record(addr, ERROR, e)
-
-def runThreaded(addrList, nThreads):
- ts = []
- origLen = len(addrList)
- for num in xrange(nThreads):
- t = threading.Thread(target=worker, args=(addrList,origLen))
- t.setName("Th#%s"%num)
- ts.append(t)
- t.start()
- for t in ts:
- t.join()
-
-def main(self, args):
- # BEGIN
- # This logic should be present in more or less all plugoos
- global OUT
- global OUT_DATA
- OUT_DATA = []
-
- try:
- OUT = output.data(name=args.output.main) #open(args.output.main, 'w')
- except:
- print "No output file given. quitting..."
- return -1
-
- logging.basicConfig(format='%(asctime)s [%(levelname)s] [%(threadName)s] %(message)s',
- datefmt="%b %d %H:%M:%S",
- level=logging.INFO,
- filename=args.log)
- logging.info("============== STARTING NEW LOG")
- # END
-
- methodName = "socket"
- logging.info("Running tcpcon with method '%s'", methodName)
-
- addresses = []
-
- if args.input.ips:
- for fn in input.file(args.input.ips).simple():
- a, b = fn.split(":")
- addresses.append( (a,int(b)) )
-
- elif args.input.consensus:
- for fn in args:
- print fn
- for a,b in parseNetworkstatus(open(args.input.consensus)):
- addresses.append( (a,b) )
-
- if args.input.randomize:
- # Take a random permutation of the set the knuth way!
- for i in range(0, len(addresses)):
- j = random.randint(0, i)
- addresses[i], addresses[j] = addresses[j], addresses[i]
-
- if len(addresses) == 0:
- logging.error("No input source given, quiting...")
- return -1
-
- addresses = list(addresses)
-
- if not args.input.randomize:
- addresses.sort()
-
- runThreaded(addresses, N_THREADS)
-
-class MarcoPlugin(Plugoo):
- def __init__(self):
- self.name = ""
-
- self.modules = [ "logging", "socket", "time", "random", "threading", "sys",
- "os" ]
-
- self.input = Storage()
- self.input.ip = None
- try:
- c_file = os.path.expanduser("~/.tor/cached-consensus")
- open(c_file)
- self.input.consensus = c_file
- except:
- pass
-
- try:
- c_file = os.path.expanduser("~/tor/bundle/tor-browser_en-US/Data/Tor/cached-consensus")
- open(c_file)
- self.input.consensus = c_file
- except:
- pass
-
- if not self.input.consensus:
- print "Error importing consensus file"
- sys.exit(1)
-
- self.output = Storage()
- self.output.main = 'reports/tcpcon-1.yamlooni'
- self.output.certificates = 'reports/tcpcon_certs-1.out'
-
- # XXX This needs to be moved to a proper function
- # refactor, refactor and ... refactor!
- if os.path.exists(self.output.main):
- basedir = "/".join(self.output.main.split("/")[:-1])
- fn = self.output.main.split("/")[-1].split(".")
- ext = fn[1]
- name = fn[0].split("-")[0]
- i = fn[0].split("-")[1]
- i = int(i) + 1
- self.output.main = os.path.join(basedir, name + "-" + str(i) + "." + ext)
-
- if os.path.exists(self.output.certificates):
- basedir = "/".join(self.output.certificates.split("/")[:-1])
- fn = self.output.certificates.split("/")[-1].split(".")
- ext = fn[1]
- name = fn[0].split("-")[0]
- i = fn[0].split("-")[1]
- i = int(i) + 1
- self.output.certificates= os.path.join(basedir, name + "-" + str(i) + "." + ext)
-
- # We require for Tor to already be running or have recently run
- self.args = Storage()
- self.args.input = self.input
- self.args.output = self.output
- self.args.log = 'reports/tcpcon.log'
-
- def ooni_main(self, cmd):
- self.args.input.randomize = cmd.randomize
- self.args.input.ips = cmd.listfile
- main(self, self.args)
-
-if __name__ == '__main__':
- if len(sys.argv) < 2:
- print >> sys.stderr, ("This script takes one or more networkstatus "
- "files as an argument.")
- self = None
- main(self, sys.argv[1:])
-
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#
-# * Neither the names of the copyright owners nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
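
The tcpcon plugoo is the same scan minus TLS: attempt a TCP connection to each host:port and record ok or noconnect. A Python 3 sketch using concurrent.futures in place of the hand-rolled worker threads and lock; the address list is hypothetical.

import socket
from concurrent.futures import ThreadPoolExecutor

def tcp_probe(addr, timeout=10):
    """Return (addr, state), mirroring tcpcon's OK / CONNECTING outcomes."""
    try:
        with socket.create_connection(addr, timeout=timeout):
            return addr, "ok"
    except OSError as e:
        return addr, "noconnect: %s" % e

addresses = [("198.51.100.1", 443), ("203.0.113.7", 9001)]   # hypothetical
with ThreadPoolExecutor(max_workers=16) as pool:             # N_THREADS above
    for addr, state in pool.map(tcp_probe, addresses):
        print(addr, state)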
diff --git a/old-to-be-ported-code/ooni/plugins/tor.py b/old-to-be-ported-code/ooni/plugins/tor.py
deleted file mode 100644
index 0d95d4d..0000000
--- a/old-to-be-ported-code/ooni/plugins/tor.py
+++ /dev/null
@@ -1,80 +0,0 @@
-import re
-import os.path
-import signal
-import subprocess
-import socket
-import threading
-import time
-import logging
-
-from pytorctl import TorCtl
-
-torrc = os.path.join(os.getcwd(),'torrc') #os.path.join(projroot, 'globaleaks', 'tor', 'torrc')
-# hiddenservice = os.path.join(projroot, 'globaleaks', 'tor', 'hiddenservice')
-
-class ThreadProc(threading.Thread):
- def __init__(self, cmd):
- threading.Thread.__init__(self)
- self.cmd = cmd
- self.proc = None
-
- def run(self):
- print "running"
- try:
- self.proc = subprocess.Popen(self.cmd,
- shell = False, stdout = subprocess.PIPE,
- stderr = subprocess.PIPE)
-
- except OSError:
- logging.fatal('cannot execute command')
-
-class Tor:
- def __init__(self):
- self.start()
-
- def check(self):
- conn = TorCtl.connect()
- if conn != None:
- conn.close()
- return True
-
- return False
-
-
- def start(self):
- if not os.path.exists(torrc):
- raise OSError("torrc doesn't exist (%s)" % torrc)
-
- tor_cmd = ["tor", "-f", torrc]
-
- torproc = ThreadProc(tor_cmd)
- torproc.run()
-
- bootstrap_line = re.compile("Bootstrapped 100%: ")
-
- while True:
- if torproc.proc == None:
- time.sleep(1)
- continue
-
- init_line = torproc.proc.stdout.readline().strip()
-
- if not init_line:
- torproc.proc.kill()
- return False
-
- if bootstrap_line.search(init_line):
- break
-
- return True
-
- def stop(self):
- if not self.check():
- return
-
- conn = TorCtl.connect()
- if conn != None:
- conn.send_signal("SHUTDOWN")
- conn.close()
-
-t = Tor()
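
A Python 3 sketch of what tor.py's start() does: launch tor against a torrc and block until the "Bootstrapped 100%" line appears on stdout. The torrc path is an assumption; point it at wherever the config actually lives.

import subprocess

def start_tor(torrc="torrc", max_lines=1000):
    """Launch 'tor -f torrc' and wait for full bootstrap.
    Return the Popen handle on success, None on failure."""
    proc = subprocess.Popen(["tor", "-f", torrc],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            text=True)
    for _ in range(max_lines):
        line = proc.stdout.readline()
        if not line:                     # tor exited before bootstrapping
            proc.kill()
            return None
        if "Bootstrapped 100%" in line:
            return proc
    proc.kill()
    return None

# tor_proc = start_tor("/path/to/torrc")   # path is an assumption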
diff --git a/old-to-be-ported-code/ooni/plugins/torrc b/old-to-be-ported-code/ooni/plugins/torrc
deleted file mode 100644
index b9ffc80..0000000
--- a/old-to-be-ported-code/ooni/plugins/torrc
+++ /dev/null
@@ -1,9 +0,0 @@
-SocksPort 9050
-ControlPort 9051
-VirtualAddrNetwork 10.23.47.0/10
-AutomapHostsOnResolve 1
-TransPort 9040
-TransListenAddress 127.0.0.1
-DNSPort 5353
-DNSListenAddress 127.0.0.1
-
diff --git a/old-to-be-ported-code/ooni/plugooni.py b/old-to-be-ported-code/ooni/plugooni.py
deleted file mode 100644
index 17f17b3..0000000
--- a/old-to-be-ported-code/ooni/plugooni.py
+++ /dev/null
@@ -1,106 +0,0 @@
-#!/usr/bin/env python
-#
-# Plugooni, ooni plugin module for loading plgoo files.
-# by Jacob Appelbaum <jacob(a)appelbaum.net>
-# Arturo Filasto' <art(a)fuffa.org>
-
-import sys
-import os
-
-import imp, pkgutil, inspect
-
-class Plugoo:
- def __init__(self, name, plugin_type, paranoia, author):
- self.name = name
- self.author = author
- self.type = plugin_type
- self.paranoia = paranoia
-
- """
- Expect a tuple of strings in 'filters' and a tuple of ooni 'plugins'.
- Return a list of (plugin, function) tuples that match 'filter' in 'plugins'.
- """
- def get_tests_by_filter(self, filters, plugins):
- ret_functions = []
-
- for plugin in plugins:
- for function_ptr in dir(plugin):
- if function_ptr.endswith(filters):
- ret_functions.append((plugin,function_ptr))
- return ret_functions
-
- """
- Expect a list of (plugin, function) tuples that must be run, and three strings 'clean'
- 'dirty' and 'failed'.
- Run the tests and print 'clean','dirty' or 'failed' according to the test result.
- """
- def run_tests(self, tests, clean="clean", dirty="dirty", failed="failed"):
- for test in tests:
- filter_result = getattr(test[0], test[1])(self)
- if filter_result == True:
- print test[1] + ": " + clean
- elif filter_result == None:
- print test[1] + ": " + failed
- else:
- print test[1] + ": " + dirty
-
- """
- Find all the tests belonging to plgoo 'self' and run them.
- We know the tests when we see them because they end in 'filter'.
- """
- def run_plgoo_tests(self, filter):
- for function_ptr in dir(self):
- if function_ptr.endswith(filter):
- getattr(self, function_ptr)()
-
-PLUGIN_PATHS = [os.path.join(os.getcwd(), "ooni", "plugins")]
-RESERVED_NAMES = [ "skel_plgoo" ]
-
-class Plugooni():
- def __init__(self, args):
- self.in_ = sys.stdin
- self.out = sys.stdout
- self.debug = False
- self.loadall = True
- self.plugin_name = args.plugin_name
- self.listfile = args.listfile
-
- self.plgoo_found = False
-
- # Print all the plugoons to stdout.
- def list_plugoons(self):
- print "Plugooni list:"
- for loader, name, ispkg in pkgutil.iter_modules(PLUGIN_PATHS):
- if name not in RESERVED_NAMES:
- print "\t%s" %(name.split("_")[0])
-
- # Return name of the plgoo class of a plugin.
- # We know because it always ends with "Plugin".
- def get_plgoo_class(self,plugin):
- for memb_name, memb in inspect.getmembers(plugin, inspect.isclass):
- if memb.__name__.endswith("Plugin"):
- return memb
-
- # This function is responsible for loading and running the plugoons
- # the user wants to run.
- def run(self, command_object):
- print "Plugooni: the ooni plgoo plugin module loader"
-
- # iterate all modules
- for loader, name, ispkg in pkgutil.iter_modules(PLUGIN_PATHS):
- # see if this module should be loaded
- if (self.plugin_name == "all") or (name == self.plugin_name+"_plgoo"):
- self.plgoo_found = True # we found at least one plgoo!
-
- file, pathname, desc = imp.find_module(name, PLUGIN_PATHS)
- # load module
- plugin = imp.load_module(name, file, pathname, desc)
- # instantiate plgoo class and call its ooni_main()
- self.get_plgoo_class(plugin)().ooni_main(command_object)
-
- # if we couldn't find the plgoo; whine to the user
- if self.plgoo_found is False:
- print "Plugooni could not find plugin '%s'!" %(self.plugin_name)
-
-if __name__ == '__main__':
- self.main()
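
Plugooni's loader walks PLUGIN_PATHS with pkgutil plus the long-deprecated imp module. A rough Python 3 equivalent of that discovery step using importlib; the *_plgoo naming rule and the plugin directory come from the code above, everything else is a sketch.

import importlib.util
import pkgutil

def load_plgoos(plugin_dirs, wanted="all"):
    """Yield (name, module) for every *_plgoo module under plugin_dirs,
    or only the requested one, mirroring Plugooni.run() above."""
    for finder, name, _ispkg in pkgutil.iter_modules(plugin_dirs):
        if not name.endswith("_plgoo"):
            continue
        if wanted != "all" and name != wanted + "_plgoo":
            continue
        spec = finder.find_spec(name)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        yield name, module

# for name, mod in load_plgoos(["ooni/plugins"]):
#     print("loaded", name)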
diff --git a/old-to-be-ported-code/ooni/transparenthttp.py b/old-to-be-ported-code/ooni/transparenthttp.py
deleted file mode 100644
index 19cc5d0..0000000
--- a/old-to-be-ported-code/ooni/transparenthttp.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-#
-# Captive Portal Detection With Multi-Vendor Emulation
-# by Jacob Appelbaum <jacob(a)appelbaum.net>
-#
-# This module performs multiple tests that match specific vendor
-# mitm proxies
-
-import sys
-import ooni.http
-import ooni.report
-
-class TransparentHTTPProxy():
- def __init__(self, args):
- self.in_ = sys.stdin
- self.out = sys.stdout
- self.debug = False
- self.logger = ooni.report.Log().logger
-
- def TransparentHTTPProxy_Tests(self):
- print "Transparent HTTP Proxy:"
- filter_name = "_TransparentHTTP_Tests"
- tests = [ooni.http]
- for test in tests:
- for function_ptr in dir(test):
- if function_ptr.endswith(filter_name):
- filter_result = getattr(test, function_ptr)(self)
- if filter_result == True:
- print function_ptr + " thinks the network is clean"
- elif filter_result == None:
- print function_ptr + " failed"
- else:
- print function_ptr + " thinks the network is dirty"
-
- def main(self):
- for function_ptr in dir(self):
- if function_ptr.endswith("_Tests"):
- getattr(self, function_ptr)()
-
-if __name__ == '__main__':
- self.main()
[ooni-probe/master] * Moved the /assets directory to /lists. I find that seeing a directory named
by isis@torproject.org 03 Nov '12
03 Nov '12
commit 7a34cb4bb9dd2d0f6b2d6deadb459501d72cecf5
Author: Isis Lovecruft <isis(a)torproject.org>
Date: Sat Nov 3 01:20:51 2012 +0000
* Moved the /assets directory to /lists. I find that seeing a directory named
"assets" does not immediately inform me that it is full of txt files with
lists of keywords, servers, hostnames and such to use as test inputs. Not
that "lists" is a final decision, but I think it's a little better. Feel
free to come up with something better! :)
---
lists/captive_portal_tests.txt.good | 4 +
lists/cctld.txt | 511 ++++++++++++++++++++++
lists/dns_servers.txt.bak | 6 +
lists/dns_servers.txt.bak2 | 1 +
lists/example_exp_list.txt | 3 +
lists/major_isp_dns_servers.txt | 796 +++++++++++++++++++++++++++++++++++
lists/short_hostname_list.txt | 7 +
lists/tld-list-cc.txt | 511 ++++++++++++++++++++++
lists/tld-list-mozilla.txt | 5 +
lists/top-1m.txt.bak2 | 11 +
10 files changed, 1855 insertions(+), 0 deletions(-)
diff --git a/lists/captive_portal_tests.txt.good b/lists/captive_portal_tests.txt.good
new file mode 100644
index 0000000..1bd016f
--- /dev/null
+++ b/lists/captive_portal_tests.txt.good
@@ -0,0 +1,4 @@
+
+http://ooni.nu, Open Observatory of Network Interference, 200
+http://www.patternsinthevoid.net/2CDB8B35pub.asc, mQINBE5qkHABEADVnasCm9w9hUff1E4iKnzcAdp4lx6XU5USmYdwKg2RQt2VFqWQ, 200
+http://www.google.com, Search the world's information, 200
diff --git a/lists/cctld.txt b/lists/cctld.txt
new file mode 100644
index 0000000..57e0cc8
--- /dev/null
+++ b/lists/cctld.txt
@@ -0,0 +1,511 @@
+.ac = Ascension Island
+
+.ad = Andorra
+
+.ae = United Arab Emirates
+
+.af = Afghanistan
+
+.ag = Antigua and Barbuda
+
+.ai = Anguilla
+
+.al = Albania
+
+.am = Armenia
+
+.an = Netherlands Antilles
+
+.ao = Angola
+
+.aq = Antarctica - no registrar
+
+.ar = Argentina
+
+.as = American Samoa
+
+.at = Austria
+
+.au = Australia
+
+.aw = Aruba - no registrar
+
+.ax = Aland Islands
+
+.az = Azerbaijan - no registrar
+
+.ba = Bosnia and Herzegovina
+
+.bb = Barbados
+
+.bd = Bangladesh - no registrar
+
+.be = Belgium
+
+.bf = Burkina Faso - no registrar
+
+.bg = Bulgaria
+
+.bh = Bahrain
+
+.bi = Burundi
+
+.bj = Benin ... (little info) DETAILS
+
+.bm = Bermuda
+
+.bn = Brunei Darussalam
+
+.bo = Bolivia
+
+.br = Brazil
+
+.bs = Bahamas
+
+.bt = Bhutan
+
+.bv = Bouvet Island - not in use
+
+.bw = Botswana - no registrar
+
+.by = Belarus
+
+.bz = Belize
+
+.ca = Canada
+
+.cc = Cocos (Keeling) Islands
+
+.cd = The Democratic Republic of the Congo
+
+.cf = Central African Republic - no registrar
+
+.cg = Republic of Congo
+
+.ch = Switzerland
+
+.ci = Cote d'Ivoire
+
+.ck = Cook Islands
+
+.cl = Chile
+
+.cm = Cameroon - no registrar - wildcarded
+
+.cn = China
+
+.co = Colombia
+
+.cr = Costa Rica
+
+.cs = (former) Serbia and Montenegro - no registrar - see: .me
+(.cs was also formerly the ISO_3166-1 code for Czechoslovakia, now .cs is closed.)
+
+.cu = Cuba - no registrar
+
+.cv = Cape Verde - no registrar
+
+.cx = Christmas Island
+
+.cy = Cyprus
+
+.cz = Czech Republic
+
+.dd = East Germany (obsolete)
+
+.de = Germany
+
+.dj = Djibouti - no information
+
+.dk = Denmark
+
+.dm = Dominica
+
+.do = Dominican Republic
+
+.dz = Algeria - no registrar
+
+.ec = Ecuador
+
+.ee = Estonia
+
+.eg = Egypt - DETAILS
+
+.eh = Western Sahara - no registrar
+
+.er = Eritrea - no registrar
+
+.es = Spain
+
+.et = Ethiopia
+
+.eu = European Union - DETAILS
+
+.fi = Finland
+
+.fj = Fiji
+
+.fk = Falkland Islands (Malvinas)
+
+.fm = Micronesia, Federal State of
+
+.fo = Faroe Islands
+
+.fr = France
+
+.ga = Gabon - no registrar
+
+.gb = Great Britain (United Kingdom) - reserved, see .uk
+
+.gd = Grenada
+
+.ge = Georgia
+
+.gf = French Guiana
+
+.gg = Guernsey
+
+.gh = Ghana
+
+.gi = Gibraltar
+
+.gl = Greenland
+
+.gm = Gambia
+
+.gn = Guinea
+
+.gp = Guadeloupe - no information
+
+.gq = Equatorial Guinea - no information
+
+.gr = Greece
+
+.gs = South Georgia and the
+South Sandwich Islands
+
+.gt = Guatemala
+
+.gu = Guam
+
+.gw = Guinea-Bissau - no registrar
+
+.gy = Guyana - no registrar
+
+.hk = Hong Kong
+
+.hm = Heard and McDonald Islands
+
+.hn = Honduras
+
+.hr = Croatia/Hrvatska
+
+.ht = Haiti - no registrar
+
+.hu = Hungary
+
+.id = Indonesia - no information
+
+.ie = Ireland
+
+.il = Israel
+
+.im = Isle of Man
+
+.in = India
+
+.io = British Indian Ocean Territory
+
+.iq = Iraq - no registrar
+
+.ir = Islamic Republic of Iran
+
+.is = Iceland
+
+.it = Italy
+
+.je = Jersey
+
+.jm = Jamaica - no registrar
+
+.jo = Jordan
+
+.jp = Japan
+
+.ke = Kenya
+
+.kg = Kyrgyzstan - no registrar
+
+.kh = Cambodia
+
+.ki = Kiribati
+
+.km = Comoros
+
+.kn = Saint Kitts and Nevis - no registrar
+
+.kp = Democratic People's Republic of Korea
+(North) - no registrar
+
+.kr = Republic of Korea (South)
+
+.kw = Kuwait - no registrar
+
+.ky = Cayman Islands
+
+.kz = Kazakhstan
+
+.la = Lao People's Democratic Republic (Laos)
+... DETAILS
+
+.lb = Lebanon
+
+.lc = Saint Lucia
+
+.li = Liechtenstein
+
+.lk = Sri Lanka
+
+.lr = Liberia
+
+.ls = Lesotho - no registrar
+
+.lt = Lithuania
+
+.lu = Luxembourg
+
+.lv = Latvia
+
+.ly = Libyan Arab Jamahiriya (Libya)
+
+.ma = Morocco
+
+.mc = Monaco
+
+.md = Moldova
+
+.me = Montenegro
+
+.mg = Madagascar
+
+.mh = Marshall Islands
+
+.mk = Macedonia
+
+.ml = Mali - no information
+
+.mm = Myanmar (formerly Burma) - no registrar
+
+.mn = Mongolia
+
+.mo = Macau
+
+.mp = Northern Mariana Islands
+
+.mq = Martinique - no information
+
+.mr = Mauritania
+
+.ms = Montserrat
+
+.mt = Malta
+
+.mu = Mauritius
+
+.mv = Maldives - no registrar
+
+.mw = Malawi
+
+.mx = Mexico
+
+.my = Malaysia
+
+.mz = Mozambique - no registrar
+
+.na = Namibia
+
+.nc = New Caledonia
+
+.ne = Niger - no information
+
+.nf = Norfolk Island
+
+.ng = Nigeria
+
+.ni = Nicaragua
+
+.nl = Netherlands
+
+.no = Norway
+
+.np = Nepal
+
+.nr = Nauru
+
+.nu = Niue
+
+.nz = New Zealand
+
+.om = Oman - Omantel.net.om not functioning
+
+.pa = Panama
+
+.pe = Peru
+
+.pf = French Polynesia - no registrar
+
+.pg = Papua New Guinea - no registrar
+
+.ph = Philippines
+
+.pk = Pakistan
+
+.pl = Poland
+
+.pm = Saint Pierre and Miquelon - not available
+
+.pn = Pitcairn Island
+
+.pr = Puerto Rico
+
+.ps = Palestinian Territories
+
+.pt = Portugal
+
+.pw = Palau
+
+.py = Paraguay
+
+.qa = Qatar
+
+.re = Reunion Island
+
+.ro = Romania
+
+.rs = Serbia - no registrar
+
+.ru = Russian Federation
+
+.rw = Rwanda
+
+.sa = Saudi Arabia
+
+.sb = Solomon Islands
+
+.sc = Seychelles
+
+.sd = Sudan
+
+.se = Sweden
+
+.sg = Singapore
+
+.sh = Saint Helena
+
+.si = Slovenia
+
+.sj = Svalbard and Jan Mayen Islands - not in use
+
+.sk = Slovak Republic
+
+.sl = Sierra Leone
+
+.sm = San Marino
+
+.sn = Senegal - no registrar
+
+.so = Somalia - no registrar
+
+.sr = Suriname
+
+.st = Sao Tome and Principe
+
+.su = Soviet Union
+
+.sv = El Salvador
+
+.sy = Syrian Arab Republic
+
+.sz = Swaziland
+
+.tc = Turks and Caicos Islands - no registrar
+
+.td = Chad - no registrar
+
+.tf = French Southern Territories - no registrar
+
+.tg = Togo
+
+.th = Thailand
+
+.tj = Tajikistan
+
+.tk = Tokelau
+
+.tl = Timor-Leste
+
+.tm = Turkmenistan
+
+.tn = Tunisia
+
+.to = Tonga
+
+.tp = East Timor - Closed. See: Timor-Leste
+
+.tr = Turkey
+
+.tt = Trinidad and Tobago
+
+.tv = Tuvalu
+
+.tw = Taiwan
+
+.tz = Tanzania
+
+.ua = Ukraine
+
+.ug = Uganda
+
+.uk = United Kingdom
+
+.um = United States Minor Outlying Islands
+- Withdrawn, no domains exist.
+
+.us = United States (USA)
+
+.uy = Uruguay
+
+.uz = Uzbekistan
+
+.va = Holy See (Vatican City State)- no registrar
+
+.vc = Saint Vincent and the Grenadines
+
+.ve = Venezuela
+
+.vg = British Virgin Islands
+
+.vi = U.S. Virgin Islands
+
+.vn = Vietnam
+
+.vu = Vanuatu
+
+.wf = Wallis and Futuna Islands - no registrar
+
+.ws = Western Samoa
+
+.ye = Yemen - no registrar
+
+.yt = Mayotte - no registrar
+
+.yu = Yugoslavia Withdrawn in favor of .me and .rs
+
+.za = South Africa
+
+.zm = Zambia - no registrar
+
+.zr = Zaire - Obsolete
+now: The Democratic Republic of the Congo (.cd)
+
+.zw = Zimbabwe - no registrar
diff --git a/lists/dns_servers.txt.bak b/lists/dns_servers.txt.bak
new file mode 100644
index 0000000..844e8d5
--- /dev/null
+++ b/lists/dns_servers.txt.bak
@@ -0,0 +1,6 @@
+209.244.0.3
+209.244.0.4
+208.67.222.222
+208.67.220.220
+156.154.70.1
+156.154.71.1
diff --git a/lists/dns_servers.txt.bak2 b/lists/dns_servers.txt.bak2
new file mode 100644
index 0000000..0c4b6f6
--- /dev/null
+++ b/lists/dns_servers.txt.bak2
@@ -0,0 +1 @@
+192.168.1.1
diff --git a/lists/example_exp_list.txt b/lists/example_exp_list.txt
new file mode 100644
index 0000000..42ab976
--- /dev/null
+++ b/lists/example_exp_list.txt
@@ -0,0 +1,3 @@
+86.59.30.36
+38.229.72.14
+38.229.72.16
diff --git a/lists/major_isp_dns_servers.txt b/lists/major_isp_dns_servers.txt
new file mode 100644
index 0000000..36b8098
--- /dev/null
+++ b/lists/major_isp_dns_servers.txt
@@ -0,0 +1,796 @@
+######################################
+## ISP DNS SERVERS BY COUNTRY
+######################################
+## USA
+######################################
+##
+## AT&T
+68.94.156.1
+68.94.157.1
+##
+## ACS Alaska
+209.193.4.7
+209.112.160.2
+##
+## AOL
+205.188.146.145
+##
+## Century Link
+207.14.235.234
+67.238.98.162
+74.4.19.187
+##
+## Charter
+24.296.64.53
+##
+## Cincinnati Bell, ZoomTown
+216.68.4.10
+216.68.5.10
+##
+## Cincinnati Bell, Fuze
+216.68.1.100
+216.68.2.100
+##
+## Comcast, General DNS Servers
+## West Coast
+68.87.85.98
+2001:558:1004:6:68:87:85:98
+## East Coast
+68.87.64.146
+2001:558:1002:B:68:87:64:146
+##
+## Comcast, Albuquerque
+68.87.85.98
+68.87.69.146
+2001:558:1004:6:68:87:85:98
+2001:558:100C:D:68:87:69:146
+##
+## Comcast, Atlanta
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Augusta
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Battle Creek
+68.87.77.130
+68.87.72.130
+2001:558:1016:C:68:87:77:130
+2001:558:100E:4:68:87:72:130
+##
+## Comcast, Charleston
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Chattanooga
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Chesterfield
+68.87.73.242
+68.87.71.226
+2001:558:1010:8:68:87:73:242
+2001:558:1000:E:68:87:71:226
+##
+## Comcast, Chicago
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Colorado
+68.87.85.98
+68.87.69.146
+2001:558:1004:6:68:87:85:98
+2001:558:100C:D:68:87:69:146
+##
+## Comcast, Connecticut
+68.87.71.226
+68.87.73.242
+2001:558:1000:E:68:87:71:226
+2001:558:1010:8:68:87:73:242
+##
+## Comcast, Dallas
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, East Tennessee
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Elyria
+68.87.75.194
+68.87.64.146
+2001:558:1001:C:68:87:75:194
+2001:558:1002:B:68:87:64:146
+##
+## Comcast, Fort Wayne
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Fresno
+68.87.76.178
+68.87.78.130
+2001:558:1014:F:68:87:76:178
+2001:558:1018:6:68:87:78:130
+##
+## Comcast, Hattiesburg-Laurel
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Huntsville
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Illinois
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Independence
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Indianapolis
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Jacksonville
+68.87.74.162
+68.87.68.162
+2001:558:1012:6:68:87:74:162
+2001:558:100A:4:68:87:68:162
+##
+## Comcast, Knoxville
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Lake County
+68.87.74.162
+68.87.68.162
+2001:558:1012:6:68:87:74:162
+2001:558:100A:4:68:87:68:162
+##
+## Comcast, Little Rock
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Los Angeles
+68.87.76.178
+68.87.78.130
+2001:558:1014:F:68:87:76:178
+2001:558:1018:6:68:87:78:130
+##
+## Comcast, Massachusetts
+68.87.71.226
+68.87.73.242
+2001:558:1000:E:68:87:71:226
+2001:558:1010:8:68:87:73:242
+##
+## Comcast, Meridian
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+## Comcast, Miami
+68.87.74.162
+68.87.68.162
+2001:558:1012:6:68:87:74:162
+2001:558:100A:4:68:87:68:162
+##
+## Comcast, Michigan
+68.87.77.130
+68.87.72.130
+2001:558:1016:C:68:87:77:130
+2001:558:100E:4:68:87:72:130
+## Comcast, Minnesota
+68.87.77.130
+68.87.72.130
+2001:558:1016:C:68:87:77:130
+2001:558:100E:4:68:87:72:130
+##
+## Comcast, Mobile
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Muncie
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Naples
+68.87.74.162
+68.87.68.162
+2001:558:1012:6:68:87:74:162
+2001:558:100A:4:68:87:68:162
+##
+## Comcast, Nashville
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, New England
+68.87.71.226
+68.87.73.242
+2001:558:1000:E:68:87:71:226
+2001:558:1010:8:68:87:73:242
+##
+## Comcast, Olathe
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Oregon
+68.87.69.146
+68.87.85.98
+2001:558:100C:D:68:87:69:146
+2001:558:1004:6:68:87:85:98
+##
+## Comcast, Paducah
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Panama City
+68.87.74.162
+68.87.68.162
+2001:558:1012:6:68:87:74:162
+2001:558:100A:4:68:87:68:162
+##
+## Comcast, Pennsylvania
+68.87.75.194
+68.87.64.146
+2001:558:1001:C:68:87:75:194
+2001:558:1002:B:68:87:64:146
+##
+## Comcast, Philadelphia
+68.87.64.146
+68.87.75.194
+2001:558:1002:B:68:87:64:146
+2001:558:1001:C:68:87:75:194
+##
+## Comcast, Pima
+68.87.85.98
+68.87.69.146
+2001:558:1004:6:68:87:85:98
+2001:558:100C:D:68:87:69:146
+##
+## Comcast, Richmond
+68.87.73.242
+68.87.71.226
+2001:558:1010:8:68:87:73:242
+2001:558:1000:E:68:87:71:226
+##
+## Comcast, Sacramento
+68.87.76.178
+68.87.78.130
+2001:558:1014:F:68:87:76:178
+2001:558:1018:6:68:87:78:130
+##
+## Comcast, San Francisco Bay Area
+68.87.76.178
+68.87.78.130
+2001:558:1014:F:68:87:76:178
+2001:558:1018:6:68:87:78:130
+##
+## Comcast, Savannah
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, South Bend
+68.87.72.130
+68.87.77.130
+2001:558:100E:4:68:87:72:130
+2001:558:1016:C:68:87:77:130
+##
+## Comcast, Spokane
+68.87.69.146
+68.87.85.98
+2001:558:100C:D:68:87:69:146
+2001:558:1004:6:68:87:85:98
+##
+## Comcast, Stockton
+68.87.76.178
+68.87.78.130
+2001:558:1014:F:68:87:76:178
+2001:558:1018:6:68:87:78:130
+##
+## Comcast, Tallahassee
+68.87.74.162
+68.87.68.162
+2001:558:1012:6:68:87:74:162
+2001:558:100A:4:68:87:68:162
+##
+## Comcast, Texas
+68.87.85.98
+68.87.69.146
+2001:558:1004:6:68:87:85:98
+2001:558:100C:D:68:87:69:146
+##
+## Comcast, Tuscaloosa
+68.87.68.162
+68.87.74.162
+2001:558:100A:4:68:87:68:162
+2001:558:1012:6:68:87:74:162
+##
+## Comcast, Utah
+68.87.85.98
+68.87.69.146
+2001:558:1004:6:68:87:85:98
+2001:558:100C:D:68:87:69:146
+##
+## Comcast, Washington
+68.87.69.146
+68.87.85.98
+2001:558:100C:D:68:87:69:146
+2001:558:1004:6:68:87:85:98
+##
+## Comcast, Washington DC
+68.87.73.242
+68.87.71.226
+2001:558:1010:8:68:87:73:242
+2001:558:1000:E:68:87:71:226
+##
+## Comcast, West Florida
+68.87.74.162
+68.87.68.162
+2001:558:1012:6:68:87:74:162
+2001:558:100A:4:68:87:68:162
+##
+## Earthlink
+207.69.188.185
+207.69.188.186
+207.69.188.187
+##
+############################
+## UK
+############################
+##
+## AAISP
+217.169.20.20
+217.169.20.21
+2001:8b0::2020
+2001:8b0::2021
+##
+## AOL Broadband
+64.12.51.132
+149.174.221.8
+205.188.157.232
+205.188.146.145
+##
+## BE Unlimited
+87.194.0.51
+87.194.0.52
+87.194.0.66
+87.194.0.67
+##
+## BT Broadband
+62.6.40.178
+62.6.40.162
+194.72.9.38
+194.72.9.34
+194.72.0.98
+194.72.0.114
+194.74.65.68
+194.74.65.69
+##
+## Bulldog Broadband North
+212.158.248.5
+212.158.248.6
+##
+## Bulldog Broadband South
+83.146.21.5
+83.146.21.6
+##
+## Bytel
+80.76.204.35
+80.76.200.69
+##
+## Clara.net
+195.8.69.7
+195.8.69.12
+##
+## Datanet
+80.68.34.6
+77.241.177.2
+80.68.34.8
+##
+## Demon Internet
+158.152.1.58
+158.152.1.43
+##
+## Eclipse Internet
+212.104.130.9
+212.104.130.65
+##
+## Entanet
+195.74.102.146
+195.74.102.147
+##
+## Exa Networks
+82.219.4.24
+82.219.4.25
+##
+## Fast
+78.143.192.10
+78.143.192.20
+##
+## Freedom 2 Surf
+194.106.56.6
+194.106.33.42
+##
+## IDNet
+212.69.36.3
+212.69.36.2
+212.69.40.2
+##
+## Karoo
+212.50.160.100
+213.249.130.100
+##
+## Madasafish
+80.189.94.2
+80.189.92.2
+##
+## Merula
+217.146.97.10
+217.146.105.2
+##
+## Metronet
+213.162.97.65
+213.162.97.66
+##
+## Namesco
+195.7.224.57
+195.7.224.143
+##
+## NewNet
+212.87.64.10
+212.87.64.11
+##
+## Nildram
+213.208.106.212
+213.208.106.213
+##
+## O2
+87.194.0.51
+87.194.0.52
+87.194.0.66
+87.194.0.67
+##
+## Onetel
+212.67.96.129
+212.67.96.130
+##
+## Onyx
+194.176.65.5
+195.97.231.31
+##
+## Oosha
+213.190.161.254
+213.190.161.250
+213.190.160.9
+##
+## Orange
+195.92.195.94
+195.92.195.95
+##
+## Pipex
+62.241.160.200
+158.43.240.4
+212.74.112.66
+212.74.112.67
+##
+## PlusNet
+212.159.13.49
+212.159.13.50
+212.159.6.9
+212.159.6.10
+##
+## Powernet
+195.60.0.1
+195.60.0.5
+##
+## Prodigy
+198.83.19.241
+198.83.19.244
+207.115.59.241
+207.115.59.244
+##
+## SAQ
+195.2.130.209
+195.2.156.67
+##
+## Scotnet
+217.16.223.30
+217.16.223.31
+##
+## Sky Broadband
+87.86.189.16
+87.86.189.17
+195.40.1.36
+##
+## Skymarket
+212.84.173.66
+212.84.173.82
+##
+## Supanet
+213.40.66.126
+213.40.130.126
+##
+## TalkTalk
+62.24.199.13
+62.24.199.23
+62.24.128.18
+62.24.128.17
+##
+## Tesco
+194.168.4.100
+194.168.8.100
+##
+## Timewarp
+217.149.108.10
+217.149.108.11
+##
+## Timico
+195.54.225.10
+195.54.226.10
+##
+## Tiscali
+212.74.112.66
+212.74.112.67
+80.255.252.50
+80.255.252.58
+##
+## Topletter
+77.95.114.100
+77.95.112.1
+##
+## UK Online
+212.135.1.36
+195.40.1.36
+##
+## Utility Warehouse
+62.24.128.17
+62.24.128.18
+##
+## UTV Internet
+194.46.192.141
+194.46.192.142
+##
+## Virgin Media
+194.168.4.100
+194.168.8.100
+##
+## VISPA
+62.24.228.9
+62.24.228.10
+##
+## Zen Internet
+212.23.3.100
+212.23.6.100
+##
+####################################
+## NEW ZEALAND
+####################################
+##
+## Xtra
+202.27.158.40
+202.27.156.72
+##
+####################################
+## AUSTRALIA
+####################################
+##
+## AANet, Victoria
+203.24.100.125
+203.123.94.40
+##
+## AANet, South Australia
+203.24.100.125
+203.123.69.15
+##
+## AANet, Western Australia
+203.24.100.125
+202.76.136.40
+##
+## AANet, Queensland
+203.24.100.125
+202.76.170.40
+##
+## AANet, New South Wales
+203.24.100.125
+203.123.69.15
+##
+## AAPT, New South Wales
+192.189.54.33
+203.8.183.1
+##
+## AAPT, Victoria
+192.189.54.17
+203.8.183.1
+##
+## AAPT, Queensland
+192.189.54.33
+203.8.183.1
+##
+## AAPT, Tasmania
+192.189.54.17
+203.8.183.1
+##
+## AAPT, Australian Capital Territory
+192.189.54.33
+203.8.183.1
+##
+## AAPT, South Australia
+192.189.54.17
+203.8.183.1
+##
+## AAPT, Northern Territory
+192.189.54.17
+203.8.183.1
+##
+## AAPT, Western Australia
+192.189.54.17
+203.8.183.1
+##
+## Adam
+122.49.191.252
+122.49.191.253
+##
+## Amnet
+203.161.127.1
+203.153.224.42
+##
+## Comcen
+203.23.236.66
+203.23.236.69
+##
+## Dodo
+203.220.32.121
+203.220.32.122
+203.220.32.123
+##
+## Exetel
+220.233.0.4
+220.233.0.3
+##
+## iiNet
+203.0.178.191
+203.0.178.191
+##
+## Internode
+192.231.203.132
+192.231.203.3
+2001:44b8:1::1
+2001:44b8:2::2
+##
+## iPrimus, New South Wales
+203.134.64.66
+203.134.65.66
+##
+## iPrimus, Victoria
+203.134.24.70
+203.134.26.70
+##
+## iPrimus, Queensland
+203.134.12.90
+203.134.102.90
+##
+## iPrimus, Western Australia
+203.134.17.90
+211.26.25.90
+##
+## Netspace
+210.15.254.240
+210.15.254.241
+##
+## Optus
+211.29.132.12
+198.142.0.51
+##
+## People Telecom, New South Wales
+202.154.123.97
+218.214.227.3
+##
+## People Telecom, Northern Territory
+202.154.92.5
+218.214.228.97
+##
+## People Telecom, Queensland
+218.214.227.3
+202.154.123.97
+##
+## People Telecom, South Australia
+218.214.228.97
+218.214.17.1
+##
+## People Telecom, Victoria
+218.214.17.1
+218.214.228.97
+##
+## People Telecom, Western Australia
+202.154.92.5
+218.214.228.97
+##
+## Spin Internet
+203.23.236.66
+203.23.236.69
+##
+## Telstra BigPond, New South Wales
+61.9.194.49
+61.9.195.193
+##
+## Telstra BigPond, Victoria
+61.9.133.193
+61.9.134.49
+##
+## Telstra BigPond, Queensland
+61.9.211.33
+61.9.211.1
+##
+## Telstra BigPond, Tasmania
+61.9.188.33
+61.9.134.49
+##
+## Telstra BigPond, Australian Capital Territory
+61.9.207.1
+61.9.195.193
+##
+## Telstra BigPond, South Australia
+61.9.226.33
+61.9.194.49
+##
+## Telstra BigPond, Northern Territory
+61.9.226.33
+61.9.194.49
+##
+## Telstra BigPond, Western Australia
+61.9.242.33
+61.9.226.33
+##
+## TPG
+203.12.160.35
+203.12.160.36
+203.12.160.37
+##
+## Westnet
+203.21.20.20
+203.10.1.9
+########################################
diff --git a/lists/short_hostname_list.txt b/lists/short_hostname_list.txt
new file mode 100644
index 0000000..f13c702
--- /dev/null
+++ b/lists/short_hostname_list.txt
@@ -0,0 +1,7 @@
+torproject.org
+google.com
+ooni.nu
+torrentz.eu
+anarchyplanet.org
+riseup.net
+indymedia.org
diff --git a/lists/tld-list-cc.txt b/lists/tld-list-cc.txt
new file mode 100644
index 0000000..57e0cc8
--- /dev/null
+++ b/lists/tld-list-cc.txt
@@ -0,0 +1,511 @@
+.ac = Ascension Island
+
+.ad = Andorra
+
+.ae = United Arab Emirates
+
+.af = Afghanistan
+
+.ag = Antigua and Barbuda
+
+.ai = Anguilla
+
+.al = Albania
+
+.am = Armenia
+
+.an = Netherlands Antilles
+
+.ao = Angola
+
+.aq = Antarctica - no registrar
+
+.ar = Argentina
+
+.as = American Samoa
+
+.at = Austria
+
+.au = Australia
+
+.aw = Aruba - no registrar
+
+.ax = Aland Islands
+
+.az = Azerbaijan - no registrar
+
+.ba = Bosnia and Herzegovina
+
+.bb = Barbados
+
+.bd = Bangladesh - no registrar
+
+.be = Belgium
+
+.bf = Burkina Faso - no registrar
+
+.bg = Bulgaria
+
+.bh = Bahrain
+
+.bi = Burundi
+
+.bj = Benin ... (little info) DETAILS
+
+.bm = Bermuda
+
+.bn = Brunei Darussalam
+
+.bo = Bolivia
+
+.br = Brazil
+
+.bs = Bahamas
+
+.bt = Bhutan
+
+.bv = Bouvet Island - not in use
+
+.bw = Botswana - no registrar
+
+.by = Belarus
+
+.bz = Belize
+
+.ca = Canada
+
+.cc = Cocos (Keeling) Islands
+
+.cd = The Democratic Republic of the Congo
+
+.cf = Central African Republic - no registrar
+
+.cg = Republic of Congo
+
+.ch = Switzerland
+
+.ci = Cote d'Ivoire
+
+.ck = Cook Islands
+
+.cl = Chile
+
+.cm = Cameroon - no registrar - wildcarded
+
+.cn = China
+
+.co = Colombia
+
+.cr = Costa Rica
+
+.cs = (former) Serbia and Montenegro - no registrar - see: .me
+(.cs was also formerly the ISO_3166-1 code for Czechoslovakia, now .cs is closed.)
+
+.cu = Cuba - no registrar
+
+.cv = Cape Verde - no registrar
+
+.cx = Christmas Island
+
+.cy = Cyprus
+
+.cz = Czech Republic
+
+.dd = East Germany (obsolete)
+
+.de = Germany
+
+.dj = Djibouti - no information
+
+.dk = Denmark
+
+.dm = Dominica
+
+.do = Dominican Republic
+
+.dz = Algeria - no registrar
+
+.ec = Ecuador
+
+.ee = Estonia
+
+.eg = Egypt - DETAILS
+
+.eh = Western Sahara - no registrar
+
+.er = Eritrea - no registrar
+
+.es = Spain
+
+.et = Ethiopia
+
+.eu = European Union - DETAILS
+
+.fi = Finland
+
+.fj = Fiji
+
+.fk = Falkland Islands (Malvinas)
+
+.fm = Micronesia, Federal State of
+
+.fo = Faroe Islands
+
+.fr = France
+
+.ga = Gabon - no registrar
+
+.gb = Great Britain (United Kingdom) - reserved, see .uk
+
+.gd = Grenada
+
+.ge = Georgia
+
+.gf = French Guiana
+
+.gg = Guernsey
+
+.gh = Ghana
+
+.gi = Gibraltar
+
+.gl = Greenland
+
+.gm = Gambia
+
+.gn = Guinea
+
+.gp = Guadeloupe - no information
+
+.gq = Equatorial Guinea - no information
+
+.gr = Greece
+
+.gs = South Georgia and the
+South Sandwich Islands
+
+.gt = Guatemala
+
+.gu = Guam
+
+.gw = Guinea-Bissau - no registrar
+
+.gy = Guyana - no registrar
+
+.hk = Hong Kong
+
+.hm = Heard and McDonald Islands
+
+.hn = Honduras
+
+.hr = Croatia/Hrvatska
+
+.ht = Haiti - no registrar
+
+.hu = Hungary
+
+.id = Indonesia - no information
+
+.ie = Ireland
+
+.il = Israel
+
+.im = Isle of Man
+
+.in = India
+
+.io = British Indian Ocean Territory
+
+.iq = Iraq - no registrar
+
+.ir = Islamic Republic of Iran
+
+.is = Iceland
+
+.it = Italy
+
+.je = Jersey
+
+.jm = Jamaica - no registrar
+
+.jo = Jordan
+
+.jp = Japan
+
+.ke = Kenya
+
+.kg = Kyrgyzstan - no registrar
+
+.kh = Cambodia
+
+.ki = Kiribati
+
+.km = Comoros
+
+.kn = Saint Kitts and Nevis - no registrar
+
+.kp = Democratic People's Republic of Korea
+(North) - no registrar
+
+.kr = Republic of Korea (South)
+
+.kw = Kuwait - no registrar
+
+.ky = Cayman Islands
+
+.kz = Kazakhstan
+
+.la = Lao People's Democratic Republic (Laos)
+... DETAILS
+
+.lb = Lebanon
+
+.lc = Saint Lucia
+
+.li = Liechtenstein
+
+.lk = Sri Lanka
+
+.lr = Liberia
+
+.ls = Lesotho - no registrar
+
+.lt = Lithuania
+
+.lu = Luxembourg
+
+.lv = Latvia
+
+.ly = Libyan Arab Jamahiriya (Libya)
+
+.ma = Morocco
+
+.mc = Monaco
+
+.md = Moldova
+
+.me = Montenegro
+
+.mg = Madagascar
+
+.mh = Marshall Islands
+
+.mk = Macedonia
+
+.ml = Mali - no information
+
+.mm = Myanmar (formerly Burma) - no registrar
+
+.mn = Mongolia
+
+.mo = Macau
+
+.mp = Northern Mariana Islands
+
+.mq = Martinique - no information
+
+.mr = Mauritania
+
+.ms = Montserrat
+
+.mt = Malta
+
+.mu = Mauritius
+
+.mv = Maldives - no registrar
+
+.mw = Malawi
+
+.mx = Mexico
+
+.my = Malaysia
+
+.mz = Mozambique - no registrar
+
+.na = Namibia
+
+.nc = New Caledonia
+
+.ne = Niger - no information
+
+.nf = Norfolk Island
+
+.ng = Nigeria
+
+.ni = Nicaragua
+
+.nl = Netherlands
+
+.no = Norway
+
+.np = Nepal
+
+.nr = Nauru
+
+.nu = Niue
+
+.nz = New Zealand
+
+.om = Oman - Omantel.net.om not functioning
+
+.pa = Panama
+
+.pe = Peru
+
+.pf = French Polynesia - no registrar
+
+.pg = Papua New Guinea - no registrar
+
+.ph = Philippines
+
+.pk = Pakistan
+
+.pl = Poland
+
+.pm = Saint Pierre and Miquelon - not available
+
+.pn = Pitcairn Island
+
+.pr = Puerto Rico
+
+.ps = Palestinian Territories
+
+.pt = Portugal
+
+.pw = Palau
+
+.py = Paraguay
+
+.qa = Qatar
+
+.re = Reunion Island
+
+.ro = Romania
+
+.rs = Serbia - no registrar
+
+.ru = Russian Federation
+
+.rw = Rwanda
+
+.sa = Saudi Arabia
+
+.sb = Solomon Islands
+
+.sc = Seychelles
+
+.sd = Sudan
+
+.se = Sweden
+
+.sg = Singapore
+
+.sh = Saint Helena
+
+.si = Slovenia
+
+.sj = Svalbard and Jan Mayen Islands - not in use
+
+.sk = Slovak Republic
+
+.sl = Sierra Leone
+
+.sm = San Marino
+
+.sn = Senegal - no registrar
+
+.so = Somalia - no registrar
+
+.sr = Suriname
+
+.st = Sao Tome and Principe
+
+.su = Soviet Union
+
+.sv = El Salvador
+
+.sy = Syrian Arab Republic
+
+.sz = Swaziland
+
+.tc = Turks and Caicos Islands - no registrar
+
+.td = Chad - no registrar
+
+.tf = French Southern Territories - no registrar
+
+.tg = Togo
+
+.th = Thailand
+
+.tj = Tajikistan
+
+.tk = Tokelau
+
+.tl = Timor-Leste
+
+.tm = Turkmenistan
+
+.tn = Tunisia
+
+.to = Tonga
+
+.tp = East Timor - Closed. See: Timor-Leste
+
+.tr = Turkey
+
+.tt = Trinidad and Tobago
+
+.tv = Tuvalu
+
+.tw = Taiwan
+
+.tz = Tanzania
+
+.ua = Ukraine
+
+.ug = Uganda
+
+.uk = United Kingdom
+
+.um = United States Minor Outlying Islands
+- Withdrawn, no domains exist.
+
+.us = United States (USA)
+
+.uy = Uruguay
+
+.uz = Uzbekistan
+
+.va = Holy See (Vatican City State)- no registrar
+
+.vc = Saint Vincent and the Grenadines
+
+.ve = Venezuela
+
+.vg = British Virgin Islands
+
+.vi = U.S. Virgin Islands
+
+.vn = Vietnam
+
+.vu = Vanuatu
+
+.wf = Wallis and Futuna Islands - no registrar
+
+.ws = Western Samoa
+
+.ye = Yemen - no registrar
+
+.yt = Mayotte - no registrar
+
+.yu = Yugoslavia Withdrawn in favor of .me and .rs
+
+.za = South Africa
+
+.zm = Zambia - no registrar
+
+.zr = Zaire - Obsolete
+now: The Democratic Republic of the Congo (.cd)
+
+.zw = Zimbabwe - no registrar
diff --git a/lists/tld-list-mozilla.txt b/lists/tld-list-mozilla.txt
new file mode 100644
index 0000000..7902eee
--- /dev/null
+++ b/lists/tld-list-mozilla.txt
@@ -0,0 +1,5 @@
+--2012-05-19 13:07:53-- https://mxr.mozilla.org/mozilla-central/source/netwerk/dns/effective_tld_na…
+Resolving mxr.mozilla.org (mxr.mozilla.org) 63.245.215.42
+Connecting to mxr.mozilla.org (mxr.mozilla.org)|63.245.215.42|:443... connected.
+ERROR: The certificate of `mxr.mozilla.org' is not trusted.
+ERROR: The certificate of `mxr.mozilla.org' hasn't got a known issuer.
diff --git a/lists/top-1m.txt.bak2 b/lists/top-1m.txt.bak2
new file mode 100644
index 0000000..293e661
--- /dev/null
+++ b/lists/top-1m.txt.bak2
@@ -0,0 +1,11 @@
+1,torproject.org
+2,google.com
+3,facebook.com
+4,youtube.com
+5,yahoo.com
+6,baidu.com
+7,wikipedia.org
+8,live.com
+9,blogspot.com
+10,twitter.com
+11,qq.com

[ooni-probe/master] * Moved old code to /old-to-be-ported-code.
by isis@torproject.org 03 Nov '12
03 Nov '12
commit dc3393fef26d2d9b03e6403f46909de00f55bf17
Author: Isis Lovecruft <isis(a)torproject.org>
Date: Fri Nov 2 17:47:00 2012 +0000
* Moved old code to /old-to-be-ported-code.
---
.../old-api/.ropeproject/config.py | 85 +++++
.../old-api/.ropeproject/globalnames | Bin 0 -> 108 bytes
old-to-be-ported-code/old-api/.ropeproject/history | 1 +
.../old-api/.ropeproject/objectdb | Bin 0 -> 741 bytes
old-to-be-ported-code/old-api/TESTS_ARE_MOVING.txt | 8 +
old-to-be-ported-code/old-api/chinatrigger.py | 140 ++++++++
old-to-be-ported-code/old-api/daphn3.py | 152 ++++++++
old-to-be-ported-code/old-api/domclass.py | 216 +++++++++++
old-to-be-ported-code/old-api/dropin.cache | 243 +++++++++++++
old-to-be-ported-code/old-api/httpt.py | 94 +++++
old-to-be-ported-code/old-api/tcpconnect.py | 65 ++++
old-to-be-ported-code/old-api/tcpscan.py | 84 +++++
old-to-be-ported-code/very-old/TODO.plgoons | 79 ++++
old-to-be-ported-code/very-old/TO_BE_PORTED | 14 +
old-to-be-ported-code/very-old/ooni-probe.diff | 358 +++++++++++++++++++
old-to-be-ported-code/very-old/ooni/#namecheck.py# | 39 ++
old-to-be-ported-code/very-old/ooni/.DS_Store | Bin 0 -> 15364 bytes
old-to-be-ported-code/very-old/ooni/__init__.py | 12 +
old-to-be-ported-code/very-old/ooni/command.py | 250 +++++++++++++
.../very-old/ooni/dns_poisoning.py | 43 +++
old-to-be-ported-code/very-old/ooni/dnsooni.py | 356 ++++++++++++++++++
old-to-be-ported-code/very-old/ooni/helpers.py | 38 ++
old-to-be-ported-code/very-old/ooni/http.py | 306 ++++++++++++++++
old-to-be-ported-code/very-old/ooni/input.py | 33 ++
old-to-be-ported-code/very-old/ooni/namecheck.py | 39 ++
.../very-old/ooni/plugins/dnstest_plgoo.py | 84 +++++
.../very-old/ooni/plugins/http_plgoo.py | 70 ++++
.../very-old/ooni/plugins/marco_plgoo.py | 377 ++++++++++++++++++++
.../very-old/ooni/plugins/proxy_plgoo.py | 69 ++++
.../very-old/ooni/plugins/simple_dns_plgoo.py | 35 ++
.../very-old/ooni/plugins/tcpcon_plgoo.py | 278 ++++++++++++++
old-to-be-ported-code/very-old/ooni/plugins/tor.py | 80 ++++
old-to-be-ported-code/very-old/ooni/plugins/torrc | 9 +
old-to-be-ported-code/very-old/ooni/plugooni.py | 106 ++++++
.../very-old/ooni/transparenthttp.py | 41 +++
old-to-be-ported-code/very-old/traceroute.py | 108 ++++++
36 files changed, 3912 insertions(+), 0 deletions(-)
diff --git a/old-to-be-ported-code/old-api/.ropeproject/config.py b/old-to-be-ported-code/old-api/.ropeproject/config.py
new file mode 100644
index 0000000..ffebcd4
--- /dev/null
+++ b/old-to-be-ported-code/old-api/.ropeproject/config.py
@@ -0,0 +1,85 @@
+# The default ``config.py``
+
+
+def set_prefs(prefs):
+ """This function is called before opening the project"""
+
+ # Specify which files and folders to ignore in the project.
+ # Changes to ignored resources are not added to the history and
+ # VCSs. Also they are not returned in `Project.get_files()`.
+ # Note that ``?`` and ``*`` match all characters but slashes.
+ # '*.pyc': matches 'test.pyc' and 'pkg/test.pyc'
+ # 'mod*.pyc': matches 'test/mod1.pyc' but not 'mod/1.pyc'
+ # '.svn': matches 'pkg/.svn' and all of its children
+ # 'build/*.o': matches 'build/lib.o' but not 'build/sub/lib.o'
+ # 'build//*.o': matches 'build/lib.o' and 'build/sub/lib.o'
+ prefs['ignored_resources'] = ['*.pyc', '*~', '.ropeproject',
+ '.hg', '.svn', '_svn', '.git']
+
+ # Specifies which files should be considered python files. It is
+ # useful when you have scripts inside your project. Only files
+ # ending with ``.py`` are considered to be python files by
+ # default.
+ #prefs['python_files'] = ['*.py']
+
+ # Custom source folders: By default rope searches the project
+ # for finding source folders (folders that should be searched
+ # for finding modules). You can add paths to that list. Note
+ # that rope guesses project source folders correctly most of the
+ # time; use this if you have any problems.
+ # The folders should be relative to project root and use '/' for
+ # separating folders regardless of the platform rope is running on.
+ # 'src/my_source_folder' for instance.
+ #prefs.add('source_folders', 'src')
+
+ # You can extend python path for looking up modules
+ #prefs.add('python_path', '~/python/')
+
+ # Should rope save object information or not.
+ prefs['save_objectdb'] = True
+ prefs['compress_objectdb'] = False
+
+ # If `True`, rope analyzes each module when it is being saved.
+ prefs['automatic_soa'] = True
+ # The depth of calls to follow in static object analysis
+ prefs['soa_followed_calls'] = 0
+
+ # If `False` when running modules or unit tests "dynamic object
+ # analysis" is turned off. This makes them much faster.
+ prefs['perform_doa'] = True
+
+ # Rope can check the validity of its object DB when running.
+ prefs['validate_objectdb'] = True
+
+ # How many undos to hold?
+ prefs['max_history_items'] = 32
+
+ # Shows whether to save history across sessions.
+ prefs['save_history'] = True
+ prefs['compress_history'] = False
+
+ # Set the number spaces used for indenting. According to
+ # :PEP:`8`, it is best to use 4 spaces. Since most of rope's
+ # unit-tests use 4 spaces it is more reliable, too.
+ prefs['indent_size'] = 4
+
+ # Builtin and c-extension modules that are allowed to be imported
+ # and inspected by rope.
+ prefs['extension_modules'] = []
+
+ # Add all standard c-extensions to extension_modules list.
+ prefs['import_dynload_stdmods'] = True
+
+ # If `True` modules with syntax errors are considered to be empty.
+ # The default value is `False`; When `False` syntax errors raise
+ # `rope.base.exceptions.ModuleSyntaxError` exception.
+ prefs['ignore_syntax_errors'] = False
+
+ # If `True`, rope ignores unresolvable imports. Otherwise, they
+ # appear in the importing namespace.
+ prefs['ignore_bad_imports'] = False
+
+
+def project_opened(project):
+ """This function is called after opening the project"""
+ # Do whatever you like here!
diff --git a/old-to-be-ported-code/old-api/.ropeproject/globalnames b/old-to-be-ported-code/old-api/.ropeproject/globalnames
new file mode 100644
index 0000000..2877ef5
Binary files /dev/null and b/old-to-be-ported-code/old-api/.ropeproject/globalnames differ
diff --git a/old-to-be-ported-code/old-api/.ropeproject/history b/old-to-be-ported-code/old-api/.ropeproject/history
new file mode 100644
index 0000000..fcd9c96
--- /dev/null
+++ b/old-to-be-ported-code/old-api/.ropeproject/history
@@ -0,0 +1 @@
+]q(]q]qe.
\ No newline at end of file
diff --git a/old-to-be-ported-code/old-api/.ropeproject/objectdb b/old-to-be-ported-code/old-api/.ropeproject/objectdb
new file mode 100644
index 0000000..f276839
Binary files /dev/null and b/old-to-be-ported-code/old-api/.ropeproject/objectdb differ
diff --git a/old-to-be-ported-code/old-api/TESTS_ARE_MOVING.txt b/old-to-be-ported-code/old-api/TESTS_ARE_MOVING.txt
new file mode 100644
index 0000000..f4c0084
--- /dev/null
+++ b/old-to-be-ported-code/old-api/TESTS_ARE_MOVING.txt
@@ -0,0 +1,8 @@
+7/10/2012
+
+All new tests will be moved to the directory /nettests/.
+
+Tests that are in this directory are either here for historical reasons or have
+not yet been properly tested and do not yet fully support the new API.
+
+A.
diff --git a/old-to-be-ported-code/old-api/chinatrigger.py b/old-to-be-ported-code/old-api/chinatrigger.py
new file mode 100644
index 0000000..cf4bcb3
--- /dev/null
+++ b/old-to-be-ported-code/old-api/chinatrigger.py
@@ -0,0 +1,140 @@
+import random
+import string
+import struct
+import time
+
+from zope.interface import implements
+from twisted.python import usage
+from twisted.plugin import IPlugin
+from twisted.internet import protocol, defer
+from ooni.plugoo.tests import ITest, OONITest
+from ooni.plugoo.assets import Asset
+from ooni.utils import log
+from ooni.protocols.scapyproto import ScapyTest
+
+from ooni.lib.txscapy import txsr, txsend
+
+class scapyArgs(usage.Options):
+ optParameters = [['dst', 'd', None, 'Specify the target address'],
+ ['port', 'p', None, 'Specify the target port'],
+ ['pcap', 'f', None, 'The pcap file to write with the sent and received packets'],
+ ]
+
+class ChinaTriggerTest(ScapyTest):
+ """
+ This test is an OONI-based implementation of the C tool written
+ by Philipp Winter to engage Chinese probes in active scanning.
+
+ Example of running it:
+ ./ooni/ooniprobe.py chinatrigger -d 127.0.0.1 -p 8080 -f bla.pcap
+ """
+ implements(IPlugin, ITest)
+
+ shortName = "chinatrigger"
+ description = "Triggers the chinese probes into scanning"
+ requirements = ['root']
+ options = scapyArgs
+ blocking = False
+
+ receive = True
+ pcapfile = 'example_scapy.pcap'
+ timeout = 5
+
+ def initialize(self, reactor=None):
+ if not self.reactor:
+ from twisted.internet import reactor
+ self.reactor = reactor
+
+ @staticmethod
+ def set_random_servername(pkt):
+ ret = pkt[:121]
+ for i in range(16):
+ ret += random.choice(string.ascii_lowercase)
+ ret += pkt[121+16:]
+ return ret
+
+ @staticmethod
+ def set_random_time(pkt):
+ ret = pkt[:11]
+ ret += struct.pack('!I', int(time.time()))
+ ret += pkt[11+4:]
+ return ret
+
+ @staticmethod
+ def set_random_field(pkt):
+ ret = pkt[:15]
+ for i in range(28):
+ ret += chr(random.randint(0, 256))
+ ret += pkt[15+28:]
+ return ret
+
+ @staticmethod
+ def mutate(pkt, idx):
+ """
+ Slightly changed mutate function.
+ """
+ ret = pkt[:idx-1]
+ mutation = chr(random.randint(0, 256))
+ while mutation == pkt[idx]:
+ mutation = chr(random.randint(0, 256))
+ ret += mutation
+ ret += pkt[idx:]
+ return ret
+
+ @staticmethod
+ def set_all_random_fields(pkt):
+ pkt = ChinaTriggerTest.set_random_servername(pkt)
+ pkt = ChinaTriggerTest.set_random_time(pkt)
+ pkt = ChinaTriggerTest.set_random_field(pkt)
+ return pkt
+
+ def build_packets(self, *args, **kw):
+ """
+ Override this method to build scapy packets.
+ """
+ from scapy.all import IP, TCP
+ pkt = "\x16\x03\x01\x00\xcc\x01\x00\x00\xc8"\
+ "\x03\x01\x4f\x12\xe5\x63\x3f\xef\x7d"\
+ "\x20\xb9\x94\xaa\x04\xb0\xc1\xd4\x8c"\
+ "\x50\xcd\xe2\xf9\x2f\xa9\xfb\x78\xca"\
+ "\x02\xa8\x73\xe7\x0e\xa8\xf9\x00\x00"\
+ "\x3a\xc0\x0a\xc0\x14\x00\x39\x00\x38"\
+ "\xc0\x0f\xc0\x05\x00\x35\xc0\x07\xc0"\
+ "\x09\xc0\x11\xc0\x13\x00\x33\x00\x32"\
+ "\xc0\x0c\xc0\x0e\xc0\x02\xc0\x04\x00"\
+ "\x04\x00\x05\x00\x2f\xc0\x08\xc0\x12"\
+ "\x00\x16\x00\x13\xc0\x0d\xc0\x03\xfe"\
+ "\xff\x00\x0a\x00\xff\x01\x00\x00\x65"\
+ "\x00\x00\x00\x1d\x00\x1b\x00\x00\x18"\
+ "\x77\x77\x77\x2e\x67\x6e\x6c\x69\x67"\
+ "\x78\x7a\x70\x79\x76\x6f\x35\x66\x76"\
+ "\x6b\x64\x2e\x63\x6f\x6d\x00\x0b\x00"\
+ "\x04\x03\x00\x01\x02\x00\x0a\x00\x34"\
+ "\x00\x32\x00\x01\x00\x02\x00\x03\x00"\
+ "\x04\x00\x05\x00\x06\x00\x07\x00\x08"\
+ "\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00"\
+ "\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11"\
+ "\x00\x12\x00\x13\x00\x14\x00\x15\x00"\
+ "\x16\x00\x17\x00\x18\x00\x19\x00\x23"\
+ "\x00\x00"
+
+ pkt = ChinaTriggerTest.set_all_random_fields(pkt)
+ pkts = [IP(dst=self.dst)/TCP(dport=self.port)/pkt]
+ for x in range(len(pkt)):
+ mutation = IP(dst=self.dst)/TCP(dport=self.port)/ChinaTriggerTest.mutate(pkt, x)
+ pkts.append(mutation)
+ return pkts
+
+ def load_assets(self):
+ if self.local_options:
+ self.dst = self.local_options['dst']
+ self.port = int(self.local_options['port'])
+ if self.local_options['pcap']:
+ self.pcapfile = self.local_options['pcap']
+ if not self.port or not self.dst:
+ pass
+
+ return {}
+
+#chinatrigger = ChinaTriggerTest(None, None, None)
+
diff --git a/old-to-be-ported-code/old-api/daphn3.py b/old-to-be-ported-code/old-api/daphn3.py
new file mode 100644
index 0000000..bf4d60d
--- /dev/null
+++ b/old-to-be-ported-code/old-api/daphn3.py
@@ -0,0 +1,152 @@
+"""
+This is a self-generated test created by scaffolding.py.
+You will need to fill it up with all your necessities.
+Safe hacking :).
+"""
+from zope.interface import implements
+from twisted.python import usage
+from twisted.plugin import IPlugin
+from twisted.internet import protocol, endpoints
+
+from ooni.plugoo import reports
+from ooni.plugoo.tests import ITest, OONITest
+from ooni.plugoo.assets import Asset
+from ooni.protocols import daphn3
+from ooni.utils import log
+
+class Daphn3ClientProtocol(daphn3.Daphn3Protocol):
+ def connectionMade(self):
+ self.next_state()
+
+class Daphn3ClientFactory(protocol.ClientFactory):
+ protocol = Daphn3ClientProtocol
+ mutator = None
+ steps = None
+ test = None
+
+ def buildProtocol(self, addr):
+ p = self.protocol()
+ p.factory = self
+ p.test = self.test
+
+ if self.steps:
+ p.steps = self.steps
+
+ if not self.mutator:
+ self.mutator = daphn3.Mutator(p.steps)
+
+ else:
+ print "Moving on to next mutation"
+ self.mutator.next()
+
+ p.mutator = self.mutator
+ p.current_state = self.mutator.state()
+ return p
+
+ def clientConnectionFailed(self, reason):
+ print "We failed connecting the the OONIB"
+ print "Cannot perform test. Perhaps it got blocked?"
+ print "Please report this to tor-assistants(a)torproject.org"
+ self.test.result['error'] = ('Failed in connecting to OONIB', reason)
+ self.test.end(d)
+
+ def clientConnectionLost(self, reason):
+ print "Connection Lost."
+
+class daphn3Args(usage.Options):
+ optParameters = [['pcap', 'f', None,
+ 'PCAP to read for generating the YAML output'],
+
+ ['output', 'o', 'daphn3.yaml',
+ 'What file should be written'],
+
+ ['yaml', 'y', None,
+ 'The input file to the test'],
+
+ ['host', 'h', None, 'Target Hostname'],
+ ['port', 'p', None, 'Target port number'],
+ ['resume', 'r', 0, 'Resume at this index']]
+
+class daphn3Test(OONITest):
+ implements(IPlugin, ITest)
+
+ shortName = "daphn3"
+ description = "daphn3"
+ requirements = None
+ options = daphn3Args
+ blocking = False
+
+ local_options = None
+
+ steps = None
+
+ def initialize(self):
+ if not self.local_options:
+ self.end()
+ return
+
+ self.factory = Daphn3ClientFactory()
+ self.factory.test = self
+
+ if self.local_options['pcap']:
+ self.tool = True
+
+ elif self.local_options['yaml']:
+ self.steps = daphn3.read_yaml(self.local_options['yaml'])
+
+ else:
+ log.msg("Not enough inputs specified to the test")
+ self.end()
+
+ def runTool(self):
+ import yaml
+ pcap = daphn3.read_pcap(self.local_options['pcap'])
+ f = open(self.local_options['output'], 'w')
+ f.write(yaml.dump(pcap))
+ f.close()
+
+ def control(self, exp_res, args):
+ try:
+ mutation = self.factory.mutator.get(0)
+ self.result['censored'] = False
+ except:
+ mutation = None
+
+ return {'mutation_number': args['mutation'],
+ 'value': mutation}
+
+ def _failure(self, *argc, **kw):
+ self.result['censored'] = True
+ self.result['error'] = ('Failed in connecting', (argc, kw))
+ self.end()
+
+ def experiment(self, args):
+ log.msg("Doing mutation %s" % args['mutation'])
+ self.factory.steps = self.steps
+ host = self.local_options['host']
+ port = int(self.local_options['port'])
+ log.msg("Connecting to %s:%s" % (host, port))
+
+ if self.ended:
+ return
+
+ endpoint = endpoints.TCP4ClientEndpoint(self.reactor, host, port)
+ d = endpoint.connect(self.factory)
+ d.addErrback(self._failure)
+ return d
+
+ def load_assets(self):
+ if not self.local_options:
+ return {}
+ if not self.steps:
+ print "Error: No assets!"
+ self.end()
+ return {}
+ mutations = 0
+ for x in self.steps:
+ mutations += len(x['data'])
+ return {'mutation': range(mutations)}
+
+# We need to instantiate it otherwise getPlugins does not detect it
+# XXX Find a way to load plugins without instantiating them.
+#daphn3test = daphn3Test(None, None, None)
diff --git a/old-to-be-ported-code/old-api/domclass.py b/old-to-be-ported-code/old-api/domclass.py
new file mode 100644
index 0000000..3080c40
--- /dev/null
+++ b/old-to-be-ported-code/old-api/domclass.py
@@ -0,0 +1,216 @@
+#!/usr/bin/env python
+#-*- encoding: utf-8 -*-
+#
+# domclass
+# ********
+#
+# :copyright: (c) 2012 by Arturo Filastò
+# :license: see LICENSE for more details.
+#
+# how this works
+# --------------
+#
+# This classifier uses the DOM structure of a website to determine how similar
+# the two sites are.
+# The procedure we use is the following:
+# * First we parse all the DOM tree of the web page and we build a list of
+# TAG parent child relationships (ex. <html><a><b></b></a><c></c></html> =>
+# (html, a), (a, b), (html, c)).
+#
+# * We then use this information to build a matrix (M) where m[i][j] = P(of
+# transitioning from tag[i] to tag[j]). If tag[i] does not exist, P() = 0.
+# Note: M is a square matrix that is number_of_tags wide.
+#
+# * We then calculate the eigenvectors (v_i) and eigenvalues (e) of M.
+#
+# * The correlation between page A and B is given via this formula:
+# correlation = dot_product(e_A, e_B), where e_A and e_B are
+# respectively the eigenvalues for the probability matrix A and the
+# probability matrix B.
+#
+
+try:
+ import numpy
+except:
+ print "Error numpy not installed!"
+
+import yaml
+from zope.interface import implements
+from twisted.python import usage
+from twisted.plugin import IPlugin
+from ooni.plugoo.tests import ITest, OONITest
+from ooni.plugoo.assets import Asset
+from ooni.utils import log
+from ooni.protocols.http import HTTPTest
+
+class domclassArgs(usage.Options):
+ optParameters = [['output', 'o', None, 'Output to write'],
+ ['file', 'f', None, 'Corpus file'],
+ ['fileb', 'b', None, 'Corpus file'],
+ ['urls', 'u', None, 'URL List'],
+ ['resume', 'r', 0, 'Resume at this index']]
+
+# All HTML4 tags
+# XXX add link to W3C page where these came from
+alltags = ['A', 'ABBR', 'ACRONYM', 'ADDRESS', 'APPLET', 'AREA', 'B', 'BASE',
+ 'BASEFONT', 'BD', 'BIG', 'BLOCKQUOTE', 'BODY', 'BR', 'BUTTON', 'CAPTION',
+ 'CENTER', 'CITE', 'CODE', 'COL', 'COLGROUP', 'DD', 'DEL', 'DFN', 'DIR', 'DIV',
+ 'DL', 'DT', 'EM', 'FIELDSET', 'FONT', 'FORM', 'FRAME', 'FRAMESET', 'H1', 'H2',
+ 'H3', 'H4', 'H5', 'H6', 'HEAD', 'HR', 'HTML', 'I', 'IFRAME ', 'IMG',
+ 'INPUT', 'INS', 'ISINDEX', 'KBD', 'LABEL', 'LEGEND', 'LI', 'LINK', 'MAP',
+ 'MENU', 'META', 'NOFRAMES', 'NOSCRIPT', 'OBJECT', 'OL', 'OPTGROUP', 'OPTION',
+ 'P', 'PARAM', 'PRE', 'Q', 'S', 'SAMP', 'SCRIPT', 'SELECT', 'SMALL', 'SPAN',
+ 'STRIKE', 'STRONG', 'STYLE', 'SUB', 'SUP', 'TABLE', 'TBODY', 'TD',
+ 'TEXTAREA', 'TFOOT', 'TH', 'THEAD', 'TITLE', 'TR', 'TT', 'U', 'UL', 'VAR']
+
+# Reduced subset of only the most common tags
+commontags = ['A', 'B', 'BLOCKQUOTE', 'BODY', 'BR', 'BUTTON', 'CAPTION',
+ 'CENTER', 'CITE', 'CODE', 'COL', 'DD', 'DIV',
+ 'DL', 'DT', 'EM', 'FIELDSET', 'FONT', 'FORM', 'FRAME', 'FRAMESET', 'H1', 'H2',
+ 'H3', 'H4', 'H5', 'H6', 'HEAD', 'HR', 'HTML', 'IFRAME ', 'IMG',
+ 'INPUT', 'INS', 'LABEL', 'LEGEND', 'LI', 'LINK', 'MAP',
+ 'MENU', 'META', 'NOFRAMES', 'NOSCRIPT', 'OBJECT', 'OL', 'OPTION',
+ 'P', 'PRE', 'SCRIPT', 'SELECT', 'SMALL', 'SPAN',
+ 'STRIKE', 'STRONG', 'STYLE', 'SUB', 'SUP', 'TABLE', 'TBODY', 'TD',
+ 'TEXTAREA', 'TFOOT', 'TH', 'THEAD', 'TITLE', 'TR', 'TT', 'U', 'UL']
+
+# The tags we are interested in using for our analysis
+thetags = ['A', 'DIV', 'FRAME', 'H1', 'H2',
+ 'H3', 'H4', 'IFRAME ', 'INPUT',
+ 'LABEL','LI', 'P', 'SCRIPT', 'SPAN',
+ 'STYLE', 'TR']
+
+def compute_probability_matrix(dataset):
+ """
+ Compute the probability matrix based on the input dataset.
+
+ :dataset: an array of pairs representing the parent child relationships.
+ """
+ import itertools
+ ret = {}
+ matrix = numpy.zeros((len(thetags) + 1, len(thetags) + 1))
+
+ for data in dataset:
+ x = data[0].upper()
+ y = data[1].upper()
+ try:
+ x = thetags.index(x)
+ except:
+ x = len(thetags)
+
+ try:
+ y = thetags.index(y)
+ except:
+ y = len(thetags)
+
+ matrix[x,y] += 1
+
+ for x in xrange(len(thetags) + 1):
+ possibilities = 0
+ for y in matrix[x]:
+ possibilities += y
+
+ for i in xrange(len(matrix[x])):
+ if possibilities != 0:
+ matrix[x][i] = matrix[x][i]/possibilities
+
+ return matrix
+
+def compute_eigenvalues(matrix):
+ """
+ Returns the eigenvalues of the supplied square matrix.
+
+ :matrix: must be a square matrix and diagonalizable.
+ """
+ return numpy.linalg.eigvals(matrix)
+
+def readDOM(content=None, filename=None):
+ """
+ Parses the DOM of the HTML page and returns an array of parent, child
+ pairs.
+
+ :content: the content of the HTML page to be read.
+
+ :filename: the filename to be read from for getting the content of the
+ page.
+ """
+ from bs4 import BeautifulSoup
+
+ if filename:
+ f = open(filename)
+ content = ''.join(f.readlines())
+ f.close()
+
+ dom = BeautifulSoup(content)
+ couples = []
+ for x in dom.findAll():
+ couples.append((str(x.parent.name), str(x.name)))
+
+ return couples
+
+class domclassTest(HTTPTest):
+ implements(IPlugin, ITest)
+
+ shortName = "domclass"
+ description = "domclass"
+ requirements = None
+ options = domclassArgs
+ blocking = False
+
+ follow_redirects = True
+ #tool = True
+
+ def runTool(self):
+ site_a = readDOM(filename=self.local_options['file'])
+ site_b = readDOM(filename=self.local_options['fileb'])
+ a = {}
+ a['matrix'] = compute_probability_matrix(site_a)
+ a['eigen'] = compute_eigenvalues(a['matrix'])
+
+ self.result['eigenvalues'] = a['eigen']
+ b = {}
+ b['matrix'] = compute_probability_matrix(site_b)
+ b['eigen'] = compute_eigenvalues(b['matrix'])
+
+ #print "A: %s" % a
+ #print "B: %s" % b
+ correlation = numpy.vdot(a['eigen'],b['eigen'])
+ correlation /= numpy.linalg.norm(a['eigen'])*numpy.linalg.norm(b['eigen'])
+ correlation = (correlation + 1)/2
+ print "Corelation: %s" % correlation
+ self.end()
+ return a
+
+ def processResponseBody(self, data):
+ site_a = readDOM(data)
+ #site_b = readDOM(self.local_options['fileb'])
+ a = {}
+ a['matrix'] = compute_probability_matrix(site_a)
+ a['eigen'] = compute_eigenvalues(a['matrix'])
+
+
+ if len(data) == 0:
+ self.result['eigenvalues'] = None
+ self.result['matrix'] = None
+ else:
+ self.result['eigenvalues'] = a['eigen']
+ #self.result['matrix'] = a['matrix']
+ #self.result['content'] = data[:200]
+ #b = compute_matrix(site_b)
+ print "A: %s" % a
+ return a['eigen']
+
+ def load_assets(self):
+ if self.local_options:
+ if self.local_options['file']:
+ self.tool = True
+ return {}
+ elif self.local_options['urls']:
+ return {'url': Asset(self.local_options['urls'])}
+ else:
+ self.end()
+ return {}
+ else:
+ return {}
+
+#domclass = domclassTest(None, None, None)
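To make the procedure described in the domclass.py header above concrete (parent/child tag pairs, transition-probability matrix, eigenvalues, normalized dot product), here is a minimal standalone sketch. It assumes only numpy and BeautifulSoup are installed; the tag vocabulary, function names, and HTML snippets are illustrative and are not taken from the patch.

import numpy
from bs4 import BeautifulSoup

TAGS = ['a', 'div', 'p', 'span']  # toy tag vocabulary; the last matrix slot catches everything else

def tag_pairs(html):
    """Return (parent, child) tag-name pairs for every element in the page."""
    soup = BeautifulSoup(html, "html.parser")
    return [(el.parent.name, el.name) for el in soup.find_all() if el.parent is not None]

def transition_matrix(pairs):
    """Estimate P(child tag j | parent tag i); unknown tags go in the last row/column."""
    n = len(TAGS) + 1
    m = numpy.zeros((n, n))
    for parent, child in pairs:
        i = TAGS.index(parent) if parent in TAGS else n - 1
        j = TAGS.index(child) if child in TAGS else n - 1
        m[i, j] += 1
    row_sums = m.sum(axis=1, keepdims=True)
    # Normalize each row to probabilities, leaving all-zero rows as zeros.
    return numpy.divide(m, row_sums, out=numpy.zeros_like(m), where=row_sums != 0)

def correlation(html_a, html_b):
    """Normalized dot product of the two eigenvalue spectra, mapped into [0, 1]."""
    e_a = numpy.linalg.eigvals(transition_matrix(tag_pairs(html_a)))
    e_b = numpy.linalg.eigvals(transition_matrix(tag_pairs(html_b)))
    c = numpy.vdot(e_a, e_b) / (numpy.linalg.norm(e_a) * numpy.linalg.norm(e_b))
    return (c.real + 1) / 2

if __name__ == "__main__":
    page = "<html><body><div><a>x</a><p>y</p></div></body></html>"
    altered = "<html><body><div><span>blocked</span></div></body></html>"
    print("self-correlation: %s" % correlation(page, page))    # exactly 1.0
    print("cross-correlation: %s" % correlation(page, altered))

Note that numpy.linalg.eigvals() does not order the two spectra consistently, so the dot product is only a rough similarity heuristic; the patch above relies on the same comparison.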
diff --git a/old-to-be-ported-code/old-api/dropin.cache b/old-to-be-ported-code/old-api/dropin.cache
new file mode 100755
index 0000000..65c2187
--- /dev/null
+++ b/old-to-be-ported-code/old-api/dropin.cache
@@ -0,0 +1,243 @@
+(dp1
+S'tcpconnect'
+p2
+ccopy_reg
+_reconstructor
+p3
+(ctwisted.plugin
+CachedDropin
+p4
+c__builtin__
+object
+p5
+NtRp6
+(dp7
+S'moduleName'
+p8
+S'ooni.plugins.tcpconnect'
+p9
+sS'description'
+p10
+S'\nThis is a self genrated test created by scaffolding.py.\nyou will need to fill it up with all your necessities.\nSafe hacking :).\n'
+p11
+sS'plugins'
+p12
+(lp13
+g3
+(ctwisted.plugin
+CachedPlugin
+p14
+g5
+NtRp15
+(dp16
+S'provided'
+p17
+(lp18
+ctwisted.plugin
+IPlugin
+p19
+acooni.plugoo.interface
+ITest
+p20
+asS'dropin'
+p21
+g6
+sS'name'
+p22
+S'tcpconnect'
+p23
+sg10
+NsbasbsS'domclass'
+p24
+g3
+(g4
+g5
+NtRp25
+(dp26
+g8
+S'ooni.plugins.domclass'
+p27
+sg10
+Nsg12
+(lp28
+g3
+(g14
+g5
+NtRp29
+(dp30
+g17
+(lp31
+g19
+ag20
+asg21
+g25
+sg22
+S'domclass'
+p32
+sg10
+NsbasbsS'bridget'
+p33
+g3
+(g4
+g5
+NtRp34
+(dp35
+g8
+S'ooni.plugins.bridget'
+p36
+sg10
+Nsg12
+(lp37
+g3
+(g14
+g5
+NtRp38
+(dp39
+g17
+(lp40
+g19
+ag20
+asg21
+g34
+sg22
+S'bridget'
+p41
+sg10
+S"\n XXX fill me in\n\n :ivar config:\n An :class:`ooni.lib.txtorcon.TorConfig` instance.\n :ivar relays:\n A list of all provided relays to test.\n :ivar bridges:\n A list of all provided bridges to test.\n :ivar socks_port:\n Integer for Tor's SocksPort.\n :ivar control_port:\n Integer for Tor's ControlPort.\n :ivar transport:\n String defining the Tor's ClientTransportPlugin, for testing \n a bridge's pluggable transport functionality.\n :ivar tor_binary:\n Path to the Tor binary to use, e.g. '/usr/sbin/tor'\n "
+p42
+sbasbsS'daphn3'
+p43
+g3
+(g4
+g5
+NtRp44
+(dp45
+g8
+S'plugins.daphn3'
+p46
+sg10
+S'\nThis is a self genrated test created by scaffolding.py.\nyou will need to fill it up with all your necessities.\nSafe hacking :).\n'
+p47
+sg12
+(lp48
+g3
+(g14
+g5
+NtRp49
+(dp50
+g17
+(lp51
+g19
+ag20
+asg21
+g44
+sg22
+S'daphn3test'
+p52
+sg10
+NsbasbsS'httpt'
+p53
+g3
+(g4
+g5
+NtRp54
+(dp55
+g8
+S'ooni.plugins.httpt'
+p56
+sg10
+S'\nThis is a self genrated test created by scaffolding.py.\nyou will need to fill it up with all your necessities.\nSafe hacking :).\n'
+p57
+sg12
+(lp58
+sbsS'chinatrigger'
+p59
+g3
+(g4
+g5
+NtRp60
+(dp61
+g8
+S'plugins.chinatrigger'
+p62
+sg10
+Nsg12
+(lp63
+g3
+(g14
+g5
+NtRp64
+(dp65
+g17
+(lp66
+g19
+ag20
+asg21
+g60
+sg22
+S'chinatrigger'
+p67
+sg10
+S'\n This test is a OONI based implementation of the C tool written\n by Philipp Winter to engage chinese probes in active scanning.\n\n Example of running it:\n ./ooni/ooniprobe.py chinatrigger -d 127.0.0.1 -p 8080 -f bla.pcap\n '
+p68
+sbasbsS'dnstamper'
+p69
+g3
+(g4
+g5
+NtRp70
+(dp71
+g8
+S'ooni.plugins.dnstamper'
+p72
+sg10
+S'\n dnstamper\n *********\n\n This test resolves DNS for a list of domain names, one per line, in the\n file specified in the ooni-config under the setting "dns_experiment". If\n the file is top-1m.txt, the test will be run using Amazon\'s list of top\n one million domains. The experimental dns servers to query should\n be specified one per line in assets/dns_servers.txt.\n\n The test reports censorship if the cardinality of the intersection of\n the query result set from the control server and the query result set\n from the experimental server is zero, which is to say, if the two sets\n have no matching results whatsoever.\n\n NOTE: This test frequently results in false positives due to GeoIP-based\n load balancing on major global sites such as google, facebook, and\n youtube, etc.\n\n :author: Isis Lovecruft, Arturo Filast\xc3\xb2\n :license: see LICENSE for more details\n\n TODO:\n * Finish porting to twisted\n
* Finish the client.Resolver() subclass and test it\n * Use the DNS tests from captiveportal\n * Use plugoo/reports.py for final data\n'
+p73
+sg12
+(lp74
+g3
+(g14
+g5
+NtRp75
+(dp76
+g17
+(lp77
+g19
+ag20
+asg21
+g70
+sg22
+S'dnstamper'
+p78
+sg10
+S'\n XXX fill me in\n '
+p79
+sbasbsS'blocking'
+p80
+g3
+(g4
+g5
+NtRp81
+(dp82
+g8
+S'plugins.blocking'
+p83
+sg10
+Nsg12
+(lp84
+g3
+(g14
+g5
+NtRp85
+(dp86
+g17
+(lp87
+g19
+ag20
+asg21
+g81
+sg22
+S'blocking'
+p88
+sg10
+Nsbasbs.
\ No newline at end of file
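The cached dnstamper description above outlines the test in prose: resolve a list of hostnames against a control DNS server and an experimental one, and report censorship only when the two answer sets have an empty intersection. A rough standalone sketch of that comparison follows, written with dnspython (>= 2.0) rather than the Twisted resolver that ooni-probe itself uses; the server addresses and function names are placeholders.

import dns.exception
import dns.resolver

def lookup(hostname, nameserver):
    """Return the set of A-record addresses hostname resolves to via nameserver."""
    resolver = dns.resolver.Resolver(configure=False)
    resolver.nameservers = [nameserver]
    resolver.lifetime = 5
    try:
        return {rr.address for rr in resolver.resolve(hostname, "A")}
    except dns.exception.DNSException:
        return set()

def looks_tampered(hostname, control_server, experiment_server):
    """True when the control and experimental answer sets share no addresses at all."""
    control = lookup(hostname, control_server)
    experiment = lookup(hostname, experiment_server)
    return len(control & experiment) == 0

if __name__ == "__main__":
    # 8.8.8.8 as control and 192.168.1.1 as the resolver under test are only examples.
    print(looks_tampered("torproject.org", "8.8.8.8", "192.168.1.1"))

As the original docstring warns, GeoIP-based load balancing on large sites makes a strict empty-intersection check prone to false positives.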
diff --git a/old-to-be-ported-code/old-api/httpt.py b/old-to-be-ported-code/old-api/httpt.py
new file mode 100644
index 0000000..358f1ea
--- /dev/null
+++ b/old-to-be-ported-code/old-api/httpt.py
@@ -0,0 +1,94 @@
+"""
+This is a self-generated test created by scaffolding.py.
+You will need to fill it up with all your necessities.
+Safe hacking :).
+"""
+from zope.interface import implements
+from twisted.python import usage
+from twisted.plugin import IPlugin
+from ooni.plugoo.tests import ITest, OONITest
+from ooni.plugoo.assets import Asset
+from ooni.protocols import http
+from ooni.utils import log
+
+class httptArgs(usage.Options):
+ optParameters = [['urls', 'f', None, 'Urls file'],
+ ['url', 'u', 'http://torproject.org/', 'Test single site'],
+ ['resume', 'r', 0, 'Resume at this index'],
+ ['rules', 'y', None, 'Specify the redirect rules file']]
+
+class httptTest(http.HTTPTest):
+ implements(IPlugin, ITest)
+
+ shortName = "httpt"
+ description = "httpt"
+ requirements = None
+ options = httptArgs
+ blocking = False
+
+
+ def testPattern(self, value, pattern, type):
+ if type == 'eq':
+ return value == pattern
+ elif type == 're':
+ import re
+ if re.match(pattern, value):
+ return True
+ else:
+ return False
+ else:
+ return None
+
+ def testPatterns(self, patterns, location):
+ test_result = False
+
+ if type(patterns) == list:
+ for pattern in patterns:
+ test_result |= self.testPattern(location, pattern['value'], pattern['type'])
+ else:
+ test_result |= self.testPattern(location, patterns['value'], patterns['type'])
+
+ return test_result
+
+ def testRules(self, rules, location):
+ result = {}
+ blocked = False
+ for rule, value in rules.items():
+ current_rule = {}
+ current_rule['name'] = value['name']
+ current_rule['patterns'] = value['patterns']
+ current_rule['test'] = self.testPatterns(value['patterns'], location)
+ blocked |= current_rule['test']
+ result[rule] = current_rule
+ result['blocked'] = blocked
+ return result
+
+ def processRedirect(self, location):
+ self.result['redirect'] = None
+ try:
+ rules_file = self.local_options['rules']
+ import yaml
+ rules = yaml.load(open(rules_file))
+ log.msg("Testing rules %s" % rules)
+ redirect = self.testRules(rules, location)
+ self.result['redirect'] = redirect
+ except TypeError:
+ log.msg("No rules file. Got a redirect, but nothing to do.")
+
+
+ def control(self, experiment_result, args):
+ print self.response
+ print self.request
+ # What you return here ends up inside of the report.
+ log.msg("Running control")
+ return {}
+
+ def load_assets(self):
+ if self.local_options and self.local_options['urls']:
+ return {'url': Asset(self.local_options['urls'])}
+ else:
+ return {}
+
+# We need to instantiate it otherwise getPlugins does not detect it
+# XXX Find a way to load plugins without instantiating them.
+#httpt = httptTest(None, None, None)
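
For reference, processRedirect() above hands whatever yaml.load() returns straight to
testRules()/testPatterns(); the structure those helpers expect looks roughly like the
sketch below (inferred from the code -- the rule name and URLs are illustrative only,
not part of this commit):

    # Hypothetical redirect-rules structure, as testRules() would receive it
    # after yaml.load(); only the 'eq' and 're' pattern types are handled.
    example_rules = {
        'squid_denied': {
            'name': 'Squid denied page',
            'patterns': [
                {'type': 'eq', 'value': 'http://10.66.66.66/denied.html'},
                {'type': 're', 'value': 'http://.*/denied\\.html'},
            ],
        },
    }
    # testRules(example_rules, location) ORs the per-pattern matches together
    # and sets result['blocked'] when any rule matches the redirect Location.
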
diff --git a/old-to-be-ported-code/old-api/tcpconnect.py b/old-to-be-ported-code/old-api/tcpconnect.py
new file mode 100644
index 0000000..7758a9e
--- /dev/null
+++ b/old-to-be-ported-code/old-api/tcpconnect.py
@@ -0,0 +1,65 @@
+"""
+This is a self genrated test created by scaffolding.py.
+you will need to fill it up with all your necessities.
+Safe hacking :).
+"""
+from zope.interface import implements
+from twisted.python import usage
+from twisted.plugin import IPlugin
+from twisted.internet.protocol import Factory, Protocol
+from twisted.internet.endpoints import TCP4ClientEndpoint
+
+from ooni.plugoo.interface import ITest
+from ooni.plugoo.tests import OONITest
+from ooni.plugoo.assets import Asset
+from ooni.utils import log
+
+class tcpconnectArgs(usage.Options):
+ optParameters = [['asset', 'a', None, 'File containing IP:PORT combinations, one per line.'],
+ ['resume', 'r', 0, 'Resume at this index']]
+
+class tcpconnectTest(OONITest):
+ implements(IPlugin, ITest)
+
+ shortName = "tcpconnect"
+ description = "tcpconnect"
+ requirements = None
+ options = tcpconnectArgs
+ blocking = False
+
+ def experiment(self, args):
+ try:
+ host, port = args['asset'].split(':')
+ except:
+ raise Exception("Error in parsing asset. Wrong format?")
+ class DummyFactory(Factory):
+ def buildProtocol(self, addr):
+ return Protocol()
+
+ def gotProtocol(p):
+ p.transport.loseConnection()
+ log.msg("Got a connection!")
+ log.msg(str(p))
+ return {'result': True, 'target': [host, port]}
+
+ def gotError(err):
+ log.msg("Had error :(")
+ log.msg(err)
+ return {'result': False, 'target': [host, port]}
+
+ # What you return here gets handed as input to control
+ point = TCP4ClientEndpoint(self.reactor, host, int(port))
+ d = point.connect(DummyFactory())
+ d.addCallback(gotProtocol)
+ d.addErrback(gotError)
+ return d
+
+ def load_assets(self):
+ if self.local_options:
+ return {'asset': Asset(self.local_options['asset'])}
+ else:
+ return {}
+
+# We need to instantiate it otherwise getPlugins does not detect it
+# XXX Find a way to load plugins without instantiating them.
+#tcpconnect = tcpconnectTest(None, None, None)
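
For reference, the endpoint dance in experiment() above works just as well outside the
plugoo machinery; a minimal self-contained sketch (the target host and port are
placeholders):

    from twisted.internet import reactor
    from twisted.internet.protocol import Factory, Protocol
    from twisted.internet.endpoints import TCP4ClientEndpoint

    class DummyFactory(Factory):
        def buildProtocol(self, addr):
            return Protocol()

    def check_tcp(host, port):
        # Succeeds if the TCP handshake completes, fails otherwise -- the
        # same signal tcpconnectTest.experiment() reports.
        d = TCP4ClientEndpoint(reactor, host, port).connect(DummyFactory())
        def got_protocol(p):
            p.transport.loseConnection()
            return {'result': True, 'target': [host, port]}
        def got_error(err):
            return {'result': False, 'target': [host, port]}
        return d.addCallbacks(got_protocol, got_error)

    if __name__ == '__main__':
        check_tcp('127.0.0.1', 80).addCallback(lambda _: reactor.stop())
        reactor.run()
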
diff --git a/old-to-be-ported-code/old-api/tcpscan.py b/old-to-be-ported-code/old-api/tcpscan.py
new file mode 100644
index 0000000..b371c88
--- /dev/null
+++ b/old-to-be-ported-code/old-api/tcpscan.py
@@ -0,0 +1,84 @@
+"""
+ TCP Port Scanner
+ ****************
+
+ Does a TCP connect scan on the IP:port pairs.
+
+"""
+import os
+from gevent import socket
+from datetime import datetime
+import socks
+
+from plugoo.assets import Asset
+from plugoo.tests import Test
+
+__plugoo__ = "TCP Port Scanner"
+__desc__ = "This a test template to be used to build your own tests"
+
+class TCPScanAsset(Asset):
+ """
+ This is the asset that should be used by the Test. It will
+ contain all the code responsible for parsing the asset file
+ and should be passed on instantiation to the test.
+ """
+ def __init__(self, file=None):
+ self = Asset.__init__(self, file)
+
+
+class TCPScan(Test):
+ """
+ The main Test class
+ """
+
+ def experiment(self, *a, **kw):
+ """
+ Fill this up with the tasks that should be performed
+ on the "dirty" network and should be compared with the
+ control.
+ """
+ addr = kw['data']
+ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ res = False
+ try:
+ self.logger.debug('Doing a connection to %s' % addr)
+ s.connect((addr.split(':')[0], int(addr.split(':')[1])))
+ res = True
+ except socket.error, msg:
+ self.logger.debug('Connection failed to %s: %s' % (addr, msg))
+
+ finally:
+ s.close()
+
+ return {'Time': datetime.now(),
+ 'Address': addr,
+ 'Status': res}
+
+ def control(self):
+ """
+ Fill this up with the control related code.
+ """
+ return True
+
+def run(ooni, asset=None):
+ """
+ This is the function that will be called by OONI
+ and it is responsible for instantiating and passing
+ the arguments to the Test class.
+ """
+ config = ooni.config
+
+ # This is the assets array to be passed to the run function of
+ # the test
+ if asset:
+ assets = [TCPScanAsset(asset)]
+ else:
+ assets = [TCPScanAsset(os.path.join(config.main.assetdir, \
+ "tcpscan.txt"))]
+
+ # Instantiate the Test
+ thetest = TCPScan(ooni)
+ ooni.logger.info("starting TCP Scan...")
+ # Run the test with argument assets
+ thetest.run(assets)
+ ooni.logger.info("finished.")
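
Stripped of the gevent and plugoo scaffolding, the scan above is a three-line connect
check; the same thing with only the standard library (the address is a placeholder):

    import socket
    from datetime import datetime

    def tcp_connect_check(addr, timeout=5.0):
        # addr is an "IP:PORT" string, one entry from the asset file.
        host, port = addr.split(':')
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.settimeout(timeout)
        status = False
        try:
            s.connect((host, int(port)))
            status = True
        except (socket.error, socket.timeout):
            pass
        finally:
            s.close()
        return {'Time': datetime.now(), 'Address': addr, 'Status': status}

    # print tcp_connect_check('127.0.0.1:80')
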
diff --git a/old-to-be-ported-code/very-old/TODO.plgoons b/old-to-be-ported-code/very-old/TODO.plgoons
new file mode 100644
index 0000000..ace2a10
--- /dev/null
+++ b/old-to-be-ported-code/very-old/TODO.plgoons
@@ -0,0 +1,79 @@
+We should implement the following as plugoons:
+
+dns_plgoo.py - Various DNS checks
+
+As a start, we should perform a known-good check against a name or list of
+names. As input, we should take an IP address, a name, or a list of names for
+testing; we also take DNS servers for experiment or control data. For output we
+emit UDP or TCP packets - we should support proxying these requests where
+possible, as is the case with TCP but probably not with UDP for certain DNS
+request types.
+
+http_plgoo.py - Various HTTP checks
+
+We should compare two pages and see if they have identical properties.
+At the very least, we should print the important differences - perhaps
+with diff-like output? We should look for fingerprints in the URLs that are
+returned. We should detect 302 redirection.
+
+As input, we should take an IP address, a name, or a list of names for testing;
+we also take a list of headers, such as random user-agent strings and so on.
+We should emit TCP packets and ensure that we do not leak DNS for connections
+that we expect to proxy to a remote network.
+
+latency_plgoo.py - Measure latency for a host or a list of hosts
+
+As input, we should take an IP address, a name, or a list of names for testing;
+We should measure the mean latency from the ooni-probe to the host with various
+traceroute tests. We should also measure the latency between the ooni-probe and
+a given server for any other protocol that is request and response oriented;
+HTTP latency may be calculated by simply tracking the delta between requests
+and responses.
+
+tcptrace_plgoo.py udptrace_plgoo.py icmptrace_plgoo.py - Traceroute suites
+
+tcptrace_plgoo.py should allow for both stray and in-connection traceroute
+modes.
+
+udptrace_plgoo.py should use UDP 53 by default; 0 and 123 are also nice options
+- it may also be nice to simply make a random A record request in a DNS packet
+and use it as the payload for a UDP traceroute.
+
+reversetrace_plgoo.py should give a remote host the client's IP and return the
+output of a traceroute to that IP from the remote host. It will need a remote
+component if run against a web server. It would not need a remote component if
+run against route-views - we can simply telnet over Tor and ask it to trace to
+our detected client IP.
+
+keyword_plgoo.py should take a keyword or a list of keywords for use as a
+payload in a variety of protocols. This should be protocol-aware - DNS keyword
+filtering requires a sniffer to catch stray packets after the censor wins the
+race. HTTP payloads in open connections may be similar, and in practice we'll
+have to fine-tune it.
+
+icsi_plgoo.py - The ICSI Netalyzr tests; we should act as a client for their
+servers. They have dozens of tests and to implement this plgoo, we'll need to
+add many things to ooni. More details here:
+http://netalyzr.icsi.berkeley.edu/faq.html
+http://netalyzr.icsi.berkeley.edu/json/id=example-session
+
+HTML output:
+http://n2.netalyzr.icsi.berkeley.edu/summary/id=43ca208a-3466-82f17207-9bc1-433f-9b43
+
+JSON output:
+http://n2.netalyzr.icsi.berkeley.edu/json/id=43ca208a-3466-82f17207-9bc1-433f-9b43
+
+Netalyzr log:
+http://netalyzr.icsi.berkeley.edu/restore/id=43ca208a-3466-82f17207-9bc1-433f-9b43
+http://n2.netalyzr.icsi.berkeley.edu/transcript/id=43ca208a-3466-82f17207-9bc1-433f-9b43/side=client
+http://n2.netalyzr.icsi.berkeley.edu/transcript/id=43ca208a-3466-82f17207-9bc1-433f-9b43/side=server
+
+sniffer_plgoo.py - We need a generic method for capturing packets during a full
+run - this may be better as a core ooni-probe feature but we should implement
+packet capture in a plugin if it is done nowhere else.
+
+nmap_plgoo.py - We should take a list of hosts and run nmap against each of
+them; many hosts are collected during testing, and they should be scanned
+with a reasonable set of options, for example
+"-A -O -T4 -sT --top-ports=10000".
+
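
Some of these are quick to prototype; for instance, the request/response delta
mentioned for latency_plgoo.py is only a few lines with urllib2 (a rough sketch, not a
plgoo yet -- the URL is a placeholder):

    import time
    import urllib2

    def http_latency(url, samples=3):
        # Rough HTTP latency: the wall-clock delta between sending the
        # request and reading the first byte of the response, averaged.
        deltas = []
        for _ in range(samples):
            start = time.time()
            response = urllib2.urlopen(url)
            response.read(1)
            deltas.append(time.time() - start)
            response.close()
        return sum(deltas) / len(deltas)

    # print http_latency('http://torproject.org/')
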
diff --git a/old-to-be-ported-code/very-old/TO_BE_PORTED b/old-to-be-ported-code/very-old/TO_BE_PORTED
new file mode 100644
index 0000000..49ce5e0
--- /dev/null
+++ b/old-to-be-ported-code/very-old/TO_BE_PORTED
@@ -0,0 +1,14 @@
+
+The tests in this directory are very old, and have been ported neither to
+Twisted nor to the new twisted.trial API framework. They are not old,
+however, in the sense of the *seriously old* OONI code, which was written
+two years ago.
+
+These tests should be updated at least to use Twisted.
+
+If you want to hack on something carefree, feel free to mess with these files,
+because it would be difficult not to improve on them.
+
+<(A)3
+isis
+0x2cdb8b35
diff --git a/old-to-be-ported-code/very-old/ooni-probe.diff b/old-to-be-ported-code/very-old/ooni-probe.diff
new file mode 100644
index 0000000..fc61d3f
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni-probe.diff
@@ -0,0 +1,358 @@
+diff --git a/TODO b/TODO
+index c2e19af..51fa559 100644
+--- a/TODO
++++ b/TODO
+@@ -293,3 +293,142 @@ VIA Rail MITM's SSL In Ottawa:
+ Jul 22 17:47:21.983 [Warning] Problem bootstrapping. Stuck at 85%: Finishing handshake with first hop. (DONE; DONE; count 13; recommendation warn)
+
+ http://wireless.colubris.com:81/goform/HtmlLoginRequest?username=al1852&pas…
++
++VIA Rail Via header:
++
++HTTP/1.0 301 Moved Permanently
++Location: http://www.google.com/
++Content-Type: text/html; charset=UTF-8
++Date: Sat, 23 Jul 2011 02:21:30 GMT
++Expires: Mon, 22 Aug 2011 02:21:30 GMT
++Cache-Control: public, max-age=2592000
++Server: gws
++Content-Length: 219
++X-XSS-Protection: 1; mode=block
++X-Cache: MISS from cache_server
++X-Cache-Lookup: MISS from cache_server:3128
++Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
++Connection: close
++
++<HTML><HEAD><meta http-equiv="content-type" content="text/html;charset=utf-8">
++<TITLE>301 Moved</TITLE></HEAD><BODY>
++<H1>301 Moved</H1>
++The document has moved
++<A HREF="http://www.google.com/">here</A>.
++</BODY></HTML>
++
++
++blocked site:
++
++HTTP/1.0 302 Moved Temporarily
++Server: squid/2.6.STABLE21
++Date: Sat, 23 Jul 2011 02:22:17 GMT
++Content-Length: 0
++Location: http://10.66.66.66/denied.html
++
++invalid request response:
++
++$ nc 8.8.8.8 80
++hjdashjkdsahjkdsa
++HTTP/1.0 400 Bad Request
++Server: squid/2.6.STABLE21
++Date: Sat, 23 Jul 2011 02:22:44 GMT
++Content-Type: text/html
++Content-Length: 1178
++Expires: Sat, 23 Jul 2011 02:22:44 GMT
++X-Squid-Error: ERR_INVALID_REQ 0
++X-Cache: MISS from cache_server
++X-Cache-Lookup: NONE from cache_server:3128
++Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
++Proxy-Connection: close
++
++<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
++<HTML><HEAD><META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
++<TITLE>ERROR: The requested URL could not be retrieved</TITLE>
++<STYLE type="text/css"><!--BODY{background-color:#ffffff;font-family:verdana,sans-serif}PRE{font-family:sans-serif}--></STYLE>
++</HEAD><BODY>
++<H1>ERROR</H1>
++<H2>The requested URL could not be retrieved</H2>
++<HR noshade size="1px">
++<P>
++While trying to process the request:
++<PRE>
++hjdashjkdsahjkdsa
++
++</PRE>
++<P>
++The following error was encountered:
++<UL>
++<LI>
++<STRONG>
++Invalid Request
++</STRONG>
++</UL>
++
++<P>
++Some aspect of the HTTP Request is invalid. Possible problems:
++<UL>
++<LI>Missing or unknown request method
++<LI>Missing URL
++<LI>Missing HTTP Identifier (HTTP/1.0)
++<LI>Request is too large
++<LI>Content-Length missing for POST or PUT requests
++<LI>Illegal character in hostname; underscores are not allowed
++</UL>
++<P>Your cache administrator is <A HREF="mailto:root">root</A>.
++
++<BR clear="all">
++<HR noshade size="1px">
++<ADDRESS>
++Generated Sat, 23 Jul 2011 02:22:44 GMT by cache_server (squid/2.6.STABLE21)
++</ADDRESS>
++</BODY></HTML>
++
++nc 10.66.66.66 80
++GET cache_object://localhost/info HTTP/1.0
++HTTP/1.0 403 Forbidden
++Server: squid/2.6.STABLE21
++Date: Sat, 23 Jul 2011 02:25:56 GMT
++Content-Type: text/html
++Content-Length: 1061
++Expires: Sat, 23 Jul 2011 02:25:56 GMT
++X-Squid-Error: ERR_ACCESS_DENIED 0
++X-Cache: MISS from cache_server
++X-Cache-Lookup: NONE from cache_server:3128
++Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
++Proxy-Connection: close
++
++<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
++<HTML><HEAD><META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
++<TITLE>ERROR: The requested URL could not be retrieved</TITLE>
++<STYLE type="text/css"><!--BODY{background-color:#ffffff;font-family:verdana,sans-serif}PRE{font-family:sans-serif}--></STYLE>
++</HEAD><BODY>
++<H1>ERROR</H1>
++<H2>The requested URL could not be retrieved</H2>
++<HR noshade size="1px">
++<P>
++While trying to retrieve the URL:
++<A HREF="cache_object://localhost/info">cache_object://localhost/info</A>
++<P>
++The following error was encountered:
++<UL>
++<LI>
++<STRONG>
++Access Denied.
++</STRONG>
++<P>
++Access control configuration prevents your request from
++being allowed at this time. Please contact your service provider if
++you feel this is incorrect.
++</UL>
++<P>Your cache administrator is <A HREF="mailto:root">root</A>.
++
++
++<BR clear="all">
++<HR noshade size="1px">
++<ADDRESS>
++Generated Sat, 23 Jul 2011 02:25:56 GMT by cache_server (squid/2.6.STABLE21)
++</ADDRESS>
++</BODY></HTML>
++
++
+diff --git a/ooni/command.py b/ooni/command.py
+index 361190f..df1a58c 100644
+--- a/ooni/command.py
++++ b/ooni/command.py
+@@ -13,6 +13,7 @@ import ooni.captive_portal
+ import ooni.namecheck
+ import ooni.dns_poisoning
+ import ooni.dns_cc_check
++import ooni.transparenthttp
+
+ class Command():
+ def __init__(self, args):
+@@ -48,6 +49,15 @@ class Command():
+ help="run captiveportal tests"
+ )
+
++ # --transhttp
++ def cb_transhttp(option, opt, value, oparser):
++ self.action = opt[2:]
++ optparser.add_option(
++ "--transhttp",
++ action="callback", callback=cb_transhttp,
++ help="run Transparent HTTP tests"
++ )
++
+ # --dns
+ def cb_dnstests(option, opt, value, oparser):
+ self.action = opt[2:]
+@@ -122,7 +132,7 @@ class Command():
+ if (not self.action):
+ raise optparse.OptionError(
+ 'is required',
+- '--dns | --dnsbulk | --captiveportal | --help | --version'
++ '--dns | --dnsbulk | --dnscccheck | [ --cc CC ] | --captiveportal | --transhttp | --help | --version'
+ )
+
+ except optparse.OptionError, err:
+@@ -138,6 +148,10 @@ class Command():
+ captive_portal = ooni.captive_portal.CaptivePortal
+ captive_portal(self).main()
+
++ def transhttp(self):
++ transparent_http = ooni.transparenthttp.TransparentHTTPProxy
++ transparent_http(self).main()
++
+ def dns(self):
+ dnstests = ooni.namecheck.DNS
+ dnstests(self).main()
+diff --git a/ooni/dns.py b/ooni/dns.py
+index 95da6ef..90d50bd 100644
+--- a/ooni/dns.py
++++ b/ooni/dns.py
+@@ -8,7 +8,7 @@ from socket import gethostbyname
+ import ooni.common
+
+ # apt-get install python-dns
+-import DNS
++import dns
+ import random
+
+ """ Wrap gethostbyname """
+diff --git a/ooni/http.py b/ooni/http.py
+index 62365bb..bb72001 100644
+--- a/ooni/http.py
++++ b/ooni/http.py
+@@ -7,8 +7,14 @@
+ from socket import gethostbyname
+ import ooni.common
+ import urllib2
++import httplib
++from urlparse import urlparse
++from pprint import pprint
+ import pycurl
++import random
++import string
+ import re
++from BeautifulSoup import BeautifulSoup
+
+ # By default, we'll be Torbutton's UA
+ default_ua = { 'User-Agent' :
+@@ -20,20 +26,8 @@ default_proxy_type = PROXYTYPE_SOCKS5
+ default_proxy_host = "127.0.0.1"
+ default_proxy_port = "9050"
+
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
++#class HTTPResponse(object):
++# def __init__(self):
+
+
+ """A very basic HTTP fetcher that uses Tor by default and returns a curl
+@@ -51,7 +45,7 @@ def http_proxy_fetch(url, headers, proxy_type=5,
+ http_code = getinfo(pycurl.HTTP_CODE)
+ return response, http_code
+
+-"""A very basic HTTP fetcher that returns a urllib3 response object."""
++"""A very basic HTTP fetcher that returns a urllib2 response object."""
+ def http_fetch(url,
+ headers= default_ua,
+ label="generic HTTP fetch"):
+@@ -136,6 +130,76 @@ def http_header_no_match(experiment_url, control_header, control_result):
+ else:
+ return True
+
++def http_request(self, method, url, path=None):
++ """Takes as argument a url that is perfectly formed (http://hostname/REQUEST)."""
++ purl = urlparse(url)
++ host = purl.netloc
++ conn = httplib.HTTPConnection(host, 80)
++ if path is None:
++ path = purl.path
++ conn.request(method, path)
++ response = conn.getresponse()
++ headers = dict(response.getheaders())
++ self.headers = headers
++ self.data = response.read()
++ return True
++
++def search_headers(self, s_headers, url):
++ if http_request(self, "GET", url):
++ headers = self.headers
++ else:
++ return None
++ result = {}
++ for h in s_headers.items():
++ result[h[0]] = h[0] in headers
++ return result
++
++def http_header_match_dict(experimental_url, dict_header):
++ result = {}
++ url_header = http_get_header_dict(experimental_url)
++
++# XXX for testing
++# [('content-length', '9291'), ('via', '1.0 cache_server:3128 (squid/2.6.STABLE21)'), ('x-cache', 'MISS from cache_server'), ('accept-ranges', 'bytes'), ('server', 'Apache/2.2.16 (Debian)'), ('last-modified', 'Fri, 22 Jul 2011 03:00:31 GMT'), ('connection', 'close'), ('etag', '"105801a-244b-4a89fab1e51c0;49e684ba90c80"'), ('date', 'Sat, 23 Jul 2011 03:03:56 GMT'), ('content-type', 'text/html'), ('x-cache-lookup', 'MISS from cache_server:3128')]
++
++def search_squid_headers(self):
++ url = "http://securityfocus.org/blabla"
++ s_headers = {'via': '1.0 cache_server:3128 (squid/2.6.STABLE21)', 'x-cache': 'MISS from cache_server', 'x-cache-lookup':'MISS from cache_server:3128'}
++ ret = search_headers(self, s_headers, url)
++ for i in ret.items():
++ if i[1] is True:
++ return False
++ return True
++
++def random_bad_request(self):
++ url = "http://securityfocus.org/blabla"
++ r_str = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(random.randint(5,20)))
++ if http_request(self, r_str, url):
++ return True
++ else:
++ return None
++
++def squid_search_bad_request(self):
++ if random_bad_request(self):
++ s_headers = {'X-Squid-Error' : 'ERR_INVALID_REQ 0'}
++ for i in s_headers.items():
++ if i[0] in self.headers:
++ return False
++ return True
++ else:
++ return None
++
++def squid_cacheobject_request(self):
++ url = "http://securityfocus.org/blabla"
++ if http_request(self, "GET", url, "cache_object://localhost/info"):
++ soup = BeautifulSoup(self.data)
++ if soup.find('strong') and soup.find('strong').string == "Access Denied.":
++ return False
++ else:
++ return True
++ else:
++ return None
++
++
+ def MSHTTP_CP_Tests(self):
+ experiment_url = "http://www.msftncsi.com/ncsi.txt"
+ expectedResponse = "Microsoft NCSI" # Only this - nothing more
+@@ -186,6 +250,18 @@ def WC3_CP_Tests(self):
+
+ # Google ChromeOS fetches this url in guest mode
+ # and they expect the user to authenticate
+- def googleChromeOSHTTPTest(self):
+- print "noop"
+- #url = "http://www.google.com/"
++def googleChromeOSHTTPTest(self):
++ print "noop"
++ #url = "http://www.google.com/"
++
++def SquidHeader_TransparentHTTP_Tests(self):
++ return search_squid_headers(self)
++
++def SquidBadRequest_TransparentHTTP_Tests(self):
++ squid_cacheobject_request(self)
++ return squid_search_bad_request(self)
++
++def SquidCacheobject_TransparentHTTP_Tests(self):
++ return squid_cacheobject_request(self)
++
++
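
The transcripts in this patch are exactly what the new Squid checks key on; the
bad-request probe, for example, boils down to sending a nonsense method and looking
for the X-Squid-Error header seen above (a standalone sketch -- the function name and
hostname are placeholders):

    import httplib

    def squid_bad_request_probe(host):
        # An intercepting Squid answers a garbage method with
        # "400 Bad Request" plus an X-Squid-Error header.
        conn = httplib.HTTPConnection(host, 80)
        conn.request('HJDASHJKDSAHJKDSA', '/')
        response = conn.getresponse()
        headers = dict((k.lower(), v) for k, v in response.getheaders())
        conn.close()
        return 'x-squid-error' in headers

    # print squid_bad_request_probe('www.example.com')
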
diff --git a/old-to-be-ported-code/very-old/ooni/#namecheck.py# b/old-to-be-ported-code/very-old/ooni/#namecheck.py#
new file mode 100644
index 0000000..1a2a3f0
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/#namecheck.py#
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+#
+# DNS tampering detection module
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+#
+# This module performs multiple DNS tests.
+
+import sys
+import ooni.dnsooni
+
+class DNS():
+ def __init__(self, args):
+ self.in_ = sys.stdin
+ self.out = sys.stdout
+ self.debug = False
+ self.randomize = args.randomize
+
+ def DNS_Tests(self):
+ print "DNS tampering detection:"
+ filter_name = "_DNS_Tests"
+ tests = [ooni.dnsooni]
+ for test in tests:
+ for function_ptr in dir(test):
+ if function_ptr.endswith(filter_name):
+ filter_result = getattr(test, function_ptr)(self)
+ if filter_result == True:
+ print function_ptr + " thinks the network is clean"
+ elif filter_result == None:
+ print function_ptr + " failed"
+ else:
+ print function_ptr + " thinks the network is dirty"
+
+ def main(self):
+ for function_ptr in dir(self):
+ if function_ptr.endswith("_Tests"):
+ getattr(self, function_ptr)()
+
+if __name__ == '__main__':
+ self.main()
diff --git a/old-to-be-ported-code/very-old/ooni/.DS_Store b/old-to-be-ported-code/very-old/ooni/.DS_Store
new file mode 100644
index 0000000..f5738a5
Binary files /dev/null and b/old-to-be-ported-code/very-old/ooni/.DS_Store differ
diff --git a/old-to-be-ported-code/very-old/ooni/__init__.py b/old-to-be-ported-code/very-old/ooni/__init__.py
new file mode 100644
index 0000000..8f1b96e
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/__init__.py
@@ -0,0 +1,12 @@
+"""\
+This is your package, 'ooni'.
+
+It was provided by the package, `package`.
+
+Please change this documentation, and write this module!
+"""
+
+__version__ = '0.0.1'
+
+# If you run 'make test', this is your failing test.
+# raise Exception("\n\n\tNow it's time to write your 'ooni' module!!!\n\n")
diff --git a/old-to-be-ported-code/very-old/ooni/command.py b/old-to-be-ported-code/very-old/ooni/command.py
new file mode 100644
index 0000000..e5f8f9f
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/command.py
@@ -0,0 +1,250 @@
+# -*- coding: utf-8
+"""\
+Command line UI module for ooni-probe - heavily inspired by Ingy döt Net
+"""
+
+import os
+import sys
+import re
+import optparse
+
+# Only include high level ooni tests at this time
+import ooni.captive_portal
+import ooni.namecheck
+import ooni.dns_poisoning
+import ooni.dns_cc_check
+import ooni.transparenthttp
+import ooni.helpers
+import ooni.plugooni
+import ooni.input
+
+class Command():
+ def __init__(self, args):
+ sys.argv = sys.argv[0:1]
+ sys.argv.extend(args)
+ self.startup_options()
+
+ def startup_options(self):
+ self.action = None
+ self.from_ = None
+ self.to = None
+ self.parser = None
+ self.emitter = None
+ self.emit_header = None
+ self.emit_trailer = None
+ self.in_ = sys.stdin
+ self.out = sys.stdout
+ self.debug = False
+ self.randomize = True
+ self.cc = None
+ self.hostname = None
+ self.listfile = None
+ self.listplugooni = False
+ self.plugin_name = "all"
+ self.controlproxy = None # "socks4a://127.0.0.1:9050/"
+ self.experimentproxy = None
+
+ usage = """
+
+ 'ooni' is the Open Observatory of Network Interference
+
+ command line usage: ooni-probe [options]"""
+
+ optparser = optparse.OptionParser(usage=usage)
+
+ # --plugin
+ def cb_plugin(option, opt, value, oparser):
+ self.action = opt[2:]
+ self.plugin_name = str(value)
+ optparser.add_option(
+ "--plugin", type="string",
+ action="callback", callback=cb_plugin,
+ help="run the Plugooni plgoo plugin specified"
+ )
+
+ # --listplugins
+ def cb_list_plugins(option, opt, value, oparser):
+ self.action = opt[2:]
+ optparser.add_option(
+ "--listplugins",
+ action="callback", callback=cb_list_plugins,
+ help="list available Plugooni as plgoos plugin names"
+ )
+
+ # --captiveportal
+ def cb_captiveportal(option, opt, value, oparser):
+ self.action = opt[2:]
+ optparser.add_option(
+ "--captiveportal",
+ action="callback", callback=cb_captiveportal,
+ help="run vendor emulated captiveportal tests"
+ )
+
+ # --transhttp
+ def cb_transhttp(option, opt, value, oparser):
+ self.action = opt[2:]
+ optparser.add_option(
+ "--transhttp",
+ action="callback", callback=cb_transhttp,
+ help="run Transparent HTTP tests"
+ )
+
+ # --dns
+ def cb_dnstests(option, opt, value, oparser):
+ self.action = opt[2:]
+ optparser.add_option(
+ "--dns",
+ action="callback", callback=cb_dnstests,
+ help="run fixed generic dns tests"
+ )
+
+ # --dnsbulk
+ def cb_dnsbulktests(option, opt, value, oparser):
+ self.action = opt[2:]
+ optparser.add_option(
+ "--dnsbulk",
+ action="callback", callback=cb_dnsbulktests,
+ help="run bulk DNS tests in random.shuffle() order"
+ )
+
+ # --dns-cc-check
+ def cb_dnscccheck(option, opt, value, oparser):
+ self.action = opt[2:]
+ optparser.add_option(
+ "--dnscccheck",
+ action="callback", callback=cb_dnscccheck,
+ help="run cc specific bulk DNS tests in random.shuffle() order"
+ )
+
+ # --cc [country code]
+ def cb_cc(option, opt, value, optparser):
+ # XXX: We should check this against a list of supported country codes
+ # and then return the matching value from the list into self.cc
+ self.cc = str(value)
+ optparser.add_option(
+ "--cc", type="string",
+ action="callback", callback=cb_cc,
+ help="set a specific country code -- default is None",
+ )
+
+ # --list [url/hostname/ip list in file]
+ def cb_list(option, opt, value, optparser):
+ self.listfile = os.path.expanduser(value)
+ if not os.path.isfile(self.listfile):
+ print "Wrong file '" + value + "' in --list."
+ sys.exit(1)
+ optparser.add_option(
+ "--list", type="string",
+ action="callback", callback=cb_list,
+ help="file to read from -- default is None",
+ )
+
+ # --url [url/hostname/ip]
+ def cb_host(option, opt, value, optparser):
+ self.hostname = str(value)
+ optparser.add_option(
+ "--url", type="string",
+ action="callback", callback=cb_host,
+ help="set URL/hostname/IP for use in tests -- default is None",
+ )
+
+ # --controlproxy [scheme://host:port]
+ def cb_controlproxy(option, opt, value, optparser):
+ self.controlproxy = str(value)
+ optparser.add_option(
+ "--controlproxy", type="string",
+ action="callback", callback=cb_controlproxy,
+ help="proxy to be used as a control -- default is None",
+ )
+
+ # --experimentproxy [scheme://host:port]
+ def cb_experimentproxy(option, opt, value, optparser):
+ self.experimentproxy = str(value)
+ optparser.add_option(
+ "--experimentproxy", type="string",
+ action="callback", callback=cb_experimentproxy,
+ help="proxy to be used for experiments -- default is None",
+ )
+
+
+
+ # --randomize
+ def cb_randomize(option, opt, value, optparser):
+ self.randomize = bool(int(value))
+ optparser.add_option(
+ "--randomize", type="choice",
+ choices=['0', '1'], metavar="0|1",
+ action="callback", callback=cb_randomize,
+ help="randomize host order -- default is on",
+ )
+
+ # XXX TODO:
+ # pause/resume scans for dns_BULK_DNS_Tests()
+ # setting of control/experiment resolver
+ # setting of control/experiment proxy
+ #
+
+ def cb_version(option, opt, value, oparser):
+ self.action = 'version'
+ optparser.add_option(
+ "-v", "--version",
+ action="callback", callback=cb_version,
+ help="print ooni-probe version"
+ )
+
+ # parse options
+ (opts, args) = optparser.parse_args()
+
+ # validate options
+ try:
+ if (args):
+ raise optparse.OptionError('extra arguments found', args)
+ if (not self.action):
+ raise optparse.OptionError(
+ 'RTFS', 'required arguments missing'
+ )
+
+ except optparse.OptionError, err:
+ sys.stderr.write(str(err) + '\n\n')
+ optparser.print_help()
+ sys.exit(1)
+
+ def version(self):
+ print """
+ooni-probe pre-alpha
+Copyright (c) 2011, Jacob Appelbaum, Arturo Filastò
+See: https://www.torproject.org/ooni/
+
+"""
+
+ def run(self):
+ getattr(self, self.action)()
+
+ def plugin(self):
+ plugin_run = ooni.plugooni.Plugooni
+ plugin_run(self).run(self)
+
+ def listplugins(self):
+ plugin_run = ooni.plugooni.Plugooni
+ plugin_run(self).list_plugoons()
+
+ def captiveportal(self):
+ captive_portal = ooni.captive_portal.CaptivePortal
+ captive_portal(self).main()
+
+ def transhttp(self):
+ transparent_http = ooni.transparenthttp.TransparentHTTPProxy
+ transparent_http(self).main()
+
+ def dns(self):
+ dnstests = ooni.namecheck.DNS
+ dnstests(self).main()
+
+ def dnsbulk(self):
+ dnstests = ooni.dns_poisoning.DNSBulk
+ dnstests(self).main()
+
+ def dnscccheck(self):
+ dnstests = ooni.dns_cc_check.DNSBulk
+ dnstests(self).main()
+
diff --git a/old-to-be-ported-code/very-old/ooni/dns_poisoning.py b/old-to-be-ported-code/very-old/ooni/dns_poisoning.py
new file mode 100644
index 0000000..939391e
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/dns_poisoning.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+#
+# DNS tampering detection module
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+#
+# This module performs DNS queries against a known good resolver and a possible
+# bad resolver. We compare every resolved name against a list of known filters
+# - if we match, we ring a bell; otherwise, we list possible filter IP
+# addresses. There is a high false positive rate for sites that are GeoIP load
+# balanced.
+#
+
+import sys
+import ooni.dnsooni
+
+class DNSBulk():
+ def __init__(self, args):
+ self.in_ = sys.stdin
+ self.out = sys.stdout
+ self.randomize = args.randomize
+ self.debug = False
+
+ def DNS_Tests(self):
+ print "DNS tampering detection for list of domains:"
+ filter_name = "_DNS_BULK_Tests"
+ tests = [ooni.dnsooni]
+ for test in tests:
+ for function_ptr in dir(test):
+ if function_ptr.endswith(filter_name):
+ filter_result = getattr(test, function_ptr)(self)
+ if filter_result == True:
+ print function_ptr + " thinks the network is clean"
+ elif filter_result == None:
+ print function_ptr + " failed"
+ else:
+ print function_ptr + " thinks the network is dirty"
+ def main(self):
+ for function_ptr in dir(self):
+ if function_ptr.endswith("_Tests"):
+ getattr(self, function_ptr)()
+
+if __name__ == '__main__':
+ self.main()
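
The queries themselves live in ooni.dnsooni (added below); stripped of the
bookkeeping, the comparison is one name resolved against two resolvers with the
answer sets intersected. A condensed sketch using the same pydns calls (the
experiment resolver address is a placeholder):

    import DNS  # python-dns / pydns, as used by ooni.dnsooni

    def answers(hostname, resolver):
        req = DNS.Request(name=hostname, server=resolver)
        return set(a['data'] for a in req.req().answers)

    def resolvers_disagree(hostname, control='8.8.8.8', experiment='192.0.2.53'):
        # Same signal as above: flag the name when the experiment resolver
        # shares no answers at all with the control resolver.
        return len(answers(hostname, control) & answers(hostname, experiment)) == 0

    # print resolvers_disagree('torproject.org')
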
diff --git a/old-to-be-ported-code/very-old/ooni/dnsooni.py b/old-to-be-ported-code/very-old/ooni/dnsooni.py
new file mode 100644
index 0000000..bfdfe51
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/dnsooni.py
@@ -0,0 +1,356 @@
+#!/usr/bin/env python
+#
+# DNS support for ooni-probe
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+#
+
+from socket import gethostbyname
+import ooni.common
+
+# requires python-dns
+# (pydns.sourceforge.net)
+try:
+ import DNS
+# Mac OS X needs this
+except:
+ try:
+ import dns as DNS
+ except:
+ pass # Never mind, let's break later.
+import random
+from pprint import pprint
+
+""" Wrap gethostbyname """
+def dns_resolve(hostname):
+ try:
+ resolved_host = gethostbyname(hostname)
+ return resolved_host
+ except:
+ return False
+
+"""Perform a resolution on test_hostname and compare it with the expected
+ control_resolved ip address. Optionally, a label may be set to customize
+ output. If the experiment matches the control, this returns True; otherwise
+ it returns False.
+"""
+def dns_resolve_match(experiment_hostname, control_resolved,
+ label="generic DNS comparison"):
+ experiment_resolved = dns_resolve(experiment_hostname)
+ if experiment_resolved == False:
+ return None
+ if experiment_resolved:
+ if str(experiment_resolved) != str(control_resolved):
+ print label + " control " + str(control_resolved) + " data does not " \
+ "match experiment response: " + str(experiment_resolved)
+ return False
+ return True
+
+def generic_DNS_resolve(experiment_hostname, experiment_resolver):
+ if experiment_resolver == None:
+ req = DNS.Request(name=experiment_hostname) # local resolver
+ else:
+ req = DNS.Request(name=experiment_hostname, server=experiment_resolver) # override the default resolver
+ resolved_data = req.req().answers
+ return resolved_data
+
+""" Return a list of all known censors. """
+def load_list_of_known_censors(known_proxy_file=None):
+ proxyfile = "proxy-lists/ips.txt"
+ known_proxy_file = open(proxyfile, 'r', 1)
+ known_proxy_list = []
+ for known_proxy in known_proxy_file.readlines():
+ known_proxy_list.append(known_proxy)
+ known_proxy_file.close()
+ known_proxy_count = len(known_proxy_list)
+ print "Loading " + str(known_proxy_count) + " known proxies..."
+ return known_proxy_list, known_proxy_count
+
+def load_list_of_test_hosts(hostfile=None):
+ if hostfile == None:
+ hostfile="censorship-lists/norwegian-dns-blacklist.txt"
+ host_list_file = open(hostfile, 'r', 1)
+ host_list = []
+ for host_name in host_list_file.readlines():
+ if host_name.isspace():
+ continue
+ else:
+ host_list.append(host_name)
+ host_list_file.close()
+ host_count = len(host_list)
+ #print "Loading " + str(host_count) + " test host names..."
+ return host_list, host_count
+
+""" Return True with a list of censors if we find a known censor from
+ known_proxy_list in the experiment_data DNS response. Otherwise return
+ False and None. """
+def contains_known_censors(known_proxy_list, experiment_data):
+ match = False
+ proxy_list = []
+ for answer in range(len(experiment_data)):
+ for known_proxy in known_proxy_list:
+ if answer == known_proxy:
+ print "CONFLICT: known proxy discovered: " + str(known_proxy),
+ proxy_list.append(known_proxy)
+ match = True
+ return match, proxy_list
+
+""" Return True and the experiment response that failed to match."""
+def compare_control_with_experiment(known_proxy_list, control_data, experiment_data):
+ known_proxy_found, known_proxies = contains_known_censors(known_proxy_list, experiment_data)
+ conflict_list = []
+ conflict = False
+ if known_proxy_found:
+ print "known proxy discovered: " + str(known_proxies)
+ for answer in range(len(control_data)):
+ if control_data[answer]['data'] == experiment_data:
+ print "control_data[answer]['data'] = " + str(control_data[answer]['data']) + "and experiment_data = " + str(experiment_data)
+ continue
+ else:
+ conflict = True
+ conflict_list.append(experiment_data)
+ #print "CONFLICT: control_data: " + str(control_data) + " experiment_data: " + str(experiment_data),
+ return conflict, conflict_list
+
+def dns_DNS_BULK_Tests(self, hostfile=None,
+ known_good_resolver="8.8.8.8", test_resolver=None):
+ tampering = False # By default we'll pretend the internet is nice
+ tampering_list = []
+ host_list, host_count = load_list_of_test_hosts()
+ known_proxies, proxy_count = load_list_of_known_censors()
+ check_count = 1
+ if test_resolver == None:
+ DNS.ParseResolvConf() # Set the local resolver as our default
+ if self.randomize:
+ random.shuffle(host_list) # This makes our list non-sequential for now
+ for host_name in host_list:
+ host_name = host_name.strip()
+ print "Total progress: " + str(check_count) + " of " + str(host_count) + " hosts to check"
+ print "Resolving with control resolver..."
+ print "Testing " + host_name + " with control resolver: " + str(known_good_resolver)
+ print "Testing " + host_name + " with experiment resolver: " + str(test_resolver)
+ # XXX TODO - we need to keep track of the status of these requests and then resume them
+ while True:
+ try:
+ control_data = generic_DNS_resolve(host_name, known_good_resolver)
+ break
+ except KeyboardInterrupt:
+ print "bailing out..."
+ exit()
+ except DNS.Base.DNSError:
+ print "control resolver appears to be failing..."
+ continue
+ except:
+ print "Timeout; looping!"
+ continue
+
+ print "Resolving with experiment resolver..."
+ while True:
+ try:
+ experiment_data = generic_DNS_resolve(host_name, test_resolver)
+ break
+ except KeyboardInterrupt:
+ print "bailing out..."
+ exit()
+ except DNS.Base.DNSError:
+ print "experiment resolver appears to be failing..."
+ continue
+ except:
+ print "Timeout; looping!"
+ continue
+
+ print "Comparing control and experiment...",
+ tampering, conflicts = compare_control_with_experiment(known_proxies, control_data, experiment_data)
+ if tampering:
+ tampering_list.append(conflicts)
+ print "Conflicts with " + str(host_name) + " : " + str(conflicts)
+ check_count = check_count + 1
+ # host_list is a plain list; its file was already closed in load_list_of_test_hosts()
+ return tampering
+
+""" Attempt to resolve random_hostname and return True and None if empty. If an
+ address is returned we return False and the returned address.
+"""
+def dns_response_empty(random_hostname):
+ response = dns_resolve(random_hostname)
+ if response == False:
+ return True, None
+ return False, response
+
+def dns_multi_response_empty(count, size):
+ for i in range(count):
+ randName = ooni.common._randstring(size)
+ response_empty, response_ip = dns_response_empty(randName)
+ if response_empty == True and response_ip == None:
+ responses_are_empty = True
+ else:
+ print randName + " found with value " + str(response_ip)
+ responses_are_empty = False
+ return responses_are_empty
+
+""" Attempt to resolve one random host name per tld in tld_list where the
+ hostnames are random strings with a length between min_length and
+ max_length. Return True if list is empty, otherwise return False."""
+def dns_list_empty(tld_list, min_length, max_length,
+ label="generic DNS list test"):
+ for tld in tld_list:
+ randName = ooni.common._randstring(min_length, max_length) + tld
+ response_empty, response_ip = dns_response_empty(randName)
+ return response_empty
+
+# Known bad test
+# Test for their DNS breakage and their HTTP MITM
+# "Family Shield" is 208.67.222.123 and 208.67.220.123
+# returns 67.215.65.130 for filtered sites like kink.com
+# block.opendns.com is a block page where users are redirected
+# 208.67.216.135 208.67.217.135 are the block pages currently point
+# 67.215.65.132 is returned for NXDOMAINs and a visit with HTTP to that IP
+# results in redirection to http://guide.opendns.com/main?url=sdagsad.com or
+# whatever the HOST header says
+# Amusingly - their Server header is: "OpenDNS Guide"
+""" Return True if we are not being directed as known OpenDNS block pages."""
+def OpenDNS_DNS_Tests(self):
+ return OpenDNS_Censorship_DNS_TESTS(self)
+ return OpenDNS_NXDomain_DNS_TESTS(self)
+
+def OpenDNS_Censorship_DNS_TESTS(self):
+ known_filter = "67.215.65.130"
+ randName = ooni.common._randstring(10)
+ redirected = dns_resolve_match(randName, known_filter, label="OpenDNS DNS Censorship comparison")
+ if redirected:
+ return False
+ else:
+ return True
+
+def OpenDNS_NXDomain_DNS_TESTS(self):
+ known_filter = "67.215.65.132"
+ randName = ooni.common._randstring(10)
+ redirected = dns_resolve_match(randName, known_filter, label="OpenDNS DNS NXDomain comparison")
+ if redirected:
+ return False
+ else:
+ return True
+
+"""Returns True if the experiment_url returns the well known Italian block page."""
+def cc_DNS_Tests_it(self):
+ tampering = False # By default we'll pretend the internet is nice
+ tampering_list = []
+ conflicts = []
+ known_good_resolver = "8.8.8.8"
+ host_list, host_count = load_list_of_test_hosts("censorship-lists/italy-gamble-blocklist-07-22-11.txt")
+ known_http_block_pages, known_block_count = load_list_of_test_hosts("proxy-lists/italy-http-ips.txt")
+ known_censoring_resolvers, censoring_resolver_count = load_list_of_test_hosts("proxy-lists/italy-dns-ips.txt")
+
+ check_count = 1
+ DNS.ParseResolvConf()
+ # Set the local resolver as our default
+ if self.randomize:
+ random.shuffle(host_list) # This makes our list non-sequential for now
+ print "We're testing (" + str(host_count) + ") URLs"
+ print "We're looking for (" + str(known_block_count) + ") block pages"
+ print "We're testing against (" + str(censoring_resolver_count) + ") censoring DNS resolvers"
+ for test_resolver in known_censoring_resolvers:
+ test_resolver = test_resolver.strip()
+ for host_name in host_list:
+ host_name = host_name.strip()
+ print "Total progress: " + str(check_count) + " of " + str(host_count) + " hosts to check"
+ print "Testing " + host_name + " with control resolver: " + known_good_resolver
+ print "Testing " + host_name + " with experiment resolver: " + test_resolver
+ while True:
+ try:
+ control_data = generic_DNS_resolve(host_name, known_good_resolver)
+ break
+ except KeyboardInterrupt:
+ print "bailing out..."
+ exit()
+ except DNS.Base.DNSError:
+ print "control resolver appears to be failing..."
+ break
+ except:
+ print "Timeout; looping!"
+ continue
+
+ while True:
+ try:
+ experiment_data = generic_DNS_resolve(host_name, test_resolver)
+ break
+ except KeyboardInterrupt:
+ print "bailing out..."
+ exit()
+ except DNS.Base.DNSError:
+ print "experiment resolver appears to be failing..."
+ continue
+ except:
+ print "Timeout; looping!"
+ continue
+
+ print "Comparing control and experiment...",
+ tampering, conflicts = compare_control_with_experiment(known_http_block_pages, control_data, experiment_data)
+ if tampering:
+ tampering_list.append(conflicts)
+ print "Conflicts with " + str(host_name) + " : " + str(conflicts)
+ check_count = check_count + 1
+
+ # host_list is a plain list; its file was already closed in load_list_of_test_hosts()
+ return tampering
+
+
+## XXX TODO
+## Code up automatic tests for HTTP page checking in Italy - length + known strings, etc
+
+""" Returns True if the experiment_host returns a well known Australian filter
+ IP address."""
+def Australian_DNS_Censorship(self, known_filtered_host="badhost.com"):
+ # http://www.robtex.com/ip/61.88.88.88.html
+ # http://requests.optus.net.au/dns/
+ known_block_ip = "208.69.183.228" # http://interpol.contentkeeper.com/
+ known_censoring_resolvers = ["61.88.88.88"] # Optus
+ for resolver in known_censoring_resolvers:
+ blocked = generic_DNS_censorship(known_filtered_host, resolver, known_block_ip)
+ if blocked:
+ return True
+
+"""Returns True if experiment_hostname as resolved by experiment_resolver
+ resolves to control_data. Returns False if there is no match or None if the
+ attempt fails."""
+def generic_DNS_censorship(self, experiment_hostname, experiment_resolver,
+ control_data):
+ req = DNS.Request(name=experiment_hostname, server=experiment_resolver)
+ resolved_data = req.req().answers
+ for answer in range(len(resolved_data)):
+ if resolved_data[answer]['data'] == control_data:
+ return True
+ return False
+
+# See dns_launch_wildcard_checks in tor/src/or/dns.c for Tor implementation
+# details
+""" Return True if Tor would consider the network fine; False if it's hostile
+ and has no signs of DNS tampering. """
+def Tor_DNS_Tests(self):
+ response_rfc2606_empty = RFC2606_DNS_Tests(self)
+ tor_tld_list = ["", ".com", ".org", ".net"]
+ response_tor_empty = ooni.dnsooni.dns_list_empty(tor_tld_list, 8, 16, "TorDNSTest")
+ return response_tor_empty | response_rfc2606_empty
+
+""" Return True if RFC2606 would consider the network hostile; False if it's all
+ clear and has no signs of DNS tampering. """
+def RFC2606_DNS_Tests(self):
+ tld_list = [".invalid", ".test"]
+ return ooni.dnsooni.dns_list_empty(tld_list, 4, 18, "RFC2606Test")
+
+""" Return True if googleChromeDNSTest would consider the network OK."""
+def googleChrome_CP_Tests(self):
+ maxGoogleDNSTests = 3
+ GoogleDNSTestSize = 10
+ return ooni.dnsooni.dns_multi_response_empty(maxGoogleDNSTests,
+ GoogleDNSTestSize)
+def googleChrome_DNS_Tests(self):
+ return googleChrome_CP_Tests(self)
+
+""" Return True if MSDNSTest would consider the network OK."""
+def MSDNS_CP_Tests(self):
+ experimentHostname = "dns.msftncsi.com"
+ expectedResponse = "131.107.255.255"
+ return ooni.dnsooni.dns_resolve_match(experimentHostname, expectedResponse, "MS DNS")
+
+def MSDNS_DNS_Tests(self):
+ return MSDNS_CP_Tests(self)
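
The RFC2606/Tor wildcard checks near the end of this file can be tried by hand with
nothing more than gethostbyname; a tiny standalone sketch (the helper name is ours):

    import random
    import string
    from socket import gethostbyname

    def nxdomain_is_wildcarded(tld='.invalid', length=12):
        # A random label under a reserved TLD should never resolve; if it
        # does, something on the path is rewriting NXDOMAIN responses.
        name = ''.join(random.choice(string.ascii_lowercase)
                       for _ in range(length)) + tld
        try:
            return True, gethostbyname(name)
        except Exception:
            return False, None

    # print nxdomain_is_wildcarded()
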
diff --git a/old-to-be-ported-code/very-old/ooni/helpers.py b/old-to-be-ported-code/very-old/ooni/helpers.py
new file mode 100644
index 0000000..514e65f
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/helpers.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+#
+# HTTP support for ooni-probe
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+# Arturo Filasto' <art(a)fuffa.org>
+
+import ooni.common
+import pycurl
+import random
+import zipfile
+import os
+from xml.dom import minidom
+try:
+ from BeautifulSoup import BeautifulSoup
+except:
+ pass # Never mind, let's break later.
+
+def get_random_url(self):
+ filepath = os.getcwd() + "/test-lists/top-1m.csv.zip"
+ fp = zipfile.ZipFile(filepath, "r")
+ fp.open("top-1m.csv")
+ content = fp.read("top-1m.csv")
+ return "http://" + random.choice(content.split("\n")).split(",")[1]
+
+"""Pick a random header and use that for the request"""
+def get_random_headers(self):
+ filepath = os.getcwd() + "/test-lists/whatheaders.xml"
+ headers = []
+ content = open(filepath, "r").read()
+ soup = BeautifulSoup(content)
+ measurements = soup.findAll('measurement')
+ i = random.randint(0, len(measurements) - 1)
+ for vals in measurements[i].findAll('header'):
+ name = vals.find('name').string
+ value = vals.find('value').string
+ if name != "host":
+ headers.append((name, value))
+ return headers
diff --git a/old-to-be-ported-code/very-old/ooni/http.py b/old-to-be-ported-code/very-old/ooni/http.py
new file mode 100644
index 0000000..59e2abb
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/http.py
@@ -0,0 +1,306 @@
+#!/usr/bin/env python
+#
+# HTTP support for ooni-probe
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+# Arturo Filasto' <art(a)fuffa.org>
+#
+
+from socket import gethostbyname
+import ooni.common
+import ooni.helpers
+import ooni.report
+import urllib2
+import httplib
+from urlparse import urlparse
+from pprint import pprint
+import pycurl
+import random
+import string
+import re
+from pprint import pprint
+try:
+ from BeautifulSoup import BeautifulSoup
+except:
+ pass # Never mind, let's break later.
+
+# By default, we'll be Torbutton's UA
+default_ua = { 'User-Agent' :
+ 'Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0' }
+
+# Use pycurl to connect over a proxy
+PROXYTYPE_SOCKS5 = 5
+default_proxy_type = PROXYTYPE_SOCKS5
+default_proxy_host = "127.0.0.1"
+default_proxy_port = "9050"
+
+#class HTTPResponse(object):
+# def __init__(self):
+
+
+"""A very basic HTTP fetcher that uses Tor by default and returns a curl
+ object."""
+def http_proxy_fetch(url, headers, proxy_type=5,
+ proxy_host="127.0.0.1",
+ proxy_port=9050):
+ request = pycurl.Curl()
+ request.setopt(pycurl.PROXY, proxy_host)
+ request.setopt(pycurl.PROXYPORT, proxy_port)
+ request.setopt(pycurl.PROXYTYPE, proxy_type)
+ request.setopt(pycurl.HTTPHEADER, ["User-Agent: Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0"])
+ request.setopt(pycurl.URL, url)
+ response = request.perform()
+ http_code = request.getinfo(pycurl.HTTP_CODE)
+ return response, http_code
+
+"""A very basic HTTP fetcher that returns a urllib2 response object."""
+def http_fetch(url,
+ headers= default_ua,
+ label="generic HTTP fetch"):
+ request = urllib2.Request(url, None, headers)
+ response = urllib2.urlopen(request)
+ return response
+
+"""Connect to test_hostname on port 80, request url and compare it with the expected
+ control_result. Optionally, a label may be set to customize
+ output. If the experiment matches the control, this returns True with the http
+ status code; otherwise it returns False.
+"""
+def http_content_match(experimental_url, control_result,
+ headers= { 'User-Agent' : default_ua },
+ label="generic HTTP content comparison"):
+ request = urllib2.Request(experimental_url, None, headers)
+ response = urllib2.urlopen(request)
+ responseContents = response.read()
+ responseCode = response.code
+ if responseContents != False:
+ if str(responseContents) != str(control_result):
+ print label + " control " + str(control_result) + " data does not " \
+ "match experiment response: " + str(responseContents)
+ return False, responseCode
+ return True, responseCode
+ else:
+ print "HTTP connection appears to have failed"
+ return False, False
+
+"""Connect to test_hostname on port 80, request url and compare it with the expected
+ control_result as a regex. Optionally, a label may be set to customize
+ output. If the experiment matches the control, this returns True with the HTTP
+ status code; otherwise it returns False.
+"""
+def http_content_fuzzy_match(experimental_url, control_result,
+ headers= { 'User-Agent' : default_ua },
+ label="generic HTTP content comparison"):
+ request = urllib2.Request(experimental_url, None, headers)
+ response = urllib2.urlopen(request)
+ responseContents = response.read()
+ responseCode = response.code
+ pattern = re.compile(control_result)
+ match = pattern.search(responseContents)
+ if responseContents != False:
+ if not match:
+ print label + " control " + str(control_result) + " data does not " \
+ "match experiment response: " + str(responseContents)
+ return False, responseCode
+ return True, responseCode
+ else:
+ print "HTTP connection appears to have failed"
+ return False, False
+
+"""Compare two HTTP status codes as integers and return True if they match."""
+def http_status_code_match(experiment_code, control_code):
+ if int(experiment_code) != int(control_code):
+ return False
+ return True
+
+"""Compare two HTTP status codes as integers and return True if they don't match."""
+def http_status_code_no_match(experiment_code, control_code):
+ if http_status_code_match(experiment_code, control_code):
+ return False
+ return True
+
+"""Connect to a URL and compare the control_header/control_result with the data
+served by the remote server. Return True if it matches, False if it does not."""
+def http_header_match(experiment_url, control_header, control_result):
+ response = http_fetch(experiment_url)
+ remote_header = response.headers.getheader(control_header)
+ if str(remote_header) == str(control_result):
+ return True
+ else:
+ return False
+
+"""Connect to a URL and compare the control_header/control_result with the data
+served by the remote server. Return True if it does not match, False if it does."""
+def http_header_no_match(experiment_url, control_header, control_result):
+ match = http_header_match(experiment_url, control_header, control_result)
+ if match:
+ return False
+ else:
+ return True
+
+def send_browser_headers(self, browser, conn):
+ headers = ooni.helpers.get_random_headers(self)
+ for h in headers:
+ conn.putheader(h[0], h[1])
+ conn.endheaders()
+ return True
+
+def http_request(self, method, url, path=None):
+ purl = urlparse(url)
+ host = purl.netloc
+ conn = httplib.HTTPConnection(host, 80)
+ conn.connect()
+ if path is None:
+ path = purl.path
+ conn.putrequest(method, path)
+ send_browser_headers(self, None, conn)
+ response = conn.getresponse()
+ headers = dict(response.getheaders())
+ self.headers = headers
+ self.data = response.read()
+ return True
+
+def search_headers(self, s_headers, url):
+ if http_request(self, "GET", url):
+ headers = self.headers
+ else:
+ return None
+ result = {}
+ for h in s_headers.items():
+ result[h[0]] = h[0] in headers
+ return result
+
+# XXX for testing
+# [('content-length', '9291'), ('via', '1.0 cache_server:3128 (squid/2.6.STABLE21)'), ('x-cache', 'MISS from cache_server'), ('accept-ranges', 'bytes'), ('server', 'Apache/2.2.16 (Debian)'), ('last-modified', 'Fri, 22 Jul 2011 03:00:31 GMT'), ('connection', 'close'), ('etag', '"105801a-244b-4a89fab1e51c0;49e684ba90c80"'), ('date', 'Sat, 23 Jul 2011 03:03:56 GMT'), ('content-type', 'text/html'), ('x-cache-lookup', 'MISS from cache_server:3128')]
+
+"""Search for squid headers by requesting a random site and checking if the headers have been rewritten (active, not fingerprintable)"""
+def search_squid_headers(self):
+ test_name = "squid header"
+ self.logger.info("RUNNING %s test" % test_name)
+ url = ooni.helpers.get_random_url(self)
+ s_headers = {'via': '1.0 cache_server:3128 (squid/2.6.STABLE21)', 'x-cache': 'MISS from cache_server', 'x-cache-lookup':'MISS from cache_server:3128'}
+ ret = search_headers(self, s_headers, url)
+ for i in ret.items():
+ if i[1] is True:
+ self.logger.info("the %s test returned False" % test_name)
+ return False
+ self.logger.info("the %s test returned True" % test_name)
+ return True
+
+def random_bad_request(self):
+ url = ooni.helpers.get_random_url(self)
+ r_str = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(random.randint(5,20)))
+ if http_request(self, r_str, url):
+ return True
+ else:
+ return None
+
+"""Create a request made up of a random string of 5-20 chars (active technique, possibly fingerprintable)"""
+def squid_search_bad_request(self):
+ test_name = "squid bad request"
+ self.logger.info("RUNNING %s test" % test_name)
+ if random_bad_request(self):
+ s_headers = {'X-Squid-Error' : 'ERR_INVALID_REQ 0'}
+ for i in s_headers.items():
+ if i[0] in self.headers:
+ self.logger.info("the %s test returned False" % test_name)
+ return False
+ self.logger.info("the %s test returned True" % test_name)
+ return True
+ else:
+ self.logger.warning("the %s test failed" % test_name)
+ return None
+
+"""Try requesting cache_object and expect as output access denied (very active technique, fingerprintable) """
+def squid_cacheobject_request(self):
+ url = ooni.helpers.get_random_url(self)
+ test_name = "squid cacheobject"
+ self.logger.info("RUNNING %s test" % test_name)
+ if http_request(self, "GET", url, "cache_object://localhost/info"):
+ soup = BeautifulSoup(self.data)
+ if soup.find('strong') and soup.find('strong').string == "Access Denied.":
+ self.logger.info("the %s test returned False" % test_name)
+ return False
+ else:
+ self.logger.info("the %s test returned True" % test_name)
+ return True
+ else:
+ self.logger.warning("the %s test failed" % test_name)
+ return None
+
+
+def MSHTTP_CP_Tests(self):
+ test_name = "MS HTTP Captive Portal"
+ self.logger.info("RUNNING %s test" % test_name)
+ experiment_url = "http://www.msftncsi.com/ncsi.txt"
+ expectedResponse = "Microsoft NCSI" # Only this - nothing more
+ expectedResponseCode = "200" # Must be this - nothing else
+ label = "MS HTTP"
+ headers = { 'User-Agent' : 'Microsoft NCSI' }
+ content_match, experiment_code = http_content_match(experiment_url, expectedResponse,
+ headers, label)
+ status_match = http_status_code_match(expectedResponseCode,
+ experiment_code)
+ if status_match and content_match:
+ self.logger.info("the %s test returned True" % test_name)
+ return True
+ else:
+ print label + " experiment would conclude that the network is filtered."
+ self.logger.info("the %s test returned False" % test_name)
+ return False
+
+def AppleHTTP_CP_Tests(self):
+ test_name = "Apple HTTP Captive Portal"
+ self.logger.info("RUNNING %s test" % test_name)
+ experiment_url = "http://www.apple.com/library/test/success.html"
+ expectedResponse = "Success" # There is HTML that contains this string
+ expectedResponseCode = "200"
+ label = "Apple HTTP"
+ headers = { 'User-Agent' : 'Mozilla/5.0 (iPhone; U; CPU like Mac OS X; en) '
+ 'AppleWebKit/420+ (KHTML, like Gecko) Version/3.0'
+ ' Mobile/1A543a Safari/419.3' }
+ content_match, experiment_code = http_content_fuzzy_match(
+ experiment_url, expectedResponse, headers)
+ status_match = http_status_code_match(expectedResponseCode,
+ experiment_code)
+ if status_match and content_match:
+ self.logger.info("the %s test returned True" % test_name)
+ return True
+ else:
+ print label + " experiment would conclude that the network is filtered."
+ print label + "content match:" + str(content_match) + " status match:" + str(status_match)
+ self.logger.info("the %s test returned False" % test_name)
+ return False
+
+def WC3_CP_Tests(self):
+ test_name = "W3 Captive Portal"
+ self.logger.info("RUNNING %s test" % test_name)
+ url = "http://tools.ietf.org/html/draft-nottingham-http-portal-02"
+ draftResponseCode = "428"
+ label = "WC3 draft-nottingham-http-portal"
+ response = http_fetch(url, label=label)
+ responseCode = response.code
+ status_match = http_status_code_no_match(responseCode, draftResponseCode)
+ if status_match:
+ self.logger.info("the %s test returned True" % test_name)
+ return True
+ else:
+ print label + " experiment would conclude that the network is filtered."
+ print label + " status match: " + str(status_match)
+ self.logger.info("the %s test returned False" % test_name)
+ return False
+
+# Google ChromeOS fetches this url in guest mode
+# and they expect the user to authenticate
+def googleChromeOSHTTPTest(self):
+ print "noop"
+ #url = "http://www.google.com/"
+
+def SquidHeader_TransparentHTTP_Tests(self):
+ return search_squid_headers(self)
+
+def SquidBadRequest_TransparentHTTP_Tests(self):
+ return squid_search_bad_request(self)
+
+def SquidCacheobject_TransparentHTTP_Tests(self):
+ return squid_cacheobject_request(self)
+
+
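The captive-portal checks in this file (MSHTTP_CP_Tests, AppleHTTP_CP_Tests) all reduce to fetching a vendor probe URL with a vendor User-Agent and comparing both the response body and the status code against known-good values. A minimal standalone sketch of that idea in modern Python follows; it does not reuse the patch's http_content_match/http_status_code_match helpers, and the NCSI URL and expected values are simply the ones quoted above.

import urllib.request

def captive_portal_check(url="http://www.msftncsi.com/ncsi.txt",
                         expected_body=b"Microsoft NCSI",
                         expected_status=200,
                         user_agent="Microsoft NCSI"):
    # Fetch the vendor probe URL and compare body and status code.
    # Returns True if both match (network looks clean), False otherwise.
    req = urllib.request.Request(url, headers={"User-Agent": user_agent})
    with urllib.request.urlopen(req, timeout=10) as resp:
        body = resp.read()
        status = resp.status  # resp.getcode() on older interpreters
    return body.strip() == expected_body and status == expected_status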
diff --git a/old-to-be-ported-code/very-old/ooni/input.py b/old-to-be-ported-code/very-old/ooni/input.py
new file mode 100644
index 0000000..c32ab48
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/input.py
@@ -0,0 +1,33 @@
+#!/usr/bin/python
+
+class file:
+ def __init__(self, name=None):
+ if name:
+ self.name = name
+
+ def simple(self, name=None):
+ """ Simple file parsing method:
+ Read a file line by line and return a list of all its lines, stripped of newlines
+ """
+ if name:
+ self.name = name
+ output = []
+ try:
+ f = open(self.name, "r")
+ for line in f.readlines():
+ output.append(line.strip())
+ return output
+ except:
+ return output
+
+ def csv(self, name=None):
+ if name:
+ self.name = name
+
+ def yaml(self, name):
+ if name:
+ self.name = name
+
+ def consensus(self, name):
+ if name:
+ self.name = name
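The simple() helper above is what the plugoos later in this commit use to read host:port lists. A rough modern equivalent, with explicit resource handling and the host:port split that marco/tcpcon perform, might look like this (sketch only; the parsing assumes one "host:port" entry per line):

def read_lines(path):
    # non-empty, stripped lines from a text file
    with open(path, "r") as f:
        return [line.strip() for line in f if line.strip()]

def read_endpoints(path):
    # turn "host:port" lines into (host, port) tuples, as marco/tcpcon expect
    endpoints = []
    for line in read_lines(path):
        host, port = line.rsplit(":", 1)
        endpoints.append((host, int(port)))
    return endpoints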
diff --git a/old-to-be-ported-code/very-old/ooni/namecheck.py b/old-to-be-ported-code/very-old/ooni/namecheck.py
new file mode 100644
index 0000000..1a2a3f0
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/namecheck.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+#
+# DNS tampering detection module
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+#
+# This module performs multiple DNS tests.
+
+import sys
+import ooni.dnsooni
+
+class DNS():
+ def __init__(self, args):
+ self.in_ = sys.stdin
+ self.out = sys.stdout
+ self.debug = False
+ self.randomize = args.randomize
+
+ def DNS_Tests(self):
+ print "DNS tampering detection:"
+ filter_name = "_DNS_Tests"
+ tests = [ooni.dnsooni]
+ for test in tests:
+ for function_ptr in dir(test):
+ if function_ptr.endswith(filter_name):
+ filter_result = getattr(test, function_ptr)(self)
+ if filter_result == True:
+ print function_ptr + " thinks the network is clean"
+ elif filter_result == None:
+ print function_ptr + " failed"
+ else:
+ print function_ptr + " thinks the network is dirty"
+
+ def main(self):
+ for function_ptr in dir(self):
+ if function_ptr.endswith("_Tests"):
+ getattr(self, function_ptr)()
+
+if __name__ == '__main__':
+ self.main()
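The dispatch here (and again in transparenthttp.py further down) is a naming convention: scan a module with dir(), call every attribute whose name ends with the filter suffix, and map True/None/False onto clean/failed/dirty. A self-contained sketch of that convention, with a toy module standing in for ooni.dnsooni and the caller argument dropped for brevity:

import types

def run_suffix_tests(module, suffix):
    # call every function in `module` whose name ends with `suffix`
    # and report its result using the True/None/False convention
    for name in dir(module):
        if not name.endswith(suffix):
            continue
        result = getattr(module, name)()
        if result is True:
            print("%s thinks the network is clean" % name)
        elif result is None:
            print("%s failed" % name)
        else:
            print("%s thinks the network is dirty" % name)

# toy stand-in for a module such as ooni.dnsooni
dummy = types.ModuleType("dummy")
dummy.example_DNS_Tests = lambda: True
run_suffix_tests(dummy, "_DNS_Tests")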
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/__init__.py b/old-to-be-ported-code/very-old/ooni/plugins/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/dnstest_plgoo.py b/old-to-be-ported-code/very-old/ooni/plugins/dnstest_plgoo.py
new file mode 100644
index 0000000..0c0cfa7
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/plugins/dnstest_plgoo.py
@@ -0,0 +1,84 @@
+#!/usr/bin/python
+
+import sys
+import re
+from pprint import pprint
+from twisted.internet import reactor, endpoints
+from twisted.names import client
+from ooni.plugooni import Plugoo
+from ooni.socksclient import SOCKSv4ClientProtocol, SOCKSWrapper
+
+class DNSTestPlugin(Plugoo):
+ def __init__(self):
+ self.name = ""
+ self.type = ""
+ self.paranoia = ""
+ self.modules_to_import = []
+ self.output_dir = ""
+ self.buf = ""
+ self.control_response = []
+
+ def response_split(self, response):
+ a = []
+ b = []
+ for i in response:
+ a.append(i[0])
+ b.append(i[1])
+
+ return a,b
+
+ def cb(self, type, hostname, dns_server, value):
+ if self.control_response is None:
+ self.control_response = []
+ if type == 'control' and self.control_response != value:
+ print "%s %s" % (dns_server, value)
+ self.control_response.append((dns_server,value))
+ pprint(self.control_response)
+ if type == 'experiment':
+ pprint(self.control_response)
+ _, res = self.response_split(self.control_response)
+ if value not in res:
+ print "res (%s) : " % value
+ pprint(res)
+ print "---"
+ print "%s appears to be censored on %s (%s != %s)" % (hostname, dns_server, res[0], value)
+
+ else:
+ print "%s appears to be clean on %s" % (hostname, dns_server)
+ self.r2.servers = [('212.245.158.66',53)]
+ print "HN: %s %s" % (hostname, value)
+
+ def err(self, pck, error):
+ pprint(pck)
+ error.printTraceback()
+ reactor.stop()
+ print "error!"
+ pass
+
+ def ooni_main(self, args):
+ self.experimentalproxy = ''
+ self.test_hostnames = ['dio.it']
+ self.control_dns = [('8.8.8.8',53), ('4.4.4.8',53)]
+ self.experiment_dns = [('85.37.17.9',53),('212.245.158.66',53)]
+
+ self.control_res = []
+ self.control_response = None
+
+ self.r1 = client.Resolver(None, [self.control_dns.pop()])
+ self.r2 = client.Resolver(None, [self.experiment_dns.pop()])
+
+ for hostname in self.test_hostnames:
+ for dns_server in self.control_dns:
+ self.r1.servers = [dns_server]
+ f = self.r1.getHostByName(hostname)
+ pck = (hostname, dns_server)
+ f.addCallback(lambda x: self.cb('control', hostname, dns_server, x)).addErrback(lambda x: self.err(pck, x))
+
+ for dns_server in self.experiment_dns:
+ self.r2.servers = [dns_server]
+ pck = (hostname, dns_server)
+ f = self.r2.getHostByName(hostname)
+ f.addCallback(lambda x: self.cb('experiment', hostname, dns_server, x)).addErrback(lambda x: self.err(pck, x))
+
+ reactor.run()
+
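The plugoo above resolves each test hostname through a control resolver and an experiment resolver with twisted.names and flags any mismatch. The same comparison can be sketched synchronously with dnspython (assumed installed, 2.x API with resolve()); the resolver addresses below are placeholders, not the ones hard-coded above:

import dns.resolver

def resolve_a(hostname, nameserver):
    # A records for hostname as seen by one specific nameserver
    r = dns.resolver.Resolver(configure=False)
    r.nameservers = [nameserver]
    return {rr.address for rr in r.resolve(hostname, "A")}

def compare(hostname, control_ns="8.8.8.8", experiment_ns="192.0.2.1"):
    control = resolve_a(hostname, control_ns)
    experiment = resolve_a(hostname, experiment_ns)
    if experiment & control:
        print("%s looks consistent on %s" % (hostname, experiment_ns))
    else:
        print("%s differs on %s: %s != %s"
              % (hostname, experiment_ns, sorted(experiment), sorted(control)))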
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/http_plgoo.py b/old-to-be-ported-code/very-old/ooni/plugins/http_plgoo.py
new file mode 100644
index 0000000..021e863
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/plugins/http_plgoo.py
@@ -0,0 +1,70 @@
+#!/usr/bin/python
+
+import sys
+import re
+from twisted.internet import reactor, endpoints
+from twisted.web import client
+from ooni.plugooni import Plugoo
+from ooni.socksclient import SOCKSv4ClientProtocol, SOCKSWrapper
+
+class HttpPlugin(Plugoo):
+ def __init__(self):
+ self.name = ""
+ self.type = ""
+ self.paranoia = ""
+ self.modules_to_import = []
+ self.output_dir = ""
+ self.buf = ''
+
+ def cb(self, type, content):
+ print "got %d bytes from %s" % (len(content), type) # DEBUG
+ if not self.buf:
+ self.buf = content
+ else:
+ if self.buf == content:
+ print "SUCCESS"
+ else:
+ print "FAIL"
+ reactor.stop()
+
+ def endpoint(self, scheme, host, port):
+ ep = None
+ if scheme == 'http':
+ ep = endpoints.TCP4ClientEndpoint(reactor, host, port)
+ elif scheme == 'https':
+ from twisted.internet import ssl
+ ep = endpoints.SSL4ClientEndpoint(reactor, host, port,
+ ssl.ClientContextFactory())
+ return ep
+
+ def ooni_main(self):
+ # We don't have the Command object so cheating for now.
+ url = 'http://check.torproject.org/'
+ self.controlproxy = 'socks4a://127.0.0.1:9050'
+ self.experimentalproxy = ''
+
+ if not re.match("[a-zA-Z0-9]+\:\/\/[a-zA-Z0-9]+", url):
+ return None
+ scheme, host, port, path = client._parse(url)
+
+ ctrl_dest = self.endpoint(scheme, host, port)
+ if not ctrl_dest:
+ raise Exception('unsupported scheme %s in %s' % (scheme, url))
+ if self.controlproxy:
+ _, proxy_host, proxy_port, _ = client._parse(self.controlproxy)
+ control = SOCKSWrapper(reactor, proxy_host, proxy_port, ctrl_dest)
+ else:
+ control = ctrl_dest
+ f = client.HTTPClientFactory(url)
+ f.deferred.addCallback(lambda x: self.cb('control', x))
+ control.connect(f)
+
+ exp_dest = self.endpoint(scheme, host, port)
+ if not exp_dest:
+ raise Exception('unsupported scheme %s in %s' % (scheme, url))
+ # FIXME: use the experiment proxy if there is one
+ experiment = exp_dest
+ f = client.HTTPClientFactory(url)
+ f.deferred.addCallback(lambda x: self.cb('experiment', x))
+ experiment.connect(f)
+
+ reactor.run()
+
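http_plgoo.py fetches the same URL twice, once through Tor's SOCKS port and once directly, and declares SUCCESS only if the two bodies match. Outside Twisted the same idea can be sketched with requests plus PySocks (both assumed installed); note that check.torproject.org deliberately serves different content to Tor and non-Tor clients, so a real comparison would use a URL with stable content:

import requests

TOR_SOCKS = {"http": "socks5h://127.0.0.1:9050",
             "https": "socks5h://127.0.0.1:9050"}

def fetch(url, proxies=None):
    # single GET, returning the raw body bytes
    return requests.get(url, proxies=proxies, timeout=30).content

def compare(url):
    control = fetch(url, proxies=TOR_SOCKS)   # fetched through Tor
    experiment = fetch(url)                   # fetched over the path under test
    print("SUCCESS" if control == experiment else "FAIL")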
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/marco_plgoo.py b/old-to-be-ported-code/very-old/ooni/plugins/marco_plgoo.py
new file mode 100644
index 0000000..cb63df7
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/plugins/marco_plgoo.py
@@ -0,0 +1,377 @@
+#!/usr/bin/python
+# Copyright 2009 The Tor Project, Inc.
+# License at end of file.
+#
+# This tests connections to a list of Tor nodes in a given Tor consensus file
+# while also recording the certificates - it's not a perfect tool but complete
+# or even partial failure should raise alarms.
+#
+# This plugoo uses threads and as a result, it's not friendly to SIGINT signals.
+#
+
+import logging
+import socket
+import time
+import random
+import threading
+import sys
+import os
+try:
+ from ooni.plugooni import Plugoo
+except:
+ print "Error importing Plugoo"
+
+try:
+ from ooni.common import Storage
+except:
+ print "Error importing Storage"
+
+try:
+ from ooni import output
+except:
+ print "Error importing output"
+
+try:
+ from ooni import input
+except:
+ print "Error importing input"
+
+
+
+ssl = OpenSSL = None
+
+try:
+ import ssl
+except ImportError:
+ pass
+
+if ssl is None:
+ try:
+ import OpenSSL.SSL
+ import OpenSSL.crypto
+ except ImportError:
+ pass
+
+if ssl is None and OpenSSL is None:
+ if socket.ssl:
+ print """Your Python is too old to have the ssl module, and you haven't
+installed pyOpenSSL. I'll try to work with what you've got, but I can't
+record certificates so well."""
+ else:
+ print """Your Python has no OpenSSL support. Upgrade to 2.6, install
+pyOpenSSL, or both."""
+ sys.exit(1)
+
+################################################################
+
+# How many servers should we test in parallel?
+N_THREADS = 16
+
+# How long do we give individual socket operations to succeed or fail?
+# (Seconds)
+TIMEOUT = 10
+
+################################################################
+
+CONNECTING = "noconnect"
+HANDSHAKING = "nohandshake"
+OK = "ok"
+ERROR = "err"
+
+LOCK = threading.RLock()
+socket.setdefaulttimeout(TIMEOUT)
+
+def clean_pem_cert(cert):
+ idx = cert.find('-----END')
+ if idx > 1 and cert[idx-1] != '\n':
+ cert = cert.replace('-----END','\n-----END')
+ return cert
+
+def record((addr,port), state, extra=None, cert=None):
+ LOCK.acquire()
+ try:
+ OUT.append({'addr' : addr,
+ 'port' : port,
+ 'state' : state,
+ 'extra' : extra})
+ if cert:
+ CERT_OUT.append({'addr' : addr,
+ 'port' : port,
+ 'clean_cert' : clean_pem_cert(cert)})
+ finally:
+ LOCK.release()
+
+def probe(address,theCtx=None):
+ sock = s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ logging.info("Opening socket to %s",address)
+ try:
+ s.connect(address)
+ except IOError, e:
+ logging.info("Error %s from socket connect.",e)
+ record(address, CONNECTING, e)
+ s.close()
+ return
+ logging.info("Socket to %s open. Launching SSL handshake.",address)
+ if ssl:
+ try:
+ s = ssl.wrap_socket(s,cert_reqs=ssl.CERT_NONE,ca_certs=None)
+ # "MARCO!"
+ s.do_handshake()
+ except IOError, e:
+ logging.info("Error %s from ssl handshake",e)
+ record(address, HANDSHAKING, e)
+ s.close()
+ sock.close()
+ return
+ cert = s.getpeercert(True)
+ if cert != None:
+ cert = ssl.DER_cert_to_PEM_cert(cert)
+ elif OpenSSL:
+ try:
+ s = OpenSSL.SSL.Connection(theCtx, s)
+ s.set_connect_state()
+ s.setblocking(True)
+ s.do_handshake()
+ cert = s.get_peer_certificate()
+ if cert != None:
+ cert = OpenSSL.crypto.dump_certificate(
+ OpenSSL.crypto.FILETYPE_PEM, cert)
+ except IOError, e:
+ logging.info("Error %s from OpenSSL handshake",e)
+ record(address, HANDSHAKING, e)
+ s.close()
+ sock.close()
+ return
+ else:
+ try:
+ s = socket.ssl(s)
+ s.write('a')
+ cert = s.server()
+ except IOError, e:
+ logging.info("Error %s from socket.ssl handshake",e)
+ record(address, HANDSHAKING, e)
+ sock.close()
+ return
+
+ logging.info("SSL handshake with %s finished",address)
+ # "POLO!"
+ record(address,OK, cert=cert)
+ if (ssl or OpenSSL):
+ s.close()
+ sock.close()
+
+def parseNetworkstatus(ns):
+ for line in ns:
+ if line.startswith('r '):
+ r = line.split()
+ yield (r[-3],int(r[-2]))
+
+def parseCachedDescs(cd):
+ for line in cd:
+ if line.startswith('router '):
+ r = line.split()
+ yield (r[2],int(r[3]))
+
+def worker(addrList, origLength):
+ done = False
+ logging.info("Launching thread.")
+
+ if OpenSSL is not None:
+ context = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_METHOD)
+ else:
+ context = None
+
+ while True:
+ LOCK.acquire()
+ try:
+ if addrList:
+ print "Starting test %d/%d"%(
+ 1+origLength-len(addrList),origLength)
+ addr = addrList.pop()
+ else:
+ return
+ finally:
+ LOCK.release()
+
+ try:
+ logging.info("Launching probe for %s",addr)
+ probe(addr, context)
+ except Exception, e:
+ logging.info("Unexpected error from %s",addr)
+ record(addr, ERROR, e)
+
+def runThreaded(addrList, nThreads):
+ ts = []
+ origLen = len(addrList)
+ for num in xrange(nThreads):
+ t = threading.Thread(target=worker, args=(addrList,origLen))
+ t.setName("Th#%s"%num)
+ ts.append(t)
+ t.start()
+ for t in ts:
+ logging.info("Joining thread %s",t.getName())
+ t.join()
+
+def main(self, args):
+ # BEGIN
+ # This logic should be present in more or less all plugoos
+ global OUT
+ global CERT_OUT
+ global OUT_DATA
+ global CERT_OUT_DATA
+ OUT_DATA = []
+ CERT_OUT_DATA = []
+
+ try:
+ OUT = output.data(name=args.output.main) #open(args.output.main, 'w')
+ except:
+ print "No output file given. quitting..."
+ return -1
+
+ try:
+ CERT_OUT = output.data(args.output.certificates) #open(args.output.certificates, 'w')
+ except:
+ print "No output cert file given. quitting..."
+ return -1
+
+ logging.basicConfig(format='%(asctime)s [%(levelname)s] [%(threadName)s] %(message)s',
+ datefmt="%b %d %H:%M:%S",
+ level=logging.INFO,
+ filename=args.log)
+ logging.info("============== STARTING NEW LOG")
+ # END
+
+ if ssl is not None:
+ methodName = "ssl"
+ elif OpenSSL is not None:
+ methodName = "OpenSSL"
+ else:
+ methodName = "socket"
+ logging.info("Running marco with method '%s'", methodName)
+
+ addresses = []
+
+ if args.input.ips:
+ for fn in input.file(args.input.ips).simple():
+ a, b = fn.split(":")
+ addresses.append( (a,int(b)) )
+
+ elif args.input.consensus:
+ for fn in args:
+ print fn
+ for a,b in parseNetworkstatus(open(args.input.consensus)):
+ addresses.append( (a,b) )
+
+ if args.input.randomize:
+ # Take a random permutation of the set the knuth way!
+ for i in range(0, len(addresses)):
+ j = random.randint(0, i)
+ addresses[i], addresses[j] = addresses[j], addresses[i]
+
+ if len(addresses) == 0:
+ logging.error("No input source given, quitting...")
+ return -1
+
+ addresses = list(addresses)
+
+ if not args.input.randomize:
+ addresses.sort()
+
+ runThreaded(addresses, N_THREADS)
+
+class MarcoPlugin(Plugoo):
+ def __init__(self):
+ self.name = ""
+
+ self.modules = [ "logging", "socket", "time", "random", "threading", "sys",
+ "OpenSSL.SSL", "OpenSSL.crypto", "os" ]
+
+ self.input = Storage()
+ self.input.ip = None
+ try:
+ c_file = os.path.expanduser("~/.tor/cached-consensus")
+ open(c_file)
+ self.input.consensus = c_file
+ except:
+ pass
+
+ try:
+ c_file = os.path.expanduser("~/tor/bundle/tor-browser_en-US/Data/Tor/cached-consensus")
+ open(c_file)
+ self.input.consensus = c_file
+ except:
+ pass
+
+ if not self.input.consensus:
+ print "Error: no cached-consensus file found"
+ sys.exit(1)
+
+ self.output = Storage()
+ self.output.main = 'reports/marco-1.yamlooni'
+ self.output.certificates = 'reports/marco_certs-1.out'
+
+ # XXX This needs to be moved to a proper function
+ # refactor, refactor and ... refactor!
+ if os.path.exists(self.output.main):
+ basedir = "/".join(self.output.main.split("/")[:-1])
+ fn = self.output.main.split("/")[-1].split(".")
+ ext = fn[1]
+ name = fn[0].split("-")[0]
+ i = fn[0].split("-")[1]
+ i = int(i) + 1
+ self.output.main = os.path.join(basedir, name + "-" + str(i) + "." + ext)
+
+ if os.path.exists(self.output.certificates):
+ basedir = "/".join(self.output.certificates.split("/")[:-1])
+ fn = self.output.certificates.split("/")[-1].split(".")
+ ext = fn[1]
+ name = fn[0].split("-")[0]
+ i = fn[0].split("-")[1]
+ i = int(i) + 1
+ self.output.certificates= os.path.join(basedir, name + "-" + str(i) + "." + ext)
+
+ # We require for Tor to already be running or have recently run
+ self.args = Storage()
+ self.args.input = self.input
+ self.args.output = self.output
+ self.args.log = 'reports/marco.log'
+
+ def ooni_main(self, cmd):
+ self.args.input.randomize = cmd.randomize
+ self.args.input.ips = cmd.listfile
+ main(self, self.args)
+
+if __name__ == '__main__':
+ if len(sys.argv) < 2:
+ print >> sys.stderr, ("This script takes one or more networkstatus "
+ "files as an argument.")
+ self = None
+ main(self, sys.argv[1:])
+
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#
+# * Neither the names of the copyright owners nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
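The heart of marco's probe() is: TCP connect, TLS handshake without verification, record the peer certificate in PEM. With the ssl module in current Python that collapses to a few lines; this is a sketch, and the relay host/port would come from the parsed consensus as above:

import socket
import ssl

def grab_cert(host, port, timeout=10):
    # TLS-handshake with host:port and return the peer certificate as PEM,
    # without verifying it (we only want to record it, as marco does)
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE
    with socket.create_connection((host, port), timeout=timeout) as sock:
        with ctx.wrap_socket(sock) as tls:
            der = tls.getpeercert(binary_form=True)
    return ssl.DER_cert_to_PEM_cert(der)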
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/proxy_plgoo.py b/old-to-be-ported-code/very-old/ooni/plugins/proxy_plgoo.py
new file mode 100644
index 0000000..d175c1c
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/plugins/proxy_plgoo.py
@@ -0,0 +1,69 @@
+#!/usr/bin/python
+
+import sys
+from twisted.internet import reactor, endpoints
+from twisted.web import client
+from ooni.plugooni import Plugoo
+from ooni.socksclient import SOCKSv4ClientProtocol, SOCKSWrapper
+
+class HttpPlugin(Plugoo):
+ def __init__(self):
+ self.name = ""
+ self.type = ""
+ self.paranoia = ""
+ self.modules_to_import = []
+ self.output_dir = ""
+ self.buf = ''
+
+ def cb(self, type, content):
+ print "got %d bytes from %s" % (len(content), type) # DEBUG
+ if not self.buf:
+ self.buf = content
+ else:
+ if self.buf == content:
+ print "SUCCESS"
+ else:
+ print "FAIL"
+ reactor.stop()
+
+ def endpoint(self, scheme, host, port):
+ ep = None
+ if scheme == 'http':
+ ep = endpoints.TCP4ClientEndpoint(reactor, host, port)
+ elif scheme == 'https':
+ from twisted.internet import ssl
+ ep = endpoints.SSL4ClientEndpoint(reactor, host, port,
+ ssl.ClientContextFactory())
+ return ep
+
+ def ooni_main(self, cmd):
+ # We don't have the Command object so cheating for now.
+ url = cmd.hostname
+
+ # FIXME: validate that url is of the form scheme://host[:port]/path
+ scheme, host, port, path = client._parse(url)
+
+ ctrl_dest = self.endpoint(scheme, host, port)
+ if not ctrl_dest:
+ raise Exception('unsupported scheme %s in %s' % (scheme, url))
+ if cmd.controlproxy:
+ assert scheme != 'https', "no support for proxied https atm, sorry"
+ _, proxy_host, proxy_port, _ = client._parse(cmd.controlproxy)
+ control = SOCKSWrapper(reactor, proxy_host, proxy_port, ctrl_dest)
+ print "proxy: ", proxy_host, proxy_port
+ else:
+ control = ctrl_dest
+ f = client.HTTPClientFactory(url)
+ f.deferred.addCallback(lambda x: self.cb('control', x))
+ control.connect(f)
+
+ exp_dest = self.endpoint(scheme, host, port)
+ if not exp_dest:
+ raise Exception('unsupported scheme %s in %s' % (scheme, url))
+ # FIXME: use the experiment proxy if there is one
+ experiment = exp_dest
+ f = client.HTTPClientFactory(url)
+ f.deferred.addCallback(lambda x: self.cb('experiment', x))
+ experiment.connect(f)
+
+ reactor.run()
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/simple_dns_plgoo.py b/old-to-be-ported-code/very-old/ooni/plugins/simple_dns_plgoo.py
new file mode 100644
index 0000000..87d3684
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/plugins/simple_dns_plgoo.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+#
+# DNS tampering detection module
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+#
+# This module performs DNS queries against a known good resolver and a possible
+# bad resolver. We compare every resolved name against a list of known filters
+# - if we match, we ring a bell; otherwise, we list possible filter IP
+# addresses. There is a high false positive rate for sites that are GeoIP load
+# balanced.
+#
+
+import sys
+import ooni.dnsooni
+
+from ooni.plugooni import Plugoo
+
+class DNSBulkPlugin(Plugoo):
+ def __init__(self):
+ self.in_ = sys.stdin
+ self.out = sys.stdout
+ self.randomize = True # Pass this down properly
+ self.debug = False
+
+ def DNS_Tests(self):
+ print "DNS tampering detection for list of domains:"
+ tests = self.get_tests_by_filter(("_DNS_BULK_Tests"), (ooni.dnsooni))
+ self.run_tests(tests)
+
+ def magic_main(self):
+ self.run_plgoo_tests("_Tests")
+
+ def ooni_main(self, args):
+ self.magic_main()
+
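The module comment spells out the actual decision rule: resolve a name through the suspect resolver and see whether any returned address appears in a list of known filter/block-page IPs, accepting the stated false-positive risk from GeoIP load balancing. Reduced to its core it is a set intersection; the addresses below are illustrative placeholders:

KNOWN_FILTER_IPS = {"203.0.113.7", "203.0.113.8"}  # placeholder block-page IPs

def looks_filtered(resolved_ips, known_filter_ips=KNOWN_FILTER_IPS):
    # True if any resolved address is a known filter/block-page address
    hits = set(resolved_ips) & known_filter_ips
    if hits:
        print("match against known filter IPs: %s" % sorted(hits))
        return True
    return False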
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/tcpcon_plgoo.py b/old-to-be-ported-code/very-old/ooni/plugins/tcpcon_plgoo.py
new file mode 100644
index 0000000..01dee81
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/plugins/tcpcon_plgoo.py
@@ -0,0 +1,278 @@
+#!/usr/bin/python
+# Copyright 2011 The Tor Project, Inc.
+# License at end of file.
+#
+# This is a modified version of the marco plugoo. Given a list of #
+# IP:port addresses, this plugoo will attempt a TCP connection with each
+# host and write the results to a .yamlooni file.
+#
+# This plugoo uses threads and as a result, it's not friendly to SIGINT signals.
+#
+
+import logging
+import socket
+import time
+import random
+import threading
+import sys
+import os
+try:
+ from ooni.plugooni import Plugoo
+except:
+ print "Error importing Plugoo"
+
+try:
+ from ooni.common import Storage
+except:
+ print "Error importing Storage"
+
+try:
+ from ooni import output
+except:
+ print "Error importing output"
+
+try:
+ from ooni import input
+except:
+ print "Error importing input"
+
+################################################################
+
+# How many servers should we test in parallel?
+N_THREADS = 16
+
+# How long do we give individual socket operations to succeed or fail?
+# (Seconds)
+TIMEOUT = 10
+
+################################################################
+
+CONNECTING = "noconnect"
+OK = "ok"
+ERROR = "err"
+
+LOCK = threading.RLock()
+socket.setdefaulttimeout(TIMEOUT)
+
+# We will want to log the IP address, the port and the state
+def record((addr,port), state, extra=None):
+ LOCK.acquire()
+ try:
+ OUT.append({'addr' : addr,
+ 'port' : port,
+ 'state' : state,
+ 'extra' : extra})
+ finally:
+ LOCK.release()
+
+# For each IP address in the list, open a socket, write to the log and
+# then close the socket
+def probe(address,theCtx=None):
+ sock = s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ logging.info("Opening socket to %s",address)
+ try:
+ s.connect(address)
+ except IOError, e:
+ logging.info("Error %s from socket connect.",e)
+ record(address, CONNECTING, e)
+ s.close()
+ return
+ logging.info("Socket to %s open. TCP handshake completed.",address)
+ record(address, OK)
+ s.close()
+
+def parseNetworkstatus(ns):
+ for line in ns:
+ if line.startswith('r '):
+ r = line.split()
+ yield (r[-3],int(r[-2]))
+
+def parseCachedDescs(cd):
+ for line in cd:
+ if line.startswith('router '):
+ r = line.split()
+ yield (r[2],int(r[3]))
+
+def worker(addrList, origLength):
+ done = False
+ context = None
+
+ while True:
+ LOCK.acquire()
+ try:
+ if addrList:
+ print "Starting test %d/%d"%(
+ 1+origLength-len(addrList),origLength)
+ addr = addrList.pop()
+ else:
+ return
+ finally:
+ LOCK.release()
+
+ try:
+ logging.info("Launching probe for %s",addr)
+ probe(addr, context)
+ except Exception, e:
+ logging.info("Unexpected error from %s",addr)
+ record(addr, ERROR, e)
+
+def runThreaded(addrList, nThreads):
+ ts = []
+ origLen = len(addrList)
+ for num in xrange(nThreads):
+ t = threading.Thread(target=worker, args=(addrList,origLen))
+ t.setName("Th#%s"%num)
+ ts.append(t)
+ t.start()
+ for t in ts:
+ t.join()
+
+def main(self, args):
+ # BEGIN
+ # This logic should be present in more or less all plugoos
+ global OUT
+ global OUT_DATA
+ OUT_DATA = []
+
+ try:
+ OUT = output.data(name=args.output.main) #open(args.output.main, 'w')
+ except:
+ print "No output file given. quitting..."
+ return -1
+
+ logging.basicConfig(format='%(asctime)s [%(levelname)s] [%(threadName)s] %(message)s',
+ datefmt="%b %d %H:%M:%S",
+ level=logging.INFO,
+ filename=args.log)
+ logging.info("============== STARTING NEW LOG")
+ # END
+
+ methodName = "socket"
+ logging.info("Running tcpcon with method '%s'", methodName)
+
+ addresses = []
+
+ if args.input.ips:
+ for fn in input.file(args.input.ips).simple():
+ a, b = fn.split(":")
+ addresses.append( (a,int(b)) )
+
+ elif args.input.consensus:
+ for fn in args:
+ print fn
+ for a,b in parseNetworkstatus(open(args.input.consensus)):
+ addresses.append( (a,b) )
+
+ if args.input.randomize:
+ # Take a random permutation of the set the knuth way!
+ for i in range(0, len(addresses)):
+ j = random.randint(0, i)
+ addresses[i], addresses[j] = addresses[j], addresses[i]
+
+ if len(addresses) == 0:
+ logging.error("No input source given, quitting...")
+ return -1
+
+ addresses = list(addresses)
+
+ if not args.input.randomize:
+ addresses.sort()
+
+ runThreaded(addresses, N_THREADS)
+
+class MarcoPlugin(Plugoo):
+ def __init__(self):
+ self.name = ""
+
+ self.modules = [ "logging", "socket", "time", "random", "threading", "sys",
+ "os" ]
+
+ self.input = Storage()
+ self.input.ip = None
+ try:
+ c_file = os.path.expanduser("~/.tor/cached-consensus")
+ open(c_file)
+ self.input.consensus = c_file
+ except:
+ pass
+
+ try:
+ c_file = os.path.expanduser("~/tor/bundle/tor-browser_en-US/Data/Tor/cached-consensus")
+ open(c_file)
+ self.input.consensus = c_file
+ except:
+ pass
+
+ if not self.input.consensus:
+ print "Error: no cached-consensus file found"
+ sys.exit(1)
+
+ self.output = Storage()
+ self.output.main = 'reports/tcpcon-1.yamlooni'
+ self.output.certificates = 'reports/tcpcon_certs-1.out'
+
+ # XXX This needs to be moved to a proper function
+ # refactor, refactor and ... refactor!
+ if os.path.exists(self.output.main):
+ basedir = "/".join(self.output.main.split("/")[:-1])
+ fn = self.output.main.split("/")[-1].split(".")
+ ext = fn[1]
+ name = fn[0].split("-")[0]
+ i = fn[0].split("-")[1]
+ i = int(i) + 1
+ self.output.main = os.path.join(basedir, name + "-" + str(i) + "." + ext)
+
+ if os.path.exists(self.output.certificates):
+ basedir = "/".join(self.output.certificates.split("/")[:-1])
+ fn = self.output.certificates.split("/")[-1].split(".")
+ ext = fn[1]
+ name = fn[0].split("-")[0]
+ i = fn[0].split("-")[1]
+ i = int(i) + 1
+ self.output.certificates= os.path.join(basedir, name + "-" + str(i) + "." + ext)
+
+ # We require for Tor to already be running or have recently run
+ self.args = Storage()
+ self.args.input = self.input
+ self.args.output = self.output
+ self.args.log = 'reports/tcpcon.log'
+
+ def ooni_main(self, cmd):
+ self.args.input.randomize = cmd.randomize
+ self.args.input.ips = cmd.listfile
+ main(self, self.args)
+
+if __name__ == '__main__':
+ if len(sys.argv) < 2:
+ print >> sys.stderr, ("This script takes one or more networkstatus "
+ "files as an argument.")
+ self = None
+ main(self, sys.argv[1:])
+
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#
+# * Neither the names of the copyright owners nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
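tcpcon strips marco down to a plain TCP reachability check per host:port, recording "ok" or "noconnect". A standard-library sketch of that probe (not the plugoo itself) is just:

import socket

def tcp_probe(addr, timeout=10):
    # ('ok', None) if a TCP connection to (host, port) succeeds,
    # ('noconnect', error) otherwise
    try:
        with socket.create_connection(addr, timeout=timeout):
            return "ok", None
    except OSError as e:
        return "noconnect", e

# e.g. tcp_probe(("127.0.0.1", 9050))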
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/tor.py b/old-to-be-ported-code/very-old/ooni/plugins/tor.py
new file mode 100644
index 0000000..0d95d4d
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/plugins/tor.py
@@ -0,0 +1,80 @@
+import re
+import os.path
+import signal
+import subprocess
+import socket
+import threading
+import time
+import logging
+
+from pytorctl import TorCtl
+
+torrc = os.path.join(os.getcwd(),'torrc') #os.path.join(projroot, 'globaleaks', 'tor', 'torrc')
+# hiddenservice = os.path.join(projroot, 'globaleaks', 'tor', 'hiddenservice')
+
+class ThreadProc(threading.Thread):
+ def __init__(self, cmd):
+ threading.Thread.__init__(self)
+ self.cmd = cmd
+ self.proc = None
+
+ def run(self):
+ print "running"
+ try:
+ self.proc = subprocess.Popen(self.cmd,
+ shell = False, stdout = subprocess.PIPE,
+ stderr = subprocess.PIPE)
+
+ except OSError:
+ logging.fatal('cannot execute command')
+
+class Tor:
+ def __init__(self):
+ self.start()
+
+ def check(self):
+ conn = TorCtl.connect()
+ if conn != None:
+ conn.close()
+ return True
+
+ return False
+
+
+ def start(self):
+ if not os.path.exists(torrc):
+ raise OSError("torrc doesn't exist (%s)" % torrc)
+
+ tor_cmd = ["tor", "-f", torrc]
+
+ torproc = ThreadProc(tor_cmd)
+ torproc.run()
+
+ bootstrap_line = re.compile("Bootstrapped 100%: ")
+
+ while True:
+ if torproc.proc == None:
+ time.sleep(1)
+ continue
+
+ init_line = torproc.proc.stdout.readline().strip()
+
+ if not init_line:
+ torproc.proc.kill()
+ return False
+
+ if bootstrap_line.search(init_line):
+ break
+
+ return True
+
+ def stop(self):
+ if not self.check():
+ return
+
+ conn = TorCtl.connect()
+ if conn != None:
+ conn.send_signal("SHUTDOWN")
+ conn.close()
+
+t = Tor()
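tor.py starts a tor process against a local torrc and reads stdout until the "Bootstrapped 100%" line appears. A compact subprocess-only sketch of that start-up wait, assuming a tor binary on PATH and a torrc path supplied by the caller:

import subprocess

def start_tor(torrc_path, max_lines=1000):
    # start tor with the given torrc and block until it reports
    # "Bootstrapped 100%"; returns the Popen handle, or None on failure
    proc = subprocess.Popen(["tor", "-f", torrc_path],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            universal_newlines=True)
    for _ in range(max_lines):
        line = proc.stdout.readline()
        if not line:              # tor exited before bootstrapping
            proc.kill()
            return None
        if "Bootstrapped 100%" in line:
            return proc
    proc.kill()
    return None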
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/torrc b/old-to-be-ported-code/very-old/ooni/plugins/torrc
new file mode 100644
index 0000000..b9ffc80
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/plugins/torrc
@@ -0,0 +1,9 @@
+SocksPort 9050
+ControlPort 9051
+VirtualAddrNetwork 10.23.47.0/10
+AutomapHostsOnResolve 1
+TransPort 9040
+TransListenAddress 127.0.0.1
+DNSPort 5353
+DNSListenAddress 127.0.0.1
+
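A quick sanity check for a tor started with this torrc is to confirm that its TCP listeners (SocksPort 9050, ControlPort 9051, TransPort 9040) accept connections; DNSPort 5353 is UDP and is not covered by this sketch:

import socket

def check_tcp_ports(host="127.0.0.1", ports=(9050, 9051, 9040)):
    # report which of the configured TCP listeners accept connections
    for port in ports:
        try:
            with socket.create_connection((host, port), timeout=3):
                print("port %d: listening" % port)
        except OSError as e:
            print("port %d: not reachable (%s)" % (port, e))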
diff --git a/old-to-be-ported-code/very-old/ooni/plugooni.py b/old-to-be-ported-code/very-old/ooni/plugooni.py
new file mode 100644
index 0000000..17f17b3
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/plugooni.py
@@ -0,0 +1,106 @@
+#!/usr/bin/env python
+#
+# Plugooni, ooni plugin module for loading plgoo files.
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+# Arturo Filasto' <art(a)fuffa.org>
+
+import sys
+import os
+
+import imp, pkgutil, inspect
+
+class Plugoo:
+ def __init__(self, name, plugin_type, paranoia, author):
+ self.name = name
+ self.author = author
+ self.type = plugin_type
+ self.paranoia = paranoia
+
+ """
+ Expect a tuple of strings in 'filters' and a tuple of ooni 'plugins'.
+ Return a list of (plugin, function) tuples that match 'filter' in 'plugins'.
+ """
+ def get_tests_by_filter(self, filters, plugins):
+ ret_functions = []
+
+ for plugin in plugins:
+ for function_ptr in dir(plugin):
+ if function_ptr.endswith(filters):
+ ret_functions.append((plugin,function_ptr))
+ return ret_functions
+
+ """
+ Expect a list of (plugin, function) tuples that must be ran, and three strings 'clean'
+ 'dirty' and 'failed'.
+ Run the tests and print 'clean','dirty' or 'failed' according to the test result.
+ """
+ def run_tests(self, tests, clean="clean", dirty="dirty", failed="failed"):
+ for test in tests:
+ filter_result = getattr(test[0], test[1])(self)
+ if filter_result == True:
+ print test[1] + ": " + clean
+ elif filter_result == None:
+ print test[1] + ": " + failed
+ else:
+ print test[1] + ": " + dirty
+
+ """
+ Find all the tests belonging to plgoo 'self' and run them.
+ We know the tests when we see them because they end in 'filter'.
+ """
+ def run_plgoo_tests(self, filter):
+ for function_ptr in dir(self):
+ if function_ptr.endswith(filter):
+ getattr(self, function_ptr)()
+
+PLUGIN_PATHS = [os.path.join(os.getcwd(), "ooni", "plugins")]
+RESERVED_NAMES = [ "skel_plgoo" ]
+
+class Plugooni():
+ def __init__(self, args):
+ self.in_ = sys.stdin
+ self.out = sys.stdout
+ self.debug = False
+ self.loadall = True
+ self.plugin_name = args.plugin_name
+ self.listfile = args.listfile
+
+ self.plgoo_found = False
+
+ # Print all the plugoons to stdout.
+ def list_plugoons(self):
+ print "Plugooni list:"
+ for loader, name, ispkg in pkgutil.iter_modules(PLUGIN_PATHS):
+ if name not in RESERVED_NAMES:
+ print "\t%s" %(name.split("_")[0])
+
+ # Return name of the plgoo class of a plugin.
+ # We know because it always ends with "Plugin".
+ def get_plgoo_class(self,plugin):
+ for memb_name, memb in inspect.getmembers(plugin, inspect.isclass):
+ if memb.__name__.endswith("Plugin"):
+ return memb
+
+ # This function is responsible for loading and running the plugoons
+ # the user wants to run.
+ def run(self, command_object):
+ print "Plugooni: the ooni plgoo plugin module loader"
+
+ # iterate all modules
+ for loader, name, ispkg in pkgutil.iter_modules(PLUGIN_PATHS):
+ # see if this module should be loaded
+ if (self.plugin_name == "all") or (name == self.plugin_name+"_plgoo"):
+ self.plgoo_found = True # we found at least one plgoo!
+
+ file, pathname, desc = imp.find_module(name, PLUGIN_PATHS)
+ # load module
+ plugin = imp.load_module(name, file, pathname, desc)
+ # instantiate plgoo class and call its ooni_main()
+ self.get_plgoo_class(plugin)().ooni_main(command_object)
+
+ # if we couldn't find the plgoo; whine to the user
+ if self.plgoo_found is False:
+ print "Plugooni could not find plugin '%s'!" %(self.plugin_name)
+
+if __name__ == '__main__':
+ self.main()
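Plugooni discovers plugin modules with pkgutil, loads them through the long-deprecated imp module, and instantiates the class whose name ends in "Plugin". The same discovery can be sketched with importlib and inspect; the example path in the comment is an assumption matching PLUGIN_PATHS above:

import importlib.util
import inspect
import os

def load_plgoo_class(path):
    # load a plugin file and return its *Plugin class, or None
    name = os.path.splitext(os.path.basename(path))[0]
    spec = importlib.util.spec_from_file_location(name, path)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    for _, member in inspect.getmembers(module, inspect.isclass):
        if member.__name__.endswith("Plugin"):
            return member
    return None

# e.g. cls = load_plgoo_class("ooni/plugins/http_plgoo.py"); cls().ooni_main(cmd)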
diff --git a/old-to-be-ported-code/very-old/ooni/transparenthttp.py b/old-to-be-ported-code/very-old/ooni/transparenthttp.py
new file mode 100644
index 0000000..311fb32
--- /dev/null
+++ b/old-to-be-ported-code/very-old/ooni/transparenthttp.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+#
+# Captive Portal Detection With Multi-Vendor Emulation
+# by Jacob Appelbaum <jacob(a)appelbaum.net>
+#
+# This module performs multiple tests that match specific vendor
+# mitm proxies
+
+import sys
+import ooni.http
+import ooni.report
+
+class TransparentHTTPProxy():
+ def __init__(self, args):
+ self.in_ = sys.stdin
+ self.out = sys.stdout
+ self.debug = False
+ self.logger = ooni.report.Log().logger
+
+ def TransparentHTTPProxy_Tests(self):
+ print "Transparent HTTP Proxy:"
+ filter_name = "_TransparentHTTP_Tests"
+ tests = [ooni.http]
+ for test in tests:
+ for function_ptr in dir(test):
+ if function_ptr.endswith(filter_name):
+ filter_result = getattr(test, function_ptr)(self)
+ if filter_result == True:
+ print function_ptr + " thinks the network is clean"
+ elif filter_result == None:
+ print function_ptr + " failed"
+ else:
+ print function_ptr + " thinks the network is dirty"
+
+ def main(self):
+ for function_ptr in dir(self):
+ if function_ptr.endswith("_Tests"):
+ getattr(self, function_ptr)()
+
+if __name__ == '__main__':
+ self.main()
diff --git a/old-to-be-ported-code/very-old/traceroute.py b/old-to-be-ported-code/very-old/traceroute.py
new file mode 100644
index 0000000..e8252c1
--- /dev/null
+++ b/old-to-be-ported-code/very-old/traceroute.py
@@ -0,0 +1,108 @@
+try:
+ from dns import resolver
+except:
+ print "Error: dnspython is not installed (http://www.dnspython.org/)"
+import gevent
+import os
+import plugoo
+
+try:
+ import scapy
+except:
+ print "Error: traceroute plugin requires scapy to be installed (http://www.secdev.org/projects/scapy)"
+
+from plugoo.assets import Asset
+from plugoo.tests import Test
+
+import socket
+
+__plugoo__ = "Traceroute"
+__desc__ = "Performs TTL walking tests"
+
+class TracerouteAsset(Asset):
+ def __init__(self, file=None):
+ self = Asset.__init__(self, file)
+
+
+class Traceroute(Test):
+ """A *very* quick and dirty traceroute implementation, UDP and TCP
+ """
+ def traceroute(self, dst, dst_port=3880, src_port=3000, proto="tcp", max_hops=30):
+ dest_addr = socket.gethostbyname(dst)
+ print "Doing traceroute on %s" % dst
+
+ recv = socket.getprotobyname('icmp')
+ send = socket.getprotobyname(proto)
+ ttl = 1
+ while True:
+ recv_sock = socket.socket(socket.AF_INET, socket.SOCK_RAW, recv)
+ if proto == "tcp":
+ send_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, send)
+ else:
+ send_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, send)
+ recv_sock.settimeout(10)
+ send_sock.settimeout(10)
+
+ send_sock.setsockopt(socket.SOL_IP, socket.IP_TTL, ttl)
+ recv_sock.bind(("", src_port))
+ if proto == "tcp":
+ try:
+ send_sock.settimeout(2)
+ send_sock.connect((dst, dst_port))
+ except socket.timeout:
+ pass
+
+ except Exception, e:
+ print "Error doing connect %s" % e
+ else:
+ send_sock.sendto("", (dst, dst_port))
+
+ curr_addr = None
+ try:
+ print "receiving data..."
+ _, curr_addr = recv_sock.recvfrom(512)
+ curr_addr = curr_addr[0]
+
+ except socket.error, e:
+ print "SOCKET ERROR: %s" % e
+
+ except Exception, e:
+ print "ERROR: %s" % e
+
+ finally:
+ send_sock.close()
+ recv_sock.close()
+
+ if curr_addr is not None:
+ curr_host = "%s" % curr_addr
+ else:
+ curr_host = "*"
+
+ print "%d\t%s" % (ttl, curr_host)
+
+ if curr_addr == dest_addr or ttl > max_hops:
+ break
+
+ ttl += 1
+
+
+ def experiment(self, *a, **kw):
+ # this is just a dirty hack
+ address = kw['data'][0]
+
+ self.traceroute(address)
+
+def run(ooni):
+ """Run the test"""
+ config = ooni.config
+ urls = []
+
+ traceroute_experiment = TracerouteAsset(os.path.join(config.main.assetdir, \
+ config.tests.traceroute))
+
+ assets = [traceroute_experiment]
+
+ traceroute = Traceroute(ooni)
+ ooni.logger.info("starting traceroute test")
+ traceroute.run(assets)
+ ooni.logger.info("finished")
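The traceroute above walks the TTL by hand with raw sockets. Since scapy is already a dependency of this plugin, the same TTL walk can be sketched as a single sr() call over a TTL range (root privileges required; the target is whatever host you pass in):

from scapy.all import IP, ICMP, sr

def quick_traceroute(target, max_hops=25, timeout=3):
    # send ICMP echo requests with TTL 1..max_hops and print each hop seen
    ans, _unans = sr(IP(dst=target, ttl=(1, max_hops)) / ICMP(), timeout=timeout)
    for snd, rcv in sorted(ans, key=lambda pair: pair[0].ttl):
        print("%2d  %s" % (snd.ttl, rcv.src))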
[ooni-probe/master] Added basic ICMP classes for testing reachability
by isis@torproject.org 03 Nov '12
commit 8d2a1919f7755c5c084576a7ac1bdd37bb4c52be
Author: Isis Lovecruft <isis(a)torproject.org>
Date: Sun Aug 19 08:34:12 2012 +0000
Added basic ICMP classes for testing reachability
---
ooni/plugins/echo.py | 127 ++++++++++++++++++++++++++++++++++++++++++++++++++
1 files changed, 127 insertions(+), 0 deletions(-)
diff --git a/ooni/plugins/echo.py b/ooni/plugins/echo.py
new file mode 100644
index 0000000..bc1b2a8
--- /dev/null
+++ b/ooni/plugins/echo.py
@@ -0,0 +1,127 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+#
+# +---------+
+# | echo.py |
+# +---------+
+# A simple ICMP-8 ping test.
+#
+# :author: Isis Lovecruft
+# :version: 0.1.0-pre-alpha
+# :license: (c) 2012 Isis Lovecruft
+# see attached LICENCE file
+#
+
+import os
+import sys
+
+from twisted.plugin import IPlugin
+from twisted.python import usage
+from zope.interface import implements
+
+from lib import txscapy
+from utils import log
+from plugoo.assets import Asset
+from plugoo.interface import ITest
+from protocols.scapyproto import ScapyTest
+
+class EchoOptions(usage.Options):
+ optParameters = [
+ ['interface', 'i', None, 'Network interface to use'],
+ ['destination', 'd', None, 'File of hosts to ping'],
+ ['count', 'c', 5, 'Number of packets to send', int],
+ ['size', 's', 56, 'Number of bytes to send in ICMP data field', int],
+ ['ttl', 't', 25, 'Set the IP Time to Live', int],
+ ]
+ optFlags = []
+
+class EchoAsset(Asset):
+ def __init__(self, file=None):
+ self = Asset.__init__(self, file)
+
+ def parse_line(self, line):
+ if line.startswith('#'):
+ return
+ else:
+ return line.replace('\n', '')
+
+class EchoTest(ScapyTest):
+ implements(IPlugin, ITest)
+
+ shortName = 'echo'
+ description = 'A simple ICMP-8 test to check if a host is reachable'
+ options = EchoOptions
+ requirements = None
+ blocking = False
+
+ pcap_file = 'echo.pcap'
+ receive = True
+
+ def initialize(self):
+ self.request = {}
+ self.response = {}
+
+ if self.local_options:
+
+ options = self.local_options
+
+ if options['interface']:
+ self.interface = options['interface']
+
+ if options['count']:
+ ## there's a Counter() somewhere, use it
+ self.count = options['count']
+
+ if options['size']:
+ self.size = options['size']
+
+ if options['ttl']:
+ self.ttl = options['ttl']
+
+ def load_assets(self):
+ assets = {}
+ option = self.local_options
+
+ if option and option['destination']:
+
+ try:
+ from scapy.all import IP
+ except:
+ log.err()
+
+ if os.path.isfile(option['destination']):
+ with open(option['destination']) as hosts:
+ for line in hosts.readlines():
+ assets.update({'host': EchoAsset(line)})
+ else:
+ if type(option['destination']) is str:
+ try:
+ IP(option['destination'])
+ except:
+ log.err()
+ else:
+ assets.update({'host': option['destination']})
+ else:
+ log.msg("Couldn't understand destination option...")
+ log.msg("Give one IPv4 address, or a file with one address per line.")
+ return assets
+
+ def experiment(self, args):
+ if len(args) == 0:
+ log.err("Error: We're Echo, not Narcissus!")
+ log.err(" Provide a list of hosts to ping...")
+ d = sys.exit(1)
+ return d
+
+ ## XXX v4 / v6
+ from scapy.all import ICMP, IP, sr
+ ping = sr(IP(dst=args)/ICMP())
+ if ping:
+ self.response.update(ping.show())
+ else:
+ log.msg('No response received from %s' % args)
+
+ def control(self, *args):
+ pass
+
+echo = EchoTest(None, None, None)
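EchoTest's experiment() amounts to one scapy ICMP echo round-trip per destination. A minimal reachability check along those lines, again requiring root and with a placeholder address in the usage comment:

from scapy.all import IP, ICMP, sr1

def is_reachable(host, timeout=2):
    # one ICMP echo request; True if any reply comes back
    reply = sr1(IP(dst=host) / ICMP(), timeout=timeout, verbose=False)
    return reply is not None

# e.g. is_reachable("192.0.2.1")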
[translation/orbot_completed] Update translations for orbot_completed
by translation@torproject.org 03 Nov '12
commit 34358a8f66e6ca15b4272202eaa33f358d5c4e89
Author: Translation commit bot <translation(a)torproject.org>
Date: Sat Nov 3 01:15:11 2012 +0000
Update translations for orbot_completed
---
values-es/strings.xml | 10 +++++-----
1 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/values-es/strings.xml b/values-es/strings.xml
index 109afe5..afa9042 100644
--- a/values-es/strings.xml
+++ b/values-es/strings.xml
@@ -30,14 +30,14 @@
<string name="button_help">Ayuda</string>
<string name="button_close">Cerrar</string>
<string name="button_about">Acerca de</string>
- <string name="button_clear_log">Borrar Registro</string>
+ <string name="button_clear_log">Borrar Registro (log)</string>
<string name="menu_verify">Verificar</string>
<string name="menu_exit">Salir</string>
<string name="press_to_start">- mantega pulsado para iniciar -</string>
- <string name="pref_trans_proxy_group">Proxificación Transparente (requiere acceso como root)</string>
- <string name="pref_trans_proxy_title">Proxificación Transparente</string>
- <string name="pref_trans_proxy_summary">Torificación Automática de las Aplicaciones</string>
- <string name="pref_transparent_all_title">Todo por Tor</string>
+ <string name="pref_trans_proxy_group">Proxyficación transparente (requiere acceder como root)</string>
+ <string name="pref_trans_proxy_title">Proxyficación Transparente</string>
+ <string name="pref_trans_proxy_summary">Torificación automática de aplicaciones</string>
+ <string name="pref_transparent_all_title">Todo a través de Tor</string>
<string name="pref_transparent_all_summary">Proxifica el tráfico de todas las aplicaciones a través de Tor</string>
<string name="pref_transparent_port_fallback_title">Desconexión de Puertos Proxy</string>
<string name="pref_transparent_port_fallback_summary">ADVERTENCIA: Esto evita los puertos comunes (80, 443, etc). *USAR SÓLO* si los modos \'Todo\' o \'Aplicaciones\' no funcionan.</string>