[tor-commits] [ooni-probe/master] * Moved /old-to-be-ported to /to-be-ported.

isis at torproject.org isis at torproject.org
Sun Nov 4 13:50:41 UTC 2012


commit ed7cb1a39289d18eb869151dfa376d9b73be6c1c
Author: Isis Lovecruft <isis at torproject.org>
Date:   Sun Nov 4 13:49:36 2012 +0000

    * Moved /old-to-be-ported to /to-be-ported.
---
 old-to-be-ported-code/TODO                         |  418 --------------------
 .../old-api/.ropeproject/config.py                 |   85 ----
 .../old-api/.ropeproject/globalnames               |  Bin 108 -> 0 bytes
 old-to-be-ported-code/old-api/.ropeproject/history |    1 -
 .../old-api/.ropeproject/objectdb                  |  Bin 741 -> 0 bytes
 old-to-be-ported-code/old-api/TESTS_ARE_MOVING.txt |    8 -
 old-to-be-ported-code/old-api/chinatrigger.py      |  140 -------
 old-to-be-ported-code/old-api/daphn3.py            |  152 -------
 old-to-be-ported-code/old-api/domclass.py          |  216 ----------
 old-to-be-ported-code/old-api/dropin.cache         |  243 ------------
 old-to-be-ported-code/old-api/httpt.py             |   94 -----
 old-to-be-ported-code/old-api/tcpconnect.py        |   65 ---
 old-to-be-ported-code/old-api/tcpscan.py           |   84 ----
 old-to-be-ported-code/spec/proxooni-spec.txt       |   65 ---
 old-to-be-ported-code/very-old/TODO.plgoons        |   79 ----
 old-to-be-ported-code/very-old/TO_BE_PORTED        |   14 -
 old-to-be-ported-code/very-old/ooni-probe.diff     |  358 -----------------
 old-to-be-ported-code/very-old/ooni/#namecheck.py# |   39 --
 old-to-be-ported-code/very-old/ooni/.DS_Store      |  Bin 15364 -> 0 bytes
 old-to-be-ported-code/very-old/ooni/__init__.py    |   12 -
 old-to-be-ported-code/very-old/ooni/command.py     |  250 ------------
 .../very-old/ooni/dns_poisoning.py                 |   43 --
 old-to-be-ported-code/very-old/ooni/dnsooni.py     |  356 -----------------
 old-to-be-ported-code/very-old/ooni/helpers.py     |   38 --
 old-to-be-ported-code/very-old/ooni/http.py        |  306 --------------
 old-to-be-ported-code/very-old/ooni/input.py       |   33 --
 old-to-be-ported-code/very-old/ooni/namecheck.py   |   39 --
 .../very-old/ooni/plugins/dnstest_plgoo.py         |   84 ----
 .../very-old/ooni/plugins/http_plgoo.py            |   70 ----
 .../very-old/ooni/plugins/marco_plgoo.py           |  377 ------------------
 .../very-old/ooni/plugins/proxy_plgoo.py           |   69 ----
 .../very-old/ooni/plugins/simple_dns_plgoo.py      |   35 --
 .../very-old/ooni/plugins/tcpcon_plgoo.py          |  278 -------------
 old-to-be-ported-code/very-old/ooni/plugins/tor.py |   80 ----
 old-to-be-ported-code/very-old/ooni/plugins/torrc  |    9 -
 old-to-be-ported-code/very-old/ooni/plugooni.py    |  106 -----
 .../very-old/ooni/transparenthttp.py               |   41 --
 old-to-be-ported-code/very-old/traceroute.py       |  108 -----
 to-be-ported/TODO                                  |  418 ++++++++++++++++++++
 to-be-ported/old-api/.ropeproject/config.py        |   85 ++++
 to-be-ported/old-api/.ropeproject/globalnames      |  Bin 0 -> 108 bytes
 to-be-ported/old-api/.ropeproject/history          |    1 +
 to-be-ported/old-api/.ropeproject/objectdb         |  Bin 0 -> 741 bytes
 to-be-ported/old-api/TESTS_ARE_MOVING.txt          |    8 +
 to-be-ported/old-api/chinatrigger.py               |  140 +++++++
 to-be-ported/old-api/daphn3.py                     |  152 +++++++
 to-be-ported/old-api/domclass.py                   |  216 ++++++++++
 to-be-ported/old-api/dropin.cache                  |  243 ++++++++++++
 to-be-ported/old-api/httpt.py                      |   94 +++++
 to-be-ported/old-api/tcpconnect.py                 |   65 +++
 to-be-ported/old-api/tcpscan.py                    |   84 ++++
 to-be-ported/spec/proxooni-spec.txt                |   65 +++
 to-be-ported/very-old/TODO.plgoons                 |   79 ++++
 to-be-ported/very-old/TO_BE_PORTED                 |   14 +
 to-be-ported/very-old/ooni-probe.diff              |  358 +++++++++++++++++
 to-be-ported/very-old/ooni/#namecheck.py#          |   39 ++
 to-be-ported/very-old/ooni/.DS_Store               |  Bin 0 -> 15364 bytes
 to-be-ported/very-old/ooni/__init__.py             |   12 +
 to-be-ported/very-old/ooni/command.py              |  250 ++++++++++++
 to-be-ported/very-old/ooni/dns_poisoning.py        |   43 ++
 to-be-ported/very-old/ooni/dnsooni.py              |  356 +++++++++++++++++
 to-be-ported/very-old/ooni/helpers.py              |   38 ++
 to-be-ported/very-old/ooni/http.py                 |  306 ++++++++++++++
 to-be-ported/very-old/ooni/input.py                |   33 ++
 to-be-ported/very-old/ooni/namecheck.py            |   39 ++
 .../very-old/ooni/plugins/dnstest_plgoo.py         |   84 ++++
 to-be-ported/very-old/ooni/plugins/http_plgoo.py   |   70 ++++
 to-be-ported/very-old/ooni/plugins/marco_plgoo.py  |  377 ++++++++++++++++++
 to-be-ported/very-old/ooni/plugins/proxy_plgoo.py  |   69 ++++
 .../very-old/ooni/plugins/simple_dns_plgoo.py      |   35 ++
 to-be-ported/very-old/ooni/plugins/tcpcon_plgoo.py |  278 +++++++++++++
 to-be-ported/very-old/ooni/plugins/tor.py          |   80 ++++
 to-be-ported/very-old/ooni/plugins/torrc           |    9 +
 to-be-ported/very-old/ooni/plugooni.py             |  106 +++++
 to-be-ported/very-old/ooni/transparenthttp.py      |   41 ++
 to-be-ported/very-old/traceroute.py                |  108 +++++
 76 files changed, 4395 insertions(+), 4395 deletions(-)

diff --git a/old-to-be-ported-code/TODO b/old-to-be-ported-code/TODO
deleted file mode 100644
index 81d834f..0000000
--- a/old-to-be-ported-code/TODO
+++ /dev/null
@@ -1,418 +0,0 @@
-This is a list of techniques that should be added as plugins or hooks or yamlooni
-
-Implement Plugooni - our plugin framework
-Implement Yamlooni - our output format
-Implement Proxooni - our proxy spec and program
-
-We should launch our own Tor on a special port (say, 127.0.0.1:9066)
-We should act as a controller with TorCtl to do this, etc
-We should take the Tor consensus file and pass it to plugins such as marco
-
-HTTP Host header comparsion of a vs b
-HTTP Content length header comparision of a vs b
-
-GET request splitting
-  "G E T "
-  Used in Iran
-
-General Malformed HTTP requests
-  Error pages are fingerprintable
-
-traceroute
-  icmp/udp/tcp
-  each network link is an edge, each hop is a vertex in a network graph
-
-traceroute hop count
-  "TTL walking"
-
-Latency measurement
-TCP reset detection
-Forged DNS spoofing detection
-
-DNS oracle query tool
-  given DNS server foo - test resolve and look for known block pages
-
-Test HTTP header order - do they get reordered?
-
-Look for these filter fingerprints:
-X-Squid-Error: ERR_SCC_SMARTFILTER_DENIED 0 
-X-Squid-Error: ERR_ACCESS_DENIED 0 
-X-Cache: MISS from SmartFilter 
-
-
-WWW-Authenticate: Basic realm="SmartFilter Control List HTTP Download" 
-
-
-Via: 1.1 WEBFILTER.CONSERVESCHOOL.ORG:8080 
-
-X-Cache: MISS from webfilter.whiteschneider.com 
-X-Cache: MISS from webfilter.whiteschneider.com 
-X-Cache: MISS from webfilter.whiteschneider.com 
-
-Location: http://192.168.0.244/webfilter/blockpage?nonce=7d2b7e500e99a0fe&tid=3 
-
-
-X-Cache: MISS from webfilter.imscs.local 
-X-Cache: MISS from webfilter.tjs.at
-
-
-Via: 1.1 webwasher (Webwasher 6.8.7.9396) 
-
-Websense:
-HTTP/1.0 301 Moved Permanently  -> Location: http://www.websense.com/
-
-Via: HTTP/1.1 localhost.localdomain (Websense-Content_Gateway/7.1.4 [c s f ]), HTTP/1.0 localhost.localdomain (Websense-Content_Gateway/7.1.4 [cMsSf ]) 
-
-
-BlueCoat:
-
-Via: 1.1 testrating.dc5.es.bluecoat.com 
-403 -> 
-Set-Cookie: BIGipServerpool_bluecoat=1185677834.20480.0000; expires=Fri, 15-Apr-2011 10:13:21 GMT; path=/ 
-
-HTTP/1.0 407 Proxy Authentication Required ( The ISA Server requires authorization to fulfill the request. Access to the Web Proxy filter is denied. )  -> Via: 1.1 WEBSENSE 
-
-HTTP/1.0 302 Found -> Location: http://bluecoat/?cfru=aHR0cDovLzIwMC4yNy4xMjMuMTc4Lw== 
-
-HTTP/1.0 403 Forbidden 
-Server: squid/3.0.STABLE8 
-
-X-Squid-Error: ERR_ACCESS_DENIED 0 
-X-Cache: MISS from Bluecoat 
-X-Cache-Lookup: NONE from Bluecoat:3128 
-Via: 1.0 Bluecoat (squid/3.0.STABLE8) 
-
-ISA server:
-HTTP/1.0 403 Forbidden ( ISA Server is configured to block HTTP requests that require authentication. ) 
-
-
-Unknown:
-X-XSS-Protection: 1; mode=block 
-
-Rimon filter:
-
-Rimon: RWC_BLOCK
-HTTP/1.1 Rimon header
-Rimon header is only sent by lighttpd
-http://www.ynetnews.com/articles/0,7340,L-3446129,00.html
-http://btya.org/pdfs/rvienerbrochure.pdf
-
-Korea filtering:
-HTTP/1.0 302 Object Moved -> Location: http://www.willtechnology.co.kr/eng/BlockingMSGew.htm 
-Redirects to Korean filter:
-http://www.willtechnology.co.kr/eng/BlockingMSGew.htm
-
-UA filtering:
-HTTP/1.0 307 Temporary Redirect 
-https://my.best.net.ua/login/blocked/
-
-netsweeper:
-HTTP/1.0 302 Moved 
-Location: http://netsweeper1.gaggle.net:8080/webadmin/deny/index.php?dpid=53&dpruleid=53&cat=254&ttl=-905&groupname=default&policyname=default&username=-&userip=74.82.57.112&connectionip=1.0.0.127&nsphostname=netsweeper1.gaggle.net&protocol=nsef&dplanguage=-&url=http%3a%2f%2f184%2e105%2e199%2e252%2f 
-
-Set-cookie: RT_SID_netsweeper.com.80=68a6f5c564a9db297e8feb2bff69d73f; path=/ 
-X-Cache: MISS from netsweeper.irishbroadband.ie 
-X-Cache-Lookup: NONE from netsweeper.irishbroadband.ie:80 
-Via: 1.0 netsweeper.irishbroadband.ie:80 (squid/2.6.STABLE21)
-
-Nokia:
-Via: 1.1 saec-nokiaq05ca (NetCache NetApp/6.0.7) 
-Server: "Nokia" 
-
-CensorNet:
-HTTP/1.0 401 Authorization Required 
-WWW-Authenticate: Basic realm="CensorNet Administration Area" 
-Server: CensorNet/4.0 
-
-http://www.itcensor.com/censor 
-
-
-Server: ZyWALL Content Filter
-
-Apache/1.3.34 (Unix) filter/1.0
-
-HTTP/1.0 502 infiniteproxyloop 
-Via: 1.0 218.102.20.37 (McAfee Web Gateway 7.0.1.5.0.8505) 
-
-
-Set-Cookie: McAfee-SCM-URL-Filter-Coach="dD4OzXciEcp8Ihf1dD4ZzHM5FMZ2PSvRTllOnSR4RZkqfkmEIGgb3hZlVJsEaFaXNmNS3mgsdZAxaVOKIGgrrSx4Rb8hekmNKn4g02VZToogf1SbIQcVz3Q8G/U="; Comment="McAfee URL access coaching"; Version=1; Path=/; Max-Age=900; expires=Sat, 18 Dec 2010 06:47:11 GMT; 
-
-
-WWW-Authenticate: Basic realm="(Nancy McAfee)" 
-
-
-No known fingerprints for:
-NetNanny
-WebChaver
-accountable2you.com
-http://www.shodanhq.com/?q=barracuda
-http://www.shodanhq.com/?q=untangle
-http://www.shodanhq.com/?q=Lightspeed
-
-Server: Smarthouse Lightspeed 
-Server: Smarthouse Lightspeed2 
-Server: Smarthouse Lightspeed 3 
-
-Server: EdgePrism/3.8.1.1 
-
-
-X-Cache: MISS from Barracuda-WebFilter.jmpsecurities.com 
-Via: 1.0 Barracuda-WebFilter.jmpsecurities.com:8080 (http_scan/4.0.2.6.19) 
-
-HTTP/1.0 302 Redirected by M86 Web Filter
-http://www.m86security.com/products/web_security/m86-web-filter.asp
-
-Location: http://10.1.61.37:81/cgi/block.cgi?URL=http://70.182.111.99/&IP=96.9.174.54&CAT=WEMAIL&USER=DEFAULT&CE=0 
-
-
-Via: 1.1 WEBSENSE 
-
-
-Via: 1.1 192.168.1.251 (McAfee Web Gateway 7.1.0.1.0.10541) 
-Via: 1.1 McAfeeSA3000.cbcl.lan 
-
-
-X-Squid-Error: ERR_CONNECT_FAIL 111 
-X-Cache: MISS from CudaWebFilter.poten.com  
-
-http://212.50.251.82/ -iran squid
-
-HTTP/1.0 403 Forbidden ( Forefront TMG denied the specified Uniform Resource Locator (URL). ) 
-Via: 1.1 TMG 
-
-
-Server: NetCache appliance (NetApp/6.0.2) 
-
-
-Server: EdgePrism/3.8.1.1 
-
-
-Server: Mikrotik HttpProxy 
-
-
-Via: 1.1 TMG-04, 1.1 TMG-03 
-
-
-X-Squid-Error: ERR_INVALID_REQ 0 
-X-Cache: MISS from uspa150.trustedproxies.com 
-X-Cache-Lookup: NONE from uspa150.trustedproxies.com:80 
-
-http://www.shodanhq.com/host/view/93.125.95.177
-
-
-Server: SarfX WEB: Self Automation Redirect & Filter Expernet.Ltd Security Web Server 
-http://203.229.245.100/ <- korea block page
-
-
-
-Server: Asroc Intelligent Security Filter 4.1.8 
-
-
-
-Server: tinyproxy/1.8.2 
-
-http://www.shodanhq.com/host/view/64.104.95.251
-
-
-
-Server: Asroc Intelligent Security Filter 4.1.8 
-
-http://www.shodanhq.com/host/view/67.220.92.62
-
-
-Server: SarfX WEB: Self Automation Redirect & Filter Expernet.Ltd Security Web Server 
-http://www.shodanhq.com/host/view/203.229.245.100
-Location: http://192.168.3.20/redirect.cgi?Time=05%2FJul%2F2011%3A21%3A29%3A32%20%2B0800&ID=0000034097&Client_IP=173.212.232.58&User=-&Site=64.104.95.251&URI=-&Status_Code=403&Decision_Tag=BLOCK_ADMIN_PROTOCOL-DefaultGroup-DefaultGroup-NONE-NONE-NONE&URL_Cat=URL%20Filtering%20Bypassed&WBRS=-&DVS_Verdict=-&DVS_ThreatName=-&Reauth_URL=- 
-
-
-http://www.shodanhq.com/?q=%22content+filter%22+-squid+-apache+-ZyWall&page=4
-http://www.shodanhq.com/host/view/72.5.92.51
-http://www.microsoft.com/forefront/threat-management-gateway/en/us/pricing-licensing.aspx
-
-http://meta.wikimedia.org/wiki/Talk:XFF_project
-
-% dig nats.epiccash.com       
-
-; <<>> DiG 9.7.3 <<>> nats.epiccash.com
-;; global options: +cmd
-;; Got answer:
-;; ->>HEADER<<- opcode: QUERY, status: NOERROR, id: 14920
-;; flags: qr rd ra; QUERY: 1, ANSWER: 1, AUTHORITY: 2, ADDITIONAL: 0
-
-;; QUESTION SECTION:
-;nats.epiccash.com.		IN	A
-
-;; ANSWER SECTION:
-nats.epiccash.com.	5	IN	A	172.27.0.1
-
-;; AUTHORITY SECTION:
-epiccash.com.		5	IN	NS	ns0.example.net.
-epiccash.com.		5	IN	NS	ns1.example.net.
-
-;; Query time: 81 msec
-;; SERVER: 172.16.42.2#53(172.16.42.2)
-;; WHEN: Sat Jul 16 16:14:11 2011
-;; MSG SIZE  rcvd: 98
-
-If we think it's squid, we can perhaps confirm it:
-echo -e "GET cache_object://localhost/info HTTP/1.0\r\n" | nc en.wikipedia.com 80                                                                                                                                                      
-Harvest urls from:
-http://urlblacklist.com/?sec=download
-
-https://secure.wikimedia.org/wikipedia/simple/wiki/User_talk:62.30.249.131
-
-mention WCCPv2 filters (http://www.cl.cam.ac.uk/~rnc1/talks/090528-uknof13.pdf)
-
-Cite a bunch of Richard's work:
-http://www.cl.cam.ac.uk/~rnc1/ignoring.pdf
-
-http://www.contentkeeper.com/products/web
-
-We should detect HTTP re-directs to rfc-1918 addresses; they're almost always captive portals.
-We should also detect HTTP MITM served from rfc-1918 addresses for the same reason.
-
-We should take a page from sshshuttle and run without touching the disk
-
-VIA Rail MITM's SSL In Ottawa:
-Jul 22 17:47:21.983 [Warning] Problem bootstrapping. Stuck at 85%: Finishing handshake with first hop. (DONE; DONE; count 13; recommendation warn)
-
-http://wireless.colubris.com:81/goform/HtmlLoginRequest?username=al1852&password=al1852
-
-VIA Rail Via header (DONE):
-
-HTTP/1.0 301 Moved Permanently
-Location: http://www.google.com/
-Content-Type: text/html; charset=UTF-8
-Date: Sat, 23 Jul 2011 02:21:30 GMT
-Expires: Mon, 22 Aug 2011 02:21:30 GMT
-Cache-Control: public, max-age=2592000
-Server: gws
-Content-Length: 219
-X-XSS-Protection: 1; mode=block
-X-Cache: MISS from cache_server
-X-Cache-Lookup: MISS from cache_server:3128
-Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
-Connection: close
-
-<HTML><HEAD><meta http-equiv="content-type" content="text/html;charset=utf-8">
-<TITLE>301 Moved</TITLE></HEAD><BODY>
-<H1>301 Moved</H1>
-The document has moved
-<A HREF="http://www.google.com/">here</A>.
-</BODY></HTML>
-
-
-blocked site (DONE):
-
-HTTP/1.0 302 Moved Temporarily
-Server: squid/2.6.STABLE21
-Date: Sat, 23 Jul 2011 02:22:17 GMT
-Content-Length: 0
-Location: http://10.66.66.66/denied.html
-
-invalid request response:
-
-$ nc 8.8.8.8 80 (DONE)
-hjdashjkdsahjkdsa
-HTTP/1.0 400 Bad Request
-Server: squid/2.6.STABLE21
-Date: Sat, 23 Jul 2011 02:22:44 GMT
-Content-Type: text/html
-Content-Length: 1178
-Expires: Sat, 23 Jul 2011 02:22:44 GMT
-X-Squid-Error: ERR_INVALID_REQ 0
-X-Cache: MISS from cache_server
-X-Cache-Lookup: NONE from cache_server:3128
-Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
-Proxy-Connection: close
-
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-<HTML><HEAD><META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
-<TITLE>ERROR: The requested URL could not be retrieved</TITLE>
-<STYLE type="text/css"><!--BODY{background-color:#ffffff;font-family:verdana,sans-serif}PRE{font-family:sans-serif}--></STYLE>
-</HEAD><BODY>
-<H1>ERROR</H1>
-<H2>The requested URL could not be retrieved</H2>
-<HR noshade size="1px">
-<P>
-While trying to process the request:
-<PRE>
-hjdashjkdsahjkdsa
-
-</PRE>
-<P>
-The following error was encountered:
-<UL>
-<LI>
-<STRONG>
-Invalid Request
-</STRONG>
-</UL>
-
-<P>
-Some aspect of the HTTP Request is invalid.  Possible problems:
-<UL>
-<LI>Missing or unknown request method
-<LI>Missing URL
-<LI>Missing HTTP Identifier (HTTP/1.0)
-<LI>Request is too large
-<LI>Content-Length missing for POST or PUT requests
-<LI>Illegal character in hostname; underscores are not allowed
-</UL>
-<P>Your cache administrator is <A HREF="mailto:root">root</A>. 
-
-<BR clear="all">
-<HR noshade size="1px">
-<ADDRESS>
-Generated Sat, 23 Jul 2011 02:22:44 GMT by cache_server (squid/2.6.STABLE21)
-</ADDRESS>
-</BODY></HTML>
-
-nc 10.66.66.66 80
-GET cache_object://localhost/info HTTP/1.0
-HTTP/1.0 403 Forbidden
-Server: squid/2.6.STABLE21
-Date: Sat, 23 Jul 2011 02:25:56 GMT
-Content-Type: text/html
-Content-Length: 1061
-Expires: Sat, 23 Jul 2011 02:25:56 GMT
-X-Squid-Error: ERR_ACCESS_DENIED 0
-X-Cache: MISS from cache_server
-X-Cache-Lookup: NONE from cache_server:3128
-Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
-Proxy-Connection: close
-
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-<HTML><HEAD><META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
-<TITLE>ERROR: The requested URL could not be retrieved</TITLE>
-<STYLE type="text/css"><!--BODY{background-color:#ffffff;font-family:verdana,sans-serif}PRE{font-family:sans-serif}--></STYLE>
-</HEAD><BODY>
-<H1>ERROR</H1>
-<H2>The requested URL could not be retrieved</H2>
-<HR noshade size="1px">
-<P>
-While trying to retrieve the URL:
-<A HREF="cache_object://localhost/info">cache_object://localhost/info</A>
-<P>
-The following error was encountered:
-<UL>
-<LI>
-<STRONG>
-Access Denied.
-</STRONG>
-<P>
-Access control configuration prevents your request from
-being allowed at this time.  Please contact your service provider if
-you feel this is incorrect.
-</UL>
-<P>Your cache administrator is <A HREF="mailto:root">root</A>. 
-
-
-<BR clear="all">
-<HR noshade size="1px">
-<ADDRESS>
-Generated Sat, 23 Jul 2011 02:25:56 GMT by cache_server (squid/2.6.STABLE21)
-</ADDRESS>
-</BODY></HTML>
-
-
diff --git a/old-to-be-ported-code/old-api/.ropeproject/config.py b/old-to-be-ported-code/old-api/.ropeproject/config.py
deleted file mode 100644
index ffebcd4..0000000
--- a/old-to-be-ported-code/old-api/.ropeproject/config.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# The default ``config.py``
-
-
-def set_prefs(prefs):
-    """This function is called before opening the project"""
-
-    # Specify which files and folders to ignore in the project.
-    # Changes to ignored resources are not added to the history and
-    # VCSs.  Also they are not returned in `Project.get_files()`.
-    # Note that ``?`` and ``*`` match all characters but slashes.
-    # '*.pyc': matches 'test.pyc' and 'pkg/test.pyc'
-    # 'mod*.pyc': matches 'test/mod1.pyc' but not 'mod/1.pyc'
-    # '.svn': matches 'pkg/.svn' and all of its children
-    # 'build/*.o': matches 'build/lib.o' but not 'build/sub/lib.o'
-    # 'build//*.o': matches 'build/lib.o' and 'build/sub/lib.o'
-    prefs['ignored_resources'] = ['*.pyc', '*~', '.ropeproject',
-                                  '.hg', '.svn', '_svn', '.git']
-
-    # Specifies which files should be considered python files.  It is
-    # useful when you have scripts inside your project.  Only files
-    # ending with ``.py`` are considered to be python files by
-    # default.
-    #prefs['python_files'] = ['*.py']
-
-    # Custom source folders:  By default rope searches the project
-    # for finding source folders (folders that should be searched
-    # for finding modules).  You can add paths to that list.  Note
-    # that rope guesses project source folders correctly most of the
-    # time; use this if you have any problems.
-    # The folders should be relative to project root and use '/' for
-    # separating folders regardless of the platform rope is running on.
-    # 'src/my_source_folder' for instance.
-    #prefs.add('source_folders', 'src')
-
-    # You can extend python path for looking up modules
-    #prefs.add('python_path', '~/python/')
-
-    # Should rope save object information or not.
-    prefs['save_objectdb'] = True
-    prefs['compress_objectdb'] = False
-
-    # If `True`, rope analyzes each module when it is being saved.
-    prefs['automatic_soa'] = True
-    # The depth of calls to follow in static object analysis
-    prefs['soa_followed_calls'] = 0
-
-    # If `False` when running modules or unit tests "dynamic object
-    # analysis" is turned off.  This makes them much faster.
-    prefs['perform_doa'] = True
-
-    # Rope can check the validity of its object DB when running.
-    prefs['validate_objectdb'] = True
-
-    # How many undos to hold?
-    prefs['max_history_items'] = 32
-
-    # Shows whether to save history across sessions.
-    prefs['save_history'] = True
-    prefs['compress_history'] = False
-
-    # Set the number spaces used for indenting.  According to
-    # :PEP:`8`, it is best to use 4 spaces.  Since most of rope's
-    # unit-tests use 4 spaces it is more reliable, too.
-    prefs['indent_size'] = 4
-
-    # Builtin and c-extension modules that are allowed to be imported
-    # and inspected by rope.
-    prefs['extension_modules'] = []
-
-    # Add all standard c-extensions to extension_modules list.
-    prefs['import_dynload_stdmods'] = True
-
-    # If `True` modules with syntax errors are considered to be empty.
-    # The default value is `False`; When `False` syntax errors raise
-    # `rope.base.exceptions.ModuleSyntaxError` exception.
-    prefs['ignore_syntax_errors'] = False
-
-    # If `True`, rope ignores unresolvable imports.  Otherwise, they
-    # appear in the importing namespace.
-    prefs['ignore_bad_imports'] = False
-
-
-def project_opened(project):
-    """This function is called after opening the project"""
-    # Do whatever you like here!
diff --git a/old-to-be-ported-code/old-api/.ropeproject/globalnames b/old-to-be-ported-code/old-api/.ropeproject/globalnames
deleted file mode 100644
index 2877ef5..0000000
Binary files a/old-to-be-ported-code/old-api/.ropeproject/globalnames and /dev/null differ
diff --git a/old-to-be-ported-code/old-api/.ropeproject/history b/old-to-be-ported-code/old-api/.ropeproject/history
deleted file mode 100644
index fcd9c96..0000000
--- a/old-to-be-ported-code/old-api/.ropeproject/history
+++ /dev/null
@@ -1 +0,0 @@
-€]q(]q]qe.
\ No newline at end of file
diff --git a/old-to-be-ported-code/old-api/.ropeproject/objectdb b/old-to-be-ported-code/old-api/.ropeproject/objectdb
deleted file mode 100644
index f276839..0000000
Binary files a/old-to-be-ported-code/old-api/.ropeproject/objectdb and /dev/null differ
diff --git a/old-to-be-ported-code/old-api/TESTS_ARE_MOVING.txt b/old-to-be-ported-code/old-api/TESTS_ARE_MOVING.txt
deleted file mode 100644
index f4c0084..0000000
--- a/old-to-be-ported-code/old-api/TESTS_ARE_MOVING.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-7/10/2012
-
-All new tests will be moved to the directory /nettests/.
-
-Tests that are in this directory are either here for historical reasons or have
-not yet been properly tested and fully supporting the new API.
-
-A.
diff --git a/old-to-be-ported-code/old-api/chinatrigger.py b/old-to-be-ported-code/old-api/chinatrigger.py
deleted file mode 100644
index cf4bcb3..0000000
--- a/old-to-be-ported-code/old-api/chinatrigger.py
+++ /dev/null
@@ -1,140 +0,0 @@
-import random
-import string
-import struct
-import time
-
-from zope.interface import implements
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from twisted.internet import protocol, defer
-from ooni.plugoo.tests import ITest, OONITest
-from ooni.plugoo.assets import Asset
-from ooni.utils import log
-from ooni.protocols.scapyproto import ScapyTest
-
-from ooni.lib.txscapy import txsr, txsend
-
-class scapyArgs(usage.Options):
-    optParameters = [['dst', 'd', None, 'Specify the target address'],
-                     ['port', 'p', None, 'Specify the target port'],
-                     ['pcap', 'f', None, 'The pcap file to write with the sent and received packets'],
-                    ]
-
-class ChinaTriggerTest(ScapyTest):
-    """
-    This test is a OONI based implementation of the C tool written
-    by Philipp Winter to engage chinese probes in active scanning.
-
-    Example of running it:
-    ./ooni/ooniprobe.py chinatrigger -d 127.0.0.1 -p 8080 -f bla.pcap
-    """
-    implements(IPlugin, ITest)
-
-    shortName = "chinatrigger"
-    description = "Triggers the chinese probes into scanning"
-    requirements = ['root']
-    options = scapyArgs
-    blocking = False
-
-    receive = True
-    pcapfile = 'example_scapy.pcap'
-    timeout = 5
-
-    def initialize(self, reactor=None):
-        if not self.reactor:
-            from twisted.internet import reactor
-            self.reactor = reactor
-
-    @staticmethod
-    def set_random_servername(pkt):
-        ret = pkt[:121]
-        for i in range(16):
-            ret += random.choice(string.ascii_lowercase)
-        ret += pkt[121+16:]
-        return ret
-
-    @staticmethod
-    def set_random_time(pkt):
-        ret = pkt[:11]
-        ret += struct.pack('!I', int(time.time()))
-        ret += pkt[11+4:]
-        return ret
-
-    @staticmethod
-    def set_random_field(pkt):
-        ret = pkt[:15]
-        for i in range(28):
-            ret += chr(random.randint(0, 256))
-        ret += pkt[15+28:]
-        return ret
-
-    @staticmethod
-    def mutate(pkt, idx):
-        """
-        Slightly changed mutate function.
-        """
-        ret = pkt[:idx-1]
-        mutation = chr(random.randint(0, 256))
-        while mutation == pkt[idx]:
-            mutation = chr(random.randint(0, 256))
-        ret += mutation
-        ret += pkt[idx:]
-        return ret
-
-    @staticmethod
-    def set_all_random_fields(pkt):
-        pkt = ChinaTriggerTest.set_random_servername(pkt)
-        pkt = ChinaTriggerTest.set_random_time(pkt)
-        pkt = ChinaTriggerTest.set_random_field(pkt)
-        return pkt
-
-    def build_packets(self, *args, **kw):
-        """
-        Override this method to build scapy packets.
-        """
-        from scapy.all import IP, TCP
-        pkt = "\x16\x03\x01\x00\xcc\x01\x00\x00\xc8"\
-              "\x03\x01\x4f\x12\xe5\x63\x3f\xef\x7d"\
-              "\x20\xb9\x94\xaa\x04\xb0\xc1\xd4\x8c"\
-              "\x50\xcd\xe2\xf9\x2f\xa9\xfb\x78\xca"\
-              "\x02\xa8\x73\xe7\x0e\xa8\xf9\x00\x00"\
-              "\x3a\xc0\x0a\xc0\x14\x00\x39\x00\x38"\
-              "\xc0\x0f\xc0\x05\x00\x35\xc0\x07\xc0"\
-              "\x09\xc0\x11\xc0\x13\x00\x33\x00\x32"\
-              "\xc0\x0c\xc0\x0e\xc0\x02\xc0\x04\x00"\
-              "\x04\x00\x05\x00\x2f\xc0\x08\xc0\x12"\
-              "\x00\x16\x00\x13\xc0\x0d\xc0\x03\xfe"\
-              "\xff\x00\x0a\x00\xff\x01\x00\x00\x65"\
-              "\x00\x00\x00\x1d\x00\x1b\x00\x00\x18"\
-              "\x77\x77\x77\x2e\x67\x6e\x6c\x69\x67"\
-              "\x78\x7a\x70\x79\x76\x6f\x35\x66\x76"\
-              "\x6b\x64\x2e\x63\x6f\x6d\x00\x0b\x00"\
-              "\x04\x03\x00\x01\x02\x00\x0a\x00\x34"\
-              "\x00\x32\x00\x01\x00\x02\x00\x03\x00"\
-              "\x04\x00\x05\x00\x06\x00\x07\x00\x08"\
-              "\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00"\
-              "\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11"\
-              "\x00\x12\x00\x13\x00\x14\x00\x15\x00"\
-              "\x16\x00\x17\x00\x18\x00\x19\x00\x23"\
-              "\x00\x00"
-
-        pkt = ChinaTriggerTest.set_all_random_fields(pkt)
-        pkts = [IP(dst=self.dst)/TCP(dport=self.port)/pkt]
-        for x in range(len(pkt)):
-            mutation = IP(dst=self.dst)/TCP(dport=self.port)/ChinaTriggerTest.mutate(pkt, x)
-            pkts.append(mutation)
-        return pkts
-
-    def load_assets(self):
-        if self.local_options:
-            self.dst = self.local_options['dst']
-            self.port = int(self.local_options['port'])
-            if self.local_options['pcap']:
-                self.pcapfile = self.local_options['pcap']
-            if not self.port or not self.dst:
-                pass
-
-        return {}
-
-#chinatrigger = ChinaTriggerTest(None, None, None)
-
diff --git a/old-to-be-ported-code/old-api/daphn3.py b/old-to-be-ported-code/old-api/daphn3.py
deleted file mode 100644
index bf4d60d..0000000
--- a/old-to-be-ported-code/old-api/daphn3.py
+++ /dev/null
@@ -1,152 +0,0 @@
-"""
-This is a self genrated test created by scaffolding.py.
-you will need to fill it up with all your necessities.
-Safe hacking :).
-"""
-from zope.interface import implements
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from twisted.internet import protocol, endpoints
-
-from ooni.plugoo import reports
-from ooni.plugoo.tests import ITest, OONITest
-from ooni.plugoo.assets import Asset
-from ooni.protocols import daphn3
-from ooni.utils import log
-
-class Daphn3ClientProtocol(daphn3.Daphn3Protocol):
-    def connectionMade(self):
-        self.next_state()
-
-class Daphn3ClientFactory(protocol.ClientFactory):
-    protocol = Daphn3ClientProtocol
-    mutator = None
-    steps = None
-    test = None
-
-    def buildProtocol(self, addr):
-        p = self.protocol()
-        p.factory = self
-        p.test = self.test
-
-        if self.steps:
-            p.steps = self.steps
-
-        if not self.mutator:
-            self.mutator = daphn3.Mutator(p.steps)
-
-        else:
-            print "Moving on to next mutation"
-            self.mutator.next()
-
-        p.mutator = self.mutator
-        p.current_state = self.mutator.state()
-        return p
-
-    def clientConnectionFailed(self, reason):
-        print "We failed connecting the the OONIB"
-        print "Cannot perform test. Perhaps it got blocked?"
-        print "Please report this to tor-assistants at torproject.org"
-        self.test.result['error'] = ('Failed in connecting to OONIB', reason)
-        self.test.end(d)
-
-    def clientConnectionLost(self, reason):
-        print "Connection Lost."
-
-class daphn3Args(usage.Options):
-    optParameters = [['pcap', 'f', None,
-                        'PCAP to read for generating the YAML output'],
-
-                     ['output', 'o', 'daphn3.yaml',
-                        'What file should be written'],
-
-                     ['yaml', 'y', None,
-                        'The input file to the test'],
-
-                     ['host', 'h', None, 'Target Hostname'],
-                     ['port', 'p', None, 'Target port number'],
-                     ['resume', 'r', 0, 'Resume at this index']]
-
-class daphn3Test(OONITest):
-    implements(IPlugin, ITest)
-
-    shortName = "daphn3"
-    description = "daphn3"
-    requirements = None
-    options = daphn3Args
-    blocking = False
-
-    local_options = None
-
-    steps = None
-
-    def initialize(self):
-        if not self.local_options:
-            self.end()
-            return
-
-        self.factory = Daphn3ClientFactory()
-        self.factory.test = self
-
-        if self.local_options['pcap']:
-            self.tool = True
-
-        elif self.local_options['yaml']:
-            self.steps = daphn3.read_yaml(self.local_options['yaml'])
-
-        else:
-            log.msg("Not enough inputs specified to the test")
-            self.end()
-
-    def runTool(self):
-        import yaml
-        pcap = daphn3.read_pcap(self.local_options['pcap'])
-        f = open(self.local_options['output'], 'w')
-        f.write(yaml.dump(pcap))
-        f.close()
-
-    def control(self, exp_res, args):
-        try:
-            mutation = self.factory.mutator.get(0)
-            self.result['censored'] = False
-        except:
-            mutation = None
-
-        return {'mutation_number': args['mutation'],
-                'value': mutation}
-
-    def _failure(self, *argc, **kw):
-        self.result['censored'] = True
-        self.result['error'] = ('Failed in connecting', (argc, kw))
-        self.end()
-
-    def experiment(self, args):
-        log.msg("Doing mutation %s" % args['mutation'])
-        self.factory.steps = self.steps
-        host = self.local_options['host']
-        port = int(self.local_options['port'])
-        log.msg("Connecting to %s:%s" % (host, port))
-
-        if self.ended:
-            return
-
-        endpoint = endpoints.TCP4ClientEndpoint(self.reactor, host, port)
-        d = endpoint.connect(self.factory)
-        d.addErrback(self._failure)
-        return d
-
-    def load_assets(self):
-        if not self.local_options:
-            return {}
-        if not self.steps:
-            print "Error: No assets!"
-            self.end()
-            return {}
-        mutations = 0
-        for x in self.steps:
-            mutations += len(x['data'])
-        return {'mutation': range(mutations)}
-
-# We need to instantiate it otherwise getPlugins does not detect it
-# XXX Find a way to load plugins without instantiating them.
-#daphn3test = daphn3Test(None, None, None)
diff --git a/old-to-be-ported-code/old-api/domclass.py b/old-to-be-ported-code/old-api/domclass.py
deleted file mode 100644
index 3080c40..0000000
--- a/old-to-be-ported-code/old-api/domclass.py
+++ /dev/null
@@ -1,216 +0,0 @@
-#!/usr/bin/env python
-#-*- encoding: utf-8 -*-
-#
-#    domclass
-#    ********
-#
-#    :copyright: (c) 2012 by Arturo Filastò
-#    :license: see LICENSE for more details.
-#
-#    how this works
-#    --------------
-#
-#    This classifier uses the DOM structure of a website to determine how similar
-#    the two sites are.
-#    The procedure we use is the following:
-#        * First we parse all the DOM tree of the web page and we build a list of
-#          TAG parent child relationships (ex. <html><a><b></b></a><c></c></html> =>
-#          (html, a), (a, b), (html, c)).
-#
-#        * We then use this information to build a matrix (M) where m[i][j] = P(of
-#          transitioning from tag[i] to tag[j]). If tag[i] does not exists P() = 0.
-#          Note: M is a square matrix that is number_of_tags wide.
-#
-#        * We then calculate the eigenvectors (v_i) and eigenvalues (e) of M.
-#
-#        * The corelation between page A and B is given via this formula:
-#          correlation = dot_product(e_A, e_B), where e_A and e_B are
-#          resepectively the eigenvalues for the probability matrix A and the
-#          probability matrix B.
-#
-
-try:
-    import numpy
-except:
-    print "Error numpy not installed!"
-
-import yaml
-from zope.interface import implements
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from ooni.plugoo.tests import ITest, OONITest
-from ooni.plugoo.assets import Asset
-from ooni.utils import log
-from ooni.protocols.http import HTTPTest
-
-class domclassArgs(usage.Options):
-    optParameters = [['output', 'o', None, 'Output to write'],
-                     ['file', 'f', None, 'Corpus file'],
-                     ['fileb', 'b', None, 'Corpus file'],
-                     ['urls', 'u', None, 'URL List'],
-                     ['resume', 'r', 0, 'Resume at this index']]
-
-# All HTML4 tags
-# XXX add link to W3C page where these came from
-alltags = ['A', 'ABBR', 'ACRONYM', 'ADDRESS', 'APPLET', 'AREA', 'B', 'BASE',
-           'BASEFONT', 'BD', 'BIG', 'BLOCKQUOTE', 'BODY', 'BR', 'BUTTON', 'CAPTION',
-           'CENTER', 'CITE', 'CODE', 'COL', 'COLGROUP', 'DD', 'DEL', 'DFN', 'DIR', 'DIV',
-           'DL', 'DT', 'E M', 'FIELDSET', 'FONT', 'FORM', 'FRAME', 'FRAMESET', 'H1', 'H2',
-           'H3', 'H4', 'H5', 'H6', 'HEAD', 'HR', 'HTML', 'I', 'IFRAME ', 'IMG',
-           'INPUT', 'INS', 'ISINDEX', 'KBD', 'LABEL', 'LEGEND', 'LI', 'LINK', 'MAP',
-           'MENU', 'META', 'NOFRAMES', 'NOSCRIPT', 'OBJECT', 'OL', 'OPTGROUP', 'OPTION',
-           'P', 'PARAM', 'PRE', 'Q', 'S', 'SAMP', 'SCRIPT', 'SELECT', 'SMALL', 'SPAN',
-           'STRIKE', 'STRONG', 'STYLE', 'SUB', 'SUP', 'TABLE', 'TBODY', 'TD',
-           'TEXTAREA', 'TFOOT', 'TH', 'THEAD', 'TITLE', 'TR', 'TT', 'U', 'UL', 'VAR']
-
-# Reduced subset of only the most common tags
-commontags = ['A', 'B', 'BLOCKQUOTE', 'BODY', 'BR', 'BUTTON', 'CAPTION',
-           'CENTER', 'CITE', 'CODE', 'COL', 'DD', 'DIV',
-           'DL', 'DT', 'EM', 'FIELDSET', 'FONT', 'FORM', 'FRAME', 'FRAMESET', 'H1', 'H2',
-           'H3', 'H4', 'H5', 'H6', 'HEAD', 'HR', 'HTML', 'IFRAME ', 'IMG',
-           'INPUT', 'INS', 'LABEL', 'LEGEND', 'LI', 'LINK', 'MAP',
-           'MENU', 'META', 'NOFRAMES', 'NOSCRIPT', 'OBJECT', 'OL', 'OPTION',
-           'P', 'PRE', 'SCRIPT', 'SELECT', 'SMALL', 'SPAN',
-           'STRIKE', 'STRONG', 'STYLE', 'SUB', 'SUP', 'TABLE', 'TBODY', 'TD',
-           'TEXTAREA', 'TFOOT', 'TH', 'THEAD', 'TITLE', 'TR', 'TT', 'U', 'UL']
-
-# The tags we are intested in using for our analysis
-thetags = ['A', 'DIV', 'FRAME', 'H1', 'H2',
-           'H3', 'H4', 'IFRAME ', 'INPUT',
-           'LABEL','LI', 'P', 'SCRIPT', 'SPAN',
-           'STYLE', 'TR']
-
-def compute_probability_matrix(dataset):
-    """
-    Compute the probability matrix based on the input dataset.
-
-    :dataset: an array of pairs representing the parent child relationships.
-    """
-    import itertools
-    ret = {}
-    matrix = numpy.zeros((len(thetags) + 1, len(thetags) + 1))
-
-    for data in dataset:
-        x = data[0].upper()
-        y = data[1].upper()
-        try:
-            x = thetags.index(x)
-        except:
-            x = len(thetags)
-
-        try:
-            y = thetags.index(y)
-        except:
-            y = len(thetags)
-
-        matrix[x,y] += 1
-
-    for x in xrange(len(thetags) + 1):
-        possibilities = 0
-        for y in matrix[x]:
-            possibilities += y
-
-        for i in xrange(len(matrix[x])):
-            if possibilities != 0:
-                matrix[x][i] = matrix[x][i]/possibilities
-
-    return matrix
-
-def compute_eigenvalues(matrix):
-    """
-    Returns the eigenvalues of the supplied square matrix.
-
-    :matrix: must be a square matrix and diagonalizable.
-    """
-    return numpy.linalg.eigvals(matrix)
-
-def readDOM(content=None, filename=None):
-    """
-    Parses the DOM of the HTML page and returns an array of parent, child
-    pairs.
-
-    :content: the content of the HTML page to be read.
-
-    :filename: the filename to be read from for getting the content of the
-               page.
-    """
-    from bs4 import BeautifulSoup
-
-    if filename:
-        f = open(filename)
-        content = ''.join(f.readlines())
-        f.close()
-
-    dom = BeautifulSoup(content)
-    couples = []
-    for x in dom.findAll():
-        couples.append((str(x.parent.name), str(x.name)))
-
-    return couples
-
-class domclassTest(HTTPTest):
-    implements(IPlugin, ITest)
-
-    shortName = "domclass"
-    description = "domclass"
-    requirements = None
-    options = domclassArgs
-    blocking = False
-
-    follow_redirects = True
-    #tool = True
-
-    def runTool(self):
-        site_a = readDOM(filename=self.local_options['file'])
-        site_b = readDOM(filename=self.local_options['fileb'])
-        a = {}
-        a['matrix'] = compute_probability_matrix(site_a)
-        a['eigen'] = compute_eigenvalues(a['matrix'])
-
-        self.result['eigenvalues'] = a['eigen']
-        b = {}
-        b['matrix'] = compute_probability_matrix(site_b)
-        b['eigen'] = compute_eigenvalues(b['matrix'])
-
-        #print "A: %s" % a
-        #print "B: %s" % b
-        correlation = numpy.vdot(a['eigen'],b['eigen'])
-        correlation /= numpy.linalg.norm(a['eigen'])*numpy.linalg.norm(b['eigen'])
-        correlation = (correlation + 1)/2
-        print "Corelation: %s" % correlation
-        self.end()
-        return a
-
-    def processResponseBody(self, data):
-        site_a = readDOM(data)
-        #site_b = readDOM(self.local_options['fileb'])
-        a = {}
-        a['matrix'] = compute_probability_matrix(site_a)
-        a['eigen'] = compute_eigenvalues(a['matrix'])
-
-
-        if len(data) == 0:
-            self.result['eigenvalues'] = None
-            self.result['matrix'] = None
-        else:
-            self.result['eigenvalues'] = a['eigen']
-            #self.result['matrix'] = a['matrix']
-        #self.result['content'] = data[:200]
-        #b = compute_matrix(site_b)
-        print "A: %s" % a
-        return a['eigen']
-
-    def load_assets(self):
-        if self.local_options:
-            if self.local_options['file']:
-                self.tool = True
-                return {}
-            elif self.local_options['urls']:
-                return {'url': Asset(self.local_options['urls'])}
-            else:
-                self.end()
-                return {}
-        else:
-            return {}
-
-#domclass = domclassTest(None, None, None)
diff --git a/old-to-be-ported-code/old-api/dropin.cache b/old-to-be-ported-code/old-api/dropin.cache
deleted file mode 100755
index 65c2187..0000000
--- a/old-to-be-ported-code/old-api/dropin.cache
+++ /dev/null
@@ -1,243 +0,0 @@
-(dp1
-S'tcpconnect'
-p2
-ccopy_reg
-_reconstructor
-p3
-(ctwisted.plugin
-CachedDropin
-p4
-c__builtin__
-object
-p5
-NtRp6
-(dp7
-S'moduleName'
-p8
-S'ooni.plugins.tcpconnect'
-p9
-sS'description'
-p10
-S'\nThis is a self genrated test created by scaffolding.py.\nyou will need to fill it up with all your necessities.\nSafe hacking :).\n'
-p11
-sS'plugins'
-p12
-(lp13
-g3
-(ctwisted.plugin
-CachedPlugin
-p14
-g5
-NtRp15
-(dp16
-S'provided'
-p17
-(lp18
-ctwisted.plugin
-IPlugin
-p19
-acooni.plugoo.interface
-ITest
-p20
-asS'dropin'
-p21
-g6
-sS'name'
-p22
-S'tcpconnect'
-p23
-sg10
-NsbasbsS'domclass'
-p24
-g3
-(g4
-g5
-NtRp25
-(dp26
-g8
-S'ooni.plugins.domclass'
-p27
-sg10
-Nsg12
-(lp28
-g3
-(g14
-g5
-NtRp29
-(dp30
-g17
-(lp31
-g19
-ag20
-asg21
-g25
-sg22
-S'domclass'
-p32
-sg10
-NsbasbsS'bridget'
-p33
-g3
-(g4
-g5
-NtRp34
-(dp35
-g8
-S'ooni.plugins.bridget'
-p36
-sg10
-Nsg12
-(lp37
-g3
-(g14
-g5
-NtRp38
-(dp39
-g17
-(lp40
-g19
-ag20
-asg21
-g34
-sg22
-S'bridget'
-p41
-sg10
-S"\n    XXX fill me in\n\n    :ivar config:\n        An :class:`ooni.lib.txtorcon.TorConfig` instance.\n    :ivar relays:\n        A list of all provided relays to test.\n    :ivar bridges:\n        A list of all provided bridges to test.\n    :ivar socks_port:\n        Integer for Tor's SocksPort.\n    :ivar control_port:\n        Integer for Tor's ControlPort.\n    :ivar transport:\n        String defining the Tor's ClientTransportPlugin, for testing \n        a bridge's pluggable transport functionality.\n    :ivar tor_binary:\n        Path to the Tor binary to use, e.g. '/usr/sbin/tor'\n    "
-p42
-sbasbsS'daphn3'
-p43
-g3
-(g4
-g5
-NtRp44
-(dp45
-g8
-S'plugins.daphn3'
-p46
-sg10
-S'\nThis is a self genrated test created by scaffolding.py.\nyou will need to fill it up with all your necessities.\nSafe hacking :).\n'
-p47
-sg12
-(lp48
-g3
-(g14
-g5
-NtRp49
-(dp50
-g17
-(lp51
-g19
-ag20
-asg21
-g44
-sg22
-S'daphn3test'
-p52
-sg10
-NsbasbsS'httpt'
-p53
-g3
-(g4
-g5
-NtRp54
-(dp55
-g8
-S'ooni.plugins.httpt'
-p56
-sg10
-S'\nThis is a self genrated test created by scaffolding.py.\nyou will need to fill it up with all your necessities.\nSafe hacking :).\n'
-p57
-sg12
-(lp58
-sbsS'chinatrigger'
-p59
-g3
-(g4
-g5
-NtRp60
-(dp61
-g8
-S'plugins.chinatrigger'
-p62
-sg10
-Nsg12
-(lp63
-g3
-(g14
-g5
-NtRp64
-(dp65
-g17
-(lp66
-g19
-ag20
-asg21
-g60
-sg22
-S'chinatrigger'
-p67
-sg10
-S'\n    This test is a OONI based implementation of the C tool written\n    by Philipp Winter to engage chinese probes in active scanning.\n\n    Example of running it:\n    ./ooni/ooniprobe.py chinatrigger -d 127.0.0.1 -p 8080 -f bla.pcap\n    '
-p68
-sbasbsS'dnstamper'
-p69
-g3
-(g4
-g5
-NtRp70
-(dp71
-g8
-S'ooni.plugins.dnstamper'
-p72
-sg10
-S'\n    dnstamper\n    *********\n\n    This test resolves DNS for a list of domain names, one per line, in the\n    file specified in the ooni-config under the setting "dns_experiment". If\n    the file is top-1m.txt, the test will be run using Amazon\'s list of top\n    one million domains. The experimental dns servers to query should\n    be specified one per line in assets/dns_servers.txt.\n\n    The test reports censorship if the cardinality of the intersection of\n    the query result set from the control server and the query result set\n    from the experimental server is zero, which is to say, if the two sets\n    have no matching results whatsoever.\n\n    NOTE: This test frequently results in false positives due to GeoIP-based\n    load balancing on major global sites such as google, facebook, and\n    youtube, etc.\n\n    :author: Isis Lovecruft, Arturo Filast\xc3\xb2\n    :license: see LICENSE for more details\n\n    TODO:\n         * Finish porting to twisted\n 
         * Finish the client.Resolver() subclass and test it\n         * Use the DNS tests from captiveportal\n         * Use plugoo/reports.py for final data\n'
-p73
-sg12
-(lp74
-g3
-(g14
-g5
-NtRp75
-(dp76
-g17
-(lp77
-g19
-ag20
-asg21
-g70
-sg22
-S'dnstamper'
-p78
-sg10
-S'\n    XXX fill me in\n    '
-p79
-sbasbsS'blocking'
-p80
-g3
-(g4
-g5
-NtRp81
-(dp82
-g8
-S'plugins.blocking'
-p83
-sg10
-Nsg12
-(lp84
-g3
-(g14
-g5
-NtRp85
-(dp86
-g17
-(lp87
-g19
-ag20
-asg21
-g81
-sg22
-S'blocking'
-p88
-sg10
-Nsbasbs.
\ No newline at end of file
diff --git a/old-to-be-ported-code/old-api/httpt.py b/old-to-be-ported-code/old-api/httpt.py
deleted file mode 100644
index 358f1ea..0000000
--- a/old-to-be-ported-code/old-api/httpt.py
+++ /dev/null
@@ -1,94 +0,0 @@
-"""
-This is a self genrated test created by scaffolding.py.
-you will need to fill it up with all your necessities.
-Safe hacking :).
-"""
-from zope.interface import implements
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from ooni.plugoo.tests import ITest, OONITest
-from ooni.plugoo.assets import Asset
-from ooni.protocols import http
-from ooni.utils import log
-
-class httptArgs(usage.Options):
-    optParameters = [['urls', 'f', None, 'Urls file'],
-                     ['url', 'u', 'http://torproject.org/', 'Test single site'],
-                     ['resume', 'r', 0, 'Resume at this index'],
-                     ['rules', 'y', None, 'Specify the redirect rules file']]
-
-class httptTest(http.HTTPTest):
-    implements(IPlugin, ITest)
-
-    shortName = "httpt"
-    description = "httpt"
-    requirements = None
-    options = httptArgs
-    blocking = False
-
-
-    def testPattern(self, value, pattern, type):
-        if type == 'eq':
-            return value == pattern
-        elif type == 're':
-            import re
-            if re.match(pattern, value):
-                return True
-            else:
-                return False
-        else:
-            return None
-
-    def testPatterns(self, patterns, location):
-        test_result = False
-
-        if type(patterns) == list:
-            for pattern in patterns:
-                test_result |= self.testPattern(location, pattern['value'], pattern['type'])
-        else:
-            test_result |= self.testPattern(location, patterns['value'], patterns['type'])
-
-        return test_result
-
-    def testRules(self, rules, location):
-        result = {}
-        blocked = False
-        for rule, value in rules.items():
-            current_rule = {}
-            current_rule['name'] = value['name']
-            current_rule['patterns'] = value['patterns']
-            current_rule['test'] = self.testPatterns(value['patterns'], location)
-            blocked |= current_rule['test']
-            result[rule] = current_rule
-        result['blocked'] = blocked
-        return result
-
-    def processRedirect(self, location):
-        self.result['redirect'] = None
-        try:
-            rules_file = self.local_options['rules']
-            import yaml
-            rules = yaml.load(open(rules_file))
-            log.msg("Testing rules %s" % rules)
-            redirect = self.testRules(rules, location)
-            self.result['redirect'] = redirect
-        except TypeError:
-            log.msg("No rules file. Got a redirect, but nothing to do.")
-
-
-    def control(self, experiment_result, args):
-        print self.response
-        print self.request
-        # What you return here ends up inside of the report.
-        log.msg("Running control")
-        return {}
-
-    def load_assets(self):
-        if self.local_options and self.local_options['urls']:
-            return {'url': Asset(self.local_options['urls'])}
-        else:
-            return {}
-
-# We need to instantiate it otherwise getPlugins does not detect it
-# XXX Find a way to load plugins without instantiating them.
-#httpt = httptTest(None, None, None)
diff --git a/old-to-be-ported-code/old-api/tcpconnect.py b/old-to-be-ported-code/old-api/tcpconnect.py
deleted file mode 100644
index 7758a9e..0000000
--- a/old-to-be-ported-code/old-api/tcpconnect.py
+++ /dev/null
@@ -1,65 +0,0 @@
-"""
-This is a self genrated test created by scaffolding.py.
-you will need to fill it up with all your necessities.
-Safe hacking :).
-"""
-from zope.interface import implements
-from twisted.python import usage
-from twisted.plugin import IPlugin
-from twisted.internet.protocol import Factory, Protocol
-from twisted.internet.endpoints import TCP4ClientEndpoint
-
-from ooni.plugoo.interface import ITest
-from ooni.plugoo.tests import OONITest
-from ooni.plugoo.assets import Asset
-from ooni.utils import log
-
-class tcpconnectArgs(usage.Options):
-    optParameters = [['asset', 'a', None, 'File containing IP:PORT combinations, one per line.'],
-                     ['resume', 'r', 0, 'Resume at this index']]
-
-class tcpconnectTest(OONITest):
-    implements(IPlugin, ITest)
-
-    shortName = "tcpconnect"
-    description = "tcpconnect"
-    requirements = None
-    options = tcpconnectArgs
-    blocking = False
-
-    def experiment(self, args):
-        try:
-            host, port = args['asset'].split(':')
-        except:
-            raise Exception("Error in parsing asset. Wrong format?")
-        class DummyFactory(Factory):
-            def buildProtocol(self, addr):
-                return Protocol()
-
-        def gotProtocol(p):
-            p.transport.loseConnection()
-            log.msg("Got a connection!")
-            log.msg(str(p))
-            return {'result': True, 'target': [host, port]}
-
-        def gotError(err):
-            log.msg("Had error :(")
-            log.msg(err)
-            return {'result': False, 'target': [host, port]}
-
-        # What you return here gets handed as input to control
-        point = TCP4ClientEndpoint(self.reactor, host, int(port))
-        d = point.connect(DummyFactory())
-        d.addCallback(gotProtocol)
-        d.addErrback(gotError)
-        return d
-
-    def load_assets(self):
-        if self.local_options:
-            return {'asset': Asset(self.local_options['asset'])}
-        else:
-            return {}
-
-# We need to instantiate it otherwise getPlugins does not detect it
-# XXX Find a way to load plugins without instantiating them.
-#tcpconnect = tcpconnectTest(None, None, None)
diff --git a/old-to-be-ported-code/old-api/tcpscan.py b/old-to-be-ported-code/old-api/tcpscan.py
deleted file mode 100644
index b371c88..0000000
--- a/old-to-be-ported-code/old-api/tcpscan.py
+++ /dev/null
@@ -1,84 +0,0 @@
-"""
-    TCP Port Scanner
-    ****************
-
-    Does a TCP connect scan on the IP:port pairs.
-
-"""
-import os
-from gevent import socket
-from datetime import datetime
-import socks
-
-from plugoo.assets import Asset
-from plugoo.tests import Test
-
-__plugoo__ = "TCP Port Scanner"
-__desc__ = "This a test template to be used to build your own tests"
-
-class TCPScanAsset(Asset):
-    """
-    This is the asset that should be used by the Test. It will
-    contain all the code responsible for parsing the asset file
-    and should be passed on instantiation to the test.
-    """
-    def __init__(self, file=None):
-        self = Asset.__init__(self, file)
-
-
-class TCPScan(Test):
-    """
-    The main Test class
-    """
-
-    def experiment(self, *a, **kw):
-        """
-        Fill this up with the tasks that should be performed
-        on the "dirty" network and should be compared with the
-        control.
-        """
-        addr = kw['data']
-        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        res = False
-        try:
-            self.logger.debug('Doing a connection to %s' % addr)
-            s.connect((addr.split(':')[0], int(addr.split(':')[1])))
-            res = True
-        except socket.error, msg:
-            self.logger.debug('Connection failed to %s: %s' % (addr, msg))
-
-        finally:
-            s.close()
-
-        return {'Time': datetime.now(),
-                'Address': addr,
-                'Status': res}
-
-    def control(self):
-        """
-        Fill this up with the control related code.
-        """
-        return True
-
-def run(ooni, asset=None):
-    """
-    This is the function that will be called by OONI
-    and it is responsible for instantiating and passing
-    the arguments to the Test class.
-    """
-    config = ooni.config
-
-    # This the assets array to be passed to the run function of
-    # the test
-    if asset:
-        assets = [TCPScanAsset(asset)]
-    else:
-        assets = [TCPScanAsset(os.path.join(config.main.assetdir, \
-                                            "tcpscan.txt"))]
-
-    # Instantiate the Test
-    thetest = TCPScan(ooni)
-    ooni.logger.info("starting TCP Scan...")
-    # Run the test with argument assets
-    thetest.run(assets)
-    ooni.logger.info("finished.")
diff --git a/old-to-be-ported-code/spec/proxooni-spec.txt b/old-to-be-ported-code/spec/proxooni-spec.txt
deleted file mode 100644
index 7cc476f..0000000
--- a/old-to-be-ported-code/spec/proxooni-spec.txt
+++ /dev/null
@@ -1,65 +0,0 @@
-
-                              Proxyooni specification
-                                   version 0.0
-                                  Jacob Appelbaum
-
-0. Preface
-
- This document describes a new proxy that is required to support ooni-probe.
-
-1. Overview
-
- There is no common proxy type that thwarts even the most basic traffic
- monitoring. The Proxyooni specification aims to provide a proxy that is
- encrypted by default, optionally authenticated, and will provide a way to run
- specific ooni-probe tests natively on the system where the proxy is running.
-
-2. Implementation
-
- Proxyooni may be written in any language, the reference implementation will be
- implemented in Python. The program shall be called ooni-proxy and it will handle
- running as a privileged user or an unprivileged user on supported systems. We
- aim to support ooni-proxy on Debian Gnu/Linux as the reference platform.
-
-2.1 Connections
-
- When ooni-proxy runs, it should open a single port and it will allow TLS 1.0
- clients to connect with a cipher suite that provides perfect forward secrecy.
-
-2.2 Certificates
-
- ooni-proxy should use a certificate if supplied or dynamically generate a
- certificate on startup; any connecting client should bootstrap trust with a
- TOFU model, a client may ignore the
-
-2.3 Authentication
-
- ooni-proxy should provide open access by default with no authentication.
- It should support TLS-PSK[0] if authentication is desired. Key distribution is
- explictly an out of scope problem.
-
-3.0 Services offered
-
- Post authentication, a remote client should treat ooni-proxy as a SOCKS4A[1]
- proxy. It should be possible to chain as many Proxyooni proxies as desired.
-
-3.1 Additional services offered
-
- ooni-proxy should allow for the sending of raw socket data - this is currently
- left unspecified. This should be specified in the next revision of the
- specification.
-
-3.2 Advanced meta-services
-
- It may be desired to load code on the ooni-proxy from a client with newer
- tests. This should be specified in the next revision of the specification.
-
-4. Security Concerns
-
- It is probably not a good idea to run ooni-proxy unless you have permission to
- do so. Consider your network context carefully; if it is dangerous to run a test
- ensure that you do not run the test.
-
-[0] http://en.wikipedia.org/wiki/TLS-PSK
-[1] http://en.wikipedia.org/wiki/SOCKS#SOCKS_4a
-
diff --git a/old-to-be-ported-code/very-old/TODO.plgoons b/old-to-be-ported-code/very-old/TODO.plgoons
deleted file mode 100644
index ace2a10..0000000
--- a/old-to-be-ported-code/very-old/TODO.plgoons
+++ /dev/null
@@ -1,79 +0,0 @@
-We should implement the following as plugoons:
-
-dns_plgoo.py - Various DNS checks
-
-As a start - we should perform a known good check against a name or list of
-names.  As input, we should take an ip address, a name or a list of names for
-testing; we also take dns servers for experiment or control data. For output we
-emit UDP or TCP packets - we should support proxying these requests when
-possible as is the case with TCP but probably not with UDP for certain DNS
-request types.
-
-http_plgoo.py - Various HTTP checks
-
-We should compare two pages and see if we have identical properties.
-At the very least, we should print the important differences - perhaps
-with a diff like output? We should look for fingerprints in URLS that are
-returned. We should detect 302 re-direction.
-
-As input, we should take an ip address, a name or a list of names for testing;
-we also take a list of headers such as random user agent strings and so on.
-We should emit TCP packets and ensure that we do not leak DNS for connections
-that we expect to proxy to a remote network.
-
-latency_plgoo.py - Measure latency for a host or a list of hosts
-
-As input, we should take an ip address, a name or a list of names for testing;
-We should measure the mean latency from the ooni-probe to the host with various
-traceroute tests. We should also measure the latency between the ooni-probe and
-a given server for any other protocol that is request and response oriented;
-HTTP latency may be calculated by simply tracking the delta between requests
-and responses.
-
-tcptrace_plgoo.py udptrace_plgoo.py icmptrace_plgoo.py - Traceroute suites
-
-tcptrace_plgoo.py should allow for both stray and in-connection traceroute
-modes.
-
-udptrace_plgoo.py should use UDP 53 by default; 0 and 123 are also nice options
-- it may also be nice to simply make a random A record request in a DNS packet
-and use it as the payload for a UDP traceroute.
-
-reversetrace_plgoo.py should give a remote host the client's IP and return the
-output of a traceroute to that IP from the remote host. It will need a remote
-component if run against a web server. It would not need a remote component if
-run against route-views - we can simply telnet over Tor and ask it to trace to
-our detected client IP.
-
-keyword_plgoo.py should take a keyword or a list of keywords for use as a
-payload in a varity of protocols. This should be protocol aware - dns keyword
-filtering requires a sniffer to catch stray packets after the censor wins the
-race. HTTP payloads in open connections may be similar and in practice, we'll
-have to find tune it.
-
-icsi_plgoo.py - The ICSI Netalyzr tests; we should act as a client for their
-servers. They have dozens of tests and to implement this plgoo, we'll need to
-add many things to ooni. More details here:
-http://netalyzr.icsi.berkeley.edu/faq.html
-http://netalyzr.icsi.berkeley.edu/json/id=example-session
-
-HTML output:
-http://n2.netalyzr.icsi.berkeley.edu/summary/id=43ca208a-3466-82f17207-9bc1-433f-9b43
-
-JSON output:
-http://n2.netalyzr.icsi.berkeley.edu/json/id=43ca208a-3466-82f17207-9bc1-433f-9b43
-
-Netalyzer log:
-http://netalyzr.icsi.berkeley.edu/restore/id=43ca208a-3466-82f17207-9bc1-433f-9b43
-http://n2.netalyzr.icsi.berkeley.edu/transcript/id=43ca208a-3466-82f17207-9bc1-433f-9b43/side=client
-http://n2.netalyzr.icsi.berkeley.edu/transcript/id=43ca208a-3466-82f17207-9bc1-433f-9b43/side=server
-
-sniffer_plgoo.py - We need a generic method for capturing packets during a full
-run - this may be better as a core ooni-probe feature but we should implement
-packet capture in a plugin if it is done no where else.
-
-nmap_plgoo.py - We should take a list of hosts and run nmap against each of
-these hosts; many hosts are collected during testing and they should be scanned
-with something reasonable like "-A -O -T4 -sT --top-ports=10000" or something
-more reasonable.
-
diff --git a/old-to-be-ported-code/very-old/TO_BE_PORTED b/old-to-be-ported-code/very-old/TO_BE_PORTED
deleted file mode 100644
index 49ce5e0..0000000
--- a/old-to-be-ported-code/very-old/TO_BE_PORTED
+++ /dev/null
@@ -1,14 +0,0 @@
-
-The tests in this directory are very old, and have neither been ported to
-Twisted, nor to the new twisted.trial API framework. Although, they are not
-old in the sense of the *seriously old* OONI code which was written two years
-ago.
-
-These tests should be updated at least to use Twisted.
-
-If you want to hack on something care free, feel free to mess with these files
-because it would be difficult to not improve on them.
-
-<(A)3
-isis
-0x2cdb8b35
diff --git a/old-to-be-ported-code/very-old/ooni-probe.diff b/old-to-be-ported-code/very-old/ooni-probe.diff
deleted file mode 100644
index fc61d3f..0000000
--- a/old-to-be-ported-code/very-old/ooni-probe.diff
+++ /dev/null
@@ -1,358 +0,0 @@
-diff --git a/TODO b/TODO
-index c2e19af..51fa559 100644
---- a/TODO
-+++ b/TODO
-@@ -293,3 +293,142 @@ VIA Rail MITM's SSL In Ottawa:
- Jul 22 17:47:21.983 [Warning] Problem bootstrapping. Stuck at 85%: Finishing handshake with first hop. (DONE; DONE; count 13; recommendation warn)
- 
- http://wireless.colubris.com:81/goform/HtmlLoginRequest?username=al1852&password=al1852
-+
-+VIA Rail Via header:
-+
-+HTTP/1.0 301 Moved Permanently
-+Location: http://www.google.com/
-+Content-Type: text/html; charset=UTF-8
-+Date: Sat, 23 Jul 2011 02:21:30 GMT
-+Expires: Mon, 22 Aug 2011 02:21:30 GMT
-+Cache-Control: public, max-age=2592000
-+Server: gws
-+Content-Length: 219
-+X-XSS-Protection: 1; mode=block
-+X-Cache: MISS from cache_server
-+X-Cache-Lookup: MISS from cache_server:3128
-+Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
-+Connection: close
-+
-+<HTML><HEAD><meta http-equiv="content-type" content="text/html;charset=utf-8">
-+<TITLE>301 Moved</TITLE></HEAD><BODY>
-+<H1>301 Moved</H1>
-+The document has moved
-+<A HREF="http://www.google.com/">here</A>.
-+</BODY></HTML>
-+
-+
-+blocked site:
-+
-+HTTP/1.0 302 Moved Temporarily
-+Server: squid/2.6.STABLE21
-+Date: Sat, 23 Jul 2011 02:22:17 GMT
-+Content-Length: 0
-+Location: http://10.66.66.66/denied.html
-+
-+invalid request response:
-+
-+$ nc 8.8.8.8 80
-+hjdashjkdsahjkdsa
-+HTTP/1.0 400 Bad Request
-+Server: squid/2.6.STABLE21
-+Date: Sat, 23 Jul 2011 02:22:44 GMT
-+Content-Type: text/html
-+Content-Length: 1178
-+Expires: Sat, 23 Jul 2011 02:22:44 GMT
-+X-Squid-Error: ERR_INVALID_REQ 0
-+X-Cache: MISS from cache_server
-+X-Cache-Lookup: NONE from cache_server:3128
-+Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
-+Proxy-Connection: close
-+
-+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-+<HTML><HEAD><META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
-+<TITLE>ERROR: The requested URL could not be retrieved</TITLE>
-+<STYLE type="text/css"><!--BODY{background-color:#ffffff;font-family:verdana,sans-serif}PRE{font-family:sans-serif}--></STYLE>
-+</HEAD><BODY>
-+<H1>ERROR</H1>
-+<H2>The requested URL could not be retrieved</H2>
-+<HR noshade size="1px">
-+<P>
-+While trying to process the request:
-+<PRE>
-+hjdashjkdsahjkdsa
-+
-+</PRE>
-+<P>
-+The following error was encountered:
-+<UL>
-+<LI>
-+<STRONG>
-+Invalid Request
-+</STRONG>
-+</UL>
-+
-+<P>
-+Some aspect of the HTTP Request is invalid.  Possible problems:
-+<UL>
-+<LI>Missing or unknown request method
-+<LI>Missing URL
-+<LI>Missing HTTP Identifier (HTTP/1.0)
-+<LI>Request is too large
-+<LI>Content-Length missing for POST or PUT requests
-+<LI>Illegal character in hostname; underscores are not allowed
-+</UL>
-+<P>Your cache administrator is <A HREF="mailto:root">root</A>. 
-+
-+<BR clear="all">
-+<HR noshade size="1px">
-+<ADDRESS>
-+Generated Sat, 23 Jul 2011 02:22:44 GMT by cache_server (squid/2.6.STABLE21)
-+</ADDRESS>
-+</BODY></HTML>
-+
-+nc 10.66.66.66 80
-+GET cache_object://localhost/info HTTP/1.0
-+HTTP/1.0 403 Forbidden
-+Server: squid/2.6.STABLE21
-+Date: Sat, 23 Jul 2011 02:25:56 GMT
-+Content-Type: text/html
-+Content-Length: 1061
-+Expires: Sat, 23 Jul 2011 02:25:56 GMT
-+X-Squid-Error: ERR_ACCESS_DENIED 0
-+X-Cache: MISS from cache_server
-+X-Cache-Lookup: NONE from cache_server:3128
-+Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
-+Proxy-Connection: close
-+
-+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-+<HTML><HEAD><META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
-+<TITLE>ERROR: The requested URL could not be retrieved</TITLE>
-+<STYLE type="text/css"><!--BODY{background-color:#ffffff;font-family:verdana,sans-serif}PRE{font-family:sans-serif}--></STYLE>
-+</HEAD><BODY>
-+<H1>ERROR</H1>
-+<H2>The requested URL could not be retrieved</H2>
-+<HR noshade size="1px">
-+<P>
-+While trying to retrieve the URL:
-+<A HREF="cache_object://localhost/info">cache_object://localhost/info</A>
-+<P>
-+The following error was encountered:
-+<UL>
-+<LI>
-+<STRONG>
-+Access Denied.
-+</STRONG>
-+<P>
-+Access control configuration prevents your request from
-+being allowed at this time.  Please contact your service provider if
-+you feel this is incorrect.
-+</UL>
-+<P>Your cache administrator is <A HREF="mailto:root">root</A>. 
-+
-+
-+<BR clear="all">
-+<HR noshade size="1px">
-+<ADDRESS>
-+Generated Sat, 23 Jul 2011 02:25:56 GMT by cache_server (squid/2.6.STABLE21)
-+</ADDRESS>
-+</BODY></HTML>
-+
-+
-diff --git a/ooni/command.py b/ooni/command.py
-index 361190f..df1a58c 100644
---- a/ooni/command.py
-+++ b/ooni/command.py
-@@ -13,6 +13,7 @@ import ooni.captive_portal
- import ooni.namecheck
- import ooni.dns_poisoning
- import ooni.dns_cc_check
-+import ooni.transparenthttp
- 
- class Command():
-     def __init__(self, args):
-@@ -48,6 +49,15 @@ class Command():
-             help="run captiveportal tests"
-         )
- 
-+        # --transhttp
-+        def cb_transhttp(option, opt, value, oparser):
-+            self.action = opt[2:]
-+        optparser.add_option(
-+            "--transhttp",
-+            action="callback", callback=cb_transhttp,
-+            help="run Transparent HTTP tests"
-+        )
-+
-         # --dns
-         def cb_dnstests(option, opt, value, oparser):
-             self.action = opt[2:]
-@@ -122,7 +132,7 @@ class Command():
-             if (not self.action):
-                 raise optparse.OptionError(
-                     'is required',
--                    '--dns | --dnsbulk | --captiveportal | --help | --version'
-+                    '--dns | --dnsbulk | --dnscccheck | [ --cc CC ] | --captiveportal | --transhttp | --help | --version'
-                 )
- 
-         except optparse.OptionError, err:
-@@ -138,6 +148,10 @@ class Command():
-         captive_portal = ooni.captive_portal.CaptivePortal
-         captive_portal(self).main()
- 
-+    def transhttp(self):
-+        transparent_http = ooni.transparenthttp.TransparentHTTPProxy
-+        transparent_http(self).main()
-+
-     def dns(self):
-         dnstests = ooni.namecheck.DNS
-         dnstests(self).main()
-diff --git a/ooni/dns.py b/ooni/dns.py
-index 95da6ef..90d50bd 100644
---- a/ooni/dns.py
-+++ b/ooni/dns.py
-@@ -8,7 +8,7 @@ from socket import gethostbyname
- import ooni.common
- 
- # apt-get install python-dns
--import DNS
-+import dns
- import random
- 
- """ Wrap gethostbyname """
-diff --git a/ooni/http.py b/ooni/http.py
-index 62365bb..bb72001 100644
---- a/ooni/http.py
-+++ b/ooni/http.py
-@@ -7,8 +7,14 @@
- from socket import gethostbyname
- import ooni.common
- import urllib2
-+import httplib
-+from urlparse import urlparse
-+from pprint import pprint
- import pycurl
-+import random
-+import string
- import re
-+from BeautifulSoup import BeautifulSoup
- 
- # By default, we'll be Torbutton's UA
- default_ua = { 'User-Agent' : 
-@@ -20,20 +26,8 @@ default_proxy_type = PROXYTYPE_SOCKS5
- default_proxy_host = "127.0.0.1"
- default_proxy_port = "9050"
- 
--
--
--
--
--
--
--
--
--
--
--
--
--
--
-+#class HTTPResponse(object):
-+#  def __init__(self):
- 
- 
- """A very basic HTTP fetcher that uses Tor by default and returns a curl
-@@ -51,7 +45,7 @@ def http_proxy_fetch(url, headers, proxy_type=5,
-    http_code = getinfo(pycurl.HTTP_CODE)
-    return response, http_code
- 
--"""A very basic HTTP fetcher that returns a urllib3 response object."""
-+"""A very basic HTTP fetcher that returns a urllib2 response object."""
- def http_fetch(url, 
-                headers= default_ua,
-                label="generic HTTP fetch"):
-@@ -136,6 +130,76 @@ def http_header_no_match(experiment_url, control_header, control_result):
-   else:
-     return True
- 
-+def http_request(self, method, url, path=None):
-+  """Takes as argument url that is perfectly formed (http://hostname/REQUEST"""
-+  purl = urlparse(url)
-+  host = purl.netloc
-+  conn = httplib.HTTPConnection(host, 80)
-+  if path is None:
-+    path = purl.path
-+  conn.request(method, purl.path)
-+  response = conn.getresponse()
-+  headers = dict(response.getheaders())
-+  self.headers = headers
-+  self.data = response.read()
-+  return True
-+
-+def search_headers(self, s_headers, url):
-+  if http_request(self, "GET", url):
-+    headers = self.headers
-+  else:
-+    return None
-+  result = {}
-+  for h in s_headers.items():
-+    result[h[0]] = h[0] in headers
-+  return result
-+
-+def http_header_match_dict(experimental_url, dict_header):
-+  result = {}
-+  url_header = http_get_header_dict(experimental_url)
-+
-+# XXX for testing
-+#  [('content-length', '9291'), ('via', '1.0 cache_server:3128 (squid/2.6.STABLE21)'), ('x-cache', 'MISS from cache_server'), ('accept-ranges', 'bytes'), ('server', 'Apache/2.2.16 (Debian)'), ('last-modified', 'Fri, 22 Jul 2011 03:00:31 GMT'), ('connection', 'close'), ('etag', '"105801a-244b-4a89fab1e51c0;49e684ba90c80"'), ('date', 'Sat, 23 Jul 2011 03:03:56 GMT'), ('content-type', 'text/html'), ('x-cache-lookup', 'MISS from cache_server:3128')]   
-+      
-+def search_squid_headers(self):
-+  url = "http://securityfocus.org/blabla"
-+  s_headers = {'via': '1.0 cache_server:3128 (squid/2.6.STABLE21)', 'x-cache': 'MISS from cache_server', 'x-cache-lookup':'MISS from cache_server:3128'}
-+  ret = search_headers(self, s_headers, url)
-+  for i in ret.items():
-+    if i[1] is True:
-+      return False
-+  return True
-+
-+def random_bad_request(self):
-+  url = "http://securityfocus.org/blabla"
-+  r_str = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(random.randint(5,20)))
-+  if http_request(self, r_str, url):
-+    return True
-+  else:
-+    return None
-+
-+def squid_search_bad_request(self):
-+  if random_bad_request(self):
-+    s_headers = {'X-Squid-Error' : 'ERR_INVALID_REQ 0'}
-+    for i in s_headers.items():
-+      if i[0] in self.headers:
-+        return False
-+    return True
-+  else:
-+    return None
-+
-+def squid_cacheobject_request(self):
-+  url = "http://securityfocus.org/blabla"
-+  if http_request(self, "GET", url, "cache_object://localhost/info"):
-+    soup = BeautifulSoup(self.data)
-+    if soup.find('strong') and soup.find('strong').string == "Access Denied.":
-+      return False
-+    else:
-+      return True
-+  else:
-+    return None
-+  
-+
- def MSHTTP_CP_Tests(self):
-   experiment_url = "http://www.msftncsi.com/ncsi.txt"
-   expectedResponse = "Microsoft NCSI" # Only this - nothing more
-@@ -186,6 +250,18 @@ def WC3_CP_Tests(self):
- 
- # Google ChromeOS fetches this url in guest mode
- # and they expect the user to authenticate
--  def googleChromeOSHTTPTest(self):
--    print "noop"
--    #url = "http://www.google.com/"
-+def googleChromeOSHTTPTest(self):
-+  print "noop"
-+  #url = "http://www.google.com/"
-+
-+def SquidHeader_TransparentHTTP_Tests(self):
-+  return search_squid_headers(self)
-+
-+def SquidBadRequest_TransparentHTTP_Tests(self):
-+  squid_cacheobject_request(self)
-+  return squid_search_bad_request(self)    
-+
-+def SquidCacheobject_TransparentHTTP_Tests(self):
-+  return squid_cacheobject_request(self)
-+
-+
diff --git a/old-to-be-ported-code/very-old/ooni/#namecheck.py# b/old-to-be-ported-code/very-old/ooni/#namecheck.py#
deleted file mode 100644
index 1a2a3f0..0000000
--- a/old-to-be-ported-code/very-old/ooni/#namecheck.py#
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python
-#
-# DNS tampering detection module
-# by Jacob Appelbaum <jacob at appelbaum.net>
-#
-# This module performs multiple DNS tests.
-
-import sys
-import ooni.dnsooni
-
-class DNS():
-  def __init__(self, args):
-    self.in_ = sys.stdin
-    self.out = sys.stdout
-    self.debug = False
-    self.randomize = args.randomize
-
-  def DNS_Tests(self):
-    print "DNS tampering detection:"
-    filter_name = "_DNS_Tests"
-    tests = [ooni.dnsooni]
-    for test in tests:
-      for function_ptr in dir(test):
-        if function_ptr.endswith(filter_name):
-          filter_result = getattr(test, function_ptr)(self)
-          if filter_result == True:
-            print function_ptr + " thinks the network is clean"
-          elif filter_result == None:
-              print function_ptr + " failed"
-          else:
-            print function_ptr + " thinks the network is dirty"
-
-  def main(self):
-    for function_ptr in dir(self):
-      if function_ptr.endswith("_Tests"):
-        getattr(self, function_ptr)()
-
-if __name__ == '__main__':
-  self.main()
diff --git a/old-to-be-ported-code/very-old/ooni/.DS_Store b/old-to-be-ported-code/very-old/ooni/.DS_Store
deleted file mode 100644
index f5738a5..0000000
Binary files a/old-to-be-ported-code/very-old/ooni/.DS_Store and /dev/null differ
diff --git a/old-to-be-ported-code/very-old/ooni/__init__.py b/old-to-be-ported-code/very-old/ooni/__init__.py
deleted file mode 100644
index 8f1b96e..0000000
--- a/old-to-be-ported-code/very-old/ooni/__init__.py
+++ /dev/null
@@ -1,12 +0,0 @@
-"""\
-This is your package, 'ooni'.
-
-It was provided by the package, `package`.
-
-Please change this documentation, and write this module!
-"""
-
-__version__ = '0.0.1'
-
-# If you run 'make test', this is your failing test.
-# raise Exception("\n\n\tNow it's time to write your 'ooni' module!!!\n\n")
diff --git a/old-to-be-ported-code/very-old/ooni/command.py b/old-to-be-ported-code/very-old/ooni/command.py
deleted file mode 100644
index e5f8f9f..0000000
--- a/old-to-be-ported-code/very-old/ooni/command.py
+++ /dev/null
@@ -1,250 +0,0 @@
-# -*- coding: utf-8
-"""\
-Command line UI module for ooni-probe - heavily inspired by Ingy döt Net
-"""
-
-import os
-import sys
-import re
-import optparse
-
-# Only include high level ooni tests at this time
-import ooni.captive_portal
-import ooni.namecheck
-import ooni.dns_poisoning
-import ooni.dns_cc_check
-import ooni.transparenthttp
-import ooni.helpers
-import ooni.plugooni
-import ooni.input
-
-class Command():
-    def __init__(self, args):
-        sys.argv = sys.argv[0:1]
-        sys.argv.extend(args)
-        self.startup_options()
-
-    def startup_options(self):
-        self.action = None
-        self.from_ = None
-        self.to = None
-        self.parser = None
-        self.emitter = None
-        self.emit_header = None
-        self.emit_trailer = None
-        self.in_ = sys.stdin
-        self.out = sys.stdout
-        self.debug = False
-        self.randomize = True
-        self.cc = None
-        self.hostname = None
-        self.listfile = None
-        self.listplugooni = False
-        self.plugin_name = "all"
-        self.controlproxy = None # "socks4a://127.0.0.1:9050/"
-        self.experimentproxy = None
-
-        usage = """
-
-  'ooni' is the Open Observatory of Network Interference
-
-        command line usage:  ooni-probe [options]"""
-
-        optparser = optparse.OptionParser(usage=usage)
-
-        # --plugin
-        def cb_plugin(option, opt, value, oparser):
-            self.action = opt[2:]
-            self.plugin_name = str(value)
-        optparser.add_option(
-            "--plugin", type="string",
-            action="callback", callback=cb_plugin,
-            help="run the Plugooni plgoo plugin specified"
-        )
-
-        # --listplugins
-        def cb_list_plugins(option, opt, value, oparser):
-            self.action = opt[2:]
-        optparser.add_option(
-            "--listplugins",
-            action="callback", callback=cb_list_plugins,
-            help="list available Plugooni as plgoos plugin names"
-        )
-
-        # --captiveportal
-        def cb_captiveportal(option, opt, value, oparser):
-            self.action = opt[2:]
-        optparser.add_option(
-            "--captiveportal",
-            action="callback", callback=cb_captiveportal,
-            help="run vendor emulated captiveportal tests"
-        )
-
-        # --transhttp
-        def cb_transhttp(option, opt, value, oparser):
-            self.action = opt[2:]
-        optparser.add_option(
-            "--transhttp",
-            action="callback", callback=cb_transhttp,
-            help="run Transparent HTTP tests"
-        )
-
-        # --dns
-        def cb_dnstests(option, opt, value, oparser):
-            self.action = opt[2:]
-        optparser.add_option(
-            "--dns",
-            action="callback", callback=cb_dnstests,
-            help="run fixed generic dns tests"
-        )
-
-        # --dnsbulk
-        def cb_dnsbulktests(option, opt, value, oparser):
-            self.action = opt[2:]
-        optparser.add_option(
-            "--dnsbulk",
-            action="callback", callback=cb_dnsbulktests,
-            help="run bulk DNS tests in random.shuffle() order"
-        )
-
-        # --dns-cc-check
-        def cb_dnscccheck(option, opt, value, oparser):
-            self.action = opt[2:]
-        optparser.add_option(
-            "--dnscccheck",
-            action="callback", callback=cb_dnscccheck,
-            help="run cc specific bulk DNS tests in random.shuffle() order"
-        )
-
-        # --cc [country code]
-        def cb_cc(option, opt, value, optparser):
-          # XXX: We should check this against a list of supported county codes
-          # and then return the matching value from the list into self.cc
-          self.cc = str(value)
-        optparser.add_option(
-            "--cc", type="string",
-            action="callback", callback=cb_cc,
-            help="set a specific county code -- default is None",
-        )
-
-        # --list [url/hostname/ip list in file]
-        def cb_list(option, opt, value, optparser):
-          self.listfile = os.path.expanduser(value)
-          if not os.path.isfile(self.listfile):
-              print "Wrong file '" + value + "' in --list."
-              sys.exit(1)
-        optparser.add_option(
-            "--list", type="string",
-            action="callback", callback=cb_list,
-            help="file to read from -- default is None",
-        )
-
-        # --url [url/hostname/ip]
-        def cb_host(option, opt, value, optparser):
-          self.hostname = str(value)
-        optparser.add_option(
-            "--url", type="string",
-            action="callback", callback=cb_host,
-            help="set URL/hostname/IP for use in tests -- default is None",
-        )
-
-        # --controlproxy [scheme://host:port]
-        def cb_controlproxy(option, opt, value, optparser):
-          self.controlproxy = str(value)
-        optparser.add_option(
-            "--controlproxy", type="string",
-            action="callback", callback=cb_controlproxy,
-            help="proxy to be used as a control -- default is None",
-        )
-
-        # --experimentproxy [scheme://host:port]
-        def cb_experimentproxy(option, opt, value, optparser):
-          self.experimentproxy = str(value)
-        optparser.add_option(
-            "--experimentproxy", type="string",
-            action="callback", callback=cb_experimentproxy,
-            help="proxy to be used for experiments -- default is None",
-        )
-
-
-
-        # --randomize
-        def cb_randomize(option, opt, value, optparser):
-          self.randomize = bool(int(value))
-        optparser.add_option(
-            "--randomize", type="choice",
-            choices=['0', '1'], metavar="0|1",
-            action="callback", callback=cb_randomize,
-            help="randomize host order -- default is on",
-        )
-
-        # XXX TODO:
-        # pause/resume scans for dns_BULK_DNS_Tests()
-        # setting of control/experiment resolver
-        # setting of control/experiment proxy
-        #
-
-        def cb_version(option, opt, value, oparser):
-            self.action = 'version'
-        optparser.add_option(
-            "-v", "--version",
-            action="callback", callback=cb_version,
-            help="print ooni-probe version"
-        )
-
-        # parse options
-        (opts, args) = optparser.parse_args()
-
-        # validate options
-        try:
-            if (args):
-                raise optparse.OptionError('extra arguments found', args)
-            if (not self.action):
-                raise optparse.OptionError(
-                    'RTFS', 'required arguments missing'
-                )
-
-        except optparse.OptionError, err:
-            sys.stderr.write(str(err) + '\n\n')
-            optparser.print_help()
-            sys.exit(1)
-
-    def version(self):
-        print """
-ooni-probe pre-alpha
-Copyright (c) 2011, Jacob Appelbaum, Arturo Filastò
-See: https://www.torproject.org/ooni/
-
-"""
-
-    def run(self):
-        getattr(self, self.action)()
-
-    def plugin(self):
-        plugin_run = ooni.plugooni.Plugooni
-        plugin_run(self).run(self)
-
-    def listplugins(self):
-        plugin_run = ooni.plugooni.Plugooni
-        plugin_run(self).list_plugoons()
-
-    def captiveportal(self):
-        captive_portal = ooni.captive_portal.CaptivePortal
-        captive_portal(self).main()
-
-    def transhttp(self):
-        transparent_http = ooni.transparenthttp.TransparentHTTPProxy
-        transparent_http(self).main()
-
-    def dns(self):
-        dnstests = ooni.namecheck.DNS
-        dnstests(self).main()
-
-    def dnsbulk(self):
-        dnstests = ooni.dns_poisoning.DNSBulk
-        dnstests(self).main()
-
-    def dnscccheck(self):
-        dnstests = ooni.dns_cc_check.DNSBulk
-        dnstests(self).main()
-
diff --git a/old-to-be-ported-code/very-old/ooni/dns_poisoning.py b/old-to-be-ported-code/very-old/ooni/dns_poisoning.py
deleted file mode 100644
index 939391e..0000000
--- a/old-to-be-ported-code/very-old/ooni/dns_poisoning.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python
-#
-# DNS tampering detection module
-# by Jacob Appelbaum <jacob at appelbaum.net>
-#
-# This module performs DNS queries against a known good resolver and a possible
-# bad resolver. We compare every resolved name against a list of known filters
-# - if we match, we ring a bell; otherwise, we list possible filter IP
-# addresses. There is a high false positive rate for sites that are GeoIP load
-# balanced.
-#
-
-import sys
-import ooni.dnsooni
-
-class DNSBulk():
-  def __init__(self, args):
-    self.in_ = sys.stdin
-    self.out = sys.stdout
-    self.randomize = args.randomize
-    self.debug = False
-
-  def DNS_Tests(self):
-    print "DNS tampering detection for list of domains:"
-    filter_name = "_DNS_BULK_Tests"
-    tests = [ooni.dnsooni]
-    for test in tests:
-      for function_ptr in dir(test):
-        if function_ptr.endswith(filter_name):
-          filter_result = getattr(test, function_ptr)(self)
-          if filter_result == True:
-            print function_ptr + " thinks the network is clean"
-          elif filter_result == None:
-              print function_ptr + " failed"
-          else:
-            print function_ptr + " thinks the network is dirty"
-  def main(self):
-    for function_ptr in dir(self):
-      if function_ptr.endswith("_Tests"):
-        getattr(self, function_ptr)()
-
-if __name__ == '__main__':
-  self.main()
diff --git a/old-to-be-ported-code/very-old/ooni/dnsooni.py b/old-to-be-ported-code/very-old/ooni/dnsooni.py
deleted file mode 100644
index bfdfe51..0000000
--- a/old-to-be-ported-code/very-old/ooni/dnsooni.py
+++ /dev/null
@@ -1,356 +0,0 @@
-#!/usr/bin/env python
-#
-# DNS support for ooni-probe
-# by Jacob Appelbaum <jacob at appelbaum.net>
-#
-
-from socket import gethostbyname
-import ooni.common
-
-# requires python-dns
-# (pydns.sourceforge.net)
-try:
-  import DNS
-# Mac OS X needs this
-except:
-  try:
-    import dns as DNS
-  except:
-    pass                        # Never mind, let's break later.
-import random
-from pprint import pprint
-
-""" Wrap gethostbyname """
-def dns_resolve(hostname):
-  try:
-    resolved_host = gethostbyname(hostname)
-    return resolved_host
-  except:
-    return False
-
-"""Perform a resolution on test_hostname and compare it with the expected
-   control_resolved ip address. Optionally, a label may be set to customize
-   output. If the experiment matches the control, this returns True; otherwise
-   it returns False.
-"""
-def dns_resolve_match(experiment_hostname, control_resolved,
-                       label="generic DNS comparison"):
-  experiment_resolved = dns_resolve(experiment_hostname)
-  if experiment_resolved == False:
-    return None
-  if experiment_resolved:
-    if str(experiment_resolved) != str(control_resolved):
-      print label + " control " + str(control_resolved) + " data does not " \
-            "match experiment response: " + str(experiment_resolved)
-      return False
-    return True
-
-def generic_DNS_resolve(experiment_hostname, experiment_resolver):
-  if experiment_resolver == None:
-    req = DNS.Request(name=experiment_hostname) # local resolver
-  else:
-    req = DNS.Request(name=experiment_hostname, server=experiment_resolver) #overide
-  resolved_data = req.req().answers
-  return resolved_data
-
-""" Return a list of all known censors. """
-def load_list_of_known_censors(known_proxy_file=None):
-  proxyfile = "proxy-lists/ips.txt"
-  known_proxy_file = open(proxyfile, 'r', 1)
-  known_proxy_list = []
-  for known_proxy in known_proxy_file.readlines():
-    known_proxy_list.append(known_proxy)
-  known_proxy_file.close()
-  known_proxy_count = len(known_proxy_list)
-  print "Loading " + str(known_proxy_count) + " known proxies..."
-  return known_proxy_list, known_proxy_count
-
-def load_list_of_test_hosts(hostfile=None):
-  if hostfile == None:
-    hostfile="censorship-lists/norwegian-dns-blacklist.txt"
-  host_list_file = open(hostfile, 'r', 1)
-  host_list = []
-  for host_name in host_list_file.readlines():
-    if host_name.isspace():
-      continue
-    else:
-     host_list.append(host_name)
-  host_list_file.close()
-  host_count = len(host_list)
-  #print "Loading " + str(host_count) + " test host names..."
-  return host_list, host_count
-
-""" Return True with a list of censors if we find a known censor from
-    known_proxy_list in the experiment_data DNS response. Otherwise return
-    False and None. """
-def contains_known_censors(known_proxy_list, experiment_data):
-  match = False
-  proxy_list = []
-  for answer in range(len(experiment_data)):
-    for known_proxy in known_proxy_list:
-      if answer == known_proxy:
-        print "CONFLICT: known proxy discovered: " + str(known_proxy),
-        proxy_list.append(known_proxy)
-        match = True
-  return match, proxy_list
-
-""" Return True and the experiment response that failed to match."""
-def compare_control_with_experiment(known_proxy_list, control_data, experiment_data):
-  known_proxy_found, known_proxies = contains_known_censors(known_proxy_list, experiment_data)
-  conflict_list = []
-  conflict = False
-  if known_proxy_found:
-    print "known proxy discovered: " + str(known_proxies)
-  for answer in range(len(control_data)):
-    if control_data[answer]['data'] == experiment_data:
-      print "control_data[answer]['data'] = " + str(control_data[answer]['data']) + "and experiment_data = " + str(experiment_data)
-      continue
-    else:
-      conflict = True
-      conflict_list.append(experiment_data)
-      #print "CONFLICT: control_data: " + str(control_data) + " experiment_data: " + str(experiment_data),
-  return conflict, conflict_list
-
-def dns_DNS_BULK_Tests(self, hostfile=None,
-                       known_good_resolver="8.8.8.8", test_resolver=None):
-  tampering = False # By default we'll pretend the internet is nice
-  tampering_list = []
-  host_list, host_count = load_list_of_test_hosts()
-  known_proxies, proxy_count = load_list_of_known_censors()
-  check_count = 1
-  if test_resolver == None:
-    DNS.ParseResolvConf() # Set the local resolver as our default
-  if self.randomize:
-    random.shuffle(host_list) # This makes our list non-sequential for now
-  for host_name in host_list:
-    host_name = host_name.strip()
-    print "Total progress: " + str(check_count) + " of " + str(host_count) + " hosts to check"
-    print "Resolving with control resolver..."
-    print "Testing " + host_name + " with control resolver: " + str(known_good_resolver)
-    print "Testing " + host_name + " with experiment resolver: " + str(test_resolver)
-    # XXX TODO - we need to keep track of the status of these requests and then resume them
-    while True:
-      try:
-        control_data = generic_DNS_resolve(host_name, known_good_resolver)
-        break
-      except KeyboardInterrupt:
-        print "bailing out..."
-        exit()
-      except DNS.Base.DNSError:
-        print "control resolver appears to be failing..."
-        continue
-      except:
-        print "Timeout; looping!"
-        continue
-
-    print "Resolving with experiment resolver..."
-    while True:
-      try:
-        experiment_data = generic_DNS_resolve(host_name, test_resolver)
-        break
-      except KeyboardInterrupt:
-        print "bailing out..."
-        exit()
-      except DNS.Base.DNSError:
-        print "experiment resolver appears to be failing..."
-        continue
-      except:
-        print "Timeout; looping!"
-        continue
-
-    print "Comparing control and experiment...",
-    tampering, conflicts = compare_control_with_experiment(known_proxies, control_data, experiment_data)
-    if tampering:
-      tampering_list.append(conflicts)
-      print "Conflicts with " + str(host_name) + " : " + str(conflicts)
-    check_count = check_count + 1
-  host_list.close()
-  return tampering
-
-""" Attempt to resolve random_hostname and return True and None if empty. If an
-    address is returned we return False and the returned address.
-"""
-def dns_response_empty(random_hostname):
-  response = dns_resolve(random_hostname)
-  if response == False:
-    return True, None
-  return False, response
-
-def dns_multi_response_empty(count, size):
-  for i in range(count):
-    randName = ooni.common._randstring(size)
-    response_empty, response_ip = dns_response_empty(randName)
-    if response_empty == True and response_ip == None:
-      responses_are_empty = True
-    else:
-      print label + " " + randName + " found with value " + str(response_ip)
-      responses_are_empty = False
-  return responses_are_empty
-
-""" Attempt to resolve one random host name per tld in tld_list where the
-    hostnames are random strings with a length between min_length and
-    max_length. Return True if list is empty, otherwise return False."""
-def dns_list_empty(tld_list, min_length, max_length,
-                   label="generic DNS list test"):
-  for tld in tld_list:
-    randName = ooni.common._randstring(min_length, max_length) + tld
-    response_empty, response_ip = dns_response_empty(randName)
-  return response_empty
-
-# Known bad test
-# Test for their DNS breakage and their HTTP MITM
-# "Family Shield" is 208.67.222.123 and 208.67.220.123
-# returns 67.215.65.130 for filtered sites like kink.com
-# block.opendns.com is a block page where users are redirected
-# 208.67.216.135 208.67.217.135 are the block pages currently point
-# 67.215.65.132 is returned for NXDOMAINs and a visit with HTTP to that IP
-# results in redirection to http://guide.opendns.com/main?url=sdagsad.com or
-# whatever the HOST header says
-# Amusingly - their Server header is: "OpenDNS Guide"
-""" Return True if we are not being directed as known OpenDNS block pages."""
-def OpenDNS_DNS_Tests(self):
-  return OpenDNS_Censorship_DNS_TESTS(self)
-  return OpenDNS_NXDomain_DNS_TESTS(self)
-
-def OpenDNS_Censorship_DNS_TESTS(self):
-  known_filter = "67.215.65.130"
-  randName = ooni.common._randstring(10)
-  redirected = dns_resolve_match(randName, known_filter, label="OpenDNS DNS Censorship comparison")
-  if redirected:
-    return False
-  else:
-    return True
-
-def OpenDNS_NXDomain_DNS_TESTS(self):
-  known_filter = "67.215.65.132"
-  randName = ooni.common._randstring(10)
-  redirected = dns_resolve_match(randName, known_filter, label="OpenDNS DNS NXDomain comparison")
-  if redirected:
-    return False
-  else:
-    return True
-
-"""Returns True if the experiment_url returns the well known Italian block page."""
-def cc_DNS_Tests_it(self):
-  tampering = False # By default we'll pretend the internet is nice
-  tampering_list = []
-  conflicts = []
-  known_good_resolver = "8.8.8.8"
-  host_list, host_count = load_list_of_test_hosts("censorship-lists/italy-gamble-blocklist-07-22-11.txt")
-  known_http_block_pages, known_block_count = load_list_of_test_hosts("proxy-lists/italy-http-ips.txt")
-  known_censoring_resolvers, censoring_resolver_count = load_list_of_test_hosts("proxy-lists/italy-dns-ips.txt")
-
-  check_count = 1
-  DNS.ParseResolvConf()
-  # Set the local resolver as our default
-  if self.randomize:
-    random.shuffle(host_list) # This makes our list non-sequential for now
-  print "We're testing (" + str(host_count) + ") URLs"
-  print "We're looking for (" + str(known_block_count) + ") block pages"
-  print "We're testing against (" + str(censoring_resolver_count) + ") censoring DNS resolvers"
-  for test_resolver in known_censoring_resolvers:
-    test_resolver = test_resolver.strip()
-    for host_name in host_list:
-      host_name = host_name.strip()
-      print "Total progress: " + str(check_count) + " of " + str(host_count) + " hosts to check"
-      print "Testing " + host_name + " with control resolver: " + known_good_resolver
-      print "Testing " + host_name + " with experiment resolver: " + test_resolver
-      while True:
-        try:
-          control_data = generic_DNS_resolve(host_name, known_good_resolver)
-          break
-        except KeyboardInterrupt:
-          print "bailing out..."
-          exit()
-        except DNS.Base.DNSError:
-          print "control resolver appears to be failing..."
-          break
-        except:
-          print "Timeout; looping!"
-          continue
-
-      while True:
-        try:
-          experiment_data = generic_DNS_resolve(host_name, test_resolver)
-          break
-        except KeyboardInterrupt:
-          print "bailing out..."
-          exit()
-        except DNS.Base.DNSError:
-          print "experiment resolver appears to be failing..."
-          continue
-        except:
-          print "Timeout; looping!"
-          continue
-
-      print "Comparing control and experiment...",
-      tampering, conflicts = compare_control_with_experiment(known_http_block_pages, control_data, experiment_data)
-      if tampering:
-        tampering_list.append(conflicts)
-        print "Conflicts with " + str(host_name) + " : " + str(conflicts)
-      check_count = check_count + 1
-
-  host_list.close()
-  return tampering
-
-
-## XXX TODO
-## Code up automatic tests for HTTP page checking in Italy - length + known strings, etc
-
-""" Returns True if the experiment_host returns a well known Australian filter
-    IP address."""
-def Australian_DNS_Censorship(self, known_filtered_host="badhost.com"):
-  # http://www.robtex.com/ip/61.88.88.88.html
-  # http://requests.optus.net.au/dns/
-  known_block_ip = "208.69.183.228" # http://interpol.contentkeeper.com/
-  known_censoring_resolvers = ["61.88.88.88"] # Optus
-  for resolver in known_censoring_resolvers:
-    blocked = generic_DNS_censorship(known_filtered_host, resolver, known_block_page)
-    if blocked:
-      return True
-
-"""Returns True if experiment_hostname as resolved by experiment_resolver
-   resolves to control_data. Returns False if there is no match or None if the
-   attempt fails."""
-def generic_DNS_censorship(self, experiment_hostname, experiment_resolver,
-                           control_data):
-  req = DNS.Request(name=experiment_hostname, server=experiment_resolver)
-  resolved_data = s.req().answers
-  for answer in range(len(resolved_data)):
-    if resolved_data[answer]['data'] == control_data:
-      return True
-  return False
-
-# See dns_launch_wildcard_checks in tor/src/or/dns.c for Tor implementation
-# details
-""" Return True if Tor would consider the network fine; False if it's hostile
-    and has no signs of DNS tampering. """
-def Tor_DNS_Tests(self):
-  response_rfc2606_empty = RFC2606_DNS_Tests(self)
-  tor_tld_list = ["", ".com", ".org", ".net"]
-  response_tor_empty = ooni.dnsooni.dns_list_empty(tor_tld_list, 8, 16, "TorDNSTest")
-  return response_tor_empty | response_rfc2606_empty
-
-""" Return True if RFC2606 would consider the network hostile; False if it's all
-    clear and has no signs of DNS tampering. """
-def RFC2606_DNS_Tests(self):
-  tld_list = [".invalid", ".test"]
-  return ooni.dnsooni.dns_list_empty(tld_list, 4, 18, "RFC2606Test")
-
-""" Return True if googleChromeDNSTest would consider the network OK."""
-def googleChrome_CP_Tests(self):
-    maxGoogleDNSTests = 3
-    GoogleDNSTestSize = 10
-    return ooni.dnsooni.dns_multi_response_empty(maxGoogleDNSTests,
-                                            GoogleDNSTestSize)
-def googleChrome_DNS_Tests(self):
-    return googleChrome_CP_Tests(self)
-
-""" Return True if MSDNSTest would consider the network OK."""
-def MSDNS_CP_Tests(self):
-    experimentHostname = "dns.msftncsi.com"
-    expectedResponse = "131.107.255.255"
-    return ooni.dnsooni.dns_resolve_match(experimentHostname, expectedResponse, "MS DNS")
-
-def MSDNS_DNS_Tests(self):
-    return MSDNS_CP_Tests(self)
diff --git a/old-to-be-ported-code/very-old/ooni/helpers.py b/old-to-be-ported-code/very-old/ooni/helpers.py
deleted file mode 100644
index 514e65f..0000000
--- a/old-to-be-ported-code/very-old/ooni/helpers.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-#
-# HTTP support for ooni-probe
-# by Jacob Appelbaum <jacob at appelbaum.net>
-#    Arturo Filasto' <art at fuffa.org>
-
-import ooni.common
-import pycurl
-import random
-import zipfile
-import os
-from xml.dom import minidom
-try:
-   from BeautifulSoup import BeautifulSoup
-except:
-   pass                        # Never mind, let's break later.
-
-def get_random_url(self):
-   filepath = os.getcwd() + "/test-lists/top-1m.csv.zip"
-   fp = zipfile.ZipFile(filepath, "r")
-   fp.open("top-1m.csv")
-   content = fp.read("top-1m.csv")
-   return "http://" + random.choice(content.split("\n")).split(",")[1]
-
-"""Pick a random header and use that for the request"""
-def get_random_headers(self):
-  filepath = os.getcwd() + "/test-lists/whatheaders.xml"
-  headers = []
-  content = open(filepath, "r").read()
-  soup = BeautifulSoup(content)
-  measurements = soup.findAll('measurement')
-  i = random.randint(0,len(measurements))
-  for vals in measurements[i].findAll('header'):
-    name = vals.find('name').string
-    value = vals.find('value').string
-    if name != "host":
-      headers.append((name, value))
-  return headers
diff --git a/old-to-be-ported-code/very-old/ooni/http.py b/old-to-be-ported-code/very-old/ooni/http.py
deleted file mode 100644
index 59e2abb..0000000
--- a/old-to-be-ported-code/very-old/ooni/http.py
+++ /dev/null
@@ -1,306 +0,0 @@
-#!/usr/bin/env python
-#
-# HTTP support for ooni-probe
-# by Jacob Appelbaum <jacob at appelbaum.net>
-#    Arturo Filasto' <art at fuffa.org>
-#
-
-from socket import gethostbyname
-import ooni.common
-import ooni.helpers
-import ooni.report
-import urllib2
-import httplib
-from urlparse import urlparse
-from pprint import pprint
-import pycurl
-import random
-import string
-import re
-from pprint import pprint
-try:
-   from BeautifulSoup import BeautifulSoup
-except:
-   pass                        # Never mind, let's break later.
-
-# By default, we'll be Torbutton's UA
-default_ua = { 'User-Agent' :
-               'Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0' }
-
-# Use pycurl to connect over a proxy
-PROXYTYPE_SOCKS5 = 5
-default_proxy_type = PROXYTYPE_SOCKS5
-default_proxy_host = "127.0.0.1"
-default_proxy_port = "9050"
-
-#class HTTPResponse(object):
-#  def __init__(self):
-
-
-"""A very basic HTTP fetcher that uses Tor by default and returns a curl
-   object."""
-def http_proxy_fetch(url, headers, proxy_type=5,
-                     proxy_host="127.0.0.1",
-                     proxy_port=9050):
-   request = pycurl.Curl()
-   request.setopt(pycurl.PROXY, proxy_host)
-   request.setopt(pycurl.PROXYPORT, proxy_port)
-   request.setopt(pycurl.PROXYTYPE, proxy_type)
-   request.setopt(pycurl.HTTPHEADER, ["User-Agent: Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0"])
-   request.setopt(pycurl.URL, url)
-   response = request.perform()
-   http_code = getinfo(pycurl.HTTP_CODE)
-   return response, http_code
-
-"""A very basic HTTP fetcher that returns a urllib2 response object."""
-def http_fetch(url,
-               headers= default_ua,
-               label="generic HTTP fetch"):
-   request = urllib2.Request(url, None, headers)
-   response = urllib2.urlopen(request)
-   return response
-
-"""Connect to test_hostname on port 80, request url and compare it with the expected
-   control_result. Optionally, a label may be set to customize
-   output. If the experiment matches the control, this returns True with the http
-   status code; otherwise it returns False.
-"""
-def http_content_match(experimental_url, control_result,
-                       headers= { 'User-Agent' : default_ua },
-                       label="generic HTTP content comparison"):
-  request = urllib2.Request(experimental_url, None, headers)
-  response = urllib2.urlopen(request)
-  responseContents = response.read()
-  responseCode = response.code
-  if responseContents != False:
-    if str(responseContents) != str(control_result):
-      print label + " control " + str(control_result) + " data does not " \
-            "match experiment response: " + str(responseContents)
-      return False, responseCode
-    return True, responseCode
-  else:
-    print "HTTP connection appears to have failed"
-  return False, False
-
-"""Connect to test_hostname on port 80, request url and compare it with the expected
-   control_result as a regex. Optionally, a label may be set to customize
-   output. If the experiment matches the control, this returns True with the HTTP
-   status code; otherwise it returns False.
-"""
-def http_content_fuzzy_match(experimental_url, control_result,
-                       headers= { 'User-Agent' : default_ua },
-                       label="generic HTTP content comparison"):
-  request = urllib2.Request(experimental_url, None, headers)
-  response = urllib2.urlopen(request)
-  responseContents = response.read()
-  responseCode = response.code
-  pattern = re.compile(control_result)
-  match = pattern.search(responseContents)
-  if responseContents != False:
-    if not match:
-      print label + " control " + str(control_result) + " data does not " \
-            "match experiment response: " + str(responseContents)
-      return False, responseCode
-    return True, responseCode
-  else:
-    print "HTTP connection appears to have failed"
-  return False, False
-
-"""Compare two HTTP status codes as integers and return True if they match."""
-def http_status_code_match(experiment_code, control_code):
-  if int(experiment_code) != int(control_code):
-    return False
-  return True
-
-"""Compare two HTTP status codes as integers and return True if they don't match."""
-def http_status_code_no_match(experiment_code, control_code):
-   if http_status_code_match(experiment_code, control_code):
-     return False
-   return True
-
-"""Connect to a URL and compare the control_header/control_result with the data
-served by the remote server. Return True if it matches, False if it does not."""
-def http_header_match(experiment_url, control_header, control_result):
-  response = http_fetch(url, label=label)
-  remote_header = response.get_header(control_header)
-  if str(remote_header) == str(control_result):
-    return True
-  else:
-    return False
-
-"""Connect to a URL and compare the control_header/control_result with the data
-served by the remote server. Return True if it does not matche, False if it does."""
-def http_header_no_match(experiment_url, control_header, control_result):
-  match = http_header_match(experiment_url, control_header, control_result)
-  if match:
-    return False
-  else:
-    return True
-
-def send_browser_headers(self, browser, conn):
-  headers = ooni.helpers.get_random_headers(self)
-  for h in headers:
-    conn.putheader(h[0], h[1])
-  conn.endheaders()
-  return True
-
-def http_request(self, method, url, path=None):
-  purl = urlparse(url)
-  host = purl.netloc
-  conn = httplib.HTTPConnection(host, 80)
-  conn.connect()
-  if path is None:
-    path = purl.path
-  conn.putrequest(method, purl.path)
-  send_browser_headers(self, None, conn)
-  response = conn.getresponse()
-  headers = dict(response.getheaders())
-  self.headers = headers
-  self.data = response.read()
-  return True
-
-def search_headers(self, s_headers, url):
-  if http_request(self, "GET", url):
-    headers = self.headers
-  else:
-    return None
-  result = {}
-  for h in s_headers.items():
-    result[h[0]] = h[0] in headers
-  return result
-
-# XXX for testing
-#  [('content-length', '9291'), ('via', '1.0 cache_server:3128 (squid/2.6.STABLE21)'), ('x-cache', 'MISS from cache_server'), ('accept-ranges', 'bytes'), ('server', 'Apache/2.2.16 (Debian)'), ('last-modified', 'Fri, 22 Jul 2011 03:00:31 GMT'), ('connection', 'close'), ('etag', '"105801a-244b-4a89fab1e51c0;49e684ba90c80"'), ('date', 'Sat, 23 Jul 2011 03:03:56 GMT'), ('content-type', 'text/html'), ('x-cache-lookup', 'MISS from cache_server:3128')]
-
-"""Search for squid headers by requesting a random site and checking if the headers have been rewritten (active, not fingerprintable)"""
-def search_squid_headers(self):
-  test_name = "squid header"
-  self.logger.info("RUNNING %s test" % test_name)
-  url = ooni.helpers.get_random_url(self)
-  s_headers = {'via': '1.0 cache_server:3128 (squid/2.6.STABLE21)', 'x-cache': 'MISS from cache_server', 'x-cache-lookup':'MISS from cache_server:3128'}
-  ret = search_headers(self, s_headers, url)
-  for i in ret.items():
-    if i[1] is True:
-      self.logger.info("the %s test returned False" % test_name)
-      return False
-  self.logger.info("the %s test returned True" % test_name)
-  return True
-
-def random_bad_request(self):
-  url = ooni.helpers.get_random_url(self)
-  r_str = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(random.randint(5,20)))
-  if http_request(self, r_str, url):
-    return True
-  else:
-    return None
-
-"""Create a request made up of a random string of 5-20 chars (active technique, possibly fingerprintable)"""
-def squid_search_bad_request(self):
-  test_name = "squid bad request"
-  self.logger.info("RUNNING %s test" % test_name)
-  if random_bad_request(self):
-    s_headers = {'X-Squid-Error' : 'ERR_INVALID_REQ 0'}
-    for i in s_headers.items():
-      if i[0] in self.headers:
-        self.logger.info("the %s test returned False" % test_name)
-        return False
-    self.logger.info("the %s test returned True" % test_name)
-    return True
-  else:
-    self.logger.warning("the %s test returned failed" % test_name)
-    return None
-
-"""Try requesting cache_object and expect as output access denied (very active technique, fingerprintable) """
-def squid_cacheobject_request(self):
-  url = ooni.helpers.get_random_url(self)
-  test_name = "squid cacheobject"
-  self.logger.info("RUNNING %s test" % test_name)
-  if http_request(self, "GET", url, "cache_object://localhost/info"):
-    soup = BeautifulSoup(self.data)
-    if soup.find('strong') and soup.find('strong').string == "Access Denied.":
-      self.logger.info("the %s test returned False" % test_name)
-      return False
-    else:
-      self.logger.info("the %s test returned True" % test_name)
-      return True
-  else:
-    self.logger.warning("the %s test failed" % test_name)
-    return None
-
-
-def MSHTTP_CP_Tests(self):
-  test_name = "MS HTTP Captive Portal"
-  self.logger.info("RUNNING %s test" % test_name)
-  experiment_url = "http://www.msftncsi.com/ncsi.txt"
-  expectedResponse = "Microsoft NCSI" # Only this - nothing more
-  expectedResponseCode = "200" # Must be this - nothing else
-  label = "MS HTTP"
-  headers = { 'User-Agent' : 'Microsoft NCSI' }
-  content_match, experiment_code = http_content_match(experiment_url, expectedResponse,
-                         headers, label)
-  status_match = http_status_code_match(expectedResponseCode,
-                        experiment_code)
-  if status_match and content_match:
-    self.logger.info("the %s test returned True" % test_name)
-    return True
-  else:
-    print label + " experiment would conclude that the network is filtered."
-    self.logger.info("the %s test returned False" % test_name)
-    return False
-
-def AppleHTTP_CP_Tests(self):
-  test_name = "Apple HTTP Captive Portal"
-  self.logger.info("RUNNING %s test" % test_name)
-  experiment_url = "http://www.apple.com/library/test/success.html"
-  expectedResponse = "Success" # There is HTML that contains this string
-  expectedResponseCode = "200"
-  label = "Apple HTTP"
-  headers = { 'User-Agent' : 'Mozilla/5.0 (iPhone; U; CPU like Mac OS X; en) '
-                           'AppleWebKit/420+ (KHTML, like Gecko) Version/3.0'
-                           ' Mobile/1A543a Safari/419.3' }
-  content_match, experiment_code = http_content_fuzzy_match(
-                                   experiment_url, expectedResponse, headers)
-  status_match = http_status_code_match(expectedResponseCode,
-                          experiment_code)
-  if status_match and content_match:
-    self.logger.info("the %s test returned True" % test_name)
-    return True
-  else:
-    print label + " experiment would conclude that the network is filtered."
-    print label + "content match:" + str(content_match) + " status match:" + str(status_match)
-    self.logger.info("the %s test returned False" % test_name)
-    return False
-
-def WC3_CP_Tests(self):
-  test_name = "W3 Captive Portal"
-  self.logger.info("RUNNING %s test" % test_name)
-  url = "http://tools.ietf.org/html/draft-nottingham-http-portal-02"
-  draftResponseCode = "428"
-  label = "WC3 draft-nottingham-http-portal"
-  response = http_fetch(url, label=label)
-  responseCode = response.code
-  if http_status_code_no_match(responseCode, draftResponseCode):
-    self.logger.info("the %s test returned True" % test_name)
-    return True
-  else:
-    print label + " experiment would conclude that the network is filtered."
-    print label + " status match:" + status_match
-    self.logger.info("the %s test returned False" % test_name)
-    return False
-
-# Google ChromeOS fetches this url in guest mode
-# and they expect the user to authenticate
-def googleChromeOSHTTPTest(self):
-  print "noop"
-  #url = "http://www.google.com/"
-
-def SquidHeader_TransparentHTTP_Tests(self):
-  return search_squid_headers(self)
-
-def SquidBadRequest_TransparentHTTP_Tests(self):
-  return squid_search_bad_request(self)
-
-def SquidCacheobject_TransparentHTTP_Tests(self):
-  return squid_cacheobject_request(self)
-
-
diff --git a/old-to-be-ported-code/very-old/ooni/input.py b/old-to-be-ported-code/very-old/ooni/input.py
deleted file mode 100644
index c32ab48..0000000
--- a/old-to-be-ported-code/very-old/ooni/input.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/python
-
-class file:
-    def __init__(self, name=None):
-        if name:
-            self.name = name
-
-    def simple(self, name=None):
-        """ Simple file parsing method:
-        Read a file line by line and output an array with all it's lines, without newlines
-        """
-        if name:
-            self.name = name
-        output = []
-        try:
-            f = open(self.name, "r")
-            for line in f.readlines():
-                output.append(line.strip())
-            return output
-        except:
-            return output
-
-    def csv(self, name=None):
-        if name:
-            self.name = name
-
-    def yaml(self, name):
-        if name:
-            self.name = name
-
-    def consensus(self, name):
-        if name:
-            self.name = name
diff --git a/old-to-be-ported-code/very-old/ooni/namecheck.py b/old-to-be-ported-code/very-old/ooni/namecheck.py
deleted file mode 100644
index 1a2a3f0..0000000
--- a/old-to-be-ported-code/very-old/ooni/namecheck.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python
-#
-# DNS tampering detection module
-# by Jacob Appelbaum <jacob at appelbaum.net>
-#
-# This module performs multiple DNS tests.
-
-import sys
-import ooni.dnsooni
-
-class DNS():
-  def __init__(self, args):
-    self.in_ = sys.stdin
-    self.out = sys.stdout
-    self.debug = False
-    self.randomize = args.randomize
-
-  def DNS_Tests(self):
-    print "DNS tampering detection:"
-    filter_name = "_DNS_Tests"
-    tests = [ooni.dnsooni]
-    for test in tests:
-      for function_ptr in dir(test):
-        if function_ptr.endswith(filter_name):
-          filter_result = getattr(test, function_ptr)(self)
-          if filter_result == True:
-            print function_ptr + " thinks the network is clean"
-          elif filter_result == None:
-              print function_ptr + " failed"
-          else:
-            print function_ptr + " thinks the network is dirty"
-
-  def main(self):
-    for function_ptr in dir(self):
-      if function_ptr.endswith("_Tests"):
-        getattr(self, function_ptr)()
-
-if __name__ == '__main__':
-  self.main()
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/__init__.py b/old-to-be-ported-code/very-old/ooni/plugins/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/dnstest_plgoo.py b/old-to-be-ported-code/very-old/ooni/plugins/dnstest_plgoo.py
deleted file mode 100644
index 0c0cfa7..0000000
--- a/old-to-be-ported-code/very-old/ooni/plugins/dnstest_plgoo.py
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/usr/bin/python
-
-import sys
-import re
-from pprint import pprint
-from twisted.internet import reactor, endpoints
-from twisted.names import client
-from ooni.plugooni import Plugoo
-from ooni.socksclient import SOCKSv4ClientProtocol, SOCKSWrapper
-
-class DNSTestPlugin(Plugoo):
-    def __init__(self):
-        self.name = ""
-        self.type = ""
-        self.paranoia = ""
-        self.modules_to_import = []
-        self.output_dir = ""
-        self.buf = ""
-        self.control_response = []
-
-    def response_split(self, response):
-      a = []
-      b = []
-      for i in response:
-        a.append(i[0])
-        b.append(i[1])
-
-      return a,b
-
-    def cb(self, type, hostname, dns_server, value):
-      if self.control_response is None:
-        self.control_response = []
-      if type == 'control' and self.control_response != value:
-          print "%s %s" % (dns_server, value)
-          self.control_response.append((dns_server,value))
-          pprint(self.control_response)
-      if type == 'experiment':
-        pprint(self.control_response)
-        _, res = self.response_split(self.control_response)
-        if value not in res:
-          print "res (%s) : " % value
-          pprint(res)
-          print "---"
-          print "%s appears to be censored on %s (%s != %s)" % (hostname, dns_server, res[0], value)
-
-        else:
-          print "%s appears to be clean on %s" % (hostname, dns_server)
-        self.r2.servers = [('212.245.158.66',53)]
-      print "HN: %s %s" % (hostname, value)
-
-    def err(self, pck, error):
-      pprint(pck)
-      error.printTraceback()
-      reactor.stop()
-      print "error!"
-      pass
-
-    def ooni_main(self, args):
-        self.experimentalproxy = ''
-        self.test_hostnames = ['dio.it']
-        self.control_dns = [('8.8.8.8',53), ('4.4.4.8',53)]
-        self.experiment_dns = [('85.37.17.9',53),('212.245.158.66',53)]
-
-        self.control_res = []
-        self.control_response = None
-
-        self.r1 = client.Resolver(None, [self.control_dns.pop()])
-        self.r2 = client.Resolver(None, [self.experiment_dns.pop()])
-
-        for hostname in self.test_hostnames:
-          for dns_server in self.control_dns:
-            self.r1.servers = [dns_server]
-            f = self.r1.getHostByName(hostname)
-            pck = (hostname, dns_server)
-            f.addCallback(lambda x: self.cb('control', hostname, dns_server, x)).addErrback(lambda x: self.err(pck, x))
-
-          for dns_server in self.experiment_dns:
-            self.r2.servers = [dns_server]
-            pck = (hostname, dns_server)
-            f = self.r2.getHostByName(hostname)
-            f.addCallback(lambda x: self.cb('experiment', hostname, dns_server, x)).addErrback(lambda x: self.err(pck, x))
-
-        reactor.run()
-
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/http_plgoo.py b/old-to-be-ported-code/very-old/ooni/plugins/http_plgoo.py
deleted file mode 100644
index 021e863..0000000
--- a/old-to-be-ported-code/very-old/ooni/plugins/http_plgoo.py
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/usr/bin/python
-
-import sys
-import re
-from twisted.internet import reactor, endpoints
-from twisted.web import client
-from ooni.plugooni import Plugoo
-from ooni.socksclient import SOCKSv4ClientProtocol, SOCKSWrapper
-
-class HttpPlugin(Plugoo):
-    def __init__(self):
-        self.name = ""
-        self.type = ""
-        self.paranoia = ""
-        self.modules_to_import = []
-        self.output_dir = ""
-        self.buf = ''
-
-    def cb(self, type, content):
-        print "got %d bytes from %s" % (len(content), type) # DEBUG
-        if not self.buf:
-            self.buf = content
-        else:
-            if self.buf == content:
-                print "SUCCESS"
-            else:
-                print "FAIL"
-            reactor.stop()
-
-    def endpoint(self, scheme, host, port):
-        ep = None
-        if scheme == 'http':
-            ep = endpoints.TCP4ClientEndpoint(reactor, host, port)
-        elif scheme == 'https':
-            ep = endpoints.SSL4ClientEndpoint(reactor, host, port, context)
-        return ep
-
-    def ooni_main(self):
-        # We don't have the Command object so cheating for now.
-        url = 'http://check.torproject.org/'
-        self.controlproxy = 'socks4a://127.0.0.1:9050'
-        self.experimentalproxy = ''
-
-        if not re.match("[a-zA-Z0-9]+\:\/\/[a-zA-Z0-9]+", url):
-          return None
-        scheme, host, port, path = client._parse(url)
-
-        ctrl_dest = self.endpoint(scheme, host, port)
-        if not ctrl_dest:
-            raise Exception('unsupported scheme %s in %s' % (scheme, url))
-        if self.controlproxy:
-            _, proxy_host, proxy_port, _ = client._parse(self.controlproxy)
-            control = SOCKSWrapper(reactor, proxy_host, proxy_port, ctrl_dest)
-        else:
-            control = ctrl_dest
-        f = client.HTTPClientFactory(url)
-        f.deferred.addCallback(lambda x: self.cb('control', x))
-        control.connect(f)
-
-        exp_dest = self.endpoint(scheme, host, port)
-        if not exp_dest:
-            raise Exception('unsupported scheme %s in %s' % (scheme, url))
-        # FIXME: use the experiment proxy if there is one
-        experiment = exp_dest
-        f = client.HTTPClientFactory(url)
-        f.deferred.addCallback(lambda x: self.cb('experiment', x))
-        experiment.connect(f)
-
-        reactor.run()
-
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/marco_plgoo.py b/old-to-be-ported-code/very-old/ooni/plugins/marco_plgoo.py
deleted file mode 100644
index cb63df7..0000000
--- a/old-to-be-ported-code/very-old/ooni/plugins/marco_plgoo.py
+++ /dev/null
@@ -1,377 +0,0 @@
-#!/usr/bin/python
-# Copyright 2009 The Tor Project, Inc.
-# License at end of file.
-#
-# This tests connections to a list of Tor nodes in a given Tor consensus file
-# while also recording the certificates - it's not a perfect tool but complete
-# or even partial failure should raise alarms.
-#
-# This plugoo uses threads and as a result, it's not friendly to SIGINT signals.
-#
-
-import logging
-import socket
-import time
-import random
-import threading
-import sys
-import os
-try:
-    from ooni.plugooni import Plugoo
-except:
-    print "Error importing Plugoo"
-
-try:
-    from ooni.common import Storage
-except:
-    print "Error importing Storage"
-
-try:
-    from ooni import output
-except:
-    print "Error importing output"
-
-try:
-    from ooni import input
-except:
-    print "Error importing output"
-
-
-
-ssl = OpenSSL = None
-
-try:
-    import ssl
-except ImportError:
-    pass
-
-if ssl is None:
-    try:
-        import OpenSSL.SSL
-        import OpenSSL.crypto
-    except ImportError:
-        pass
-
-if ssl is None and OpenSSL is None:
-    if socket.ssl:
-        print """Your Python is too old to have the ssl module, and you haven't
-installed pyOpenSSL.  I'll try to work with what you've got, but I can't
-record certificates so well."""
-    else:
-        print """Your Python has no OpenSSL support.  Upgrade to 2.6, install
-pyOpenSSL, or both."""
-        sys.exit(1)
-
-################################################################
-
-# How many servers should we test in parallel?
-N_THREADS = 16
-
-# How long do we give individual socket operations to succeed or fail?
-# (Seconds)
-TIMEOUT = 10
-
-################################################################
-
-CONNECTING = "noconnect"
-HANDSHAKING = "nohandshake"
-OK = "ok"
-ERROR = "err"
-
-LOCK = threading.RLock()
-socket.setdefaulttimeout(TIMEOUT)
-
-def clean_pem_cert(cert):
-    idx = cert.find('-----END')
-    if idx > 1 and cert[idx-1] != '\n':
-        cert = cert.replace('-----END','\n-----END')
-    return cert
-
-def record((addr,port), state, extra=None, cert=None):
-    LOCK.acquire()
-    try:
-        OUT.append({'addr' : addr,
-                         'port' : port,
-                         'state' : state,
-                         'extra' : extra})
-        if cert:
-            CERT_OUT.append({'addr' : addr,
-                                  'port' : port,
-                                  'clean_cert' : clean_pem_cert(cert)})
-    finally:
-        LOCK.release()
-
-def probe(address,theCtx=None):
-    sock = s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-    logging.info("Opening socket to %s",address)
-    try:
-        s.connect(address)
-    except IOError, e:
-        logging.info("Error %s from socket connect.",e)
-        record(address, CONNECTING, e)
-        s.close()
-        return
-    logging.info("Socket to %s open.  Launching SSL handshake.",address)
-    if ssl:
-        try:
-            s = ssl.wrap_socket(s,cert_reqs=ssl.CERT_NONE,ca_certs=None)
-            # "MARCO!"
-            s.do_handshake()
-        except IOError, e:
-            logging.info("Error %s from ssl handshake",e)
-            record(address, HANDSHAKING, e)
-            s.close()
-            sock.close()
-            return
-        cert = s.getpeercert(True)
-        if cert != None:
-            cert = ssl.DER_cert_to_PEM_cert(cert)
-    elif OpenSSL:
-        try:
-            s = OpenSSL.SSL.Connection(theCtx, s)
-            s.set_connect_state()
-            s.setblocking(True)
-            s.do_handshake()
-            cert = s.get_peer_certificate()
-            if cert != None:
-                cert = OpenSSL.crypto.dump_certificate(
-                    OpenSSL.crypto.FILETYPE_PEM, cert)
-        except IOError, e:
-            logging.info("Error %s from OpenSSL handshake",e)
-            record(address, HANDSHAKING, e)
-            s.close()
-            sock.close()
-            return
-    else:
-        try:
-            s = socket.ssl(s)
-            s.write('a')
-            cert = s.server()
-        except IOError, e:
-            logging.info("Error %s from socket.ssl handshake",e)
-            record(address, HANDSHAKING, e)
-            sock.close()
-            return
-
-    logging.info("SSL handshake with %s finished",address)
-    # "POLO!"
-    record(address,OK, cert=cert)
-    if (ssl or OpenSSL):
-        s.close()
-    sock.close()
-
-def parseNetworkstatus(ns):
-    for line in ns:
-        if line.startswith('r '):
-            r = line.split()
-            yield (r[-3],int(r[-2]))
-
-def parseCachedDescs(cd):
-    for line in cd:
-        if line.startswith('router '):
-            r = line.split()
-            yield (r[2],int(r[3]))
-
-def worker(addrList, origLength):
-    done = False
-    logging.info("Launching thread.")
-
-    if OpenSSL is not None:
-        context = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_METHOD)
-    else:
-        context = None
-
-    while True:
-        LOCK.acquire()
-        try:
-            if addrList:
-                print "Starting test %d/%d"%(
-                    1+origLength-len(addrList),origLength)
-                addr = addrList.pop()
-            else:
-                return
-        finally:
-            LOCK.release()
-
-        try:
-            logging.info("Launching probe for %s",addr)
-            probe(addr, context)
-        except Exception, e:
-            logging.info("Unexpected error from %s",addr)
-            record(addr, ERROR, e)
-
-def runThreaded(addrList, nThreads):
-    ts = []
-    origLen = len(addrList)
-    for num in xrange(nThreads):
-        t = threading.Thread(target=worker, args=(addrList,origLen))
-        t.setName("Th#%s"%num)
-        ts.append(t)
-        t.start()
-    for t in ts:
-        logging.info("Joining thread %s",t.getName())
-        t.join()
-
-def main(self, args):
-    # BEGIN
-    # This logic should be present in more or less all plugoos
-    global OUT
-    global CERT_OUT
-    global OUT_DATA
-    global CERT_OUT_DATA
-    OUT_DATA = []
-    CERT_OUT_DATA = []
-
-    try:
-        OUT = output.data(name=args.output.main) #open(args.output.main, 'w')
-    except:
-        print "No output file given. quitting..."
-        return -1
-
-    try:
-        CERT_OUT = output.data(args.output.certificates) #open(args.output.certificates, 'w')
-    except:
-        print "No output cert file given. quitting..."
-        return -1
-
-    logging.basicConfig(format='%(asctime)s [%(levelname)s] [%(threadName)s] %(message)s',
-                        datefmt="%b %d %H:%M:%S",
-                        level=logging.INFO,
-                        filename=args.log)
-    logging.info("============== STARTING NEW LOG")
-    # END
-
-    if ssl is not None:
-        methodName = "ssl"
-    elif OpenSSL is not None:
-        methodName = "OpenSSL"
-    else:
-        methodName = "socket"
-    logging.info("Running marco with method '%s'", methodName)
-
-    addresses = []
-
-    if args.input.ips:
-        for fn in input.file(args.input.ips).simple():
-            a, b = fn.split(":")
-            addresses.append( (a,int(b)) )
-
-    elif args.input.consensus:
-        for fn in args:
-            print fn
-            for a,b in parseNetworkstatus(open(args.input.consensus)):
-                addresses.append( (a,b) )
-
-    if args.input.randomize:
-        # Take a random permutation of the set the knuth way!
-        for i in range(0, len(addresses)):
-            j = random.randint(0, i)
-            addresses[i], addresses[j] = addresses[j], addresses[i]
-
-    if len(addresses) == 0:
-        logging.error("No input source given, quiting...")
-        return -1
-
-    addresses = list(addresses)
-
-    if not args.input.randomize:
-        addresses.sort()
-
-    runThreaded(addresses, N_THREADS)
-
-class MarcoPlugin(Plugoo):
-  def __init__(self):
-    self.name = ""
-
-    self.modules = [ "logging", "socket", "time", "random", "threading", "sys",
-                     "OpenSSL.SSL", "OpenSSL.crypto", "os" ]
-
-    self.input = Storage()
-    self.input.ip = None
-    try:
-        c_file = os.path.expanduser("~/.tor/cached-consensus")
-        open(c_file)
-        self.input.consensus = c_file
-    except:
-        pass
-
-    try:
-        c_file = os.path.expanduser("~/tor/bundle/tor-browser_en-US/Data/Tor/cached-consensus")
-        open(c_file)
-        self.input.consensus = c_file
-    except:
-        pass
-
-    if not self.input.consensus:
-        print "Error importing consensus file"
-        sys.exit(1)
-
-    self.output = Storage()
-    self.output.main = 'reports/marco-1.yamlooni'
-    self.output.certificates = 'reports/marco_certs-1.out'
-
-    # XXX This needs to be moved to a proper function
-    #     refactor, refactor and ... refactor!
-    if os.path.exists(self.output.main):
-        basedir = "/".join(self.output.main.split("/")[:-1])
-        fn = self.output.main.split("/")[-1].split(".")
-        ext = fn[1]
-        name = fn[0].split("-")[0]
-        i = fn[0].split("-")[1]
-        i = int(i) + 1
-        self.output.main = os.path.join(basedir, name + "-" + str(i) + "." + ext)
-
-    if os.path.exists(self.output.certificates):
-        basedir = "/".join(self.output.certificates.split("/")[:-1])
-        fn = self.output.certificates.split("/")[-1].split(".")
-        ext = fn[1]
-        name = fn[0].split("-")[0]
-        i = fn[0].split("-")[1]
-        i = int(i) + 1
-        self.output.certificates= os.path.join(basedir, name + "-" + str(i) + "." + ext)
-
-    # We require for Tor to already be running or have recently run
-    self.args = Storage()
-    self.args.input = self.input
-    self.args.output = self.output
-    self.args.log = 'reports/marco.log'
-
-  def ooni_main(self, cmd):
-    self.args.input.randomize = cmd.randomize
-    self.args.input.ips = cmd.listfile
-    main(self, self.args)
-
-if __name__ == '__main__':
-    if len(sys.argv) < 2:
-        print >> sys.stderr, ("This script takes one or more networkstatus "
-                              "files as an argument.")
-    self = None
-    main(self, sys.argv[1:])
-
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#
-#     * Neither the names of the copyright owners nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/proxy_plgoo.py b/old-to-be-ported-code/very-old/ooni/plugins/proxy_plgoo.py
deleted file mode 100644
index d175c1c..0000000
--- a/old-to-be-ported-code/very-old/ooni/plugins/proxy_plgoo.py
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/usr/bin/python
-
-import sys
-from twisted.internet import reactor, endpoints
-from twisted.web import client
-from ooni.plugooni import Plugoo
-from ooni.socksclient import SOCKSv4ClientProtocol, SOCKSWrapper
-
-class HttpPlugin(Plugoo):
-    def __init__(self):
-        self.name = ""
-        self.type = ""
-        self.paranoia = ""
-        self.modules_to_import = []
-        self.output_dir = ""
-        self.buf = ''
-
-    def cb(self, type, content):
-        print "got %d bytes from %s" % (len(content), type) # DEBUG
-        if not self.buf:
-            self.buf = content
-        else:
-            if self.buf == content:
-                print "SUCCESS"
-            else:
-                print "FAIL"
-            reactor.stop()
-
-    def endpoint(self, scheme, host, port):
-        ep = None
-        if scheme == 'http':
-            ep = endpoints.TCP4ClientEndpoint(reactor, host, port)
-        elif scheme == 'https':
-            from twisted.internet import ssl
-            ep = endpoints.SSL4ClientEndpoint(reactor, host, port,
-                                              ssl.ClientContextFactory())
-        return ep
-
-    def ooni_main(self, cmd):
-        # We don't have the Command object so cheating for now.
-        url = cmd.hostname
-
-        # FIXME: validate that url is on the form scheme://host[:port]/path
-        scheme, host, port, path = client._parse(url)
-
-        ctrl_dest = self.endpoint(scheme, host, port)
-        if not ctrl_dest:
-            raise Exception('unsupported scheme %s in %s' % (scheme, url))
-        if cmd.controlproxy:
-            assert scheme != 'https', "no support for proxied https atm, sorry"
-            _, proxy_host, proxy_port, _ = client._parse(cmd.controlproxy)
-            control = SOCKSWrapper(reactor, proxy_host, proxy_port, ctrl_dest)
-            print "proxy: ", proxy_host, proxy_port
-        else:
-            control = ctrl_dest
-        f = client.HTTPClientFactory(url)
-        f.deferred.addCallback(lambda x: self.cb('control', x))
-        control.connect(f)
-
-        exp_dest = self.endpoint(scheme, host, port)
-        if not exp_dest:
-            raise Exception('unsupported scheme %s in %s' % (scheme, url))
-        # FIXME: use the experiment proxy if there is one
-        experiment = exp_dest
-        f = client.HTTPClientFactory(url)
-        f.deferred.addCallback(lambda x: self.cb('experiment', x))
-        experiment.connect(f)
-
-        reactor.run()
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/simple_dns_plgoo.py b/old-to-be-ported-code/very-old/ooni/plugins/simple_dns_plgoo.py
deleted file mode 100644
index 87d3684..0000000
--- a/old-to-be-ported-code/very-old/ooni/plugins/simple_dns_plgoo.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python
-#
-# DNS tampering detection module
-# by Jacob Appelbaum <jacob at appelbaum.net>
-#
-# This module performs DNS queries against a known good resolver and a possible
-# bad resolver. We compare every resolved name against a list of known filters
-# - if we match, we ring a bell; otherwise, we list possible filter IP
-# addresses. There is a high false positive rate for sites that are GeoIP load
-# balanced.
-#
-
-import sys
-import ooni.dnsooni
-
-from ooni.plugooni import Plugoo
-
-class DNSBulkPlugin(Plugoo):
-  def __init__(self):
-    self.in_ = sys.stdin
-    self.out = sys.stdout
-    self.randomize = True # Pass this down properly
-    self.debug = False
-
-  def DNS_Tests(self):
-    print "DNS tampering detection for list of domains:"
-    tests = self.get_tests_by_filter(("_DNS_BULK_Tests"), (ooni.dnsooni))
-    self.run_tests(tests)
-
-  def magic_main(self):
-    self.run_plgoo_tests("_Tests")
-
-  def ooni_main(self, args):
-    self.magic_main()
-
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/tcpcon_plgoo.py b/old-to-be-ported-code/very-old/ooni/plugins/tcpcon_plgoo.py
deleted file mode 100644
index 01dee81..0000000
--- a/old-to-be-ported-code/very-old/ooni/plugins/tcpcon_plgoo.py
+++ /dev/null
@@ -1,278 +0,0 @@
-#!/usr/bin/python
-# Copyright 2011 The Tor Project, Inc.
-# License at end of file.
-#
-# This is a modified version of the marco plugoo. Given a list of #
-# IP:port addresses, this plugoo will attempt a TCP connection with each
-# host and write the results to a .yamlooni file.
-#
-# This plugoo uses threads and as a result, it's not friendly to SIGINT signals.
-#
-
-import logging
-import socket
-import time
-import random
-import threading
-import sys
-import os
-try:
-    from ooni.plugooni import Plugoo
-except:
-    print "Error importing Plugoo"
-
-try:
-    from ooni.common import Storage
-except:
-    print "Error importing Storage"
-
-try:
-    from ooni import output
-except:
-    print "Error importing output"
-
-try:
-    from ooni import input
-except:
-    print "Error importing output"
-
-################################################################
-
-# How many servers should we test in parallel?
-N_THREADS = 16
-
-# How long do we give individual socket operations to succeed or fail?
-# (Seconds)
-TIMEOUT = 10
-
-################################################################
-
-CONNECTING = "noconnect"
-OK = "ok"
-ERROR = "err"
-
-LOCK = threading.RLock()
-socket.setdefaulttimeout(TIMEOUT)
-
-# We will want to log the IP address, the port and the state
-def record((addr,port), state, extra=None):
-    LOCK.acquire()
-    try:
-        OUT.append({'addr' : addr,
-                    'port' : port,
-                    'state' : state,
-                    'extra' : extra})
-    finally:
-        LOCK.release()
-
-# For each IP address in the list, open a socket, write to the log and
-# then close the socket
-def probe(address,theCtx=None):
-    sock = s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-    logging.info("Opening socket to %s",address)
-    try:
-        s.connect(address)
-    except IOError, e:
-        logging.info("Error %s from socket connect.",e)
-        record(address, CONNECTING, e)
-        s.close()
-        return
-    logging.info("Socket to %s open.  Successfully launched TCP handshake.",address)
-    record(address, OK)
-    s.close()
-
-def parseNetworkstatus(ns):
-    for line in ns:
-        if line.startswith('r '):
-            r = line.split()
-            yield (r[-3],int(r[-2]))
-
-def parseCachedDescs(cd):
-    for line in cd:
-        if line.startswith('router '):
-            r = line.split()
-            yield (r[2],int(r[3]))
-
-def worker(addrList, origLength):
-    done = False
-    context = None
-
-    while True:
-        LOCK.acquire()
-        try:
-            if addrList:
-                print "Starting test %d/%d"%(
-                    1+origLength-len(addrList),origLength)
-                addr = addrList.pop()
-            else:
-                return
-        finally:
-            LOCK.release()
-
-        try:
-            logging.info("Launching probe for %s",addr)
-            probe(addr, context)
-        except Exception, e:
-            logging.info("Unexpected error from %s",addr)
-            record(addr, ERROR, e)
-
-def runThreaded(addrList, nThreads):
-    ts = []
-    origLen = len(addrList)
-    for num in xrange(nThreads):
-        t = threading.Thread(target=worker, args=(addrList,origLen))
-        t.setName("Th#%s"%num)
-        ts.append(t)
-        t.start()
-    for t in ts:
-        t.join()
-
-def main(self, args):
-    # BEGIN
-    # This logic should be present in more or less all plugoos
-    global OUT
-    global OUT_DATA
-    OUT_DATA = []
-
-    try:
-        OUT = output.data(name=args.output.main) #open(args.output.main, 'w')
-    except:
-        print "No output file given. quitting..."
-        return -1
-
-    logging.basicConfig(format='%(asctime)s [%(levelname)s] [%(threadName)s] %(message)s',
-                        datefmt="%b %d %H:%M:%S",
-                        level=logging.INFO,
-                        filename=args.log)
-    logging.info("============== STARTING NEW LOG")
-    # END
-
-    methodName = "socket"
-    logging.info("Running tcpcon with method '%s'", methodName)
-
-    addresses = []
-
-    if args.input.ips:
-        for fn in input.file(args.input.ips).simple():
-            a, b = fn.split(":")
-            addresses.append( (a,int(b)) )
-
-    elif args.input.consensus:
-        for fn in args:
-            print fn
-            for a,b in parseNetworkstatus(open(args.input.consensus)):
-                addresses.append( (a,b) )
-
-    if args.input.randomize:
-        # Take a random permutation of the set the knuth way!
-        for i in range(0, len(addresses)):
-            j = random.randint(0, i)
-            addresses[i], addresses[j] = addresses[j], addresses[i]
-
-    if len(addresses) == 0:
-        logging.error("No input source given, quiting...")
-        return -1
-
-    addresses = list(addresses)
-
-    if not args.input.randomize:
-        addresses.sort()
-
-    runThreaded(addresses, N_THREADS)
-
-class MarcoPlugin(Plugoo):
-  def __init__(self):
-    self.name = ""
-
-    self.modules = [ "logging", "socket", "time", "random", "threading", "sys",
-                     "os" ]
-
-    self.input = Storage()
-    self.input.ip = None
-    try:
-        c_file = os.path.expanduser("~/.tor/cached-consensus")
-        open(c_file)
-        self.input.consensus = c_file
-    except:
-        pass
-
-    try:
-        c_file = os.path.expanduser("~/tor/bundle/tor-browser_en-US/Data/Tor/cached-consensus")
-        open(c_file)
-        self.input.consensus = c_file
-    except:
-        pass
-
-    if not self.input.consensus:
-        print "Error importing consensus file"
-        sys.exit(1)
-
-    self.output = Storage()
-    self.output.main = 'reports/tcpcon-1.yamlooni'
-    self.output.certificates = 'reports/tcpcon_certs-1.out'
-
-    # XXX This needs to be moved to a proper function
-    #     refactor, refactor and ... refactor!
-    if os.path.exists(self.output.main):
-        basedir = "/".join(self.output.main.split("/")[:-1])
-        fn = self.output.main.split("/")[-1].split(".")
-        ext = fn[1]
-        name = fn[0].split("-")[0]
-        i = fn[0].split("-")[1]
-        i = int(i) + 1
-        self.output.main = os.path.join(basedir, name + "-" + str(i) + "." + ext)
-
-    if os.path.exists(self.output.certificates):
-        basedir = "/".join(self.output.certificates.split("/")[:-1])
-        fn = self.output.certificates.split("/")[-1].split(".")
-        ext = fn[1]
-        name = fn[0].split("-")[0]
-        i = fn[0].split("-")[1]
-        i = int(i) + 1
-        self.output.certificates= os.path.join(basedir, name + "-" + str(i) + "." + ext)
-
-    # We require for Tor to already be running or have recently run
-    self.args = Storage()
-    self.args.input = self.input
-    self.args.output = self.output
-    self.args.log = 'reports/tcpcon.log'
-
-  def ooni_main(self, cmd):
-    self.args.input.randomize = cmd.randomize
-    self.args.input.ips = cmd.listfile
-    main(self, self.args)
-
-if __name__ == '__main__':
-    if len(sys.argv) < 2:
-        print >> sys.stderr, ("This script takes one or more networkstatus "
-                              "files as an argument.")
-    self = None
-    main(self, sys.argv[1:])
-
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#
-#     * Neither the names of the copyright owners nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/tor.py b/old-to-be-ported-code/very-old/ooni/plugins/tor.py
deleted file mode 100644
index 0d95d4d..0000000
--- a/old-to-be-ported-code/very-old/ooni/plugins/tor.py
+++ /dev/null
@@ -1,80 +0,0 @@
-import re
-import os.path
-import signal
-import subprocess
-import socket
-import threading
-import time
-import logging
-
-from pytorctl import TorCtl
-
-torrc = os.path.join(os.getcwd(),'torrc') #os.path.join(projroot, 'globaleaks', 'tor', 'torrc')
-# hiddenservice = os.path.join(projroot, 'globaleaks', 'tor', 'hiddenservice')
-
-class ThreadProc(threading.Thread):
-    def __init__(self, cmd):
-        threading.Thread.__init__(self)
-        self.cmd = cmd
-        self.proc = None
-
-    def run(self):
-        print "running"
-        try:
-            self.proc = subprocess.Popen(self.cmd,
-                                         shell = False, stdout = subprocess.PIPE,
-                                         stderr = subprocess.PIPE)
-
-        except OSError:
-           logging.fatal('cannot execute command')
-
-class Tor:
-    def __init__(self):
-        self.start()
-
-    def check(self):
-        conn = TorCtl.connect()
-        if conn != None:
-            conn.close()
-            return True
-
-        return False
-
-
-    def start(self):
-        if not os.path.exists(torrc):
-            raise OSError("torrc doesn't exist (%s)" % torrc)
-
-        tor_cmd = ["tor", "-f", torrc]
-
-        torproc = ThreadProc(tor_cmd)
-        torproc.run()
-
-        bootstrap_line = re.compile("Bootstrapped 100%: ")
-
-        while True:
-            if torproc.proc == None:
-                time.sleep(1)
-                continue
-
-            init_line = torproc.proc.stdout.readline().strip()
-
-            if not init_line:
-                torproc.proc.kill()
-                return False
-
-            if bootstrap_line.search(init_line):
-                break
-
-        return True
-
-    def stop(self):
-        if not self.check():
-            return
-
-        conn = TorCtl.connect()
-        if conn != None:
-            conn.send_signal("SHUTDOWN")
-            conn.close()
-
-t = Tor()
diff --git a/old-to-be-ported-code/very-old/ooni/plugins/torrc b/old-to-be-ported-code/very-old/ooni/plugins/torrc
deleted file mode 100644
index b9ffc80..0000000
--- a/old-to-be-ported-code/very-old/ooni/plugins/torrc
+++ /dev/null
@@ -1,9 +0,0 @@
-SocksPort 9050
-ControlPort 9051
-VirtualAddrNetwork 10.23.47.0/10
-AutomapHostsOnResolve 1
-TransPort 9040
-TransListenAddress 127.0.0.1
-DNSPort 5353
-DNSListenAddress 127.0.0.1
-
diff --git a/old-to-be-ported-code/very-old/ooni/plugooni.py b/old-to-be-ported-code/very-old/ooni/plugooni.py
deleted file mode 100644
index 17f17b3..0000000
--- a/old-to-be-ported-code/very-old/ooni/plugooni.py
+++ /dev/null
@@ -1,106 +0,0 @@
-#!/usr/bin/env python
-#
-# Plugooni, ooni plugin module for loading plgoo files.
-# by Jacob Appelbaum <jacob at appelbaum.net>
-#    Arturo Filasto' <art at fuffa.org>
-
-import sys
-import os
-
-import imp, pkgutil, inspect
-
-class Plugoo:
-  def __init__(self, name, plugin_type, paranoia, author):
-    self.name = name
-    self.author = author
-    self.type = plugin_type
-    self.paranoia = paranoia
-
-  """
-  Expect a tuple of strings in 'filters' and a tuple of ooni 'plugins'.
-  Return a list of (plugin, function) tuples that match 'filter' in 'plugins'.
-  """
-  def get_tests_by_filter(self, filters, plugins):
-    ret_functions = []
-
-    for plugin in plugins:
-     for function_ptr in dir(plugin):
-       if function_ptr.endswith(filters):
-         ret_functions.append((plugin,function_ptr))
-    return ret_functions
-
-  """
-  Expect a list of (plugin, function) tuples that must be ran, and three strings 'clean'
-  'dirty' and 'failed'.
-  Run the tests and print 'clean','dirty' or 'failed' according to the test result.
-  """
-  def run_tests(self, tests, clean="clean", dirty="dirty", failed="failed"):
-    for test in tests:
-      filter_result = getattr(test[0], test[1])(self)
-      if filter_result == True:
-        print test[1] + ": " + clean
-      elif filter_result == None:
-        print test[1] + ": " + failed
-      else:
-        print test[1] + ": " + dirty
-
-  """
-  Find all the tests belonging to plgoo 'self' and run them.
-  We know the tests when we see them because they end in 'filter'.
-  """
-  def run_plgoo_tests(self, filter):
-    for function_ptr in dir(self):
-      if function_ptr.endswith(filter):
-        getattr(self, function_ptr)()
-
-PLUGIN_PATHS = [os.path.join(os.getcwd(), "ooni", "plugins")]
-RESERVED_NAMES = [ "skel_plgoo" ]
-
-class Plugooni():
-  def __init__(self, args):
-    self.in_ = sys.stdin
-    self.out = sys.stdout
-    self.debug = False
-    self.loadall = True
-    self.plugin_name = args.plugin_name
-    self.listfile = args.listfile
-
-    self.plgoo_found = False
-
-  # Print all the plugoons to stdout.
-  def list_plugoons(self):
-    print "Plugooni list:"
-    for loader, name, ispkg in pkgutil.iter_modules(PLUGIN_PATHS):
-      if name not in RESERVED_NAMES:
-        print "\t%s" %(name.split("_")[0])
-
-  # Return name of the plgoo class of a plugin.
-  # We know because it always ends with "Plugin".
-  def get_plgoo_class(self,plugin):
-    for memb_name, memb in inspect.getmembers(plugin, inspect.isclass):
-      if memb.__name__.endswith("Plugin"):
-        return memb
-
-  # This function is responsible for loading and running the plugoons
-  # the user wants to run.
-  def run(self, command_object):
-    print "Plugooni: the ooni plgoo plugin module loader"
-
-    # iterate all modules
-    for loader, name, ispkg in pkgutil.iter_modules(PLUGIN_PATHS):
-      # see if this module should be loaded
-      if (self.plugin_name == "all") or (name == self.plugin_name+"_plgoo"):
-        self.plgoo_found = True # we found at least one plgoo!
-
-        file, pathname, desc = imp.find_module(name, PLUGIN_PATHS)
-        # load module
-        plugin = imp.load_module(name, file, pathname, desc)
-        # instantiate plgoo class and call its ooni_main()
-        self.get_plgoo_class(plugin)().ooni_main(command_object)
-
-    # if we couldn't find the plgoo; whine to the user
-    if self.plgoo_found is False:
-      print "Plugooni could not find plugin '%s'!" %(self.plugin_name)
-
-if __name__ == '__main__':
-  self.main()
diff --git a/old-to-be-ported-code/very-old/ooni/transparenthttp.py b/old-to-be-ported-code/very-old/ooni/transparenthttp.py
deleted file mode 100644
index 311fb32..0000000
--- a/old-to-be-ported-code/very-old/ooni/transparenthttp.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-#
-# Captive Portal Detection With Multi-Vendor Emulation
-# by Jacob Appelbaum <jacob at appelbaum.net>
-#
-# This module performs multiple tests that match specific vendor
-# mitm proxies
-
-import sys
-import ooni.http
-import ooni.report
-
-class TransparentHTTPProxy():
-  def __init__(self, args):
-    self.in_ = sys.stdin
-    self.out = sys.stdout
-    self.debug = False
-    self.logger = ooni.report.Log().logger
-
-  def TransparentHTTPProxy_Tests(self):
-    print "Transparent HTTP Proxy:"
-    filter_name = "_TransparentHTTP_Tests"
-    tests = [ooni.http]
-    for test in tests:
-     for function_ptr in dir(test):
-       if function_ptr.endswith(filter_name):
-         filter_result = getattr(test, function_ptr)(self)
-         if filter_result == True:
-           print function_ptr + " thinks the network is clean"
-         elif filter_result == None:
-             print function_ptr + " failed"
-         else:
-           print function_ptr + " thinks the network is dirty"
-
-  def main(self):
-    for function_ptr in dir(self):
-      if function_ptr.endswith("_Tests"):
-        getattr(self, function_ptr)()
-
-if __name__ == '__main__':
-  self.main()
diff --git a/old-to-be-ported-code/very-old/traceroute.py b/old-to-be-ported-code/very-old/traceroute.py
deleted file mode 100644
index e8252c1..0000000
--- a/old-to-be-ported-code/very-old/traceroute.py
+++ /dev/null
@@ -1,108 +0,0 @@
-try:
-    from dns import resolver
-except:
-    print "Error: dnspython is not installed (http://www.dnspython.org/)"
-import gevent
-import os
-import plugoo
-
-try:
-    import scapy
-except:
-    print "Error: traceroute plugin requires scapy to be installed (http://www.secdev.org/projects/scapy)"
-
-from plugoo.assets import Asset
-from plugoo.tests import Test
-
-import socket
-
-__plugoo__ = "Traceroute"
-__desc__ = "Performs TTL walking tests"
-
-class TracerouteAsset(Asset):
-    def __init__(self, file=None):
-        self = Asset.__init__(self, file)
-
-
-class Traceroute(Test):
-    """A *very* quick and dirty traceroute implementation, UDP and TCP
-    """
-    def traceroute(self, dst, dst_port=3880, src_port=3000, proto="tcp", max_hops=30):
-        dest_addr = socket.gethostbyname(dst)
-        print "Doing traceroute on %s" % dst
-
-        recv = socket.getprotobyname('icmp')
-        send = socket.getprotobyname(proto)
-        ttl = 1
-        while True:
-            recv_sock = socket.socket(socket.AF_INET, socket.SOCK_RAW, recv)
-            if proto == "tcp":
-                send_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, send)
-            else:
-                send_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, send)
-            recv_sock.settimeout(10)
-            send_sock.settimeout(10)
-
-            send_sock.setsockopt(socket.SOL_IP, socket.IP_TTL, ttl)
-            recv_sock.bind(("", src_port))
-            if proto == "tcp":
-                try:
-                    send_sock.settimeout(2)
-                    send_sock.connect((dst, dst_port))
-                except socket.timeout:
-                    pass
-
-                except Exception, e:
-                    print "Error doing connect %s" % e
-            else:
-                send_sock.sendto("", (dst, dst_port))
-
-            curr_addr = None
-            try:
-                print "receiving data..."
-                _, curr_addr = recv_sock.recvfrom(512)
-                curr_addr = curr_addr[0]
-
-            except socket.error, e:
-                print "SOCKET ERROR: %s" % e
-
-            except Exception, e:
-                print "ERROR: %s" % e
-
-            finally:
-                send_sock.close()
-                recv_sock.close()
-
-            if curr_addr is not None:
-                curr_host = "%s" % curr_addr
-            else:
-                curr_host = "*"
-
-            print "%d\t%s" % (ttl, curr_host)
-
-            if curr_addr == dest_addr or ttl > max_hops:
-                break
-
-            ttl += 1
-
-
-    def experiment(self, *a, **kw):
-        # this is just a dirty hack
-        address = kw['data'][0]
-
-        self.traceroute(address)
-
-def run(ooni):
-    """Run the test"""
-    config = ooni.config
-    urls = []
-
-    traceroute_experiment = TracerouteAsset(os.path.join(config.main.assetdir, \
-                                            config.tests.traceroute))
-
-    assets = [traceroute_experiment]
-
-    traceroute = Traceroute(ooni)
-    ooni.logger.info("starting traceroute test")
-    traceroute.run(assets)
-    ooni.logger.info("finished")
diff --git a/to-be-ported/TODO b/to-be-ported/TODO
new file mode 100644
index 0000000..81d834f
--- /dev/null
+++ b/to-be-ported/TODO
@@ -0,0 +1,418 @@
+This is a list of techniques that should be added as plugins or hooks or yamlooni
+
+Implement Plugooni - our plugin framework
+Implement Yamlooni - our output format
+Implement Proxooni - our proxy spec and program
+
+We should launch our own Tor on a special port (say, 127.0.0.1:9066)
+We should act as a controller with TorCtl to do this, etc
+We should take the Tor consensus file and pass it to plugins such as marco
+
+HTTP Host header comparison of a vs b
+HTTP Content length header comparison of a vs b
+
+GET request splitting
+  "G E T "
+  Used in Iran
+
+General Malformed HTTP requests
+  Error pages are fingerprintable
+
+traceroute
+  icmp/udp/tcp
+  each network link is an edge, each hop is a vertex in a network graph
+
+traceroute hop count
+  "TTL walking"
+
+Latency measurement
+TCP reset detection
+Forged DNS spoofing detection
+
+DNS oracle query tool
+  given DNS server foo - test resolve and look for known block pages
+
+Test HTTP header order - do they get reordered?
+
+Look for these filter fingerprints:
+X-Squid-Error: ERR_SCC_SMARTFILTER_DENIED 0 
+X-Squid-Error: ERR_ACCESS_DENIED 0 
+X-Cache: MISS from SmartFilter 
+
+
+WWW-Authenticate: Basic realm="SmartFilter Control List HTTP Download" 
+
+
+Via: 1.1 WEBFILTER.CONSERVESCHOOL.ORG:8080 
+
+X-Cache: MISS from webfilter.whiteschneider.com 
+X-Cache: MISS from webfilter.whiteschneider.com 
+X-Cache: MISS from webfilter.whiteschneider.com 
+
+Location: http://192.168.0.244/webfilter/blockpage?nonce=7d2b7e500e99a0fe&tid=3 
+
+
+X-Cache: MISS from webfilter.imscs.local 
+X-Cache: MISS from webfilter.tjs.at
+
+
+Via: 1.1 webwasher (Webwasher 6.8.7.9396) 
+
+Websense:
+HTTP/1.0 301 Moved Permanently  -> Location: http://www.websense.com/
+
+Via: HTTP/1.1 localhost.localdomain (Websense-Content_Gateway/7.1.4 [c s f ]), HTTP/1.0 localhost.localdomain (Websense-Content_Gateway/7.1.4 [cMsSf ]) 
+
+
+BlueCoat:
+
+Via: 1.1 testrating.dc5.es.bluecoat.com 
+403 -> 
+Set-Cookie: BIGipServerpool_bluecoat=1185677834.20480.0000; expires=Fri, 15-Apr-2011 10:13:21 GMT; path=/ 
+
+HTTP/1.0 407 Proxy Authentication Required ( The ISA Server requires authorization to fulfill the request. Access to the Web Proxy filter is denied. )  -> Via: 1.1 WEBSENSE 
+
+HTTP/1.0 302 Found -> Location: http://bluecoat/?cfru=aHR0cDovLzIwMC4yNy4xMjMuMTc4Lw== 
+
+HTTP/1.0 403 Forbidden 
+Server: squid/3.0.STABLE8 
+
+X-Squid-Error: ERR_ACCESS_DENIED 0 
+X-Cache: MISS from Bluecoat 
+X-Cache-Lookup: NONE from Bluecoat:3128 
+Via: 1.0 Bluecoat (squid/3.0.STABLE8) 
+
+ISA server:
+HTTP/1.0 403 Forbidden ( ISA Server is configured to block HTTP requests that require authentication. ) 
+
+
+Unknown:
+X-XSS-Protection: 1; mode=block 
+
+Rimon filter:
+
+Rimon: RWC_BLOCK
+HTTP/1.1 Rimon header
+Rimon header is only sent by lighttpd
+http://www.ynetnews.com/articles/0,7340,L-3446129,00.html
+http://btya.org/pdfs/rvienerbrochure.pdf
+
+Korea filtering:
+HTTP/1.0 302 Object Moved -> Location: http://www.willtechnology.co.kr/eng/BlockingMSGew.htm 
+Redirects to Korean filter:
+http://www.willtechnology.co.kr/eng/BlockingMSGew.htm
+
+UA filtering:
+HTTP/1.0 307 Temporary Redirect 
+https://my.best.net.ua/login/blocked/
+
+netsweeper:
+HTTP/1.0 302 Moved 
+Location: http://netsweeper1.gaggle.net:8080/webadmin/deny/index.php?dpid=53&dpruleid=53&cat=254&ttl=-905&groupname=default&policyname=default&username=-&userip=74.82.57.112&connectionip=1.0.0.127&nsphostname=netsweeper1.gaggle.net&protocol=nsef&dplanguage=-&url=http%3a%2f%2f184%2e105%2e199%2e252%2f 
+
+Set-cookie: RT_SID_netsweeper.com.80=68a6f5c564a9db297e8feb2bff69d73f; path=/ 
+X-Cache: MISS from netsweeper.irishbroadband.ie 
+X-Cache-Lookup: NONE from netsweeper.irishbroadband.ie:80 
+Via: 1.0 netsweeper.irishbroadband.ie:80 (squid/2.6.STABLE21)
+
+Nokia:
+Via: 1.1 saec-nokiaq05ca (NetCache NetApp/6.0.7) 
+Server: "Nokia" 
+
+CensorNet:
+HTTP/1.0 401 Authorization Required 
+WWW-Authenticate: Basic realm="CensorNet Administration Area" 
+Server: CensorNet/4.0 
+
+http://www.itcensor.com/censor 
+
+
+Server: ZyWALL Content Filter
+
+Apache/1.3.34 (Unix) filter/1.0
+
+HTTP/1.0 502 infiniteproxyloop 
+Via: 1.0 218.102.20.37 (McAfee Web Gateway 7.0.1.5.0.8505) 
+
+
+Set-Cookie: McAfee-SCM-URL-Filter-Coach="dD4OzXciEcp8Ihf1dD4ZzHM5FMZ2PSvRTllOnSR4RZkqfkmEIGgb3hZlVJsEaFaXNmNS3mgsdZAxaVOKIGgrrSx4Rb8hekmNKn4g02VZToogf1SbIQcVz3Q8G/U="; Comment="McAfee URL access coaching"; Version=1; Path=/; Max-Age=900; expires=Sat, 18 Dec 2010 06:47:11 GMT; 
+
+
+WWW-Authenticate: Basic realm="(Nancy McAfee)" 
+
+
+No known fingerprints for:
+NetNanny
+WebChaver
+accountable2you.com
+http://www.shodanhq.com/?q=barracuda
+http://www.shodanhq.com/?q=untangle
+http://www.shodanhq.com/?q=Lightspeed
+
+Server: Smarthouse Lightspeed 
+Server: Smarthouse Lightspeed2 
+Server: Smarthouse Lightspeed 3 
+
+Server: EdgePrism/3.8.1.1 
+
+
+X-Cache: MISS from Barracuda-WebFilter.jmpsecurities.com 
+Via: 1.0 Barracuda-WebFilter.jmpsecurities.com:8080 (http_scan/4.0.2.6.19) 
+
+HTTP/1.0 302 Redirected by M86 Web Filter
+http://www.m86security.com/products/web_security/m86-web-filter.asp
+
+Location: http://10.1.61.37:81/cgi/block.cgi?URL=http://70.182.111.99/&IP=96.9.174.54&CAT=WEMAIL&USER=DEFAULT&CE=0 
+
+
+Via: 1.1 WEBSENSE 
+
+
+Via: 1.1 192.168.1.251 (McAfee Web Gateway 7.1.0.1.0.10541) 
+Via: 1.1 McAfeeSA3000.cbcl.lan 
+
+
+X-Squid-Error: ERR_CONNECT_FAIL 111 
+X-Cache: MISS from CudaWebFilter.poten.com  
+
+http://212.50.251.82/ -iran squid
+
+HTTP/1.0 403 Forbidden ( Forefront TMG denied the specified Uniform Resource Locator (URL). ) 
+Via: 1.1 TMG 
+
+
+Server: NetCache appliance (NetApp/6.0.2) 
+
+
+Server: EdgePrism/3.8.1.1 
+
+
+Server: Mikrotik HttpProxy 
+
+
+Via: 1.1 TMG-04, 1.1 TMG-03 
+
+
+X-Squid-Error: ERR_INVALID_REQ 0 
+X-Cache: MISS from uspa150.trustedproxies.com 
+X-Cache-Lookup: NONE from uspa150.trustedproxies.com:80 
+
+http://www.shodanhq.com/host/view/93.125.95.177
+
+
+Server: SarfX WEB: Self Automation Redirect & Filter Expernet.Ltd Security Web Server 
+http://203.229.245.100/ <- korea block page
+
+
+
+Server: Asroc Intelligent Security Filter 4.1.8 
+
+
+
+Server: tinyproxy/1.8.2 
+
+http://www.shodanhq.com/host/view/64.104.95.251
+
+
+
+Server: Asroc Intelligent Security Filter 4.1.8 
+
+http://www.shodanhq.com/host/view/67.220.92.62
+
+
+Server: SarfX WEB: Self Automation Redirect & Filter Expernet.Ltd Security Web Server 
+http://www.shodanhq.com/host/view/203.229.245.100
+Location: http://192.168.3.20/redirect.cgi?Time=05%2FJul%2F2011%3A21%3A29%3A32%20%2B0800&ID=0000034097&Client_IP=173.212.232.58&User=-&Site=64.104.95.251&URI=-&Status_Code=403&Decision_Tag=BLOCK_ADMIN_PROTOCOL-DefaultGroup-DefaultGroup-NONE-NONE-NONE&URL_Cat=URL%20Filtering%20Bypassed&WBRS=-&DVS_Verdict=-&DVS_ThreatName=-&Reauth_URL=- 
+
+
+http://www.shodanhq.com/?q=%22content+filter%22+-squid+-apache+-ZyWall&page=4
+http://www.shodanhq.com/host/view/72.5.92.51
+http://www.microsoft.com/forefront/threat-management-gateway/en/us/pricing-licensing.aspx
+
+http://meta.wikimedia.org/wiki/Talk:XFF_project
+
+% dig nats.epiccash.com       
+
+; <<>> DiG 9.7.3 <<>> nats.epiccash.com
+;; global options: +cmd
+;; Got answer:
+;; ->>HEADER<<- opcode: QUERY, status: NOERROR, id: 14920
+;; flags: qr rd ra; QUERY: 1, ANSWER: 1, AUTHORITY: 2, ADDITIONAL: 0
+
+;; QUESTION SECTION:
+;nats.epiccash.com.		IN	A
+
+;; ANSWER SECTION:
+nats.epiccash.com.	5	IN	A	172.27.0.1
+
+;; AUTHORITY SECTION:
+epiccash.com.		5	IN	NS	ns0.example.net.
+epiccash.com.		5	IN	NS	ns1.example.net.
+
+;; Query time: 81 msec
+;; SERVER: 172.16.42.2#53(172.16.42.2)
+;; WHEN: Sat Jul 16 16:14:11 2011
+;; MSG SIZE  rcvd: 98
+
+If we think it's squid, we can perhaps confirm it:
+echo -e "GET cache_object://localhost/info HTTP/1.0\r\n" | nc en.wikipedia.com 80                                                                                                                                                      
+Harvest urls from:
+http://urlblacklist.com/?sec=download
+
+https://secure.wikimedia.org/wikipedia/simple/wiki/User_talk:62.30.249.131
+
+mention WCCPv2 filters (http://www.cl.cam.ac.uk/~rnc1/talks/090528-uknof13.pdf)
+
+Cite a bunch of Richard's work:
+http://www.cl.cam.ac.uk/~rnc1/ignoring.pdf
+
+http://www.contentkeeper.com/products/web
+
+We should detect HTTP re-directs to rfc-1918 addresses; they're almost always captive portals.
+We should also detect HTTP MITM served from rfc-1918 addresses for the same reason.
+
+We should take a page from sshuttle and run without touching the disk
+
+VIA Rail MITM's SSL In Ottawa:
+Jul 22 17:47:21.983 [Warning] Problem bootstrapping. Stuck at 85%: Finishing handshake with first hop. (DONE; DONE; count 13; recommendation warn)
+
+http://wireless.colubris.com:81/goform/HtmlLoginRequest?username=al1852&password=al1852
+
+VIA Rail Via header (DONE):
+
+HTTP/1.0 301 Moved Permanently
+Location: http://www.google.com/
+Content-Type: text/html; charset=UTF-8
+Date: Sat, 23 Jul 2011 02:21:30 GMT
+Expires: Mon, 22 Aug 2011 02:21:30 GMT
+Cache-Control: public, max-age=2592000
+Server: gws
+Content-Length: 219
+X-XSS-Protection: 1; mode=block
+X-Cache: MISS from cache_server
+X-Cache-Lookup: MISS from cache_server:3128
+Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
+Connection: close
+
+<HTML><HEAD><meta http-equiv="content-type" content="text/html;charset=utf-8">
+<TITLE>301 Moved</TITLE></HEAD><BODY>
+<H1>301 Moved</H1>
+The document has moved
+<A HREF="http://www.google.com/">here</A>.
+</BODY></HTML>
+
+
+blocked site (DONE):
+
+HTTP/1.0 302 Moved Temporarily
+Server: squid/2.6.STABLE21
+Date: Sat, 23 Jul 2011 02:22:17 GMT
+Content-Length: 0
+Location: http://10.66.66.66/denied.html
+
+invalid request response:
+
+$ nc 8.8.8.8 80 (DONE)
+hjdashjkdsahjkdsa
+HTTP/1.0 400 Bad Request
+Server: squid/2.6.STABLE21
+Date: Sat, 23 Jul 2011 02:22:44 GMT
+Content-Type: text/html
+Content-Length: 1178
+Expires: Sat, 23 Jul 2011 02:22:44 GMT
+X-Squid-Error: ERR_INVALID_REQ 0
+X-Cache: MISS from cache_server
+X-Cache-Lookup: NONE from cache_server:3128
+Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
+Proxy-Connection: close
+
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<HTML><HEAD><META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
+<TITLE>ERROR: The requested URL could not be retrieved</TITLE>
+<STYLE type="text/css"><!--BODY{background-color:#ffffff;font-family:verdana,sans-serif}PRE{font-family:sans-serif}--></STYLE>
+</HEAD><BODY>
+<H1>ERROR</H1>
+<H2>The requested URL could not be retrieved</H2>
+<HR noshade size="1px">
+<P>
+While trying to process the request:
+<PRE>
+hjdashjkdsahjkdsa
+
+</PRE>
+<P>
+The following error was encountered:
+<UL>
+<LI>
+<STRONG>
+Invalid Request
+</STRONG>
+</UL>
+
+<P>
+Some aspect of the HTTP Request is invalid.  Possible problems:
+<UL>
+<LI>Missing or unknown request method
+<LI>Missing URL
+<LI>Missing HTTP Identifier (HTTP/1.0)
+<LI>Request is too large
+<LI>Content-Length missing for POST or PUT requests
+<LI>Illegal character in hostname; underscores are not allowed
+</UL>
+<P>Your cache administrator is <A HREF="mailto:root">root</A>. 
+
+<BR clear="all">
+<HR noshade size="1px">
+<ADDRESS>
+Generated Sat, 23 Jul 2011 02:22:44 GMT by cache_server (squid/2.6.STABLE21)
+</ADDRESS>
+</BODY></HTML>
+
+nc 10.66.66.66 80
+GET cache_object://localhost/info HTTP/1.0
+HTTP/1.0 403 Forbidden
+Server: squid/2.6.STABLE21
+Date: Sat, 23 Jul 2011 02:25:56 GMT
+Content-Type: text/html
+Content-Length: 1061
+Expires: Sat, 23 Jul 2011 02:25:56 GMT
+X-Squid-Error: ERR_ACCESS_DENIED 0
+X-Cache: MISS from cache_server
+X-Cache-Lookup: NONE from cache_server:3128
+Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
+Proxy-Connection: close
+
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<HTML><HEAD><META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
+<TITLE>ERROR: The requested URL could not be retrieved</TITLE>
+<STYLE type="text/css"><!--BODY{background-color:#ffffff;font-family:verdana,sans-serif}PRE{font-family:sans-serif}--></STYLE>
+</HEAD><BODY>
+<H1>ERROR</H1>
+<H2>The requested URL could not be retrieved</H2>
+<HR noshade size="1px">
+<P>
+While trying to retrieve the URL:
+<A HREF="cache_object://localhost/info">cache_object://localhost/info</A>
+<P>
+The following error was encountered:
+<UL>
+<LI>
+<STRONG>
+Access Denied.
+</STRONG>
+<P>
+Access control configuration prevents your request from
+being allowed at this time.  Please contact your service provider if
+you feel this is incorrect.
+</UL>
+<P>Your cache administrator is <A HREF="mailto:root">root</A>. 
+
+
+<BR clear="all">
+<HR noshade size="1px">
+<ADDRESS>
+Generated Sat, 23 Jul 2011 02:25:56 GMT by cache_server (squid/2.6.STABLE21)
+</ADDRESS>
+</BODY></HTML>
+
+
diff --git a/to-be-ported/old-api/.ropeproject/config.py b/to-be-ported/old-api/.ropeproject/config.py
new file mode 100644
index 0000000..ffebcd4
--- /dev/null
+++ b/to-be-ported/old-api/.ropeproject/config.py
@@ -0,0 +1,85 @@
+# The default ``config.py``
+
+
+def set_prefs(prefs):
+    """This function is called before opening the project"""
+
+    # Specify which files and folders to ignore in the project.
+    # Changes to ignored resources are not added to the history and
+    # VCSs.  Also they are not returned in `Project.get_files()`.
+    # Note that ``?`` and ``*`` match all characters but slashes.
+    # '*.pyc': matches 'test.pyc' and 'pkg/test.pyc'
+    # 'mod*.pyc': matches 'test/mod1.pyc' but not 'mod/1.pyc'
+    # '.svn': matches 'pkg/.svn' and all of its children
+    # 'build/*.o': matches 'build/lib.o' but not 'build/sub/lib.o'
+    # 'build//*.o': matches 'build/lib.o' and 'build/sub/lib.o'
+    prefs['ignored_resources'] = ['*.pyc', '*~', '.ropeproject',
+                                  '.hg', '.svn', '_svn', '.git']
+
+    # Specifies which files should be considered python files.  It is
+    # useful when you have scripts inside your project.  Only files
+    # ending with ``.py`` are considered to be python files by
+    # default.
+    #prefs['python_files'] = ['*.py']
+
+    # Custom source folders:  By default rope searches the project
+    # for finding source folders (folders that should be searched
+    # for finding modules).  You can add paths to that list.  Note
+    # that rope guesses project source folders correctly most of the
+    # time; use this if you have any problems.
+    # The folders should be relative to project root and use '/' for
+    # separating folders regardless of the platform rope is running on.
+    # 'src/my_source_folder' for instance.
+    #prefs.add('source_folders', 'src')
+
+    # You can extend python path for looking up modules
+    #prefs.add('python_path', '~/python/')
+
+    # Should rope save object information or not.
+    prefs['save_objectdb'] = True
+    prefs['compress_objectdb'] = False
+
+    # If `True`, rope analyzes each module when it is being saved.
+    prefs['automatic_soa'] = True
+    # The depth of calls to follow in static object analysis
+    prefs['soa_followed_calls'] = 0
+
+    # If `False` when running modules or unit tests "dynamic object
+    # analysis" is turned off.  This makes them much faster.
+    prefs['perform_doa'] = True
+
+    # Rope can check the validity of its object DB when running.
+    prefs['validate_objectdb'] = True
+
+    # How many undos to hold?
+    prefs['max_history_items'] = 32
+
+    # Shows whether to save history across sessions.
+    prefs['save_history'] = True
+    prefs['compress_history'] = False
+
+    # Set the number spaces used for indenting.  According to
+    # :PEP:`8`, it is best to use 4 spaces.  Since most of rope's
+    # unit-tests use 4 spaces it is more reliable, too.
+    prefs['indent_size'] = 4
+
+    # Builtin and c-extension modules that are allowed to be imported
+    # and inspected by rope.
+    prefs['extension_modules'] = []
+
+    # Add all standard c-extensions to extension_modules list.
+    prefs['import_dynload_stdmods'] = True
+
+    # If `True` modules with syntax errors are considered to be empty.
+    # The default value is `False`; When `False` syntax errors raise
+    # `rope.base.exceptions.ModuleSyntaxError` exception.
+    prefs['ignore_syntax_errors'] = False
+
+    # If `True`, rope ignores unresolvable imports.  Otherwise, they
+    # appear in the importing namespace.
+    prefs['ignore_bad_imports'] = False
+
+
+def project_opened(project):
+    """This function is called after opening the project"""
+    # Do whatever you like here!
diff --git a/to-be-ported/old-api/.ropeproject/globalnames b/to-be-ported/old-api/.ropeproject/globalnames
new file mode 100644
index 0000000..2877ef5
Binary files /dev/null and b/to-be-ported/old-api/.ropeproject/globalnames differ
diff --git a/to-be-ported/old-api/.ropeproject/history b/to-be-ported/old-api/.ropeproject/history
new file mode 100644
index 0000000..fcd9c96
--- /dev/null
+++ b/to-be-ported/old-api/.ropeproject/history
@@ -0,0 +1 @@
+€]q(]q]qe.
\ No newline at end of file
diff --git a/to-be-ported/old-api/.ropeproject/objectdb b/to-be-ported/old-api/.ropeproject/objectdb
new file mode 100644
index 0000000..f276839
Binary files /dev/null and b/to-be-ported/old-api/.ropeproject/objectdb differ
diff --git a/to-be-ported/old-api/TESTS_ARE_MOVING.txt b/to-be-ported/old-api/TESTS_ARE_MOVING.txt
new file mode 100644
index 0000000..f4c0084
--- /dev/null
+++ b/to-be-ported/old-api/TESTS_ARE_MOVING.txt
@@ -0,0 +1,8 @@
+7/10/2012
+
+All new tests will be moved to the directory /nettests/.
+
+Tests that are in this directory are either here for historical reasons or
+have not yet been properly tested and do not yet fully support the new API.
+
+A.
diff --git a/to-be-ported/old-api/chinatrigger.py b/to-be-ported/old-api/chinatrigger.py
new file mode 100644
index 0000000..cf4bcb3
--- /dev/null
+++ b/to-be-ported/old-api/chinatrigger.py
@@ -0,0 +1,140 @@
+import random
+import string
+import struct
+import time
+
+from zope.interface import implements
+from twisted.python import usage
+from twisted.plugin import IPlugin
+from twisted.internet import protocol, defer
+from ooni.plugoo.tests import ITest, OONITest
+from ooni.plugoo.assets import Asset
+from ooni.utils import log
+from ooni.protocols.scapyproto import ScapyTest
+
+from ooni.lib.txscapy import txsr, txsend
+
+class scapyArgs(usage.Options):
+    optParameters = [['dst', 'd', None, 'Specify the target address'],
+                     ['port', 'p', None, 'Specify the target port'],
+                     ['pcap', 'f', None, 'The pcap file to write with the sent and received packets'],
+                    ]
+
+class ChinaTriggerTest(ScapyTest):
+    """
+    This test is an OONI-based implementation of the C tool written
+    by Philipp Winter to engage chinese probes in active scanning.
+
+    Example of running it:
+    ./ooni/ooniprobe.py chinatrigger -d 127.0.0.1 -p 8080 -f bla.pcap
+    """
+    implements(IPlugin, ITest)
+
+    shortName = "chinatrigger"
+    description = "Triggers the chinese probes into scanning"
+    requirements = ['root']
+    options = scapyArgs
+    blocking = False
+
+    receive = True
+    pcapfile = 'example_scapy.pcap'
+    timeout = 5
+
+    def initialize(self, reactor=None):
+        if not self.reactor:
+            from twisted.internet import reactor
+            self.reactor = reactor
+
+    @staticmethod
+    def set_random_servername(pkt):
+        ret = pkt[:121]
+        for i in range(16):
+            ret += random.choice(string.ascii_lowercase)
+        ret += pkt[121+16:]
+        return ret
+
+    @staticmethod
+    def set_random_time(pkt):
+        ret = pkt[:11]
+        ret += struct.pack('!I', int(time.time()))
+        ret += pkt[11+4:]
+        return ret
+
+    @staticmethod
+    def set_random_field(pkt):
+        ret = pkt[:15]
+        for i in range(28):
+            ret += chr(random.randint(0, 256))
+        ret += pkt[15+28:]
+        return ret
+
+    @staticmethod
+    def mutate(pkt, idx):
+        """
+        Slightly changed mutate function.
+        """
+        ret = pkt[:idx-1]
+        mutation = chr(random.randint(0, 256))
+        while mutation == pkt[idx]:
+            mutation = chr(random.randint(0, 256))
+        ret += mutation
+        ret += pkt[idx:]
+        return ret
+
+    @staticmethod
+    def set_all_random_fields(pkt):
+        pkt = ChinaTriggerTest.set_random_servername(pkt)
+        pkt = ChinaTriggerTest.set_random_time(pkt)
+        pkt = ChinaTriggerTest.set_random_field(pkt)
+        return pkt
+
+    def build_packets(self, *args, **kw):
+        """
+        Override this method to build scapy packets.
+        """
+        from scapy.all import IP, TCP
+        pkt = "\x16\x03\x01\x00\xcc\x01\x00\x00\xc8"\
+              "\x03\x01\x4f\x12\xe5\x63\x3f\xef\x7d"\
+              "\x20\xb9\x94\xaa\x04\xb0\xc1\xd4\x8c"\
+              "\x50\xcd\xe2\xf9\x2f\xa9\xfb\x78\xca"\
+              "\x02\xa8\x73\xe7\x0e\xa8\xf9\x00\x00"\
+              "\x3a\xc0\x0a\xc0\x14\x00\x39\x00\x38"\
+              "\xc0\x0f\xc0\x05\x00\x35\xc0\x07\xc0"\
+              "\x09\xc0\x11\xc0\x13\x00\x33\x00\x32"\
+              "\xc0\x0c\xc0\x0e\xc0\x02\xc0\x04\x00"\
+              "\x04\x00\x05\x00\x2f\xc0\x08\xc0\x12"\
+              "\x00\x16\x00\x13\xc0\x0d\xc0\x03\xfe"\
+              "\xff\x00\x0a\x00\xff\x01\x00\x00\x65"\
+              "\x00\x00\x00\x1d\x00\x1b\x00\x00\x18"\
+              "\x77\x77\x77\x2e\x67\x6e\x6c\x69\x67"\
+              "\x78\x7a\x70\x79\x76\x6f\x35\x66\x76"\
+              "\x6b\x64\x2e\x63\x6f\x6d\x00\x0b\x00"\
+              "\x04\x03\x00\x01\x02\x00\x0a\x00\x34"\
+              "\x00\x32\x00\x01\x00\x02\x00\x03\x00"\
+              "\x04\x00\x05\x00\x06\x00\x07\x00\x08"\
+              "\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00"\
+              "\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11"\
+              "\x00\x12\x00\x13\x00\x14\x00\x15\x00"\
+              "\x16\x00\x17\x00\x18\x00\x19\x00\x23"\
+              "\x00\x00"
+
+        pkt = ChinaTriggerTest.set_all_random_fields(pkt)
+        pkts = [IP(dst=self.dst)/TCP(dport=self.port)/pkt]
+        for x in range(len(pkt)):
+            mutation = IP(dst=self.dst)/TCP(dport=self.port)/ChinaTriggerTest.mutate(pkt, x)
+            pkts.append(mutation)
+        return pkts
+
+    def load_assets(self):
+        if self.local_options:
+            self.dst = self.local_options['dst']
+            self.port = int(self.local_options['port'])
+            if self.local_options['pcap']:
+                self.pcapfile = self.local_options['pcap']
+            if not self.port or not self.dst:
+                pass
+
+        return {}
+
+#chinatrigger = ChinaTriggerTest(None, None, None)
+
diff --git a/to-be-ported/old-api/daphn3.py b/to-be-ported/old-api/daphn3.py
new file mode 100644
index 0000000..bf4d60d
--- /dev/null
+++ b/to-be-ported/old-api/daphn3.py
@@ -0,0 +1,152 @@
+"""
+This is a self-generated test created by scaffolding.py.
+you will need to fill it up with all your necessities.
+Safe hacking :).
+"""
+from zope.interface import implements
+from twisted.python import usage
+from twisted.plugin import IPlugin
+from twisted.internet import protocol, endpoints
+
+from ooni.plugoo import reports
+from ooni.plugoo.tests import ITest, OONITest
+from ooni.plugoo.assets import Asset
+from ooni.protocols import daphn3
+from ooni.utils import log
+
+class Daphn3ClientProtocol(daphn3.Daphn3Protocol):
+    def connectionMade(self):
+        self.next_state()
+
+class Daphn3ClientFactory(protocol.ClientFactory):
+    protocol = Daphn3ClientProtocol
+    mutator = None
+    steps = None
+    test = None
+
+    def buildProtocol(self, addr):
+        p = self.protocol()
+        p.factory = self
+        p.test = self.test
+
+        if self.steps:
+            p.steps = self.steps
+
+        if not self.mutator:
+            self.mutator = daphn3.Mutator(p.steps)
+
+        else:
+            print "Moving on to next mutation"
+            self.mutator.next()
+
+        p.mutator = self.mutator
+        p.current_state = self.mutator.state()
+        return p
+
+    def clientConnectionFailed(self, reason):
+        print "We failed connecting the the OONIB"
+        print "Cannot perform test. Perhaps it got blocked?"
+        print "Please report this to tor-assistants at torproject.org"
+        self.test.result['error'] = ('Failed in connecting to OONIB', reason)
+        self.test.end(d)
+
+    def clientConnectionLost(self, reason):
+        print "Connection Lost."
+
+class daphn3Args(usage.Options):
+    optParameters = [['pcap', 'f', None,
+                        'PCAP to read for generating the YAML output'],
+
+                     ['output', 'o', 'daphn3.yaml',
+                        'What file should be written'],
+
+                     ['yaml', 'y', None,
+                        'The input file to the test'],
+
+                     ['host', 'h', None, 'Target Hostname'],
+                     ['port', 'p', None, 'Target port number'],
+                     ['resume', 'r', 0, 'Resume at this index']]
+
+class daphn3Test(OONITest):
+    implements(IPlugin, ITest)
+
+    shortName = "daphn3"
+    description = "daphn3"
+    requirements = None
+    options = daphn3Args
+    blocking = False
+
+    local_options = None
+
+    steps = None
+
+    def initialize(self):
+        if not self.local_options:
+            self.end()
+            return
+
+        self.factory = Daphn3ClientFactory()
+        self.factory.test = self
+
+        if self.local_options['pcap']:
+            self.tool = True
+
+        elif self.local_options['yaml']:
+            self.steps = daphn3.read_yaml(self.local_options['yaml'])
+
+        else:
+            log.msg("Not enough inputs specified to the test")
+            self.end()
+
+    def runTool(self):
+        import yaml
+        pcap = daphn3.read_pcap(self.local_options['pcap'])
+        f = open(self.local_options['output'], 'w')
+        f.write(yaml.dump(pcap))
+        f.close()
+
+    def control(self, exp_res, args):
+        try:
+            mutation = self.factory.mutator.get(0)
+            self.result['censored'] = False
+        except:
+            mutation = None
+
+        return {'mutation_number': args['mutation'],
+                'value': mutation}
+
+    def _failure(self, *argc, **kw):
+        self.result['censored'] = True
+        self.result['error'] = ('Failed in connecting', (argc, kw))
+        self.end()
+
+    def experiment(self, args):
+        log.msg("Doing mutation %s" % args['mutation'])
+        self.factory.steps = self.steps
+        host = self.local_options['host']
+        port = int(self.local_options['port'])
+        log.msg("Connecting to %s:%s" % (host, port))
+
+        if self.ended:
+            return
+
+        endpoint = endpoints.TCP4ClientEndpoint(self.reactor, host, port)
+        d = endpoint.connect(self.factory)
+        d.addErrback(self._failure)
+        return d
+
+    def load_assets(self):
+        if not self.local_options:
+            return {}
+        if not self.steps:
+            print "Error: No assets!"
+            self.end()
+            return {}
+        mutations = 0
+        for x in self.steps:
+            mutations += len(x['data'])
+        return {'mutation': range(mutations)}
+
+# We need to instantiate it otherwise getPlugins does not detect it
+# XXX Find a way to load plugins without instantiating them.
+#daphn3test = daphn3Test(None, None, None)
diff --git a/to-be-ported/old-api/domclass.py b/to-be-ported/old-api/domclass.py
new file mode 100644
index 0000000..3080c40
--- /dev/null
+++ b/to-be-ported/old-api/domclass.py
@@ -0,0 +1,216 @@
+#!/usr/bin/env python
+#-*- encoding: utf-8 -*-
+#
+#    domclass
+#    ********
+#
+#    :copyright: (c) 2012 by Arturo Filastò
+#    :license: see LICENSE for more details.
+#
+#    how this works
+#    --------------
+#
+#    This classifier uses the DOM structure of a website to determine how similar
+#    the two sites are.
+#    The procedure we use is the following:
+#        * First we parse all the DOM tree of the web page and we build a list of
+#          TAG parent child relationships (ex. <html><a><b></b></a><c></c></html> =>
+#          (html, a), (a, b), (html, c)).
+#
+#        * We then use this information to build a matrix (M) where m[i][j] = P(of
+#          transitioning from tag[i] to tag[j]). If tag[i] does not exists P() = 0.
+#          Note: M is a square matrix that is number_of_tags wide.
+#
+#        * We then calculate the eigenvectors (v_i) and eigenvalues (e) of M.
+#
+#        * The corelation between page A and B is given via this formula:
+#          correlation = dot_product(e_A, e_B), where e_A and e_B are
+#          resepectively the eigenvalues for the probability matrix A and the
+#          probability matrix B.
+#
+
+try:
+    import numpy
+except:
+    print "Error numpy not installed!"
+
+import yaml
+from zope.interface import implements
+from twisted.python import usage
+from twisted.plugin import IPlugin
+from ooni.plugoo.tests import ITest, OONITest
+from ooni.plugoo.assets import Asset
+from ooni.utils import log
+from ooni.protocols.http import HTTPTest
+
+class domclassArgs(usage.Options):
+    optParameters = [['output', 'o', None, 'Output to write'],
+                     ['file', 'f', None, 'Corpus file'],
+                     ['fileb', 'b', None, 'Corpus file'],
+                     ['urls', 'u', None, 'URL List'],
+                     ['resume', 'r', 0, 'Resume at this index']]
+
+# All HTML4 tags
+# XXX add link to W3C page where these came from
+alltags = ['A', 'ABBR', 'ACRONYM', 'ADDRESS', 'APPLET', 'AREA', 'B', 'BASE',
+           'BASEFONT', 'BD', 'BIG', 'BLOCKQUOTE', 'BODY', 'BR', 'BUTTON', 'CAPTION',
+           'CENTER', 'CITE', 'CODE', 'COL', 'COLGROUP', 'DD', 'DEL', 'DFN', 'DIR', 'DIV',
+           'DL', 'DT', 'E M', 'FIELDSET', 'FONT', 'FORM', 'FRAME', 'FRAMESET', 'H1', 'H2',
+           'H3', 'H4', 'H5', 'H6', 'HEAD', 'HR', 'HTML', 'I', 'IFRAME ', 'IMG',
+           'INPUT', 'INS', 'ISINDEX', 'KBD', 'LABEL', 'LEGEND', 'LI', 'LINK', 'MAP',
+           'MENU', 'META', 'NOFRAMES', 'NOSCRIPT', 'OBJECT', 'OL', 'OPTGROUP', 'OPTION',
+           'P', 'PARAM', 'PRE', 'Q', 'S', 'SAMP', 'SCRIPT', 'SELECT', 'SMALL', 'SPAN',
+           'STRIKE', 'STRONG', 'STYLE', 'SUB', 'SUP', 'TABLE', 'TBODY', 'TD',
+           'TEXTAREA', 'TFOOT', 'TH', 'THEAD', 'TITLE', 'TR', 'TT', 'U', 'UL', 'VAR']
+
+# Reduced subset of only the most common tags
+commontags = ['A', 'B', 'BLOCKQUOTE', 'BODY', 'BR', 'BUTTON', 'CAPTION',
+           'CENTER', 'CITE', 'CODE', 'COL', 'DD', 'DIV',
+           'DL', 'DT', 'EM', 'FIELDSET', 'FONT', 'FORM', 'FRAME', 'FRAMESET', 'H1', 'H2',
+           'H3', 'H4', 'H5', 'H6', 'HEAD', 'HR', 'HTML', 'IFRAME ', 'IMG',
+           'INPUT', 'INS', 'LABEL', 'LEGEND', 'LI', 'LINK', 'MAP',
+           'MENU', 'META', 'NOFRAMES', 'NOSCRIPT', 'OBJECT', 'OL', 'OPTION',
+           'P', 'PRE', 'SCRIPT', 'SELECT', 'SMALL', 'SPAN',
+           'STRIKE', 'STRONG', 'STYLE', 'SUB', 'SUP', 'TABLE', 'TBODY', 'TD',
+           'TEXTAREA', 'TFOOT', 'TH', 'THEAD', 'TITLE', 'TR', 'TT', 'U', 'UL']
+
+# The tags we are intested in using for our analysis
+thetags = ['A', 'DIV', 'FRAME', 'H1', 'H2',
+           'H3', 'H4', 'IFRAME ', 'INPUT',
+           'LABEL','LI', 'P', 'SCRIPT', 'SPAN',
+           'STYLE', 'TR']
+
+def compute_probability_matrix(dataset):
+    """
+    Compute the probability matrix based on the input dataset.
+
+    :dataset: an array of pairs representing the parent child relationships.
+    """
+    import itertools
+    ret = {}
+    matrix = numpy.zeros((len(thetags) + 1, len(thetags) + 1))
+
+    for data in dataset:
+        x = data[0].upper()
+        y = data[1].upper()
+        try:
+            x = thetags.index(x)
+        except:
+            x = len(thetags)
+
+        try:
+            y = thetags.index(y)
+        except:
+            y = len(thetags)
+
+        matrix[x,y] += 1
+
+    for x in xrange(len(thetags) + 1):
+        possibilities = 0
+        for y in matrix[x]:
+            possibilities += y
+
+        for i in xrange(len(matrix[x])):
+            if possibilities != 0:
+                matrix[x][i] = matrix[x][i]/possibilities
+
+    return matrix
+
+def compute_eigenvalues(matrix):
+    """
+    Returns the eigenvalues of the supplied square matrix.
+
+    :matrix: must be a square matrix and diagonalizable.
+    """
+    return numpy.linalg.eigvals(matrix)
+
+def readDOM(content=None, filename=None):
+    """
+    Parses the DOM of the HTML page and returns an array of parent, child
+    pairs.
+
+    :content: the content of the HTML page to be read.
+
+    :filename: the filename to be read from for getting the content of the
+               page.
+    """
+    from bs4 import BeautifulSoup
+
+    if filename:
+        f = open(filename)
+        content = ''.join(f.readlines())
+        f.close()
+
+    dom = BeautifulSoup(content)
+    couples = []
+    for x in dom.findAll():
+        couples.append((str(x.parent.name), str(x.name)))
+
+    return couples
+
+class domclassTest(HTTPTest):
+    implements(IPlugin, ITest)
+
+    shortName = "domclass"
+    description = "domclass"
+    requirements = None
+    options = domclassArgs
+    blocking = False
+
+    follow_redirects = True
+    #tool = True
+
+    def runTool(self):
+        site_a = readDOM(filename=self.local_options['file'])
+        site_b = readDOM(filename=self.local_options['fileb'])
+        a = {}
+        a['matrix'] = compute_probability_matrix(site_a)
+        a['eigen'] = compute_eigenvalues(a['matrix'])
+
+        self.result['eigenvalues'] = a['eigen']
+        b = {}
+        b['matrix'] = compute_probability_matrix(site_b)
+        b['eigen'] = compute_eigenvalues(b['matrix'])
+
+        #print "A: %s" % a
+        #print "B: %s" % b
+        correlation = numpy.vdot(a['eigen'],b['eigen'])
+        correlation /= numpy.linalg.norm(a['eigen'])*numpy.linalg.norm(b['eigen'])
+        correlation = (correlation + 1)/2
+        print "Corelation: %s" % correlation
+        self.end()
+        return a
+
+    def processResponseBody(self, data):
+        site_a = readDOM(data)
+        #site_b = readDOM(self.local_options['fileb'])
+        a = {}
+        a['matrix'] = compute_probability_matrix(site_a)
+        a['eigen'] = compute_eigenvalues(a['matrix'])
+
+
+        if len(data) == 0:
+            self.result['eigenvalues'] = None
+            self.result['matrix'] = None
+        else:
+            self.result['eigenvalues'] = a['eigen']
+            #self.result['matrix'] = a['matrix']
+        #self.result['content'] = data[:200]
+        #b = compute_matrix(site_b)
+        print "A: %s" % a
+        return a['eigen']
+
+    def load_assets(self):
+        if self.local_options:
+            if self.local_options['file']:
+                self.tool = True
+                return {}
+            elif self.local_options['urls']:
+                return {'url': Asset(self.local_options['urls'])}
+            else:
+                self.end()
+                return {}
+        else:
+            return {}
+
+#domclass = domclassTest(None, None, None)
diff --git a/to-be-ported/old-api/dropin.cache b/to-be-ported/old-api/dropin.cache
new file mode 100755
index 0000000..65c2187
--- /dev/null
+++ b/to-be-ported/old-api/dropin.cache
@@ -0,0 +1,243 @@
+(dp1
+S'tcpconnect'
+p2
+ccopy_reg
+_reconstructor
+p3
+(ctwisted.plugin
+CachedDropin
+p4
+c__builtin__
+object
+p5
+NtRp6
+(dp7
+S'moduleName'
+p8
+S'ooni.plugins.tcpconnect'
+p9
+sS'description'
+p10
+S'\nThis is a self genrated test created by scaffolding.py.\nyou will need to fill it up with all your necessities.\nSafe hacking :).\n'
+p11
+sS'plugins'
+p12
+(lp13
+g3
+(ctwisted.plugin
+CachedPlugin
+p14
+g5
+NtRp15
+(dp16
+S'provided'
+p17
+(lp18
+ctwisted.plugin
+IPlugin
+p19
+acooni.plugoo.interface
+ITest
+p20
+asS'dropin'
+p21
+g6
+sS'name'
+p22
+S'tcpconnect'
+p23
+sg10
+NsbasbsS'domclass'
+p24
+g3
+(g4
+g5
+NtRp25
+(dp26
+g8
+S'ooni.plugins.domclass'
+p27
+sg10
+Nsg12
+(lp28
+g3
+(g14
+g5
+NtRp29
+(dp30
+g17
+(lp31
+g19
+ag20
+asg21
+g25
+sg22
+S'domclass'
+p32
+sg10
+NsbasbsS'bridget'
+p33
+g3
+(g4
+g5
+NtRp34
+(dp35
+g8
+S'ooni.plugins.bridget'
+p36
+sg10
+Nsg12
+(lp37
+g3
+(g14
+g5
+NtRp38
+(dp39
+g17
+(lp40
+g19
+ag20
+asg21
+g34
+sg22
+S'bridget'
+p41
+sg10
+S"\n    XXX fill me in\n\n    :ivar config:\n        An :class:`ooni.lib.txtorcon.TorConfig` instance.\n    :ivar relays:\n        A list of all provided relays to test.\n    :ivar bridges:\n        A list of all provided bridges to test.\n    :ivar socks_port:\n        Integer for Tor's SocksPort.\n    :ivar control_port:\n        Integer for Tor's ControlPort.\n    :ivar transport:\n        String defining the Tor's ClientTransportPlugin, for testing \n        a bridge's pluggable transport functionality.\n    :ivar tor_binary:\n        Path to the Tor binary to use, e.g. '/usr/sbin/tor'\n    "
+p42
+sbasbsS'daphn3'
+p43
+g3
+(g4
+g5
+NtRp44
+(dp45
+g8
+S'plugins.daphn3'
+p46
+sg10
+S'\nThis is a self genrated test created by scaffolding.py.\nyou will need to fill it up with all your necessities.\nSafe hacking :).\n'
+p47
+sg12
+(lp48
+g3
+(g14
+g5
+NtRp49
+(dp50
+g17
+(lp51
+g19
+ag20
+asg21
+g44
+sg22
+S'daphn3test'
+p52
+sg10
+NsbasbsS'httpt'
+p53
+g3
+(g4
+g5
+NtRp54
+(dp55
+g8
+S'ooni.plugins.httpt'
+p56
+sg10
+S'\nThis is a self genrated test created by scaffolding.py.\nyou will need to fill it up with all your necessities.\nSafe hacking :).\n'
+p57
+sg12
+(lp58
+sbsS'chinatrigger'
+p59
+g3
+(g4
+g5
+NtRp60
+(dp61
+g8
+S'plugins.chinatrigger'
+p62
+sg10
+Nsg12
+(lp63
+g3
+(g14
+g5
+NtRp64
+(dp65
+g17
+(lp66
+g19
+ag20
+asg21
+g60
+sg22
+S'chinatrigger'
+p67
+sg10
+S'\n    This test is a OONI based implementation of the C tool written\n    by Philipp Winter to engage chinese probes in active scanning.\n\n    Example of running it:\n    ./ooni/ooniprobe.py chinatrigger -d 127.0.0.1 -p 8080 -f bla.pcap\n    '
+p68
+sbasbsS'dnstamper'
+p69
+g3
+(g4
+g5
+NtRp70
+(dp71
+g8
+S'ooni.plugins.dnstamper'
+p72
+sg10
+S'\n    dnstamper\n    *********\n\n    This test resolves DNS for a list of domain names, one per line, in the\n    file specified in the ooni-config under the setting "dns_experiment". If\n    the file is top-1m.txt, the test will be run using Amazon\'s list of top\n    one million domains. The experimental dns servers to query should\n    be specified one per line in assets/dns_servers.txt.\n\n    The test reports censorship if the cardinality of the intersection of\n    the query result set from the control server and the query result set\n    from the experimental server is zero, which is to say, if the two sets\n    have no matching results whatsoever.\n\n    NOTE: This test frequently results in false positives due to GeoIP-based\n    load balancing on major global sites such as google, facebook, and\n    youtube, etc.\n\n    :author: Isis Lovecruft, Arturo Filast\xc3\xb2\n    :license: see LICENSE for more details\n\n    TODO:\n         * Finish porting to twisted\n 
         * Finish the client.Resolver() subclass and test it\n         * Use the DNS tests from captiveportal\n         * Use plugoo/reports.py for final data\n'
+p73
+sg12
+(lp74
+g3
+(g14
+g5
+NtRp75
+(dp76
+g17
+(lp77
+g19
+ag20
+asg21
+g70
+sg22
+S'dnstamper'
+p78
+sg10
+S'\n    XXX fill me in\n    '
+p79
+sbasbsS'blocking'
+p80
+g3
+(g4
+g5
+NtRp81
+(dp82
+g8
+S'plugins.blocking'
+p83
+sg10
+Nsg12
+(lp84
+g3
+(g14
+g5
+NtRp85
+(dp86
+g17
+(lp87
+g19
+ag20
+asg21
+g81
+sg22
+S'blocking'
+p88
+sg10
+Nsbasbs.
\ No newline at end of file
diff --git a/to-be-ported/old-api/httpt.py b/to-be-ported/old-api/httpt.py
new file mode 100644
index 0000000..358f1ea
--- /dev/null
+++ b/to-be-ported/old-api/httpt.py
@@ -0,0 +1,94 @@
+"""
+This is a self genrated test created by scaffolding.py.
+you will need to fill it up with all your necessities.
+Safe hacking :).
+"""
+from zope.interface import implements
+from twisted.python import usage
+from twisted.plugin import IPlugin
+from ooni.plugoo.tests import ITest, OONITest
+from ooni.plugoo.assets import Asset
+from ooni.protocols import http
+from ooni.utils import log
+
+class httptArgs(usage.Options):
+    optParameters = [['urls', 'f', None, 'Urls file'],
+                     ['url', 'u', 'http://torproject.org/', 'Test single site'],
+                     ['resume', 'r', 0, 'Resume at this index'],
+                     ['rules', 'y', None, 'Specify the redirect rules file']]
+
+class httptTest(http.HTTPTest):
+    implements(IPlugin, ITest)
+
+    shortName = "httpt"
+    description = "httpt"
+    requirements = None
+    options = httptArgs
+    blocking = False
+
+
+    def testPattern(self, value, pattern, type):
+        if type == 'eq':
+            return value == pattern
+        elif type == 're':
+            import re
+            if re.match(pattern, value):
+                return True
+            else:
+                return False
+        else:
+            return None
+
+    def testPatterns(self, patterns, location):
+        test_result = False
+
+        if type(patterns) == list:
+            for pattern in patterns:
+                test_result |= self.testPattern(location, pattern['value'], pattern['type'])
+        else:
+            test_result |= self.testPattern(location, patterns['value'], patterns['type'])
+
+        return test_result
+
+    def testRules(self, rules, location):
+        result = {}
+        blocked = False
+        for rule, value in rules.items():
+            current_rule = {}
+            current_rule['name'] = value['name']
+            current_rule['patterns'] = value['patterns']
+            current_rule['test'] = self.testPatterns(value['patterns'], location)
+            blocked |= current_rule['test']
+            result[rule] = current_rule
+        result['blocked'] = blocked
+        return result
+
+    def processRedirect(self, location):
+        self.result['redirect'] = None
+        try:
+            rules_file = self.local_options['rules']
+            import yaml
+            rules = yaml.load(open(rules_file))
+            log.msg("Testing rules %s" % rules)
+            redirect = self.testRules(rules, location)
+            self.result['redirect'] = redirect
+        except TypeError:
+            log.msg("No rules file. Got a redirect, but nothing to do.")
+
+
+    def control(self, experiment_result, args):
+        print self.response
+        print self.request
+        # What you return here ends up inside of the report.
+        log.msg("Running control")
+        return {}
+
+    def load_assets(self):
+        if self.local_options and self.local_options['urls']:
+            return {'url': Asset(self.local_options['urls'])}
+        else:
+            return {}
+
+# We need to instantiate it otherwise getPlugins does not detect it
+# XXX Find a way to load plugins without instantiating them.
+#httpt = httptTest(None, None, None)
diff --git a/to-be-ported/old-api/tcpconnect.py b/to-be-ported/old-api/tcpconnect.py
new file mode 100644
index 0000000..7758a9e
--- /dev/null
+++ b/to-be-ported/old-api/tcpconnect.py
@@ -0,0 +1,65 @@
+"""
+This is a self genrated test created by scaffolding.py.
+you will need to fill it up with all your necessities.
+Safe hacking :).
+"""
+from zope.interface import implements
+from twisted.python import usage
+from twisted.plugin import IPlugin
+from twisted.internet.protocol import Factory, Protocol
+from twisted.internet.endpoints import TCP4ClientEndpoint
+
+from ooni.plugoo.interface import ITest
+from ooni.plugoo.tests import OONITest
+from ooni.plugoo.assets import Asset
+from ooni.utils import log
+
+class tcpconnectArgs(usage.Options):
+    optParameters = [['asset', 'a', None, 'File containing IP:PORT combinations, one per line.'],
+                     ['resume', 'r', 0, 'Resume at this index']]
+
+class tcpconnectTest(OONITest):
+    implements(IPlugin, ITest)
+
+    shortName = "tcpconnect"
+    description = "tcpconnect"
+    requirements = None
+    options = tcpconnectArgs
+    blocking = False
+
+    def experiment(self, args):
+        try:
+            host, port = args['asset'].split(':')
+        except:
+            raise Exception("Error in parsing asset. Wrong format?")
+        class DummyFactory(Factory):
+            def buildProtocol(self, addr):
+                return Protocol()
+
+        def gotProtocol(p):
+            p.transport.loseConnection()
+            log.msg("Got a connection!")
+            log.msg(str(p))
+            return {'result': True, 'target': [host, port]}
+
+        def gotError(err):
+            log.msg("Had error :(")
+            log.msg(err)
+            return {'result': False, 'target': [host, port]}
+
+        # What you return here gets handed as input to control
+        point = TCP4ClientEndpoint(self.reactor, host, int(port))
+        d = point.connect(DummyFactory())
+        d.addCallback(gotProtocol)
+        d.addErrback(gotError)
+        return d
+
+    def load_assets(self):
+        if self.local_options:
+            return {'asset': Asset(self.local_options['asset'])}
+        else:
+            return {}
+
+# We need to instantiate it otherwise getPlugins does not detect it
+# XXX Find a way to load plugins without instantiating them.
+#tcpconnect = tcpconnectTest(None, None, None)
diff --git a/to-be-ported/old-api/tcpscan.py b/to-be-ported/old-api/tcpscan.py
new file mode 100644
index 0000000..b371c88
--- /dev/null
+++ b/to-be-ported/old-api/tcpscan.py
@@ -0,0 +1,84 @@
+"""
+    TCP Port Scanner
+    ****************
+
+    Does a TCP connect scan on the IP:port pairs.
+
+"""
+import os
+from gevent import socket
+from datetime import datetime
+import socks
+
+from plugoo.assets import Asset
+from plugoo.tests import Test
+
+__plugoo__ = "TCP Port Scanner"
+__desc__ = "This a test template to be used to build your own tests"
+
+class TCPScanAsset(Asset):
+    """
+    This is the asset that should be used by the Test. It will
+    contain all the code responsible for parsing the asset file
+    and should be passed on instantiation to the test.
+    """
+    def __init__(self, file=None):
+        self = Asset.__init__(self, file)
+
+
+class TCPScan(Test):
+    """
+    The main Test class
+    """
+
+    def experiment(self, *a, **kw):
+        """
+        Fill this up with the tasks that should be performed
+        on the "dirty" network and should be compared with the
+        control.
+        """
+        addr = kw['data']
+        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        res = False
+        try:
+            self.logger.debug('Doing a connection to %s' % addr)
+            s.connect((addr.split(':')[0], int(addr.split(':')[1])))
+            res = True
+        except socket.error, msg:
+            self.logger.debug('Connection failed to %s: %s' % (addr, msg))
+
+        finally:
+            s.close()
+
+        return {'Time': datetime.now(),
+                'Address': addr,
+                'Status': res}
+
+    def control(self):
+        """
+        Fill this up with the control related code.
+        """
+        return True
+
+def run(ooni, asset=None):
+    """
+    This is the function that will be called by OONI
+    and it is responsible for instantiating and passing
+    the arguments to the Test class.
+    """
+    config = ooni.config
+
+    # This the assets array to be passed to the run function of
+    # the test
+    if asset:
+        assets = [TCPScanAsset(asset)]
+    else:
+        assets = [TCPScanAsset(os.path.join(config.main.assetdir, \
+                                            "tcpscan.txt"))]
+
+    # Instantiate the Test
+    thetest = TCPScan(ooni)
+    ooni.logger.info("starting TCP Scan...")
+    # Run the test with argument assets
+    thetest.run(assets)
+    ooni.logger.info("finished.")
diff --git a/to-be-ported/spec/proxooni-spec.txt b/to-be-ported/spec/proxooni-spec.txt
new file mode 100644
index 0000000..7cc476f
--- /dev/null
+++ b/to-be-ported/spec/proxooni-spec.txt
@@ -0,0 +1,65 @@
+
+                              Proxyooni specification
+                                   version 0.0
+                                  Jacob Appelbaum
+
+0. Preface
+
+ This document describes a new proxy that is required to support ooni-probe.
+
+1. Overview
+
+ There is no common proxy type that thwarts even the most basic traffic
+ monitoring. The Proxyooni specification aims to provide a proxy that is
+ encrypted by default, optionally authenticated, and will provide a way to run
+ specific ooni-probe tests natively on the system where the proxy is running.
+
+2. Implementation
+
+ Proxyooni may be written in any language, the reference implementation will be
+ implemented in Python. The program shall be called ooni-proxy and it will handle
+ running as a privileged user or an unprivileged user on supported systems. We
+ aim to support ooni-proxy on Debian GNU/Linux as the reference platform.
+
+2.1 Connections
+
+ When ooni-proxy runs, it should open a single port and it will allow TLS 1.0
+ clients to connect with a cipher suite that provides perfect forward secrecy.
+
+2.2 Certificates
+
+ ooni-proxy should use a certificate if supplied or dynamically generate a
+ certificate on startup; any connecting client should bootstrap trust with a
+ TOFU model, a client may ignore the
+
+2.3 Authentication
+
+ ooni-proxy should provide open access by default with no authentication.
+ It should support TLS-PSK[0] if authentication is desired. Key distribution is
+ explicitly an out-of-scope problem.
+
+3.0 Services offered
+
+ Post authentication, a remote client should treat ooni-proxy as a SOCKS4A[1]
+ proxy. It should be possible to chain as many Proxyooni proxies as desired.
+
+3.1 Additional services offered
+
+ ooni-proxy should allow for the sending of raw socket data - this is currently
+ left unspecified. This should be specified in the next revision of the
+ specification.
+
+3.2 Advanced meta-services
+
+ It may be desired to load code on the ooni-proxy from a client with newer
+ tests. This should be specified in the next revision of the specification.
+
+4. Security Concerns
+
+ It is probably not a good idea to run ooni-proxy unless you have permission to
+ do so. Consider your network context carefully; if it is dangerous to run a test
+ ensure that you do not run the test.
+
+[0] http://en.wikipedia.org/wiki/TLS-PSK
+[1] http://en.wikipedia.org/wiki/SOCKS#SOCKS_4a
+
diff --git a/to-be-ported/very-old/TODO.plgoons b/to-be-ported/very-old/TODO.plgoons
new file mode 100644
index 0000000..ace2a10
--- /dev/null
+++ b/to-be-ported/very-old/TODO.plgoons
@@ -0,0 +1,79 @@
+We should implement the following as plugoons:
+
+dns_plgoo.py - Various DNS checks
+
+As a start - we should perform a known good check against a name or list of
+names.  As input, we should take an ip address, a name or a list of names for
+testing; we also take dns servers for experiment or control data. For output we
+emit UDP or TCP packets - we should support proxying these requests when
+possible as is the case with TCP but probably not with UDP for certain DNS
+request types.
+
+http_plgoo.py - Various HTTP checks
+
+We should compare two pages and see if we have identical properties.
+At the very least, we should print the important differences - perhaps
+with a diff like output? We should look for fingerprints in URLS that are
+returned. We should detect 302 re-direction.
+
+As input, we should take an ip address, a name or a list of names for testing;
+we also take a list of headers such as random user agent strings and so on.
+We should emit TCP packets and ensure that we do not leak DNS for connections
+that we expect to proxy to a remote network.
+
+latency_plgoo.py - Measure latency for a host or a list of hosts
+
+As input, we should take an ip address, a name or a list of names for testing;
+We should measure the mean latency from the ooni-probe to the host with various
+traceroute tests. We should also measure the latency between the ooni-probe and
+a given server for any other protocol that is request and response oriented;
+HTTP latency may be calculated by simply tracking the delta between requests
+and responses.
+
+tcptrace_plgoo.py udptrace_plgoo.py icmptrace_plgoo.py - Traceroute suites
+
+tcptrace_plgoo.py should allow for both stray and in-connection traceroute
+modes.
+
+udptrace_plgoo.py should use UDP 53 by default; 0 and 123 are also nice options
+- it may also be nice to simply make a random A record request in a DNS packet
+and use it as the payload for a UDP traceroute.
+
+reversetrace_plgoo.py should give a remote host the client's IP and return the
+output of a traceroute to that IP from the remote host. It will need a remote
+component if run against a web server. It would not need a remote component if
+run against route-views - we can simply telnet over Tor and ask it to trace to
+our detected client IP.
+
+keyword_plgoo.py should take a keyword or a list of keywords for use as a
+payload in a variety of protocols. This should be protocol aware - dns keyword
+filtering requires a sniffer to catch stray packets after the censor wins the
+race. HTTP payloads in open connections may be similar and in practice, we'll
+have to fine tune it.
+
+icsi_plgoo.py - The ICSI Netalyzr tests; we should act as a client for their
+servers. They have dozens of tests and to implement this plgoo, we'll need to
+add many things to ooni. More details here:
+http://netalyzr.icsi.berkeley.edu/faq.html
+http://netalyzr.icsi.berkeley.edu/json/id=example-session
+
+HTML output:
+http://n2.netalyzr.icsi.berkeley.edu/summary/id=43ca208a-3466-82f17207-9bc1-433f-9b43
+
+JSON output:
+http://n2.netalyzr.icsi.berkeley.edu/json/id=43ca208a-3466-82f17207-9bc1-433f-9b43
+
+Netalyzr log:
+http://netalyzr.icsi.berkeley.edu/restore/id=43ca208a-3466-82f17207-9bc1-433f-9b43
+http://n2.netalyzr.icsi.berkeley.edu/transcript/id=43ca208a-3466-82f17207-9bc1-433f-9b43/side=client
+http://n2.netalyzr.icsi.berkeley.edu/transcript/id=43ca208a-3466-82f17207-9bc1-433f-9b43/side=server
+
+sniffer_plgoo.py - We need a generic method for capturing packets during a full
+run - this may be better as a core ooni-probe feature but we should implement
+packet capture in a plugin if it is done no where else.
+
+nmap_plgoo.py - We should take a list of hosts and run nmap against each of
+these hosts; many hosts are collected during testing and they should be scanned
+with something reasonable like "-A -O -T4 -sT --top-ports=10000" or something
+more reasonable.
+
diff --git a/to-be-ported/very-old/TO_BE_PORTED b/to-be-ported/very-old/TO_BE_PORTED
new file mode 100644
index 0000000..49ce5e0
--- /dev/null
+++ b/to-be-ported/very-old/TO_BE_PORTED
@@ -0,0 +1,14 @@
+
+The tests in this directory are very old, and have neither been ported to
+Twisted, nor to the new twisted.trial API framework. However, they are not
+old in the sense of the *seriously old* OONI code which was written two years
+ago.
+
+These tests should be updated at least to use Twisted.
+
+If you want to hack on something care free, feel free to mess with these files
+because it would be difficult to not improve on them.
+
+<(A)3
+isis
+0x2cdb8b35
diff --git a/to-be-ported/very-old/ooni-probe.diff b/to-be-ported/very-old/ooni-probe.diff
new file mode 100644
index 0000000..fc61d3f
--- /dev/null
+++ b/to-be-ported/very-old/ooni-probe.diff
@@ -0,0 +1,358 @@
+diff --git a/TODO b/TODO
+index c2e19af..51fa559 100644
+--- a/TODO
++++ b/TODO
+@@ -293,3 +293,142 @@ VIA Rail MITM's SSL In Ottawa:
+ Jul 22 17:47:21.983 [Warning] Problem bootstrapping. Stuck at 85%: Finishing handshake with first hop. (DONE; DONE; count 13; recommendation warn)
+ 
+ http://wireless.colubris.com:81/goform/HtmlLoginRequest?username=al1852&password=al1852
++
++VIA Rail Via header:
++
++HTTP/1.0 301 Moved Permanently
++Location: http://www.google.com/
++Content-Type: text/html; charset=UTF-8
++Date: Sat, 23 Jul 2011 02:21:30 GMT
++Expires: Mon, 22 Aug 2011 02:21:30 GMT
++Cache-Control: public, max-age=2592000
++Server: gws
++Content-Length: 219
++X-XSS-Protection: 1; mode=block
++X-Cache: MISS from cache_server
++X-Cache-Lookup: MISS from cache_server:3128
++Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
++Connection: close
++
++<HTML><HEAD><meta http-equiv="content-type" content="text/html;charset=utf-8">
++<TITLE>301 Moved</TITLE></HEAD><BODY>
++<H1>301 Moved</H1>
++The document has moved
++<A HREF="http://www.google.com/">here</A>.
++</BODY></HTML>
++
++
++blocked site:
++
++HTTP/1.0 302 Moved Temporarily
++Server: squid/2.6.STABLE21
++Date: Sat, 23 Jul 2011 02:22:17 GMT
++Content-Length: 0
++Location: http://10.66.66.66/denied.html
++
++invalid request response:
++
++$ nc 8.8.8.8 80
++hjdashjkdsahjkdsa
++HTTP/1.0 400 Bad Request
++Server: squid/2.6.STABLE21
++Date: Sat, 23 Jul 2011 02:22:44 GMT
++Content-Type: text/html
++Content-Length: 1178
++Expires: Sat, 23 Jul 2011 02:22:44 GMT
++X-Squid-Error: ERR_INVALID_REQ 0
++X-Cache: MISS from cache_server
++X-Cache-Lookup: NONE from cache_server:3128
++Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
++Proxy-Connection: close
++
++<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
++<HTML><HEAD><META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
++<TITLE>ERROR: The requested URL could not be retrieved</TITLE>
++<STYLE type="text/css"><!--BODY{background-color:#ffffff;font-family:verdana,sans-serif}PRE{font-family:sans-serif}--></STYLE>
++</HEAD><BODY>
++<H1>ERROR</H1>
++<H2>The requested URL could not be retrieved</H2>
++<HR noshade size="1px">
++<P>
++While trying to process the request:
++<PRE>
++hjdashjkdsahjkdsa
++
++</PRE>
++<P>
++The following error was encountered:
++<UL>
++<LI>
++<STRONG>
++Invalid Request
++</STRONG>
++</UL>
++
++<P>
++Some aspect of the HTTP Request is invalid.  Possible problems:
++<UL>
++<LI>Missing or unknown request method
++<LI>Missing URL
++<LI>Missing HTTP Identifier (HTTP/1.0)
++<LI>Request is too large
++<LI>Content-Length missing for POST or PUT requests
++<LI>Illegal character in hostname; underscores are not allowed
++</UL>
++<P>Your cache administrator is <A HREF="mailto:root">root</A>. 
++
++<BR clear="all">
++<HR noshade size="1px">
++<ADDRESS>
++Generated Sat, 23 Jul 2011 02:22:44 GMT by cache_server (squid/2.6.STABLE21)
++</ADDRESS>
++</BODY></HTML>
++
++nc 10.66.66.66 80
++GET cache_object://localhost/info HTTP/1.0
++HTTP/1.0 403 Forbidden
++Server: squid/2.6.STABLE21
++Date: Sat, 23 Jul 2011 02:25:56 GMT
++Content-Type: text/html
++Content-Length: 1061
++Expires: Sat, 23 Jul 2011 02:25:56 GMT
++X-Squid-Error: ERR_ACCESS_DENIED 0
++X-Cache: MISS from cache_server
++X-Cache-Lookup: NONE from cache_server:3128
++Via: 1.0 cache_server:3128 (squid/2.6.STABLE21)
++Proxy-Connection: close
++
++<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
++<HTML><HEAD><META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
++<TITLE>ERROR: The requested URL could not be retrieved</TITLE>
++<STYLE type="text/css"><!--BODY{background-color:#ffffff;font-family:verdana,sans-serif}PRE{font-family:sans-serif}--></STYLE>
++</HEAD><BODY>
++<H1>ERROR</H1>
++<H2>The requested URL could not be retrieved</H2>
++<HR noshade size="1px">
++<P>
++While trying to retrieve the URL:
++<A HREF="cache_object://localhost/info">cache_object://localhost/info</A>
++<P>
++The following error was encountered:
++<UL>
++<LI>
++<STRONG>
++Access Denied.
++</STRONG>
++<P>
++Access control configuration prevents your request from
++being allowed at this time.  Please contact your service provider if
++you feel this is incorrect.
++</UL>
++<P>Your cache administrator is <A HREF="mailto:root">root</A>. 
++
++
++<BR clear="all">
++<HR noshade size="1px">
++<ADDRESS>
++Generated Sat, 23 Jul 2011 02:25:56 GMT by cache_server (squid/2.6.STABLE21)
++</ADDRESS>
++</BODY></HTML>
++
++
+diff --git a/ooni/command.py b/ooni/command.py
+index 361190f..df1a58c 100644
+--- a/ooni/command.py
++++ b/ooni/command.py
+@@ -13,6 +13,7 @@ import ooni.captive_portal
+ import ooni.namecheck
+ import ooni.dns_poisoning
+ import ooni.dns_cc_check
++import ooni.transparenthttp
+ 
+ class Command():
+     def __init__(self, args):
+@@ -48,6 +49,15 @@ class Command():
+             help="run captiveportal tests"
+         )
+ 
++        # --transhttp
++        def cb_transhttp(option, opt, value, oparser):
++            self.action = opt[2:]
++        optparser.add_option(
++            "--transhttp",
++            action="callback", callback=cb_transhttp,
++            help="run Transparent HTTP tests"
++        )
++
+         # --dns
+         def cb_dnstests(option, opt, value, oparser):
+             self.action = opt[2:]
+@@ -122,7 +132,7 @@ class Command():
+             if (not self.action):
+                 raise optparse.OptionError(
+                     'is required',
+-                    '--dns | --dnsbulk | --captiveportal | --help | --version'
++                    '--dns | --dnsbulk | --dnscccheck | [ --cc CC ] | --captiveportal | --transhttp | --help | --version'
+                 )
+ 
+         except optparse.OptionError, err:
+@@ -138,6 +148,10 @@ class Command():
+         captive_portal = ooni.captive_portal.CaptivePortal
+         captive_portal(self).main()
+ 
++    def transhttp(self):
++        transparent_http = ooni.transparenthttp.TransparentHTTPProxy
++        transparent_http(self).main()
++
+     def dns(self):
+         dnstests = ooni.namecheck.DNS
+         dnstests(self).main()
+diff --git a/ooni/dns.py b/ooni/dns.py
+index 95da6ef..90d50bd 100644
+--- a/ooni/dns.py
++++ b/ooni/dns.py
+@@ -8,7 +8,7 @@ from socket import gethostbyname
+ import ooni.common
+ 
+ # apt-get install python-dns
+-import DNS
++import dns
+ import random
+ 
+ """ Wrap gethostbyname """
+diff --git a/ooni/http.py b/ooni/http.py
+index 62365bb..bb72001 100644
+--- a/ooni/http.py
++++ b/ooni/http.py
+@@ -7,8 +7,14 @@
+ from socket import gethostbyname
+ import ooni.common
+ import urllib2
++import httplib
++from urlparse import urlparse
++from pprint import pprint
+ import pycurl
++import random
++import string
+ import re
++from BeautifulSoup import BeautifulSoup
+ 
+ # By default, we'll be Torbutton's UA
+ default_ua = { 'User-Agent' : 
+@@ -20,20 +26,8 @@ default_proxy_type = PROXYTYPE_SOCKS5
+ default_proxy_host = "127.0.0.1"
+ default_proxy_port = "9050"
+ 
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
++#class HTTPResponse(object):
++#  def __init__(self):
+ 
+ 
+ """A very basic HTTP fetcher that uses Tor by default and returns a curl
+@@ -51,7 +45,7 @@ def http_proxy_fetch(url, headers, proxy_type=5,
+    http_code = getinfo(pycurl.HTTP_CODE)
+    return response, http_code
+ 
+-"""A very basic HTTP fetcher that returns a urllib3 response object."""
++"""A very basic HTTP fetcher that returns a urllib2 response object."""
+ def http_fetch(url, 
+                headers= default_ua,
+                label="generic HTTP fetch"):
+@@ -136,6 +130,76 @@ def http_header_no_match(experiment_url, control_header, control_result):
+   else:
+     return True
+ 
++def http_request(self, method, url, path=None):
++  """Takes as argument url that is perfectly formed (http://hostname/REQUEST"""
++  purl = urlparse(url)
++  host = purl.netloc
++  conn = httplib.HTTPConnection(host, 80)
++  if path is None:
++    path = purl.path
++  conn.request(method, purl.path)
++  response = conn.getresponse()
++  headers = dict(response.getheaders())
++  self.headers = headers
++  self.data = response.read()
++  return True
++
++def search_headers(self, s_headers, url):
++  if http_request(self, "GET", url):
++    headers = self.headers
++  else:
++    return None
++  result = {}
++  for h in s_headers.items():
++    result[h[0]] = h[0] in headers
++  return result
++
++def http_header_match_dict(experimental_url, dict_header):
++  result = {}
++  url_header = http_get_header_dict(experimental_url)
++
++# XXX for testing
++#  [('content-length', '9291'), ('via', '1.0 cache_server:3128 (squid/2.6.STABLE21)'), ('x-cache', 'MISS from cache_server'), ('accept-ranges', 'bytes'), ('server', 'Apache/2.2.16 (Debian)'), ('last-modified', 'Fri, 22 Jul 2011 03:00:31 GMT'), ('connection', 'close'), ('etag', '"105801a-244b-4a89fab1e51c0;49e684ba90c80"'), ('date', 'Sat, 23 Jul 2011 03:03:56 GMT'), ('content-type', 'text/html'), ('x-cache-lookup', 'MISS from cache_server:3128')]   
++      
++def search_squid_headers(self):
++  url = "http://securityfocus.org/blabla"
++  s_headers = {'via': '1.0 cache_server:3128 (squid/2.6.STABLE21)', 'x-cache': 'MISS from cache_server', 'x-cache-lookup':'MISS from cache_server:3128'}
++  ret = search_headers(self, s_headers, url)
++  for i in ret.items():
++    if i[1] is True:
++      return False
++  return True
++
++def random_bad_request(self):
++  url = "http://securityfocus.org/blabla"
++  r_str = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(random.randint(5,20)))
++  if http_request(self, r_str, url):
++    return True
++  else:
++    return None
++
++def squid_search_bad_request(self):
++  if random_bad_request(self):
++    s_headers = {'X-Squid-Error' : 'ERR_INVALID_REQ 0'}
++    for i in s_headers.items():
++      if i[0] in self.headers:
++        return False
++    return True
++  else:
++    return None
++
++def squid_cacheobject_request(self):
++  url = "http://securityfocus.org/blabla"
++  if http_request(self, "GET", url, "cache_object://localhost/info"):
++    soup = BeautifulSoup(self.data)
++    if soup.find('strong') and soup.find('strong').string == "Access Denied.":
++      return False
++    else:
++      return True
++  else:
++    return None
++  
++
+ def MSHTTP_CP_Tests(self):
+   experiment_url = "http://www.msftncsi.com/ncsi.txt"
+   expectedResponse = "Microsoft NCSI" # Only this - nothing more
+@@ -186,6 +250,18 @@ def WC3_CP_Tests(self):
+ 
+ # Google ChromeOS fetches this url in guest mode
+ # and they expect the user to authenticate
+-  def googleChromeOSHTTPTest(self):
+-    print "noop"
+-    #url = "http://www.google.com/"
++def googleChromeOSHTTPTest(self):
++  print "noop"
++  #url = "http://www.google.com/"
++
++def SquidHeader_TransparentHTTP_Tests(self):
++  return search_squid_headers(self)
++
++def SquidBadRequest_TransparentHTTP_Tests(self):
++  squid_cacheobject_request(self)
++  return squid_search_bad_request(self)    
++
++def SquidCacheobject_TransparentHTTP_Tests(self):
++  return squid_cacheobject_request(self)
++
++
diff --git a/to-be-ported/very-old/ooni/#namecheck.py# b/to-be-ported/very-old/ooni/#namecheck.py#
new file mode 100644
index 0000000..1a2a3f0
--- /dev/null
+++ b/to-be-ported/very-old/ooni/#namecheck.py#
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+#
+# DNS tampering detection module
+# by Jacob Appelbaum <jacob at appelbaum.net>
+#
+# This module performs multiple DNS tests.
+
+import sys
+import ooni.dnsooni
+
+class DNS():
+  def __init__(self, args):
+    self.in_ = sys.stdin
+    self.out = sys.stdout
+    self.debug = False
+    self.randomize = args.randomize
+
+  def DNS_Tests(self):
+    print "DNS tampering detection:"
+    filter_name = "_DNS_Tests"
+    tests = [ooni.dnsooni]
+    for test in tests:
+      for function_ptr in dir(test):
+        if function_ptr.endswith(filter_name):
+          filter_result = getattr(test, function_ptr)(self)
+          if filter_result == True:
+            print function_ptr + " thinks the network is clean"
+          elif filter_result == None:
+              print function_ptr + " failed"
+          else:
+            print function_ptr + " thinks the network is dirty"
+
+  def main(self):
+    for function_ptr in dir(self):
+      if function_ptr.endswith("_Tests"):
+        getattr(self, function_ptr)()
+
+if __name__ == '__main__':
+  self.main()
diff --git a/to-be-ported/very-old/ooni/.DS_Store b/to-be-ported/very-old/ooni/.DS_Store
new file mode 100644
index 0000000..f5738a5
Binary files /dev/null and b/to-be-ported/very-old/ooni/.DS_Store differ
diff --git a/to-be-ported/very-old/ooni/__init__.py b/to-be-ported/very-old/ooni/__init__.py
new file mode 100644
index 0000000..8f1b96e
--- /dev/null
+++ b/to-be-ported/very-old/ooni/__init__.py
@@ -0,0 +1,12 @@
+"""\
+This is your package, 'ooni'.
+
+It was provided by the package, `package`.
+
+Please change this documentation, and write this module!
+"""
+
+__version__ = '0.0.1'
+
+# If you run 'make test', this is your failing test.
+# raise Exception("\n\n\tNow it's time to write your 'ooni' module!!!\n\n")
diff --git a/to-be-ported/very-old/ooni/command.py b/to-be-ported/very-old/ooni/command.py
new file mode 100644
index 0000000..e5f8f9f
--- /dev/null
+++ b/to-be-ported/very-old/ooni/command.py
@@ -0,0 +1,250 @@
+# -*- coding: utf-8
+"""\
+Command line UI module for ooni-probe - heavily inspired by Ingy döt Net
+"""
+
+import os
+import sys
+import re
+import optparse
+
+# Only include high level ooni tests at this time
+import ooni.captive_portal
+import ooni.namecheck
+import ooni.dns_poisoning
+import ooni.dns_cc_check
+import ooni.transparenthttp
+import ooni.helpers
+import ooni.plugooni
+import ooni.input
+
+class Command():
+    def __init__(self, args):
+        sys.argv = sys.argv[0:1]
+        sys.argv.extend(args)
+        self.startup_options()
+
+    def startup_options(self):
+        self.action = None
+        self.from_ = None
+        self.to = None
+        self.parser = None
+        self.emitter = None
+        self.emit_header = None
+        self.emit_trailer = None
+        self.in_ = sys.stdin
+        self.out = sys.stdout
+        self.debug = False
+        self.randomize = True
+        self.cc = None
+        self.hostname = None
+        self.listfile = None
+        self.listplugooni = False
+        self.plugin_name = "all"
+        self.controlproxy = None # "socks4a://127.0.0.1:9050/"
+        self.experimentproxy = None
+
+        usage = """
+
+  'ooni' is the Open Observatory of Network Interference
+
+        command line usage:  ooni-probe [options]"""
+
+        optparser = optparse.OptionParser(usage=usage)
+
+        # --plugin
+        def cb_plugin(option, opt, value, oparser):
+            self.action = opt[2:]
+            self.plugin_name = str(value)
+        optparser.add_option(
+            "--plugin", type="string",
+            action="callback", callback=cb_plugin,
+            help="run the Plugooni plgoo plugin specified"
+        )
+
+        # --listplugins
+        def cb_list_plugins(option, opt, value, oparser):
+            self.action = opt[2:]
+        optparser.add_option(
+            "--listplugins",
+            action="callback", callback=cb_list_plugins,
+            help="list available Plugooni as plgoos plugin names"
+        )
+
+        # --captiveportal
+        def cb_captiveportal(option, opt, value, oparser):
+            self.action = opt[2:]
+        optparser.add_option(
+            "--captiveportal",
+            action="callback", callback=cb_captiveportal,
+            help="run vendor emulated captiveportal tests"
+        )
+
+        # --transhttp
+        def cb_transhttp(option, opt, value, oparser):
+            self.action = opt[2:]
+        optparser.add_option(
+            "--transhttp",
+            action="callback", callback=cb_transhttp,
+            help="run Transparent HTTP tests"
+        )
+
+        # --dns
+        def cb_dnstests(option, opt, value, oparser):
+            self.action = opt[2:]
+        optparser.add_option(
+            "--dns",
+            action="callback", callback=cb_dnstests,
+            help="run fixed generic dns tests"
+        )
+
+        # --dnsbulk
+        def cb_dnsbulktests(option, opt, value, oparser):
+            self.action = opt[2:]
+        optparser.add_option(
+            "--dnsbulk",
+            action="callback", callback=cb_dnsbulktests,
+            help="run bulk DNS tests in random.shuffle() order"
+        )
+
+        # --dns-cc-check
+        def cb_dnscccheck(option, opt, value, oparser):
+            self.action = opt[2:]
+        optparser.add_option(
+            "--dnscccheck",
+            action="callback", callback=cb_dnscccheck,
+            help="run cc specific bulk DNS tests in random.shuffle() order"
+        )
+
+        # --cc [country code]
+        def cb_cc(option, opt, value, optparser):
+          # XXX: We should check this against a list of supported county codes
+          # and then return the matching value from the list into self.cc
+          self.cc = str(value)
+        optparser.add_option(
+            "--cc", type="string",
+            action="callback", callback=cb_cc,
+            help="set a specific county code -- default is None",
+        )
+
+        # --list [url/hostname/ip list in file]
+        def cb_list(option, opt, value, optparser):
+          self.listfile = os.path.expanduser(value)
+          if not os.path.isfile(self.listfile):
+              print "Wrong file '" + value + "' in --list."
+              sys.exit(1)
+        optparser.add_option(
+            "--list", type="string",
+            action="callback", callback=cb_list,
+            help="file to read from -- default is None",
+        )
+
+        # --url [url/hostname/ip]
+        def cb_host(option, opt, value, optparser):
+          self.hostname = str(value)
+        optparser.add_option(
+            "--url", type="string",
+            action="callback", callback=cb_host,
+            help="set URL/hostname/IP for use in tests -- default is None",
+        )
+
+        # --controlproxy [scheme://host:port]
+        def cb_controlproxy(option, opt, value, optparser):
+          self.controlproxy = str(value)
+        optparser.add_option(
+            "--controlproxy", type="string",
+            action="callback", callback=cb_controlproxy,
+            help="proxy to be used as a control -- default is None",
+        )
+
+        # --experimentproxy [scheme://host:port]
+        def cb_experimentproxy(option, opt, value, optparser):
+          self.experimentproxy = str(value)
+        optparser.add_option(
+            "--experimentproxy", type="string",
+            action="callback", callback=cb_experimentproxy,
+            help="proxy to be used for experiments -- default is None",
+        )
+
+
+
+        # --randomize
+        def cb_randomize(option, opt, value, optparser):
+          self.randomize = bool(int(value))
+        optparser.add_option(
+            "--randomize", type="choice",
+            choices=['0', '1'], metavar="0|1",
+            action="callback", callback=cb_randomize,
+            help="randomize host order -- default is on",
+        )
+
+        # XXX TODO:
+        # pause/resume scans for dns_BULK_DNS_Tests()
+        # setting of control/experiment resolver
+        # setting of control/experiment proxy
+        #
+
+        def cb_version(option, opt, value, oparser):
+            self.action = 'version'
+        optparser.add_option(
+            "-v", "--version",
+            action="callback", callback=cb_version,
+            help="print ooni-probe version"
+        )
+
+        # parse options
+        (opts, args) = optparser.parse_args()
+
+        # validate options
+        try:
+            if (args):
+                raise optparse.OptionError('extra arguments found', args)
+            if (not self.action):
+                raise optparse.OptionError(
+                    'RTFS', 'required arguments missing'
+                )
+
+        except optparse.OptionError, err:
+            sys.stderr.write(str(err) + '\n\n')
+            optparser.print_help()
+            sys.exit(1)
+
+    def version(self):
+        print """
+ooni-probe pre-alpha
+Copyright (c) 2011, Jacob Appelbaum, Arturo Filastò
+See: https://www.torproject.org/ooni/
+
+"""
+
+    def run(self):
+        getattr(self, self.action)()
+
+    def plugin(self):
+        plugin_run = ooni.plugooni.Plugooni
+        plugin_run(self).run(self)
+
+    def listplugins(self):
+        plugin_run = ooni.plugooni.Plugooni
+        plugin_run(self).list_plugoons()
+
+    def captiveportal(self):
+        captive_portal = ooni.captive_portal.CaptivePortal
+        captive_portal(self).main()
+
+    def transhttp(self):
+        transparent_http = ooni.transparenthttp.TransparentHTTPProxy
+        transparent_http(self).main()
+
+    def dns(self):
+        dnstests = ooni.namecheck.DNS
+        dnstests(self).main()
+
+    def dnsbulk(self):
+        dnstests = ooni.dns_poisoning.DNSBulk
+        dnstests(self).main()
+
+    def dnscccheck(self):
+        dnstests = ooni.dns_cc_check.DNSBulk
+        dnstests(self).main()
+
diff --git a/to-be-ported/very-old/ooni/dns_poisoning.py b/to-be-ported/very-old/ooni/dns_poisoning.py
new file mode 100644
index 0000000..939391e
--- /dev/null
+++ b/to-be-ported/very-old/ooni/dns_poisoning.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+#
+# DNS tampering detection module
+# by Jacob Appelbaum <jacob at appelbaum.net>
+#
+# This module performs DNS queries against a known good resolver and a possible
+# bad resolver. We compare every resolved name against a list of known filters
+# - if we match, we ring a bell; otherwise, we list possible filter IP
+# addresses. There is a high false positive rate for sites that are GeoIP load
+# balanced.
+#
+
+import sys
+import ooni.dnsooni
+
+class DNSBulk():
+  def __init__(self, args):
+    self.in_ = sys.stdin
+    self.out = sys.stdout
+    self.randomize = args.randomize
+    self.debug = False
+
+  def DNS_Tests(self):
+    print "DNS tampering detection for list of domains:"
+    filter_name = "_DNS_BULK_Tests"
+    tests = [ooni.dnsooni]
+    for test in tests:
+      for function_ptr in dir(test):
+        if function_ptr.endswith(filter_name):
+          filter_result = getattr(test, function_ptr)(self)
+          if filter_result == True:
+            print function_ptr + " thinks the network is clean"
+          elif filter_result == None:
+              print function_ptr + " failed"
+          else:
+            print function_ptr + " thinks the network is dirty"
+  def main(self):
+    for function_ptr in dir(self):
+      if function_ptr.endswith("_Tests"):
+        getattr(self, function_ptr)()
+
+if __name__ == '__main__':
+  self.main()
diff --git a/to-be-ported/very-old/ooni/dnsooni.py b/to-be-ported/very-old/ooni/dnsooni.py
new file mode 100644
index 0000000..bfdfe51
--- /dev/null
+++ b/to-be-ported/very-old/ooni/dnsooni.py
@@ -0,0 +1,356 @@
+#!/usr/bin/env python
+#
+# DNS support for ooni-probe
+# by Jacob Appelbaum <jacob at appelbaum.net>
+#
+
+from socket import gethostbyname
+import ooni.common
+
+# requires python-dns
+# (pydns.sourceforge.net)
+try:
+  import DNS
+# Mac OS X needs this
+except:
+  try:
+    import dns as DNS
+  except:
+    pass                        # Never mind, let's break later.
+import random
+from pprint import pprint
+
+""" Wrap gethostbyname """
+def dns_resolve(hostname):
+  try:
+    resolved_host = gethostbyname(hostname)
+    return resolved_host
+  except:
+    return False
+
+"""Perform a resolution on test_hostname and compare it with the expected
+   control_resolved ip address. Optionally, a label may be set to customize
+   output. If the experiment matches the control, this returns True; otherwise
+   it returns False.
+"""
+def dns_resolve_match(experiment_hostname, control_resolved,
+                       label="generic DNS comparison"):
+  experiment_resolved = dns_resolve(experiment_hostname)
+  if experiment_resolved == False:
+    return None
+  if experiment_resolved:
+    if str(experiment_resolved) != str(control_resolved):
+      print label + " control " + str(control_resolved) + " data does not " \
+            "match experiment response: " + str(experiment_resolved)
+      return False
+    return True
+
+def generic_DNS_resolve(experiment_hostname, experiment_resolver):
+  if experiment_resolver == None:
+    req = DNS.Request(name=experiment_hostname) # local resolver
+  else:
+    req = DNS.Request(name=experiment_hostname, server=experiment_resolver) #overide
+  resolved_data = req.req().answers
+  return resolved_data
+
+""" Return a list of all known censors. """
+def load_list_of_known_censors(known_proxy_file=None):
+  proxyfile = "proxy-lists/ips.txt"
+  known_proxy_file = open(proxyfile, 'r', 1)
+  known_proxy_list = []
+  for known_proxy in known_proxy_file.readlines():
+    known_proxy_list.append(known_proxy)
+  known_proxy_file.close()
+  known_proxy_count = len(known_proxy_list)
+  print "Loading " + str(known_proxy_count) + " known proxies..."
+  return known_proxy_list, known_proxy_count
+
+def load_list_of_test_hosts(hostfile=None):
+  if hostfile == None:
+    hostfile="censorship-lists/norwegian-dns-blacklist.txt"
+  host_list_file = open(hostfile, 'r', 1)
+  host_list = []
+  for host_name in host_list_file.readlines():
+    if host_name.isspace():
+      continue
+    else:
+     host_list.append(host_name)
+  host_list_file.close()
+  host_count = len(host_list)
+  #print "Loading " + str(host_count) + " test host names..."
+  return host_list, host_count
+
+""" Return True with a list of censors if we find a known censor from
+    known_proxy_list in the experiment_data DNS response. Otherwise return
+    False and None. """
+def contains_known_censors(known_proxy_list, experiment_data):
+  match = False
+  proxy_list = []
+  for answer in range(len(experiment_data)):
+    for known_proxy in known_proxy_list:
+      if answer == known_proxy:
+        print "CONFLICT: known proxy discovered: " + str(known_proxy),
+        proxy_list.append(known_proxy)
+        match = True
+  return match, proxy_list
+
+""" Return True and the experiment response that failed to match."""
+def compare_control_with_experiment(known_proxy_list, control_data, experiment_data):
+  known_proxy_found, known_proxies = contains_known_censors(known_proxy_list, experiment_data)
+  conflict_list = []
+  conflict = False
+  if known_proxy_found:
+    print "known proxy discovered: " + str(known_proxies)
+  for answer in range(len(control_data)):
+    if control_data[answer]['data'] == experiment_data:
+      print "control_data[answer]['data'] = " + str(control_data[answer]['data']) + "and experiment_data = " + str(experiment_data)
+      continue
+    else:
+      conflict = True
+      conflict_list.append(experiment_data)
+      #print "CONFLICT: control_data: " + str(control_data) + " experiment_data: " + str(experiment_data),
+  return conflict, conflict_list
+
+def dns_DNS_BULK_Tests(self, hostfile=None,
+                       known_good_resolver="8.8.8.8", test_resolver=None):
+  tampering = False # By default we'll pretend the internet is nice
+  tampering_list = []
+  host_list, host_count = load_list_of_test_hosts()
+  known_proxies, proxy_count = load_list_of_known_censors()
+  check_count = 1
+  if test_resolver == None:
+    DNS.ParseResolvConf() # Set the local resolver as our default
+  if self.randomize:
+    random.shuffle(host_list) # This makes our list non-sequential for now
+  for host_name in host_list:
+    host_name = host_name.strip()
+    print "Total progress: " + str(check_count) + " of " + str(host_count) + " hosts to check"
+    print "Resolving with control resolver..."
+    print "Testing " + host_name + " with control resolver: " + str(known_good_resolver)
+    print "Testing " + host_name + " with experiment resolver: " + str(test_resolver)
+    # XXX TODO - we need to keep track of the status of these requests and then resume them
+    while True:
+      try:
+        control_data = generic_DNS_resolve(host_name, known_good_resolver)
+        break
+      except KeyboardInterrupt:
+        print "bailing out..."
+        exit()
+      except DNS.Base.DNSError:
+        print "control resolver appears to be failing..."
+        continue
+      except:
+        print "Timeout; looping!"
+        continue
+
+    print "Resolving with experiment resolver..."
+    while True:
+      try:
+        experiment_data = generic_DNS_resolve(host_name, test_resolver)
+        break
+      except KeyboardInterrupt:
+        print "bailing out..."
+        exit()
+      except DNS.Base.DNSError:
+        print "experiment resolver appears to be failing..."
+        continue
+      except:
+        print "Timeout; looping!"
+        continue
+
+    print "Comparing control and experiment...",
+    tampering, conflicts = compare_control_with_experiment(known_proxies, control_data, experiment_data)
+    if tampering:
+      tampering_list.append(conflicts)
+      print "Conflicts with " + str(host_name) + " : " + str(conflicts)
+    check_count = check_count + 1
+  host_list.close()
+  return tampering
+
+""" Attempt to resolve random_hostname and return True and None if empty. If an
+    address is returned we return False and the returned address.
+"""
+def dns_response_empty(random_hostname):
+  response = dns_resolve(random_hostname)
+  if response == False:
+    return True, None
+  return False, response
+
+def dns_multi_response_empty(count, size):
+  for i in range(count):
+    randName = ooni.common._randstring(size)
+    response_empty, response_ip = dns_response_empty(randName)
+    if response_empty == True and response_ip == None:
+      responses_are_empty = True
+    else:
+      print label + " " + randName + " found with value " + str(response_ip)
+      responses_are_empty = False
+  return responses_are_empty
+
+""" Attempt to resolve one random host name per tld in tld_list where the
+    hostnames are random strings with a length between min_length and
+    max_length. Return True if list is empty, otherwise return False."""
+def dns_list_empty(tld_list, min_length, max_length,
+                   label="generic DNS list test"):
+  for tld in tld_list:
+    randName = ooni.common._randstring(min_length, max_length) + tld
+    response_empty, response_ip = dns_response_empty(randName)
+  return response_empty
+
+# Known bad test
+# Test for their DNS breakage and their HTTP MITM
+# "Family Shield" is 208.67.222.123 and 208.67.220.123
+# returns 67.215.65.130 for filtered sites like kink.com
+# block.opendns.com is a block page where users are redirected
+# 208.67.216.135 208.67.217.135 are the block pages currently point
+# 67.215.65.132 is returned for NXDOMAINs and a visit with HTTP to that IP
+# results in redirection to http://guide.opendns.com/main?url=sdagsad.com or
+# whatever the HOST header says
+# Amusingly - their Server header is: "OpenDNS Guide"
+""" Return True if we are not being directed as known OpenDNS block pages."""
+def OpenDNS_DNS_Tests(self):
+  return OpenDNS_Censorship_DNS_TESTS(self)
+  return OpenDNS_NXDomain_DNS_TESTS(self)
+
+def OpenDNS_Censorship_DNS_TESTS(self):
+  known_filter = "67.215.65.130"
+  randName = ooni.common._randstring(10)
+  redirected = dns_resolve_match(randName, known_filter, label="OpenDNS DNS Censorship comparison")
+  if redirected:
+    return False
+  else:
+    return True
+
+def OpenDNS_NXDomain_DNS_TESTS(self):
+  known_filter = "67.215.65.132"
+  randName = ooni.common._randstring(10)
+  redirected = dns_resolve_match(randName, known_filter, label="OpenDNS DNS NXDomain comparison")
+  if redirected:
+    return False
+  else:
+    return True
+
+"""Returns True if the experiment_url returns the well known Italian block page."""
+def cc_DNS_Tests_it(self):
+  tampering = False # By default we'll pretend the internet is nice
+  tampering_list = []
+  conflicts = []
+  known_good_resolver = "8.8.8.8"
+  host_list, host_count = load_list_of_test_hosts("censorship-lists/italy-gamble-blocklist-07-22-11.txt")
+  known_http_block_pages, known_block_count = load_list_of_test_hosts("proxy-lists/italy-http-ips.txt")
+  known_censoring_resolvers, censoring_resolver_count = load_list_of_test_hosts("proxy-lists/italy-dns-ips.txt")
+
+  check_count = 1
+  DNS.ParseResolvConf()
+  # Set the local resolver as our default
+  if self.randomize:
+    random.shuffle(host_list) # This makes our list non-sequential for now
+  print "We're testing (" + str(host_count) + ") URLs"
+  print "We're looking for (" + str(known_block_count) + ") block pages"
+  print "We're testing against (" + str(censoring_resolver_count) + ") censoring DNS resolvers"
+  for test_resolver in known_censoring_resolvers:
+    test_resolver = test_resolver.strip()
+    for host_name in host_list:
+      host_name = host_name.strip()
+      print "Total progress: " + str(check_count) + " of " + str(host_count) + " hosts to check"
+      print "Testing " + host_name + " with control resolver: " + known_good_resolver
+      print "Testing " + host_name + " with experiment resolver: " + test_resolver
+      while True:
+        try:
+          control_data = generic_DNS_resolve(host_name, known_good_resolver)
+          break
+        except KeyboardInterrupt:
+          print "bailing out..."
+          exit()
+        except DNS.Base.DNSError:
+          print "control resolver appears to be failing..."
+          break
+        except:
+          print "Timeout; looping!"
+          continue
+
+      while True:
+        try:
+          experiment_data = generic_DNS_resolve(host_name, test_resolver)
+          break
+        except KeyboardInterrupt:
+          print "bailing out..."
+          exit()
+        except DNS.Base.DNSError:
+          print "experiment resolver appears to be failing..."
+          continue
+        except:
+          print "Timeout; looping!"
+          continue
+
+      print "Comparing control and experiment...",
+      tampering, conflicts = compare_control_with_experiment(known_http_block_pages, control_data, experiment_data)
+      if tampering:
+        tampering_list.append(conflicts)
+        print "Conflicts with " + str(host_name) + " : " + str(conflicts)
+      check_count = check_count + 1
+
+  host_list.close()
+  return tampering
+
+
+## XXX TODO
+## Code up automatic tests for HTTP page checking in Italy - length + known strings, etc
+
+""" Returns True if the experiment_host returns a well known Australian filter
+    IP address."""
+def Australian_DNS_Censorship(self, known_filtered_host="badhost.com"):
+  # http://www.robtex.com/ip/61.88.88.88.html
+  # http://requests.optus.net.au/dns/
+  known_block_ip = "208.69.183.228" # http://interpol.contentkeeper.com/
+  known_censoring_resolvers = ["61.88.88.88"] # Optus
+  for resolver in known_censoring_resolvers:
+    blocked = generic_DNS_censorship(known_filtered_host, resolver, known_block_page)
+    if blocked:
+      return True
+
+"""Returns True if experiment_hostname as resolved by experiment_resolver
+   resolves to control_data. Returns False if there is no match or None if the
+   attempt fails."""
+def generic_DNS_censorship(self, experiment_hostname, experiment_resolver,
+                           control_data):
+  req = DNS.Request(name=experiment_hostname, server=experiment_resolver)
+  resolved_data = s.req().answers
+  for answer in range(len(resolved_data)):
+    if resolved_data[answer]['data'] == control_data:
+      return True
+  return False
+
+# See dns_launch_wildcard_checks in tor/src/or/dns.c for Tor implementation
+# details
+""" Return True if Tor would consider the network fine; False if it's hostile
+    and has no signs of DNS tampering. """
+def Tor_DNS_Tests(self):
+  response_rfc2606_empty = RFC2606_DNS_Tests(self)
+  tor_tld_list = ["", ".com", ".org", ".net"]
+  response_tor_empty = ooni.dnsooni.dns_list_empty(tor_tld_list, 8, 16, "TorDNSTest")
+  return response_tor_empty | response_rfc2606_empty
+
+""" Return True if RFC2606 would consider the network hostile; False if it's all
+    clear and has no signs of DNS tampering. """
+def RFC2606_DNS_Tests(self):
+  tld_list = [".invalid", ".test"]
+  return ooni.dnsooni.dns_list_empty(tld_list, 4, 18, "RFC2606Test")
+
+""" Return True if googleChromeDNSTest would consider the network OK."""
+def googleChrome_CP_Tests(self):
+    maxGoogleDNSTests = 3
+    GoogleDNSTestSize = 10
+    return ooni.dnsooni.dns_multi_response_empty(maxGoogleDNSTests,
+                                            GoogleDNSTestSize)
+def googleChrome_DNS_Tests(self):
+    # Alias so the *_DNS_Tests name-based discovery (see the DNS driver's
+    # filter on "_DNS_Tests") picks this check up.
+    return googleChrome_CP_Tests(self)
+
+""" Return True if MSDNSTest would consider the network OK."""
+def MSDNS_CP_Tests(self):
+    experimentHostname = "dns.msftncsi.com"
+    expectedResponse = "131.107.255.255"
+    return ooni.dnsooni.dns_resolve_match(experimentHostname, expectedResponse, "MS DNS")
+
+def MSDNS_DNS_Tests(self):
+    # Alias so the *_DNS_Tests name-based discovery picks this check up.
+    return MSDNS_CP_Tests(self)
diff --git a/to-be-ported/very-old/ooni/helpers.py b/to-be-ported/very-old/ooni/helpers.py
new file mode 100644
index 0000000..514e65f
--- /dev/null
+++ b/to-be-ported/very-old/ooni/helpers.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+#
+# HTTP support for ooni-probe
+# by Jacob Appelbaum <jacob at appelbaum.net>
+#    Arturo Filasto' <art at fuffa.org>
+
+import ooni.common
+import pycurl
+import random
+import zipfile
+import os
+from xml.dom import minidom
+try:
+   from BeautifulSoup import BeautifulSoup
+except:
+   pass                        # Never mind, let's break later.
+
+def get_random_url(self):
+   filepath = os.getcwd() + "/test-lists/top-1m.csv.zip"
+   fp = zipfile.ZipFile(filepath, "r")
+   fp.open("top-1m.csv")
+   content = fp.read("top-1m.csv")
+   return "http://" + random.choice(content.split("\n")).split(",")[1]
+
+"""Pick a random header and use that for the request"""
+def get_random_headers(self):
+  filepath = os.getcwd() + "/test-lists/whatheaders.xml"
+  headers = []
+  content = open(filepath, "r").read()
+  soup = BeautifulSoup(content)
+  measurements = soup.findAll('measurement')
+  i = random.randint(0,len(measurements))
+  for vals in measurements[i].findAll('header'):
+    name = vals.find('name').string
+    value = vals.find('value').string
+    if name != "host":
+      headers.append((name, value))
+  return headers
diff --git a/to-be-ported/very-old/ooni/http.py b/to-be-ported/very-old/ooni/http.py
new file mode 100644
index 0000000..59e2abb
--- /dev/null
+++ b/to-be-ported/very-old/ooni/http.py
@@ -0,0 +1,306 @@
+#!/usr/bin/env python
+#
+# HTTP support for ooni-probe
+# by Jacob Appelbaum <jacob at appelbaum.net>
+#    Arturo Filasto' <art at fuffa.org>
+#
+
+from socket import gethostbyname
+import ooni.common
+import ooni.helpers
+import ooni.report
+import urllib2
+import httplib
+from urlparse import urlparse
+from pprint import pprint
+import pycurl
+import random
+import string
+import re
+from pprint import pprint
+try:
+   from BeautifulSoup import BeautifulSoup
+except:
+   pass                        # Never mind, let's break later.
+
+# By default, we'll be Torbutton's UA
+default_ua = { 'User-Agent' :
+               'Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0' }
+
+# Use pycurl to connect over a proxy
+PROXYTYPE_SOCKS5 = 5
+default_proxy_type = PROXYTYPE_SOCKS5
+default_proxy_host = "127.0.0.1"
+default_proxy_port = "9050"
+
+#class HTTPResponse(object):
+#  def __init__(self):
+
+
+"""A very basic HTTP fetcher that uses Tor by default and returns a curl
+   object."""
+def http_proxy_fetch(url, headers, proxy_type=5,
+                     proxy_host="127.0.0.1",
+                     proxy_port=9050):
+   request = pycurl.Curl()
+   request.setopt(pycurl.PROXY, proxy_host)
+   request.setopt(pycurl.PROXYPORT, proxy_port)
+   request.setopt(pycurl.PROXYTYPE, proxy_type)
+   request.setopt(pycurl.HTTPHEADER, ["User-Agent: Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0"])
+   request.setopt(pycurl.URL, url)
+   response = request.perform()
+   http_code = getinfo(pycurl.HTTP_CODE)
+   return response, http_code
+
+"""A very basic HTTP fetcher that returns a urllib2 response object."""
+def http_fetch(url,
+               headers= default_ua,
+               label="generic HTTP fetch"):
+   request = urllib2.Request(url, None, headers)
+   response = urllib2.urlopen(request)
+   return response
+
+"""Connect to test_hostname on port 80, request url and compare it with the expected
+   control_result. Optionally, a label may be set to customize
+   output. If the experiment matches the control, this returns True with the http
+   status code; otherwise it returns False.
+"""
+def http_content_match(experimental_url, control_result,
+                       headers= { 'User-Agent' : default_ua },
+                       label="generic HTTP content comparison"):
+  request = urllib2.Request(experimental_url, None, headers)
+  response = urllib2.urlopen(request)
+  responseContents = response.read()
+  responseCode = response.code
+  if responseContents != False:
+    if str(responseContents) != str(control_result):
+      print label + " control " + str(control_result) + " data does not " \
+            "match experiment response: " + str(responseContents)
+      return False, responseCode
+    return True, responseCode
+  else:
+    print "HTTP connection appears to have failed"
+  return False, False
+
+"""Connect to test_hostname on port 80, request url and compare it with the expected
+   control_result as a regex. Optionally, a label may be set to customize
+   output. If the experiment matches the control, this returns True with the HTTP
+   status code; otherwise it returns False.
+"""
+def http_content_fuzzy_match(experimental_url, control_result,
+                       headers= { 'User-Agent' : default_ua },
+                       label="generic HTTP content comparison"):
+  request = urllib2.Request(experimental_url, None, headers)
+  response = urllib2.urlopen(request)
+  responseContents = response.read()
+  responseCode = response.code
+  pattern = re.compile(control_result)
+  match = pattern.search(responseContents)
+  if responseContents != False:
+    if not match:
+      print label + " control " + str(control_result) + " data does not " \
+            "match experiment response: " + str(responseContents)
+      return False, responseCode
+    return True, responseCode
+  else:
+    print "HTTP connection appears to have failed"
+  return False, False
+
+"""Compare two HTTP status codes as integers and return True if they match."""
+def http_status_code_match(experiment_code, control_code):
+  if int(experiment_code) != int(control_code):
+    return False
+  return True
+
+"""Compare two HTTP status codes as integers and return True if they don't match."""
+def http_status_code_no_match(experiment_code, control_code):
+   if http_status_code_match(experiment_code, control_code):
+     return False
+   return True
+
+"""Connect to a URL and compare the control_header/control_result with the data
+served by the remote server. Return True if it matches, False if it does not."""
+def http_header_match(experiment_url, control_header, control_result):
+  response = http_fetch(url, label=label)
+  remote_header = response.get_header(control_header)
+  if str(remote_header) == str(control_result):
+    return True
+  else:
+    return False
+
+"""Connect to a URL and compare the control_header/control_result with the data
+served by the remote server. Return True if it does not matche, False if it does."""
+def http_header_no_match(experiment_url, control_header, control_result):
+  match = http_header_match(experiment_url, control_header, control_result)
+  if match:
+    return False
+  else:
+    return True
+
+def send_browser_headers(self, browser, conn):
+  # Send one randomly chosen real-browser header set on conn and finish the
+  # header section. Always returns True.
+  # NOTE(review): the browser argument is never used -- confirm whether it
+  # was meant to select a specific browser profile.
+  headers = ooni.helpers.get_random_headers(self)
+  for h in headers:
+    conn.putheader(h[0], h[1])
+  conn.endheaders()
+  return True
+
+def http_request(self, method, url, path=None):
+  purl = urlparse(url)
+  host = purl.netloc
+  conn = httplib.HTTPConnection(host, 80)
+  conn.connect()
+  if path is None:
+    path = purl.path
+  conn.putrequest(method, purl.path)
+  send_browser_headers(self, None, conn)
+  response = conn.getresponse()
+  headers = dict(response.getheaders())
+  self.headers = headers
+  self.data = response.read()
+  return True
+
+def search_headers(self, s_headers, url):
+  if http_request(self, "GET", url):
+    headers = self.headers
+  else:
+    return None
+  result = {}
+  for h in s_headers.items():
+    result[h[0]] = h[0] in headers
+  return result
+
+# XXX for testing
+#  [('content-length', '9291'), ('via', '1.0 cache_server:3128 (squid/2.6.STABLE21)'), ('x-cache', 'MISS from cache_server'), ('accept-ranges', 'bytes'), ('server', 'Apache/2.2.16 (Debian)'), ('last-modified', 'Fri, 22 Jul 2011 03:00:31 GMT'), ('connection', 'close'), ('etag', '"105801a-244b-4a89fab1e51c0;49e684ba90c80"'), ('date', 'Sat, 23 Jul 2011 03:03:56 GMT'), ('content-type', 'text/html'), ('x-cache-lookup', 'MISS from cache_server:3128')]
+
+"""Search for squid headers by requesting a random site and checking if the headers have been rewritten (active, not fingerprintable)"""
+def search_squid_headers(self):
+  test_name = "squid header"
+  self.logger.info("RUNNING %s test" % test_name)
+  url = ooni.helpers.get_random_url(self)
+  s_headers = {'via': '1.0 cache_server:3128 (squid/2.6.STABLE21)', 'x-cache': 'MISS from cache_server', 'x-cache-lookup':'MISS from cache_server:3128'}
+  ret = search_headers(self, s_headers, url)
+  for i in ret.items():
+    if i[1] is True:
+      self.logger.info("the %s test returned False" % test_name)
+      return False
+  self.logger.info("the %s test returned True" % test_name)
+  return True
+
+def random_bad_request(self):
+  url = ooni.helpers.get_random_url(self)
+  r_str = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(random.randint(5,20)))
+  if http_request(self, r_str, url):
+    return True
+  else:
+    return None
+
+"""Create a request made up of a random string of 5-20 chars (active technique, possibly fingerprintable)"""
+def squid_search_bad_request(self):
+  """Return True when a junk-method request shows no squid error header,
+     False when X-Squid-Error is present, None when the probe failed."""
+  test_name = "squid bad request"
+  self.logger.info("RUNNING %s test" % test_name)
+  if random_bad_request(self):
+    s_headers = {'X-Squid-Error' : 'ERR_INVALID_REQ 0'}
+    for i in s_headers.items():
+      # Only the header *name* is checked; the value above is documentation.
+      if i[0] in self.headers:
+        self.logger.info("the %s test returned False" % test_name)
+        return False
+    self.logger.info("the %s test returned True" % test_name)
+    return True
+  else:
+    self.logger.warning("the %s test returned failed" % test_name)
+    return None
+
+"""Try requesting cache_object and expect as output access denied (very active technique, fingerprintable) """
+def squid_cacheobject_request(self):
+  """Return False when the cache_object:// probe gets "Access Denied."
+     (a squid proxy answered), True when it does not, None on failure."""
+  url = ooni.helpers.get_random_url(self)
+  test_name = "squid cacheobject"
+  self.logger.info("RUNNING %s test" % test_name)
+  # NOTE(review): verify that http_request actually honors its path argument
+  # -- as written it appears to always use the URL's own path, which would
+  # mean the cache_object probe below is never sent.
+  if http_request(self, "GET", url, "cache_object://localhost/info"):
+    soup = BeautifulSoup(self.data)
+    if soup.find('strong') and soup.find('strong').string == "Access Denied.":
+      self.logger.info("the %s test returned False" % test_name)
+      return False
+    else:
+      self.logger.info("the %s test returned True" % test_name)
+      return True
+  else:
+    self.logger.warning("the %s test failed" % test_name)
+    return None
+
+
+def MSHTTP_CP_Tests(self):
+  """Microsoft NCSI captive-portal check: fetch ncsi.txt with the NCSI
+     User-Agent and require body "Microsoft NCSI" and status 200.
+     Returns True when both match (no portal), False otherwise."""
+  test_name = "MS HTTP Captive Portal"
+  self.logger.info("RUNNING %s test" % test_name)
+  experiment_url = "http://www.msftncsi.com/ncsi.txt"
+  expectedResponse = "Microsoft NCSI" # Only this - nothing more
+  expectedResponseCode = "200" # Must be this - nothing else
+  label = "MS HTTP"
+  headers = { 'User-Agent' : 'Microsoft NCSI' }
+  content_match, experiment_code = http_content_match(experiment_url, expectedResponse,
+                         headers, label)
+  status_match = http_status_code_match(expectedResponseCode,
+                        experiment_code)
+  if status_match and content_match:
+    self.logger.info("the %s test returned True" % test_name)
+    return True
+  else:
+    print label + " experiment would conclude that the network is filtered."
+    self.logger.info("the %s test returned False" % test_name)
+    return False
+
+def AppleHTTP_CP_Tests(self):
+  """Apple captive-portal check: fetch success.html with an iPhone
+     User-Agent and require a body containing "Success" (fuzzy match) and
+     status 200. Returns True when both match (no portal), False otherwise."""
+  test_name = "Apple HTTP Captive Portal"
+  self.logger.info("RUNNING %s test" % test_name)
+  experiment_url = "http://www.apple.com/library/test/success.html"
+  expectedResponse = "Success" # There is HTML that contains this string
+  expectedResponseCode = "200"
+  label = "Apple HTTP"
+  headers = { 'User-Agent' : 'Mozilla/5.0 (iPhone; U; CPU like Mac OS X; en) '
+                           'AppleWebKit/420+ (KHTML, like Gecko) Version/3.0'
+                           ' Mobile/1A543a Safari/419.3' }
+  content_match, experiment_code = http_content_fuzzy_match(
+                                   experiment_url, expectedResponse, headers)
+  status_match = http_status_code_match(expectedResponseCode,
+                          experiment_code)
+  if status_match and content_match:
+    self.logger.info("the %s test returned True" % test_name)
+    return True
+  else:
+    print label + " experiment would conclude that the network is filtered."
+    print label + "content match:" + str(content_match) + " status match:" + str(status_match)
+    self.logger.info("the %s test returned False" % test_name)
+    return False
+
+def WC3_CP_Tests(self):
+  test_name = "W3 Captive Portal"
+  self.logger.info("RUNNING %s test" % test_name)
+  url = "http://tools.ietf.org/html/draft-nottingham-http-portal-02"
+  draftResponseCode = "428"
+  label = "WC3 draft-nottingham-http-portal"
+  response = http_fetch(url, label=label)
+  responseCode = response.code
+  if http_status_code_no_match(responseCode, draftResponseCode):
+    self.logger.info("the %s test returned True" % test_name)
+    return True
+  else:
+    print label + " experiment would conclude that the network is filtered."
+    print label + " status match:" + status_match
+    self.logger.info("the %s test returned False" % test_name)
+    return False
+
+# Google ChromeOS fetches this url in guest mode
+# and they expect the user to authenticate
+def googleChromeOSHTTPTest(self):
+  # Placeholder: the real ChromeOS captive-portal check is not implemented.
+  print "noop"
+  #url = "http://www.google.com/"
+
+def SquidHeader_TransparentHTTP_Tests(self):
+  # Wrapper exposing search_squid_headers under the *_TransparentHTTP_Tests
+  # naming convention.
+  return search_squid_headers(self)
+
+def SquidBadRequest_TransparentHTTP_Tests(self):
+  # Wrapper exposing squid_search_bad_request under the
+  # *_TransparentHTTP_Tests naming convention.
+  return squid_search_bad_request(self)
+
+def SquidCacheobject_TransparentHTTP_Tests(self):
+  # Wrapper exposing squid_cacheobject_request under the
+  # *_TransparentHTTP_Tests naming convention.
+  return squid_cacheobject_request(self)
+
+
diff --git a/to-be-ported/very-old/ooni/input.py b/to-be-ported/very-old/ooni/input.py
new file mode 100644
index 0000000..c32ab48
--- /dev/null
+++ b/to-be-ported/very-old/ooni/input.py
@@ -0,0 +1,33 @@
+#!/usr/bin/python
+
+class file:
+    def __init__(self, name=None):
+        if name:
+            self.name = name
+
+    def simple(self, name=None):
+        """ Simple file parsing method:
+        Read a file line by line and output an array with all it's lines, without newlines
+        """
+        if name:
+            self.name = name
+        output = []
+        try:
+            f = open(self.name, "r")
+            for line in f.readlines():
+                output.append(line.strip())
+            return output
+        except:
+            return output
+
+    def csv(self, name=None):
+        if name:
+            self.name = name
+
+    def yaml(self, name):
+        if name:
+            self.name = name
+
+    def consensus(self, name):
+        if name:
+            self.name = name
diff --git a/to-be-ported/very-old/ooni/namecheck.py b/to-be-ported/very-old/ooni/namecheck.py
new file mode 100644
index 0000000..1a2a3f0
--- /dev/null
+++ b/to-be-ported/very-old/ooni/namecheck.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+#
+# DNS tampering detection module
+# by Jacob Appelbaum <jacob at appelbaum.net>
+#
+# This module performs multiple DNS tests.
+
+import sys
+import ooni.dnsooni
+
+class DNS():
+  """Driver that discovers and runs every *_DNS_Tests function in
+     ooni.dnsooni, reporting clean/dirty/failed per test."""
+  def __init__(self, args):
+    self.in_ = sys.stdin
+    self.out = sys.stdout
+    self.debug = False
+    # args must provide a 'randomize' attribute (read by list-based tests).
+    self.randomize = args.randomize
+
+  def DNS_Tests(self):
+    # Reflectively run every ooni.dnsooni function whose name ends with
+    # _DNS_Tests; True => clean, None => failed, anything else => dirty.
+    print "DNS tampering detection:"
+    filter_name = "_DNS_Tests"
+    tests = [ooni.dnsooni]
+    for test in tests:
+      for function_ptr in dir(test):
+        if function_ptr.endswith(filter_name):
+          filter_result = getattr(test, function_ptr)(self)
+          if filter_result == True:
+            print function_ptr + " thinks the network is clean"
+          elif filter_result == None:
+              print function_ptr + " failed"
+          else:
+            print function_ptr + " thinks the network is dirty"
+
+  def main(self):
+    # Run every *_Tests method defined on this driver.
+    for function_ptr in dir(self):
+      if function_ptr.endswith("_Tests"):
+        getattr(self, function_ptr)()
+
+if __name__ == '__main__':
+  self.main()
diff --git a/to-be-ported/very-old/ooni/plugins/__init__.py b/to-be-ported/very-old/ooni/plugins/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/to-be-ported/very-old/ooni/plugins/dnstest_plgoo.py b/to-be-ported/very-old/ooni/plugins/dnstest_plgoo.py
new file mode 100644
index 0000000..0c0cfa7
--- /dev/null
+++ b/to-be-ported/very-old/ooni/plugins/dnstest_plgoo.py
@@ -0,0 +1,84 @@
+#!/usr/bin/python
+
+import sys
+import re
+from pprint import pprint
+from twisted.internet import reactor, endpoints
+from twisted.names import client
+from ooni.plugooni import Plugoo
+from ooni.socksclient import SOCKSv4ClientProtocol, SOCKSWrapper
+
+class DNSTestPlugin(Plugoo):
+    def __init__(self):
+        self.name = ""
+        self.type = ""
+        self.paranoia = ""
+        self.modules_to_import = []
+        self.output_dir = ""
+        self.buf = ""
+        self.control_response = []
+
+    def response_split(self, response):
+      a = []
+      b = []
+      for i in response:
+        a.append(i[0])
+        b.append(i[1])
+
+      return a,b
+
+    def cb(self, type, hostname, dns_server, value):
+      if self.control_response is None:
+        self.control_response = []
+      if type == 'control' and self.control_response != value:
+          print "%s %s" % (dns_server, value)
+          self.control_response.append((dns_server,value))
+          pprint(self.control_response)
+      if type == 'experiment':
+        pprint(self.control_response)
+        _, res = self.response_split(self.control_response)
+        if value not in res:
+          print "res (%s) : " % value
+          pprint(res)
+          print "---"
+          print "%s appears to be censored on %s (%s != %s)" % (hostname, dns_server, res[0], value)
+
+        else:
+          print "%s appears to be clean on %s" % (hostname, dns_server)
+        self.r2.servers = [('212.245.158.66',53)]
+      print "HN: %s %s" % (hostname, value)
+
+    def err(self, pck, error):
+      pprint(pck)
+      error.printTraceback()
+      reactor.stop()
+      print "error!"
+      pass
+
+    def ooni_main(self, args):
+        self.experimentalproxy = ''
+        self.test_hostnames = ['dio.it']
+        self.control_dns = [('8.8.8.8',53), ('4.4.4.8',53)]
+        self.experiment_dns = [('85.37.17.9',53),('212.245.158.66',53)]
+
+        self.control_res = []
+        self.control_response = None
+
+        self.r1 = client.Resolver(None, [self.control_dns.pop()])
+        self.r2 = client.Resolver(None, [self.experiment_dns.pop()])
+
+        for hostname in self.test_hostnames:
+          for dns_server in self.control_dns:
+            self.r1.servers = [dns_server]
+            f = self.r1.getHostByName(hostname)
+            pck = (hostname, dns_server)
+            f.addCallback(lambda x: self.cb('control', hostname, dns_server, x)).addErrback(lambda x: self.err(pck, x))
+
+          for dns_server in self.experiment_dns:
+            self.r2.servers = [dns_server]
+            pck = (hostname, dns_server)
+            f = self.r2.getHostByName(hostname)
+            f.addCallback(lambda x: self.cb('experiment', hostname, dns_server, x)).addErrback(lambda x: self.err(pck, x))
+
+        reactor.run()
+
diff --git a/to-be-ported/very-old/ooni/plugins/http_plgoo.py b/to-be-ported/very-old/ooni/plugins/http_plgoo.py
new file mode 100644
index 0000000..021e863
--- /dev/null
+++ b/to-be-ported/very-old/ooni/plugins/http_plgoo.py
@@ -0,0 +1,70 @@
+#!/usr/bin/python
+
+import sys
+import re
+from twisted.internet import reactor, endpoints
+from twisted.web import client
+from ooni.plugooni import Plugoo
+from ooni.socksclient import SOCKSv4ClientProtocol, SOCKSWrapper
+
+class HttpPlugin(Plugoo):
+    """Plugoo that fetches one URL twice -- once through the control (Tor
+       SOCKS) proxy and once directly -- and prints SUCCESS when the two
+       bodies are byte-identical, FAIL otherwise."""
+    def __init__(self):
+        self.name = ""
+        self.type = ""
+        self.paranoia = ""
+        self.modules_to_import = []
+        self.output_dir = ""
+        self.buf = ''
+
+    def cb(self, type, content):
+        # Fires once per fetch: the first body is stored, the second is
+        # compared against it, then the reactor is stopped.
+        print "got %d bytes from %s" % (len(content), type) # DEBUG
+        if not self.buf:
+            self.buf = content
+        else:
+            if self.buf == content:
+                print "SUCCESS"
+            else:
+                print "FAIL"
+            reactor.stop()
+
+    def endpoint(self, scheme, host, port):
+        # NOTE(review): 'context' in the https branch is undefined here --
+        # an https URL would raise NameError; it needs an SSL ContextFactory.
+        ep = None
+        if scheme == 'http':
+            ep = endpoints.TCP4ClientEndpoint(reactor, host, port)
+        elif scheme == 'https':
+            ep = endpoints.SSL4ClientEndpoint(reactor, host, port, context)
+        return ep
+
+    def ooni_main(self):
+        # We don't have the Command object so cheating for now.
+        url = 'http://check.torproject.org/'
+        self.controlproxy = 'socks4a://127.0.0.1:9050'
+        self.experimentalproxy = ''
+
+        if not re.match("[a-zA-Z0-9]+\:\/\/[a-zA-Z0-9]+", url):
+          return None
+        scheme, host, port, path = client._parse(url)
+
+        ctrl_dest = self.endpoint(scheme, host, port)
+        if not ctrl_dest:
+            raise Exception('unsupported scheme %s in %s' % (scheme, url))
+        if self.controlproxy:
+            _, proxy_host, proxy_port, _ = client._parse(self.controlproxy)
+            control = SOCKSWrapper(reactor, proxy_host, proxy_port, ctrl_dest)
+        else:
+            control = ctrl_dest
+        f = client.HTTPClientFactory(url)
+        f.deferred.addCallback(lambda x: self.cb('control', x))
+        control.connect(f)
+
+        exp_dest = self.endpoint(scheme, host, port)
+        if not exp_dest:
+            raise Exception('unsupported scheme %s in %s' % (scheme, url))
+        # FIXME: use the experiment proxy if there is one
+        experiment = exp_dest
+        f = client.HTTPClientFactory(url)
+        f.deferred.addCallback(lambda x: self.cb('experiment', x))
+        experiment.connect(f)
+
+        reactor.run()
+
diff --git a/to-be-ported/very-old/ooni/plugins/marco_plgoo.py b/to-be-ported/very-old/ooni/plugins/marco_plgoo.py
new file mode 100644
index 0000000..cb63df7
--- /dev/null
+++ b/to-be-ported/very-old/ooni/plugins/marco_plgoo.py
@@ -0,0 +1,377 @@
+#!/usr/bin/python
+# Copyright 2009 The Tor Project, Inc.
+# License at end of file.
+#
+# This tests connections to a list of Tor nodes in a given Tor consensus file
+# while also recording the certificates - it's not a perfect tool but complete
+# or even partial failure should raise alarms.
+#
+# This plugoo uses threads and as a result, it's not friendly to SIGINT signals.
+#
+
+import logging
+import socket
+import time
+import random
+import threading
+import sys
+import os
+try:
+    from ooni.plugooni import Plugoo
+except:
+    print "Error importing Plugoo"
+
+try:
+    from ooni.common import Storage
+except:
+    print "Error importing Storage"
+
+try:
+    from ooni import output
+except:
+    print "Error importing output"
+
+try:
+    from ooni import input
+except:
+    print "Error importing output"
+
+
+
+ssl = OpenSSL = None
+
+try:
+    import ssl
+except ImportError:
+    pass
+
+if ssl is None:
+    try:
+        import OpenSSL.SSL
+        import OpenSSL.crypto
+    except ImportError:
+        pass
+
+if ssl is None and OpenSSL is None:
+    if socket.ssl:
+        print """Your Python is too old to have the ssl module, and you haven't
+installed pyOpenSSL.  I'll try to work with what you've got, but I can't
+record certificates so well."""
+    else:
+        print """Your Python has no OpenSSL support.  Upgrade to 2.6, install
+pyOpenSSL, or both."""
+        sys.exit(1)
+
+################################################################
+
+# How many servers should we test in parallel?
+N_THREADS = 16
+
+# How long do we give individual socket operations to succeed or fail?
+# (Seconds)
+TIMEOUT = 10
+
+################################################################
+
+CONNECTING = "noconnect"
+HANDSHAKING = "nohandshake"
+OK = "ok"
+ERROR = "err"
+
+LOCK = threading.RLock()
+socket.setdefaulttimeout(TIMEOUT)
+
+def clean_pem_cert(cert):
+    idx = cert.find('-----END')
+    if idx > 1 and cert[idx-1] != '\n':
+        cert = cert.replace('-----END','\n-----END')
+    return cert
+
+def record((addr,port), state, extra=None, cert=None):
+    LOCK.acquire()
+    try:
+        OUT.append({'addr' : addr,
+                         'port' : port,
+                         'state' : state,
+                         'extra' : extra})
+        if cert:
+            CERT_OUT.append({'addr' : addr,
+                                  'port' : port,
+                                  'clean_cert' : clean_pem_cert(cert)})
+    finally:
+        LOCK.release()
+
+def probe(address,theCtx=None):
+    sock = s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+    logging.info("Opening socket to %s",address)
+    try:
+        s.connect(address)
+    except IOError, e:
+        logging.info("Error %s from socket connect.",e)
+        record(address, CONNECTING, e)
+        s.close()
+        return
+    logging.info("Socket to %s open.  Launching SSL handshake.",address)
+    if ssl:
+        try:
+            s = ssl.wrap_socket(s,cert_reqs=ssl.CERT_NONE,ca_certs=None)
+            # "MARCO!"
+            s.do_handshake()
+        except IOError, e:
+            logging.info("Error %s from ssl handshake",e)
+            record(address, HANDSHAKING, e)
+            s.close()
+            sock.close()
+            return
+        cert = s.getpeercert(True)
+        if cert != None:
+            cert = ssl.DER_cert_to_PEM_cert(cert)
+    elif OpenSSL:
+        try:
+            s = OpenSSL.SSL.Connection(theCtx, s)
+            s.set_connect_state()
+            s.setblocking(True)
+            s.do_handshake()
+            cert = s.get_peer_certificate()
+            if cert != None:
+                cert = OpenSSL.crypto.dump_certificate(
+                    OpenSSL.crypto.FILETYPE_PEM, cert)
+        except IOError, e:
+            logging.info("Error %s from OpenSSL handshake",e)
+            record(address, HANDSHAKING, e)
+            s.close()
+            sock.close()
+            return
+    else:
+        try:
+            s = socket.ssl(s)
+            s.write('a')
+            cert = s.server()
+        except IOError, e:
+            logging.info("Error %s from socket.ssl handshake",e)
+            record(address, HANDSHAKING, e)
+            sock.close()
+            return
+
+    logging.info("SSL handshake with %s finished",address)
+    # "POLO!"
+    record(address,OK, cert=cert)
+    if (ssl or OpenSSL):
+        s.close()
+    sock.close()
+
+def parseNetworkstatus(ns):
+    for line in ns:
+        if line.startswith('r '):
+            r = line.split()
+            yield (r[-3],int(r[-2]))
+
+def parseCachedDescs(cd):
+    for line in cd:
+        if line.startswith('router '):
+            r = line.split()
+            yield (r[2],int(r[3]))
+
+def worker(addrList, origLength):
+    done = False
+    logging.info("Launching thread.")
+
+    if OpenSSL is not None:
+        context = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_METHOD)
+    else:
+        context = None
+
+    while True:
+        LOCK.acquire()
+        try:
+            if addrList:
+                print "Starting test %d/%d"%(
+                    1+origLength-len(addrList),origLength)
+                addr = addrList.pop()
+            else:
+                return
+        finally:
+            LOCK.release()
+
+        try:
+            logging.info("Launching probe for %s",addr)
+            probe(addr, context)
+        except Exception, e:
+            logging.info("Unexpected error from %s",addr)
+            record(addr, ERROR, e)
+
+def runThreaded(addrList, nThreads):
+    ts = []
+    origLen = len(addrList)
+    for num in xrange(nThreads):
+        t = threading.Thread(target=worker, args=(addrList,origLen))
+        t.setName("Th#%s"%num)
+        ts.append(t)
+        t.start()
+    for t in ts:
+        logging.info("Joining thread %s",t.getName())
+        t.join()
+
+def main(self, args):
+    # BEGIN
+    # This logic should be present in more or less all plugoos
+    global OUT
+    global CERT_OUT
+    global OUT_DATA
+    global CERT_OUT_DATA
+    OUT_DATA = []
+    CERT_OUT_DATA = []
+
+    try:
+        OUT = output.data(name=args.output.main) #open(args.output.main, 'w')
+    except:
+        print "No output file given. quitting..."
+        return -1
+
+    try:
+        CERT_OUT = output.data(args.output.certificates) #open(args.output.certificates, 'w')
+    except:
+        print "No output cert file given. quitting..."
+        return -1
+
+    logging.basicConfig(format='%(asctime)s [%(levelname)s] [%(threadName)s] %(message)s',
+                        datefmt="%b %d %H:%M:%S",
+                        level=logging.INFO,
+                        filename=args.log)
+    logging.info("============== STARTING NEW LOG")
+    # END
+
+    if ssl is not None:
+        methodName = "ssl"
+    elif OpenSSL is not None:
+        methodName = "OpenSSL"
+    else:
+        methodName = "socket"
+    logging.info("Running marco with method '%s'", methodName)
+
+    addresses = []
+
+    if args.input.ips:
+        for fn in input.file(args.input.ips).simple():
+            a, b = fn.split(":")
+            addresses.append( (a,int(b)) )
+
+    elif args.input.consensus:
+        for fn in args:
+            print fn
+            for a,b in parseNetworkstatus(open(args.input.consensus)):
+                addresses.append( (a,b) )
+
+    if args.input.randomize:
+        # Take a random permutation of the set the Knuth way!
+        for i in range(0, len(addresses)):
+            j = random.randint(0, i)
+            addresses[i], addresses[j] = addresses[j], addresses[i]
+
+    if len(addresses) == 0:
+        logging.error("No input source given, quiting...")
+        return -1
+
+    addresses = list(addresses)
+
+    if not args.input.randomize:
+        addresses.sort()
+
+    runThreaded(addresses, N_THREADS)
+
+class MarcoPlugin(Plugoo):
+  def __init__(self):
+    self.name = ""
+
+    self.modules = [ "logging", "socket", "time", "random", "threading", "sys",
+                     "OpenSSL.SSL", "OpenSSL.crypto", "os" ]
+
+    self.input = Storage()
+    self.input.ip = None
+    try:
+        c_file = os.path.expanduser("~/.tor/cached-consensus")
+        open(c_file)
+        self.input.consensus = c_file
+    except:
+        pass
+
+    try:
+        c_file = os.path.expanduser("~/tor/bundle/tor-browser_en-US/Data/Tor/cached-consensus")
+        open(c_file)
+        self.input.consensus = c_file
+    except:
+        pass
+
+    if not self.input.consensus:
+        print "Error importing consensus file"
+        sys.exit(1)
+
+    self.output = Storage()
+    self.output.main = 'reports/marco-1.yamlooni'
+    self.output.certificates = 'reports/marco_certs-1.out'
+
+    # XXX This needs to be moved to a proper function
+    #     refactor, refactor and ... refactor!
+    if os.path.exists(self.output.main):
+        basedir = "/".join(self.output.main.split("/")[:-1])
+        fn = self.output.main.split("/")[-1].split(".")
+        ext = fn[1]
+        name = fn[0].split("-")[0]
+        i = fn[0].split("-")[1]
+        i = int(i) + 1
+        self.output.main = os.path.join(basedir, name + "-" + str(i) + "." + ext)
+
+    if os.path.exists(self.output.certificates):
+        basedir = "/".join(self.output.certificates.split("/")[:-1])
+        fn = self.output.certificates.split("/")[-1].split(".")
+        ext = fn[1]
+        name = fn[0].split("-")[0]
+        i = fn[0].split("-")[1]
+        i = int(i) + 1
+        self.output.certificates= os.path.join(basedir, name + "-" + str(i) + "." + ext)
+
+    # We require for Tor to already be running or have recently run
+    self.args = Storage()
+    self.args.input = self.input
+    self.args.output = self.output
+    self.args.log = 'reports/marco.log'
+
+  def ooni_main(self, cmd):
+    self.args.input.randomize = cmd.randomize
+    self.args.input.ips = cmd.listfile
+    main(self, self.args)
+
+if __name__ == '__main__':
+    if len(sys.argv) < 2:
+        print >> sys.stderr, ("This script takes one or more networkstatus "
+                              "files as an argument.")
+    self = None
+    main(self, sys.argv[1:])
+
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#
+#     * Neither the names of the copyright owners nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/to-be-ported/very-old/ooni/plugins/proxy_plgoo.py b/to-be-ported/very-old/ooni/plugins/proxy_plgoo.py
new file mode 100644
index 0000000..d175c1c
--- /dev/null
+++ b/to-be-ported/very-old/ooni/plugins/proxy_plgoo.py
@@ -0,0 +1,69 @@
+#!/usr/bin/python
+
+import sys
+from twisted.internet import reactor, endpoints
+from twisted.web import client
+from ooni.plugooni import Plugoo
+from ooni.socksclient import SOCKSv4ClientProtocol, SOCKSWrapper
+
+class HttpPlugin(Plugoo):
+    def __init__(self):
+        self.name = ""
+        self.type = ""
+        self.paranoia = ""
+        self.modules_to_import = []
+        self.output_dir = ""
+        self.buf = ''
+
+    def cb(self, type, content):
+        print "got %d bytes from %s" % (len(content), type) # DEBUG
+        if not self.buf:
+            self.buf = content
+        else:
+            if self.buf == content:
+                print "SUCCESS"
+            else:
+                print "FAIL"
+            reactor.stop()
+
+    def endpoint(self, scheme, host, port):
+        ep = None
+        if scheme == 'http':
+            ep = endpoints.TCP4ClientEndpoint(reactor, host, port)
+        elif scheme == 'https':
+            from twisted.internet import ssl
+            ep = endpoints.SSL4ClientEndpoint(reactor, host, port,
+                                              ssl.ClientContextFactory())
+        return ep
+
+    def ooni_main(self, cmd):
+        # We don't have the Command object so cheating for now.
+        url = cmd.hostname
+
+        # FIXME: validate that url is of the form scheme://host[:port]/path
+        scheme, host, port, path = client._parse(url)
+
+        ctrl_dest = self.endpoint(scheme, host, port)
+        if not ctrl_dest:
+            raise Exception('unsupported scheme %s in %s' % (scheme, url))
+        if cmd.controlproxy:
+            assert scheme != 'https', "no support for proxied https atm, sorry"
+            _, proxy_host, proxy_port, _ = client._parse(cmd.controlproxy)
+            control = SOCKSWrapper(reactor, proxy_host, proxy_port, ctrl_dest)
+            print "proxy: ", proxy_host, proxy_port
+        else:
+            control = ctrl_dest
+        f = client.HTTPClientFactory(url)
+        f.deferred.addCallback(lambda x: self.cb('control', x))
+        control.connect(f)
+
+        exp_dest = self.endpoint(scheme, host, port)
+        if not exp_dest:
+            raise Exception('unsupported scheme %s in %s' % (scheme, url))
+        # FIXME: use the experiment proxy if there is one
+        experiment = exp_dest
+        f = client.HTTPClientFactory(url)
+        f.deferred.addCallback(lambda x: self.cb('experiment', x))
+        experiment.connect(f)
+
+        reactor.run()
diff --git a/to-be-ported/very-old/ooni/plugins/simple_dns_plgoo.py b/to-be-ported/very-old/ooni/plugins/simple_dns_plgoo.py
new file mode 100644
index 0000000..87d3684
--- /dev/null
+++ b/to-be-ported/very-old/ooni/plugins/simple_dns_plgoo.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+#
+# DNS tampering detection module
+# by Jacob Appelbaum <jacob at appelbaum.net>
+#
+# This module performs DNS queries against a known good resolver and a possible
+# bad resolver. We compare every resolved name against a list of known filters
+# - if we match, we ring a bell; otherwise, we list possible filter IP
+# addresses. There is a high false positive rate for sites that are GeoIP load
+# balanced.
+#
+
+import sys
+import ooni.dnsooni
+
+from ooni.plugooni import Plugoo
+
+class DNSBulkPlugin(Plugoo):
+  def __init__(self):
+    self.in_ = sys.stdin
+    self.out = sys.stdout
+    self.randomize = True # Pass this down properly
+    self.debug = False
+
+  def DNS_Tests(self):
+    print "DNS tampering detection for list of domains:"
+    tests = self.get_tests_by_filter(("_DNS_BULK_Tests"), (ooni.dnsooni))
+    self.run_tests(tests)
+
+  def magic_main(self):
+    self.run_plgoo_tests("_Tests")
+
+  def ooni_main(self, args):
+    self.magic_main()
+
diff --git a/to-be-ported/very-old/ooni/plugins/tcpcon_plgoo.py b/to-be-ported/very-old/ooni/plugins/tcpcon_plgoo.py
new file mode 100644
index 0000000..01dee81
--- /dev/null
+++ b/to-be-ported/very-old/ooni/plugins/tcpcon_plgoo.py
@@ -0,0 +1,278 @@
+#!/usr/bin/python
+# Copyright 2011 The Tor Project, Inc.
+# License at end of file.
+#
+# This is a modified version of the marco plugoo. Given a list of
+# IP:port addresses, this plugoo will attempt a TCP connection with each
+# host and write the results to a .yamlooni file.
+#
+# This plugoo uses threads and as a result, it's not friendly to SIGINT signals.
+#
+
+import logging
+import socket
+import time
+import random
+import threading
+import sys
+import os
+try:
+    from ooni.plugooni import Plugoo
+except:
+    print "Error importing Plugoo"
+
+try:
+    from ooni.common import Storage
+except:
+    print "Error importing Storage"
+
+try:
+    from ooni import output
+except:
+    print "Error importing output"
+
+try:
+    from ooni import input
+except:
+    print "Error importing output"
+
+################################################################
+
+# How many servers should we test in parallel?
+N_THREADS = 16
+
+# How long do we give individual socket operations to succeed or fail?
+# (Seconds)
+TIMEOUT = 10
+
+################################################################
+
+CONNECTING = "noconnect"
+OK = "ok"
+ERROR = "err"
+
+LOCK = threading.RLock()
+socket.setdefaulttimeout(TIMEOUT)
+
+# We will want to log the IP address, the port and the state
+def record((addr,port), state, extra=None):
+    LOCK.acquire()
+    try:
+        OUT.append({'addr' : addr,
+                    'port' : port,
+                    'state' : state,
+                    'extra' : extra})
+    finally:
+        LOCK.release()
+
+# For each IP address in the list, open a socket, write to the log and
+# then close the socket
+def probe(address,theCtx=None):
+    sock = s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+    logging.info("Opening socket to %s",address)
+    try:
+        s.connect(address)
+    except IOError, e:
+        logging.info("Error %s from socket connect.",e)
+        record(address, CONNECTING, e)
+        s.close()
+        return
+    logging.info("Socket to %s open.  Successfully launched TCP handshake.",address)
+    record(address, OK)
+    s.close()
+
+def parseNetworkstatus(ns):
+    for line in ns:
+        if line.startswith('r '):
+            r = line.split()
+            yield (r[-3],int(r[-2]))
+
+def parseCachedDescs(cd):
+    for line in cd:
+        if line.startswith('router '):
+            r = line.split()
+            yield (r[2],int(r[3]))
+
+def worker(addrList, origLength):
+    done = False
+    context = None
+
+    while True:
+        LOCK.acquire()
+        try:
+            if addrList:
+                print "Starting test %d/%d"%(
+                    1+origLength-len(addrList),origLength)
+                addr = addrList.pop()
+            else:
+                return
+        finally:
+            LOCK.release()
+
+        try:
+            logging.info("Launching probe for %s",addr)
+            probe(addr, context)
+        except Exception, e:
+            logging.info("Unexpected error from %s",addr)
+            record(addr, ERROR, e)
+
+def runThreaded(addrList, nThreads):
+    ts = []
+    origLen = len(addrList)
+    for num in xrange(nThreads):
+        t = threading.Thread(target=worker, args=(addrList,origLen))
+        t.setName("Th#%s"%num)
+        ts.append(t)
+        t.start()
+    for t in ts:
+        t.join()
+
+def main(self, args):
+    # BEGIN
+    # This logic should be present in more or less all plugoos
+    global OUT
+    global OUT_DATA
+    OUT_DATA = []
+
+    try:
+        OUT = output.data(name=args.output.main) #open(args.output.main, 'w')
+    except:
+        print "No output file given. quitting..."
+        return -1
+
+    logging.basicConfig(format='%(asctime)s [%(levelname)s] [%(threadName)s] %(message)s',
+                        datefmt="%b %d %H:%M:%S",
+                        level=logging.INFO,
+                        filename=args.log)
+    logging.info("============== STARTING NEW LOG")
+    # END
+
+    methodName = "socket"
+    logging.info("Running tcpcon with method '%s'", methodName)
+
+    addresses = []
+
+    if args.input.ips:
+        for fn in input.file(args.input.ips).simple():
+            a, b = fn.split(":")
+            addresses.append( (a,int(b)) )
+
+    elif args.input.consensus:
+        for fn in args:
+            print fn
+            for a,b in parseNetworkstatus(open(args.input.consensus)):
+                addresses.append( (a,b) )
+
+    if args.input.randomize:
+        # Take a random permutation of the set the Knuth way!
+        for i in range(0, len(addresses)):
+            j = random.randint(0, i)
+            addresses[i], addresses[j] = addresses[j], addresses[i]
+
+    if len(addresses) == 0:
+        logging.error("No input source given, quiting...")
+        return -1
+
+    addresses = list(addresses)
+
+    if not args.input.randomize:
+        addresses.sort()
+
+    runThreaded(addresses, N_THREADS)
+
+class MarcoPlugin(Plugoo):
+  def __init__(self):
+    self.name = ""
+
+    self.modules = [ "logging", "socket", "time", "random", "threading", "sys",
+                     "os" ]
+
+    self.input = Storage()
+    self.input.ip = None
+    try:
+        c_file = os.path.expanduser("~/.tor/cached-consensus")
+        open(c_file)
+        self.input.consensus = c_file
+    except:
+        pass
+
+    try:
+        c_file = os.path.expanduser("~/tor/bundle/tor-browser_en-US/Data/Tor/cached-consensus")
+        open(c_file)
+        self.input.consensus = c_file
+    except:
+        pass
+
+    if not self.input.consensus:
+        print "Error importing consensus file"
+        sys.exit(1)
+
+    self.output = Storage()
+    self.output.main = 'reports/tcpcon-1.yamlooni'
+    self.output.certificates = 'reports/tcpcon_certs-1.out'
+
+    # XXX This needs to be moved to a proper function
+    #     refactor, refactor and ... refactor!
+    if os.path.exists(self.output.main):
+        basedir = "/".join(self.output.main.split("/")[:-1])
+        fn = self.output.main.split("/")[-1].split(".")
+        ext = fn[1]
+        name = fn[0].split("-")[0]
+        i = fn[0].split("-")[1]
+        i = int(i) + 1
+        self.output.main = os.path.join(basedir, name + "-" + str(i) + "." + ext)
+
+    if os.path.exists(self.output.certificates):
+        basedir = "/".join(self.output.certificates.split("/")[:-1])
+        fn = self.output.certificates.split("/")[-1].split(".")
+        ext = fn[1]
+        name = fn[0].split("-")[0]
+        i = fn[0].split("-")[1]
+        i = int(i) + 1
+        self.output.certificates= os.path.join(basedir, name + "-" + str(i) + "." + ext)
+
+    # We require for Tor to already be running or have recently run
+    self.args = Storage()
+    self.args.input = self.input
+    self.args.output = self.output
+    self.args.log = 'reports/tcpcon.log'
+
+  def ooni_main(self, cmd):
+    self.args.input.randomize = cmd.randomize
+    self.args.input.ips = cmd.listfile
+    main(self, self.args)
+
+if __name__ == '__main__':
+    if len(sys.argv) < 2:
+        print >> sys.stderr, ("This script takes one or more networkstatus "
+                              "files as an argument.")
+    self = None
+    main(self, sys.argv[1:])
+
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#
+#     * Neither the names of the copyright owners nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/to-be-ported/very-old/ooni/plugins/tor.py b/to-be-ported/very-old/ooni/plugins/tor.py
new file mode 100644
index 0000000..0d95d4d
--- /dev/null
+++ b/to-be-ported/very-old/ooni/plugins/tor.py
@@ -0,0 +1,80 @@
+import re
+import os.path
+import signal
+import subprocess
+import socket
+import threading
+import time
+import logging
+
+from pytorctl import TorCtl
+
+torrc = os.path.join(os.getcwd(),'torrc') #os.path.join(projroot, 'globaleaks', 'tor', 'torrc')
+# hiddenservice = os.path.join(projroot, 'globaleaks', 'tor', 'hiddenservice')
+
+class ThreadProc(threading.Thread):
+    def __init__(self, cmd):
+        threading.Thread.__init__(self)
+        self.cmd = cmd
+        self.proc = None
+
+    def run(self):
+        print "running"
+        try:
+            self.proc = subprocess.Popen(self.cmd,
+                                         shell = False, stdout = subprocess.PIPE,
+                                         stderr = subprocess.PIPE)
+
+        except OSError:
+           logging.fatal('cannot execute command')
+
+class Tor:
+    def __init__(self):
+        self.start()
+
+    def check(self):
+        conn = TorCtl.connect()
+        if conn != None:
+            conn.close()
+            return True
+
+        return False
+
+
+    def start(self):
+        if not os.path.exists(torrc):
+            raise OSError("torrc doesn't exist (%s)" % torrc)
+
+        tor_cmd = ["tor", "-f", torrc]
+
+        torproc = ThreadProc(tor_cmd)
+        torproc.run()
+
+        bootstrap_line = re.compile("Bootstrapped 100%: ")
+
+        while True:
+            if torproc.proc == None:
+                time.sleep(1)
+                continue
+
+            init_line = torproc.proc.stdout.readline().strip()
+
+            if not init_line:
+                torproc.proc.kill()
+                return False
+
+            if bootstrap_line.search(init_line):
+                break
+
+        return True
+
+    def stop(self):
+        if not self.check():
+            return
+
+        conn = TorCtl.connect()
+        if conn != None:
+            conn.send_signal("SHUTDOWN")
+            conn.close()
+
+t = Tor()
diff --git a/to-be-ported/very-old/ooni/plugins/torrc b/to-be-ported/very-old/ooni/plugins/torrc
new file mode 100644
index 0000000..b9ffc80
--- /dev/null
+++ b/to-be-ported/very-old/ooni/plugins/torrc
@@ -0,0 +1,9 @@
+SocksPort 9050
+ControlPort 9051
+VirtualAddrNetwork 10.23.47.0/10
+AutomapHostsOnResolve 1
+TransPort 9040
+TransListenAddress 127.0.0.1
+DNSPort 5353
+DNSListenAddress 127.0.0.1
+
diff --git a/to-be-ported/very-old/ooni/plugooni.py b/to-be-ported/very-old/ooni/plugooni.py
new file mode 100644
index 0000000..17f17b3
--- /dev/null
+++ b/to-be-ported/very-old/ooni/plugooni.py
@@ -0,0 +1,106 @@
+#!/usr/bin/env python
+#
+# Plugooni, ooni plugin module for loading plgoo files.
+# by Jacob Appelbaum <jacob at appelbaum.net>
+#    Arturo Filasto' <art at fuffa.org>
+
+import sys
+import os
+
+import imp, pkgutil, inspect
+
+class Plugoo:
+  def __init__(self, name, plugin_type, paranoia, author):
+    self.name = name
+    self.author = author
+    self.type = plugin_type
+    self.paranoia = paranoia
+
+  """
+  Expect a tuple of strings in 'filters' and a tuple of ooni 'plugins'.
+  Return a list of (plugin, function) tuples that match 'filter' in 'plugins'.
+  """
+  def get_tests_by_filter(self, filters, plugins):
+    ret_functions = []
+
+    for plugin in plugins:
+     for function_ptr in dir(plugin):
+       if function_ptr.endswith(filters):
+         ret_functions.append((plugin,function_ptr))
+    return ret_functions
+
+  """
+  Expect a list of (plugin, function) tuples that must be ran, and three strings 'clean'
+  'dirty' and 'failed'.
+  Run the tests and print 'clean','dirty' or 'failed' according to the test result.
+  """
+  def run_tests(self, tests, clean="clean", dirty="dirty", failed="failed"):
+    for test in tests:
+      filter_result = getattr(test[0], test[1])(self)
+      if filter_result == True:
+        print test[1] + ": " + clean
+      elif filter_result == None:
+        print test[1] + ": " + failed
+      else:
+        print test[1] + ": " + dirty
+
+  """
+  Find all the tests belonging to plgoo 'self' and run them.
+  We know the tests when we see them because they end in 'filter'.
+  """
+  def run_plgoo_tests(self, filter):
+    for function_ptr in dir(self):
+      if function_ptr.endswith(filter):
+        getattr(self, function_ptr)()
+
+PLUGIN_PATHS = [os.path.join(os.getcwd(), "ooni", "plugins")]
+RESERVED_NAMES = [ "skel_plgoo" ]
+
+class Plugooni():
+  def __init__(self, args):
+    self.in_ = sys.stdin
+    self.out = sys.stdout
+    self.debug = False
+    self.loadall = True
+    self.plugin_name = args.plugin_name
+    self.listfile = args.listfile
+
+    self.plgoo_found = False
+
+  # Print all the plugoons to stdout.
+  def list_plugoons(self):
+    print "Plugooni list:"
+    for loader, name, ispkg in pkgutil.iter_modules(PLUGIN_PATHS):
+      if name not in RESERVED_NAMES:
+        print "\t%s" %(name.split("_")[0])
+
+  # Return name of the plgoo class of a plugin.
+  # We know because it always ends with "Plugin".
+  def get_plgoo_class(self,plugin):
+    for memb_name, memb in inspect.getmembers(plugin, inspect.isclass):
+      if memb.__name__.endswith("Plugin"):
+        return memb
+
+  # This function is responsible for loading and running the plugoons
+  # the user wants to run.
+  def run(self, command_object):
+    print "Plugooni: the ooni plgoo plugin module loader"
+
+    # iterate all modules
+    for loader, name, ispkg in pkgutil.iter_modules(PLUGIN_PATHS):
+      # see if this module should be loaded
+      if (self.plugin_name == "all") or (name == self.plugin_name+"_plgoo"):
+        self.plgoo_found = True # we found at least one plgoo!
+
+        file, pathname, desc = imp.find_module(name, PLUGIN_PATHS)
+        # load module
+        plugin = imp.load_module(name, file, pathname, desc)
+        # instantiate plgoo class and call its ooni_main()
+        self.get_plgoo_class(plugin)().ooni_main(command_object)
+
+    # if we couldn't find the plgoo; whine to the user
+    if self.plgoo_found is False:
+      print "Plugooni could not find plugin '%s'!" %(self.plugin_name)
+
+if __name__ == '__main__':
+  self.main()
diff --git a/to-be-ported/very-old/ooni/transparenthttp.py b/to-be-ported/very-old/ooni/transparenthttp.py
new file mode 100644
index 0000000..311fb32
--- /dev/null
+++ b/to-be-ported/very-old/ooni/transparenthttp.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+#
+# Captive Portal Detection With Multi-Vendor Emulation
+# by Jacob Appelbaum <jacob at appelbaum.net>
+#
+# This module performs multiple tests that match specific vendor
+# mitm proxies
+
+import sys
+import ooni.http
+import ooni.report
+
+class TransparentHTTPProxy():
+  def __init__(self, args):
+    self.in_ = sys.stdin
+    self.out = sys.stdout
+    self.debug = False
+    self.logger = ooni.report.Log().logger
+
+  def TransparentHTTPProxy_Tests(self):
+    print "Transparent HTTP Proxy:"
+    filter_name = "_TransparentHTTP_Tests"
+    tests = [ooni.http]
+    for test in tests:
+     for function_ptr in dir(test):
+       if function_ptr.endswith(filter_name):
+         filter_result = getattr(test, function_ptr)(self)
+         if filter_result == True:
+           print function_ptr + " thinks the network is clean"
+         elif filter_result == None:
+             print function_ptr + " failed"
+         else:
+           print function_ptr + " thinks the network is dirty"
+
+  def main(self):
+    for function_ptr in dir(self):
+      if function_ptr.endswith("_Tests"):
+        getattr(self, function_ptr)()
+
+if __name__ == '__main__':
+  self.main()
diff --git a/to-be-ported/very-old/traceroute.py b/to-be-ported/very-old/traceroute.py
new file mode 100644
index 0000000..e8252c1
--- /dev/null
+++ b/to-be-ported/very-old/traceroute.py
@@ -0,0 +1,108 @@
+try:
+    from dns import resolver
+except:
+    print "Error: dnspython is not installed (http://www.dnspython.org/)"
+import gevent
+import os
+import plugoo
+
+try:
+    import scapy
+except:
+    print "Error: traceroute plugin requires scapy to be installed (http://www.secdev.org/projects/scapy)"
+
+from plugoo.assets import Asset
+from plugoo.tests import Test
+
+import socket
+
+__plugoo__ = "Traceroute"
+__desc__ = "Performs TTL walking tests"
+
+class TracerouteAsset(Asset):
+    def __init__(self, file=None):
+        self = Asset.__init__(self, file)
+
+
+class Traceroute(Test):
+    """A *very* quick and dirty traceroute implementation, UDP and TCP
+    """
+    def traceroute(self, dst, dst_port=3880, src_port=3000, proto="tcp", max_hops=30):  # raw ICMP receive socket requires root/CAP_NET_RAW
+        dest_addr = socket.gethostbyname(dst)
+        print "Doing traceroute on %s" % dst
+
+        recv = socket.getprotobyname('icmp')  # routers answer expired-TTL probes with ICMP, read via a raw socket
+        send = socket.getprotobyname(proto)
+        ttl = 1
+        while True:  # one probe per TTL until the destination answers or max_hops is exceeded
+            recv_sock = socket.socket(socket.AF_INET, socket.SOCK_RAW, recv)
+            if proto == "tcp":
+                send_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, send)
+            else:
+                send_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, send)
+            recv_sock.settimeout(10)
+            send_sock.settimeout(10)
+
+            send_sock.setsockopt(socket.SOL_IP, socket.IP_TTL, ttl)  # cap the probe's hop count so intermediate routers reply
+            recv_sock.bind(("", src_port))
+            if proto == "tcp":
+                try:
+                    send_sock.settimeout(2)  # overrides the 10s timeout set above for the connect attempt
+                    send_sock.connect((dst, dst_port))
+                except socket.timeout:
+                    pass  # timeout is an expected outcome when the TTL expires mid-path
+
+                except Exception, e:
+                    print "Error doing connect %s" % e
+            else:
+                send_sock.sendto("", (dst, dst_port))  # empty UDP datagram probe
+
+            curr_addr = None
+            try:
+                print "receiving data..."
+                _, curr_addr = recv_sock.recvfrom(512)
+                curr_addr = curr_addr[0]  # keep only the replying router's IP from the (host, port) pair
+
+            except socket.error, e:
+                print "SOCKET ERROR: %s" % e
+
+            except Exception, e:
+                print "ERROR: %s" % e
+
+            finally:
+                send_sock.close()  # both sockets are recreated at the top of each TTL iteration
+                recv_sock.close()
+
+            if curr_addr is not None:
+                curr_host = "%s" % curr_addr
+            else:
+                curr_host = "*"  # no reply within the timeout for this hop
+
+            print "%d\t%s" % (ttl, curr_host)
+
+            if curr_addr == dest_addr or ttl > max_hops:
+                break
+
+            ttl += 1
+
+
+    def experiment(self, *a, **kw):
+        # this is just a dirty hack
+        address = kw['data'][0]  # presumably the target host from the asset row — confirm against the Test.run caller
+
+        self.traceroute(address)
+
+def run(ooni):
+    """Run the test"""
+    config = ooni.config
+    urls = []  # NOTE(review): never used in this function
+
+    traceroute_experiment = TracerouteAsset(os.path.join(config.main.assetdir, \
+                                            config.tests.traceroute))  # asset file named by config.tests.traceroute under the asset dir
+
+    assets = [traceroute_experiment]
+
+    traceroute = Traceroute(ooni)
+    ooni.logger.info("starting traceroute test")
+    traceroute.run(assets)
+    ooni.logger.info("finished")



More information about the tor-commits mailing list