tor-commits
February 2020
- 22 participants
- 1455 discussions
                    
commit 4fa9a1fb149ae3694fd82b628a936c6470c39726
Author: Damian Johnson <atagar(a)torproject.org>
Date:   Sun Feb 9 18:08:43 2020 -0800
    Drop legacy pep8 support
    
    The pep8 module renamed itself to pycodestyle at Guido's request a while
    ago. Stem was compatible with both, but the rename was long enough ago
    that we can drop support for the old module.
---
 stem/util/test_tools.py | 10 ++--------
 1 file changed, 2 insertions(+), 8 deletions(-)
diff --git a/stem/util/test_tools.py b/stem/util/test_tools.py
index 117d273e..5888eb86 100644
--- a/stem/util/test_tools.py
+++ b/stem/util/test_tools.py
@@ -44,7 +44,6 @@ import stem.util.enum
 import stem.util.system
 
 CONFIG = stem.util.conf.config_dict('test', {
-  'pep8.ignore': [],  # TODO: drop with stem 2.x, legacy alias for pycodestyle.ignore
   'pycodestyle.ignore': [],
   'pyflakes.ignore': [],
   'exclude_paths': [],
@@ -345,8 +344,6 @@ def is_pycodestyle_available():
 
   if _module_exists('pycodestyle'):
     import pycodestyle
-  elif _module_exists('pep8'):
-    import pep8 as pycodestyle
   else:
     return False
 
@@ -417,7 +414,7 @@ def stylistic_issues(paths, check_newlines = False, check_exception_keyword = Fa
   ignore_for_file = []
   ignore_all_for_files = []
 
-  for rule in CONFIG['pycodestyle.ignore'] + CONFIG['pep8.ignore']:
+  for rule in CONFIG['pycodestyle.ignore']:
     if '=>' in rule:
       path, rule_entry = rule.split('=>', 1)
 
@@ -441,10 +438,7 @@ def stylistic_issues(paths, check_newlines = False, check_exception_keyword = Fa
     return False
 
   if is_pycodestyle_available():
-    if _module_exists('pep8'):
-      import pep8 as pycodestyle
-    else:
-      import pycodestyle
+    import pycodestyle
 
     class StyleReport(pycodestyle.BaseReport):
       def init_file(self, filename, lines, expected, line_offset):
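
For context, the shim being removed is the common dual-import fallback for a
renamed module. A minimal standalone sketch of the pattern (the helper name
below is illustrative, not stem's actual code):

  try:
    import pycodestyle
  except ImportError:
    try:
      import pep8 as pycodestyle  # legacy, pre-rename module name
    except ImportError:
      pycodestyle = None  # style checks get skipped entirely

  def is_style_checker_available():
    # mirrors the intent of stem's is_pycodestyle_available()
    return pycodestyle is not None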
                    
                  
                  
                          
                            
[stem/master] Drop special handling for detached signature @type
by atagar@torproject.org 10 Feb '20
                    
commit 93a132b1c8ae97440be4831dc3e6173bca2f8720
Author: Damian Johnson <atagar(a)torproject.org>
Date:   Sun Feb 9 18:14:59 2020 -0800
    Drop special handling for detached signature @type
    
    Huh, wonder why this code was still around. Ticket 28615 was closed a long
    while ago...
---
 stem/descriptor/remote.py | 35 ++++++++---------------------------
 1 file changed, 8 insertions(+), 27 deletions(-)
diff --git a/stem/descriptor/remote.py b/stem/descriptor/remote.py
index be633862..0f411a71 100644
--- a/stem/descriptor/remote.py
+++ b/stem/descriptor/remote.py
@@ -111,14 +111,6 @@ MAX_MICRODESCRIPTOR_HASHES = 90
 
 SINGLETON_DOWNLOADER = None
 
-# Detached signatures do *not* have a specified type annotation. But our
-# parsers expect that all descriptors have a type. As such making one up.
-# This may change in the future if these ever get an official @type.
-#
-#   https://trac.torproject.org/projects/tor/ticket/28615
-
-DETACHED_SIGNATURE_TYPE = 'detached-signature'
-
 # Some authorities intentionally break their DirPort to discourage DOS. In
 # particular they throttle the rate to such a degree that requests can take
 # hours to complete. Unfortunately Python's socket timeouts only kick in
@@ -496,24 +488,13 @@ class Query(object):
           raise ValueError('BUG: _download_descriptors() finished without either results or an error')
 
         try:
-          # TODO: special handling until we have an official detatched
-          # signature @type...
-          #
-          #   https://trac.torproject.org/projects/tor/ticket/28615
-
-          if self.descriptor_type.startswith(DETACHED_SIGNATURE_TYPE):
-            results = stem.descriptor.networkstatus._parse_file_detached_sigs(
-              io.BytesIO(self.content),
-              validate = self.validate,
-            )
-          else:
-            results = stem.descriptor.parse_file(
-              io.BytesIO(self.content),
-              self.descriptor_type,
-              validate = self.validate,
-              document_handler = self.document_handler,
-              **self.kwargs
-            )
+          results = stem.descriptor.parse_file(
+            io.BytesIO(self.content),
+            self.descriptor_type,
+            validate = self.validate,
+            document_handler = self.document_handler,
+            **self.kwargs
+          )
 
           for desc in results:
             yield desc
@@ -1082,7 +1063,7 @@ def _guess_descriptor_type(resource):
     elif resource.endswith('/consensus-microdesc'):
       return 'network-status-microdesc-consensus-3 1.0'
     elif resource.endswith('/consensus-signatures'):
-      return '%s 1.0' % DETACHED_SIGNATURE_TYPE
+      return 'detached-signature-3 1.0'
     elif stem.util.tor_tools.is_valid_fingerprint(resource.split('/')[-1]):
       return 'network-status-consensus-3 1.0'
     elif resource.endswith('/bandwidth'):
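
With the special case gone, consensus signatures download through the same
code path as every other descriptor type. A brief sketch, assuming the
directory authorities are reachable; the resource and type string match
_guess_descriptor_type() above:

  import stem.descriptor.remote

  # detached signatures now go through the regular parse_file() path,
  # using the 'detached-signature-3 1.0' annotation
  query = stem.descriptor.remote.Query(
    '/tor/status-vote/next/consensus-signatures',
    'detached-signature-3 1.0',
  )

  for sig in query.run():
    print(sig)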
                    
                  
                  
                          
                            
[stem/master] Rename NetworkStatusEvent desc attribute
by atagar@torproject.org 10 Feb '20

commit c9e64eaebf7905c122a973032c9f7e3d01583bbb
Author: Damian Johnson <atagar(a)torproject.org>
Date:   Sun Feb 9 18:54:12 2020 -0800
    Rename NetworkStatusEvent desc attribute
---
 stem/response/events.py      | 7 ++-----
 test/unit/response/events.py | 2 +-
 2 files changed, 3 insertions(+), 6 deletions(-)
diff --git a/stem/response/events.py b/stem/response/events.py
index 9b8a5504..38708abc 100644
--- a/stem/response/events.py
+++ b/stem/response/events.py
@@ -704,7 +704,7 @@ class NetworkStatusEvent(Event):
 
   The NS event was introduced in tor version 0.1.2.3-alpha.
 
-  :var list desc: :class:`~stem.descriptor.router_status_entry.RouterStatusEntryV3` for the changed descriptors
+  :var list descriptors: :class:`~stem.descriptor.router_status_entry.RouterStatusEntryV3` for the changed descriptors
   """
 
   _SKIP_PARSING = True
@@ -713,10 +713,7 @@ class NetworkStatusEvent(Event):
   def _parse(self):
     content = str(self).lstrip('NS\n').rstrip('\nOK')
 
-    # TODO: For stem 2.0.0 consider changing 'desc' to 'descriptors' to match
-    # our other events.
-
-    self.desc = list(stem.descriptor.router_status_entry._parse_file(
+    self.descriptors = list(stem.descriptor.router_status_entry._parse_file(
       io.BytesIO(str_tools._to_bytes(content)),
       False,
       entry_class = stem.descriptor.router_status_entry.RouterStatusEntryV3,
diff --git a/test/unit/response/events.py b/test/unit/response/events.py
index ca8b3909..33557874 100644
--- a/test/unit/response/events.py
+++ b/test/unit/response/events.py
@@ -1036,7 +1036,7 @@ class TestEvents(unittest.TestCase):
     event = _get_event(NS_EVENT)
 
     self.assertTrue(isinstance(event, stem.response.events.NetworkStatusEvent))
-    self.assertEqual([expected_desc], event.desc)
+    self.assertEqual([expected_desc], event.descriptors)
 
   def test_orconn_event(self):
     event = _get_event(ORCONN_CLOSED)
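
Callers watching NS events pick up the rename as follows. A minimal sketch,
assuming a local tor with its ControlPort at 9051:

  import time

  import stem.control

  def print_ns(event):
    # after this commit the parsed entries live in 'descriptors', not 'desc'
    for entry in event.descriptors:
      print('updated: %s (%s)' % (entry.nickname, entry.fingerprint))

  with stem.control.Controller.from_port(port = 9051) as controller:
    controller.authenticate()
    controller.add_event_listener(print_ns, stem.control.EventType.NS)
    time.sleep(30)  # watch for half a minute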
                    
                  
                  
                          
                            
[stem/master] Drop misspelling workaround
by atagar@torproject.org 10 Feb '20

commit 7ab6310b098955d5b27ea0f4ad7b30d723a820dd
Author: Damian Johnson <atagar(a)torproject.org>
Date:   Sun Feb 9 19:05:08 2020 -0800
    Drop misspelling workaround
---
 stem/util/term.py | 6 ------
 1 file changed, 6 deletions(-)
diff --git a/stem/util/term.py b/stem/util/term.py
index 5554bf6c..06391441 100644
--- a/stem/util/term.py
+++ b/stem/util/term.py
@@ -87,12 +87,6 @@ def encoding(*attrs):
   term_encodings = []
 
   for attr in attrs:
-    # TODO: Account for an earlier misspelled attribute. This should be dropped
-    # in Stem. 2.0.x.
-
-    if attr == 'HILIGHT':
-      attr = 'HIGHLIGHT'
-
     attr = stem.util.str_tools._to_camel_case(attr)
     term_encoding = FG_ENCODING.get(attr, None)
     term_encoding = BG_ENCODING.get(attr, term_encoding)
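
Only callers passing the misspelled 'HILIGHT' attribute are affected; the
supported spelling works as before. A quick sketch:

  from stem.util import term

  # 'HIGHLIGHT' is the supported attribute name; the 'HILIGHT' alias is gone
  print(term.format('attention', term.Color.RED, term.Attr.HIGHLIGHT))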
                    
                  
                  
                          
                            
[stem/master] Enforce read-only usage of manual database
by atagar@torproject.org 10 Feb '20

commit a8296d6071a253b4f2265b651b6d5b5c455278e7
Author: Damian Johnson <atagar(a)torproject.org>
Date:   Sun Feb 9 18:58:59 2020 -0800
    Enforce read-only usage of manual database
    
    Now that we're using Python 3.x, the sqlite module's uri mode allows us to
    open our cache in a read-only manner...
    
      https://docs.python.org/3/library/sqlite3.html#sqlite3.connect
---
 stem/manual.py | 7 +------
 1 file changed, 1 insertion(+), 6 deletions(-)
diff --git a/stem/manual.py b/stem/manual.py
index c1b4bd8f..1a0816c8 100644
--- a/stem/manual.py
+++ b/stem/manual.py
@@ -142,16 +142,11 @@ def query(query, *param):
   # The only reason to explicitly close the sqlite connection is to ensure
   # transactions are committed. Since we're only using read-only access this
   # doesn't matter, and can allow interpreter shutdown to do the needful.
-  #
-  # TODO: When we only support python 3.4+ we can use sqlite's uri argument
-  # to enforce a read-only connection...
-  #
-  #   https://docs.python.org/3/library/sqlite3.html#sqlite3.connect
 
   global DATABASE
 
   if DATABASE is None:
-    DATABASE = sqlite3.connect(CACHE_PATH)
+    DATABASE = sqlite3.connect('file:%s?mode=ro' % CACHE_PATH, uri=True)
 
   return DATABASE.execute(query, param)
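
The uri flag used above is plain standard-library sqlite3 rather than
anything stem-specific. A self-contained sketch, assuming an existing
database at a hypothetical path:

  import sqlite3

  # a read-only connection via a file: URI; writes now raise
  # sqlite3.OperationalError instead of mutating the cache
  conn = sqlite3.connect('file:/tmp/example.sqlite?mode=ro', uri = True)

  try:
    conn.execute('CREATE TABLE demo (x INTEGER)')
  except sqlite3.OperationalError as exc:
    print('write rejected as expected: %s' % exc)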
 
                    
                  
                  
                          
                            
[stem/master] Remove deprecated code
by atagar@torproject.org 10 Feb '20

commit 0eea9842da850c2b95d74bf2bb9714ad8bb4d382
Merge: 886ec10e 7ab6310b
Author: Damian Johnson <atagar(a)torproject.org>
Date:   Sun Feb 9 19:09:06 2020 -0800
    Remove deprecated code
    
    Doubtless there will be more opportunities to simplify, but for now this
    cleans out the low-hanging fruit: Sphinx-documented deprecations and
    simple TODO comments.
 docs/api.rst                                       |  10 +-
 docs/api/descriptor/export.rst                     |   5 -
 docs/api/descriptor/reader.rst                     |   5 -
 docs/contents.rst                                  |   5 +-
 stem/connection.py                                 |  59 --
 stem/control.py                                    | 296 ++--------
 stem/descriptor/__init__.py                        |  37 +-
 stem/descriptor/bandwidth_file.py                  |   5 +-
 stem/descriptor/certificate.py                     |  56 +-
 stem/descriptor/export.py                          | 111 ----
 stem/descriptor/extrainfo_descriptor.py            |  10 +-
 stem/descriptor/hidden_service.py                  |  49 +-
 stem/descriptor/hidden_service_descriptor.py       |   4 -
 stem/descriptor/microdescriptor.py                 |  20 +-
 stem/descriptor/networkstatus.py                   |  58 +-
 stem/descriptor/reader.py                          | 563 -------------------
 stem/descriptor/remote.py                          | 145 +----
 stem/descriptor/router_status_entry.py             |  26 +-
 stem/descriptor/server_descriptor.py               | 123 +---
 stem/directory.py                                  |  12 +-
 stem/exit_policy.py                                |  69 +--
 stem/manual.py                                     |  89 +--
 stem/process.py                                    |  28 +-
 stem/response/events.py                            |  83 +--
 stem/socket.py                                     |  41 --
 stem/util/connection.py                            |  22 +-
 stem/util/log.py                                   |  31 -
 stem/util/proc.py                                  |  17 -
 stem/util/str_tools.py                             |  14 -
 stem/util/system.py                                |  19 -
 stem/util/term.py                                  |   6 -
 stem/util/test_tools.py                            |  20 +-
 stem/version.py                                    |  63 +--
 test/integ/connection/authentication.py            |  19 +-
 test/integ/connection/connect.py                   |  48 +-
 test/integ/control/controller.py                   |  39 +-
 test/integ/process.py                              |  21 +-
 test/integ/response/protocolinfo.py                |   4 +-
 test/integ/socket/control_message.py               |   1 -
 test/integ/util/connection.py                      |   1 -
 test/require.py                                    |   6 +-
 test/settings.cfg                                  |   3 -
 test/unit/control/controller.py                    | 174 ------
 test/unit/descriptor/export.py                     |  94 ----
 test/unit/descriptor/hidden_service_v2.py          |   7 -
 test/unit/descriptor/microdescriptor.py            |   6 -
 .../networkstatus/directory_authority.py           |  10 +-
 test/unit/descriptor/networkstatus/document_v3.py  |   8 +-
 test/unit/descriptor/reader.py                     | 625 ---------------------
 test/unit/descriptor/remote.py                     |  16 +-
 test/unit/descriptor/router_status_entry.py        |   3 +-
 test/unit/descriptor/server_descriptor.py          |  33 --
 test/unit/exit_policy/policy.py                    |  83 +--
 test/unit/response/events.py                       |  49 +-
 test/unit/util/connection.py                       |  34 +-
 test/unit/util/log.py                              |  13 +-
 56 files changed, 239 insertions(+), 3159 deletions(-)
                    
                  
                  
                          
                            
[stem/master] Drop legacy manual config cache support
by atagar@torproject.org 10 Feb '20

commit 384987d61343abcc01e2129d7aba0b2c6291dda4
Author: Damian Johnson <atagar(a)torproject.org>
Date:   Sun Feb 9 19:02:45 2020 -0800
    Drop legacy manual config cache support
    
    Earlier I replaced our usage of stem.util.conf as a cache with sqlite. Faster,
    cleaner, and clearly the right option. We can now drop the legacy code.
---
 stem/manual.py | 82 ----------------------------------------------------------
 1 file changed, 82 deletions(-)
diff --git a/stem/manual.py b/stem/manual.py
index 1a0816c8..3f385ba0 100644
--- a/stem/manual.py
+++ b/stem/manual.py
@@ -385,18 +385,9 @@ class Manual(object):
         it or the schema is out of date
     """
 
-    # TODO: drop _from_config_cache() with stem 2.x
-
     if path is None:
       path = CACHE_PATH
 
-    if path is not None and path.endswith('.sqlite'):
-      return Manual._from_sqlite_cache(path)
-    else:
-      return Manual._from_config_cache(path)
-
-  @staticmethod
-  def _from_sqlite_cache(path):
     if not stem.prereq.is_sqlite_available():
       raise ImportError('Reading a sqlite cache requires the sqlite3 module')
 
@@ -434,41 +425,6 @@ class Manual(object):
       return manual
 
   @staticmethod
-  def _from_config_cache(path):
-    conf = stem.util.conf.Config()
-    conf.load(path, commenting = False)
-
-    config_options = collections.OrderedDict()
-
-    for key in conf.keys():
-      if key.startswith('config_options.'):
-        key = key.split('.')[1]
-
-        if key not in config_options:
-          config_options[key] = ConfigOption(
-            conf.get('config_options.%s.name' % key, ''),
-            conf.get('config_options.%s.category' % key, ''),
-            conf.get('config_options.%s.usage' % key, ''),
-            conf.get('config_options.%s.summary' % key, ''),
-            conf.get('config_options.%s.description' % key, '')
-          )
-
-    manual = Manual(
-      conf.get('name', ''),
-      conf.get('synopsis', ''),
-      conf.get('description', ''),
-      conf.get('commandline_options', collections.OrderedDict()),
-      conf.get('signals', collections.OrderedDict()),
-      conf.get('files', collections.OrderedDict()),
-      config_options,
-    )
-
-    manual.man_commit = conf.get('man_commit', None)
-    manual.stem_commit = conf.get('stem_commit', None)
-
-    return manual
-
-  @staticmethod
   def from_man(man_path = 'tor'):
     """
     Reads and parses a given man page.
@@ -561,14 +517,6 @@ class Manual(object):
       * **IOError** if unsuccessful
     """
 
-    # TODO: drop _save_as_config() with stem 2.x
-
-    if path.endswith('.sqlite'):
-      return self._save_as_sqlite(path)
-    else:
-      return self._save_as_config(path)
-
-  def _save_as_sqlite(self, path):
     if not stem.prereq.is_sqlite_available():
       raise ImportError('Saving a sqlite cache requires the sqlite3 module')
 
@@ -601,36 +549,6 @@ class Manual(object):
 
     os.rename(tmp_path, path)
 
-  def _save_as_config(self, path):
-    conf = stem.util.conf.Config()
-    conf.set('name', self.name)
-    conf.set('synopsis', self.synopsis)
-    conf.set('description', self.description)
-
-    if self.man_commit:
-      conf.set('man_commit', self.man_commit)
-
-    if self.stem_commit:
-      conf.set('stem_commit', self.stem_commit)
-
-    for k, v in self.commandline_options.items():
-      conf.set('commandline_options', '%s => %s' % (k, v), overwrite = False)
-
-    for k, v in self.signals.items():
-      conf.set('signals', '%s => %s' % (k, v), overwrite = False)
-
-    for k, v in self.files.items():
-      conf.set('files', '%s => %s' % (k, v), overwrite = False)
-
-    for k, v in self.config_options.items():
-      conf.set('config_options.%s.category' % k, v.category)
-      conf.set('config_options.%s.name' % k, v.name)
-      conf.set('config_options.%s.usage' % k, v.usage)
-      conf.set('config_options.%s.summary' % k, v.summary)
-      conf.set('config_options.%s.description' % k, v.description)
-
-    conf.save(path)
-
   def __hash__(self):
     return stem.util._hash_attr(self, 'name', 'synopsis', 'description', 'commandline_options', 'signals', 'files', 'config_options', cache = True)
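
With the conf-based cache gone, both save() and from_cache() speak only
sqlite. A round-trip sketch, assuming tor's man page is installed and using
a hypothetical cache path:

  import stem.manual

  manual = stem.manual.Manual.from_man('tor')  # parse the local man page
  manual.save('/tmp/tor_manual.sqlite')        # persist as a sqlite cache

  cached = stem.manual.Manual.from_cache('/tmp/tor_manual.sqlite')
  print(cached.name)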
 
                    
                  
                  
                          
                            
[stem/master] Remove get_config_policy
by atagar@torproject.org 10 Feb '20

commit 8dddc2ab8473937bb6853e6f91db8b817a36cdd9
Author: Damian Johnson <atagar(a)torproject.org>
Date:   Mon Jan 6 16:06:32 2020 -0800
    Remove get_config_policy
    
    Tor exit policies are well specified, but torrcs accept a looser format
    that includes a 'private' keyword, makes ports optional, etc.
    
    Stem 1.x made a best-effort attempt to read these policies via a
    get_config_policy function, but their lack of a formal specification
    makes them a pain. As such get_config_policy() has long been deprecated.
    
    Dropping this function, and with it the ability to read torrc exit
    policies. As a result Controller.get_exit_policy() can no longer fall back
    to reading the torrc when 'GETINFO exit-policy/full' fails. That said,
    it's probably better to fully rely on tor for the exit policy rather than
    continuing to guess at it on our side.
---
 stem/control.py                 | 37 +-----------------
 stem/exit_policy.py             | 69 +---------------------------------
 test/unit/control/controller.py | 51 -------------------------
 test/unit/exit_policy/policy.py | 83 +----------------------------------------
 4 files changed, 5 insertions(+), 235 deletions(-)
diff --git a/stem/control.py b/stem/control.py
index 9fda9d34..5501d1c8 100644
--- a/stem/control.py
+++ b/stem/control.py
@@ -1295,41 +1295,8 @@ class Controller(BaseController):
     policy = self._get_cache('exit_policy')
 
     if not policy:
-      try:
-        policy = stem.exit_policy.ExitPolicy(*self.get_info('exit-policy/full').splitlines())
-        self._set_cache({'exit_policy': policy})
-      except stem.OperationFailed:
-        # There's a few situations where 'GETINFO exit-policy/full' will fail,
-        # most commonly...
-        #
-        #   * Error 551: Descriptor still rebuilding - not ready yet
-        #
-        #     Tor hasn't yet finished making our server descriptor. This often
-        #     arises when tor has first started.
-        #
-        #   * Error 552: Not running in server mode
-        #
-        #     We're not configured to be a relay (no ORPort), or haven't yet
-        #     been able to determine our externally facing IP address.
-        #
-        # When these arise best we can do is infer our policy from the torrc.
-        # Skipping caching so we'll retry GETINFO policy resolution next time
-        # we're called.
-
-        rules = []
-
-        if self.get_conf('ExitRelay') == '0':
-          rules.append('reject *:*')
-
-        if self.get_conf('ExitPolicyRejectPrivate') == '1':
-          rules.append('reject private:*')
-
-        for policy_line in self.get_conf('ExitPolicy', multiple = True):
-          rules += policy_line.split(',')
-
-        rules += self.get_info('exit-policy/default').split(',')
-
-        policy = stem.exit_policy.get_config_policy(rules, self.get_info('address', None))
+      policy = stem.exit_policy.ExitPolicy(*self.get_info('exit-policy/full').splitlines())
+      self._set_cache({'exit_policy': policy})
 
     return policy
 
diff --git a/stem/exit_policy.py b/stem/exit_policy.py
index 0d1e7e42..0150e190 100644
--- a/stem/exit_policy.py
+++ b/stem/exit_policy.py
@@ -50,8 +50,6 @@ exiting to a destination is permissible or not. For instance...
     |- is_private - flag indicating if this was expanded from a 'private' keyword
     +- __str__ - string representation for this rule
 
-  get_config_policy - provides the ExitPolicy based on torrc rules
-
 .. data:: AddressType (enum)
 
   Enumerations for IP address types that can be in an exit policy.
@@ -96,69 +94,6 @@ PRIVATE_ADDRESSES = (
 )
 
 
-def get_config_policy(rules, ip_address = None):
-  """
-  Converts an ExitPolicy found in a torrc to a proper exit pattern. This
-  accounts for...
-
-  * ports being optional
-  * the 'private' keyword
-
-  .. deprecated:: 1.7.0
-
-     Tor's torrc parameters lack a formal spec, making it difficult for this
-     method to be reliable. Callers are encouraged to move to
-     :func:`~stem.control.Controller.get_exit_policy` instead.
-
-  :param str,list rules: comma separated rules or list to be converted
-  :param str ip_address: this relay's IP address for the 'private' policy if
-    it's present, this defaults to the local address
-
-  :returns: :class:`~stem.exit_policy.ExitPolicy` reflected by the rules
-
-  :raises: **ValueError** if input isn't a valid tor exit policy
-  """
-
-  if ip_address and not (stem.util.connection.is_valid_ipv4_address(ip_address) or stem.util.connection.is_valid_ipv6_address(ip_address, allow_brackets = True)):
-    raise ValueError("%s isn't a valid IP address" % ip_address)
-  elif ip_address and stem.util.connection.is_valid_ipv6_address(ip_address, allow_brackets = True) and not (ip_address[0] == '[' and ip_address[-1] == ']'):
-    ip_address = '[%s]' % ip_address  # ExitPolicy validation expects IPv6 addresses to be bracketed
-
-  if isinstance(rules, (bytes, str)):
-    rules = rules.split(',')
-
-  result = []
-
-  for rule in rules:
-    rule = rule.strip()
-
-    if not rule:
-      continue
-
-    if not re.search(':[\\d\\-\\*]+$', rule):
-      rule = '%s:*' % rule
-
-    if 'private' in rule:
-      acceptance = rule.split(' ', 1)[0]
-      port = rule.rsplit(':', 1)[1]
-      addresses = list(PRIVATE_ADDRESSES)
-
-      if ip_address:
-        addresses.append(ip_address)
-      else:
-        try:
-          addresses.append(socket.gethostbyname(socket.gethostname()))
-        except:
-          pass  # we might not have a network connection
-
-      for private_addr in addresses:
-        result.append(ExitPolicyRule('%s %s:%s' % (acceptance, private_addr, port)))
-    else:
-      result.append(ExitPolicyRule(rule))
-
-  return ExitPolicy(*result)
-
-
 def _flag_private_rules(rules):
   """
   Determine if part of our policy was expanded from the 'private' keyword. This
@@ -184,9 +119,7 @@ def _flag_private_rules(rules):
     #   * all rules have the same port range
     #   * all rules have the same acceptance (all accept or reject entries)
     #
-    # The last rule is dynamically based on the relay's public address. It may
-    # not be present if get_config_policy() created this policy and we couldn't
-    # resolve our address.
+    # The last rule is dynamically based on the relay's public address.
 
     last_index = start_index + len(PRIVATE_ADDRESSES)
     rule_set = rules[start_index:last_index]
diff --git a/test/unit/control/controller.py b/test/unit/control/controller.py
index 9628c913..34429f49 100644
--- a/test/unit/control/controller.py
+++ b/test/unit/control/controller.py
@@ -196,57 +196,6 @@ class TestControl(unittest.TestCase):
 
   @patch('stem.control.Controller.get_info')
   @patch('stem.control.Controller.get_conf')
-  def test_get_exit_policy_if_not_relaying(self, get_conf_mock, get_info_mock):
-    # If tor lacks an ORPort, resolved extrnal address, hasn't finished making
-    # our server descriptor (ie. tor just started), etc 'GETINFO
-    # exit-policy/full' will fail.
-
-    get_conf_mock.side_effect = lambda param, **kwargs: {
-      'ExitRelay': '1',
-      'ExitPolicyRejectPrivate': '1',
-      'ExitPolicy': ['accept *:80,   accept *:443', 'accept 43.5.5.5,reject *:22'],
-    }[param]
-
-    expected = ExitPolicy(
-      'reject 0.0.0.0/8:*',
-      'reject 169.254.0.0/16:*',
-      'reject 127.0.0.0/8:*',
-      'reject 192.168.0.0/16:*',
-      'reject 10.0.0.0/8:*',
-      'reject 172.16.0.0/12:*',
-      'reject 1.2.3.4:*',
-      'accept *:80',
-      'accept *:443',
-      'accept 43.5.5.5:*',
-      'reject *:22',
-    )
-
-    # Unfortunate it's a bit tricky to have a mock that raises exceptions in
-    # response to some arguments, and returns a response for others. As such
-    # mapping it to the following function.
-
-    exit_policy_exception = None
-
-    def getinfo_response(param, default = None):
-      if param == 'address':
-        return '1.2.3.4'
-      elif param == 'exit-policy/default':
-        return ''
-      elif param == 'exit-policy/full' and exit_policy_exception:
-        raise exit_policy_exception
-      else:
-        raise ValueError("Unmocked request for 'GETINFO %s'" % param)
-
-    get_info_mock.side_effect = getinfo_response
-
-    exit_policy_exception = stem.OperationFailed('552', 'Not running in server mode')
-    self.assertEqual(str(expected), str(self.controller.get_exit_policy()))
-
-    exit_policy_exception = stem.OperationFailed('551', 'Descriptor still rebuilding - not ready yet')
-    self.assertEqual(str(expected), str(self.controller.get_exit_policy()))
-
-  @patch('stem.control.Controller.get_info')
-  @patch('stem.control.Controller.get_conf')
   def test_get_ports(self, get_conf_mock, get_info_mock):
     """
     Exercises the get_ports() and get_listeners() methods.
diff --git a/test/unit/exit_policy/policy.py b/test/unit/exit_policy/policy.py
index 0cb755ae..f1e74aeb 100644
--- a/test/unit/exit_policy/policy.py
+++ b/test/unit/exit_policy/policy.py
@@ -9,7 +9,6 @@ from unittest.mock import Mock, patch
 
 from stem.exit_policy import (
   DEFAULT_POLICY_RULES,
-  get_config_policy,
   ExitPolicy,
   MicroExitPolicy,
   ExitPolicyRule,
@@ -110,15 +109,8 @@ class TestExitPolicy(unittest.TestCase):
     policy = ExitPolicy('reject *:80-65535', 'accept *:1-65533', 'reject *:*')
     self.assertEqual('accept 1-79', policy.summary())
 
-  def test_without_port(self):
-    policy = get_config_policy('accept 216.58.193.78, reject *')
-    self.assertEqual([ExitPolicyRule('accept 216.58.193.78:*'), ExitPolicyRule('reject *:*')], list(policy))
-
-    policy = get_config_policy('reject6 [2a00:1450:4001:081e:0000:0000:0000:200e]')
-    self.assertEqual([ExitPolicyRule('reject [2a00:1450:4001:081e:0000:0000:0000:200e]:*')], list(policy))
-
   def test_non_private_non_default_policy(self):
-    policy = get_config_policy('reject *:80-65535, accept *:1-65533, reject *:*')
+    policy = ExitPolicy('reject *:80-65535', 'accept *:1-65533', 'reject *:*')
 
     for rule in policy:
       self.assertFalse(rule.is_private())
@@ -130,26 +122,6 @@ class TestExitPolicy(unittest.TestCase):
     self.assertEqual(policy, policy.strip_private())
     self.assertEqual(policy, policy.strip_default())
 
-  def test_all_private_policy(self):
-    for port in ('*', '80', '1-1024'):
-      private_policy = get_config_policy('reject private:%s' % port, '12.34.56.78')
-
-      for rule in private_policy:
-        self.assertTrue(rule.is_private())
-
-      self.assertEqual(ExitPolicy(), private_policy.strip_private())
-
-    # though not commonly done, technically private policies can be accept rules too
-
-    private_policy = get_config_policy('accept private:*')
-    self.assertEqual(ExitPolicy(), private_policy.strip_private())
-
-  @patch('socket.gethostname', Mock(side_effect = IOError('no address')))
-  def test_all_private_policy_without_network(self):
-    for rule in get_config_policy('reject private:80, accept *:80'):
-      # all rules except the ending accept are part of the private policy
-      self.assertEqual(str(rule) != 'accept *:80', rule.is_private())
-
   def test_all_default_policy(self):
     policy = ExitPolicy(*DEFAULT_POLICY_RULES)
 
@@ -159,14 +131,6 @@ class TestExitPolicy(unittest.TestCase):
     self.assertTrue(policy.has_default())
     self.assertEqual(ExitPolicy(), policy.strip_default())
 
-  def test_mixed_private_policy(self):
-    policy = get_config_policy('accept *:80, reject private:1-65533, accept *:*')
-
-    for rule in policy:
-      self.assertTrue(rule.is_accept != rule.is_private())  # only reject rules are the private ones
-
-    self.assertEqual(get_config_policy('accept *:80, accept *:*'), policy.strip_private())
-
   def test_mixed_default_policy(self):
     policy = ExitPolicy('accept *:80', 'accept 127.0.0.1:1-65533', *DEFAULT_POLICY_RULES)
 
@@ -174,12 +138,7 @@ class TestExitPolicy(unittest.TestCase):
       # only accept-all and reject rules are the default ones
       self.assertTrue(rule.is_accept != rule.is_default() or (rule.is_accept and rule.is_address_wildcard() and rule.is_port_wildcard()))
 
-    self.assertEqual(get_config_policy('accept *:80, accept 127.0.0.1:1-65533'), policy.strip_default())
-
-  def test_get_config_policy_with_ipv6(self):
-    # ensure our constructor accepts addresses both with and without brackets
-    self.assertTrue(get_config_policy('reject private:80', 'fe80:0000:0000:0000:0202:b3ff:fe1e:8329').is_exiting_allowed())
-    self.assertTrue(get_config_policy('reject private:80', '[fe80:0000:0000:0000:0202:b3ff:fe1e:8329]').is_exiting_allowed())
+    self.assertEqual(ExitPolicy('accept *:80', 'accept 127.0.0.1:1-65533'), policy.strip_default())
 
   def test_str(self):
     # sanity test for our __str__ method
@@ -268,44 +227,6 @@ class TestExitPolicy(unittest.TestCase):
     self.assertFalse(policy.can_exit_to('127.0.0.1', 79))
     self.assertTrue(policy.can_exit_to('127.0.0.1', 80))
 
-  def test_get_config_policy(self):
-    test_inputs = {
-      '': ExitPolicy(),
-      'reject *': ExitPolicy('reject *:*'),
-      'reject *:*': ExitPolicy('reject *:*'),
-      'reject private': ExitPolicy(
-        'reject 0.0.0.0/8:*',
-        'reject 169.254.0.0/16:*',
-        'reject 127.0.0.0/8:*',
-        'reject 192.168.0.0/16:*',
-        'reject 10.0.0.0/8:*',
-        'reject 172.16.0.0/12:*',
-        'reject 12.34.56.78:*',
-      ),
-      'accept *:80, reject *': ExitPolicy(
-        'accept *:80',
-        'reject *:*',
-      ),
-      '  accept *:80,     reject *   ': ExitPolicy(
-        'accept *:80',
-        'reject *:*',
-      ),
-    }
-
-    for test_input, expected in test_inputs.items():
-      self.assertEqual(expected, get_config_policy(test_input, '12.34.56.78'))
-
-    test_inputs = (
-      'blarg',
-      'accept *:*:*',
-      'acceptt *:80',
-      'accept 257.0.0.1:80',
-      'accept *:999999',
-    )
-
-    for test_input in test_inputs:
-      self.assertRaises(ValueError, get_config_policy, test_input)
-
   def test_pickleability(self):
     """
     Checks that we can unpickle ExitPolicy instances.
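
The supported replacements are to ask tor for its policy, or to build an
ExitPolicy from fully specified rules (no torrc shorthand). A brief sketch,
assuming a local ControlPort at 9051:

  import stem.control
  from stem.exit_policy import ExitPolicy

  # preferred: let tor report the policy; failures of 'GETINFO
  # exit-policy/full' now propagate rather than being guessed around
  with stem.control.Controller.from_port(port = 9051) as controller:
    controller.authenticate()
    policy = controller.get_exit_policy()
    print(policy.can_exit_to('93.184.216.34', 443))

  # or construct one directly from fully specified rules
  policy = ExitPolicy('accept *:80', 'accept *:443', 'reject *:*')
  print(policy.summary())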
                    
                  
                  
                          
                            
[stem/master] Remove deprecated modules
by atagar@torproject.org 10 Feb '20

commit c1c4e7a288d26a2895838f2ed121786078db42fe
Author: Damian Johnson <atagar(a)torproject.org>
Date:   Mon Jan 6 15:07:47 2020 -0800
    Remove deprecated modules
    
    We're dropping stem.descriptor's reader and export module due to lack of use...
    
      * I wrote stem.descriptor.reader at Karsten's suggestion to read descriptors
        from disk and track when those on-disk files change. The design seemed
        intended for use within CollecTor, but it was never used there.
    
        In practice stem.descriptor.from_file() provides a simpler mechanism
        to read descriptors from disk.
    
      * stem.descriptor.export was contributed by a university student in Stem's
        early days. I've never used it nor found anyone else who does.
    
        This module serializes descriptors to a CSV, which is moot since
        descriptors already have a string representation we can read and
        write...
    
          with open('/path/to/descriptor', 'w') as descriptor_file:
            descriptor_file.write(str(my_descriptor))
    
          my_descriptor = stem.descriptor.from_file('/path/to/descriptor', 'server-descriptor 1.0')
---
 docs/api.rst                     |  10 +-
 docs/api/descriptor/export.rst   |   5 -
 docs/api/descriptor/reader.rst   |   5 -
 docs/contents.rst                |   5 +-
 stem/control.py                  |   1 -
 stem/descriptor/__init__.py      |  12 +-
 stem/descriptor/export.py        | 111 -------
 stem/descriptor/reader.py        | 563 -----------------------------------
 test/integ/control/controller.py |   1 -
 test/settings.cfg                |   3 -
 test/unit/descriptor/export.py   |  94 ------
 test/unit/descriptor/reader.py   | 625 ---------------------------------------
 12 files changed, 7 insertions(+), 1428 deletions(-)
diff --git a/docs/api.rst b/docs/api.rst
index cbbf0dd0..58604e90 100644
--- a/docs/api.rst
+++ b/docs/api.rst
@@ -21,11 +21,9 @@ Controller
 Descriptors
 -----------
 
-To read descriptors from disk use :func:`~stem.descriptor.__init__.parse_file` for
-individual files and `stem.descriptor.reader
-<api/descriptor/reader.html>`_ for batches. You can also use
-`stem.descriptor.remote <api/descriptor/remote.html>`_ to download descriptors
-remotely like Tor does.
+To read descriptors from disk use :func:`~stem.descriptor.__init__.parse_file`.
+You can also use `stem.descriptor.remote <api/descriptor/remote.html>`_ to
+download descriptors remotely like Tor does.
 
 * **Classes**
 
@@ -41,10 +39,8 @@ remotely like Tor does.
  * `stem.descriptor.certificate <api/descriptor/certificate.html>`_ - `Ed25519 certificates <https://gitweb.torproject.org/torspec.git/tree/cert-spec.txt>`_.
 
 * `stem.directory <api/directory.html>`_ - Directory authority and fallback directory information.
-* `stem.descriptor.reader <api/descriptor/reader.html>`_ - Reads and parses descriptor files from disk.
 * `stem.descriptor.remote <api/descriptor/remote.html>`_ - Downloads descriptors from directory mirrors and authorities.
 * `stem.descriptor.collector <api/descriptor/collector.html>`_ - Downloads past descriptors from `CollecTor <https://metrics.torproject.org/collector.html>`_.
-* `stem.descriptor.export <api/descriptor/export.html>`_ - Exports descriptors to other formats.
 
 Utilities
 ---------
diff --git a/docs/api/descriptor/export.rst b/docs/api/descriptor/export.rst
deleted file mode 100644
index a39e7773..00000000
--- a/docs/api/descriptor/export.rst
+++ /dev/null
@@ -1,5 +0,0 @@
-Descriptor Exporter
-===================
-
-.. automodule:: stem.descriptor.export
-
diff --git a/docs/api/descriptor/reader.rst b/docs/api/descriptor/reader.rst
deleted file mode 100644
index 89c1a69f..00000000
--- a/docs/api/descriptor/reader.rst
+++ /dev/null
@@ -1,5 +0,0 @@
-Descriptor Reader
-=================
-
-.. automodule:: stem.descriptor.reader
-
diff --git a/docs/contents.rst b/docs/contents.rst
index 87e75220..99ca686b 100644
--- a/docs/contents.rst
+++ b/docs/contents.rst
@@ -46,6 +46,7 @@ Contents
    api/descriptor/certificate
    api/descriptor/collector
    api/descriptor/descriptor
+   api/descriptor/remote
    api/descriptor/server_descriptor
    api/descriptor/extrainfo_descriptor
    api/descriptor/microdescriptor
@@ -54,10 +55,6 @@ Contents
    api/descriptor/hidden_service
    api/descriptor/tordnsel
 
-   api/descriptor/export
-   api/descriptor/reader
-   api/descriptor/remote
-
    api/util/init
    api/util/conf
    api/util/connection
diff --git a/stem/control.py b/stem/control.py
index 4adec330..9fda9d34 100644
--- a/stem/control.py
+++ b/stem/control.py
@@ -258,7 +258,6 @@ import threading
 import time
 
 import stem.descriptor.microdescriptor
-import stem.descriptor.reader
 import stem.descriptor.router_status_entry
 import stem.descriptor.server_descriptor
 import stem.exit_policy
diff --git a/stem/descriptor/__init__.py b/stem/descriptor/__init__.py
index fff08910..11fff944 100644
--- a/stem/descriptor/__init__.py
+++ b/stem/descriptor/__init__.py
@@ -110,12 +110,10 @@ __all__ = [
   'bandwidth_file',
   'certificate',
   'collector',
-  'export',
   'extrainfo_descriptor',
   'hidden_service',
   'microdescriptor',
   'networkstatus',
-  'reader',
   'remote',
   'router_status_entry',
   'server_descriptor',
@@ -297,10 +295,6 @@ def parse_file(descriptor_file, descriptor_type = None, validate = False, docume
   * The filename if it matches something from tor's data directory. For
     instance, tor's 'cached-descriptors' contains server descriptors.
 
-  This is a handy function for simple usage, but if you're reading multiple
-  descriptor files you might want to consider the
-  :class:`~stem.descriptor.reader.DescriptorReader`.
-
   Descriptor types include the following, including further minor versions (ie.
   if we support 1.1 then we also support everything from 1.0 and most things
   from 1.2, but not 2.0)...
@@ -940,9 +934,9 @@ class Descriptor(object):
   def get_archive_path(self):
     """
     If this descriptor came from an archive then provides its path within the
-    archive. This is only set if the descriptor came from a
-    :class:`~stem.descriptor.reader.DescriptorReader`, and is **None** if this
-    descriptor didn't come from an archive.
+    archive. This is only set if the descriptor was read by
+    :class:`~stem.descriptor.__init__.parse_file` from an archive, and **None**
+    otherwise.
 
     :returns: **str** with the descriptor's path within the archive
     """
diff --git a/stem/descriptor/export.py b/stem/descriptor/export.py
deleted file mode 100644
index 35835d7c..00000000
--- a/stem/descriptor/export.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# Copyright 2012-2020, Damian Johnson and The Tor Project
-# See LICENSE for licensing information
-
-"""
-Toolkit for exporting descriptors to other formats.
-
-**Module Overview:**
-
-::
-
-  export_csv - Exports descriptors to a CSV
-  export_csv_file - Writes exported CSV output to a file
-
-.. deprecated:: 1.7.0
-
-   This module will likely be removed in Stem 2.0 due to lack of usage. If you
-   use this modle please `let me know <https://www.atagar.com/contact/>`_.
-"""
-
-import io
-import csv
-
-import stem.descriptor
-import stem.prereq
-
-
-class _ExportDialect(csv.excel):
-  lineterminator = '\n'
-
-
-def export_csv(descriptors, included_fields = (), excluded_fields = (), header = True):
-  """
-  Provides a newline separated CSV for one or more descriptors. If simply
-  provided with descriptors then the CSV contains all of its attributes,
-  labeled with a header row. Either 'included_fields' or 'excluded_fields' can
-  be used for more granular control over its attributes and the order.
-
-  :param Descriptor,list descriptors: either a
-    :class:`~stem.descriptor.Descriptor` or list of descriptors to be exported
-  :param list included_fields: attributes to include in the csv
-  :param list excluded_fields: attributes to exclude from the csv
-  :param bool header: if **True** then the first line will be a comma separated
-    list of the attribute names
-
-  :returns: **str** of the CSV for the descriptors, one per line
-  :raises: **ValueError** if descriptors contain more than one descriptor type
-  """
-
-  output_buffer = io.StringIO()
-  export_csv_file(output_buffer, descriptors, included_fields, excluded_fields, header)
-  return output_buffer.getvalue()
-
-
-def export_csv_file(output_file, descriptors, included_fields = (), excluded_fields = (), header = True):
-  """
-  Similar to :func:`stem.descriptor.export.export_csv`, except that the CSV is
-  written directly to a file.
-
-  :param file output_file: file to be written to
-  :param Descriptor,list descriptors: either a
-    :class:`~stem.descriptor.Descriptor` or list of descriptors to be exported
-  :param list included_fields: attributes to include in the csv
-  :param list excluded_fields: attributes to exclude from the csv
-  :param bool header: if **True** then the first line will be a comma separated
-    list of the attribute names
-
-  :returns: **str** of the CSV for the descriptors, one per line
-  :raises: **ValueError** if descriptors contain more than one descriptor type
-  """
-
-  if isinstance(descriptors, stem.descriptor.Descriptor):
-    descriptors = (descriptors,)
-
-  if not descriptors:
-    return
-
-  descriptor_type = type(descriptors[0])
-  descriptor_type_label = descriptor_type.__name__
-  included_fields = list(included_fields)
-
-  # If the user didn't specify the fields to include then export everything,
-  # ordered alphabetically. If they did specify fields then make sure that
-  # they exist.
-
-  desc_attr = sorted(vars(descriptors[0]).keys())
-
-  if included_fields:
-    for field in included_fields:
-      if field not in desc_attr:
-        raise ValueError("%s does not have a '%s' attribute, valid fields are: %s" % (descriptor_type_label, field, ', '.join(desc_attr)))
-  else:
-    included_fields = [attr for attr in desc_attr if not attr.startswith('_')]
-
-  for field in excluded_fields:
-    try:
-      included_fields.remove(field)
-    except ValueError:
-      pass
-
-  writer = csv.DictWriter(output_file, included_fields, dialect = _ExportDialect(), extrasaction='ignore')
-
-  if header:
-    writer.writeheader()
-
-  for desc in descriptors:
-    if not isinstance(desc, stem.descriptor.Descriptor):
-      raise ValueError('Unable to export a descriptor CSV since %s is not a descriptor.' % type(desc).__name__)
-    elif descriptor_type != type(desc):
-      raise ValueError('To export a descriptor CSV all of the descriptors must be of the same type. First descriptor was a %s but we later got a %s.' % (descriptor_type_label, type(desc)))
-
-    writer.writerow(vars(desc))
diff --git a/stem/descriptor/reader.py b/stem/descriptor/reader.py
deleted file mode 100644
index e75cdb7e..00000000
--- a/stem/descriptor/reader.py
+++ /dev/null
@@ -1,563 +0,0 @@
-# Copyright 2012-2020, Damian Johnson and The Tor Project
-# See LICENSE for licensing information
-
-"""
-Utilities for reading descriptors from local directories and archives. This is
-mostly done through the :class:`~stem.descriptor.reader.DescriptorReader`
-class, which is an iterator for the descriptor data in a series of
-destinations. For example...
-
-::
-
-  my_descriptors = [
-    '/tmp/server-descriptors-2012-03.tar.bz2',
-    '/tmp/archived_descriptors/',
-  ]
-
-  # prints the contents of all the descriptor files
-  with DescriptorReader(my_descriptors) as reader:
-    for descriptor in reader:
-      print descriptor
-
-This ignores files that cannot be processed due to read errors or unparsable
-content. To be notified of skipped files you can register a listener with
-:func:`~stem.descriptor.reader.DescriptorReader.register_skip_listener`.
-
-The :class:`~stem.descriptor.reader.DescriptorReader` keeps track of the last
-modified timestamps for descriptor files that it has read so it can skip
-unchanged files if run again. This listing of processed files can also be
-persisted and applied to other
-:class:`~stem.descriptor.reader.DescriptorReader` instances. For example, the
-following prints descriptors as they're changed over the course of a minute,
-and picks up where it left off if run again...
-
-::
-
-  reader = DescriptorReader(['/tmp/descriptor_data'])
-
-  try:
-    processed_files = load_processed_files('/tmp/used_descriptors')
-    reader.set_processed_files(processed_files)
-  except: pass # could not load, maybe this is the first run
-
-  start_time = time.time()
-
-  while (time.time() - start_time) < 60:
-    # prints any descriptors that have changed since last checked
-    with reader:
-      for descriptor in reader:
-        print descriptor
-
-    time.sleep(1)
-
-  save_processed_files('/tmp/used_descriptors', reader.get_processed_files())
-
-**Module Overview:**
-
-::
-
-  load_processed_files - Loads a listing of processed files
-  save_processed_files - Saves a listing of processed files
-
-  DescriptorReader - Iterator for descriptor data on the local file system
-    |- get_processed_files - provides the listing of files that we've processed
-    |- set_processed_files - sets our tracking of the files we have processed
-    |- register_read_listener - adds a listener for when files are read
-    |- register_skip_listener - adds a listener that's notified of skipped files
-    |- start - begins reading descriptor data
-    |- stop - stops reading descriptor data
-    |- __enter__ / __exit__ - manages the descriptor reader thread in the context
-    +- __iter__ - iterates over descriptor data in unread files
-
-  FileSkipped - Base exception for a file that was skipped
-    |- AlreadyRead - We've already read a file with this last modified timestamp
-    |- ParsingFailure - Contents can't be parsed as descriptor data
-    |- UnrecognizedType - File extension indicates non-descriptor data
-    +- ReadFailed - Wraps an error that was raised while reading the file
-       +- FileMissing - File does not exist
-
-.. deprecated:: 1.8.0
-
-   This module will likely be removed in Stem 2.0 due to lack of usage. If you
-   use this modle please `let me know <https://www.atagar.com/contact/>`_.
-"""
-
-import mimetypes
-import os
-import queue
-import tarfile
-import threading
-
-import stem.descriptor
-import stem.prereq
-import stem.util
-import stem.util.str_tools
-import stem.util.system
-
-# flag to indicate when the reader thread is out of descriptor files to read
-FINISHED = 'DONE'
-
-
-class FileSkipped(Exception):
-  "Base error when we can't provide descriptor data from a file."
-
-
-class AlreadyRead(FileSkipped):
-  """
-  Already read a file with this 'last modified' timestamp or later.
-
-  :param int last_modified: unix timestamp for when the file was last modified
-  :param int last_modified_when_read: unix timestamp for the modification time
-    when we last read this file
-  """
-
-  def __init__(self, last_modified, last_modified_when_read):
-    super(AlreadyRead, self).__init__('File has already been read since it was last modified. modification time: %s, last read: %s' % (last_modified, last_modified_when_read))
-    self.last_modified = last_modified
-    self.last_modified_when_read = last_modified_when_read
-
-
-class ParsingFailure(FileSkipped):
-  """
-  File contents could not be parsed as descriptor data.
-
-  :param ValueError exception: issue that arose when parsing
-  """
-
-  def __init__(self, parsing_exception):
-    super(ParsingFailure, self).__init__(parsing_exception)
-    self.exception = parsing_exception
-
-
-class UnrecognizedType(FileSkipped):
-  """
-  File doesn't contain descriptor data. This could either be due to its file
-  type or because it doesn't conform to a recognizable descriptor type.
-
-  :param tuple mime_type: the (type, encoding) tuple provided by mimetypes.guess_type()
-  """
-
-  def __init__(self, mime_type):
-    super(UnrecognizedType, self).__init__('Unrecognized mime type: %s (%s)' % mime_type)
-    self.mime_type = mime_type
-
-
-class ReadFailed(FileSkipped):
-  """
-  An IOError occurred while trying to read the file.
-
-  :param IOError exception: issue that arose when reading the file, **None** if
-    this arose due to the file not being present
-  """
-
-  def __init__(self, read_exception):
-    super(ReadFailed, self).__init__(read_exception)
-    self.exception = read_exception
-
-
-class FileMissing(ReadFailed):
-  'File does not exist.'
-
-  def __init__(self):
-    super(FileMissing, self).__init__('File does not exist')
-
-
-def load_processed_files(path):
-  """
-  Loads a dictionary of 'path => last modified timestamp' mappings, as
-  persisted by :func:`~stem.descriptor.reader.save_processed_files`, from a
-  file.
-
-  :param str path: location to load the processed files dictionary from
-
-  :returns: **dict** of 'path (**str**) => last modified unix timestamp
-    (**int**)' mappings
-
-  :raises:
-    * **IOError** if unable to read the file
-    * **TypeError** if unable to parse the file's contents
-  """
-
-  processed_files = {}
-
-  with open(path, 'rb') as input_file:
-    for line in input_file.readlines():
-      line = stem.util.str_tools._to_unicode(line.strip())
-
-      if not line:
-        continue  # skip blank lines
-
-      if ' ' not in line:
-        raise TypeError('Malformed line: %s' % line)
-
-      path, timestamp = line.rsplit(' ', 1)
-
-      if not os.path.isabs(path):
-        raise TypeError("'%s' is not an absolute path" % path)
-      elif not timestamp.isdigit():
-        raise TypeError("'%s' is not an integer timestamp" % timestamp)
-
-      processed_files[path] = int(timestamp)
-
-  return processed_files
-
-
-def save_processed_files(path, processed_files):
-  """
-  Persists a dictionary of 'path => last modified timestamp' mappings (as
-  provided by the DescriptorReader's
-  :func:`~stem.descriptor.reader.DescriptorReader.get_processed_files` method)
-  so that they can be loaded later and applied to another
-  :class:`~stem.descriptor.reader.DescriptorReader`.
-
-  :param str path: location to save the processed files dictionary to
-  :param dict processed_files: 'path => last modified' mappings
-
-  :raises:
-    * **IOError** if unable to write to the file
-    * **TypeError** if processed_files is of the wrong type
-  """
-
-  # makes the parent directory if it doesn't already exist
-
-  try:
-    path_dir = os.path.dirname(path)
-
-    if not os.path.exists(path_dir):
-      os.makedirs(path_dir)
-  except OSError as exc:
-    raise IOError(exc)
-
-  with open(path, 'w') as output_file:
-    for path, timestamp in list(processed_files.items()):
-      if not os.path.isabs(path):
-        raise TypeError('Only absolute paths are acceptable: %s' % path)
-
-      output_file.write('%s %i\n' % (path, timestamp))
-
-
-class DescriptorReader(object):
-  """
-  Iterator for the descriptor data on the local file system. This can process
-  text files, tarball archives (gzip or bzip2), or recurse directories.
-
-  By default this limits the number of descriptors that we'll read ahead before
-  waiting for our caller to fetch some of them. This is included to avoid
-  unbounded memory usage.
-
-  Our persistence_path argument is a convenient way to persist the listing of
-  files we have processed between runs. However, it doesn't allow for error
-  handling. If you want that, use the
-  :func:`~stem.descriptor.reader.load_processed_files` and
-  :func:`~stem.descriptor.reader.save_processed_files` functions instead.
-
-  :param str,list target: path or list of paths for files or directories to be read from
-  :param bool validate: checks the validity of the descriptor's content if
-    **True**, skips these checks otherwise
-  :param bool follow_links: determines if we'll follow symlinks when traversing
-    directories
-  :param int buffer_size: descriptors we'll buffer before waiting for some to
-    be read, this is unbounded if zero
-  :param str persistence_path: if set we will load and save processed file
-    listings from this path; errors are ignored
-  :param stem.descriptor.__init__.DocumentHandler document_handler: method in
-    which to parse :class:`~stem.descriptor.networkstatus.NetworkStatusDocument`
-  :param dict kwargs: additional arguments for the descriptor constructor
-  """
-
-  def __init__(self, target, validate = False, follow_links = False, buffer_size = 100, persistence_path = None, document_handler = stem.descriptor.DocumentHandler.ENTRIES, **kwargs):
-    self._targets = [target] if isinstance(target, (bytes, str)) else target
-
-    # expand any relative paths we got
-
-    self._targets = list(map(os.path.abspath, self._targets))
-
-    self._validate = validate
-    self._follow_links = follow_links
-    self._persistence_path = persistence_path
-    self._document_handler = document_handler
-    self._kwargs = kwargs
-    self._read_listeners = []
-    self._skip_listeners = []
-    self._processed_files = {}
-
-    self._reader_thread = None
-    self._reader_thread_lock = threading.RLock()
-
-    self._iter_lock = threading.RLock()
-    self._iter_notice = threading.Event()
-
-    self._is_stopped = threading.Event()
-    self._is_stopped.set()
-
-    # Descriptors that we have read but not yet provided to the caller. A
-    # FINISHED entry is used by the reading thread to indicate the end.
-
-    self._unreturned_descriptors = queue.Queue(buffer_size)
-
-    if self._persistence_path:
-      try:
-        processed_files = load_processed_files(self._persistence_path)
-        self.set_processed_files(processed_files)
-      except:
-        pass
-
-  def get_processed_files(self):
-    """
-    For each file that we have read descriptor data from this provides a
-    mapping of the form...
-
-    ::
-
-      absolute path (str) => last modified unix timestamp (int)
-
-    This includes entries set through the
-    :func:`~stem.descriptor.reader.DescriptorReader.set_processed_files`
-    method. Each run resets this to only the files that were present during
-    that run.
-
-    :returns: **dict** with the absolute paths and unix timestamp for the last
-      modified times of the files we have processed
-    """
-
-    # make sure that we only provide back absolute paths
-    return dict((os.path.abspath(k), v) for (k, v) in list(self._processed_files.items()))
-
-  def set_processed_files(self, processed_files):
-    """
-    Sets the listing of the files we have processed. Most often this is used
-    with a newly created :class:`~stem.descriptor.reader.DescriptorReader` to
-    pre-populate the listing of descriptor files that we have seen.
-
-    :param dict processed_files: mapping of absolute paths (**str**) to unix
-      timestamps for the last modified time (**int**)
-    """
-
-    self._processed_files = dict(processed_files)
-
-  def register_read_listener(self, listener):
-    """
-    Registers a listener for when files are read. This is executed prior to
-    processing files. Listeners are expected to be of the form...
-
-    ::
-
-      my_listener(path)
-
-    :param functor listener: functor to be notified when files are read
-    """
-
-    self._read_listeners.append(listener)
-
-  def register_skip_listener(self, listener):
-    """
-    Registers a listener for files that are skipped. This listener is expected
-    to be a functor of the form...
-
-    ::
-
-      my_listener(path, exception)
-
-    :param functor listener: functor to be notified of files that are skipped
-      due to read errors or because they couldn't be parsed as valid descriptor data
-    """
-
-    self._skip_listeners.append(listener)
-
-  def get_buffered_descriptor_count(self):
-    """
-    Provides the number of descriptors that are waiting to be iterated over.
-    This is limited to the buffer_size that we were constructed with.
-
-    :returns: **int** for the estimated number of currently enqueued
-      descriptors; this is not entirely reliable
-    """
-
-    return self._unreturned_descriptors.qsize()
-
-  def start(self):
-    """
-    Starts reading our descriptor files.
-
-    :raises: **ValueError** if we're already reading the descriptor files
-    """
-
-    with self._reader_thread_lock:
-      if self._reader_thread:
-        raise ValueError('Already running, you need to call stop() first')
-      else:
-        self._is_stopped.clear()
-        self._reader_thread = threading.Thread(target = self._read_descriptor_files, name='Descriptor reader')
-        self._reader_thread.setDaemon(True)
-        self._reader_thread.start()
-
-  def stop(self):
-    """
-    Stops further reading of descriptor files.
-    """
-
-    with self._reader_thread_lock:
-      self._is_stopped.set()
-      self._iter_notice.set()
-
-      # clears our queue to unblock enqueue calls
-
-      try:
-        while True:
-          self._unreturned_descriptors.get_nowait()
-      except queue.Empty:
-        pass
-
-      self._reader_thread.join()
-      self._reader_thread = None
-
-      if self._persistence_path:
-        try:
-          processed_files = self.get_processed_files()
-          save_processed_files(self._persistence_path, processed_files)
-        except:
-          pass
-
-  def _read_descriptor_files(self):
-    new_processed_files = {}
-    remaining_files = list(self._targets)
-
-    while remaining_files and not self._is_stopped.is_set():
-      target = remaining_files.pop(0)
-
-      if not os.path.exists(target):
-        self._notify_skip_listeners(target, FileMissing())
-        continue
-
-      if os.path.isdir(target):
-        walker = os.walk(target, followlinks = self._follow_links)
-        self._handle_walker(walker, new_processed_files)
-      else:
-        self._handle_file(target, new_processed_files)
-
-    self._processed_files = new_processed_files
-
-    if not self._is_stopped.is_set():
-      self._unreturned_descriptors.put(FINISHED)
-
-    self._iter_notice.set()
-
-  def __iter__(self):
-    with self._iter_lock:
-      while not self._is_stopped.is_set():
-        try:
-          descriptor = self._unreturned_descriptors.get_nowait()
-
-          if descriptor == FINISHED:
-            break
-          else:
-            yield descriptor
-        except queue.Empty:
-          self._iter_notice.wait()
-          self._iter_notice.clear()
-
-  def _handle_walker(self, walker, new_processed_files):
-    for root, _, files in walker:
-      for filename in files:
-        self._handle_file(os.path.join(root, filename), new_processed_files)
-
-        # this can take a while if, say, we're including the root directory
-        if self._is_stopped.is_set():
-          return
-
-  def _handle_file(self, target, new_processed_files):
-    # This is a file. Register its last modified timestamp and check if
-    # it's a file that we should skip.
-
-    try:
-      last_modified = int(os.stat(target).st_mtime)
-      last_used = self._processed_files.get(target)
-      new_processed_files[target] = last_modified
-    except OSError as exc:
-      self._notify_skip_listeners(target, ReadFailed(exc))
-      return
-
-    if last_used and last_used >= last_modified:
-      self._notify_skip_listeners(target, AlreadyRead(last_modified, last_used))
-      return
-
-    # Block devices and such are never descriptors, and can cause us to block
-    # for quite a while, so we skip anything that isn't a regular file.
-
-    if not os.path.isfile(target):
-      return
-
-    # The mimetypes module only checks the file extension. To actually
-    # check the content (like the 'file' command) we'd need something like
-    # pymagic (https://github.com/cloudburst/pymagic)
-
-    target_type = mimetypes.guess_type(target)
-
-    if target_type[0] in (None, 'text/plain'):
-      # either '.txt' or an unknown type
-      self._handle_descriptor_file(target, target_type)
-    elif stem.util.system.is_tarfile(target):
-      # handles gzip, bz2, and uncompressed tarballs among others
-      self._handle_archive(target)
-    else:
-      self._notify_skip_listeners(target, UnrecognizedType(target_type))
-
-  def _handle_descriptor_file(self, target, mime_type):
-    try:
-      self._notify_read_listeners(target)
-
-      with open(target, 'rb') as target_file:
-        for desc in stem.descriptor.parse_file(target_file, validate = self._validate, document_handler = self._document_handler, **self._kwargs):
-          if self._is_stopped.is_set():
-            return
-
-          self._unreturned_descriptors.put(desc)
-          self._iter_notice.set()
-    except TypeError:
-      self._notify_skip_listeners(target, UnrecognizedType(mime_type))
-    except ValueError as exc:
-      self._notify_skip_listeners(target, ParsingFailure(exc))
-    except IOError as exc:
-      self._notify_skip_listeners(target, ReadFailed(exc))
-
-  def _handle_archive(self, target):
-    try:
-      with tarfile.open(target) as tar_file:
-        self._notify_read_listeners(target)
-
-        for tar_entry in tar_file:
-          if tar_entry.isfile():
-            entry = tar_file.extractfile(tar_entry)
-
-            try:
-              for desc in stem.descriptor.parse_file(entry, validate = self._validate, document_handler = self._document_handler, **self._kwargs):
-                if self._is_stopped.is_set():
-                  return
-
-                desc._set_path(os.path.abspath(target))
-                desc._set_archive_path(tar_entry.name)
-                self._unreturned_descriptors.put(desc)
-                self._iter_notice.set()
-            except TypeError as exc:
-              self._notify_skip_listeners(target, ParsingFailure(exc))
-            except ValueError as exc:
-              self._notify_skip_listeners(target, ParsingFailure(exc))
-            finally:
-              entry.close()
-    except IOError as exc:
-      self._notify_skip_listeners(target, ReadFailed(exc))
-
-  def _notify_read_listeners(self, path):
-    for listener in self._read_listeners:
-      listener(path)
-
-  def _notify_skip_listeners(self, path, exception):
-    for listener in self._skip_listeners:
-      listener(path, exception)
-
-  def __enter__(self):
-    self.start()
-    return self
-
-  def __exit__(self, exit_type, value, traceback):
-    self.stop()
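
For context, a minimal sketch of how the DescriptorReader removed above was
typically driven (the 'descriptors/' directory and 'processed_files' listing
path are hypothetical example inputs)...

  import stem.descriptor.reader

  reader = stem.descriptor.reader.DescriptorReader(
    'descriptors/',
    persistence_path = 'processed_files',  # skip files read on a prior run
  )

  with reader:  # __enter__() calls start(), __exit__() calls stop()
    for desc in reader:
      print(desc.get_path())

The persistence file is plain text with one 'absolute path <space> last
modified unix timestamp' pair per line, matching what save_processed_files()
emits, for example...

  /tmp 123
  /bin/grep 4567
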
diff --git a/test/integ/control/controller.py b/test/integ/control/controller.py
index 257d9fbc..6903c65b 100644
--- a/test/integ/control/controller.py
+++ b/test/integ/control/controller.py
@@ -12,7 +12,6 @@ import unittest
 
 import stem.connection
 import stem.control
-import stem.descriptor.reader
 import stem.descriptor.router_status_entry
 import stem.directory
 import stem.response.protocolinfo
diff --git a/test/settings.cfg b/test/settings.cfg
index 1ec8176e..d22bec42 100644
--- a/test/settings.cfg
+++ b/test/settings.cfg
@@ -229,7 +229,6 @@ pyflakes.ignore stem/util/__init__.py => undefined name 'unicode'
 pyflakes.ignore stem/util/conf.py => undefined name 'unicode'
 pyflakes.ignore stem/util/test_tools.py => 'pyflakes' imported but unused
 pyflakes.ignore stem/util/test_tools.py => 'pycodestyle' imported but unused
-pyflakes.ignore test/unit/descriptor/reader.py => 'bz2' imported but unused
 pyflakes.ignore test/unit/response/events.py => 'from stem import *' used; unable to detect undefined names
 pyflakes.ignore test/unit/response/events.py => *may be undefined, or defined from star imports: stem
 pyflakes.ignore stem/util/str_tools.py => undefined name 'unicode'
@@ -254,8 +253,6 @@ test.unit_tests
 |test.unit.installation.TestInstallation
 |test.unit.descriptor.descriptor.TestDescriptor
 |test.unit.descriptor.compression.TestCompression
-|test.unit.descriptor.export.TestExport
-|test.unit.descriptor.reader.TestDescriptorReader
 |test.unit.descriptor.collector.TestCollector
 |test.unit.descriptor.remote.TestDescriptorDownloader
 |test.unit.descriptor.server_descriptor.TestServerDescriptor
diff --git a/test/unit/descriptor/export.py b/test/unit/descriptor/export.py
deleted file mode 100644
index d27ed241..00000000
--- a/test/unit/descriptor/export.py
+++ /dev/null
@@ -1,94 +0,0 @@
-"""
-Unit tests for stem.descriptor.export.
-"""
-
-import io
-import unittest
-
-import stem.prereq
-
-from stem.descriptor.server_descriptor import RelayDescriptor, BridgeDescriptor
-from stem.descriptor.export import export_csv, export_csv_file
-
-
-class TestExport(unittest.TestCase):
-  def test_minimal_descriptor(self):
-    """
-    Exports a single minimal tor server descriptor.
-    """
-
-    desc = RelayDescriptor.create({
-      'router': 'caerSidi 71.35.133.197 9001 0 0',
-      'published': '2012-03-01 17:15:27',
-    })
-
-    desc_csv = export_csv(desc, included_fields = ('nickname', 'address', 'published'), header = False)
-    expected = 'caerSidi,71.35.133.197,2012-03-01 17:15:27\n'
-    self.assertEqual(expected, desc_csv)
-
-    desc_csv = export_csv(desc, included_fields = ('nickname', 'address', 'published'), header = True)
-    expected = 'nickname,address,published\n' + expected
-    self.assertEqual(expected, desc_csv)
-
-  def test_multiple_descriptors(self):
-    """
-    Exports multiple descriptors, making sure that we get them back in the same
-    order.
-    """
-
-    nicknames = ('relay1', 'relay3', 'relay2', 'caerSidi', 'zeus')
-    descriptors = []
-
-    for nickname in nicknames:
-      router_line = '%s 71.35.133.197 9001 0 0' % nickname
-      descriptors.append(RelayDescriptor.create({'router': router_line}))
-
-    expected = '\n'.join(nicknames) + '\n'
-    self.assertEqual(expected, export_csv(descriptors, included_fields = ('nickname',), header = False))
-
-  def test_file_output(self):
-    """
-    Basic test for the export_csv_file() function, checking that it provides
-    the same output as export_csv().
-    """
-
-    desc = RelayDescriptor.create()
-    desc_csv = export_csv(desc)
-
-    csv_buffer = io.StringIO()
-    export_csv_file(csv_buffer, desc)
-
-    self.assertEqual(desc_csv, csv_buffer.getvalue())
-
-  def test_excludes_private_attr(self):
-    """
-    Checks that the default attributes for our csv output don't include private fields.
-    """
-
-    desc = RelayDescriptor.create()
-    desc_csv = export_csv(desc)
-
-    self.assertTrue(',signature' in desc_csv)
-    self.assertFalse(',_digest' in desc_csv)
-    self.assertFalse(',_annotation_lines' in desc_csv)
-
-  def test_empty_input(self):
-    """
-    Exercises when we don't provide any descriptors.
-    """
-    self.assertEqual('', export_csv([]))
-
-  def test_invalid_attributes(self):
-    """
-    Attempts to make a csv with attributes that don't exist.
-    """
-
-    desc = RelayDescriptor.create()
-    self.assertRaises(ValueError, export_csv, desc, ('nickname', 'blarg!'))
-
-  def test_multiple_descriptor_types(self):
-    """
-    Attempts to make a csv with multiple descriptor types.
-    """
-
-    self.assertRaises(ValueError, export_csv, (RelayDescriptor.create(), BridgeDescriptor.create()))
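
The export_csv() helpers exercised by the tests above can be approximated
with the stdlib csv module. A rough sketch, assuming only that descriptors
expose the requested fields as attributes (as these tests relied on)...

  import csv
  import io

  def export_csv(descriptors, included_fields, header = True):
    # accept a single descriptor too, for parity with the removed helper
    if not isinstance(descriptors, (list, tuple)):
      descriptors = [descriptors]

    output = io.StringIO()
    writer = csv.writer(output, lineterminator = '\n')

    if header:
      writer.writerow(included_fields)

    for desc in descriptors:
      writer.writerow([getattr(desc, field) for field in included_fields])

    return output.getvalue()
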
diff --git a/test/unit/descriptor/reader.py b/test/unit/descriptor/reader.py
deleted file mode 100644
index f49183e5..00000000
--- a/test/unit/descriptor/reader.py
+++ /dev/null
@@ -1,625 +0,0 @@
-"""
-Unit tests for stem.descriptor.reader.
-"""
-
-import getpass
-import io
-import os
-import shutil
-import signal
-import sys
-import tarfile
-import tempfile
-import time
-import unittest
-
-import stem.descriptor.reader
-import stem.util.str_tools
-import stem.util.system
-
-import test.unit.descriptor
-
-from unittest.mock import patch
-
-BASIC_LISTING = """
-/tmp 123
-/bin/grep 4567
-/file with spaces/and \\ stuff 890
-"""
-
-my_dir = os.path.dirname(__file__)
-DESCRIPTOR_TEST_DATA = os.path.join(my_dir, 'data')
-
-TAR_DESCRIPTORS = None
-
-
-def _get_raw_tar_descriptors():
-  global TAR_DESCRIPTORS
-
-  if not TAR_DESCRIPTORS:
-    test_path = os.path.join(DESCRIPTOR_TEST_DATA, 'descriptor_archive.tar')
-    raw_descriptors = []
-
-    with tarfile.open(test_path) as tar_file:
-      for tar_entry in tar_file:
-        if tar_entry.isfile():
-          entry = tar_file.extractfile(tar_entry)
-          entry.readline()  # strip header
-          raw_descriptors.append(entry.read().decode('utf-8', 'replace'))
-          entry.close()
-
-    TAR_DESCRIPTORS = raw_descriptors
-
-  return TAR_DESCRIPTORS
-
-
-class SkipListener:
-  def __init__(self):
-    self.results = []  # (path, exception) tuples that we've received
-
-  def listener(self, path, exception):
-    self.results.append((path, exception))
-
-
-class TestDescriptorReader(unittest.TestCase):
-  def setUp(self):
-    self.temp_directory = tempfile.mkdtemp()
-    self.test_listing_path = os.path.join(self.temp_directory, 'descriptor_processed_files')
-
-  def tearDown(self):
-    shutil.rmtree(self.temp_directory)
-
-  @patch('stem.descriptor.reader.open', create = True)
-  def test_load_processed_files(self, open_mock):
-    """
-    Successful load of content.
-    """
-
-    test_lines = (
-      '/dir/ 0',
-      '/dir/file 12345',
-      '/dir/file with spaces 7138743',
-      '  /dir/with extra space 12345   ',
-      '   \t   ',
-      '',
-      '/dir/after empty line 12345',
-    )
-
-    expected_value = {
-      '/dir/': 0,
-      '/dir/file': 12345,
-      '/dir/file with spaces': 7138743,
-      '/dir/with extra space': 12345,
-      '/dir/after empty line': 12345,
-    }
-
-    open_mock.return_value = io.BytesIO(stem.util.str_tools._to_bytes('\n'.join(test_lines)))
-    self.assertEqual(expected_value, stem.descriptor.reader.load_processed_files(''))
-
-  @patch('stem.descriptor.reader.open', create = True)
-  def test_load_processed_files_empty(self, open_mock):
-    """
-    Tests the load_processed_files() function with an empty file.
-    """
-
-    open_mock.return_value = io.BytesIO(stem.util.str_tools._to_bytes(''))
-    self.assertEqual({}, stem.descriptor.reader.load_processed_files(''))
-
-  @patch('stem.descriptor.reader.open', create = True)
-  def test_load_processed_files_no_file(self, open_mock):
-    """
-    Tests the load_processed_files() function with content that is malformed
-    because it is missing the file path.
-    """
-
-    open_mock.return_value = io.BytesIO(stem.util.str_tools._to_bytes(' 12345'))
-    self.assertRaises(TypeError, stem.descriptor.reader.load_processed_files, '')
-
-  @patch('stem.descriptor.reader.open', create = True)
-  def test_load_processed_files_no_timestamp(self, open_mock):
-    """
-    Tests the load_processed_files() function with content that is malformed
-    because it is missing the timestamp.
-    """
-
-    open_mock.return_value = io.BytesIO(stem.util.str_tools._to_bytes('/dir/file '))
-    self.assertRaises(TypeError, stem.descriptor.reader.load_processed_files, '')
-
-  @patch('stem.descriptor.reader.open', create = True)
-  def test_load_processed_files_malformed_file(self, open_mock):
-    """
-    Tests the load_processed_files() function with content that is malformed
-    because it has an invalid file path.
-    """
-
-    open_mock.return_value = io.BytesIO(stem.util.str_tools._to_bytes('not_an_absolute_file 12345'))
-    self.assertRaises(TypeError, stem.descriptor.reader.load_processed_files, '')
-
-  @patch('stem.descriptor.reader.open', create = True)
-  def test_load_processed_files_malformed_timestamp(self, open_mock):
-    """
-    Tests the load_processed_files() function with content that is malformed
-    because it has a non-numeric timestamp.
-    """
-
-    open_mock.return_value = io.BytesIO(stem.util.str_tools._to_bytes('/dir/file 123a'))
-    self.assertRaises(TypeError, stem.descriptor.reader.load_processed_files, '')
-
-  def test_load_processed_files_from_data(self):
-    """
-    Basic sanity test for loading a processed files listing from disk.
-    """
-
-    test_listing_path = self._make_processed_files_listing(BASIC_LISTING)
-    loaded_listing = stem.descriptor.reader.load_processed_files(test_listing_path)
-
-    expected_listing = {
-      '/tmp': 123,
-      '/bin/grep': 4567,
-      '/file with spaces/and \\ stuff': 890,
-    }
-
-    self.assertEqual(expected_listing, loaded_listing)
-
-  def test_load_processed_files_missing(self):
-    """
-    Tests the load_processed_files() function with a file that doesn't exist.
-    """
-
-    self.assertRaises(IOError, stem.descriptor.reader.load_processed_files, '/non-existent/path')
-
-  def test_load_processed_files_permissions(self):
-    """
-    Tests the load_processed_files() function with a file that can't be read
-    due to permissions.
-    """
-
-    # test relies on being unable to read a file
-
-    if getpass.getuser() == 'root':
-      self.skipTest('(running as root)')
-
-    # Skip the test on windows, since you can only set the file's
-    # read-only flag with os.chmod(). For more information see...
-    # http://docs.python.org/library/os.html#os.chmod
-
-    if stem.util.system.is_windows():
-      self.skipTest('(chmod not functional)')
-
-    test_listing_path = self._make_processed_files_listing(BASIC_LISTING)
-    os.chmod(test_listing_path, 0o077)  # remove read permissions
-    self.assertRaises(IOError, stem.descriptor.reader.load_processed_files, test_listing_path)
-
-  def test_save_processed_files(self):
-    """
-    Basic sanity test for persisting files listings to disk.
-    """
-
-    initial_listing = {
-      '/tmp': 123,
-      '/bin/grep': 4567,
-      '/file with spaces/and \\ stuff': 890,
-    }
-
-    # saves the initial_listing to a file then reloads it
-
-    stem.descriptor.reader.save_processed_files(self.test_listing_path, initial_listing)
-    loaded_listing = stem.descriptor.reader.load_processed_files(self.test_listing_path)
-
-    self.assertEqual(initial_listing, loaded_listing)
-
-  def test_save_processed_files_malformed(self):
-    """
-    Tests the save_processed_files() function with malformed data.
-    """
-
-    missing_filename = {'': 123}
-    relative_filename = {'foobar': 123}
-    string_timestamp = {'/tmp': '123a'}
-    temp_path = tempfile.mkstemp(prefix = 'stem-unit-tests-', text = True)[1]
-
-    for listing in (missing_filename, relative_filename, string_timestamp):
-      self.assertRaises(TypeError, stem.descriptor.reader.save_processed_files, temp_path, listing)
-
-    # Though our attempts to save the processed files fail, we still write an
-    # empty file. Clean it up.
-
-    try:
-      os.remove(temp_path)
-    except:
-      pass
-
-  def test_basic_example(self):
-    """
-    Exercises something similar to the first example in the header
-    documentation, checking that some of the contents match what we'd expect.
-    """
-
-    # snag some of the plaintext descriptors so we can later make sure that we
-    # iterate over them
-
-    descriptor_entries = []
-
-    descriptor_path = os.path.join(DESCRIPTOR_TEST_DATA, 'example_descriptor')
-
-    with open(descriptor_path) as descriptor_file:
-      descriptor_file.readline()  # strip header
-      descriptor_entries.append(descriptor_file.read())
-
-    # running this test multiple times to flush out concurrency issues
-
-    for _ in range(15):
-      remaining_entries = list(descriptor_entries)
-
-      with stem.descriptor.reader.DescriptorReader(descriptor_path) as reader:
-        for descriptor in reader:
-          descriptor_str = str(descriptor)
-
-          if descriptor_str in remaining_entries:
-            remaining_entries.remove(descriptor_str)
-          else:
-            # iterator is providing output that we didn't expect
-            self.fail()
-
-      # check that we've seen all of the descriptor_entries
-      self.assertTrue(len(remaining_entries) == 0)
-
-  def test_multiple_runs(self):
-    """
-    Runs a DescriptorReader instance multiple times over the same content,
-    making sure that it can be used repeatedly.
-    """
-
-    descriptor_path = os.path.join(DESCRIPTOR_TEST_DATA, 'example_descriptor')
-    reader = stem.descriptor.reader.DescriptorReader(descriptor_path)
-
-    with reader:
-      self.assertEqual(1, len(list(reader)))
-
-    # run it a second time, this shouldn't provide any descriptors because we
-    # have already read it
-
-    with reader:
-      self.assertEqual(0, len(list(reader)))
-
-    # clear the DescriptorReader's memory of seeing the file and run it again
-
-    reader.set_processed_files([])
-
-    with reader:
-      self.assertEqual(1, len(list(reader)))
-
-  def test_buffer_size(self):
-    """
-    Checks that we can process sets of descriptors larger than our buffer size,
-    that we don't exceed it, and that we can still stop midway through reading
-    them.
-    """
-
-    reader = stem.descriptor.reader.DescriptorReader(DESCRIPTOR_TEST_DATA, buffer_size = 2)
-
-    with reader:
-      self.assertTrue(reader.get_buffered_descriptor_count() <= 2)
-      time.sleep(0.001)
-      self.assertTrue(reader.get_buffered_descriptor_count() <= 2)
-
-  def test_persistence_path(self):
-    """
-    Checks that the persistence_path argument loads and saves a processed
-    files listing.
-    """
-
-    descriptor_path = os.path.join(DESCRIPTOR_TEST_DATA, 'example_descriptor')
-
-    # First run where the persistence_path doesn't yet exist. This just tests
-    # the saving functionality.
-
-    reader = stem.descriptor.reader.DescriptorReader(descriptor_path, persistence_path = self.test_listing_path)
-
-    with reader:
-      self.assertEqual(1, len(list(reader)))
-
-    # check that we've saved reading example_descriptor
-    self.assertTrue(os.path.exists(self.test_listing_path))
-
-    with open(self.test_listing_path) as persistence_file:
-      persistence_file_contents = persistence_file.read()
-      self.assertTrue(persistence_file_contents.startswith(descriptor_path))
-
-    # Try running again with a new reader but the same persistence path. If it
-    # reads and takes the persistence_path into account then it won't read the
-    # descriptor file. This in essence just tests its loading functionality.
-
-    reader = stem.descriptor.reader.DescriptorReader(descriptor_path, persistence_path = self.test_listing_path)
-
-    with reader:
-      self.assertEqual(0, len(list(reader)))
-
-  def test_archived_paths(self):
-    """
-    Checks the get_path() and get_archive_path() for a tarball.
-    """
-
-    expected_archive_paths = (
-      'descriptor_archive/0/2/02c311d3d789f3f55c0880b5c85f3c196343552c',
-      'descriptor_archive/1/b/1bb798cae15e21479db0bc700767eee4733e9d4a',
-      'descriptor_archive/1/b/1ef75fef564180d8b3f72c6f8635ff0cd855f92c',
-    )
-
-    test_path = os.path.join(DESCRIPTOR_TEST_DATA, 'descriptor_archive.tar')
-
-    with stem.descriptor.reader.DescriptorReader(test_path) as reader:
-      for desc in reader:
-        self.assertEqual(test_path, desc.get_path())
-        self.assertTrue(desc.get_archive_path() in expected_archive_paths)
-
-  def test_archived_uncompressed(self):
-    """
-    Checks that we can read descriptors from an uncompressed archive.
-    """
-
-    expected_results = _get_raw_tar_descriptors()
-    test_path = os.path.join(DESCRIPTOR_TEST_DATA, 'descriptor_archive.tar')
-
-    with stem.descriptor.reader.DescriptorReader(test_path) as reader:
-      read_descriptors = [str(desc) for desc in list(reader)]
-      self.assertEqual(expected_results, read_descriptors)
-
-  def test_archived_gzip(self):
-    """
-    Checks that we can read descriptors from a gzipped archive.
-    """
-
-    expected_results = _get_raw_tar_descriptors()
-    test_path = os.path.join(DESCRIPTOR_TEST_DATA, 'descriptor_archive.tar.gz')
-
-    with stem.descriptor.reader.DescriptorReader(test_path) as reader:
-      read_descriptors = [str(desc) for desc in list(reader)]
-      self.assertEqual(expected_results, read_descriptors)
-
-  def test_archived_bz2(self):
-    """
-    Checks that we can read descriptors from a bzip2-compressed archive.
-    """
-
-    # bz2 support is optional, depending on how python was compiled
-
-    try:
-      import bz2
-    except ImportError:
-      self.skipTest('(bz2 unsupported)')
-
-    expected_results = _get_raw_tar_descriptors()
-    test_path = os.path.join(DESCRIPTOR_TEST_DATA, 'descriptor_archive.tar.bz2')
-
-    with stem.descriptor.reader.DescriptorReader(test_path) as reader:
-      read_descriptors = [str(desc) for desc in list(reader)]
-      self.assertEqual(expected_results, read_descriptors)
-
-  def test_stop(self):
-    """
-    Runs a DescriptorReader over the root directory, then checks that calling
-    stop() makes it terminate in a timely fashion.
-    """
-
-    # Skip on windows since SIGALRM is unavailable
-
-    if stem.util.system.is_windows():
-      self.skipTest('(SIGALRM unavailable)')
-
-    is_test_running = True
-    reader = stem.descriptor.reader.DescriptorReader('/usr')
-
-    # Fails the test after a couple of seconds if we don't finish successfully.
-    # Depending on what we're blocked on this might not work when the test
-    # fails, requiring a manual kill of the test.
-
-    def timeout_handler(signum, frame):
-      if is_test_running:
-        self.fail()
-
-    signal.signal(signal.SIGALRM, timeout_handler)
-    signal.alarm(2)
-
-    reader.start()
-    time.sleep(0.001)
-    reader.stop()
-    is_test_running = False
-
-  def test_get_processed_files(self):
-    """
-    Checks that get_processed_files() provides the expected results after
-    iterating over our test data.
-    """
-
-    desc_path = os.path.join(DESCRIPTOR_TEST_DATA, 'example_descriptor')
-    last_modified = int(os.stat(desc_path).st_mtime)
-
-    reader = stem.descriptor.reader.DescriptorReader(desc_path)
-
-    with reader:
-      list(reader)  # iterates over all of the descriptors
-
-    self.assertEqual({desc_path: last_modified}, reader.get_processed_files())
-
-  def test_skip_nondescriptor_contents(self):
-    """
-    Checks that the reader properly reports when it skips both binary and
-    plaintext non-descriptor files.
-    """
-
-    skip_listener = SkipListener()
-    reader = stem.descriptor.reader.DescriptorReader(os.path.join(DESCRIPTOR_TEST_DATA, 'unparseable'))
-    reader.register_skip_listener(skip_listener.listener)
-
-    expected_skip_files = ('riddle', 'tiny.png', 'vote', 'new_metrics_type', 'cached-microdesc-consensus_with_carriage_returns', 'extrainfo_nonascii_v3_reqs')
-
-    with reader:
-      list(reader)  # iterates over all of the descriptors
-
-    # strip anything with a .swp suffix (vim tmp files)
-
-    skip_listener.results = [(path, exc) for (path, exc) in skip_listener.results if not path.endswith('.swp')]
-
-    if len(skip_listener.results) != len(expected_skip_files):
-      expected_label = ',\n  '.join(expected_skip_files)
-      results_label = ',\n  '.join(['%s (%s)' % (path, exc) for (path, exc) in skip_listener.results])
-
-      self.fail('Skipped files that we should have been able to parse.\n\nExpected:\n  %s\n\nResult:\n  %s' % (expected_label, results_label))
-
-    for skip_path, skip_exception in skip_listener.results:
-      if not os.path.basename(skip_path) in expected_skip_files:
-        self.fail('Unexpected non-descriptor content: %s' % skip_path)
-
-      self.assertTrue(isinstance(skip_exception, stem.descriptor.reader.UnrecognizedType))
-
-  def test_skip_listener_already_read(self):
-    """
-    Checks that calling set_processed_files() prior to reading makes us skip
-    those files. This also doubles for testing that skip listeners are notified
-    of files that we've already read.
-    """
-
-    # path that we want the DescriptorReader to skip
-
-    test_path = os.path.join(DESCRIPTOR_TEST_DATA, 'example_descriptor')
-    initial_processed_files = {test_path: sys.maxsize}
-
-    skip_listener = SkipListener()
-    reader = stem.descriptor.reader.DescriptorReader(test_path)
-    reader.register_skip_listener(skip_listener.listener)
-    reader.set_processed_files(initial_processed_files)
-
-    self.assertEqual(initial_processed_files, reader.get_processed_files())
-
-    with reader:
-      list(reader)  # iterates over all of the descriptors
-
-    self.assertEqual(1, len(skip_listener.results))
-
-    skipped_path, skip_exception = skip_listener.results[0]
-    self.assertEqual(test_path, skipped_path)
-    self.assertTrue(isinstance(skip_exception, stem.descriptor.reader.AlreadyRead))
-    self.assertEqual(sys.maxsize, skip_exception.last_modified_when_read)
-
-  def test_skip_listener_unrecognized_type(self):
-    """
-    Listens for a file that's skipped because its file type isn't recognized.
-    """
-
-    # types are solely based on file extensions, so we make something that
-    # looks like a png image
-
-    test_path = os.path.join(self.temp_directory, 'test.png')
-
-    try:
-      test_file = open(test_path, 'w')
-      test_file.write('test data for test_skip_listener_unrecognized_type()')
-      test_file.close()
-
-      skip_listener = SkipListener()
-      reader = stem.descriptor.reader.DescriptorReader(test_path)
-      reader.register_skip_listener(skip_listener.listener)
-
-      with reader:
-        list(reader)  # iterates over all of the descriptors
-
-      self.assertEqual(1, len(skip_listener.results))
-
-      skipped_path, skip_exception = skip_listener.results[0]
-      self.assertEqual(test_path, skipped_path)
-      self.assertTrue(isinstance(skip_exception, stem.descriptor.reader.UnrecognizedType))
-      self.assertTrue(skip_exception.mime_type in (('image/png', None), ('image/x-png', None)))
-    finally:
-      if os.path.exists(test_path):
-        os.remove(test_path)
-
-  def test_skip_listener_read_failure(self):
-    """
-    Listens for a file that's skipped because we lack read permissions.
-    """
-
-    # test relies on being unable to read a file
-
-    if getpass.getuser() == 'root':
-      self.skipTest('(running as root)')
-    elif stem.util.system.is_windows():
-      self.skipTest('(chmod not functional)')
-
-    test_path = os.path.join(self.temp_directory, 'secret_file')
-
-    try:
-      test_file = open(test_path, 'w')
-      test_file.write('test data for test_skip_listener_read_failure()')
-      test_file.close()
-
-      os.chmod(test_path, 0o077)  # remove read permissions
-
-      skip_listener = SkipListener()
-      reader = stem.descriptor.reader.DescriptorReader(test_path)
-      reader.register_skip_listener(skip_listener.listener)
-
-      with reader:
-        list(reader)  # iterates over all of the descriptors
-
-      self.assertEqual(1, len(skip_listener.results))
-
-      skipped_path, skip_exception = skip_listener.results[0]
-      self.assertEqual(test_path, skipped_path)
-      self.assertTrue(isinstance(skip_exception, stem.descriptor.reader.ReadFailed))
-      self.assertTrue(isinstance(skip_exception.exception, IOError))
-    finally:
-      if os.path.exists(test_path):
-        os.remove(test_path)
-
-  def test_skip_listener_file_missing(self):
-    """
-    Listens for a file that's skipped because the file doesn't exist.
-    """
-
-    test_path = '/non-existent/path'
-
-    skip_listener = SkipListener()
-    reader = stem.descriptor.reader.DescriptorReader(test_path)
-    reader.register_skip_listener(skip_listener.listener)
-
-    with reader:
-      list(reader)  # iterates over all of the descriptors
-
-    self.assertEqual(1, len(skip_listener.results))
-
-    skipped_path, skip_exception = skip_listener.results[0]
-    self.assertEqual(test_path, skipped_path)
-    self.assertTrue(isinstance(skip_exception, stem.descriptor.reader.FileMissing))
-
-  def test_unrecognized_metrics_type(self):
-    """
-    Parses a file that has a valid metrics header, but an unrecognized type.
-    """
-
-    test_path = test.unit.descriptor.get_resource('unparseable/new_metrics_type')
-
-    skip_listener = SkipListener()
-    reader = stem.descriptor.reader.DescriptorReader(test_path)
-    reader.register_skip_listener(skip_listener.listener)
-
-    with reader:
-      list(reader)  # iterates over all of the descriptors
-
-    self.assertEqual(1, len(skip_listener.results))
-
-    skipped_path, skip_exception = skip_listener.results[0]
-    self.assertEqual(test_path, skipped_path)
-    self.assertTrue(isinstance(skip_exception, stem.descriptor.reader.UnrecognizedType))
-    self.assertEqual((None, None), skip_exception.mime_type)
-
-  def _make_processed_files_listing(self, contents):
-    """
-    Writes the given 'processed file' listing to disk, returning the path where
-    it is located.
-    """
-
-    with open(self.test_listing_path, 'w') as test_listing_file:
-      test_listing_file.write(contents)
-
-    return self.test_listing_path
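
As _handle_descriptor_file() above shows, the reader delegated actual
parsing to stem.descriptor.parse_file(), which these classes merely wrapped.
A minimal sketch of using it directly (the descriptor path is a hypothetical
example)...

  import stem.descriptor

  with open('data/example_descriptor', 'rb') as desc_file:
    for desc in stem.descriptor.parse_file(desc_file):
      print(desc)
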
                    10 Feb '20
                    
                        commit af17fda1a457cf3ec5845318556defd0d29272d7
Author: Matthew Finkel <sysrqb(a)torproject.org>
Date:   Mon Feb 10 02:31:45 2020 +0000
    Changelog update and build3
    
    Fix reproducibility issue in macOS build.
---
 projects/tor-browser/Bundle-Data/Docs/ChangeLog.txt | 2 ++
 rbm.conf                                            | 2 +-
 2 files changed, 3 insertions(+), 1 deletion(-)
diff --git a/projects/tor-browser/Bundle-Data/Docs/ChangeLog.txt b/projects/tor-browser/Bundle-Data/Docs/ChangeLog.txt
index c576cdf..c542ef0 100644
--- a/projects/tor-browser/Bundle-Data/Docs/ChangeLog.txt
+++ b/projects/tor-browser/Bundle-Data/Docs/ChangeLog.txt
@@ -12,6 +12,8 @@ Tor Browser 9.0.5 -- February 11 2020
  * Build System
    * All Platforms
      * Bug 32739: Bump clang to 8.0.1
+   * OS X
+     * Bug 33200: Fix permissions on bookmarks.html
 
 Tor Browser 9.0.4 -- January 9 2020
  * All Platforms
diff --git a/rbm.conf b/rbm.conf
index 5e43719..123d1a8 100644
--- a/rbm.conf
+++ b/rbm.conf
@@ -25,7 +25,7 @@ buildconf:
 
 var:
   torbrowser_version: '9.0.5'
-  torbrowser_build: 'build2'
+  torbrowser_build: 'build3'
   torbrowser_incremental_from:
     - 9.0.4
   project_name: tor-browser