commit 749c8d37fd54ea6e41dfca04495c35b47a755f6a
Author: Damian Johnson <atagar@torproject.org>
Date:   Sat Jul 20 09:52:07 2013 -0700
Adding a broken get_microdescriptors() method
Huh. Not really sure why this isn't working. Adding a get_microdescriptors() that works the way the spec seems to indicate it should, but tor gives an empty reply.
% curl http://217.13.197.5:9030/tor/micro/d/6dCl6ab8CLo0LeMjxi/MZgVJiZgWN8WKTesWPBM...
Checking with karsten on irc to see if he knows what's up, and moving on to another part.
---
 stem/descriptor/remote.py       | 40 +++++++++++++++++++++++++++++++++------
 test/integ/descriptor/remote.py | 35 ++++++++++++++++++++++++++++++++++
 2 files changed, 69 insertions(+), 6 deletions(-)
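The same request can be reproduced outside of curl with a couple lines of python. This is only a sketch - the mirror address comes from the curl example above, urllib2 is just one way of making the request, and the digest is a placeholder rather than a real hash:

  import urllib2

  # placeholder digest - real values come from the 'm' lines of a
  # microdescriptor flavored consensus
  digest = '<base64 microdescriptor digest>'

  url = 'http://217.13.197.5:9030/tor/micro/d/%s' % digest
  print(urllib2.urlopen(url, timeout = 30).read())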
diff --git a/stem/descriptor/remote.py b/stem/descriptor/remote.py
index 9df09f4..e0ca612 100644
--- a/stem/descriptor/remote.py
+++ b/stem/descriptor/remote.py
@@ -52,9 +52,10 @@ from stem import Flag
 from stem.util import log
 # Tor has a limit on the number of descriptors we can fetch explicitly by their
-# fingerprint.
+# fingerprint or hashes due to the url length of squid proxies.
-MAX_BATCH_SIZE = 96
+MAX_DESCRIPTOR_BATCH_SIZE = 96
+MAX_MICRODESCRIPTOR_BATCH_SIZE = 92
 # Tor directory authorities as of commit f631b73 (7/4/13). This should only
 # include authorities with 'v3ident':
@@ -318,8 +319,8 @@ class DescriptorDownloader(object):
       fingerprints = [fingerprints]

     if fingerprints:
-      if len(fingerprints) > MAX_BATCH_SIZE:
-        raise ValueError("Unable to request more than %i descriptors at a time by their fingerprints" % MAX_BATCH_SIZE)
+      if len(fingerprints) > MAX_DESCRIPTOR_BATCH_SIZE:
+        raise ValueError("Unable to request more than %i descriptors at a time by their fingerprints" % MAX_DESCRIPTOR_BATCH_SIZE)

       resource = '/tor/server/fp/%s' % '+'.join(fingerprints)
@@ -346,13 +347,40 @@ class DescriptorDownloader(object):
       fingerprints = [fingerprints]

     if fingerprints:
-      if len(fingerprints) > MAX_BATCH_SIZE:
-        raise ValueError("Unable to request more than %i descriptors at a time by their fingerprints" % MAX_BATCH_SIZE)
+      if len(fingerprints) > MAX_DESCRIPTOR_BATCH_SIZE:
+        raise ValueError("Unable to request more than %i descriptors at a time by their fingerprints" % MAX_DESCRIPTOR_BATCH_SIZE)

       resource = '/tor/extra/fp/%s' % '+'.join(fingerprints)

     return self._query(resource, 'extra-info 1.0')
+  def get_microdescriptors(self, hashes):
+    """
+    Provides the microdescriptors with the given hashes. To get these see the
+    'microdescriptor_hashes' attribute of
+    :class:`~stem.descriptor.router_status_entry.RouterStatusEntryV3`. Note
+    that these are only provided via a microdescriptor consensus (such as
+    'cached-microdesc-consensus' in your data directory).
+
+    :param str,list hashes: microdescriptor hash or list of hashes to be
+      retrieved
+
+    :returns: :class:`~stem.descriptor.remote.Query` for the microdescriptors
+
+    :raises: **ValueError** if we request more than 92 microdescriptors by their
+      hashes (this is due to a limit on the url length by squid proxies).
+    """
+
+    if isinstance(hashes, str):
+      hashes = [hashes]
+
+    if len(hashes) > MAX_MICRODESCRIPTOR_BATCH_SIZE:
+      raise ValueError("Unable to request more than %i microdescriptors at a time by their hashes" % MAX_MICRODESCRIPTOR_BATCH_SIZE)
+
+    resource = '/tor/micro/d/%s' % '-'.join(hashes)
+
+    return self._query(resource, 'microdescriptor 1.0')
+
   def get_consensus(self, document_handler = stem.descriptor.DocumentHandler.ENTRIES, authority_v3ident = None):
     """
     Provides the present router status entries.
diff --git a/test/integ/descriptor/remote.py b/test/integ/descriptor/remote.py
index a705486..e9d4e8a 100644
--- a/test/integ/descriptor/remote.py
+++ b/test/integ/descriptor/remote.py
@@ -5,6 +5,7 @@ Integration tests for stem.descriptor.remote.
 import unittest
 import stem.descriptor.extrainfo_descriptor
+import stem.descriptor.microdescriptor
 import stem.descriptor.remote
 import stem.descriptor.router_status_entry
 import stem.descriptor.server_descriptor
@@ -128,6 +129,40 @@ class TestDescriptorReader(unittest.TestCase):
     self.assertEqual(2, len(list(multiple_query)))
+  def test_get_microdescriptors(self):
+    """
+    Exercises the downloader's get_microdescriptors() method.
+    """
+
+    # TODO: method needs to be fixed - not quite sure what's going wrong...
+
+    test.runner.skip(self, '(test presently broken)')
+    return
+
+    if test.runner.require_online(self):
+      return
+    elif test.runner.only_run_once(self, "test_get_microdescriptors"):
+      return
+
+    downloader = stem.descriptor.remote.DescriptorDownloader()
+
+    single_query = downloader.get_microdescriptors('6dCl6ab8CLo0LeMjxi/MZgVJiZgWN8WKTesWPBMtyTo')
+
+    multiple_query = downloader.get_microdescriptors([
+      '6dCl6ab8CLo0LeMjxi/MZgVJiZgWN8WKTesWPBMtyTo', # moria1
+      'oXBV80OwMACBJpqNeZrYSXF18l9EJCi4/mB8UOl9sME', # tor26
+    ])
+
+    single_query.run()
+    multiple_query.run()
+
+    single_query_results = list(single_query)
+    self.assertEqual(1, len(single_query_results))
+    self.assertEqual('moria1', single_query_results[0].digest)
+    self.assertTrue(isinstance(single_query_results[0], stem.descriptor.microdescriptor.Microdescriptor))
+
+    self.assertEqual(2, len(list(multiple_query)))
+
   def test_get_consensus(self):
     """
     Exercises the downloader's get_consensus() method.
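For context, this is roughly how get_microdescriptors() is intended to be used once the empty reply issue is sorted out. A minimal sketch, assuming the Query interface exercised in the test above; the digests are placeholders rather than real hashes, and printing onion_key is just one thing you might do with a parsed Microdescriptor:

  import stem.descriptor.remote

  downloader = stem.descriptor.remote.DescriptorDownloader()

  # placeholder digests - real ones come from the 'microdescriptor_hashes'
  # attribute of RouterStatusEntryV3 entries in a microdescriptor consensus
  query = downloader.get_microdescriptors([
    '<base64 digest 1>',
    '<base64 digest 2>',
  ])

  for micro_desc in query.run():
    print(micro_desc.onion_key)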