tor-commits
January 2015
commit 6484250c4000673074a261da95e1db3ed9d69db8
Author: Damian Johnson <atagar@torproject.org>
Date: Sun Jan 25 13:23:14 2015 -0800
Stylistic revisions
Just some quick non-impactful stylistic revisions.
---
stem/descriptor/networkstatus.py | 117 +++++++++++++-------------------
stem/descriptor/router_status_entry.py | 53 ++++-----------
stem/descriptor/server_descriptor.py | 3 -
3 files changed, 64 insertions(+), 109 deletions(-)
diff --git a/stem/descriptor/networkstatus.py b/stem/descriptor/networkstatus.py
index a8ec38a..feb8b37 100644
--- a/stem/descriptor/networkstatus.py
+++ b/stem/descriptor/networkstatus.py
@@ -69,6 +69,12 @@ from stem.descriptor import (
_parse_key_block,
)
+from stem.descriptor.router_status_entry import (
+ RouterStatusEntryV2,
+ RouterStatusEntryV3,
+ RouterStatusEntryMicroV3,
+)
+
# Version 2 network status document fields, tuples of the form...
# (keyword, is_mandatory)
@@ -153,6 +159,37 @@ KEY_CERTIFICATE_PARAMS = (
('dir-key-certification', True),
)
+# all parameters are constrained to int32 range
+MIN_PARAM, MAX_PARAM = -2147483648, 2147483647
+
+PARAM_RANGE = {
+ 'circwindow': (100, 1000),
+ 'CircuitPriorityHalflifeMsec': (-1, MAX_PARAM),
+ 'perconnbwrate': (-1, MAX_PARAM),
+ 'perconnbwburst': (-1, MAX_PARAM),
+ 'refuseunknownexits': (0, 1),
+ 'bwweightscale': (1, MAX_PARAM),
+ 'cbtdisabled': (0, 1),
+ 'cbtnummodes': (1, 20),
+ 'cbtrecentcount': (3, 1000),
+ 'cbtmaxtimeouts': (3, 10000),
+ 'cbtmincircs': (1, 10000),
+ 'cbtquantile': (10, 99),
+ 'cbtclosequantile': (MIN_PARAM, 99),
+ 'cbttestfreq': (1, MAX_PARAM),
+ 'cbtmintimeout': (500, MAX_PARAM),
+ 'UseOptimisticData': (0, 1),
+ 'Support022HiddenServices': (0, 1),
+ 'usecreatefast': (0, 1),
+ 'UseNTorHandshake': (0, 1),
+ 'FastFlagMinThreshold': (4, MAX_PARAM),
+ 'NumDirectoryGuards': (0, 10),
+ 'NumEntryGuards': (1, 10),
+ 'GuardLifetime': (2592000, 157766400), # min: 30 days, max: 1826 days
+ 'NumNTorsPerTAP': (1, 100000),
+ 'AllowNonearlyExtend': (0, 1),
+}
+
def _parse_file(document_file, document_type = None, validate = True, is_microdescriptor = False, document_handler = DocumentHandler.ENTRIES, **kwargs):
"""
@@ -184,16 +221,11 @@ def _parse_file(document_file, document_type = None, validate = True, is_microde
document_type = NetworkStatusDocumentV3
if document_type == NetworkStatusDocumentV2:
- document_type = NetworkStatusDocumentV2
- router_type = stem.descriptor.router_status_entry.RouterStatusEntryV2
+ document_type, router_type = NetworkStatusDocumentV2, RouterStatusEntryV2
elif document_type == NetworkStatusDocumentV3:
- if not is_microdescriptor:
- router_type = stem.descriptor.router_status_entry.RouterStatusEntryV3
- else:
- router_type = stem.descriptor.router_status_entry.RouterStatusEntryMicroV3
+ router_type = RouterStatusEntryMicroV3 if is_microdescriptor else RouterStatusEntryV3
elif document_type == BridgeNetworkStatusDocument:
- document_type = BridgeNetworkStatusDocument
- router_type = stem.descriptor.router_status_entry.RouterStatusEntryV2
+ document_type, router_type = BridgeNetworkStatusDocument, RouterStatusEntryV2
else:
raise ValueError("Document type %i isn't recognized (only able to parse v2, v3, and bridge)" % document_type)
@@ -391,7 +423,7 @@ class NetworkStatusDocumentV2(NetworkStatusDocument):
router_iter = stem.descriptor.router_status_entry._parse_file(
document_file,
validate,
- entry_class = stem.descriptor.router_status_entry.RouterStatusEntryV2,
+ entry_class = RouterStatusEntryV2,
entry_keyword = ROUTERS_START,
section_end_keywords = (V2_FOOTER_START,),
extra_args = (self,),
@@ -736,22 +768,16 @@ class NetworkStatusDocumentV3(NetworkStatusDocument):
if validate and self.is_vote and len(self.directory_authorities) != 1:
raise ValueError('Votes should only have an authority entry for the one that issued it, got %i: %s' % (len(self.directory_authorities), self.directory_authorities))
- if not self.is_microdescriptor:
- router_type = stem.descriptor.router_status_entry.RouterStatusEntryV3
- else:
- router_type = stem.descriptor.router_status_entry.RouterStatusEntryMicroV3
-
router_iter = stem.descriptor.router_status_entry._parse_file(
document_file,
validate,
- entry_class = router_type,
+ entry_class = RouterStatusEntryMicroV3 if self.is_microdescriptor else RouterStatusEntryV3,
entry_keyword = ROUTERS_START,
section_end_keywords = (FOOTER_START, V2_FOOTER_START),
extra_args = (self,),
)
self.routers = dict((desc.fingerprint, desc) for desc in router_iter)
-
self._footer(document_file, validate)
def get_unrecognized_lines(self):
@@ -853,59 +879,14 @@ class NetworkStatusDocumentV3(NetworkStatusDocument):
"""
for key, value in self.params.items():
- # all parameters are constrained to int32 range
- minimum, maximum = -2147483648, 2147483647
-
- if key == 'circwindow':
- minimum, maximum = 100, 1000
- elif key == 'CircuitPriorityHalflifeMsec':
- minimum = -1
- elif key in ('perconnbwrate', 'perconnbwburst'):
- minimum = 1
- elif key == 'refuseunknownexits':
- minimum, maximum = 0, 1
- elif key == 'bwweightscale':
- minimum = 1
- elif key == 'cbtdisabled':
- minimum, maximum = 0, 1
- elif key == 'cbtnummodes':
- minimum, maximum = 1, 20
- elif key == 'cbtrecentcount':
- minimum, maximum = 3, 1000
- elif key == 'cbtmaxtimeouts':
- minimum, maximum = 3, 10000
- elif key == 'cbtmincircs':
- minimum, maximum = 1, 10000
- elif key == 'cbtquantile':
- minimum, maximum = 10, 99
- elif key == 'cbtclosequantile':
- minimum, maximum = self.params.get('cbtquantile', minimum), 99
- elif key == 'cbttestfreq':
- minimum = 1
- elif key == 'cbtmintimeout':
- minimum = 500
+ minimum, maximum = PARAM_RANGE.get(key, (MIN_PARAM, MAX_PARAM))
+
+ # there's a few dynamic parameter ranges
+
+ if key == 'cbtclosequantile':
+ minimum = self.params.get('cbtquantile', minimum)
elif key == 'cbtinitialtimeout':
minimum = self.params.get('cbtmintimeout', minimum)
- elif key == 'UseOptimisticData':
- minimum, maximum = 0, 1
- elif key == 'Support022HiddenServices':
- minimum, maximum = 0, 1
- elif key == 'usecreatefast':
- minimum, maximum = 0, 1
- elif key == 'UseNTorHandshake':
- minimum, maximum = 0, 1
- elif key == 'FastFlagMinThreshold':
- minimum = 4
- elif key == 'NumDirectoryGuards':
- minimum, maximum = 0, 10
- elif key == 'NumEntryGuards':
- minimum, maximum = 1, 10
- elif key == 'GuardLifetime':
- minimum, maximum = 2592000, 157766400 # min: 30 days, max: 1826 days
- elif key == 'NumNTorsPerTAP':
- minimum, maximum = 1, 100000
- elif key == 'AllowNonearlyExtend':
- minimum, maximum = 0, 1
if value < minimum or value > maximum:
raise ValueError("'%s' value on the params line must be in the range of %i - %i, was %i" % (key, minimum, maximum, value))
@@ -1397,7 +1378,7 @@ class BridgeNetworkStatusDocument(NetworkStatusDocument):
router_iter = stem.descriptor.router_status_entry._parse_file(
document_file,
validate,
- entry_class = stem.descriptor.router_status_entry.RouterStatusEntryV2,
+ entry_class = RouterStatusEntryV2,
extra_args = (self,),
)
diff --git a/stem/descriptor/router_status_entry.py b/stem/descriptor/router_status_entry.py
index 292ec7e..e0421cd 100644
--- a/stem/descriptor/router_status_entry.py
+++ b/stem/descriptor/router_status_entry.py
@@ -403,41 +403,27 @@ class RouterStatusEntry(Descriptor):
entries = _get_descriptor_components(content, validate)
if validate:
- self._check_constraints(entries)
- self._parse(entries, validate)
- else:
- self._entries = entries
-
- def _check_constraints(self, entries):
- """
- Does a basic check that the entries conform to this descriptor type's
- constraints.
-
- :param dict entries: keyword => (value, pgp key) entries
-
- :raises: **ValueError** if an issue arises in validation
- """
+ for keyword in self._required_fields():
+ if keyword not in entries:
+ raise ValueError("%s must have a '%s' line:\n%s" % (self._name(True), keyword, str(self)))
- for keyword in self._required_fields():
- if keyword not in entries:
- raise ValueError("%s must have a '%s' line:\n%s" % (self._name(True), keyword, str(self)))
+ for keyword in self._single_fields():
+ if keyword in entries and len(entries[keyword]) > 1:
+ raise ValueError("%s can only have a single '%s' line, got %i:\n%s" % (self._name(True), keyword, len(entries[keyword]), str(self)))
- for keyword in self._single_fields():
- if keyword in entries and len(entries[keyword]) > 1:
- raise ValueError("%s can only have a single '%s' line, got %i:\n%s" % (self._name(True), keyword, len(entries[keyword]), str(self)))
+ if 'r' != list(entries.keys())[0]:
+ raise ValueError("%s are expected to start with a 'r' line:\n%s" % (self._name(True), str(self)))
- if 'r' != list(entries.keys())[0]:
- raise ValueError("%s are expected to start with a 'r' line:\n%s" % (self._name(True), str(self)))
+ self._parse(entries, validate)
+ else:
+ self._entries = entries
def _name(self, is_plural = False):
"""
Name for this descriptor type.
"""
- if is_plural:
- return 'Router status entries'
- else:
- return 'Router status entry'
+ return 'Router status entries' if is_plural else 'Router status entry'
def _required_fields(self):
"""
@@ -485,10 +471,7 @@ class RouterStatusEntryV2(RouterStatusEntry):
})
def _name(self, is_plural = False):
- if is_plural:
- return 'Router status entries (v2)'
- else:
- return 'Router status entry (v2)'
+ return 'Router status entries (v2)' if is_plural else 'Router status entry (v2)'
def _required_fields(self):
return ('r')
@@ -561,10 +544,7 @@ class RouterStatusEntryV3(RouterStatusEntry):
})
def _name(self, is_plural = False):
- if is_plural:
- return 'Router status entries (v3)'
- else:
- return 'Router status entry (v3)'
+ return 'Router status entries (v3)' if is_plural else 'Router status entry (v3)'
def _required_fields(self):
return ('r', 's')
@@ -621,10 +601,7 @@ class RouterStatusEntryMicroV3(RouterStatusEntry):
})
def _name(self, is_plural = False):
- if is_plural:
- return 'Router status entries (micro v3)'
- else:
- return 'Router status entry (micro v3)'
+ return 'Router status entries (micro v3)' if is_plural else 'Router status entry (micro v3)'
def _required_fields(self):
return ('r', 's', 'm')
diff --git a/stem/descriptor/server_descriptor.py b/stem/descriptor/server_descriptor.py
index b107b33..afcdac2 100644
--- a/stem/descriptor/server_descriptor.py
+++ b/stem/descriptor/server_descriptor.py
@@ -838,9 +838,6 @@ class BridgeDescriptor(ServerDescriptor):
'router-digest': _parse_router_digest_line,
})
- def __init__(self, raw_contents, validate = True, annotations = None):
- super(BridgeDescriptor, self).__init__(raw_contents, validate, annotations)
-
def digest(self):
return self._digest
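The net effect of the PARAM_RANGE change above is a table-driven validation pattern: each consensus parameter maps to a (minimum, maximum) tuple, anything absent from the table falls back to the int32 range, and the couple of dynamic bounds (cbtclosequantile, cbtinitialtimeout) are adjusted just before the comparison. A minimal standalone sketch of that pattern, using a subset of the ranges from the diff (check_params itself is illustrative, not stem's API):

# Minimal sketch of the table-driven range check introduced above. The
# constants mirror the diff; check_params() is an illustrative helper.
MIN_PARAM, MAX_PARAM = -2147483648, 2147483647  # int32 bounds

PARAM_RANGE = {
  'circwindow': (100, 1000),
  'cbtquantile': (10, 99),
  'cbtclosequantile': (MIN_PARAM, 99),
}

def check_params(params):
  for key, value in params.items():
    minimum, maximum = PARAM_RANGE.get(key, (MIN_PARAM, MAX_PARAM))

    # a few bounds depend on other parameters
    if key == 'cbtclosequantile':
      minimum = params.get('cbtquantile', minimum)

    if value < minimum or value > maximum:
      raise ValueError("'%s' must be within %i - %i, was %i" % (key, minimum, maximum, value))

check_params({'circwindow': 500, 'cbtquantile': 80, 'cbtclosequantile': 90})  # fine
# check_params({'circwindow': 5000}) would raise a ValueError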
commit eb69babe993d2b8c7a155428765aca251f569ba2
Author: Damian Johnson <atagar@torproject.org>
Date: Sun Jan 11 15:44:59 2015 -0800
Lazy parsing for ServerDescriptor subclasses
Including ServerDescriptor's RelayDescriptor and BridgeDescriptor subclasses.
This is actually a tad nicer than what we originally had in that we no longer
need a _parse() function for subclasses. Rather, they simply specify the
additional fields they include.
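A simplified sketch of the shape this lazy parsing takes, assuming invented class, keyword, and field names rather than stem's real API: the base class keeps an attribute => (default, parser) table, __getattr__ parses a field on first access, and a subclass only merges its extra fields into the table.

# Simplified sketch of the lazy-parsing pattern; class, keyword, and field
# names here are illustrative only.
def _parse_nickname_line(desc, entries):
  desc.nickname = entries['router']

def _parse_onion_key_line(desc, entries):
  desc.onion_key = entries['onion-key']

class LazyDescriptor(object):
  def __init__(self, entries):
    self._entries = entries  # raw keyword => value mapping

  def _attributes(self):
    # attribute => (default_value, parsing_function)
    return {'nickname': (None, _parse_nickname_line)}

  def __getattr__(self, name):
    # only called when normal lookup fails, so fields parse on first access
    if name in self._attributes():
      default, parser = self._attributes()[name]

      try:
        parser(self, self._entries)
      except KeyError:
        setattr(self, name, default)

      return object.__getattribute__(self, name)

    raise AttributeError(name)

class RelayLikeDescriptor(LazyDescriptor):
  def _attributes(self):
    # subclasses simply declare the additional fields they include
    return dict(LazyDescriptor._attributes(self), **{
      'onion_key': (None, _parse_onion_key_line),
    })

desc = RelayLikeDescriptor({'router': 'caerSidi', 'onion-key': '<RSA PUBLIC KEY>'})
print(desc.nickname, desc.onion_key)  # parsed on first access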
---
stem/descriptor/server_descriptor.py | 261 ++++++++++++++++++----------------
1 file changed, 135 insertions(+), 126 deletions(-)
diff --git a/stem/descriptor/server_descriptor.py b/stem/descriptor/server_descriptor.py
index 7ecc9c3..944c6c0 100644
--- a/stem/descriptor/server_descriptor.py
+++ b/stem/descriptor/server_descriptor.py
@@ -396,73 +396,31 @@ def _parse_history_line(descriptor, entries, keyword):
return timestamp, interval, history_values
+def _parse_router_digest_line(descriptor, entries):
+ descriptor._digest = _value('router-digest', entries)
+
+ if not stem.util.tor_tools.is_hex_digits(descriptor._digest, 40):
+ raise ValueError('Router digest line had an invalid sha1 digest: router-digest %s' % descriptor._digest)
+
+
+def _key_block(entries, keyword, expected_block_type):
+ value, block_type, block_contents = entries[keyword][0]
+
+ if not block_contents or block_type != expected_block_type:
+ raise ValueError("'%s' should be followed by a %s block" % (keyword, expected_block_type))
+
+ return block_contents
+
+
_parse_ipv6_policy_line = lambda descriptor, entries: setattr(descriptor, 'exit_policy_v6', stem.exit_policy.MicroExitPolicy(_value('ipv6-policy', entries)))
_parse_allow_single_hop_exits_line = lambda descriptor, entries: setattr(descriptor, 'allow_single_hop_exits', True)
_parse_caches_extra_info_line = lambda descriptor, entries: setattr(descriptor, 'extra_info_cache', True)
_parse_family_line = lambda descriptor, entries: setattr(descriptor, 'family', set(_value('family', entries).split(' ')))
_parse_eventdns_line = lambda descriptor, entries: setattr(descriptor, 'eventdns', _value('eventdns', entries) == '1')
-
-
-SERVER_DESCRIPTOR_ATTRIBUTES = {
- 'nickname': (None, _parse_router_line),
- 'fingerprint': (None, _parse_fingerprint_line),
- 'published': (None, _parse_published_line),
-
- 'address': (None, _parse_router_line),
- 'or_port': (None, _parse_router_line),
- 'socks_port': (None, _parse_router_line),
- 'dir_port': (None, _parse_router_line),
-
- 'tor_version': (None, _parse_platform_line),
- 'operating_system': (None, _parse_platform_line),
- 'uptime': (None, _parse_uptime_line),
- 'exit_policy_v6': (DEFAULT_IPV6_EXIT_POLICY, _parse_ipv6_policy_line),
- 'family': (set(), _parse_family_line),
-
- 'average_bandwidth': (None, _parse_bandwidth_line),
- 'burst_bandwidth': (None, _parse_bandwidth_line),
- 'observed_bandwidth': (None, _parse_bandwidth_line),
-
- 'link_protocols': (None, _parse_protocols_line),
- 'circuit_protocols': (None, _parse_protocols_line),
- 'hibernating': (False, _parse_hibernating_line),
- 'allow_single_hop_exits': (False, _parse_allow_single_hop_exits_line),
- 'extra_info_cache': (False, _parse_caches_extra_info_line),
- 'extra_info_digest': (None, _parse_extrainfo_digest_line),
- 'hidden_service_dir': (None, _parse_hidden_service_dir_line),
- 'eventdns': (None, _parse_eventdns_line),
- 'or_addresses': ([], _parse_or_address_line),
-
- 'read_history_end': (None, _parse_read_history_line),
- 'read_history_interval': (None, _parse_read_history_line),
- 'read_history_values': (None, _parse_read_history_line),
-
- 'write_history_end': (None, _parse_write_history_line),
- 'write_history_interval': (None, _parse_write_history_line),
- 'write_history_values': (None, _parse_write_history_line),
-}
-
-
-PARSER_FOR_LINE = {
- 'router': _parse_router_line,
- 'bandwidth': _parse_bandwidth_line,
- 'platform': _parse_platform_line,
- 'published': _parse_published_line,
- 'fingerprint': _parse_fingerprint_line,
- 'hibernating': _parse_hibernating_line,
- 'extra-info-digest': _parse_extrainfo_digest_line,
- 'hidden-service-dir': _parse_hidden_service_dir_line,
- 'uptime': _parse_uptime_line,
- 'protocols': _parse_protocols_line,
- 'or-address': _parse_or_address_line,
- 'read-history': _parse_read_history_line,
- 'write-history': _parse_write_history_line,
- 'ipv6-policy': _parse_ipv6_policy_line,
- 'allow-single-hop-exits': _parse_allow_single_hop_exits_line,
- 'caches-extra-info': _parse_caches_extra_info_line,
- 'family': _parse_family_line,
- 'eventdns': _parse_eventdns_line,
-}
+_parse_onion_key_line = lambda descriptor, entries: setattr(descriptor, 'onion_key', _key_block(entries, 'onion-key', 'RSA PUBLIC KEY'))
+_parse_signing_key_line = lambda descriptor, entries: setattr(descriptor, 'signing_key', _key_block(entries, 'signing-key', 'RSA PUBLIC KEY'))
+_parse_router_signature_line = lambda descriptor, entries: setattr(descriptor, 'signature', _key_block(entries, 'router-signature', 'SIGNATURE'))
+_parse_ntor_onion_key_line = lambda descriptor, entries: setattr(descriptor, 'ntor_onion_key', _value('ntor-onion-key', entries))
class ServerDescriptor(Descriptor):
@@ -638,13 +596,13 @@ class ServerDescriptor(Descriptor):
# set defaults
- for attr in SERVER_DESCRIPTOR_ATTRIBUTES:
- setattr(self, attr, SERVER_DESCRIPTOR_ATTRIBUTES[attr][0])
+ for attr in self._attributes():
+ setattr(self, attr, self._attributes()[attr][0])
for keyword, values in list(entries.items()):
try:
- if keyword in PARSER_FOR_LINE:
- PARSER_FOR_LINE[keyword](self, entries)
+ if keyword in self._parser_for_line():
+ self._parser_for_line()[keyword](self, entries)
elif keyword == 'contact':
pass # parsed as a bytes field earlier
else:
@@ -710,11 +668,85 @@ class ServerDescriptor(Descriptor):
def _last_keyword(self):
return 'router-signature'
+ @lru_cache()
+ def _attributes(self):
+ """
+ Provides a mapping of attributes we should have...
+
+ attrubute => (default_value, parsing_function)
+ """
+
+ return {
+ 'nickname': (None, _parse_router_line),
+ 'fingerprint': (None, _parse_fingerprint_line),
+ 'published': (None, _parse_published_line),
+
+ 'address': (None, _parse_router_line),
+ 'or_port': (None, _parse_router_line),
+ 'socks_port': (None, _parse_router_line),
+ 'dir_port': (None, _parse_router_line),
+
+ 'tor_version': (None, _parse_platform_line),
+ 'operating_system': (None, _parse_platform_line),
+ 'uptime': (None, _parse_uptime_line),
+ 'exit_policy_v6': (DEFAULT_IPV6_EXIT_POLICY, _parse_ipv6_policy_line),
+ 'family': (set(), _parse_family_line),
+
+ 'average_bandwidth': (None, _parse_bandwidth_line),
+ 'burst_bandwidth': (None, _parse_bandwidth_line),
+ 'observed_bandwidth': (None, _parse_bandwidth_line),
+
+ 'link_protocols': (None, _parse_protocols_line),
+ 'circuit_protocols': (None, _parse_protocols_line),
+ 'hibernating': (False, _parse_hibernating_line),
+ 'allow_single_hop_exits': (False, _parse_allow_single_hop_exits_line),
+ 'extra_info_cache': (False, _parse_caches_extra_info_line),
+ 'extra_info_digest': (None, _parse_extrainfo_digest_line),
+ 'hidden_service_dir': (None, _parse_hidden_service_dir_line),
+ 'eventdns': (None, _parse_eventdns_line),
+ 'or_addresses': ([], _parse_or_address_line),
+
+ 'read_history_end': (None, _parse_read_history_line),
+ 'read_history_interval': (None, _parse_read_history_line),
+ 'read_history_values': (None, _parse_read_history_line),
+
+ 'write_history_end': (None, _parse_write_history_line),
+ 'write_history_interval': (None, _parse_write_history_line),
+ 'write_history_values': (None, _parse_write_history_line),
+ }
+
+ @lru_cache()
+ def _parser_for_line(self):
+ """
+ Provides the parsing function for the line with a given keyword.
+ """
+
+ return {
+ 'router': _parse_router_line,
+ 'bandwidth': _parse_bandwidth_line,
+ 'platform': _parse_platform_line,
+ 'published': _parse_published_line,
+ 'fingerprint': _parse_fingerprint_line,
+ 'hibernating': _parse_hibernating_line,
+ 'extra-info-digest': _parse_extrainfo_digest_line,
+ 'hidden-service-dir': _parse_hidden_service_dir_line,
+ 'uptime': _parse_uptime_line,
+ 'protocols': _parse_protocols_line,
+ 'or-address': _parse_or_address_line,
+ 'read-history': _parse_read_history_line,
+ 'write-history': _parse_write_history_line,
+ 'ipv6-policy': _parse_ipv6_policy_line,
+ 'allow-single-hop-exits': _parse_allow_single_hop_exits_line,
+ 'caches-extra-info': _parse_caches_extra_info_line,
+ 'family': _parse_family_line,
+ 'eventdns': _parse_eventdns_line,
+ }
+
def __getattr__(self, name):
# If attribute isn't already present we might be lazy loading it...
- if self._lazy_loading and name in SERVER_DESCRIPTOR_ATTRIBUTES:
- default, parsing_function = SERVER_DESCRIPTOR_ATTRIBUTES[name]
+ if self._lazy_loading and name in self._attributes():
+ default, parsing_function = self._attributes()[name]
try:
if parsing_function:
@@ -727,7 +759,11 @@ class ServerDescriptor(Descriptor):
del self._exit_policy_list
except (ValueError, KeyError):
- setattr(self, name, default)
+ try:
+ # despite having a validation failure check to see if we set something
+ return super(ServerDescriptor, self).__getattribute__(name)
+ except AttributeError:
+ setattr(self, name, default)
return super(ServerDescriptor, self).__getattribute__(name)
@@ -746,11 +782,6 @@ class RelayDescriptor(ServerDescriptor):
"""
def __init__(self, raw_contents, validate = True, annotations = None):
- self.onion_key = None
- self.ntor_onion_key = None
- self.signing_key = None
- self.signature = None
-
super(RelayDescriptor, self).__init__(raw_contents, validate, annotations)
# validate the descriptor if required
@@ -874,45 +905,30 @@ class RelayDescriptor(ServerDescriptor):
if digest != local_digest:
raise ValueError('Decrypted digest does not match local digest (calculated: %s, local: %s)' % (digest, local_digest))
- def _parse(self, entries, validate):
- entries = dict(entries) # shallow copy since we're destructive
-
- # handles fields only in server descriptors
-
- for keyword, values in list(entries.items()):
- value, block_type, block_contents = values[0]
- line = '%s %s' % (keyword, value)
-
- if keyword == 'onion-key':
- if validate and (not block_contents or block_type != 'RSA PUBLIC KEY'):
- raise ValueError("'onion-key' should be followed by a RSA PUBLIC KEY block: %s" % line)
-
- self.onion_key = block_contents
- del entries['onion-key']
- elif keyword == 'ntor-onion-key':
- self.ntor_onion_key = value
- del entries['ntor-onion-key']
- elif keyword == 'signing-key':
- if validate and (not block_contents or block_type != 'RSA PUBLIC KEY'):
- raise ValueError("'signing-key' should be followed by a RSA PUBLIC KEY block: %s" % line)
-
- self.signing_key = block_contents
- del entries['signing-key']
- elif keyword == 'router-signature':
- if validate and (not block_contents or block_type != 'SIGNATURE'):
- raise ValueError("'router-signature' should be followed by a SIGNATURE block: %s" % line)
-
- self.signature = block_contents
- del entries['router-signature']
-
- ServerDescriptor._parse(self, entries, validate)
-
def _compare(self, other, method):
if not isinstance(other, RelayDescriptor):
return False
return method(str(self).strip(), str(other).strip())
+ @lru_cache()
+ def _attributes(self):
+ return dict(super(RelayDescriptor, self)._attributes(), **{
+ 'onion_key': (None, _parse_onion_key_line),
+ 'ntor_onion_key': (None, _parse_ntor_onion_key_line),
+ 'signing_key': (None, _parse_signing_key_line),
+ 'signature': (None, _parse_router_signature_line),
+ })
+
+ @lru_cache()
+ def _parser_for_line(self):
+ return dict(super(RelayDescriptor, self)._parser_for_line(), **{
+ 'onion-key': _parse_onion_key_line,
+ 'ntor-onion-key': _parse_ntor_onion_key_line,
+ 'signing-key': _parse_signing_key_line,
+ 'router-signature': _parse_router_signature_line,
+ })
+
def __hash__(self):
return hash(str(self).strip())
@@ -947,30 +963,11 @@ class BridgeDescriptor(ServerDescriptor):
"""
def __init__(self, raw_contents, validate = True, annotations = None):
- self._digest = None
-
super(BridgeDescriptor, self).__init__(raw_contents, validate, annotations)
def digest(self):
return self._digest
- def _parse(self, entries, validate):
- entries = dict(entries)
-
- # handles fields only in bridge descriptors
- for keyword, values in list(entries.items()):
- value, block_type, block_contents = values[0]
- line = '%s %s' % (keyword, value)
-
- if keyword == 'router-digest':
- if validate and not stem.util.tor_tools.is_hex_digits(value, 40):
- raise ValueError('Router digest line had an invalid sha1 digest: %s' % line)
-
- self._digest = stem.util.str_tools._to_unicode(value)
- del entries['router-digest']
-
- ServerDescriptor._parse(self, entries, validate)
-
def is_scrubbed(self):
"""
Checks if we've been properly scrubbed in accordance with the `bridge
@@ -1040,6 +1037,18 @@ class BridgeDescriptor(ServerDescriptor):
def _last_keyword(self):
return None
+ @lru_cache()
+ def _attributes(self):
+ return dict(super(BridgeDescriptor, self)._attributes(), **{
+ '_digest': (None, _parse_router_digest_line),
+ })
+
+ @lru_cache()
+ def _parser_for_line(self):
+ return dict(super(BridgeDescriptor, self)._parser_for_line(), **{
+ 'router-digest': _parse_router_digest_line,
+ })
+
def _compare(self, other, method):
if not isinstance(other, BridgeDescriptor):
return False
commit b717bef5bb2a0d8a37fb154df28501af574c04df
Author: Damian Johnson <atagar@torproject.org>
Date: Wed Jan 14 10:40:27 2015 -0800
Move extrainfo descriptor parsing to helpers
Intermediate point so we can do the same pattern of lazy loading I did for
server descriptors.
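The helpers being introduced all share one signature, (descriptor, entries), and the many near-identical line types (histories, timestamps, port counts, locale counts) then become functools.partial() bindings of a generic parser to a keyword and target attribute. A rough sketch of that shape, with a made-up keyword and a simplified _value():

import functools

# Rough sketch of the helper-plus-partial pattern; the 'demo-ips' keyword and
# Demo class are invented, and _value() is simplified relative to stem's.
def _value(keyword, entries):
  return entries[keyword][0]

def _parse_count_line(keyword, attribute, descriptor, entries):
  # "<keyword>" key=N,key=N,...
  value, counts = _value(keyword, entries), {}

  for entry in value.split(','):
    key, count = entry.split('=', 1)

    if not count.isdigit():
      raise ValueError('%s line had a non-numeric count: %s %s' % (keyword, keyword, value))

    counts[key] = int(count)

  setattr(descriptor, attribute, counts)

# one callable per line type, with the keyword and attribute bound up front
_parse_demo_ips_line = functools.partial(_parse_count_line, 'demo-ips', 'demo_ips')

class Demo(object):
  pass

desc = Demo()
_parse_demo_ips_line(desc, {'demo-ips': ['us=12,de=7']})
print(desc.demo_ips)  # {'us': 12, 'de': 7}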
---
stem/descriptor/extrainfo_descriptor.py | 772 ++++++++++++++------------
test/unit/descriptor/extrainfo_descriptor.py | 28 +-
2 files changed, 431 insertions(+), 369 deletions(-)
diff --git a/stem/descriptor/extrainfo_descriptor.py b/stem/descriptor/extrainfo_descriptor.py
index d6e6102..408cc92 100644
--- a/stem/descriptor/extrainfo_descriptor.py
+++ b/stem/descriptor/extrainfo_descriptor.py
@@ -69,6 +69,7 @@ Extra-info descriptors are available from a few sources...
===================== ===========
"""
+import functools
import hashlib
import re
@@ -225,6 +226,349 @@ def _parse_timestamp_and_interval(keyword, content):
raise ValueError("%s line's timestamp wasn't parsable: %s" % (keyword, line))
+def _value(line, entries):
+ return entries[line][0][0]
+
+
+def _values(line, entries):
+ return [entry[0] for entry in entries[line]]
+
+
+def _parse_extra_info_line(descriptor, entries):
+ # "extra-info" Nickname Fingerprint
+
+ value = _value('extra-info', entries)
+ extra_info_comp = value.split()
+
+ if len(extra_info_comp) < 2:
+ raise ValueError('Extra-info line must have two values: extra-info %s' % value)
+ elif not stem.util.tor_tools.is_valid_nickname(extra_info_comp[0]):
+ raise ValueError("Extra-info line entry isn't a valid nickname: %s" % extra_info_comp[0])
+ elif not stem.util.tor_tools.is_valid_fingerprint(extra_info_comp[1]):
+ raise ValueError('Tor relay fingerprints consist of forty hex digits: %s' % extra_info_comp[1])
+
+ descriptor.nickname = extra_info_comp[0]
+ descriptor.fingerprint = extra_info_comp[1]
+
+
+def _parse_geoip_db_digest_line(descriptor, entries):
+ # "geoip-db-digest" Digest
+
+ value = _value('geoip-db-digest', entries)
+
+ if not stem.util.tor_tools.is_hex_digits(value, 40):
+ raise ValueError('Geoip digest line had an invalid sha1 digest: geoip-db-digest %s' % value)
+
+ descriptor.geoip_db_digest = value
+
+
+def _parse_geoip6_db_digest_line(descriptor, entries):
+ # "geoip6-db-digest" Digest
+
+ value = _value('geoip6-db-digest', entries)
+
+ if not stem.util.tor_tools.is_hex_digits(value, 40):
+ raise ValueError('Geoip v6 digest line had an invalid sha1 digest: geoip6-db-digest %s' % value)
+
+ descriptor.geoip6_db_digest = value
+
+
+def _parse_transport_line(descriptor, entries):
+ # "transport" transportname address:port [arglist]
+ # Everything after the transportname is scrubbed in published bridge
+ # descriptors, so we'll never see it in practice.
+ #
+ # These entries really only make sense for bridges, but have been seen
+ # on non-bridges in the wild when the relay operator configured it this
+ # way.
+
+ for value in _values('transport', entries):
+ name, address, port, args = None, None, None, None
+
+ if ' ' not in value:
+ # scrubbed
+ name = value
+ else:
+ # not scrubbed
+ value_comp = value.split()
+
+ if len(value_comp) < 1:
+ raise ValueError('Transport line is missing its transport name: transport %s' % value)
+ elif len(value_comp) < 2:
+ raise ValueError('Transport line is missing its address:port value: transport %s' % value)
+ elif ':' not in value_comp[1]:
+ raise ValueError("Transport line's address:port entry is missing a colon: transport %s" % value)
+
+ name = value_comp[0]
+ address, port_str = value_comp[1].split(':', 1)
+
+ if not stem.util.connection.is_valid_ipv4_address(address) or \
+ stem.util.connection.is_valid_ipv6_address(address):
+ raise ValueError('Transport line has a malformed address: transport %s' % value)
+ elif not stem.util.connection.is_valid_port(port_str):
+ raise ValueError('Transport line has a malformed port: transport %s' % value)
+
+ port = int(port_str)
+ args = value_comp[2:] if len(value_comp) >= 3 else []
+
+ descriptor.transport[name] = (address, port, args)
+
+
+def _parse_cell_circuits_per_decline_line(descriptor, entries):
+ # "cell-circuits-per-decile" num
+
+ value = _value('cell-circuits-per-decile', entries)
+
+ if not value.isdigit():
+ raise ValueError('Non-numeric cell-circuits-per-decile value: %s' % value)
+ elif int(value) < 0:
+ raise ValueError('Negative cell-circuits-per-decile value: %s' % value)
+
+ descriptor.cell_circuits_per_decile = int(value)
+
+
+def _parse_dirreq_line(keyword, recognized_counts_attr, unrecognized_counts_attr, descriptor, entries):
+ value = _value(keyword, entries)
+
+ recognized_counts = {}
+ unrecognized_counts = {}
+
+ is_response_stats = keyword in ('dirreq-v2-resp', 'dirreq-v3-resp')
+ key_set = DirResponse if is_response_stats else DirStat
+
+ key_type = 'STATUS' if is_response_stats else 'STAT'
+ error_msg = '%s lines should contain %s=COUNT mappings: %s %s' % (keyword, key_type, keyword, value)
+
+ if value:
+ for entry in value.split(','):
+ if '=' not in entry:
+ raise ValueError(error_msg)
+
+ status, count = entry.split('=', 1)
+
+ if count.isdigit():
+ if status in key_set:
+ recognized_counts[status] = int(count)
+ else:
+ unrecognized_counts[status] = int(count)
+ else:
+ raise ValueError(error_msg)
+
+ setattr(descriptor, recognized_counts_attr, recognized_counts)
+ setattr(descriptor, unrecognized_counts_attr, unrecognized_counts)
+
+
+def _parse_dirreq_share_line(keyword, attribute, descriptor, entries):
+ value = _value(keyword, entries)
+
+ if not value.endswith('%'):
+ raise ValueError('%s lines should be a percentage: %s %s' % (keyword, keyword, value))
+ elif float(value[:-1]) < 0:
+ raise ValueError('Negative percentage value: %s %s' % (keyword, value))
+
+ # bug means it might be above 100%: https://lists.torproject.org/pipermail/tor-dev/2012-June/003679.html
+
+ setattr(descriptor, attribute, float(value[:-1]) / 100)
+
+
+def _parse_cell_line(keyword, attribute, descriptor, entries):
+ # "<keyword>" num,...,num
+
+ value = _value(keyword, entries)
+ entries, exc = [], None
+
+ if value:
+ for entry in value.split(','):
+ try:
+ # Values should be positive but as discussed in ticket #5849
+ # there was a bug around this. It was fixed in tor 0.2.2.1.
+
+ entries.append(float(entry))
+ except ValueError:
+ exc = ValueError('Non-numeric entry in %s listing: %s %s' % (keyword, keyword, value))
+
+ setattr(descriptor, attribute, entries)
+
+ if exc:
+ raise exc
+
+
+def _parse_timestamp_line(keyword, attribute, descriptor, entries):
+ # "<keyword>" YYYY-MM-DD HH:MM:SS
+
+ value = _value(keyword, entries)
+
+ try:
+ setattr(descriptor, attribute, stem.util.str_tools._parse_timestamp(value))
+ except ValueError:
+ raise ValueError("Timestamp on %s line wasn't parsable: %s %s" % (keyword, keyword, value))
+
+
+def _parse_timestamp_and_interval_line(keyword, end_attribute, interval_attribute, descriptor, entries):
+ # "<keyword>" YYYY-MM-DD HH:MM:SS (NSEC s)
+
+ timestamp, interval, _ = _parse_timestamp_and_interval(keyword, _value(keyword, entries))
+ setattr(descriptor, end_attribute, timestamp)
+ setattr(descriptor, interval_attribute, interval)
+
+
+def _parse_conn_bi_direct_line(descriptor, entries):
+ # "conn-bi-direct" YYYY-MM-DD HH:MM:SS (NSEC s) BELOW,READ,WRITE,BOTH
+
+ value = _value('conn-bi-direct', entries)
+ timestamp, interval, remainder = _parse_timestamp_and_interval('conn-bi-direct', value)
+ stats = remainder.split(',')
+
+ if len(stats) != 4 or not (stats[0].isdigit() and stats[1].isdigit() and stats[2].isdigit() and stats[3].isdigit()):
+ raise ValueError('conn-bi-direct line should end with four numeric values: conn-bi-direct %s' % value)
+
+ descriptor.conn_bi_direct_end = timestamp
+ descriptor.conn_bi_direct_interval = interval
+ descriptor.conn_bi_direct_below = int(stats[0])
+ descriptor.conn_bi_direct_read = int(stats[1])
+ descriptor.conn_bi_direct_write = int(stats[2])
+ descriptor.conn_bi_direct_both = int(stats[3])
+
+
+def _parse_history_line(keyword, end_attribute, interval_attribute, values_attribute, descriptor, entries):
+ # "<keyword>" YYYY-MM-DD HH:MM:SS (NSEC s) NUM,NUM,NUM,NUM,NUM...
+
+ value = _value(keyword, entries)
+ timestamp, interval, remainder = _parse_timestamp_and_interval(keyword, value)
+ history_values = []
+
+ if remainder:
+ try:
+ history_values = [int(entry) for entry in remainder.split(',')]
+ except ValueError:
+ raise ValueError('%s line has non-numeric values: %s %s' % (keyword, keyword, value))
+
+ setattr(descriptor, end_attribute, timestamp)
+ setattr(descriptor, interval_attribute, interval)
+ setattr(descriptor, values_attribute, history_values)
+
+
+def _parse_port_count_line(keyword, attribute, descriptor, entries):
+ # "<keyword>" port=N,port=N,...
+
+ value, port_mappings = _value(keyword, entries), {}
+ error_msg = 'Entries in %s line should only be PORT=N entries: %s %s' % (keyword, keyword, value)
+
+ if value:
+ for entry in value.split(','):
+ if '=' not in entry:
+ raise ValueError(error_msg)
+
+ port, stat = entry.split('=', 1)
+
+ if (port == 'other' or stem.util.connection.is_valid_port(port)) and stat.isdigit():
+ if port != 'other':
+ port = int(port)
+
+ port_mappings[port] = int(stat)
+ else:
+ raise ValueError(error_msg)
+
+ setattr(descriptor, attribute, port_mappings)
+
+
+def _parse_geoip_to_count_line(keyword, attribute, descriptor, entries):
+ # "<keyword>" CC=N,CC=N,...
+ #
+ # The maxmind geoip (https://www.maxmind.com/app/iso3166) has numeric
+ # locale codes for some special values, for instance...
+ # A1,"Anonymous Proxy"
+ # A2,"Satellite Provider"
+ # ??,"Unknown"
+
+ value, locale_usage = _value(keyword, entries), {}
+ error_msg = 'Entries in %s line should only be CC=N entries: %s %s' % (keyword, keyword, value)
+
+ if value:
+ for entry in value.split(','):
+ if '=' not in entry:
+ raise ValueError(error_msg)
+
+ locale, count = entry.split('=', 1)
+
+ if _locale_re.match(locale) and count.isdigit():
+ locale_usage[locale] = int(count)
+ else:
+ raise ValueError(error_msg)
+
+ setattr(descriptor, attribute, locale_usage)
+
+
+def _parse_bridge_ip_versions_line(descriptor, entries):
+ value, ip_versions = _value('bridge-ip-versions', entries), {}
+
+ if value:
+ for entry in value.split(','):
+ if '=' not in entry:
+ raise stem.ProtocolError("The bridge-ip-versions should be a comma separated listing of '<protocol>=<count>' mappings: bridge-ip-versions %s" % value)
+
+ protocol, count = entry.split('=', 1)
+
+ if not count.isdigit():
+ raise stem.ProtocolError('IP protocol count was non-numeric (%s): bridge-ip-versions %s' % (count, value))
+
+ ip_versions[protocol] = int(count)
+
+ descriptor.ip_versions = ip_versions
+
+
+def _parse_bridge_ip_transports_line(descriptor, entries):
+ value, ip_transports = _value('bridge-ip-transports', entries), {}
+
+ if value:
+ for entry in value.split(','):
+ if '=' not in entry:
+ raise stem.ProtocolError("The bridge-ip-transports should be a comma separated listing of '<protocol>=<count>' mappings: bridge-ip-transports %s" % value)
+
+ protocol, count = entry.split('=', 1)
+
+ if not count.isdigit():
+ raise stem.ProtocolError('Transport count was non-numeric (%s): bridge-ip-transports %s' % (count, value))
+
+ ip_transports[protocol] = int(count)
+
+ descriptor.ip_transports = ip_transports
+
+
+_parse_dirreq_v2_resp_line = functools.partial(_parse_dirreq_line, 'dirreq-v2-resp', 'dir_v2_responses', 'dir_v2_responses_unknown')
+_parse_dirreq_v3_resp_line = functools.partial(_parse_dirreq_line, 'dirreq-v3-resp', 'dir_v3_responses', 'dir_v3_responses_unknown')
+_parse_dirreq_v2_direct_dl_line = functools.partial(_parse_dirreq_line, 'dirreq-v2-direct-dl', 'dir_v2_direct_dl', 'dir_v2_direct_dl_unknown')
+_parse_dirreq_v3_direct_dl_line = functools.partial(_parse_dirreq_line, 'dirreq-v3-direct-dl', 'dir_v3_direct_dl', 'dir_v3_direct_dl_unknown')
+_parse_dirreq_v2_tunneled_dl_line = functools.partial(_parse_dirreq_line, 'dirreq-v2-tunneled-dl', 'dir_v2_tunneled_dl', 'dir_v2_tunneled_dl_unknown')
+_parse_dirreq_v3_tunneled_dl_line = functools.partial(_parse_dirreq_line, 'dirreq-v3-tunneled-dl', 'dir_v3_tunneled_dl', 'dir_v3_tunneled_dl_unknown')
+_parse_dirreq_v2_share_line = functools.partial(_parse_dirreq_share_line, 'dirreq-v2-share', 'dir_v2_share')
+_parse_dirreq_v3_share_line = functools.partial(_parse_dirreq_share_line, 'dirreq-v3-share', 'dir_v3_share')
+_parse_cell_processed_cells_line = functools.partial(_parse_cell_line, 'cell-processed-cells', 'cell_processed_cells')
+_parse_cell_queued_cells_line = functools.partial(_parse_cell_line, 'cell-queued-cells', 'cell_queued_cells')
+_parse_cell_time_in_queue_line = functools.partial(_parse_cell_line, 'cell-time-in-queue', 'cell_time_in_queue')
+_parse_published_line = functools.partial(_parse_timestamp_line, 'published', 'published')
+_parse_geoip_start_time_line = functools.partial(_parse_timestamp_line, 'geoip-start-time', 'geoip_start_time')
+_parse_cell_stats_end_line = functools.partial(_parse_timestamp_and_interval_line, 'cell-stats-end', 'cell_stats_end', 'cell_stats_interval')
+_parse_entry_stats_end_line = functools.partial(_parse_timestamp_and_interval_line, 'entry-stats-end', 'entry_stats_end', 'entry_stats_interval')
+_parse_exit_stats_end_line = functools.partial(_parse_timestamp_and_interval_line, 'exit-stats-end', 'exit_stats_end', 'exit_stats_interval')
+_parse_bridge_stats_end_line = functools.partial(_parse_timestamp_and_interval_line, 'bridge-stats-end', 'bridge_stats_end', 'bridge_stats_interval')
+_parse_dirreq_stats_end_line = functools.partial(_parse_timestamp_and_interval_line, 'dirreq-stats-end', 'dir_stats_end', 'dir_stats_interval')
+_parse_read_history_line = functools.partial(_parse_history_line, 'read-history', 'read_history_end', 'read_history_interval', 'read_history_values')
+_parse_write_history_line = functools.partial(_parse_history_line, 'write-history', 'write_history_end', 'write_history_interval', 'write_history_values')
+_parse_dirreq_read_history_line = functools.partial(_parse_history_line, 'dirreq-read-history', 'dir_read_history_end', 'dir_read_history_interval', 'dir_read_history_values')
+_parse_dirreq_write_history_line = functools.partial(_parse_history_line, 'dirreq-write-history', 'dir_write_history_end', 'dir_write_history_interval', 'dir_write_history_values')
+_parse_exit_kibibytes_written_line = functools.partial(_parse_port_count_line, 'exit-kibibytes-written', 'exit_kibibytes_written')
+_parse_exit_kibibytes_read_line = functools.partial(_parse_port_count_line, 'exit-kibibytes-read', 'exit_kibibytes_read')
+_parse_exit_streams_opened_line = functools.partial(_parse_port_count_line, 'exit-streams-opened', 'exit_streams_opened')
+_parse_dirreq_v2_ips_line = functools.partial(_parse_geoip_to_count_line, 'dirreq-v2-ips', 'dir_v2_ips')
+_parse_dirreq_v3_ips_line = functools.partial(_parse_geoip_to_count_line, 'dirreq-v3-ips', 'dir_v3_ips')
+_parse_dirreq_v2_reqs_line = functools.partial(_parse_geoip_to_count_line, 'dirreq-v2-reqs', 'dir_v2_requests')
+_parse_dirreq_v3_reqs_line = functools.partial(_parse_geoip_to_count_line, 'dirreq-v3-reqs', 'dir_v3_requests')
+_parse_geoip_client_origins_line = functools.partial(_parse_geoip_to_count_line, 'geoip-client-origins', 'geoip_client_origins')
+_parse_entry_ips_line = functools.partial(_parse_geoip_to_count_line, 'entry-ips', 'entry_ips')
+_parse_bridge_ips_line = functools.partial(_parse_geoip_to_count_line, 'bridge-ips', 'bridge_ips')
+
+
class ExtraInfoDescriptor(Descriptor):
"""
Extra-info descriptor document.
@@ -465,376 +809,94 @@ class ExtraInfoDescriptor(Descriptor):
value, _, _ = values[0]
line = '%s %s' % (keyword, value) # original line
- if keyword == 'extra-info':
- # "extra-info" Nickname Fingerprint
- extra_info_comp = value.split()
-
- if len(extra_info_comp) < 2:
- if not validate:
- continue
-
- raise ValueError('Extra-info line must have two values: %s' % line)
-
- if validate:
- if not stem.util.tor_tools.is_valid_nickname(extra_info_comp[0]):
- raise ValueError("Extra-info line entry isn't a valid nickname: %s" % extra_info_comp[0])
- elif not stem.util.tor_tools.is_valid_fingerprint(extra_info_comp[1]):
- raise ValueError('Tor relay fingerprints consist of forty hex digits: %s' % extra_info_comp[1])
-
- self.nickname = extra_info_comp[0]
- self.fingerprint = extra_info_comp[1]
- elif keyword == 'geoip-db-digest':
- # "geoip-db-digest" Digest
-
- if validate and not stem.util.tor_tools.is_hex_digits(value, 40):
- raise ValueError('Geoip digest line had an invalid sha1 digest: %s' % line)
-
- self.geoip_db_digest = value
- elif keyword == 'geoip6-db-digest':
- # "geoip6-db-digest" Digest
-
- if validate and not stem.util.tor_tools.is_hex_digits(value, 40):
- raise ValueError('Geoip v6 digest line had an invalid sha1 digest: %s' % line)
-
- self.geoip6_db_digest = value
- elif keyword == 'transport':
- # "transport" transportname address:port [arglist]
- # Everything after the transportname is scrubbed in published bridge
- # descriptors, so we'll never see it in practice.
- #
- # These entries really only make sense for bridges, but have been seen
- # on non-bridges in the wild when the relay operator configured it this
- # way.
-
- for transport_value, _, _ in values:
- name, address, port, args = None, None, None, None
-
- if ' ' not in transport_value:
- # scrubbed
- name = transport_value
- else:
- # not scrubbed
- value_comp = transport_value.split()
-
- if len(value_comp) < 1:
- raise ValueError('Transport line is missing its transport name: %s' % line)
- else:
- name = value_comp[0]
-
- if len(value_comp) < 2:
- raise ValueError('Transport line is missing its address:port value: %s' % line)
- elif ':' not in value_comp[1]:
- raise ValueError("Transport line's address:port entry is missing a colon: %s" % line)
- else:
- address, port_str = value_comp[1].split(':', 1)
-
- if not stem.util.connection.is_valid_ipv4_address(address) or \
- stem.util.connection.is_valid_ipv6_address(address):
- raise ValueError('Transport line has a malformed address: %s' % line)
- elif not stem.util.connection.is_valid_port(port_str):
- raise ValueError('Transport line has a malformed port: %s' % line)
-
- port = int(port_str)
-
- if len(value_comp) >= 3:
- args = value_comp[2:]
- else:
- args = []
-
- self.transport[name] = (address, port, args)
- elif keyword == 'cell-circuits-per-decile':
- # "cell-circuits-per-decile" num
-
- if not value.isdigit():
- if validate:
- raise ValueError('Non-numeric cell-circuits-per-decile value: %s' % line)
- else:
- continue
-
- stat = int(value)
-
- if validate and stat < 0:
- raise ValueError('Negative cell-circuits-per-decile value: %s' % line)
-
- self.cell_circuits_per_decile = stat
- elif keyword in ('dirreq-v2-resp', 'dirreq-v3-resp', 'dirreq-v2-direct-dl', 'dirreq-v3-direct-dl', 'dirreq-v2-tunneled-dl', 'dirreq-v3-tunneled-dl'):
- recognized_counts = {}
- unrecognized_counts = {}
-
- is_response_stats = keyword in ('dirreq-v2-resp', 'dirreq-v3-resp')
- key_set = DirResponse if is_response_stats else DirStat
-
- key_type = 'STATUS' if is_response_stats else 'STAT'
- error_msg = '%s lines should contain %s=COUNT mappings: %s' % (keyword, key_type, line)
-
- if value:
- for entry in value.split(','):
- if '=' not in entry:
- if validate:
- raise ValueError(error_msg)
- else:
- continue
-
- status, count = entry.split('=', 1)
-
- if count.isdigit():
- if status in key_set:
- recognized_counts[status] = int(count)
- else:
- unrecognized_counts[status] = int(count)
- elif validate:
- raise ValueError(error_msg)
-
- if keyword == 'dirreq-v2-resp':
- self.dir_v2_responses = recognized_counts
- self.dir_v2_responses_unknown = unrecognized_counts
+ try:
+ if keyword == 'extra-info':
+ _parse_extra_info_line(self, entries)
+ elif keyword == 'geoip-db-digest':
+ _parse_geoip_db_digest_line(self, entries)
+ elif keyword == 'geoip6-db-digest':
+ _parse_geoip6_db_digest_line(self, entries)
+ elif keyword == 'transport':
+ _parse_transport_line(self, entries)
+ elif keyword == 'cell-circuits-per-decile':
+ _parse_cell_circuits_per_decline_line(self, entries)
+ elif keyword == 'dirreq-v2-resp':
+ _parse_dirreq_v2_resp_line(self, entries)
elif keyword == 'dirreq-v3-resp':
- self.dir_v3_responses = recognized_counts
- self.dir_v3_responses_unknown = unrecognized_counts
+ _parse_dirreq_v3_resp_line(self, entries)
elif keyword == 'dirreq-v2-direct-dl':
- self.dir_v2_direct_dl = recognized_counts
- self.dir_v2_direct_dl_unknown = unrecognized_counts
+ _parse_dirreq_v2_direct_dl_line(self, entries)
elif keyword == 'dirreq-v3-direct-dl':
- self.dir_v3_direct_dl = recognized_counts
- self.dir_v3_direct_dl_unknown = unrecognized_counts
+ _parse_dirreq_v3_direct_dl_line(self, entries)
elif keyword == 'dirreq-v2-tunneled-dl':
- self.dir_v2_tunneled_dl = recognized_counts
- self.dir_v2_tunneled_dl_unknown = unrecognized_counts
+ _parse_dirreq_v2_tunneled_dl_line(self, entries)
elif keyword == 'dirreq-v3-tunneled-dl':
- self.dir_v3_tunneled_dl = recognized_counts
- self.dir_v3_tunneled_dl_unknown = unrecognized_counts
- elif keyword in ('dirreq-v2-share', 'dirreq-v3-share'):
- # "<keyword>" num%
-
- try:
- if not value.endswith('%'):
- raise ValueError()
-
- percentage = float(value[:-1]) / 100
-
- # Bug lets these be above 100%, however they're soon going away...
- # https://lists.torproject.org/pipermail/tor-dev/2012-June/003679.html
-
- if validate and percentage < 0:
- raise ValueError('Negative percentage value: %s' % line)
-
- if keyword == 'dirreq-v2-share':
- self.dir_v2_share = percentage
- elif keyword == 'dirreq-v3-share':
- self.dir_v3_share = percentage
- except ValueError as exc:
- if validate:
- raise ValueError("Value can't be parsed as a percentage: %s" % line)
- elif keyword in ('cell-processed-cells', 'cell-queued-cells', 'cell-time-in-queue'):
- # "<keyword>" num,...,num
-
- entries = []
-
- if value:
- for entry in value.split(','):
- try:
- # Values should be positive but as discussed in ticket #5849
- # there was a bug around this. It was fixed in tor 0.2.2.1.
-
- entries.append(float(entry))
- except ValueError:
- if validate:
- raise ValueError('Non-numeric entry in %s listing: %s' % (keyword, line))
-
- if keyword == 'cell-processed-cells':
- self.cell_processed_cells = entries
+ _parse_dirreq_v3_tunneled_dl_line(self, entries)
+ elif keyword == 'dirreq-v2-share':
+ _parse_dirreq_v2_share_line(self, entries)
+ elif keyword == 'dirreq-v3-share':
+ _parse_dirreq_v3_share_line(self, entries)
+ elif keyword == 'cell-processed-cells':
+ _parse_cell_processed_cells_line(self, entries)
elif keyword == 'cell-queued-cells':
- self.cell_queued_cells = entries
+ _parse_cell_queued_cells_line(self, entries)
elif keyword == 'cell-time-in-queue':
- self.cell_time_in_queue = entries
- elif keyword in ('published', 'geoip-start-time'):
- # "<keyword>" YYYY-MM-DD HH:MM:SS
-
- try:
- timestamp = stem.util.str_tools._parse_timestamp(value)
-
- if keyword == 'published':
- self.published = timestamp
- elif keyword == 'geoip-start-time':
- self.geoip_start_time = timestamp
- except ValueError:
- if validate:
- raise ValueError("Timestamp on %s line wasn't parsable: %s" % (keyword, line))
- elif keyword in ('cell-stats-end', 'entry-stats-end', 'exit-stats-end', 'bridge-stats-end', 'dirreq-stats-end'):
- # "<keyword>" YYYY-MM-DD HH:MM:SS (NSEC s)
-
- try:
- timestamp, interval, _ = _parse_timestamp_and_interval(keyword, value)
-
- if keyword == 'cell-stats-end':
- self.cell_stats_end = timestamp
- self.cell_stats_interval = interval
- elif keyword == 'entry-stats-end':
- self.entry_stats_end = timestamp
- self.entry_stats_interval = interval
- elif keyword == 'exit-stats-end':
- self.exit_stats_end = timestamp
- self.exit_stats_interval = interval
- elif keyword == 'bridge-stats-end':
- self.bridge_stats_end = timestamp
- self.bridge_stats_interval = interval
- elif keyword == 'dirreq-stats-end':
- self.dir_stats_end = timestamp
- self.dir_stats_interval = interval
- except ValueError as exc:
- if validate:
- raise exc
- elif keyword == 'conn-bi-direct':
- # "conn-bi-direct" YYYY-MM-DD HH:MM:SS (NSEC s) BELOW,READ,WRITE,BOTH
-
- try:
- timestamp, interval, remainder = _parse_timestamp_and_interval(keyword, value)
- stats = remainder.split(',')
-
- if len(stats) != 4 or not \
- (stats[0].isdigit() and stats[1].isdigit() and stats[2].isdigit() and stats[3].isdigit()):
- raise ValueError('conn-bi-direct line should end with four numeric values: %s' % line)
-
- self.conn_bi_direct_end = timestamp
- self.conn_bi_direct_interval = interval
- self.conn_bi_direct_below = int(stats[0])
- self.conn_bi_direct_read = int(stats[1])
- self.conn_bi_direct_write = int(stats[2])
- self.conn_bi_direct_both = int(stats[3])
- except ValueError as exc:
- if validate:
- raise exc
- elif keyword in ('read-history', 'write-history', 'dirreq-read-history', 'dirreq-write-history'):
- # "<keyword>" YYYY-MM-DD HH:MM:SS (NSEC s) NUM,NUM,NUM,NUM,NUM...
- try:
- timestamp, interval, remainder = _parse_timestamp_and_interval(keyword, value)
- history_values = []
-
- if remainder:
- try:
- history_values = [int(entry) for entry in remainder.split(",")]
- except ValueError:
- raise ValueError('%s line has non-numeric values: %s' % (keyword, line))
-
- if keyword == 'read-history':
- self.read_history_end = timestamp
- self.read_history_interval = interval
- self.read_history_values = history_values
- elif keyword == 'write-history':
- self.write_history_end = timestamp
- self.write_history_interval = interval
- self.write_history_values = history_values
- elif keyword == 'dirreq-read-history':
- self.dir_read_history_end = timestamp
- self.dir_read_history_interval = interval
- self.dir_read_history_values = history_values
- elif keyword == 'dirreq-write-history':
- self.dir_write_history_end = timestamp
- self.dir_write_history_interval = interval
- self.dir_write_history_values = history_values
- except ValueError as exc:
- if validate:
- raise exc
- elif keyword in ('exit-kibibytes-written', 'exit-kibibytes-read', 'exit-streams-opened'):
- # "<keyword>" port=N,port=N,...
-
- port_mappings = {}
- error_msg = 'Entries in %s line should only be PORT=N entries: %s' % (keyword, line)
-
- if value:
- for entry in value.split(','):
- if '=' not in entry:
- if validate:
- raise ValueError(error_msg)
- else:
- continue
-
- port, stat = entry.split('=', 1)
-
- if (port == 'other' or stem.util.connection.is_valid_port(port)) and stat.isdigit():
- if port != 'other':
- port = int(port)
- port_mappings[port] = int(stat)
- elif validate:
- raise ValueError(error_msg)
-
- if keyword == 'exit-kibibytes-written':
- self.exit_kibibytes_written = port_mappings
+ _parse_cell_time_in_queue_line(self, entries)
+ elif keyword == 'published':
+ _parse_published_line(self, entries)
+ elif keyword == 'geoip-start-time':
+ _parse_geoip_start_time_line(self, entries)
+ elif keyword == 'cell-stats-end':
+ _parse_cell_stats_end_line(self, entries)
+ elif keyword == 'entry-stats-end':
+ _parse_entry_stats_end_line(self, entries)
+ elif keyword == 'exit-stats-end':
+ _parse_exit_stats_end_line(self, entries)
+ elif keyword == 'bridge-stats-end':
+ _parse_bridge_stats_end_line(self, entries)
+ elif keyword == 'dirreq-stats-end':
+ _parse_dirreq_stats_end_line(self, entries)
+ elif keyword == 'conn-bi-direct':
+ _parse_conn_bi_direct_line(self, entries)
+ elif keyword == 'read-history':
+ _parse_read_history_line(self, entries)
+ elif keyword == 'write-history':
+ _parse_write_history_line(self, entries)
+ elif keyword == 'dirreq-read-history':
+ _parse_dirreq_read_history_line(self, entries)
+ elif keyword == 'dirreq-write-history':
+ _parse_dirreq_write_history_line(self, entries)
+ elif keyword == 'exit-kibibytes-written':
+ _parse_exit_kibibytes_written_line(self, entries)
elif keyword == 'exit-kibibytes-read':
- self.exit_kibibytes_read = port_mappings
+ _parse_exit_kibibytes_read_line(self, entries)
elif keyword == 'exit-streams-opened':
- self.exit_streams_opened = port_mappings
- elif keyword in ('dirreq-v2-ips', 'dirreq-v3-ips', 'dirreq-v2-reqs', 'dirreq-v3-reqs', 'geoip-client-origins', 'entry-ips', 'bridge-ips'):
- # "<keyword>" CC=N,CC=N,...
- #
- # The maxmind geoip (https://www.maxmind.com/app/iso3166) has numeric
- # locale codes for some special values, for instance...
- # A1,"Anonymous Proxy"
- # A2,"Satellite Provider"
- # ??,"Unknown"
-
- locale_usage = {}
- error_msg = 'Entries in %s line should only be CC=N entries: %s' % (keyword, line)
-
- if value:
- for entry in value.split(','):
- if '=' not in entry:
- if validate:
- raise ValueError(error_msg)
- else:
- continue
-
- locale, count = entry.split('=', 1)
-
- if _locale_re.match(locale) and count.isdigit():
- locale_usage[locale] = int(count)
- elif validate:
- raise ValueError(error_msg)
-
- if keyword == 'dirreq-v2-ips':
- self.dir_v2_ips = locale_usage
+ _parse_exit_streams_opened_line(self, entries)
+ elif keyword == 'dirreq-v2-ips':
+ _parse_dirreq_v2_ips_line(self, entries)
elif keyword == 'dirreq-v3-ips':
- self.dir_v3_ips = locale_usage
+ _parse_dirreq_v3_ips_line(self, entries)
elif keyword == 'dirreq-v2-reqs':
- self.dir_v2_requests = locale_usage
+ _parse_dirreq_v2_reqs_line(self, entries)
elif keyword == 'dirreq-v3-reqs':
- self.dir_v3_requests = locale_usage
+ _parse_dirreq_v3_reqs_line(self, entries)
elif keyword == 'geoip-client-origins':
- self.geoip_client_origins = locale_usage
+ _parse_geoip_client_origins_line(self, entries)
elif keyword == 'entry-ips':
- self.entry_ips = locale_usage
+ _parse_entry_ips_line(self, entries)
elif keyword == 'bridge-ips':
- self.bridge_ips = locale_usage
- elif keyword == 'bridge-ip-versions':
- self.ip_versions = {}
-
- if value:
- for entry in value.split(','):
- if '=' not in entry:
- raise stem.ProtocolError("The bridge-ip-versions should be a comma separated listing of '<protocol>=<count>' mappings: %s" % line)
-
- protocol, count = entry.split('=', 1)
-
- if not count.isdigit():
- raise stem.ProtocolError('IP protocol count was non-numeric (%s): %s' % (count, line))
-
- self.ip_versions[protocol] = int(count)
- elif keyword == 'bridge-ip-transports':
- self.ip_transports = {}
-
- if value:
- for entry in value.split(','):
- if '=' not in entry:
- raise stem.ProtocolError("The bridge-ip-transports should be a comma separated listing of '<protocol>=<count>' mappings: %s" % line)
-
- protocol, count = entry.split('=', 1)
-
- if not count.isdigit():
- raise stem.ProtocolError('Transport count was non-numeric (%s): %s' % (count, line))
-
- self.ip_transports[protocol] = int(count)
- else:
- self._unrecognized_lines.append(line)
+ _parse_bridge_ips_line(self, entries)
+ elif keyword == 'bridge-ip-versions':
+ _parse_bridge_ip_versions_line(self, entries)
+ elif keyword == 'bridge-ip-transports':
+ _parse_bridge_ip_transports_line(self, entries)
+ else:
+ self._unrecognized_lines.append(line)
+ except ValueError as exc:
+ if validate:
+ raise exc
+ else:
+ continue
def digest(self):
"""
diff --git a/test/unit/descriptor/extrainfo_descriptor.py b/test/unit/descriptor/extrainfo_descriptor.py
index 7e67019..525ff06 100644
--- a/test/unit/descriptor/extrainfo_descriptor.py
+++ b/test/unit/descriptor/extrainfo_descriptor.py
@@ -200,10 +200,10 @@ k0d2aofcVbHr4fPQOSST0LXDrhFl5Fqo5um296zpJGvRUeO6S44U/EfJAGShtqWw
for entry in test_entries:
desc_text = get_relay_extrainfo_descriptor({'geoip-db-digest': entry}, content = True)
- self._expect_invalid_attr(desc_text, 'geoip_db_digest', entry)
+ self._expect_invalid_attr(desc_text, 'geoip_db_digest')
desc_text = get_relay_extrainfo_descriptor({'geoip6-db-digest': entry}, content = True)
- self._expect_invalid_attr(desc_text, 'geoip6_db_digest', entry)
+ self._expect_invalid_attr(desc_text, 'geoip6_db_digest')
def test_cell_circuits_per_decile(self):
"""
@@ -257,8 +257,8 @@ k0d2aofcVbHr4fPQOSST0LXDrhFl5Fqo5um296zpJGvRUeO6S44U/EfJAGShtqWw
for entry in test_entries:
desc_text = get_relay_extrainfo_descriptor({keyword: entry}, content = True)
desc = self._expect_invalid_attr(desc_text)
- self.assertEqual({}, getattr(desc, attr))
- self.assertEqual({}, getattr(desc, unknown_attr))
+ self.assertEqual(None, getattr(desc, attr))
+ self.assertEqual(None, getattr(desc, unknown_attr))
def test_dir_stat_lines(self):
"""
@@ -299,8 +299,8 @@ k0d2aofcVbHr4fPQOSST0LXDrhFl5Fqo5um296zpJGvRUeO6S44U/EfJAGShtqWw
for entry in test_entries:
desc_text = get_relay_extrainfo_descriptor({keyword: entry}, content = True)
desc = self._expect_invalid_attr(desc_text)
- self.assertEqual({}, getattr(desc, attr))
- self.assertEqual({}, getattr(desc, unknown_attr))
+ self.assertEqual(None, getattr(desc, attr))
+ self.assertEqual(None, getattr(desc, unknown_attr))
def test_conn_bi_direct(self):
"""
@@ -360,15 +360,15 @@ k0d2aofcVbHr4fPQOSST0LXDrhFl5Fqo5um296zpJGvRUeO6S44U/EfJAGShtqWw
self.assertEqual(expected_value, getattr(desc, attr))
test_entries = (
- ('', None),
- (' ', None),
- ('100', None),
- ('-5%', -0.05),
+ (''),
+ (' '),
+ ('100'),
+ ('-5%'),
)
- for entry, expected in test_entries:
+ for entry in test_entries:
desc_text = get_relay_extrainfo_descriptor({keyword: entry}, content = True)
- self._expect_invalid_attr(desc_text, attr, expected)
+ self._expect_invalid_attr(desc_text, attr)
def test_number_list_lines(self):
"""
@@ -525,7 +525,7 @@ k0d2aofcVbHr4fPQOSST0LXDrhFl5Fqo5um296zpJGvRUeO6S44U/EfJAGShtqWw
for entry in test_entries:
desc_text = get_relay_extrainfo_descriptor({keyword: entry}, content = True)
- self._expect_invalid_attr(desc_text, attr, {})
+ self._expect_invalid_attr(desc_text, attr)
def test_locale_mapping_lines(self):
"""
@@ -554,7 +554,7 @@ k0d2aofcVbHr4fPQOSST0LXDrhFl5Fqo5um296zpJGvRUeO6S44U/EfJAGShtqWw
for entry in test_entries:
desc_text = get_relay_extrainfo_descriptor({keyword: entry}, content = True)
- self._expect_invalid_attr(desc_text, attr, {})
+ self._expect_invalid_attr(desc_text, attr)
def test_minimal_bridge_descriptor(self):
"""
commit d2093ec0d8ac1fce5550f22dc87afcb33adb2bb5
Author: Damian Johnson <atagar(a)torproject.org>
Date: Sun Jan 11 14:22:00 2015 -0800
Move trivial parsers to lambdas
This lets us have parsing functions for all attributes, which simplifies things a bit.
---
stem/descriptor/server_descriptor.py | 58 ++++++++++++++--------------------
1 file changed, 24 insertions(+), 34 deletions(-)
diff --git a/stem/descriptor/server_descriptor.py b/stem/descriptor/server_descriptor.py
index c2451bb..7ecc9c3 100644
--- a/stem/descriptor/server_descriptor.py
+++ b/stem/descriptor/server_descriptor.py
@@ -396,6 +396,13 @@ def _parse_history_line(descriptor, entries, keyword):
return timestamp, interval, history_values
+_parse_ipv6_policy_line = lambda descriptor, entries: setattr(descriptor, 'exit_policy_v6', stem.exit_policy.MicroExitPolicy(_value('ipv6-policy', entries)))
+_parse_allow_single_hop_exits_line = lambda descriptor, entries: setattr(descriptor, 'allow_single_hop_exits', True)
+_parse_caches_extra_info_line = lambda descriptor, entries: setattr(descriptor, 'extra_info_cache', True)
+_parse_family_line = lambda descriptor, entries: setattr(descriptor, 'family', set(_value('family', entries).split(' ')))
+_parse_eventdns_line = lambda descriptor, entries: setattr(descriptor, 'eventdns', _value('eventdns', entries) == '1')
+
+
SERVER_DESCRIPTOR_ATTRIBUTES = {
'nickname': (None, _parse_router_line),
'fingerprint': (None, _parse_fingerprint_line),
@@ -409,8 +416,8 @@ SERVER_DESCRIPTOR_ATTRIBUTES = {
'tor_version': (None, _parse_platform_line),
'operating_system': (None, _parse_platform_line),
'uptime': (None, _parse_uptime_line),
- 'exit_policy_v6': (DEFAULT_IPV6_EXIT_POLICY, None),
- 'family': (set(), None),
+ 'exit_policy_v6': (DEFAULT_IPV6_EXIT_POLICY, _parse_ipv6_policy_line),
+ 'family': (set(), _parse_family_line),
'average_bandwidth': (None, _parse_bandwidth_line),
'burst_bandwidth': (None, _parse_bandwidth_line),
@@ -419,11 +426,11 @@ SERVER_DESCRIPTOR_ATTRIBUTES = {
'link_protocols': (None, _parse_protocols_line),
'circuit_protocols': (None, _parse_protocols_line),
'hibernating': (False, _parse_hibernating_line),
- 'allow_single_hop_exits': (False, None),
- 'extra_info_cache': (False, None),
+ 'allow_single_hop_exits': (False, _parse_allow_single_hop_exits_line),
+ 'extra_info_cache': (False, _parse_caches_extra_info_line),
'extra_info_digest': (None, _parse_extrainfo_digest_line),
'hidden_service_dir': (None, _parse_hidden_service_dir_line),
- 'eventdns': (None, None),
+ 'eventdns': (None, _parse_eventdns_line),
'or_addresses': ([], _parse_or_address_line),
'read_history_end': (None, _parse_read_history_line),
@@ -450,6 +457,11 @@ PARSER_FOR_LINE = {
'or-address': _parse_or_address_line,
'read-history': _parse_read_history_line,
'write-history': _parse_write_history_line,
+ 'ipv6-policy': _parse_ipv6_policy_line,
+ 'allow-single-hop-exits': _parse_allow_single_hop_exits_line,
+ 'caches-extra-info': _parse_caches_extra_info_line,
+ 'family': _parse_family_line,
+ 'eventdns': _parse_eventdns_line,
}
@@ -630,31 +642,19 @@ class ServerDescriptor(Descriptor):
setattr(self, attr, SERVER_DESCRIPTOR_ATTRIBUTES[attr][0])
for keyword, values in list(entries.items()):
- # most just work with the first (and only) value
- value, block_type, block_contents = values[0]
-
- line = '%s %s' % (keyword, value) # original line
-
- if block_contents:
- line += '\n%s' % block_contents
-
try:
if keyword in PARSER_FOR_LINE:
PARSER_FOR_LINE[keyword](self, entries)
- elif keyword == 'allow-single-hop-exits':
- self.allow_single_hop_exits = True
- elif keyword == 'caches-extra-info':
- self.extra_info_cache = True
elif keyword == 'contact':
pass # parsed as a bytes field earlier
- elif keyword == 'family':
- self.family = set(value.split(' '))
- elif keyword == 'eventdns':
- self.eventdns = value == '1'
- elif keyword == 'ipv6-policy':
- self.exit_policy_v6 = stem.exit_policy.MicroExitPolicy(value)
else:
- self._unrecognized_lines.append(line)
+ for value, block_type, block_contents in values:
+ line = '%s %s' % (keyword, value)
+
+ if block_contents:
+ line += '\n%s' % block_contents
+
+ self._unrecognized_lines.append(line)
except ValueError as exc:
if validate:
raise exc
@@ -719,16 +719,6 @@ class ServerDescriptor(Descriptor):
try:
if parsing_function:
parsing_function(self, self._entries)
- elif name == 'allow_single_hop_exits':
- self.allow_single_hop_exits = 'allow-single-hop-exits' in self._entries
- elif name == 'extra_info_cache':
- self.extra_info_cache = 'caches-extra-info' in self._entries
- elif name == 'family':
- self.family = set(self._entries['family'][0][0].split(' '))
- elif name == 'eventdns':
- self.eventdns = self._entries['eventdns'][0][0] == '1'
- elif name == 'exit_policy_v6':
- self.exit_policy_v6 = stem.exit_policy.MicroExitPolicy(self._entries['ipv6-policy'][0][0])
elif name == 'exit_policy':
if self._exit_policy_list == [str_type('reject *:*')]:
self.exit_policy = REJECT_ALL_POLICY
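For keywords whose handling is a one-liner, the parser can simply be a lambda that sets the attribute, reusing the same (descriptor, entries) signature as the full parsing functions. A minimal self-contained sketch of the idea; the _value() helper and FakeDescriptor below are simplified stand-ins, not stem's real ones:

    def _value(keyword, entries):
      # entries map each keyword to a list of (value, block_type, block_contents)
      return entries[keyword][0][0]

    _parse_eventdns_line = lambda desc, entries: setattr(desc, 'eventdns', _value('eventdns', entries) == '1')
    _parse_allow_single_hop_exits_line = lambda desc, entries: setattr(desc, 'allow_single_hop_exits', True)

    PARSER_FOR_LINE = {
      'eventdns': _parse_eventdns_line,
      'allow-single-hop-exits': _parse_allow_single_hop_exits_line,
    }

    class FakeDescriptor(object):
      pass

    desc = FakeDescriptor()
    entries = {'eventdns': [('1', None, None)], 'allow-single-hop-exits': [('', None, None)]}

    for keyword in entries:
      if keyword in PARSER_FOR_LINE:
        PARSER_FOR_LINE[keyword](desc, entries)

    print(desc.eventdns)                # True
    print(desc.allow_single_hop_exits)  # True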
commit 1f76d13dfee5599acceee536f4e63049178d6bb4
Author: Damian Johnson <atagar(a)torproject.org>
Date: Mon Jan 12 08:25:15 2015 -0800
Class constants for attributes and line parsers
Much better. This is where I originally wanted to go with this. Hopefully
this will allow us to later move _parse() and __getattr__() to the base
descriptor class.
---
stem/descriptor/server_descriptor.py | 198 +++++++++++++++-------------------
1 file changed, 88 insertions(+), 110 deletions(-)
diff --git a/stem/descriptor/server_descriptor.py b/stem/descriptor/server_descriptor.py
index 944c6c0..9f9535b 100644
--- a/stem/descriptor/server_descriptor.py
+++ b/stem/descriptor/server_descriptor.py
@@ -474,6 +474,66 @@ class ServerDescriptor(Descriptor):
a default value, others are left as **None** if undefined
"""
+ ATTRIBUTES = {
+ 'nickname': (None, _parse_router_line),
+ 'fingerprint': (None, _parse_fingerprint_line),
+ 'published': (None, _parse_published_line),
+
+ 'address': (None, _parse_router_line),
+ 'or_port': (None, _parse_router_line),
+ 'socks_port': (None, _parse_router_line),
+ 'dir_port': (None, _parse_router_line),
+
+ 'tor_version': (None, _parse_platform_line),
+ 'operating_system': (None, _parse_platform_line),
+ 'uptime': (None, _parse_uptime_line),
+ 'exit_policy_v6': (DEFAULT_IPV6_EXIT_POLICY, _parse_ipv6_policy_line),
+ 'family': (set(), _parse_family_line),
+
+ 'average_bandwidth': (None, _parse_bandwidth_line),
+ 'burst_bandwidth': (None, _parse_bandwidth_line),
+ 'observed_bandwidth': (None, _parse_bandwidth_line),
+
+ 'link_protocols': (None, _parse_protocols_line),
+ 'circuit_protocols': (None, _parse_protocols_line),
+ 'hibernating': (False, _parse_hibernating_line),
+ 'allow_single_hop_exits': (False, _parse_allow_single_hop_exits_line),
+ 'extra_info_cache': (False, _parse_caches_extra_info_line),
+ 'extra_info_digest': (None, _parse_extrainfo_digest_line),
+ 'hidden_service_dir': (None, _parse_hidden_service_dir_line),
+ 'eventdns': (None, _parse_eventdns_line),
+ 'or_addresses': ([], _parse_or_address_line),
+
+ 'read_history_end': (None, _parse_read_history_line),
+ 'read_history_interval': (None, _parse_read_history_line),
+ 'read_history_values': (None, _parse_read_history_line),
+
+ 'write_history_end': (None, _parse_write_history_line),
+ 'write_history_interval': (None, _parse_write_history_line),
+ 'write_history_values': (None, _parse_write_history_line),
+ }
+
+ PARSER_FOR_LINE = {
+ 'router': _parse_router_line,
+ 'bandwidth': _parse_bandwidth_line,
+ 'platform': _parse_platform_line,
+ 'published': _parse_published_line,
+ 'fingerprint': _parse_fingerprint_line,
+ 'hibernating': _parse_hibernating_line,
+ 'extra-info-digest': _parse_extrainfo_digest_line,
+ 'hidden-service-dir': _parse_hidden_service_dir_line,
+ 'uptime': _parse_uptime_line,
+ 'protocols': _parse_protocols_line,
+ 'or-address': _parse_or_address_line,
+ 'read-history': _parse_read_history_line,
+ 'write-history': _parse_write_history_line,
+ 'ipv6-policy': _parse_ipv6_policy_line,
+ 'allow-single-hop-exits': _parse_allow_single_hop_exits_line,
+ 'caches-extra-info': _parse_caches_extra_info_line,
+ 'family': _parse_family_line,
+ 'eventdns': _parse_eventdns_line,
+ }
+
def __init__(self, raw_contents, validate = True, annotations = None):
"""
Server descriptor constructor, created from an individual relay's
@@ -596,13 +656,13 @@ class ServerDescriptor(Descriptor):
# set defaults
- for attr in self._attributes():
- setattr(self, attr, self._attributes()[attr][0])
+ for attr in self.ATTRIBUTES:
+ setattr(self, attr, self.ATTRIBUTES[attr][0])
for keyword, values in list(entries.items()):
try:
- if keyword in self._parser_for_line():
- self._parser_for_line()[keyword](self, entries)
+ if keyword in self.PARSER_FOR_LINE:
+ self.PARSER_FOR_LINE[keyword](self, entries)
elif keyword == 'contact':
pass # parsed as a bytes field earlier
else:
@@ -668,85 +728,11 @@ class ServerDescriptor(Descriptor):
def _last_keyword(self):
return 'router-signature'
- @lru_cache()
- def _attributes(self):
- """
- Provides a mapping of attributes we should have...
-
- attrubute => (default_value, parsing_function)
- """
-
- return {
- 'nickname': (None, _parse_router_line),
- 'fingerprint': (None, _parse_fingerprint_line),
- 'published': (None, _parse_published_line),
-
- 'address': (None, _parse_router_line),
- 'or_port': (None, _parse_router_line),
- 'socks_port': (None, _parse_router_line),
- 'dir_port': (None, _parse_router_line),
-
- 'tor_version': (None, _parse_platform_line),
- 'operating_system': (None, _parse_platform_line),
- 'uptime': (None, _parse_uptime_line),
- 'exit_policy_v6': (DEFAULT_IPV6_EXIT_POLICY, _parse_ipv6_policy_line),
- 'family': (set(), _parse_family_line),
-
- 'average_bandwidth': (None, _parse_bandwidth_line),
- 'burst_bandwidth': (None, _parse_bandwidth_line),
- 'observed_bandwidth': (None, _parse_bandwidth_line),
-
- 'link_protocols': (None, _parse_protocols_line),
- 'circuit_protocols': (None, _parse_protocols_line),
- 'hibernating': (False, _parse_hibernating_line),
- 'allow_single_hop_exits': (False, _parse_allow_single_hop_exits_line),
- 'extra_info_cache': (False, _parse_caches_extra_info_line),
- 'extra_info_digest': (None, _parse_extrainfo_digest_line),
- 'hidden_service_dir': (None, _parse_hidden_service_dir_line),
- 'eventdns': (None, _parse_eventdns_line),
- 'or_addresses': ([], _parse_or_address_line),
-
- 'read_history_end': (None, _parse_read_history_line),
- 'read_history_interval': (None, _parse_read_history_line),
- 'read_history_values': (None, _parse_read_history_line),
-
- 'write_history_end': (None, _parse_write_history_line),
- 'write_history_interval': (None, _parse_write_history_line),
- 'write_history_values': (None, _parse_write_history_line),
- }
-
- @lru_cache()
- def _parser_for_line(self):
- """
- Provides the parsing function for the line with a given keyword.
- """
-
- return {
- 'router': _parse_router_line,
- 'bandwidth': _parse_bandwidth_line,
- 'platform': _parse_platform_line,
- 'published': _parse_published_line,
- 'fingerprint': _parse_fingerprint_line,
- 'hibernating': _parse_hibernating_line,
- 'extra-info-digest': _parse_extrainfo_digest_line,
- 'hidden-service-dir': _parse_hidden_service_dir_line,
- 'uptime': _parse_uptime_line,
- 'protocols': _parse_protocols_line,
- 'or-address': _parse_or_address_line,
- 'read-history': _parse_read_history_line,
- 'write-history': _parse_write_history_line,
- 'ipv6-policy': _parse_ipv6_policy_line,
- 'allow-single-hop-exits': _parse_allow_single_hop_exits_line,
- 'caches-extra-info': _parse_caches_extra_info_line,
- 'family': _parse_family_line,
- 'eventdns': _parse_eventdns_line,
- }
-
def __getattr__(self, name):
# If attribute isn't already present we might be lazy loading it...
- if self._lazy_loading and name in self._attributes():
- default, parsing_function = self._attributes()[name]
+ if self._lazy_loading and name in self.ATTRIBUTES:
+ default, parsing_function = self.ATTRIBUTES[name]
try:
if parsing_function:
@@ -781,6 +767,20 @@ class RelayDescriptor(ServerDescriptor):
**\*** attribute is required when we're parsed with validation
"""
+ ATTRIBUTES = dict(ServerDescriptor.ATTRIBUTES, **{
+ 'onion_key': (None, _parse_onion_key_line),
+ 'ntor_onion_key': (None, _parse_ntor_onion_key_line),
+ 'signing_key': (None, _parse_signing_key_line),
+ 'signature': (None, _parse_router_signature_line),
+ })
+
+ PARSER_FOR_LINE = dict(ServerDescriptor.PARSER_FOR_LINE, **{
+ 'onion-key': _parse_onion_key_line,
+ 'ntor-onion-key': _parse_ntor_onion_key_line,
+ 'signing-key': _parse_signing_key_line,
+ 'router-signature': _parse_router_signature_line,
+ })
+
def __init__(self, raw_contents, validate = True, annotations = None):
super(RelayDescriptor, self).__init__(raw_contents, validate, annotations)
@@ -911,24 +911,6 @@ class RelayDescriptor(ServerDescriptor):
return method(str(self).strip(), str(other).strip())
- @lru_cache()
- def _attributes(self):
- return dict(super(RelayDescriptor, self)._attributes(), **{
- 'onion_key': (None, _parse_onion_key_line),
- 'ntor_onion_key': (None, _parse_ntor_onion_key_line),
- 'signing_key': (None, _parse_signing_key_line),
- 'signature': (None, _parse_router_signature_line),
- })
-
- @lru_cache()
- def _parser_for_line(self):
- return dict(super(RelayDescriptor, self)._parser_for_line(), **{
- 'onion-key': _parse_onion_key_line,
- 'ntor-onion-key': _parse_ntor_onion_key_line,
- 'signing-key': _parse_signing_key_line,
- 'router-signature': _parse_router_signature_line,
- })
-
def __hash__(self):
return hash(str(self).strip())
@@ -962,6 +944,14 @@ class BridgeDescriptor(ServerDescriptor):
<https://collector.torproject.org/formats.html#bridge-descriptors>`_)
"""
+ ATTRIBUTES = dict(ServerDescriptor.ATTRIBUTES, **{
+ '_digest': (None, _parse_router_digest_line),
+ })
+
+ PARSER_FOR_LINE = dict(ServerDescriptor.PARSER_FOR_LINE, **{
+ 'router-digest': _parse_router_digest_line,
+ })
+
def __init__(self, raw_contents, validate = True, annotations = None):
super(BridgeDescriptor, self).__init__(raw_contents, validate, annotations)
@@ -1037,18 +1027,6 @@ class BridgeDescriptor(ServerDescriptor):
def _last_keyword(self):
return None
- @lru_cache()
- def _attributes(self):
- return dict(super(BridgeDescriptor, self)._attributes(), **{
- '_digest': (None, _parse_router_digest_line),
- })
-
- @lru_cache()
- def _parser_for_line(self):
- return dict(super(BridgeDescriptor, self)._parser_for_line(), **{
- 'router-digest': _parse_router_digest_line,
- })
-
def _compare(self, other, method):
if not isinstance(other, BridgeDescriptor):
return False
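The pattern this lands on is two class level tables: ATTRIBUTES mapping each attribute to its default and parser, and PARSER_FOR_LINE mapping line keywords to parsers, with subclasses extending both via dict(Parent.TABLE, **{...}). A toy sketch of that layout, using illustrative class and parser names rather than the real ones:

    def _parse_nickname_line(desc, entries):
      desc.nickname = entries['nickname'][0][0]

    def _parse_signature_line(desc, entries):
      desc.signature = entries['router-signature'][0][0]

    class BaseDescriptor(object):
      ATTRIBUTES = {'nickname': (None, _parse_nickname_line)}  # attribute => (default, parser)
      PARSER_FOR_LINE = {'nickname': _parse_nickname_line}     # keyword => parser

    class RelayLikeDescriptor(BaseDescriptor):
      # subclasses merge the parent tables rather than redefining them

      ATTRIBUTES = dict(BaseDescriptor.ATTRIBUTES, **{
        'signature': (None, _parse_signature_line),
      })

      PARSER_FOR_LINE = dict(BaseDescriptor.PARSER_FOR_LINE, **{
        'router-signature': _parse_signature_line,
      })

    print(sorted(RelayLikeDescriptor.PARSER_FOR_LINE.keys()))  # ['nickname', 'router-signature']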
[stem/master] Move lazyloading functionality into the Descriptor class
by atagar@torproject.org 25 Jan '15
commit c904cf924980f5436208da2a1a0266d44302eab6
Author: Damian Johnson <atagar(a)torproject.org>
Date: Sat Jan 17 12:03:57 2015 -0800
Move lazyloading functionality into the Descriptor class
Shifting lazyloading methods to the Descriptor parent class so all subclasses
will be able to take advantage of it. Actually, this should let this whole
module become a little nicer and more succinct than when we started.
---
stem/descriptor/__init__.py | 61 +++++++++++++++++++++++-
stem/descriptor/extrainfo_descriptor.py | 35 +-------------
stem/descriptor/server_descriptor.py | 79 ++++++-------------------------
3 files changed, 76 insertions(+), 99 deletions(-)
diff --git a/stem/descriptor/__init__.py b/stem/descriptor/__init__.py
index 2221807..05a7d0d 100644
--- a/stem/descriptor/__init__.py
+++ b/stem/descriptor/__init__.py
@@ -315,10 +315,15 @@ class Descriptor(object):
Common parent for all types of descriptors.
"""
+ ATTRIBUTES = {} # mapping of 'attribute' => (default_value, parsing_function)
+ PARSER_FOR_LINE = {} # line keyword to its associated parsing function
+
def __init__(self, contents):
self._path = None
self._archive_path = None
self._raw_contents = contents
+ self._lazy_loading = False
+ self._unrecognized_lines = []
def get_path(self):
"""
@@ -361,7 +366,44 @@ class Descriptor(object):
:returns: **list** of lines of unrecognized content
"""
- raise NotImplementedError
+ if self._lazy_loading:
+ # we need to go ahead and parse the whole document to figure this out
+ self._parse(self._entries, False)
+ self._lazy_loading = False
+
+ return list(self._unrecognized_lines)
+
+ def _parse(self, entries, validate):
+ """
+ Parses a series of 'keyword => (value, pgp block)' mappings and applies
+ them as attributes.
+
+ :param dict entries: descriptor contents to be applied
+ :param bool validate: checks the validity of descriptor content if True
+
+ :raises: **ValueError** if an error occurs in validation
+ """
+
+ # set defaults
+
+ for attr in self.ATTRIBUTES:
+ setattr(self, attr, self.ATTRIBUTES[attr][0])
+
+ for keyword, values in list(entries.items()):
+ try:
+ if keyword in self.PARSER_FOR_LINE:
+ self.PARSER_FOR_LINE[keyword](self, entries)
+ else:
+ for value, block_type, block_contents in values:
+ line = '%s %s' % (keyword, value)
+
+ if block_contents:
+ line += '\n%s' % block_contents
+
+ self._unrecognized_lines.append(line)
+ except ValueError as exc:
+ if validate:
+ raise exc
def _set_path(self, path):
self._path = path
@@ -372,6 +414,23 @@ class Descriptor(object):
def _name(self, is_plural = False):
return str(type(self))
+ def __getattr__(self, name):
+ # If attribute isn't already present we might be lazy loading it...
+
+ if self._lazy_loading and name in self.ATTRIBUTES:
+ default, parsing_function = self.ATTRIBUTES[name]
+
+ try:
+ parsing_function(self, self._entries)
+ except (ValueError, KeyError):
+ try:
+ # despite having a validation failure check to see if we set something
+ return super(Descriptor, self).__getattribute__(name)
+ except AttributeError:
+ setattr(self, name, default)
+
+ return super(Descriptor, self).__getattribute__(name)
+
def __str__(self):
if stem.prereq.is_python_3():
return stem.util.str_tools._to_unicode(self._raw_contents)
diff --git a/stem/descriptor/extrainfo_descriptor.py b/stem/descriptor/extrainfo_descriptor.py
index 830cacc..b14932b 100644
--- a/stem/descriptor/extrainfo_descriptor.py
+++ b/stem/descriptor/extrainfo_descriptor.py
@@ -32,8 +32,7 @@ Extra-info descriptors are available from a few sources...
|- RelayExtraInfoDescriptor - Extra-info descriptor for a relay.
|- BridgeExtraInfoDescriptor - Extra-info descriptor for a bridge.
|
- |- digest - calculates the upper-case hex digest value for our content
- +- get_unrecognized_lines - lines with unrecognized content
+ +- digest - calculates the upper-case hex digest value for our content
.. data:: DirResponse (enum)
@@ -810,8 +809,6 @@ class ExtraInfoDescriptor(Descriptor):
self.ip_versions = None
self.ip_transports = None
- self._unrecognized_lines = []
-
entries = _get_descriptor_components(raw_contents, validate)
if validate:
@@ -833,36 +830,6 @@ class ExtraInfoDescriptor(Descriptor):
self._parse(entries, validate)
- def get_unrecognized_lines(self):
- return list(self._unrecognized_lines)
-
- def _parse(self, entries, validate):
- """
- Parses a series of 'keyword => (value, pgp block)' mappings and applies
- them as attributes.
-
- :param dict entries: descriptor contents to be applied
- :param bool validate: checks the validity of descriptor content if True
-
- :raises: **ValueError** if an error occurs in validation
- """
-
- for keyword, values in list(entries.items()):
- try:
- if keyword in self.PARSER_FOR_LINE:
- self.PARSER_FOR_LINE[keyword](self, entries)
- else:
- for value, block_type, block_contents in values:
- line = '%s %s' % (keyword, value)
-
- if block_contents:
- line += '\n%s' % block_contents
-
- self._unrecognized_lines.append(line)
- except ValueError as exc:
- if validate:
- raise exc
-
def digest(self):
"""
Provides the upper-case hex encoded sha1 of our content. This value is part
diff --git a/stem/descriptor/server_descriptor.py b/stem/descriptor/server_descriptor.py
index 9f9535b..5a2ca1e 100644
--- a/stem/descriptor/server_descriptor.py
+++ b/stem/descriptor/server_descriptor.py
@@ -27,7 +27,6 @@ etc). This information is provided from a few sources...
| +- get_scrubbing_issues - description of issues with our scrubbing
|
|- digest - calculates the upper-case hex digest value for our content
- |- get_unrecognized_lines - lines with unrecognized content
|- get_annotations - dictionary of content prior to the descriptor entry
+- get_annotation_lines - lines that provided the annotations
"""
@@ -412,6 +411,16 @@ def _key_block(entries, keyword, expected_block_type):
return block_contents
+def _parse_exit_policy(descriptor, entries):
+ if hasattr(descriptor, '_unparsed_exit_policy'):
+ if descriptor._unparsed_exit_policy == [str_type('reject *:*')]:
+ descriptor.exit_policy = REJECT_ALL_POLICY
+ else:
+ descriptor.exit_policy = stem.exit_policy.ExitPolicy(*descriptor._unparsed_exit_policy)
+
+ del descriptor._unparsed_exit_policy
+
+
_parse_ipv6_policy_line = lambda descriptor, entries: setattr(descriptor, 'exit_policy_v6', stem.exit_policy.MicroExitPolicy(_value('ipv6-policy', entries)))
_parse_allow_single_hop_exits_line = lambda descriptor, entries: setattr(descriptor, 'allow_single_hop_exits', True)
_parse_caches_extra_info_line = lambda descriptor, entries: setattr(descriptor, 'extra_info_cache', True)
@@ -478,6 +487,7 @@ class ServerDescriptor(Descriptor):
'nickname': (None, _parse_router_line),
'fingerprint': (None, _parse_fingerprint_line),
'published': (None, _parse_published_line),
+ 'exit_policy': (None, _parse_exit_policy),
'address': (None, _parse_router_line),
'or_port': (None, _parse_router_line),
@@ -532,6 +542,7 @@ class ServerDescriptor(Descriptor):
'caches-extra-info': _parse_caches_extra_info_line,
'family': _parse_family_line,
'eventdns': _parse_eventdns_line,
+ 'contact': lambda descriptor, entries: None, # parsed as a bytes field earlier
}
def __init__(self, raw_contents, validate = True, annotations = None):
@@ -563,7 +574,6 @@ class ServerDescriptor(Descriptor):
raw_contents = stem.util.str_tools._to_unicode(raw_contents)
self._lazy_loading = not validate
- self._unrecognized_lines = []
self._annotation_lines = annotations if annotations else []
# A descriptor contains a series of 'keyword lines' which are simply a
@@ -574,19 +584,13 @@ class ServerDescriptor(Descriptor):
# influences the resulting exit policy, but for everything else the order
# does not matter so breaking it into key / value pairs.
- entries, policy = _get_descriptor_components(raw_contents, validate, ('accept', 'reject'))
+ entries, self._unparsed_exit_policy = _get_descriptor_components(raw_contents, validate, ('accept', 'reject'))
if validate:
- if policy == [str_type('reject *:*')]:
- self.exit_policy = REJECT_ALL_POLICY
- else:
- self.exit_policy = stem.exit_policy.ExitPolicy(*policy)
-
self._parse(entries, validate)
self._check_constraints(entries)
else:
self._entries = entries
- self._exit_policy_list = policy
def digest(self):
"""
@@ -598,14 +602,6 @@ class ServerDescriptor(Descriptor):
raise NotImplementedError('Unsupported Operation: this should be implemented by the ServerDescriptor subclass')
- def get_unrecognized_lines(self):
- if self._lazy_loading:
- # we need to go ahead and parse the whole document to figure this out
- self._parse(self._entries, False)
- self._lazy_loading = False
-
- return list(self._unrecognized_lines)
-
@lru_cache()
def get_annotations(self):
"""
@@ -654,28 +650,8 @@ class ServerDescriptor(Descriptor):
:raises: **ValueError** if an error occurs in validation
"""
- # set defaults
-
- for attr in self.ATTRIBUTES:
- setattr(self, attr, self.ATTRIBUTES[attr][0])
-
- for keyword, values in list(entries.items()):
- try:
- if keyword in self.PARSER_FOR_LINE:
- self.PARSER_FOR_LINE[keyword](self, entries)
- elif keyword == 'contact':
- pass # parsed as a bytes field earlier
- else:
- for value, block_type, block_contents in values:
- line = '%s %s' % (keyword, value)
-
- if block_contents:
- line += '\n%s' % block_contents
-
- self._unrecognized_lines.append(line)
- except ValueError as exc:
- if validate:
- raise exc
+ super(ServerDescriptor, self)._parse(entries, validate)
+ _parse_exit_policy(self, entries)
# if we have a negative uptime and a tor version that shouldn't exhibit
# this bug then fail validation
@@ -728,31 +704,6 @@ class ServerDescriptor(Descriptor):
def _last_keyword(self):
return 'router-signature'
- def __getattr__(self, name):
- # If attribute isn't already present we might be lazy loading it...
-
- if self._lazy_loading and name in self.ATTRIBUTES:
- default, parsing_function = self.ATTRIBUTES[name]
-
- try:
- if parsing_function:
- parsing_function(self, self._entries)
- elif name == 'exit_policy':
- if self._exit_policy_list == [str_type('reject *:*')]:
- self.exit_policy = REJECT_ALL_POLICY
- else:
- self.exit_policy = stem.exit_policy.ExitPolicy(*self._exit_policy_list)
-
- del self._exit_policy_list
- except (ValueError, KeyError):
- try:
- # despite having a validation failure check to see if we set something
- return super(ServerDescriptor, self).__getattribute__(name)
- except AttributeError:
- setattr(self, name, default)
-
- return super(ServerDescriptor, self).__getattribute__(name)
-
class RelayDescriptor(ServerDescriptor):
"""
commit 8bbc48950095f35d9a8dbc64f9b27aa66a79875e
Author: Damian Johnson <atagar(a)torproject.org>
Date: Sat Jan 17 15:01:30 2015 -0800
ExtraInfo lazy loading
Implement lazy loading for extrainfo descriptors. This highlighted a bug: we
need a shallow copy of our default values, otherwise defaults like lists and
dictionaries would be shared between descriptors.
---
stem/descriptor/__init__.py | 5 +-
stem/descriptor/extrainfo_descriptor.py | 235 +++++++++++++++----------------
2 files changed, 116 insertions(+), 124 deletions(-)
diff --git a/stem/descriptor/__init__.py b/stem/descriptor/__init__.py
index 05a7d0d..0baacdb 100644
--- a/stem/descriptor/__init__.py
+++ b/stem/descriptor/__init__.py
@@ -50,6 +50,7 @@ __all__ = [
'Descriptor',
]
+import copy
import os
import re
import tarfile
@@ -387,7 +388,7 @@ class Descriptor(object):
# set defaults
for attr in self.ATTRIBUTES:
- setattr(self, attr, self.ATTRIBUTES[attr][0])
+ setattr(self, attr, copy.copy(self.ATTRIBUTES[attr][0]))
for keyword, values in list(entries.items()):
try:
@@ -427,7 +428,7 @@ class Descriptor(object):
# despite having a validation failure check to see if we set something
return super(Descriptor, self).__getattribute__(name)
except AttributeError:
- setattr(self, name, default)
+ setattr(self, name, copy.copy(default))
return super(Descriptor, self).__getattribute__(name)
diff --git a/stem/descriptor/extrainfo_descriptor.py b/stem/descriptor/extrainfo_descriptor.py
index b14932b..97623e7 100644
--- a/stem/descriptor/extrainfo_descriptor.py
+++ b/stem/descriptor/extrainfo_descriptor.py
@@ -534,6 +534,24 @@ def _parse_bridge_ip_transports_line(descriptor, entries):
descriptor.ip_transports = ip_transports
+def _parse_router_signature_line(descriptor, entries):
+ value, block_type, block_contents = entries['router-signature'][0]
+
+ if not block_contents or block_type != 'SIGNATURE':
+ raise ValueError("'router-signature' should be followed by a SIGNATURE block rather than a '%s'" % block_type)
+
+ descriptor.signature = block_contents
+
+
+def _parse_router_digest(descriptor, entries):
+ value = _value('router-digest', entries)
+
+ if not stem.util.tor_tools.is_hex_digits(value, 40):
+ raise ValueError('Router digest line had an invalid sha1 digest: router-digest %s' % value)
+
+ descriptor._digest = value
+
+
_parse_dirreq_v2_resp_line = functools.partial(_parse_dirreq_line, 'dirreq-v2-resp', 'dir_v2_responses', 'dir_v2_responses_unknown')
_parse_dirreq_v3_resp_line = functools.partial(_parse_dirreq_line, 'dirreq-v3-resp', 'dir_v3_responses', 'dir_v3_responses_unknown')
_parse_dirreq_v2_direct_dl_line = functools.partial(_parse_dirreq_line, 'dirreq-v2-direct-dl', 'dir_v2_direct_dl', 'dir_v2_direct_dl_unknown')
@@ -673,6 +691,85 @@ class ExtraInfoDescriptor(Descriptor):
a default value, others are left as **None** if undefined
"""
+ ATTRIBUTES = {
+ 'nickname': (None, _parse_extra_info_line),
+ 'fingerprint': (None, _parse_extra_info_line),
+ 'published': (None, _parse_published_line),
+ 'geoip_db_digest': (None, _parse_geoip_db_digest_line),
+ 'geoip6_db_digest': (None, _parse_geoip6_db_digest_line),
+ 'transport': ({}, _parse_transport_line),
+
+ 'conn_bi_direct_end': (None, _parse_conn_bi_direct_line),
+ 'conn_bi_direct_interval': (None, _parse_conn_bi_direct_line),
+ 'conn_bi_direct_below': (None, _parse_conn_bi_direct_line),
+ 'conn_bi_direct_read': (None, _parse_conn_bi_direct_line),
+ 'conn_bi_direct_write': (None, _parse_conn_bi_direct_line),
+ 'conn_bi_direct_both': (None, _parse_conn_bi_direct_line),
+
+ 'read_history_end': (None, _parse_read_history_line),
+ 'read_history_interval': (None, _parse_read_history_line),
+ 'read_history_values': (None, _parse_read_history_line),
+
+ 'write_history_end': (None, _parse_write_history_line),
+ 'write_history_interval': (None, _parse_write_history_line),
+ 'write_history_values': (None, _parse_write_history_line),
+
+ 'cell_stats_end': (None, _parse_cell_stats_end_line),
+ 'cell_stats_interval': (None, _parse_cell_stats_end_line),
+ 'cell_processed_cells': (None, _parse_cell_processed_cells_line),
+ 'cell_queued_cells': (None, _parse_cell_queued_cells_line),
+ 'cell_time_in_queue': (None, _parse_cell_time_in_queue_line),
+ 'cell_circuits_per_decile': (None, _parse_cell_circuits_per_decline_line),
+
+ 'dir_stats_end': (None, _parse_dirreq_stats_end_line),
+ 'dir_stats_interval': (None, _parse_dirreq_stats_end_line),
+ 'dir_v2_ips': (None, _parse_dirreq_v2_ips_line),
+ 'dir_v3_ips': (None, _parse_dirreq_v3_ips_line),
+ 'dir_v2_share': (None, _parse_dirreq_v2_share_line),
+ 'dir_v3_share': (None, _parse_dirreq_v3_share_line),
+ 'dir_v2_requests': (None, _parse_dirreq_v2_reqs_line),
+ 'dir_v3_requests': (None, _parse_dirreq_v3_reqs_line),
+ 'dir_v2_responses': (None, _parse_dirreq_v2_resp_line),
+ 'dir_v3_responses': (None, _parse_dirreq_v3_resp_line),
+ 'dir_v2_responses_unknown': (None, _parse_dirreq_v2_resp_line),
+ 'dir_v3_responses_unknown': (None, _parse_dirreq_v3_resp_line),
+ 'dir_v2_direct_dl': (None, _parse_dirreq_v2_direct_dl_line),
+ 'dir_v3_direct_dl': (None, _parse_dirreq_v3_direct_dl_line),
+ 'dir_v2_direct_dl_unknown': (None, _parse_dirreq_v2_direct_dl_line),
+ 'dir_v3_direct_dl_unknown': (None, _parse_dirreq_v3_direct_dl_line),
+ 'dir_v2_tunneled_dl': (None, _parse_dirreq_v2_tunneled_dl_line),
+ 'dir_v3_tunneled_dl': (None, _parse_dirreq_v3_tunneled_dl_line),
+ 'dir_v2_tunneled_dl_unknown': (None, _parse_dirreq_v2_tunneled_dl_line),
+ 'dir_v3_tunneled_dl_unknown': (None, _parse_dirreq_v3_tunneled_dl_line),
+
+ 'dir_read_history_end': (None, _parse_dirreq_read_history_line),
+ 'dir_read_history_interval': (None, _parse_dirreq_read_history_line),
+ 'dir_read_history_values': (None, _parse_dirreq_read_history_line),
+
+ 'dir_write_history_end': (None, _parse_dirreq_write_history_line),
+ 'dir_write_history_interval': (None, _parse_dirreq_write_history_line),
+ 'dir_write_history_values': (None, _parse_dirreq_write_history_line),
+
+ 'entry_stats_end': (None, _parse_entry_stats_end_line),
+ 'entry_stats_interval': (None, _parse_entry_stats_end_line),
+ 'entry_ips': (None, _parse_entry_ips_line),
+
+ 'exit_stats_end': (None, _parse_exit_stats_end_line),
+ 'exit_stats_interval': (None, _parse_exit_stats_end_line),
+ 'exit_kibibytes_written': (None, _parse_exit_kibibytes_written_line),
+ 'exit_kibibytes_read': (None, _parse_exit_kibibytes_read_line),
+ 'exit_streams_opened': (None, _parse_exit_streams_opened_line),
+
+ 'bridge_stats_end': (None, _parse_bridge_stats_end_line),
+ 'bridge_stats_interval': (None, _parse_bridge_stats_end_line),
+ 'bridge_ips': (None, _parse_bridge_ips_line),
+ 'geoip_start_time': (None, _parse_geoip_start_time_line),
+ 'geoip_client_origins': (None, _parse_geoip_client_origins_line),
+
+ 'ip_versions': (None, _parse_bridge_ip_versions_line),
+ 'ip_transports': (None, _parse_bridge_ip_transports_line),
+ }
+
PARSER_FOR_LINE = {
'extra-info': _parse_extra_info_line,
'geoip-db-digest': _parse_geoip_db_digest_line,
@@ -732,84 +829,8 @@ class ExtraInfoDescriptor(Descriptor):
super(ExtraInfoDescriptor, self).__init__(raw_contents)
raw_contents = stem.util.str_tools._to_unicode(raw_contents)
- self.nickname = None
- self.fingerprint = None
- self.published = None
- self.geoip_db_digest = None
- self.geoip6_db_digest = None
- self.transport = {}
-
- self.conn_bi_direct_end = None
- self.conn_bi_direct_interval = None
- self.conn_bi_direct_below = None
- self.conn_bi_direct_read = None
- self.conn_bi_direct_write = None
- self.conn_bi_direct_both = None
-
- self.read_history_end = None
- self.read_history_interval = None
- self.read_history_values = None
-
- self.write_history_end = None
- self.write_history_interval = None
- self.write_history_values = None
-
- self.cell_stats_end = None
- self.cell_stats_interval = None
- self.cell_processed_cells = None
- self.cell_queued_cells = None
- self.cell_time_in_queue = None
- self.cell_circuits_per_decile = None
-
- self.dir_stats_end = None
- self.dir_stats_interval = None
- self.dir_v2_ips = None
- self.dir_v3_ips = None
- self.dir_v2_share = None
- self.dir_v3_share = None
- self.dir_v2_requests = None
- self.dir_v3_requests = None
- self.dir_v2_responses = None
- self.dir_v3_responses = None
- self.dir_v2_responses_unknown = None
- self.dir_v3_responses_unknown = None
- self.dir_v2_direct_dl = None
- self.dir_v3_direct_dl = None
- self.dir_v2_direct_dl_unknown = None
- self.dir_v3_direct_dl_unknown = None
- self.dir_v2_tunneled_dl = None
- self.dir_v3_tunneled_dl = None
- self.dir_v2_tunneled_dl_unknown = None
- self.dir_v3_tunneled_dl_unknown = None
-
- self.dir_read_history_end = None
- self.dir_read_history_interval = None
- self.dir_read_history_values = None
-
- self.dir_write_history_end = None
- self.dir_write_history_interval = None
- self.dir_write_history_values = None
-
- self.entry_stats_end = None
- self.entry_stats_interval = None
- self.entry_ips = None
-
- self.exit_stats_end = None
- self.exit_stats_interval = None
- self.exit_kibibytes_written = None
- self.exit_kibibytes_read = None
- self.exit_streams_opened = None
-
- self.bridge_stats_end = None
- self.bridge_stats_interval = None
- self.bridge_ips = None
- self.geoip_start_time = None
- self.geoip_client_origins = None
-
- self.ip_versions = None
- self.ip_transports = None
-
entries = _get_descriptor_components(raw_contents, validate)
+ self._lazy_loading = not validate
if validate:
for keyword in self._required_fields():
@@ -828,7 +849,9 @@ class ExtraInfoDescriptor(Descriptor):
if expected_last_keyword and expected_last_keyword != list(entries.keys())[-1]:
raise ValueError("Descriptor must end with a '%s' entry" % expected_last_keyword)
- self._parse(entries, validate)
+ self._parse(entries, validate)
+ else:
+ self._entries = entries
def digest(self):
"""
@@ -862,10 +885,13 @@ class RelayExtraInfoDescriptor(ExtraInfoDescriptor):
**\*** attribute is required when we're parsed with validation
"""
- def __init__(self, raw_contents, validate = True):
- self.signature = None
+ ATTRIBUTES = dict(ExtraInfoDescriptor.ATTRIBUTES, **{
+ 'signature': (None, _parse_router_signature_line),
+ })
- super(RelayExtraInfoDescriptor, self).__init__(raw_contents, validate)
+ PARSER_FOR_LINE = dict(ExtraInfoDescriptor.PARSER_FOR_LINE, **{
+ 'router-signature': _parse_router_signature_line,
+ })
@lru_cache()
def digest(self):
@@ -874,27 +900,6 @@ class RelayExtraInfoDescriptor(ExtraInfoDescriptor):
raw_content = raw_content[:raw_content.find(ending) + len(ending)]
return hashlib.sha1(stem.util.str_tools._to_bytes(raw_content)).hexdigest().upper()
- def _parse(self, entries, validate):
- entries = dict(entries) # shallow copy since we're destructive
-
- # handles fields only in server descriptors
- for keyword, values in list(entries.items()):
- value, block_type, block_contents = values[0]
-
- line = '%s %s' % (keyword, value) # original line
-
- if block_contents:
- line += '\n%s' % block_contents
-
- if keyword == 'router-signature':
- if validate and (not block_contents or block_type != 'SIGNATURE'):
- raise ValueError("'router-signature' should be followed by a SIGNATURE block: %s" % line)
-
- self.signature = block_contents
- del entries['router-signature']
-
- ExtraInfoDescriptor._parse(self, entries, validate)
-
class BridgeExtraInfoDescriptor(ExtraInfoDescriptor):
"""
@@ -902,31 +907,17 @@ class BridgeExtraInfoDescriptor(ExtraInfoDescriptor):
<https://collector.torproject.org/formats.html#bridge-descriptors>`_)
"""
- def __init__(self, raw_contents, validate = True):
- self._digest = None
+ ATTRIBUTES = dict(ExtraInfoDescriptor.ATTRIBUTES, **{
+ '_digest': (None, _parse_router_digest),
+ })
- super(BridgeExtraInfoDescriptor, self).__init__(raw_contents, validate)
+ PARSER_FOR_LINE = dict(ExtraInfoDescriptor.PARSER_FOR_LINE, **{
+ 'router-digest': _parse_router_digest,
+ })
def digest(self):
return self._digest
- def _parse(self, entries, validate):
- entries = dict(entries) # shallow copy since we're destructive
-
- # handles fields only in server descriptors
- for keyword, values in list(entries.items()):
- value, _, _ = values[0]
- line = '%s %s' % (keyword, value) # original line
-
- if keyword == 'router-digest':
- if validate and not stem.util.tor_tools.is_hex_digits(value, 40):
- raise ValueError('Router digest line had an invalid sha1 digest: %s' % line)
-
- self._digest = value
- del entries['router-digest']
-
- ExtraInfoDescriptor._parse(self, entries, validate)
-
def _required_fields(self):
excluded_fields = [
'router-signature',
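The shallow copy is the interesting bit: mutable defaults like the empty dict for 'transport' live once in the ATTRIBUTES table, so assigning them directly would hand every descriptor the very same object. A toy illustration of the difference copy.copy() makes, with simplified names rather than the real classes:

    import copy

    SHARED_DEFAULT = {}  # stands in for a mutable default stored in ATTRIBUTES

    class Copied(object):
      def __init__(self):
        self.transport = copy.copy(SHARED_DEFAULT)  # fresh, per-instance dict

    class Shared(object):
      def __init__(self):
        self.transport = SHARED_DEFAULT  # same dict object on every instance

    a, b = Copied(), Copied()
    a.transport['obfs4'] = ('1.2.3.4', 443, [])
    print(b.transport)  # {} -- copies stay isolated

    c, d = Shared(), Shared()
    c.transport['obfs4'] = ('1.2.3.4', 443, [])
    print(d.transport)  # {'obfs4': ...} -- the mutation leaks into d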
commit c895a57197f94d492f04472a2037e6cf1e05eca1
Author: Damian Johnson <atagar(a)torproject.org>
Date: Sat Jan 17 11:33:16 2015 -0800
Move extrainfo line parsers to PARSER_FOR_LINE
Like server descriptors, this uses a dict mapping line keywords to their
associated parsers.
---
stem/descriptor/extrainfo_descriptor.py | 139 ++++++++++++-------------------
1 file changed, 52 insertions(+), 87 deletions(-)
diff --git a/stem/descriptor/extrainfo_descriptor.py b/stem/descriptor/extrainfo_descriptor.py
index 408cc92..830cacc 100644
--- a/stem/descriptor/extrainfo_descriptor.py
+++ b/stem/descriptor/extrainfo_descriptor.py
@@ -674,6 +674,49 @@ class ExtraInfoDescriptor(Descriptor):
a default value, others are left as **None** if undefined
"""
+ PARSER_FOR_LINE = {
+ 'extra-info': _parse_extra_info_line,
+ 'geoip-db-digest': _parse_geoip_db_digest_line,
+ 'geoip6-db-digest': _parse_geoip6_db_digest_line,
+ 'transport': _parse_transport_line,
+ 'cell-circuits-per-decile': _parse_cell_circuits_per_decline_line,
+ 'dirreq-v2-resp': _parse_dirreq_v2_resp_line,
+ 'dirreq-v3-resp': _parse_dirreq_v3_resp_line,
+ 'dirreq-v2-direct-dl': _parse_dirreq_v2_direct_dl_line,
+ 'dirreq-v3-direct-dl': _parse_dirreq_v3_direct_dl_line,
+ 'dirreq-v2-tunneled-dl': _parse_dirreq_v2_tunneled_dl_line,
+ 'dirreq-v3-tunneled-dl': _parse_dirreq_v3_tunneled_dl_line,
+ 'dirreq-v2-share': _parse_dirreq_v2_share_line,
+ 'dirreq-v3-share': _parse_dirreq_v3_share_line,
+ 'cell-processed-cells': _parse_cell_processed_cells_line,
+ 'cell-queued-cells': _parse_cell_queued_cells_line,
+ 'cell-time-in-queue': _parse_cell_time_in_queue_line,
+ 'published': _parse_published_line,
+ 'geoip-start-time': _parse_geoip_start_time_line,
+ 'cell-stats-end': _parse_cell_stats_end_line,
+ 'entry-stats-end': _parse_entry_stats_end_line,
+ 'exit-stats-end': _parse_exit_stats_end_line,
+ 'bridge-stats-end': _parse_bridge_stats_end_line,
+ 'dirreq-stats-end': _parse_dirreq_stats_end_line,
+ 'conn-bi-direct': _parse_conn_bi_direct_line,
+ 'read-history': _parse_read_history_line,
+ 'write-history': _parse_write_history_line,
+ 'dirreq-read-history': _parse_dirreq_read_history_line,
+ 'dirreq-write-history': _parse_dirreq_write_history_line,
+ 'exit-kibibytes-written': _parse_exit_kibibytes_written_line,
+ 'exit-kibibytes-read': _parse_exit_kibibytes_read_line,
+ 'exit-streams-opened': _parse_exit_streams_opened_line,
+ 'dirreq-v2-ips': _parse_dirreq_v2_ips_line,
+ 'dirreq-v3-ips': _parse_dirreq_v3_ips_line,
+ 'dirreq-v2-reqs': _parse_dirreq_v2_reqs_line,
+ 'dirreq-v3-reqs': _parse_dirreq_v3_reqs_line,
+ 'geoip-client-origins': _parse_geoip_client_origins_line,
+ 'entry-ips': _parse_entry_ips_line,
+ 'bridge-ips': _parse_bridge_ips_line,
+ 'bridge-ip-versions': _parse_bridge_ip_versions_line,
+ 'bridge-ip-transports': _parse_bridge_ip_transports_line,
+ }
+
def __init__(self, raw_contents, validate = True):
"""
Extra-info descriptor constructor. By default this validates the
@@ -805,98 +848,20 @@ class ExtraInfoDescriptor(Descriptor):
"""
for keyword, values in list(entries.items()):
- # most just work with the first (and only) value
- value, _, _ = values[0]
- line = '%s %s' % (keyword, value) # original line
-
try:
- if keyword == 'extra-info':
- _parse_extra_info_line(self, entries)
- elif keyword == 'geoip-db-digest':
- _parse_geoip_db_digest_line(self, entries)
- elif keyword == 'geoip6-db-digest':
- _parse_geoip6_db_digest_line(self, entries)
- elif keyword == 'transport':
- _parse_transport_line(self, entries)
- elif keyword == 'cell-circuits-per-decile':
- _parse_cell_circuits_per_decline_line(self, entries)
- elif keyword == 'dirreq-v2-resp':
- _parse_dirreq_v2_resp_line(self, entries)
- elif keyword == 'dirreq-v3-resp':
- _parse_dirreq_v3_resp_line(self, entries)
- elif keyword == 'dirreq-v2-direct-dl':
- _parse_dirreq_v2_direct_dl_line(self, entries)
- elif keyword == 'dirreq-v3-direct-dl':
- _parse_dirreq_v3_direct_dl_line(self, entries)
- elif keyword == 'dirreq-v2-tunneled-dl':
- _parse_dirreq_v2_tunneled_dl_line(self, entries)
- elif keyword == 'dirreq-v3-tunneled-dl':
- _parse_dirreq_v3_tunneled_dl_line(self, entries)
- elif keyword == 'dirreq-v2-share':
- _parse_dirreq_v2_share_line(self, entries)
- elif keyword == 'dirreq-v3-share':
- _parse_dirreq_v3_share_line(self, entries)
- elif keyword == 'cell-processed-cells':
- _parse_cell_processed_cells_line(self, entries)
- elif keyword == 'cell-queued-cells':
- _parse_cell_queued_cells_line(self, entries)
- elif keyword == 'cell-time-in-queue':
- _parse_cell_time_in_queue_line(self, entries)
- elif keyword == 'published':
- _parse_published_line(self, entries)
- elif keyword == 'geoip-start-time':
- _parse_geoip_start_time_line(self, entries)
- elif keyword == 'cell-stats-end':
- _parse_cell_stats_end_line(self, entries)
- elif keyword == 'entry-stats-end':
- _parse_entry_stats_end_line(self, entries)
- elif keyword == 'exit-stats-end':
- _parse_exit_stats_end_line(self, entries)
- elif keyword == 'bridge-stats-end':
- _parse_bridge_stats_end_line(self, entries)
- elif keyword == 'dirreq-stats-end':
- _parse_dirreq_stats_end_line(self, entries)
- elif keyword == 'conn-bi-direct':
- _parse_conn_bi_direct_line(self, entries)
- elif keyword == 'read-history':
- _parse_read_history_line(self, entries)
- elif keyword == 'write-history':
- _parse_write_history_line(self, entries)
- elif keyword == 'dirreq-read-history':
- _parse_dirreq_read_history_line(self, entries)
- elif keyword == 'dirreq-write-history':
- _parse_dirreq_write_history_line(self, entries)
- elif keyword == 'exit-kibibytes-written':
- _parse_exit_kibibytes_written_line(self, entries)
- elif keyword == 'exit-kibibytes-read':
- _parse_exit_kibibytes_read_line(self, entries)
- elif keyword == 'exit-streams-opened':
- _parse_exit_streams_opened_line(self, entries)
- elif keyword == 'dirreq-v2-ips':
- _parse_dirreq_v2_ips_line(self, entries)
- elif keyword == 'dirreq-v3-ips':
- _parse_dirreq_v3_ips_line(self, entries)
- elif keyword == 'dirreq-v2-reqs':
- _parse_dirreq_v2_reqs_line(self, entries)
- elif keyword == 'dirreq-v3-reqs':
- _parse_dirreq_v3_reqs_line(self, entries)
- elif keyword == 'geoip-client-origins':
- _parse_geoip_client_origins_line(self, entries)
- elif keyword == 'entry-ips':
- _parse_entry_ips_line(self, entries)
- elif keyword == 'bridge-ips':
- _parse_bridge_ips_line(self, entries)
- elif keyword == 'bridge-ip-versions':
- _parse_bridge_ip_versions_line(self, entries)
- elif keyword == 'bridge-ip-transports':
- _parse_bridge_ip_transports_line(self, entries)
+ if keyword in self.PARSER_FOR_LINE:
+ self.PARSER_FOR_LINE[keyword](self, entries)
else:
- self._unrecognized_lines.append(line)
+ for value, block_type, block_contents in values:
+ line = '%s %s' % (keyword, value)
+
+ if block_contents:
+ line += '\n%s' % block_contents
+
+ self._unrecognized_lines.append(line)
except ValueError as exc:
if validate:
raise exc
- else:
- continue
def digest(self):
"""
commit 6f3a9d846d1226679bdd56dedce362d76c2a3be5
Author: Damian Johnson <atagar(a)torproject.org>
Date: Sat Jan 17 20:33:33 2015 -0800
DirectoryAuthority lazy loading
Another subsection of network status documents.
---
stem/descriptor/__init__.py | 6 +-
stem/descriptor/extrainfo_descriptor.py | 8 +-
stem/descriptor/networkstatus.py | 180 ++++++++------------
stem/descriptor/server_descriptor.py | 17 +-
.../networkstatus/directory_authority.py | 9 +-
5 files changed, 85 insertions(+), 135 deletions(-)
diff --git a/stem/descriptor/__init__.py b/stem/descriptor/__init__.py
index 1e1acb5..cd9dcde 100644
--- a/stem/descriptor/__init__.py
+++ b/stem/descriptor/__init__.py
@@ -333,12 +333,14 @@ def _parse_timestamp_line(keyword, attribute):
return _parse
-def _parse_sha1_digest_line(keyword, attribute):
+def _parse_forty_character_hex(keyword, attribute):
+ # format of fingerprints, sha1 digests, etc
+
def _parse(descriptor, entries):
value = _value(keyword, entries)
if not stem.util.tor_tools.is_hex_digits(value, 40):
- raise ValueError('%s line had an invalid sha1 digest: %s %s' % (keyword, keyword, value))
+ raise ValueError('%s line had an invalid value (should be 40 hex characters): %s %s' % (keyword, keyword, value))
setattr(descriptor, attribute, value)
diff --git a/stem/descriptor/extrainfo_descriptor.py b/stem/descriptor/extrainfo_descriptor.py
index 3413711..124ce16 100644
--- a/stem/descriptor/extrainfo_descriptor.py
+++ b/stem/descriptor/extrainfo_descriptor.py
@@ -84,7 +84,7 @@ from stem.descriptor import (
_value,
_values,
_parse_timestamp_line,
- _parse_sha1_digest_line,
+ _parse_forty_character_hex,
_parse_key_block,
)
@@ -498,8 +498,8 @@ def _parse_bridge_ip_transports_line(descriptor, entries):
descriptor.ip_transports = ip_transports
-_parse_geoip_db_digest_line = _parse_sha1_digest_line('geoip-db-digest', 'geoip_db_digest')
-_parse_geoip6_db_digest_line = _parse_sha1_digest_line('geoip6-db-digest', 'geoip6_db_digest')
+_parse_geoip_db_digest_line = _parse_forty_character_hex('geoip-db-digest', 'geoip_db_digest')
+_parse_geoip6_db_digest_line = _parse_forty_character_hex('geoip6-db-digest', 'geoip6_db_digest')
_parse_dirreq_v2_resp_line = functools.partial(_parse_dirreq_line, 'dirreq-v2-resp', 'dir_v2_responses', 'dir_v2_responses_unknown')
_parse_dirreq_v3_resp_line = functools.partial(_parse_dirreq_line, 'dirreq-v3-resp', 'dir_v3_responses', 'dir_v3_responses_unknown')
_parse_dirreq_v2_direct_dl_line = functools.partial(_parse_dirreq_line, 'dirreq-v2-direct-dl', 'dir_v2_direct_dl', 'dir_v2_direct_dl_unknown')
@@ -532,7 +532,7 @@ _parse_dirreq_v3_reqs_line = functools.partial(_parse_geoip_to_count_line, 'dirr
_parse_geoip_client_origins_line = functools.partial(_parse_geoip_to_count_line, 'geoip-client-origins', 'geoip_client_origins')
_parse_entry_ips_line = functools.partial(_parse_geoip_to_count_line, 'entry-ips', 'entry_ips')
_parse_bridge_ips_line = functools.partial(_parse_geoip_to_count_line, 'bridge-ips', 'bridge_ips')
-_parse_router_digest_line = _parse_sha1_digest_line('router-digest', '_digest')
+_parse_router_digest_line = _parse_forty_character_hex('router-digest', '_digest')
_parse_router_signature_line = _parse_key_block('router-signature', 'signature', 'SIGNATURE')
diff --git a/stem/descriptor/networkstatus.py b/stem/descriptor/networkstatus.py
index a70a7cf..70f325b 100644
--- a/stem/descriptor/networkstatus.py
+++ b/stem/descriptor/networkstatus.py
@@ -64,6 +64,7 @@ from stem.descriptor import (
_read_until_keywords,
_value,
_parse_timestamp_line,
+ _parse_forty_character_hex,
_parse_key_block,
)
@@ -1027,6 +1028,43 @@ def _parse_int_mappings(keyword, value, validate):
return results
+def _parse_dir_source_line(descriptor, entries):
+ # "dir-source" nickname identity address IP dirport orport
+
+ value = _value('dir-source', entries)
+ dir_source_comp = value.split(' ')
+
+ if len(dir_source_comp) < 6:
+ raise ValueError("Authority entry's 'dir-source' line must have six values: dir-source %s" % value)
+
+ if not stem.util.tor_tools.is_valid_nickname(dir_source_comp[0].rstrip('-legacy')):
+ raise ValueError("Authority's nickname is invalid: %s" % dir_source_comp[0])
+ elif not stem.util.tor_tools.is_valid_fingerprint(dir_source_comp[1]):
+ raise ValueError("Authority's fingerprint is invalid: %s" % dir_source_comp[1])
+ elif not dir_source_comp[2]:
+ # https://trac.torproject.org/7055
+ raise ValueError("Authority's hostname can't be blank: dir-source %s" % value)
+ elif not stem.util.connection.is_valid_ipv4_address(dir_source_comp[3]):
+ raise ValueError("Authority's address isn't a valid IPv4 address: %s" % dir_source_comp[3])
+ elif not stem.util.connection.is_valid_port(dir_source_comp[4], allow_zero = True):
+ raise ValueError("Authority's DirPort is invalid: %s" % dir_source_comp[4])
+ elif not stem.util.connection.is_valid_port(dir_source_comp[5]):
+ raise ValueError("Authority's ORPort is invalid: %s" % dir_source_comp[5])
+
+ descriptor.nickname = dir_source_comp[0]
+ descriptor.fingerprint = dir_source_comp[1]
+ descriptor.hostname = dir_source_comp[2]
+ descriptor.address = dir_source_comp[3]
+ descriptor.dir_port = None if dir_source_comp[4] == '0' else int(dir_source_comp[4])
+ descriptor.or_port = int(dir_source_comp[5])
+ descriptor.is_legacy = descriptor.nickname.endswith('-legacy')
+
+
+_parse_contact_line = lambda descriptor, entries: setattr(descriptor, 'contact', _value('contact', entries))
+_parse_legacy_dir_key_line = _parse_forty_character_hex('legacy-dir-key', 'legacy_dir_key')
+_parse_vote_digest_line = _parse_forty_character_hex('vote-digest', 'vote_digest')
+
+
class DirectoryAuthority(Descriptor):
"""
Directory authority information obtained from a v3 network status document.
@@ -1059,6 +1097,26 @@ class DirectoryAuthority(Descriptor):
**\*** mandatory attribute
"""
+ ATTRIBUTES = {
+ 'nickname': (None, _parse_dir_source_line),
+ 'fingerprint': (None, _parse_dir_source_line),
+ 'hostname': (None, _parse_dir_source_line),
+ 'address': (None, _parse_dir_source_line),
+ 'dir_port': (None, _parse_dir_source_line),
+ 'or_port': (None, _parse_dir_source_line),
+ 'is_legacy': (False, _parse_dir_source_line),
+ 'contact': (None, _parse_contact_line),
+ 'vote_digest': (None, _parse_vote_digest_line),
+ 'legacy_dir_key': (None, _parse_legacy_dir_key_line),
+ }
+
+ PARSER_FOR_LINE = {
+ 'dir-source': _parse_dir_source_line,
+ 'contact': _parse_contact_line,
+ 'legacy-dir-key': _parse_legacy_dir_key_line,
+ 'vote-digest': _parse_vote_digest_line,
+ }
+
def __init__(self, raw_content, validate = True, is_vote = False):
"""
Parse a directory authority entry in a v3 network status document.
@@ -1071,47 +1129,17 @@ class DirectoryAuthority(Descriptor):
:raises: ValueError if the descriptor data is invalid
"""
- super(DirectoryAuthority, self).__init__(raw_content)
- raw_content = stem.util.str_tools._to_unicode(raw_content)
-
- self.nickname = None
- self.fingerprint = None
- self.hostname = None
- self.address = None
- self.dir_port = None
- self.or_port = None
- self.is_legacy = False
- self.contact = None
-
- self.vote_digest = None
-
- self.legacy_dir_key = None
- self.key_certificate = None
-
- self._unrecognized_lines = []
-
- self._parse(raw_content, validate, is_vote)
-
- def _parse(self, content, validate, is_vote):
- """
- Parses the given content and applies the attributes.
-
- :param str content: descriptor content
- :param bool validate: checks validity if True
- :param bool is_vote: **True** if this is for a vote, **False** if it's for
- a consensus
-
- :raises: **ValueError** if a validity check fails
- """
+ super(DirectoryAuthority, self).__init__(raw_content, lazy_load = not validate)
+ content = stem.util.str_tools._to_unicode(raw_content)
# separate the directory authority entry from its key certificate
key_div = content.find('\ndir-key-certificate-version')
if key_div != -1:
- key_cert_content = content[key_div + 1:]
+ self.key_certificate = KeyCertificate(content[key_div + 1:], validate)
content = content[:key_div + 1]
else:
- key_cert_content = None
+ self.key_certificate = None
entries = _get_descriptor_components(content, validate)
@@ -1132,12 +1160,12 @@ class DirectoryAuthority(Descriptor):
required_fields += ['contact']
if is_vote:
- if not key_cert_content:
+ if not self.key_certificate:
raise ValueError('Authority votes must have a key certificate:\n%s' % content)
excluded_fields += ['vote-digest']
elif not is_vote:
- if key_cert_content:
+ if self.key_certificate:
raise ValueError("Authority consensus entries shouldn't have a key certificate:\n%s" % content)
if not is_legacy:
@@ -1154,82 +1182,14 @@ class DirectoryAuthority(Descriptor):
type_label = 'votes' if is_vote else 'consensus entries'
raise ValueError("Authority %s shouldn't have a '%s' line:\n%s" % (type_label, keyword, content))
- for keyword, values in list(entries.items()):
- value, _, _ = values[0]
- line = '%s %s' % (keyword, value)
-
# all known attributes can only appear at most once
- if validate and len(values) > 1 and keyword in ('dir-source', 'contact', 'legacy-dir-key', 'vote-digest'):
- raise ValueError("Authority entries can only have a single '%s' line, got %i:\n%s" % (keyword, len(values), content))
-
- if keyword == 'dir-source':
- # "dir-source" nickname identity address IP dirport orport
-
- dir_source_comp = value.split(' ')
+ for keyword, values in list(entries.items()):
+ if len(values) > 1 and keyword in ('dir-source', 'contact', 'legacy-dir-key', 'vote-digest'):
+ raise ValueError("Authority entries can only have a single '%s' line, got %i:\n%s" % (keyword, len(values), content))
- if len(dir_source_comp) < 6:
- if not validate:
- continue
-
- raise ValueError("Authority entry's 'dir-source' line must have six values: %s" % line)
-
- if validate:
- if not stem.util.tor_tools.is_valid_nickname(dir_source_comp[0].rstrip('-legacy')):
- raise ValueError("Authority's nickname is invalid: %s" % dir_source_comp[0])
- elif not stem.util.tor_tools.is_valid_fingerprint(dir_source_comp[1]):
- raise ValueError("Authority's fingerprint is invalid: %s" % dir_source_comp[1])
- elif not dir_source_comp[2]:
- # https://trac.torproject.org/7055
- raise ValueError("Authority's hostname can't be blank: %s" % line)
- elif not stem.util.connection.is_valid_ipv4_address(dir_source_comp[3]):
- raise ValueError("Authority's address isn't a valid IPv4 address: %s" % dir_source_comp[3])
- elif not stem.util.connection.is_valid_port(dir_source_comp[4], allow_zero = True):
- raise ValueError("Authority's DirPort is invalid: %s" % dir_source_comp[4])
- elif not stem.util.connection.is_valid_port(dir_source_comp[5]):
- raise ValueError("Authority's ORPort is invalid: %s" % dir_source_comp[5])
- elif not (dir_source_comp[4].isdigit() and dir_source_comp[5].isdigit()):
- continue
-
- self.nickname = dir_source_comp[0]
- self.fingerprint = dir_source_comp[1]
- self.hostname = dir_source_comp[2]
- self.address = dir_source_comp[3]
- self.dir_port = None if dir_source_comp[4] == '0' else int(dir_source_comp[4])
- self.or_port = int(dir_source_comp[5])
- self.is_legacy = self.nickname.endswith('-legacy')
- elif keyword == 'contact':
- # "contact" string
-
- self.contact = value
- elif keyword == 'legacy-dir-key':
- # "legacy-dir-key" FINGERPRINT
-
- if validate and not stem.util.tor_tools.is_valid_fingerprint(value):
- raise ValueError('Authority has a malformed legacy directory key: %s' % line)
-
- self.legacy_dir_key = value
- elif keyword == 'vote-digest':
- # "vote-digest" digest
-
- # technically not a fingerprint, but has the same characteristics
- if validate and not stem.util.tor_tools.is_valid_fingerprint(value):
- raise ValueError('Authority has a malformed vote digest: %s' % line)
-
- self.vote_digest = value
- else:
- self._unrecognized_lines.append(line)
-
- if key_cert_content:
- self.key_certificate = KeyCertificate(key_cert_content, validate)
-
- def get_unrecognized_lines(self):
- """
- Returns any unrecognized lines.
-
- :returns: a list of unrecognized lines
- """
-
- return self._unrecognized_lines
+ self._parse(entries, validate)
+ else:
+ self._entries = entries
def _compare(self, other, method):
if not isinstance(other, DirectoryAuthority):
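[Editor's note] The hunks above replace DirectoryAuthority's hand-written keyword handling with the declarative ATTRIBUTES / PARSER_FOR_LINE tables, deferring parsing when validation is skipped. Below is a minimal sketch of that table-driven, lazy pattern; it is illustrative only (LazyDescriptor and its internals are made-up names, not stem's actual Descriptor base class):

  class LazyDescriptor(object):
    ATTRIBUTES = {}       # attribute name -> (default value, parser function)
    PARSER_FOR_LINE = {}  # descriptor keyword -> parser function

    def __init__(self, entries, lazy_load = False):
      # entries maps each keyword to a list of (value, block_type, block_contents)
      self._entries = entries

      if not lazy_load:
        # eager path: run every known parser up front
        for keyword, parser in self.PARSER_FOR_LINE.items():
          if keyword in entries:
            parser(self, entries)

    def __getattr__(self, name):
      # only reached when the attribute hasn't been set yet
      if name in self.ATTRIBUTES:
        default, parser = self.ATTRIBUTES[name]

        try:
          parser(self, self._entries)
        except (KeyError, ValueError):
          pass  # missing or malformed line, fall back to the default

        return self.__dict__.get(name, default)

      raise AttributeError(name)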
diff --git a/stem/descriptor/server_descriptor.py b/stem/descriptor/server_descriptor.py
index 09dc9c3..f626050 100644
--- a/stem/descriptor/server_descriptor.py
+++ b/stem/descriptor/server_descriptor.py
@@ -57,7 +57,7 @@ from stem.descriptor import (
_value,
_values,
_parse_timestamp_line,
- _parse_sha1_digest_line,
+ _parse_forty_character_hex,
_parse_key_block,
)
@@ -270,18 +270,6 @@ def _parse_hibernating_line(descriptor, entries):
descriptor.hibernating = value == '1'
-def _parse_extrainfo_digest_line(descriptor, entries):
- # this is forty hex digits which just so happens to be the same a
- # fingerprint
-
- value = _value('extra-info-digest', entries)
-
- if not stem.util.tor_tools.is_valid_fingerprint(value):
- raise ValueError('Extra-info digests should consist of forty hex digits: %s' % value)
-
- descriptor.extra_info_digest = value
-
-
def _parse_hidden_service_dir_line(descriptor, entries):
value = _value('hidden-service-dir', entries)
@@ -379,6 +367,7 @@ def _parse_exit_policy(descriptor, entries):
_parse_published_line = _parse_timestamp_line('published', 'published')
+_parse_extrainfo_digest_line = _parse_forty_character_hex('extra-info-digest', 'extra_info_digest')
_parse_read_history_line = functools.partial(_parse_history_line, 'read-history', 'read_history_end', 'read_history_interval', 'read_history_values')
_parse_write_history_line = functools.partial(_parse_history_line, 'write-history', 'write_history_end', 'write_history_interval', 'write_history_values')
_parse_ipv6_policy_line = lambda descriptor, entries: setattr(descriptor, 'exit_policy_v6', stem.exit_policy.MicroExitPolicy(_value('ipv6-policy', entries)))
@@ -390,7 +379,7 @@ _parse_onion_key_line = _parse_key_block('onion-key', 'onion_key', 'RSA PUBLIC K
_parse_signing_key_line = _parse_key_block('signing-key', 'signing_key', 'RSA PUBLIC KEY')
_parse_router_signature_line = _parse_key_block('router-signature', 'signature', 'SIGNATURE')
_parse_ntor_onion_key_line = lambda descriptor, entries: setattr(descriptor, 'ntor_onion_key', _value('ntor-onion-key', entries))
-_parse_router_digest_line = _parse_sha1_digest_line('router-digest', '_digest')
+_parse_router_digest_line = _parse_forty_character_hex('router-digest', '_digest')
class ServerDescriptor(Descriptor):
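[Editor's note] The server descriptor change above drops the bespoke extra-info digest parser in favour of the renamed _parse_forty_character_hex factory. Its body isn't shown in this patch, so the following is only a sketch inferred from the deleted _parse_extrainfo_digest_line and the factory signature visible later in the thread:

  def _parse_forty_character_hex(keyword, attribute):
    # "<keyword>" <forty hex characters>

    def _parse(descriptor, entries):
      value = _value(keyword, entries)

      # forty hex characters share the same format as a relay fingerprint
      if not stem.util.tor_tools.is_valid_fingerprint(value):
        raise ValueError("%s line had an invalid value (should be forty hex characters): %s %s" % (keyword, keyword, value))

      setattr(descriptor, attribute, value)

    return _parse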
diff --git a/test/unit/descriptor/networkstatus/directory_authority.py b/test/unit/descriptor/networkstatus/directory_authority.py
index 1114518..a5bc647 100644
--- a/test/unit/descriptor/networkstatus/directory_authority.py
+++ b/test/unit/descriptor/networkstatus/directory_authority.py
@@ -164,7 +164,7 @@ class TestDirectoryAuthority(unittest.TestCase):
self.assertRaises(ValueError, DirectoryAuthority, content)
authority = DirectoryAuthority(content, False)
- self.assertEqual(value, authority.fingerprint)
+ self.assertEqual(None, authority.fingerprint)
def test_malformed_address(self):
"""
@@ -186,7 +186,7 @@ class TestDirectoryAuthority(unittest.TestCase):
self.assertRaises(ValueError, DirectoryAuthority, content)
authority = DirectoryAuthority(content, False)
- self.assertEqual(value, authority.address)
+ self.assertEqual(None, authority.address)
def test_malformed_port(self):
"""
@@ -219,9 +219,8 @@ class TestDirectoryAuthority(unittest.TestCase):
authority = DirectoryAuthority(content, False)
- expected_value = 399482 if value == '399482' else None
actual_value = authority.or_port if include_or_port else authority.dir_port
- self.assertEqual(expected_value, actual_value)
+ self.assertEqual(None, actual_value)
def test_legacy_dir_key(self):
"""
@@ -247,7 +246,7 @@ class TestDirectoryAuthority(unittest.TestCase):
self.assertRaises(ValueError, DirectoryAuthority, content)
authority = DirectoryAuthority(content, False)
- self.assertEqual(value, authority.legacy_dir_key)
+ self.assertEqual(None, authority.legacy_dir_key)
def test_key_certificate(self):
"""
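[Editor's note] The test updates above capture a behavioural consequence of lazy loading: with validation disabled, a malformed field now falls back to its declared default (None) rather than echoing back the raw value from the document. For example, assuming a DirectoryAuthority entry with a bogus fingerprint in a local variable content:

  authority = DirectoryAuthority(content, False)
  print(authority.fingerprint)  # None rather than the malformed string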
commit 0f1b71125ab2709a041c02e0b700d47c4d69b6d7
Author: Damian Johnson <atagar(a)torproject.org>
Date: Sun Jan 18 11:04:00 2015 -0800
NetworkStatusDocumentV2 lazy loading
---
---
stem/descriptor/__init__.py | 14 ++-
stem/descriptor/microdescriptor.py | 3 +-
stem/descriptor/networkstatus.py | 215 +++++++++++++---------------------
stem/descriptor/server_descriptor.py | 3 +-
4 files changed, 98 insertions(+), 137 deletions(-)
diff --git a/stem/descriptor/__init__.py b/stem/descriptor/__init__.py
index cd9dcde..9a3412d 100644
--- a/stem/descriptor/__init__.py
+++ b/stem/descriptor/__init__.py
@@ -319,6 +319,13 @@ def _values(line, entries):
return [entry[0] for entry in entries[line]]
+def _parse_simple_line(keyword, attribute):
+ def _parse(descriptor, entries):
+ setattr(descriptor, attribute, _value(keyword, entries))
+
+ return _parse
+
+
def _parse_timestamp_line(keyword, attribute):
# "<keyword>" YYYY-MM-DD HH:MM:SS
@@ -347,15 +354,18 @@ def _parse_forty_character_hex(keyword, attribute):
return _parse
-def _parse_key_block(keyword, attribute, expected_block_type):
+def _parse_key_block(keyword, attribute, expected_block_type, value_attribute = None):
def _parse(descriptor, entries):
value, block_type, block_contents = entries[keyword][0]
if not block_contents or block_type != expected_block_type:
- raise ValueError("'%s' should be followed by a %s block" % (keyword, expected_block_type))
+ raise ValueError("'%s' should be followed by a %s block, but was a %s" % (keyword, expected_block_type, block_type))
setattr(descriptor, attribute, block_contents)
+ if value_attribute:
+ setattr(descriptor, value_attribute, value)
+
return _parse
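[Editor's note] _parse_simple_line is a small closure factory, so keyword-to-attribute assignments that used to be lambdas become one-line declarations, and the value_attribute extension to _parse_key_block lets a single keyword populate both its value and its key block. Both usages appear verbatim later in this patch:

  _parse_ntor_onion_key_line = _parse_simple_line('ntor-onion-key', 'ntor_onion_key')

  # 'directory-signature' keeps the SIGNATURE block in 'signature' and the
  # keyword's value (the signing authority) in 'signing_authority'
  _parse_directory_signature_line = _parse_key_block('directory-signature', 'signature', 'SIGNATURE', value_attribute = 'signing_authority')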
diff --git a/stem/descriptor/microdescriptor.py b/stem/descriptor/microdescriptor.py
index 0590cbb..fd9ef9b 100644
--- a/stem/descriptor/microdescriptor.py
+++ b/stem/descriptor/microdescriptor.py
@@ -75,6 +75,7 @@ from stem.descriptor import (
_read_until_keywords,
_value,
_values,
+ _parse_simple_line,
_parse_key_block,
)
@@ -171,7 +172,7 @@ def _parse_id_line(descriptor, entries):
_parse_onion_key_line = _parse_key_block('onion-key', 'onion_key', 'RSA PUBLIC KEY')
-_parse_ntor_onion_key_line = lambda descriptor, entries: setattr(descriptor, 'ntor_onion_key', _value('ntor-onion-key', entries))
+_parse_ntor_onion_key_line = _parse_simple_line('ntor-onion-key', 'ntor_onion_key')
_parse_family_line = lambda descriptor, entries: setattr(descriptor, 'family', _value('family', entries).split(' '))
_parse_p_line = lambda descriptor, entries: stem.descriptor.router_status_entry._parse_p_line(descriptor, _value('p', entries), True)
_parse_p6_line = lambda descriptor, entries: setattr(descriptor, 'exit_policy_v6', stem.exit_policy.MicroExitPolicy(_value('p6', entries)))
diff --git a/stem/descriptor/networkstatus.py b/stem/descriptor/networkstatus.py
index 70f325b..29d2593 100644
--- a/stem/descriptor/networkstatus.py
+++ b/stem/descriptor/networkstatus.py
@@ -63,6 +63,7 @@ from stem.descriptor import (
_get_descriptor_components,
_read_until_keywords,
_value,
+ _parse_simple_line,
_parse_timestamp_line,
_parse_forty_character_hex,
_parse_key_block,
@@ -269,12 +270,51 @@ class NetworkStatusDocument(Descriptor):
Common parent for network status documents.
"""
- def __init__(self, raw_content):
- super(NetworkStatusDocument, self).__init__(raw_content)
- self._unrecognized_lines = []
- def get_unrecognized_lines(self):
- return list(self._unrecognized_lines)
+def _parse_version_line(keyword, attribute, expected_version):
+ def _parse(descriptor, entries):
+ value = _value(keyword, entries)
+
+ if not value.isdigit():
+ raise ValueError('Document has a non-numeric version: %s %s' % (keyword, value))
+
+ setattr(descriptor, attribute, int(value))
+
+ if int(value) != expected_version:
+ raise ValueError("Expected a version %i document, but got version '%s' instead" % (expected_version, value))
+
+ return _parse
+
+
+def _parse_dir_source_line(descriptor, entries):
+ value = _value('dir-source', entries)
+ dir_source_comp = value.split()
+
+ if len(dir_source_comp) < 3:
+ raise ValueError("The 'dir-source' line of a v2 network status document must have three values: dir-source %s" % value)
+
+ if not dir_source_comp[0]:
+ # https://trac.torproject.org/7055
+ raise ValueError("Authority's hostname can't be blank: dir-source %s" % value)
+ elif not stem.util.connection.is_valid_ipv4_address(dir_source_comp[1]):
+ raise ValueError("Authority's address isn't a valid IPv4 address: %s" % dir_source_comp[1])
+ elif not stem.util.connection.is_valid_port(dir_source_comp[2], allow_zero = True):
+ raise ValueError("Authority's DirPort is invalid: %s" % dir_source_comp[2])
+
+ descriptor.hostname = dir_source_comp[0]
+ descriptor.address = dir_source_comp[1]
+ descriptor.dir_port = None if dir_source_comp[2] == '0' else int(dir_source_comp[2])
+
+
+_parse_network_status_version_line = _parse_version_line('network-status-version', 'version', 2)
+_parse_fingerprint_line = _parse_forty_character_hex('fingerprint', 'fingerprint')
+_parse_contact_line = _parse_simple_line('contact', 'contact')
+_parse_dir_signing_key_line = _parse_key_block('dir-signing-key', 'signing_key', 'RSA PUBLIC KEY')
+_parse_client_versions_line = lambda descriptor, entries: setattr(descriptor, 'client_versions', _value('client-versions', entries).split(','))
+_parse_server_versions_line = lambda descriptor, entries: setattr(descriptor, 'server_versions', _value('server-versions', entries).split(','))
+_parse_published_line = _parse_timestamp_line('published', 'published')
+_parse_dir_options_line = lambda descriptor, entries: setattr(descriptor, 'options', _value('dir-options', entries).split())
+_parse_directory_signature_line = _parse_key_block('directory-signature', 'signature', 'SIGNATURE', value_attribute = 'signing_authority')
class NetworkStatusDocumentV2(NetworkStatusDocument):
@@ -306,24 +346,39 @@ class NetworkStatusDocumentV2(NetworkStatusDocument):
a default value, others are left as **None** if undefined
"""
- def __init__(self, raw_content, validate = True):
- super(NetworkStatusDocumentV2, self).__init__(raw_content)
+ ATTRIBUTES = {
+ 'version': (None, _parse_network_status_version_line),
+ 'hostname': (None, _parse_dir_source_line),
+ 'address': (None, _parse_dir_source_line),
+ 'dir_port': (None, _parse_dir_source_line),
+ 'fingerprint': (None, _parse_fingerprint_line),
+ 'contact': (None, _parse_contact_line),
+ 'signing_key': (None, _parse_dir_signing_key_line),
- self.version = None
- self.hostname = None
- self.address = None
- self.dir_port = None
- self.fingerprint = None
- self.contact = None
- self.signing_key = None
+ 'client_versions': ([], _parse_client_versions_line),
+ 'server_versions': ([], _parse_server_versions_line),
+ 'published': (None, _parse_published_line),
+ 'options': ([], _parse_dir_options_line),
- self.client_versions = []
- self.server_versions = []
- self.published = None
- self.options = []
+ 'signing_authority': (None, _parse_directory_signature_line),
+ 'signatures': (None, _parse_directory_signature_line),
+ }
+
+ PARSER_FOR_LINE = {
+ 'network-status-version': _parse_network_status_version_line,
+ 'dir-source': _parse_dir_source_line,
+ 'fingerprint': _parse_fingerprint_line,
+ 'contact': _parse_contact_line,
+ 'dir-signing-key': _parse_dir_signing_key_line,
+ 'client-versions': _parse_client_versions_line,
+ 'server-versions': _parse_server_versions_line,
+ 'published': _parse_published_line,
+ 'dir-options': _parse_dir_options_line,
+ 'directory-signature': _parse_directory_signature_line,
+ }
- self.signing_authority = None
- self.signatures = None
+ def __init__(self, raw_content, validate = True):
+ super(NetworkStatusDocumentV2, self).__init__(raw_content, lazy_load = not validate)
# Splitting the document from the routers. Unlike v3 documents we're not
# bending over backwards on the validation by checking the field order or
@@ -351,96 +406,15 @@ class NetworkStatusDocumentV2(NetworkStatusDocument):
if validate:
self._check_constraints(entries)
+ self._parse(entries, validate)
- self._parse(entries, validate)
-
- def _parse(self, entries, validate):
- for keyword, values in list(entries.items()):
- value, block_type, block_contents = values[0]
-
- line = '%s %s' % (keyword, value) # original line
-
- if block_contents:
- line += '\n%s' % block_contents
-
- if keyword == 'network-status-version':
- if not value.isdigit():
- if not validate:
- continue
-
- raise ValueError('Network status document has a non-numeric version: %s' % line)
-
- self.version = int(value)
-
- if validate and self.version != 2:
- raise ValueError("Expected a version 2 network status document, got version '%s' instead" % self.version)
- elif keyword == 'dir-source':
- dir_source_comp = value.split()
-
- if len(dir_source_comp) < 3:
- if not validate:
- continue
-
- raise ValueError("The 'dir-source' line of a v2 network status document must have three values: %s" % line)
-
- if validate:
- if not dir_source_comp[0]:
- # https://trac.torproject.org/7055
- raise ValueError("Authority's hostname can't be blank: %s" % line)
- elif not stem.util.connection.is_valid_ipv4_address(dir_source_comp[1]):
- raise ValueError("Authority's address isn't a valid IPv4 address: %s" % dir_source_comp[1])
- elif not stem.util.connection.is_valid_port(dir_source_comp[2], allow_zero = True):
- raise ValueError("Authority's DirPort is invalid: %s" % dir_source_comp[2])
- elif not dir_source_comp[2].isdigit():
- continue
-
- self.hostname = dir_source_comp[0]
- self.address = dir_source_comp[1]
- self.dir_port = None if dir_source_comp[2] == '0' else int(dir_source_comp[2])
- elif keyword == 'fingerprint':
- if validate and not stem.util.tor_tools.is_valid_fingerprint(value):
- raise ValueError("Authority's fingerprint in a v2 network status document is malformed: %s" % line)
-
- self.fingerprint = value
- elif keyword == 'contact':
- self.contact = value
- elif keyword == 'dir-signing-key':
- if validate and (not block_contents or block_type != 'RSA PUBLIC KEY'):
- raise ValueError("'dir-signing-key' should be followed by a RSA PUBLIC KEY block: %s" % line)
-
- self.signing_key = block_contents
- elif keyword in ('client-versions', 'server-versions'):
- # v2 documents existed while there were tor versions using the 'old'
- # style, hence we aren't attempting to parse them
-
- for version_str in value.split(','):
- if keyword == 'client-versions':
- self.client_versions.append(version_str)
- elif keyword == 'server-versions':
- self.server_versions.append(version_str)
- elif keyword == 'published':
- try:
- self.published = stem.util.str_tools._parse_timestamp(value)
- except ValueError:
- if validate:
- raise ValueError("Version 2 network status document's 'published' time wasn't parsable: %s" % value)
- elif keyword == 'dir-options':
- self.options = value.split()
- elif keyword == 'directory-signature':
- if validate and (not block_contents or block_type != 'SIGNATURE'):
- raise ValueError("'directory-signature' should be followed by a SIGNATURE block: %s" % line)
-
- self.signing_authority = value
- self.signature = block_contents
- else:
- self._unrecognized_lines.append(line)
-
- # 'client-versions' and 'server-versions' are only required if 'Versions'
- # is among the options
+ # 'client-versions' and 'server-versions' are only required if 'Versions'
+ # is among the options
- if validate and 'Versions' in self.options:
- if not ('client-versions' in entries and 'server-versions' in entries):
+ if 'Versions' in self.options and not ('client-versions' in entries and 'server-versions' in entries):
raise ValueError("Version 2 network status documents must have a 'client-versions' and 'server-versions' when 'Versions' is listed among its dir-options:\n%s" % str(self))
+ else:
+ self._entries = entries
def _check_constraints(self, entries):
required_fields = [field for (field, is_mandatory) in NETWORK_STATUS_V2_FIELDS if is_mandatory]
@@ -1060,7 +1034,6 @@ def _parse_dir_source_line(descriptor, entries):
descriptor.is_legacy = descriptor.nickname.endswith('-legacy')
-_parse_contact_line = lambda descriptor, entries: setattr(descriptor, 'contact', _value('contact', entries))
_parse_legacy_dir_key_line = _parse_forty_character_hex('legacy-dir-key', 'legacy_dir_key')
_parse_vote_digest_line = _parse_forty_character_hex('vote-digest', 'vote_digest')
@@ -1207,20 +1180,6 @@ class DirectoryAuthority(Descriptor):
return self._compare(other, lambda s, o: s <= o)
-def _parse_dir_key_certificate_version_line(descriptor, entries):
- # "dir-key-certificate-version" version
-
- value = _value('dir-key-certificate-version', entries)
-
- if not value.isdigit():
- raise ValueError('Key certificate has a non-integer version: dir-key-certificate-version %s' % value)
-
- descriptor.version = int(value)
-
- if descriptor.version != 3:
- raise ValueError("Expected a version 3 key certificate, got version '%i' instead" % descriptor.version)
-
-
def _parse_dir_address_line(descriptor, entries):
# "dir-address" IPPort
@@ -1240,17 +1199,7 @@ def _parse_dir_address_line(descriptor, entries):
descriptor.dir_port = int(dirport)
-def _parse_fingerprint_line(descriptor, entries):
- # "fingerprint" fingerprint
-
- value = _value('fingerprint', entries)
-
- if not stem.util.tor_tools.is_valid_fingerprint(value):
- raise ValueError("Key certificate's fingerprint is malformed: fingerprint %s" % value)
-
- descriptor.fingerprint = value
-
-
+_parse_dir_key_certificate_version_line = _parse_version_line('dir-key-certificate-version', 'version', 3)
_parse_dir_key_published_line = _parse_timestamp_line('dir-key-published', 'published')
_parse_dir_key_expires_line = _parse_timestamp_line('dir-key-expires', 'expires')
_parse_identity_key_line = _parse_key_block('dir-identity-key', 'identity_key', 'RSA PUBLIC KEY')
diff --git a/stem/descriptor/server_descriptor.py b/stem/descriptor/server_descriptor.py
index f626050..1cdf9be 100644
--- a/stem/descriptor/server_descriptor.py
+++ b/stem/descriptor/server_descriptor.py
@@ -56,6 +56,7 @@ from stem.descriptor import (
_read_until_keywords,
_value,
_values,
+ _parse_simple_line,
_parse_timestamp_line,
_parse_forty_character_hex,
_parse_key_block,
@@ -378,7 +379,7 @@ _parse_eventdns_line = lambda descriptor, entries: setattr(descriptor, 'eventdns
_parse_onion_key_line = _parse_key_block('onion-key', 'onion_key', 'RSA PUBLIC KEY')
_parse_signing_key_line = _parse_key_block('signing-key', 'signing_key', 'RSA PUBLIC KEY')
_parse_router_signature_line = _parse_key_block('router-signature', 'signature', 'SIGNATURE')
-_parse_ntor_onion_key_line = lambda descriptor, entries: setattr(descriptor, 'ntor_onion_key', _value('ntor-onion-key', entries))
+_parse_ntor_onion_key_line = _parse_simple_line('ntor-onion-key', 'ntor_onion_key')
_parse_router_digest_line = _parse_forty_character_hex('router-digest', '_digest')
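[Editor's note] Taken together, these commits mean v2 documents and directory authority entries only pay the parsing cost when validation is requested or an attribute is actually read. A usage sketch, with the raw document content elided here as raw_content:

  from stem.descriptor.networkstatus import NetworkStatusDocumentV2

  # validate = False takes the lazy path: entries are stashed and individual
  # fields are parsed on first access instead of up front
  consensus_v2 = NetworkStatusDocumentV2(raw_content, validate = False)
  print(consensus_v2.published)    # parsed on demand
  print(consensus_v2.fingerprint)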