tor-commits
commit 8ad310114b1ea7b743a868a8b70832eea5b8f3e2
Author: Ravi Chandra Padmala <neenaoffline@gmail.com>
Date: Fri Aug 10 17:49:23 2012 +0530
Add microdescriptor parsing
---
run_tests.py | 1 +
stem/descriptor/__init__.py | 4 +
stem/descriptor/networkstatus.py | 218 ++++++++++++++++++++++++++++++--
test/integ/descriptor/networkstatus.py | 42 ++++++
4 files changed, 254 insertions(+), 11 deletions(-)
diff --git a/run_tests.py b/run_tests.py
index 8d115f1..b0550d6 100755
--- a/run_tests.py
+++ b/run_tests.py
@@ -136,6 +136,7 @@ INTEG_TESTS = (
test.integ.descriptor.server_descriptor.TestServerDescriptor,
test.integ.descriptor.extrainfo_descriptor.TestExtraInfoDescriptor,
test.integ.descriptor.networkstatus.TestNetworkStatusDocument,
+ test.integ.descriptor.networkstatus.TestMicrodescriptorConsensus,
test.integ.version.TestVersion,
test.integ.response.protocolinfo.TestProtocolInfo,
test.integ.process.TestProcess,
diff --git a/stem/descriptor/__init__.py b/stem/descriptor/__init__.py
index 40f03ad..d9ac21b 100644
--- a/stem/descriptor/__init__.py
+++ b/stem/descriptor/__init__.py
@@ -66,6 +66,8 @@ def parse_file(path, descriptor_file):
file_parser = stem.descriptor.extrainfo_descriptor.parse_file
elif filename == "cached-consensus":
file_parser = stem.descriptor.networkstatus.parse_file
+ elif filename == "cached-microdesc-consensus":
+ file_parser = lambda f: stem.descriptor.networkstatus.parse_file(f, True, "microdesc")
if file_parser:
for desc in file_parser(descriptor_file):
@@ -103,6 +105,8 @@ def parse_file(path, descriptor_file):
desc._set_path(path)
yield desc
return
+ elif desc_type == "network-status-microdesc-consensus-3" and major_version == 1:
+ desc = stem.descriptor.networkstatus.MicrodescriptorConsensus(descriptor_file.read())
if desc:
desc._set_path(path)
diff --git a/stem/descriptor/networkstatus.py b/stem/descriptor/networkstatus.py
index 214a33c..7effc7e 100644
--- a/stem/descriptor/networkstatus.py
+++ b/stem/descriptor/networkstatus.py
@@ -59,9 +59,30 @@ _bandwidth_weights_regex = re.compile(" ".join(["W%s=\d+" % weight for weight in
_router_desc_end_kws = ["r", "bandwidth-weights", "directory-footer", "directory-signature"]
+Flavour = stem.util.enum.Enum(
+ ("NONE", ""),
+ ("NS", "ns"),
+ ("MICRODESCRIPTOR", "microdesc"),
+ )
+
+Flag = stem.util.enum.Enum(
+ ("AUTHORITY", "Authority"),
+ ("BADEXIT", "BadExit"),
+ ("EXIT", "Exit"),
+ ("FAST", "Fast"),
+ ("GUARD", "Guard"),
+ ("HSDIR", "HSDir"),
+ ("NAMED", "Named"),
+ ("RUNNING", "Running"),
+ ("STABLE", "Stable"),
+ ("UNNAMED", "Unnamed"),
+ ("V2DIR", "V2Dir"),
+ ("VALID", "Valid"),
+ )
+
Flag = stem.util.enum.Enum(*[(flag.upper(), flag) for flag in ["Authority", "BadExit", "Exit", "Fast", "Guard", "HSDir", "Named", "Running", "Stable", "Unnamed", "V2Dir", "Valid"]])
-def parse_file(document_file, validate = True):
+def parse_file(document_file, validate = True, flavour = Flavour.NONE):
"""
Iterates over the router descriptors in a network status document.
@@ -83,15 +104,27 @@ def parse_file(document_file, validate = True):
_skip_until_keywords(["bandwidth-weights", "directory-footer", "directory-signature"], document_file)
# parse until end
document_data = document_data + document_file.read()
- document = NetworkStatusDocument(document_data, validate)
- document_file.seek(r_offset)
- document.router_descriptors = _router_desc_generator(document_file, document.vote_status == "vote", validate, document.known_flags)
- return document.router_descriptors
+
+ if flavour == Flavour.NONE:
+ document = NetworkStatusDocument(document_data, validate)
+ document_file.seek(r_offset)
+ document.router_descriptors = _ns_router_desc_generator(document_file, document.vote_status == "vote", validate)
+ yield document
+ elif flavour == Flavour.MICRODESCRIPTOR:
+ document = MicrodescriptorConsensus(document_data, validate)
+ document_file.seek(r_offset)
+ document.router_descriptors = _router_microdesc_generator(document_file, validate, document.known_flags)
+ yield document
+
+def _ns_router_desc_generator(document_file, vote, validate):
+ while _peek_keyword(document_file) == "r":
+ desc_content = "".join(_read_until_keywords(_router_desc_end_kws, document_file, False, True))
+ yield RouterDescriptor(desc_content, vote, validate)
-def _router_desc_generator(document_file, vote, validate, known_flags):
+def _router_microdesc_generator(document_file, validate, known_flags):
while _peek_keyword(document_file) == "r":
desc_content = "".join(_read_until_keywords(_router_desc_end_kws, document_file, False, True))
- yield RouterDescriptor(desc_content, vote, validate, known_flags)
+ yield RouterMicrodescriptor(desc_content, validate, known_flags)
class NetworkStatusDocument(stem.descriptor.Descriptor):
"""
@@ -159,8 +192,10 @@ class NetworkStatusDocument(stem.descriptor.Descriptor):
self._parse(raw_content)
- def _generate_router(self, raw_content, vote, validate, known_flags):
- return RouterDescriptor(raw_content, vote, validate, known_flags)
+ def _router_desc_generator(self, document_file):
+ while _peek_keyword(document_file) == "r":
+ desc_content = "".join(_read_until_keywords(_router_desc_end_kws, document_file, False, True))
+ yield RouterDescriptor(desc_content, self.vote_status == "vote", self.validated, self.known_flags)
def _validate_network_status_version(self):
return self.network_status_version == "3"
@@ -223,7 +258,7 @@ class NetworkStatusDocument(stem.descriptor.Descriptor):
# router descriptors
if _peek_keyword(content) == "r":
router_descriptors_data = "".join(_read_until_keywords(["bandwidth-weights", "directory-footer", "directory-signature"], content, False, True))
- self.router_descriptors = _router_desc_generator(StringIO(router_descriptors_data), vote, validate, self.known_flags)
+ self.router_descriptors = self._router_desc_generator(StringIO(router_descriptors_data))
# footer section
if self.consensus_method > 9 or vote and filter(lambda x: x >= 9, self.consensus_methods):
@@ -394,7 +429,7 @@ class RouterDescriptor(stem.descriptor.Descriptor):
:param bool vote: True if the descriptor is from a vote document
:param bool validate: whether the router descriptor should be validated
:param bool known_flags: list of known router status flags
-
+
:raises: ValueError if the descriptor data is invalid
"""
@@ -524,3 +559,164 @@ class RouterDescriptor(stem.descriptor.Descriptor):
return self.unrecognized_lines
+class MicrodescriptorConsensus(NetworkStatusDocument):
+ """
+ A v3 microdescriptor consensus.
+
+ :var bool validated: **\*** whether the document is validated
+ :var str network_status_version: **\*** a document format version. For v3 microdescriptor consensuses this is "3 microdesc"
+ :var str vote_status: **\*** status of the vote (is "consensus")
+ :var int consensus_method: **~** consensus method used to generate a consensus
+ :var datetime valid_after: **\*** time when the consensus becomes valid
+ :var datetime fresh_until: **\*** time until when the consensus is considered to be fresh
+ :var datetime valid_until: **\*** time until when the consensus is valid
+ :var int vote_delay: **\*** number of seconds allowed for collecting votes from all authorities
+ :var int dist_delay: number of seconds allowed for collecting signatures from all authorities
+ :var list client_versions: list of recommended Tor client versions
+ :var list server_versions: list of recommended Tor server versions
+ :var list known_flags: **\*** list of known router flags
+ :var list params: dict of parameter(str) => value(int) mappings
+ :var list router_descriptors: **\*** iterator for RouterDescriptor objects defined in the document
+ :var list directory_authorities: **\*** list of DirectoryAuthority objects that have generated this document
+ :var dict bandwidth_weights: **~** dict of weight(str) => value(int) mappings
+ :var list directory_signatures: **\*** list of signatures this document has
+
+ | **\*** attribute is either required when we're parsed with validation or has a default value, others are left as None if undefined
+ | **~** attribute appears only in consensuses
+ """
+
+ def _router_desc_generator(self, document_file):
+ while _peek_keyword(document_file) == "r":
+ desc_content = "".join(_read_until_keywords(_router_desc_end_kws, document_file, False, True))
+ yield RouterMicrodescriptor(desc_content, self.validated, self.known_flags)
+
+ def _validate_network_status_version(self):
+ return self.network_status_version == "3 microdesc"
+
+class RouterMicrodescriptor(RouterDescriptor):
+ """
+ Router microdescriptor object. Parses and stores router information in a router
+ microdescriptor from a v3 microdescriptor consensus.
+
+ :var str nickname: **\*** router's nickname
+ :var str identity: **\*** router's identity
+ :var datetime publication: **\*** router's publication
+ :var str ip: **\*** router's IP address
+ :var int orport: **\*** router's ORPort
+ :var int dirport: **\*** router's DirPort
+
+ :var list flags: **\*** list of status flags
+ :var list unknown_flags: **\*** list of unidentified status flags
+
+ :var :class:`stem.version.Version`,str version: Version of the Tor protocol this router is running
+
+ :var int bandwidth: router's claimed bandwidth
+ :var int measured_bandwidth: router's measured bandwidth
+
+ :var str digest: base64 of the hash of the router's microdescriptor with trailing =s omitted
+
+ | **\*** attribute is either required when we're parsed with validation or has a default value, others are left as None if undefined
+ """
+
+ def __init__(self, raw_contents, validate = True, known_flags = Flag):
+ """
+ Parse a router descriptor in a v3 microdescriptor consensus and provide a new
+ RouterMicrodescriptor object.
+
+ :param str raw_content: router descriptor content to be parsed
+ :param bool validate: whether the router descriptor should be validated
+ :param bool known_flags: list of known router status flags
+
+ :raises: ValueError if the descriptor data is invalid
+ """
+
+ super(RouterMicrodescriptor, self).__init__(raw_contents, False, validate, known_flags)
+
+ def _parse(self, raw_content, _, validate, known_flags):
+ """
+ :param dict raw_content: router descriptor contents to be parsed
+ :param bool validate: checks the validity of descriptor content if True
+ :param bool known_flags: list of known router status flags
+
+ :raises: ValueError if an error occures in validation
+ """
+
+ content = StringIO(raw_content)
+ seen_keywords = set()
+ peek_check_kw = lambda keyword: keyword == _peek_keyword(content)
+
+ r = _read_keyword_line("r", content, validate)
+ # r mauer BD7xbfsCFku3+tgybEZsg8Yjhvw itcuKQ6PuPLJ7m/Oi928WjO2j8g 2012-06-22 13:19:32 80.101.105.103 9001 0
+ # "r" SP nickname SP identity SP digest SP publication SP IP SP ORPort SP DirPort NL
+ if r:
+ seen_keywords.add("r")
+ values = r.split(" ")
+ self.nickname, self.identity = values[0], values[1]
+ self.publication = _strptime(" ".join((values[2], values[3])), validate)
+ self.ip, self.orport, self.dirport = values[4], int(values[5]), int(values[6])
+ if self.dirport == 0: self.dirport = None
+ elif validate: raise ValueError("Invalid router descriptor: empty 'r' line")
+
+ while _peek_line(content):
+ if peek_check_kw("s"):
+ if "s" in seen_keywords: raise ValueError("Invalid router descriptor: 's' line appears twice")
+ line = _read_keyword_line("s", content, validate)
+ if not line: continue
+ seen_keywords.add("s")
+ # s Named Running Stable Valid
+ #A series of space-separated status flags, in *lexical order*
+ self.flags = line.split(" ")
+
+ self.unknown_flags = filter(lambda f: not f in known_flags, self.flags)
+ if validate and self.unknown_flags:
+ raise ValueError("Router contained unknown flags: %s", " ".join(self.unknown_flags))
+
+ elif peek_check_kw("v"):
+ if "v" in seen_keywords: raise ValueError("Invalid router descriptor: 'v' line appears twice")
+ line = _read_keyword_line("v", content, validate, True)
+ seen_keywords.add("v")
+ # v Tor 0.2.2.35
+ if line:
+ if line.startswith("Tor "):
+ self.version = stem.version.Version(line[4:])
+ else:
+ self.version = line
+ elif validate: raise ValueError("Invalid router descriptor: empty 'v' line" )
+
+ elif peek_check_kw("w"):
+ if "w" in seen_keywords: raise ValueError("Invalid router descriptor: 'w' line appears twice")
+ w = _read_keyword_line("w", content, validate, True)
+ # "w" SP "Bandwidth=" INT [SP "Measured=" INT] NL
+ seen_keywords.add("w")
+ if w:
+ values = w.split(" ")
+ if len(values) <= 2 and len(values) > 0:
+ key, value = values[0].split("=")
+ if key == "Bandwidth": self.bandwidth = int(value)
+ elif validate: raise ValueError("Router descriptor contains invalid 'w' line: expected Bandwidth, read " + key)
+
+ if len(values) == 2:
+ key, value = values[1].split("=")
+ if key == "Measured": self.measured_bandwidth = int(value)
+ elif validate: raise ValueError("Router descriptor contains invalid 'w' line: expected Measured, read " + key)
+ elif validate: raise ValueError("Router descriptor contains invalid 'w' line")
+ elif validate: raise ValueError("Router descriptor contains empty 'w' line")
+
+ elif peek_check_kw("m"):
+ # microdescriptor hashes
+ self.digest = _read_keyword_line("m", content, validate, True)
+
+ elif validate:
+ raise ValueError("Router descriptor contains unrecognized trailing lines: %s" % content.readline())
+
+ else:
+ self.unrecognized_lines.append(content.readline()) # ignore unrecognized lines if we aren't validating
+
+ def get_unrecognized_lines(self):
+ """
+ Returns any unrecognized lines.
+
+ :returns: a list of unrecognized lines
+ """
+
+ return self.unrecognized_lines
diff --git a/test/integ/descriptor/networkstatus.py b/test/integ/descriptor/networkstatus.py
index 07414c3..484e67d 100644
--- a/test/integ/descriptor/networkstatus.py
+++ b/test/integ/descriptor/networkstatus.py
@@ -13,6 +13,7 @@ import stem.exit_policy
import stem.version
import stem.descriptor.networkstatus
import test.integ.descriptor
+from stem.descriptor.networkstatus import Flavour
def _strptime(string):
return datetime.datetime.strptime(string, "%Y-%m-%d %H:%M:%S")
@@ -256,3 +257,44 @@ DnN5aFtYKiTc19qIC7Nmo+afPdDEf0MlJvEOP5EWl3w=
self.assertEquals("D5C30C15BB3F1DA27669C2D88439939E8F418FCF", desc.directory_signatures[0].key_digest)
self.assertEquals(expected_signature, desc.directory_signatures[0].signature)
+class TestMicrodescriptorConsensus(unittest.TestCase):
+ def test_cached_microdesc_consensus(self):
+ """
+ Parses the cached-microdesc-consensus file in our data directory.
+ """
+
+ # lengthy test and uneffected by targets, so only run once
+ if test.runner.only_run_once(self, "test_cached_microdesc_consensus"): return
+
+ descriptor_path = test.runner.get_runner().get_test_dir("cached-microdesc-consensus")
+
+ if not os.path.exists(descriptor_path):
+ test.runner.skip(self, "(no cached-microdesc-consensus)")
+
+ count = 0
+ with open(descriptor_path) as descriptor_file:
+ for desc in next(stem.descriptor.networkstatus.parse_file(descriptor_file, True, flavour = Flavour.MICRODESCRIPTOR)).router_descriptors:
+ assert desc.nickname # check that the router has a nickname
+ count += 1
+
+ assert count > 100 # sanity check - assuming atleast 100 relays in the consensus
+
+ def test_metrics_microdesc_consensus(self):
+ """
+ Checks if consensus documents from Metrics are parsed properly.
+ """
+
+ descriptor_path = test.integ.descriptor.get_resource("metrics_microdesc_consensus")
+
+ with file(descriptor_path) as descriptor_file:
+ desc = stem.descriptor.parse_file(descriptor_path, descriptor_file)
+
+ router = next(next(desc).router_descriptors)
+ self.assertEquals("JapanAnon", router.nickname)
+ self.assertEquals("AGw/p8P246zRPQ3ZsQx9+pM8I3s", router.identity)
+ self.assertEquals("9LDw0XiFeLQDXK9t8ht4+MK9tWx6Jxp1RwP36eatRWs", router.digest)
+ self.assertEquals(_strptime("2012-07-18 15:55:42"), router.publication)
+ self.assertEquals("220.0.231.71", router.ip)
+ self.assertEquals(443, router.orport)
+ self.assertEquals(9030, router.dirport)
+
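As a reading aid, here is a minimal standalone sketch of the per-router parsing that RouterMicrodescriptor._parse performs in the patch above. The parse_microdesc_router_entry helper is hypothetical (not part of stem); it mirrors the 'r', 's', 'w', and 'm' handling shown in the diff, and the sample entry reuses the values from the metrics test in this same patch.

from datetime import datetime

def parse_microdesc_router_entry(lines):
    # Hypothetical standalone helper; not stem's API. Parses the 'r', 's',
    # 'w', and 'm' lines of one microdesc consensus router entry.
    router = {}
    for line in lines:
        keyword, _, rest = line.partition(" ")
        if keyword == "r":
            # "r" SP nickname SP identity SP publication SP IP SP ORPort SP DirPort
            values = rest.split(" ")
            router["nickname"], router["identity"] = values[0], values[1]
            router["publication"] = datetime.strptime(" ".join(values[2:4]), "%Y-%m-%d %H:%M:%S")
            router["ip"] = values[4]
            router["orport"], router["dirport"] = int(values[5]), int(values[6]) or None
        elif keyword == "s":
            router["flags"] = rest.split(" ")       # e.g. ["Fast", "Running", "Valid"]
        elif keyword == "w":
            for item in rest.split(" "):            # e.g. "Bandwidth=760"
                key, _, value = item.partition("=")
                router[key.lower()] = int(value)
        elif keyword == "m":
            router["digest"] = rest                 # microdescriptor hash, base64 without padding
    return router

entry = [
    "r JapanAnon AGw/p8P246zRPQ3ZsQx9+pM8I3s 2012-07-18 15:55:42 220.0.231.71 443 9030",
    "s Fast Running Valid",
    "w Bandwidth=760",
    "m 9LDw0XiFeLQDXK9t8ht4+MK9tWx6Jxp1RwP36eatRWs",
]
print(parse_microdesc_router_entry(entry)["nickname"])  # JapanAnon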
[stem/master] Extend _read_until_keyword to read until multiple keywords
by atagar@torproject.org 13 Oct '12
commit 1b26a46ed3c2b26293474d9349efb247f5888a65
Author: Ravi Chandra Padmala <neenaoffline@gmail.com>
Date: Wed Aug 8 08:33:48 2012 +0530
Extend _read_until_keyword to read until multiple keywords
---
stem/descriptor/__init__.py | 11 ++++++-----
stem/descriptor/extrainfo_descriptor.py | 4 ++--
stem/descriptor/server_descriptor.py | 6 +++---
3 files changed, 11 insertions(+), 10 deletions(-)
diff --git a/stem/descriptor/__init__.py b/stem/descriptor/__init__.py
index 8193380..b1f3ab6 100644
--- a/stem/descriptor/__init__.py
+++ b/stem/descriptor/__init__.py
@@ -148,19 +148,20 @@ class Descriptor(object):
def __str__(self):
return self._raw_contents
-def _read_until_keyword(keyword, descriptor_file, inclusive = False):
+def _read_until_keywords(keywords, descriptor_file, inclusive = False):
"""
- Reads from the descriptor file until we get to the given keyword or reach the
+ Reads from the descriptor file until we get to one of the given keywords or reach the
end of the file.
- :param str keyword: keyword we want to read until
+ :param str,list keywords: keyword(s) we want to read until
:param file descriptor_file: file with the descriptor content
:param bool inclusive: includes the line with the keyword if True
- :returns: list with the lines until we find the keyword
+ :returns: list with the lines until we find one of the keywords
"""
content = []
+ if type(keywords) == str: keywords = (keywords,)
while True:
last_position = descriptor_file.tell()
@@ -170,7 +171,7 @@ def _read_until_keyword(keyword, descriptor_file, inclusive = False):
if " " in line: line_keyword = line.split(" ", 1)[0]
else: line_keyword = line.strip()
- if line_keyword == keyword:
+ if line_keyword in keywords:
if inclusive: content.append(line)
else: descriptor_file.seek(last_position)
diff --git a/stem/descriptor/extrainfo_descriptor.py b/stem/descriptor/extrainfo_descriptor.py
index a589139..8d181ae 100644
--- a/stem/descriptor/extrainfo_descriptor.py
+++ b/stem/descriptor/extrainfo_descriptor.py
@@ -131,11 +131,11 @@ def parse_file(descriptor_file, validate = True):
"""
while True:
- extrainfo_content = stem.descriptor._read_until_keyword("router-signature", descriptor_file)
+ extrainfo_content = stem.descriptor._read_until_keywords("router-signature", descriptor_file)
# we've reached the 'router-signature', now include the pgp style block
block_end_prefix = stem.descriptor.PGP_BLOCK_END.split(' ', 1)[0]
- extrainfo_content += stem.descriptor._read_until_keyword(block_end_prefix, descriptor_file, True)
+ extrainfo_content += stem.descriptor._read_until_keywords(block_end_prefix, descriptor_file, True)
if extrainfo_content:
yield RelayExtraInfoDescriptor("".join(extrainfo_content), validate)
diff --git a/stem/descriptor/server_descriptor.py b/stem/descriptor/server_descriptor.py
index 3dc6216..9b8ef0d 100644
--- a/stem/descriptor/server_descriptor.py
+++ b/stem/descriptor/server_descriptor.py
@@ -109,12 +109,12 @@ def parse_file(descriptor_file, validate = True):
# to the caller).
while True:
- annotations = stem.descriptor._read_until_keyword("router", descriptor_file)
- descriptor_content = stem.descriptor._read_until_keyword("router-signature", descriptor_file)
+ annotations = stem.descriptor._read_until_keywords("router", descriptor_file)
+ descriptor_content = stem.descriptor._read_until_keywords("router-signature", descriptor_file)
# we've reached the 'router-signature', now include the pgp style block
block_end_prefix = stem.descriptor.PGP_BLOCK_END.split(' ', 1)[0]
- descriptor_content += stem.descriptor._read_until_keyword(block_end_prefix, descriptor_file, True)
+ descriptor_content += stem.descriptor._read_until_keywords(block_end_prefix, descriptor_file, True)
if descriptor_content:
# strip newlines from annotations
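A minimal standalone illustration of the generalized read-until behaviour this patch introduces, assuming a plain file-like object. The read_until_keywords function below is a simplified stand-in, not stem's actual helper.

import io

def read_until_keywords(keywords, descriptor_file, inclusive=False):
    # Simplified sketch: read lines until one starts with any of the given
    # keywords; accepts a single keyword or a sequence of keywords.
    if isinstance(keywords, str):
        keywords = (keywords,)

    content = []
    while True:
        last_position = descriptor_file.tell()
        line = descriptor_file.readline()
        if not line:
            break  # end of file

        line_keyword = line.split(" ", 1)[0].strip()
        if line_keyword in keywords:
            if inclusive:
                content.append(line)
            else:
                descriptor_file.seek(last_position)  # leave the keyword line for the caller
            break
        else:
            content.append(line)
    return content

doc = io.StringIO("network-status-version 3\nvote-status consensus\nr first\nr second\n")
print(read_until_keywords(("r", "directory-footer"), doc))
# ['network-status-version 3\n', 'vote-status consensus\n']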
commit 6c3717b65acc9d208ef3bf90b5b54f3983e507df
Author: Ravi Chandra Padmala <neenaoffline@gmail.com>
Date: Wed Aug 15 20:47:29 2012 +0530
Fixes to document parsing
One major change is that stem.descriptor.networkstatus.parse_file now
returns a NetworkStatusDocument object instead of iterating over the
router descriptors in the document
---
stem/descriptor/__init__.py | 119 +++++++++++++++-----------------
stem/descriptor/networkstatus.py | 18 +++---
test/integ/descriptor/networkstatus.py | 27 +------
3 files changed, 68 insertions(+), 96 deletions(-)
diff --git a/stem/descriptor/__init__.py b/stem/descriptor/__init__.py
index d9ac21b..6563e4b 100644
--- a/stem/descriptor/__init__.py
+++ b/stem/descriptor/__init__.py
@@ -65,9 +65,17 @@ def parse_file(path, descriptor_file):
elif filename == "cached-extrainfo":
file_parser = stem.descriptor.extrainfo_descriptor.parse_file
elif filename == "cached-consensus":
- file_parser = stem.descriptor.networkstatus.parse_file
+ file_parser = lambda f: [stem.descriptor.networkstatus.parse_file(f)]
elif filename == "cached-microdesc-consensus":
- file_parser = lambda f: stem.descriptor.networkstatus.parse_file(f, True, "microdesc")
+ file_parser = lambda f: [stem.descriptor.networkstatus.parse_file(f, True, "microdesc")]
+ else:
+ # Metrics descriptor handling
+ first_line, desc = descriptor_file.readline().strip(), None
+ metrics_header_match = re.match("^@type (\S+) (\d+).(\d+)$", first_line)
+
+ if metrics_header_match:
+ desc_type, major_version, minor_version = metrics_header_match.groups()
+ file_parser = lambda f: _parse_metrics_file(desc_type, int(major_version), int(minor_version), f)
if file_parser:
for desc in file_parser(descriptor_file):
@@ -76,47 +84,33 @@ def parse_file(path, descriptor_file):
return
- # Metrics descriptor handling. These contain a single descriptor per file.
-
- first_line, desc = descriptor_file.readline().strip(), None
- metrics_header_match = re.match("^@type (\S+) (\d+).(\d+)$", first_line)
-
- if metrics_header_match:
- # still doesn't necessarily mean that this is a descriptor, check if the
- # header contents are recognized
-
- desc_type, major_version, minor_version = metrics_header_match.groups()
- major_version, minor_version = int(major_version), int(minor_version)
-
- if desc_type == "server-descriptor" and major_version == 1:
- desc = stem.descriptor.server_descriptor.RelayDescriptor(descriptor_file.read())
- elif desc_type == "bridge-server-descriptor" and major_version == 1:
- desc = stem.descriptor.server_descriptor.BridgeDescriptor(descriptor_file.read())
- elif desc_type == "extra-info" and major_version == 1:
- desc = stem.descriptor.extrainfo_descriptor.RelayExtraInfoDescriptor(descriptor_file.read())
- elif desc_type == "bridge-extra-info" and major_version == 1:
- # version 1.1 introduced a 'transport' field...
- # https://trac.torproject.org/6257
-
- desc = stem.descriptor.extrainfo_descriptor.BridgeExtraInfoDescriptor(descriptor_file.read())
- elif desc_type in ("network-status-consensus-3", "network-status-vote-3") and major_version == 1:
- desc = stem.descriptor.networkstatus.NetworkStatusDocument(descriptor_file.read())
- for desc in desc.router_descriptors:
- desc._set_path(path)
- yield desc
- return
- elif desc_type == "network-status-microdesc-consensus-3" and major_version == 1:
- desc = stem.descriptor.networkstatus.MicrodescriptorConsensus(descriptor_file.read())
-
- if desc:
- desc._set_path(path)
- yield desc
- return
-
# Not recognized as a descriptor file.
raise TypeError("Unable to determine the descriptor's type. filename: '%s', first line: '%s'" % (filename, first_line))
+def _parse_metrics_file(descriptor_type, major_version, minor_version, descriptor_file):
+ # Parses descriptor files from metrics, yielding individual descriptors. This
+ # throws a TypeError if the descriptor_type or version isn't recognized.
+ import stem.descriptor.server_descriptor
+ import stem.descriptor.extrainfo_descriptor
+ import stem.descriptor.networkstatus
+
+ if descriptor_type == "server-descriptor" and major_version == 1:
+ yield stem.descriptor.server_descriptor.RelayDescriptor(descriptor_file.read())
+ elif descriptor_type == "bridge-server-descriptor" and major_version == 1:
+ yield stem.descriptor.server_descriptor.BridgeDescriptor(descriptor_file.read())
+ elif descriptor_type == "extra-info" and major_version == 1:
+ yield stem.descriptor.extrainfo_descriptor.RelayExtraInfoDescriptor(descriptor_file.read())
+ elif descriptor_type == "bridge-extra-info" and major_version == 1:
+ # version 1.1 introduced a 'transport' field...
+ # https://trac.torproject.org/6257
+
+ yield stem.descriptor.extrainfo_descriptor.BridgeExtraInfoDescriptor(descriptor_file.read())
+ elif descriptor_type in ("network-status-consensus-3", "network-status-vote-3") and major_version == 1:
+ yield stem.descriptor.networkstatus.parse_file(descriptor_file)
+ elif descriptor_type == "network-status-microdesc-consensus-3" and major_version == 1:
+ yield stem.descriptor.networkstatus.parse_file(descriptor_file, flavour = "microdesc")
+
class Descriptor(object):
"""
Common parent for all types of descriptors.
@@ -177,19 +171,13 @@ def _peek_keyword(descriptor_file):
:returns: keyword at the current offset of descriptor_file
"""
- last_position = descriptor_file.tell()
- line = descriptor_file.readline()
- if not line: return None
-
- if " " in line:
- keyword = line.split(" ", 1)[0]
- if keyword == "opt":
- keyword = line.split(" ", 2)[1]
- else: keyword = line.strip()
+ line = _peek_line(descriptor_file)
- descriptor_file.seek(last_position)
+ if line.startswith("opt "):
+ line = line[4:]
+ if not line: return None
- return keyword
+ return line.split(" ", 1)[0].rstrip("\n")
def _read_keyword_line(keyword, descriptor_file, validate = True, optional = False):
"""
@@ -200,8 +188,9 @@ def _read_keyword_line(keyword, descriptor_file, validate = True, optional = Fal
Respects the opt keyword and returns the next keyword if the first is "opt".
:param str keyword: keyword the line must begin with
- :param bool optional: if the current line must begin with the given keyword
+ :param bool descriptor_file: file/file-like object containing descriptor data
:param bool validate: validation is enabled
+ :param bool optional: if the current line must begin with the given keyword
:returns: the text after the keyword if the keyword matches the one provided, otherwise returns None or raises an exception
@@ -214,13 +203,14 @@ def _read_keyword_line(keyword, descriptor_file, validate = True, optional = Fal
raise ValueError("Unexpected end of document")
return None
- if line_matches_keyword(keyword, line):
- line = descriptor_file.readline()
-
- if line == "opt " + keyword or line == keyword: return ""
- elif line.startswith("opt "): return line.split(" ", 2)[2].rstrip("\n")
- else: return line.split(" ", 1)[1].rstrip("\n")
- elif line.startswith("opt"):
+ opt_line = False
+ if line.startswith("opt "):
+ line = line[4:]
+ opt_line = True
+ if re.match("^" + re.escape(keyword) + "($| )", line):
+ descriptor_file.readline()
+ return line[len(keyword):].strip()
+ elif opt_line and not optional:
# if this is something new we don't recognize
# ignore it and go to the next line
descriptor_file.readline()
@@ -239,8 +229,8 @@ def _read_keyword_line_str(keyword, lines, validate = True, optional = False):
:param str keyword: keyword the line must begin with
:param list lines: list of strings to be read from
- :param bool optional: if the current line must begin with the given keyword
:param bool validate: validation is enabled
+ :param bool optional: if the current line must begin with the given keyword
:returns: the text after the keyword if the keyword matches the one provided, otherwise returns None or raises an exception
@@ -252,16 +242,17 @@ def _read_keyword_line_str(keyword, lines, validate = True, optional = False):
raise ValueError("Unexpected end of document")
return
+ opt_line = False
+ if lines[0].startswith("opt "):
+ line = line[4:]
+ opt_line = True
if line_matches_keyword(keyword, lines[0]):
line = lines.pop(0)
- if line == "opt " + keyword or line == keyword: return ""
- elif line.startswith("opt "): return line.split(" ", 2)[2]
- else: return line.split(" ", 1)[1]
- elif line.startswith("opt "):
+ return line[len(keyword):].strip()
+ elif opt_line and not optional:
# if this is something new we don't recognize yet
# ignore it and go to the next line
- lines.pop(0)
return _read_keyword_line_str(keyword, lines, optional)
elif not optional and validate:
raise ValueError("Error parsing network status document: Expected %s, received: %s" % (keyword, lines[0]))
diff --git a/stem/descriptor/networkstatus.py b/stem/descriptor/networkstatus.py
index 7effc7e..f9d89a8 100644
--- a/stem/descriptor/networkstatus.py
+++ b/stem/descriptor/networkstatus.py
@@ -21,7 +21,7 @@ The documents can be obtained from any of the following sources...
nsdoc_file = open("/home/neena/.tor/cached-consensus")
try:
- consensus = stem.descriptor.networkstatus.NetworkStatusDocument(nsdoc_file.read())
+ consensus = stem.descriptor.networkstatus.parse_file(nsdoc_file)
except ValueError:
print "Invalid cached-consensus file"
@@ -33,7 +33,9 @@ The documents can be obtained from any of the following sources...
parse_file - parses a network status file and provides a NetworkStatusDocument
NetworkStatusDocument - Tor v3 network status document
+ +- MicrodescriptorConsensus - Microdescriptor flavoured consensus documents
RouterDescriptor - Router descriptor; contains information about a Tor relay
+ +- RouterMicrodescriptor - Router microdescriptor; contains information that doesn't change frequently
DirectorySignature - Network status document's directory signature
DirectoryAuthority - Directory authority defined in a v3 network status document
"""
@@ -63,7 +65,7 @@ Flavour = stem.util.enum.Enum(
("NONE", ""),
("NS", "ns"),
("MICRODESCRIPTOR", "microdesc"),
- )
+)
Flag = stem.util.enum.Enum(
("AUTHORITY", "Authority"),
@@ -78,18 +80,16 @@ Flag = stem.util.enum.Enum(
("UNNAMED", "Unnamed"),
("V2DIR", "V2Dir"),
("VALID", "Valid"),
- )
-
-Flag = stem.util.enum.Enum(*[(flag.upper(), flag) for flag in ["Authority", "BadExit", "Exit", "Fast", "Guard", "HSDir", "Named", "Running", "Stable", "Unnamed", "V2Dir", "Valid"]])
+)
def parse_file(document_file, validate = True, flavour = Flavour.NONE):
"""
- Iterates over the router descriptors in a network status document.
+ Parses a network status document and provides a NetworkStatusDocument object.
:param file document_file: file with network status document content
:param bool validate: checks the validity of the document's contents if True, skips these checks otherwise
- :returns: iterator for :class:`stem.descriptor.networkstatus.RouterDescriptor` instances in the file
+ :returns: :class:`stem.descriptor.networkstatus.NetworkStatusDocument` object
:raises:
* ValueError if the contents is malformed and validate is True
@@ -109,12 +109,12 @@ def parse_file(document_file, validate = True, flavour = Flavour.NONE):
document = NetworkStatusDocument(document_data, validate)
document_file.seek(r_offset)
document.router_descriptors = _ns_router_desc_generator(document_file, document.vote_status == "vote", validate)
- yield document
+ return document
elif flavour == Flavour.MICRODESCRIPTOR:
document = MicrodescriptorConsensus(document_data, validate)
document_file.seek(r_offset)
document.router_descriptors = _router_microdesc_generator(document_file, validate, document.known_flags)
- yield document
+ return document
def _ns_router_desc_generator(document_file, vote, validate):
while _peek_keyword(document_file) == "r":
diff --git a/test/integ/descriptor/networkstatus.py b/test/integ/descriptor/networkstatus.py
index 484e67d..bd326ad 100644
--- a/test/integ/descriptor/networkstatus.py
+++ b/test/integ/descriptor/networkstatus.py
@@ -39,7 +39,7 @@ class TestNetworkStatusDocument(unittest.TestCase):
count = 0
with open(descriptor_path) as descriptor_file:
- for desc in stem.descriptor.networkstatus.parse_file(descriptor_file):
+ for desc in stem.descriptor.networkstatus.parse_file(descriptor_file).router_descriptors:
if resource.getrusage(resource.RUSAGE_SELF).ru_maxrss > 200000:
# if we're using > 200 MB we should fail
self.fail()
@@ -58,7 +58,7 @@ class TestNetworkStatusDocument(unittest.TestCase):
with file(descriptor_path) as descriptor_file:
desc = stem.descriptor.parse_file(descriptor_path, descriptor_file)
- router = next(desc)
+ router = next(next(desc).router_descriptors)
self.assertEquals("sumkledi", router.nickname)
self.assertEquals("ABPSI4nNUNC3hKPkBhyzHozozrU", router.identity)
self.assertEquals("8mCr8Sl7RF4ENU4jb0FZFA/3do8", router.digest)
@@ -150,7 +150,7 @@ I/TJmV928na7RLZe2mGHCAW3VQOvV+QkCfj05VZ8CsY=
with file(descriptor_path) as descriptor_file:
desc = stem.descriptor.parse_file(descriptor_path, descriptor_file)
- router = next(desc)
+ router = next(next(desc).router_descriptors)
self.assertEquals("sumkledi", router.nickname)
self.assertEquals("ABPSI4nNUNC3hKPkBhyzHozozrU", router.identity)
self.assertEquals("B5n4BiALAF8B5AqafxohyYiuj7E", router.digest)
@@ -273,28 +273,9 @@ class TestMicrodescriptorConsensus(unittest.TestCase):
count = 0
with open(descriptor_path) as descriptor_file:
- for desc in next(stem.descriptor.networkstatus.parse_file(descriptor_file, True, flavour = Flavour.MICRODESCRIPTOR)).router_descriptors:
+ for desc in stem.descriptor.networkstatus.parse_file(descriptor_file, True, flavour = Flavour.MICRODESCRIPTOR).router_descriptors:
assert desc.nickname # check that the router has a nickname
count += 1
assert count > 100 # sanity check - assuming atleast 100 relays in the consensus
-
- def test_metrics_microdesc_consensus(self):
- """
- Checks if consensus documents from Metrics are parsed properly.
- """
-
- descriptor_path = test.integ.descriptor.get_resource("metrics_microdesc_consensus")
-
- with file(descriptor_path) as descriptor_file:
- desc = stem.descriptor.parse_file(descriptor_path, descriptor_file)
-
- router = next(next(desc).router_descriptors)
- self.assertEquals("JapanAnon", router.nickname)
- self.assertEquals("AGw/p8P246zRPQ3ZsQx9+pM8I3s", router.identity)
- self.assertEquals("9LDw0XiFeLQDXK9t8ht4+MK9tWx6Jxp1RwP36eatRWs", router.digest)
- self.assertEquals(_strptime("2012-07-18 15:55:42"), router.publication)
- self.assertEquals("220.0.231.71", router.ip)
- self.assertEquals(443, router.orport)
- self.assertEquals(9030, router.dirport)
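One piece of this patch is the new _parse_metrics_file dispatch keyed on the Metrics '@type' annotation line. Below is a standalone sketch of that header check; the sniff_metrics_type name is hypothetical and the dispatch to concrete descriptor classes is omitted.

import io
import re

def sniff_metrics_type(descriptor_file):
    # Hypothetical helper: read a Metrics '@type' annotation and return
    # (descriptor type, major version, minor version), or None if absent.
    first_line = descriptor_file.readline().strip()
    match = re.match(r"^@type (\S+) (\d+)\.(\d+)$", first_line)
    if not match:
        return None
    desc_type, major, minor = match.groups()
    return desc_type, int(major), int(minor)

example = io.StringIO("@type network-status-microdesc-consensus-3 1.0\nnetwork-status-version 3 microdesc\n")
print(sniff_metrics_type(example))
# ('network-status-microdesc-consensus-3', 1, 0)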
commit f1ecbde2b5f00019ec171c44628c03c2f09e1952
Author: Ravi Chandra Padmala <neenaoffline@gmail.com>
Date: Thu Aug 16 18:18:09 2012 +0530
stop skipping unexpected "opt" lines
---
stem/descriptor/__init__.py | 13 -------------
1 files changed, 0 insertions(+), 13 deletions(-)
diff --git a/stem/descriptor/__init__.py b/stem/descriptor/__init__.py
index 6563e4b..1372e17 100644
--- a/stem/descriptor/__init__.py
+++ b/stem/descriptor/__init__.py
@@ -203,18 +203,11 @@ def _read_keyword_line(keyword, descriptor_file, validate = True, optional = Fal
raise ValueError("Unexpected end of document")
return None
- opt_line = False
if line.startswith("opt "):
line = line[4:]
- opt_line = True
if re.match("^" + re.escape(keyword) + "($| )", line):
descriptor_file.readline()
return line[len(keyword):].strip()
- elif opt_line and not optional:
- # if this is something new we don't recognize
- # ignore it and go to the next line
- descriptor_file.readline()
- return _read_keyword_line(keyword, descriptor_file, optional)
elif not optional and validate:
raise ValueError("Error parsing network status document: Expected %s, received: %s" % (keyword, line))
else: return None
@@ -242,18 +235,12 @@ def _read_keyword_line_str(keyword, lines, validate = True, optional = False):
raise ValueError("Unexpected end of document")
return
- opt_line = False
if lines[0].startswith("opt "):
line = line[4:]
- opt_line = True
if line_matches_keyword(keyword, lines[0]):
line = lines.pop(0)
return line[len(keyword):].strip()
- elif opt_line and not optional:
- # if this is something new we don't recognize yet
- # ignore it and go to the next line
- return _read_keyword_line_str(keyword, lines, optional)
elif not optional and validate:
raise ValueError("Error parsing network status document: Expected %s, received: %s" % (keyword, lines[0]))
else: return None
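A standalone sketch of the simplified keyword-line reading after this change: a leading "opt " prefix is still honoured, but a non-matching line is no longer silently skipped. The read_keyword_line below is a simplified stand-in without the validate/optional handling, not stem's function.

import io
import re

def read_keyword_line(keyword, descriptor_file):
    # Simplified sketch: return the remainder of the next line if it starts
    # with the keyword (optionally prefixed by "opt "); otherwise rewind and
    # return None rather than skipping the line.
    last_position = descriptor_file.tell()
    line = descriptor_file.readline()
    if not line:
        return None

    stripped = line[4:] if line.startswith("opt ") else line
    if re.match("^" + re.escape(keyword) + "($| )", stripped):
        return stripped[len(keyword):].strip()

    descriptor_file.seek(last_position)  # not our keyword; leave it for the caller
    return None

doc = io.StringIO("opt fingerprint AAAA BBBB\nvote-status consensus\n")
print(read_keyword_line("fingerprint", doc))  # AAAA BBBB
print(read_keyword_line("vote-status", doc))  # consensus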
commit dcf2092607c8be29d065def1cb104290709bef87
Author: Ravi Chandra Padmala <neenaoffline@gmail.com>
Date: Thu Aug 9 17:21:09 2012 +0530
RouterDescriptor objects store a list of flags
---
stem/descriptor/networkstatus.py | 83 ++++++++++----------------------
test/integ/descriptor/networkstatus.py | 7 +--
2 files changed, 29 insertions(+), 61 deletions(-)
diff --git a/stem/descriptor/networkstatus.py b/stem/descriptor/networkstatus.py
index b1dc4dd..214a33c 100644
--- a/stem/descriptor/networkstatus.py
+++ b/stem/descriptor/networkstatus.py
@@ -49,6 +49,7 @@ except:
import stem.descriptor
import stem.version
import stem.exit_policy
+import stem.util.enum
from stem.descriptor import _read_until_keywords, _skip_until_keywords, _peek_keyword, _strptime
from stem.descriptor import _read_keyword_line, _read_keyword_line_str, _get_pseudo_pgp_block, _peek_line
@@ -58,6 +59,8 @@ _bandwidth_weights_regex = re.compile(" ".join(["W%s=\d+" % weight for weight in
_router_desc_end_kws = ["r", "bandwidth-weights", "directory-footer", "directory-signature"]
+Flag = stem.util.enum.Enum(*[(flag.upper(), flag) for flag in ["Authority", "BadExit", "Exit", "Fast", "Guard", "HSDir", "Named", "Running", "Stable", "Unnamed", "V2Dir", "Valid"]])
+
def parse_file(document_file, validate = True):
"""
Iterates over the router descriptors in a network status document.
@@ -82,13 +85,13 @@ def parse_file(document_file, validate = True):
document_data = document_data + document_file.read()
document = NetworkStatusDocument(document_data, validate)
document_file.seek(r_offset)
- document.router_descriptors = _router_desc_generator(document_file, document.vote_status == "vote", validate)
+ document.router_descriptors = _router_desc_generator(document_file, document.vote_status == "vote", validate, document.known_flags)
return document.router_descriptors
-def _router_desc_generator(document_file, vote, validate):
+def _router_desc_generator(document_file, vote, validate, known_flags):
while _peek_keyword(document_file) == "r":
desc_content = "".join(_read_until_keywords(_router_desc_end_kws, document_file, False, True))
- yield RouterDescriptor(desc_content, vote, validate)
+ yield RouterDescriptor(desc_content, vote, validate, known_flags)
class NetworkStatusDocument(stem.descriptor.Descriptor):
"""
@@ -156,8 +159,8 @@ class NetworkStatusDocument(stem.descriptor.Descriptor):
self._parse(raw_content)
- def _generate_router(self, raw_content, vote, validate):
- return RouterDescriptor(raw_content, vote, validate)
+ def _generate_router(self, raw_content, vote, validate, known_flags):
+ return RouterDescriptor(raw_content, vote, validate, known_flags)
def _validate_network_status_version(self):
return self.network_status_version == "3"
@@ -220,7 +223,7 @@ class NetworkStatusDocument(stem.descriptor.Descriptor):
# router descriptors
if _peek_keyword(content) == "r":
router_descriptors_data = "".join(_read_until_keywords(["bandwidth-weights", "directory-footer", "directory-signature"], content, False, True))
- self.router_descriptors = _router_desc_generator(StringIO(router_descriptors_data), vote, validate)
+ self.router_descriptors = _router_desc_generator(StringIO(router_descriptors_data), vote, validate, self.known_flags)
# footer section
if self.consensus_method > 9 or vote and filter(lambda x: x >= 9, self.consensus_methods):
@@ -366,20 +369,8 @@ class RouterDescriptor(stem.descriptor.Descriptor):
:var int orport: **\*** router's ORPort
:var int dirport: **\*** router's DirPort
- :var bool is_valid: **\*** router is valid
- :var bool is_guard: **\*** router is suitable for use as an entry guard
- :var bool is_named: **\*** router is named
- :var bool is_unnamed: **\*** router is unnamed
- :var bool is_running: **\*** router is running and currently usable
- :var bool is_stable: **\*** router is stable, i.e., it's suitable for for long-lived circuits
- :var bool is_exit: **\*** router is an exit router
- :var bool is_fast: **\*** router is Fast, i.e., it's usable for high-bandwidth circuits
- :var bool is_authority: **\*** router is a directory authority
- :var bool supports_v2dir: **\*** router supports v2dir
- :var bool supports_v3dir: **\*** router supports v3dir
- :var bool is_hsdir: **\*** router is a hidden status
- :var bool is_badexit: **\*** router is marked a bad exit
- :var bool is_baddirectory: **\*** router is a bad directory
+ :var list flags: **\*** list of status flags
+ :var list unknown_flags: **\*** list of unidentified status flags
:var :class:`stem.version.Version`,str version: Version of the Tor protocol this router is running
@@ -394,13 +385,17 @@ class RouterDescriptor(stem.descriptor.Descriptor):
| exit_policy appears only in votes
"""
- def __init__(self, raw_contents, vote = True, validate = True):
+ def __init__(self, raw_contents, vote = True, validate = True, known_flags = Flag):
"""
Parse a router descriptor in a v3 network status document and provide a new
RouterDescriptor object.
:param str raw_content: router descriptor content to be parsed
+ :param bool vote: True if the descriptor is from a vote document
:param bool validate: whether the router descriptor should be validated
+ :param bool known_flags: list of known router status flags
+
+ :raises: ValueError if the descriptor data is invalid
"""
super(RouterDescriptor, self).__init__(raw_contents)
@@ -413,20 +408,8 @@ class RouterDescriptor(stem.descriptor.Descriptor):
self.orport = None
self.dirport = None
- self.is_valid = False
- self.is_guard = False
- self.is_named = False
- self.is_unnamed = False
- self.is_running = False
- self.is_stable = False
- self.is_exit = False
- self.is_fast = False
- self.is_authority = False
- self.supports_v2dir = False
- self.supports_v3dir = False
- self.is_hsdir = False
- self.is_badexit = False
- self.is_baddirectory = False
+ self.flags = []
+ self.unknown_flags = []
self.version = None
@@ -437,12 +420,14 @@ class RouterDescriptor(stem.descriptor.Descriptor):
self.microdescriptor_hashes = []
- self._parse(raw_contents, vote, validate)
+ self._parse(raw_contents, vote, validate, known_flags)
- def _parse(self, raw_content, vote, validate):
+ def _parse(self, raw_content, vote, validate, known_flags):
"""
:param dict raw_content: iptor contents to be applied
+ :param bool vote: True if the descriptor is from a vote document
:param bool validate: checks the validity of descriptor content if True
+ :param bool known_flags: list of known router status flags
:raises: ValueError if an error occures in validation
"""
@@ -471,27 +456,11 @@ class RouterDescriptor(stem.descriptor.Descriptor):
seen_keywords.add("s")
# s Named Running Stable Valid
#A series of space-separated status flags, in *lexical order*
- flags = line.split(" ")
- flag_map = {
- "Valid": "is_valid",
- "Guard": "is_guard",
- "Named": "is_named",
- "Unnamed": "is_unnamed",
- "Running": "is_running",
- "Stable": "is_stable",
- "Exit": "is_exit",
- "Fast": "is_fast",
- "Authority": "is_authority",
- "V2Dir": "supports_v2dir",
- "V3Dir": "supports_v3dir",
- "HSDir": "is_hsdir",
- "BadExit": "is_badexit",
- "BadDirectory": "is_baddirectory",
- }
- map(lambda flag: setattr(self, flag_map[flag], True), flags)
+ self.flags = line.split(" ")
- if self.is_unnamed: self.is_named = False
- elif self.is_named: self.is_unnamed = False
+ self.unknown_flags = filter(lambda f: not f in known_flags, self.flags)
+ if validate and self.unknown_flags:
+ raise ValueError("Router contained unknown flags: %s", " ".join(self.unknown_flags))
elif peek_check_kw("v"):
if "v" in seen_keywords: raise ValueError("Invalid router descriptor: 'v' line appears twice")
diff --git a/test/integ/descriptor/networkstatus.py b/test/integ/descriptor/networkstatus.py
index f5d8942..07414c3 100644
--- a/test/integ/descriptor/networkstatus.py
+++ b/test/integ/descriptor/networkstatus.py
@@ -98,8 +98,7 @@ class TestNetworkStatusDocument(unittest.TestCase):
"0.2.3.18-rc", "0.2.3.19-rc"]]
self.assertEquals(expected_client_versions, desc.client_versions)
self.assertEquals(expected_server_versions, desc.server_versions)
- known_flags = ["Authority", "BadExit", "Exit", "Fast", "Guard", "HSDir", "Named", "Running", "Stable", "Unnamed", "V2Dir", "Valid"]
- self.assertEquals(known_flags, desc.known_flags)
+ self.assertEquals(set(desc.known_flags), set(["Authority", "BadExit", "Exit", "Fast", "Guard", "HSDir", "Named", "Running", "Stable", "Unnamed", "V2Dir", "Valid"]))
expected_params = {"CircuitPriorityHalflifeMsec": 30000, "bwauthpid": 1}
self.assertEquals(expected_params, desc.params)
router1 = next(desc.router_descriptors)
@@ -110,6 +109,7 @@ class TestNetworkStatusDocument(unittest.TestCase):
self.assertEquals("178.218.213.229", router1.ip)
self.assertEquals(80, router1.orport)
self.assertEquals(None, router1.dirport)
+ self.assertEquals(set(["Exit", "Fast", "Named", "Running", "Valid"]), set(router1.flags))
self.assertEquals(8, len(desc.directory_authorities))
self.assertEquals("tor26", desc.directory_authorities[0].nickname)
@@ -182,8 +182,7 @@ I/TJmV928na7RLZe2mGHCAW3VQOvV+QkCfj05VZ8CsY=
self.assertEquals(300, desc.dist_delay)
self.assertEquals([], desc.client_versions)
self.assertEquals([], desc.server_versions)
- known_flags = ["Authority", "BadExit", "Exit", "Fast", "Guard", "HSDir", "Running", "Stable", "V2Dir", "Valid"]
- self.assertEquals(known_flags, desc.known_flags)
+ self.assertEquals(set(desc.known_flags), set(["Authority", "BadExit", "Exit", "Fast", "Guard", "HSDir", "Running", "Stable", "V2Dir", "Valid"]))
expected_params = {"CircuitPriorityHalflifeMsec": 30000, "bwauthpid": 1}
self.assertEquals(expected_params, desc.params)
router1 = next(desc.router_descriptors)
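A standalone sketch of the flag handling this patch switches to: the 's' line becomes a plain list of flags, with anything outside the known set collected separately instead of setting per-flag boolean attributes. parse_status_flags is a hypothetical helper, not part of stem.

KNOWN_FLAGS = (
    "Authority", "BadExit", "Exit", "Fast", "Guard", "HSDir",
    "Named", "Running", "Stable", "Unnamed", "V2Dir", "Valid",
)

def parse_status_flags(s_line, known_flags=KNOWN_FLAGS, validate=True):
    # Split an 's' line into flags and report any that aren't in the
    # known set, mirroring the flags/unknown_flags attributes above.
    flags = s_line.split(" ")
    unknown_flags = [flag for flag in flags if flag not in known_flags]
    if validate and unknown_flags:
        raise ValueError("Router contained unknown flags: %s" % " ".join(unknown_flags))
    return flags, unknown_flags

flags, unknown = parse_status_flags("Exit Fast Named Running Valid")
print(flags)    # ['Exit', 'Fast', 'Named', 'Running', 'Valid']
print(unknown)  # []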
commit 9ad3395400cd21a26c0106e665aa6e1e2026a353
Author: Ravi Chandra Padmala <neenaoffline@gmail.com>
Date: Thu Aug 9 13:01:44 2012 +0530
Removed DescriptorParser
---
stem/descriptor/__init__.py | 306 +++++++++++++-------------------
stem/descriptor/networkstatus.py | 123 +++++++-------
test/integ/descriptor/networkstatus.py | 40 +++--
3 files changed, 214 insertions(+), 255 deletions(-)
diff --git a/stem/descriptor/__init__.py b/stem/descriptor/__init__.py
index 168b357..40f03ad 100644
--- a/stem/descriptor/__init__.py
+++ b/stem/descriptor/__init__.py
@@ -148,6 +148,21 @@ class Descriptor(object):
def __str__(self):
return self._raw_contents
+def _peek_line(descriptor_file):
+ """
+ Returns the line at the current offset of descriptor_file.
+
+ :param file descriptor_file: file with the descriptor content
+
+ :returns: line at the current offset of descriptor_file
+ """
+
+ last_position = descriptor_file.tell()
+ line = descriptor_file.readline()
+ descriptor_file.seek(last_position)
+
+ return line
+
def _peek_keyword(descriptor_file):
"""
Returns the keyword at the current offset of descriptor_file. Respects the
@@ -172,6 +187,82 @@ def _peek_keyword(descriptor_file):
return keyword
+def _read_keyword_line(keyword, descriptor_file, validate = True, optional = False):
+ """
+ Returns the rest of the line if the first keyword matches the given keyword. If
+ it doesn't, a ValueError is raised if optional and validate are True, if
+ not, None is returned.
+
+ Respects the opt keyword and returns the next keyword if the first is "opt".
+
+ :param str keyword: keyword the line must begin with
+ :param bool optional: if the current line must begin with the given keyword
+ :param bool validate: validation is enabled
+
+ :returns: the text after the keyword if the keyword matches the one provided, otherwise returns None or raises an exception
+
+ :raises: ValueError if a non-optional keyword doesn't match when validation is enabled
+ """
+
+ line = _peek_line(descriptor_file)
+ if not line:
+ if not optional and validate:
+ raise ValueError("Unexpected end of document")
+ return None
+
+ if line_matches_keyword(keyword, line):
+ line = descriptor_file.readline()
+
+ if line == "opt " + keyword or line == keyword: return ""
+ elif line.startswith("opt "): return line.split(" ", 2)[2].rstrip("\n")
+ else: return line.split(" ", 1)[1].rstrip("\n")
+ elif line.startswith("opt"):
+ # if this is something new we don't recognize
+ # ignore it and go to the next line
+ descriptor_file.readline()
+ return _read_keyword_line(keyword, descriptor_file, optional)
+ elif not optional and validate:
+ raise ValueError("Error parsing network status document: Expected %s, received: %s" % (keyword, line))
+ else: return None
+
+def _read_keyword_line_str(keyword, lines, validate = True, optional = False):
+ """
+ Returns the rest of the line if the first keyword matches the given keyword. If
+ it doesn't, a ValueError is raised if optional and validate are True, if
+ not, None is returned.
+
+ Respects the opt keyword and returns the next keyword if the first is "opt".
+
+ :param str keyword: keyword the line must begin with
+ :param list lines: list of strings to be read from
+ :param bool optional: if the current line must begin with the given keyword
+ :param bool validate: validation is enabled
+
+ :returns: the text after the keyword if the keyword matches the one provided, otherwise returns None or raises an exception
+
+ :raises: ValueError if a non-optional keyword doesn't match when validation is enabled
+ """
+
+ if not lines:
+ if not optional and validate:
+ raise ValueError("Unexpected end of document")
+ return
+
+ if line_matches_keyword(keyword, lines[0]):
+ line = lines.pop(0)
+
+ if line == "opt " + keyword or line == keyword: return ""
+ elif line.startswith("opt "): return line.split(" ", 2)[2]
+ else: return line.split(" ", 1)[1]
+ elif line.startswith("opt "):
+ # if this is something new we don't recognize yet
+ # ignore it and go to the next line
+ lines.pop(0)
+ return _read_keyword_line_str(keyword, lines, optional)
+ elif not optional and validate:
+ raise ValueError("Error parsing network status document: Expected %s, received: %s" % (keyword, lines[0]))
+ else: return None
+
def _read_until_keywords(keywords, descriptor_file, inclusive = False, ignore_first = False):
"""
Reads from the descriptor file until we get to one of the given keywords or reach the
@@ -348,146 +439,10 @@ def _strptime(string, validate = True, optional = False):
return datetime.datetime.strptime(string, "%Y-%m-%d %H:%M:%S")
except ValueError, exc:
if validate or not optional: raise exc
+ else: return None
-class DescriptorParser:
- """
- Helper class to parse documents.
-
- :var str line: current line to be being parsed
- :var list lines: list of remaining lines to be parsed
- """
-
- def __init__(self, raw_content, validate):
- """
- Create a new DocumentParser.
-
- :param str raw_content: content to be parsed
- :param bool validate: if False, treats every keyword line as optional
- """
-
- self._raw_content = raw_content
- self.lines = raw_content.split("\n")
- self.validate = validate
- self.line = self.lines.pop(0)
-
- def peek_keyword(self):
- """
- Returns the first keyword in the next line. Respects the opt keyword and
- returns the actual keyword if the first is "opt".
-
- :returns: the first keyword of the next line
- """
-
- if self.line:
- if self.line.startswith("opt "):
- return self.line.split(" ")[1]
- return self.line.split(" ")[0]
-
- def read_keyword_line(self, keyword, optional = False):
- """
- Returns the first keyword in the next line it matches the given keyword.
-
- If it doesn't match, a ValueError is raised if optional is True and if the
- DocumentParser was created with validation enabled. If not, None is returned.
-
- Respects the opt keyword and returns the next keyword if the first is "opt".
-
- :param str keyword: keyword the line must begin with
- :param bool optional: If the current line must begin with the given keyword
-
- :returns: the text after the keyword if the keyword matches the one provided, otherwise returns None or raises an exception
-
- :raises: ValueError if a non-optional keyword doesn't match when validation is enabled
- """
-
- keyword_regex = re.compile("(opt )?" + re.escape(keyword) + "($| )")
-
- if not self.line:
- if not optional and self.validate:
- raise ValueError("Unexpected end of document")
- return
-
- if keyword_regex.match(self.line):
- line = self.read_line()
-
- if line == "opt " + keyword or line == keyword: return ""
- elif line.startswith("opt "): return line.split(" ", 2)[2]
- else: return line.split(" ", 1)[1]
- elif self.line.startswith("opt"):
- # if this is something new we don't recognize
- # ignore it and go to the next line
- self.read_line()
- return self.read_keyword_line(self, keyword, optional)
- elif not optional and self.validate:
- raise ValueError("Error parsing network status document: Expected %s, received: %s" % (keyword, self.line))
-
- def read_line(self):
- """
- Returns the current line and shifts the parser to the next line.
-
- :returns: the current line if it exists, None otherwise
- """
-
- if self.line:
- try: tmp, self.line = self.line, self.lines.pop(0)
- except IndexError: tmp, self.line = self.line, None
-
- return tmp
- elif not optional and self.validate:
- raise ValueError("Unexpected end of document")
-
- def read_block(self, keyword):
- """
- Returns a keyword block that begins with "-----BEGIN keyword-----\\n" and
- ends with "-----END keyword-----\\n".
-
- :param str keyword: keyword block that must be read
-
- :returns: the data in the keyword block
- """
-
- lines = []
-
- if self.line == "-----BEGIN " + keyword + "-----":
- self.read_line()
- while self.line != "-----END " + keyword + "-----":
- lines.append(self.read_line())
-
- self.read_line() # pop out the END line
-
- return "\n".join(lines)
-
- def read_until(self, terminals = []):
- """
- Returns the data in the parser until a line that begins with one of the keywords in terminals are found.
-
- :param list terminals: list of strings at which we should stop reading and return the data
-
- :returns: the current line if it exists, None otherwise
- """
-
- if self.line == None: return
- lines = [self.read_line()]
- while self.line and not self.line.split(" ")[0] in terminals:
- lines.append(self.line)
- self.read_line()
-
- return "\n".join(lines)
-
- def remaining(self):
- """
- Returns the data remaining in the parser.
-
- :returns: all a list of all unparsed lines
- """
-
- if self.line:
- lines, self.lines = self.lines, []
- lines.insert(0, self.line)
- self.line = None
- return lines
- else:
- return []
+def line_matches_keyword(keyword, line):
+ return re.search("^(opt )?" + re.escape(keyword) + "($| )", line)
class KeyCertificate(Descriptor):
"""
@@ -522,63 +477,54 @@ class KeyCertificate(Descriptor):
self.fingerprint, self.identity_key, self.published = None, None, None
self.expires, self.signing_key, self.crosscert = None, None, None
self.certification = None
- parser = DescriptorParser(raw_content, validate)
- peek_check_kw = lambda keyword: keyword == parser.peek_keyword()
+ content = raw_content.splitlines()
seen_keywords = set()
- self.key_certificate_version = parser.read_keyword_line("dir-key-certificate-version")
- if validate and self.key_certificate_version != "3": raise ValueError("Unrecognized dir-key-certificate-version")
+ self.key_certificate_version = _read_keyword_line_str("dir-key-certificate-version", content)
+ if validate and self.key_certificate_version != "3":
+ raise ValueError("Unrecognized dir-key-certificate-version")
- def _read_keyword_line(keyword):
+ def read_keyword_line(keyword):
if validate and keyword in seen_keywords:
raise ValueError("Invalid key certificate: '%s' appears twice" % keyword)
seen_keywords.add(keyword)
- return parser.read_keyword_line(keyword)
+ return _read_keyword_line_str(keyword, content, validate)
- while parser.line:
- if peek_check_kw("dir-address"):
- line = _read_keyword_line("dir-address")
+ while content:
+ if line_matches_keyword("dir-address", content[0]):
+ line = read_keyword_line("dir-address")
try:
self.ip, self.port = line.rsplit(":", 1)
self.port = int(self.port)
except Exception:
if validate: raise ValueError("Invalid dir-address line: %s" % line)
-
- elif peek_check_kw("fingerprint"):
- self.fingerprint = _read_keyword_line("fingerprint")
-
- elif peek_check_kw("dir-identity-key"):
- _read_keyword_line("dir-identity-key")
- self.identity_key = parser.read_block("RSA PUBLIC KEY")
-
- elif peek_check_kw("dir-key-published"):
- self.published = _strptime(_read_keyword_line("dir-key-published"))
-
- elif peek_check_kw("dir-key-expires"):
- self.expires = _strptime(_read_keyword_line("dir-key-expires"))
-
- elif peek_check_kw("dir-signing-key"):
- _read_keyword_line("dir-signing-key")
- self.signing_key = parser.read_block("RSA PUBLIC KEY")
-
- elif peek_check_kw("dir-key-crosscert"):
- _read_keyword_line("dir-key-crosscert")
- self.crosscert = parser.read_block("ID SIGNATURE")
-
- elif peek_check_kw("dir-key-certification"):
- _read_keyword_line("dir-key-certification")
- self.certification = parser.read_block("SIGNATURE")
+ elif line_matches_keyword("fingerprint", content[0]):
+ self.fingerprint = read_keyword_line("fingerprint")
+ elif line_matches_keyword("dir-identity-key", content[0]):
+ read_keyword_line("dir-identity-key")
+ self.identity_key = _get_pseudo_pgp_block(content)
+ elif line_matches_keyword("dir-key-published", content[0]):
+ self.published = _strptime(read_keyword_line("dir-key-published"))
+ elif line_matches_keyword("dir-key-expires", content[0]):
+ self.expires = _strptime(read_keyword_line("dir-key-expires"))
+ elif line_matches_keyword("dir-signing-key", content[0]):
+ read_keyword_line("dir-signing-key")
+ self.signing_key = _get_pseudo_pgp_block(content)
+ elif line_matches_keyword("dir-key-crosscert", content[0]):
+ read_keyword_line("dir-key-crosscert")
+ self.crosscert = _get_pseudo_pgp_block(content)
+ elif line_matches_keyword("dir-key-certification", content[0]):
+ read_keyword_line("dir-key-certification")
+ self.certification = _get_pseudo_pgp_block(content)
break
-
elif validate:
- raise ValueError("Key certificate contains unrecognized lines: %s" % parser.line)
-
+ raise ValueError("Key certificate contains unrecognized lines: %s" % content[0])
else:
# ignore unrecognized lines if we aren't validating
- self._unrecognized_lines.append(parser.read_line())
+ self.unrecognized_lines.append(content.pop(0))
- self._unrecognized_lines = parser.remaining()
- if self._unrecognized_lines and validate:
+ self.unrecognized_lines = content
+ if self.unrecognized_lines and validate:
raise ValueError("Unrecognized trailing data in key certificate")
def get_unrecognized_lines(self):
@@ -588,5 +534,5 @@ class KeyCertificate(Descriptor):
:returns: a list of unrecognized lines
"""
- return self._unrecognized_lines
+ return self.unrecognized_lines
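The rewrite above drops the DescriptorParser class in favour of small helpers that work directly on a plain list of lines: line_matches_keyword() checks a line against a keyword (honouring the "opt " prefix), and _read_keyword_line_str() consumes the matching line and returns its value. Below is a rough standalone sketch of that style, with simplified names and none of stem's validation handling, so it is purely illustrative rather than the stem implementation:

import re

def matches_keyword(keyword, line):
    # Same idea as line_matches_keyword above: an optional "opt " prefix,
    # then the keyword, then either end of line or a space.
    return bool(re.match("(opt )?" + re.escape(keyword) + "($| )", line))

def pop_keyword_value(keyword, lines):
    # If the first line starts with the keyword, consume it and return the
    # remainder of the line; otherwise leave the list untouched.
    if lines and matches_keyword(keyword, lines[0]):
        line = lines.pop(0)
        if line.startswith("opt "):
            line = line.split(" ", 1)[1]
        parts = line.split(" ", 1)
        return parts[1] if len(parts) == 2 else ""
    return None

content = "dir-key-certificate-version 3\nfingerprint 27B6B5996C426270A5C95488AA5BCEB6BCC86956".splitlines()
assert pop_keyword_value("dir-key-certificate-version", content) == "3"
assert pop_keyword_value("fingerprint", content) == "27B6B5996C426270A5C95488AA5BCEB6BCC86956"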
diff --git a/stem/descriptor/networkstatus.py b/stem/descriptor/networkstatus.py
index a51fcbd..b1dc4dd 100644
--- a/stem/descriptor/networkstatus.py
+++ b/stem/descriptor/networkstatus.py
@@ -40,13 +40,18 @@ The documents can be obtained from any of the following sources...
import re
import datetime
-from StringIO import StringIO
+
+try:
+ from cStringIO import StringIO
+except:
+ from StringIO import StringIO
import stem.descriptor
import stem.version
import stem.exit_policy
-from stem.descriptor import _read_until_keywords, _skip_until_keywords, _peek_keyword
+from stem.descriptor import _read_until_keywords, _skip_until_keywords, _peek_keyword, _strptime
+from stem.descriptor import _read_keyword_line, _read_keyword_line_str, _get_pseudo_pgp_block, _peek_line
_bandwidth_weights_regex = re.compile(" ".join(["W%s=\d+" % weight for weight in ["bd",
"be", "bg", "bm", "db", "eb", "ed", "ee", "eg", "em", "gb", "gd", "gg", "gm", "mb", "md", "me", "mg", "mm"]]))
@@ -80,13 +85,6 @@ def parse_file(document_file, validate = True):
document.router_descriptors = _router_desc_generator(document_file, document.vote_status == "vote", validate)
return document.router_descriptors
-def _strptime(string, validate = True, optional = False):
- try:
- return datetime.datetime.strptime(string, "%Y-%m-%d %H:%M:%S")
- except ValueError, exc:
- if validate or not optional: raise exc
- else: return None
-
def _router_desc_generator(document_file, vote, validate):
while _peek_keyword(document_file) == "r":
desc_content = "".join(_read_until_keywords(_router_desc_end_kws, document_file, False, True))
@@ -171,14 +169,13 @@ class NetworkStatusDocument(stem.descriptor.Descriptor):
:returns: a list of unrecognized trailing lines
"""
- return self._unrecognized_lines
+ return self.unrecognized_lines
def _parse(self, raw_content):
# preamble
+ content = StringIO(raw_content)
validate = self.validated
- doc_parser = stem.descriptor.DescriptorParser(raw_content, validate)
-
- read_keyword_line = lambda keyword, optional = False: setattr(self, keyword.replace("-", "_"), doc_parser.read_keyword_line(keyword, optional))
+ read_keyword_line = lambda keyword, optional = False: setattr(self, keyword.replace("-", "_"), _read_keyword_line(keyword, content, validate, optional))
map(read_keyword_line, ["network-status-version", "vote-status"])
if validate and not self._validate_network_status_version():
@@ -186,47 +183,49 @@ class NetworkStatusDocument(stem.descriptor.Descriptor):
if self.vote_status == "vote": vote = True
elif self.vote_status == "consensus": vote = False
- elif validate: raise ValueError("Unrecognized document type specified in vote-status")
+ elif validate: raise ValueError("Unrecognized vote-status")
if vote:
read_keyword_line("consensus-methods", True)
self.consensus_methods = [int(method) for method in self.consensus_methods.split(" ")]
- self.published = _strptime(doc_parser.read_keyword_line("published", True), validate, True)
+ self.published = _strptime(_read_keyword_line("published", content, validate, True), validate, True)
else:
- self.consensus_method = int(doc_parser.read_keyword_line("consensus-method", True))
+ read_keyword_line("consensus-method", True)
+ self.consensus_method = int(self.consensus_method)
map(read_keyword_line, ["valid-after", "fresh-until", "valid-until"])
self.valid_after = _strptime(self.valid_after, validate)
self.fresh_until = _strptime(self.fresh_until, validate)
self.valid_until = _strptime(self.valid_until, validate)
- voting_delay = doc_parser.read_keyword_line("voting-delay")
+ voting_delay = _read_keyword_line("voting-delay", content, validate)
self.vote_delay, self.dist_delay = [int(delay) for delay in voting_delay.split(" ")]
- client_versions = doc_parser.read_keyword_line("client-versions", True)
+ client_versions = _read_keyword_line("client-versions", content, validate, True)
if client_versions:
self.client_versions = [stem.version.Version(version_string) for version_string in client_versions.split(",")]
- server_versions = doc_parser.read_keyword_line("server-versions", True)
+ server_versions = _read_keyword_line("server-versions", content, validate, True)
if server_versions:
self.server_versions = [stem.version.Version(version_string) for version_string in server_versions.split(",")]
- self.known_flags = doc_parser.read_keyword_line("known-flags").split(" ")
+ self.known_flags = _read_keyword_line("known-flags", content, validate).split(" ")
read_keyword_line("params", True)
if self.params:
self.params = dict([(param.split("=")[0], int(param.split("=")[1])) for param in self.params.split(" ")])
# authority section
- while doc_parser.line.startswith("dir-source "):
- dirauth_data = doc_parser.read_until(["dir-source", "r", "directory-footer", "directory-signature", "bandwidth-weights"])
+ while _peek_keyword(content) == "dir-source":
+ dirauth_data = _read_until_keywords(["dir-source", "r", "directory-footer", "directory-signature", "bandwidth-weights"], content, False, True)
+ dirauth_data = "".join(dirauth_data).rstrip()
self.directory_authorities.append(DirectoryAuthority(dirauth_data, vote, validate))
# router descriptors
- if doc_parser.peek_keyword() == "r":
- router_descriptors_data = doc_parser.read_until(["bandwidth-weights", "directory-footer", "directory-signature"])
+ if _peek_keyword(content) == "r":
+ router_descriptors_data = "".join(_read_until_keywords(["bandwidth-weights", "directory-footer", "directory-signature"], content, False, True))
self.router_descriptors = _router_desc_generator(StringIO(router_descriptors_data), vote, validate)
# footer section
if self.consensus_method > 9 or vote and filter(lambda x: x >= 9, self.consensus_methods):
- if doc_parser.line == "directory-footer":
- doc_parser.read_line()
+ if _peek_keyword(content) == "directory-footer":
+ content.readline()
elif validate:
raise ValueError("Network status document missing directory-footer")
@@ -237,12 +236,12 @@ class NetworkStatusDocument(stem.descriptor.Descriptor):
elif validate:
raise ValueError("Invalid bandwidth-weights line")
- while doc_parser.line.startswith("directory-signature "):
- signature_data = doc_parser.read_until(["directory-signature"])
- self.directory_signatures.append(DirectorySignature(signature_data))
+ while _peek_keyword(content) == "directory-signature":
+ signature_data = _read_until_keywords("directory-signature", content, False, True)
+ self.directory_signatures.append(DirectorySignature("".join(signature_data)))
- self._unrecognized_lines = doc_parser.remaining()
- if validate and self._unrecognized_lines: raise ValueError("Unrecognized trailing data")
+ self.unrecognized_lines = content.read()
+ if validate and self.unrecognized_lines: raise ValueError("Unrecognized trailing data")
class DirectoryAuthority(stem.descriptor.Descriptor):
"""
@@ -280,21 +279,21 @@ class DirectoryAuthority(stem.descriptor.Descriptor):
self.nickname, self.identity, self.address, self.ip = None, None, None, None
self.dirport, self.orport, self.legacy_dir_key = None, None, None
self.key_certificate, self.contact, self.vote_digest = None, None, None
- parser = stem.descriptor.DescriptorParser(raw_content, validate)
- dir_source = parser.read_keyword_line("dir-source")
+ content = StringIO(raw_content)
+ dir_source = _read_keyword_line("dir-source", content, validate)
self.nickname, self.identity, self.address, self.ip, self.dirport, self.orport = dir_source.split(" ")
self.dirport = int(self.dirport)
self.orport = int(self.orport)
- self.contact = parser.read_keyword_line("contact")
+ self.contact = _read_keyword_line("contact", content, validate)
if vote:
- self.legacy_dir_key = parser.read_keyword_line("legacy-dir-key", True)
- self.key_certificate = stem.descriptor.KeyCertificate("\n".join(parser.remaining()), validate)
+ self.legacy_dir_key = _read_keyword_line("legacy-dir-key", content, validate, True)
+ self.key_certificate = stem.descriptor.KeyCertificate(content.read(), validate)
else:
- self.vote_digest = parser.read_keyword_line("vote-digest", True)
- self._unrecognized_lines = parser.remaining()
- if self._unrecognized_lines and validate:
+ self.vote_digest = _read_keyword_line("vote-digest", content, True, validate)
+ self.unrecognized_lines = content.read()
+ if self.unrecognized_lines and validate:
raise ValueError("Unrecognized trailing data in directory authority information")
def get_unrecognized_lines(self):
@@ -304,7 +303,7 @@ class DirectoryAuthority(stem.descriptor.Descriptor):
:returns: a list of unrecognized lines
"""
- return self._unrecognized_lines
+ return self.unrecognized_lines
class DirectorySignature(stem.descriptor.Descriptor):
"""
@@ -329,18 +328,20 @@ class DirectorySignature(stem.descriptor.Descriptor):
super(DirectorySignature, self).__init__(raw_content)
self.identity, self.key_digest, self.method, self.signature = None, None, None, None
- parser = stem.descriptor.DescriptorParser(raw_content, validate)
+ content = raw_content.splitlines()
- signature_line = parser.read_keyword_line("directory-signature").split(" ")
+ signature_line = _read_keyword_line_str("directory-signature", content, validate).split(" ")
if len(signature_line) == 2:
self.identity, self.key_digest = signature_line
- if len(signature_line) == 3: # for microdescriptor consensuses
+ if len(signature_line) == 3:
+ # for microdescriptor consensuses
+ # This 'method' seems to be undocumented 8-8-12
self.method, self.identity, self.key_digest = signature_line
- self.signature = parser.read_block("SIGNATURE")
- self._unrecognized_lines = parser.remaining()
- if self._unrecognized_lines and validate:
+ self.signature = _get_pseudo_pgp_block(content)
+ self.unrecognized_lines = content
+ if self.unrecognized_lines and validate:
raise ValueError("Unrecognized trailing data in directory signature")
def get_unrecognized_lines(self):
@@ -350,7 +351,7 @@ class DirectorySignature(stem.descriptor.Descriptor):
:returns: a list of unrecognized lines
"""
- return self._unrecognized_lines
+ return self.unrecognized_lines
class RouterDescriptor(stem.descriptor.Descriptor):
"""
@@ -446,26 +447,26 @@ class RouterDescriptor(stem.descriptor.Descriptor):
:raises: ValueError if an error occures in validation
"""
- parser = stem.descriptor.DescriptorParser(raw_content, validate)
+ content = StringIO(raw_content)
seen_keywords = set()
- peek_check_kw = lambda keyword: keyword == parser.peek_keyword()
+ peek_check_kw = lambda keyword: keyword == _peek_keyword(content)
- r = parser.read_keyword_line("r")
+ r = _read_keyword_line("r", content, validate)
# r mauer BD7xbfsCFku3+tgybEZsg8Yjhvw itcuKQ6PuPLJ7m/Oi928WjO2j8g 2012-06-22 13:19:32 80.101.105.103 9001 0
# "r" SP nickname SP identity SP digest SP publication SP IP SP ORPort SP DirPort NL
- seen_keywords.add("r")
if r:
+ seen_keywords.add("r")
values = r.split(" ")
self.nickname, self.identity, self.digest = values[0], values[1], values[2]
self.publication = _strptime(" ".join((values[3], values[4])), validate)
self.ip, self.orport, self.dirport = values[5], int(values[6]), int(values[7])
if self.dirport == 0: self.dirport = None
- elif validate: raise ValueError("Invalid router descriptor: empty 'r' line" )
+ elif validate: raise ValueError("Invalid router descriptor: empty 'r' line")
- while parser.line:
+ while _peek_line(content):
if peek_check_kw("s"):
if "s" in seen_keywords: raise ValueError("Invalid router descriptor: 's' line appears twice")
- line = parser.read_keyword_line("s")
+ line = _read_keyword_line("s", content, validate)
if not line: continue
seen_keywords.add("s")
# s Named Running Stable Valid
@@ -494,7 +495,7 @@ class RouterDescriptor(stem.descriptor.Descriptor):
elif peek_check_kw("v"):
if "v" in seen_keywords: raise ValueError("Invalid router descriptor: 'v' line appears twice")
- line = parser.read_keyword_line("v", True)
+ line = _read_keyword_line("v", content, validate, True)
seen_keywords.add("v")
# v Tor 0.2.2.35
if line:
@@ -506,7 +507,7 @@ class RouterDescriptor(stem.descriptor.Descriptor):
elif peek_check_kw("w"):
if "w" in seen_keywords: raise ValueError("Invalid router descriptor: 'w' line appears twice")
- w = parser.read_keyword_line("w", True)
+ w = _read_keyword_line("w", content, validate, True)
# "w" SP "Bandwidth=" INT [SP "Measured=" INT] NL
seen_keywords.add("w")
if w:
@@ -525,7 +526,7 @@ class RouterDescriptor(stem.descriptor.Descriptor):
elif peek_check_kw("p"):
if "p" in seen_keywords: raise ValueError("Invalid router descriptor: 'p' line appears twice")
- p = parser.read_keyword_line("p", True)
+ p = _read_keyword_line("p", content, validate, True)
seen_keywords.add("p")
# "p" SP ("accept" / "reject") SP PortList NL
if p:
@@ -533,17 +534,17 @@ class RouterDescriptor(stem.descriptor.Descriptor):
elif vote and peek_check_kw("m"):
# microdescriptor hashes
- m = parser.read_keyword_line("m", True)
+ m = _read_keyword_line("m", content, validate, True)
methods, digests = m.split(" ", 1)
method_list = methods.split(",")
digest_dict = [digest.split("=", 1) for digest in digests.split(" ")]
self.microdescriptor_hashes.append((method_list, digest_dict))
elif validate:
- raise ValueError("Router descriptor contains unrecognized trailing lines: %s" % parser.line)
+ raise ValueError("Router descriptor contains unrecognized trailing lines: %s" % content.readline())
else:
- self._unrecognized_lines.append(parser.read_line()) # ignore unrecognized lines if we aren't validating
+ self.unrecognized_lines.append(content.readline()) # ignore unrecognized lines if we aren't validating
def get_unrecognized_lines(self):
"""
@@ -552,5 +553,5 @@ class RouterDescriptor(stem.descriptor.Descriptor):
:returns: a list of unrecognized lines
"""
- return self._unrecognized_lines
+ return self.unrecognized_lines
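The RouterDescriptor parsing keeps the status entry layout noted in the inline comment above: "r" SP nickname SP identity SP digest SP publication SP IP SP ORPort SP DirPort. A minimal standalone sketch of splitting such a line (no validation, simplified names; not stem's code):

import datetime

def parse_r_value(value):
    # value is the text after the "r " keyword, for example the line quoted
    # in the diff's comment for relay "mauer".
    fields = value.split(" ")
    nickname, identity, digest = fields[0], fields[1], fields[2]
    published = datetime.datetime.strptime(" ".join(fields[3:5]), "%Y-%m-%d %H:%M:%S")
    ip, orport, dirport = fields[5], int(fields[6]), int(fields[7])
    if dirport == 0:
        dirport = None  # a DirPort of zero means the relay has no DirPort
    return nickname, identity, digest, published, ip, orport, dirport

parse_r_value("mauer BD7xbfsCFku3+tgybEZsg8Yjhvw itcuKQ6PuPLJ7m/Oi928WjO2j8g 2012-06-22 13:19:32 80.101.105.103 9001 0")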
diff --git a/test/integ/descriptor/networkstatus.py b/test/integ/descriptor/networkstatus.py
index 67992c7..f5d8942 100644
--- a/test/integ/descriptor/networkstatus.py
+++ b/test/integ/descriptor/networkstatus.py
@@ -39,8 +39,8 @@ class TestNetworkStatusDocument(unittest.TestCase):
count = 0
with open(descriptor_path) as descriptor_file:
for desc in stem.descriptor.networkstatus.parse_file(descriptor_file):
- if resource.getrusage(resource.RUSAGE_SELF).ru_maxrss > 100000:
- # if we're using > 100 MB we should fail
+ if resource.getrusage(resource.RUSAGE_SELF).ru_maxrss > 200000:
+ # if we're using > 200 MB we should fail
self.fail()
assert desc.nickname # check that the router has a nickname
count += 1
@@ -129,9 +129,11 @@ class TestNetworkStatusDocument(unittest.TestCase):
}
self.assertEquals(expected_bandwidth_weights, desc.bandwidth_weights)
- expected_signature = """HFXB4497LzESysYJ/4jJY83E5vLjhv+igIxD9LU6lf6ftkGeF+lNmIAIEKaMts8H
+ expected_signature = """-----BEGIN SIGNATURE-----
+HFXB4497LzESysYJ/4jJY83E5vLjhv+igIxD9LU6lf6ftkGeF+lNmIAIEKaMts8H
mfWcW0b+jsrXcJoCxV5IrwCDF3u1aC3diwZY6yiG186pwWbOwE41188XI2DeYPwE
-I/TJmV928na7RLZe2mGHCAW3VQOvV+QkCfj05VZ8CsY="""
+I/TJmV928na7RLZe2mGHCAW3VQOvV+QkCfj05VZ8CsY=
+-----END SIGNATURE-----"""
self.assertEquals(8, len(desc.directory_signatures))
self.assertEquals("14C131DFC5C6F93646BE72FA1401C02A8DF2E8B4", desc.directory_signatures[0].identity)
self.assertEquals("BF112F1C6D5543CFD0A32215ACABD4197B5279AD", desc.directory_signatures[0].key_digest)
@@ -203,7 +205,8 @@ I/TJmV928na7RLZe2mGHCAW3VQOvV+QkCfj05VZ8CsY="""
self.assertEquals("Mike Perry <email>", desc.directory_authorities[0].contact)
self.assertEquals(None, desc.directory_authorities[0].legacy_dir_key)
- expected_identity_key = """MIIBigKCAYEA6uSmsoxj2MiJ3qyZq0qYXlRoG8o82SNqg+22m+t1c7MlQOZWPJYn
+ expected_identity_key = """-----BEGIN RSA PUBLIC KEY-----
+MIIBigKCAYEA6uSmsoxj2MiJ3qyZq0qYXlRoG8o82SNqg+22m+t1c7MlQOZWPJYn
XeMcBCt8xrTeIt2ZI+Q/Kt2QJSeD9WZRevTKk/kn5Tg2+xXPogalUU47y5tUohGz
+Q8+CxtRSXpDxBHL2P8rLHvGrI69wbNHGoQkce/7gJy9vw5Ie2qzbyXk1NG6V8Fb
pr6A885vHo6TbhUnolz2Wqt/kN+UorjLkN2H3fV+iGcQFv42SyHYGDLa0WwL3PJJ
@@ -211,21 +214,28 @@ r/veu36S3VaHBrfhutfioi+d3d4Ya0bKwiWi5Lm2CHuuRTgMpHLU9vlci8Hunuxq
HsULe2oMsr4VEic7sW5SPC5Obpx6hStHdNv1GxoSEm3/vIuPM8pINpU5ZYAyH9yO
Ef22ZHeiVMMKmpV9TtFyiFqvlI6GpQn3mNbsQqF1y3XCA3Q4vlRAkpgJVUSvTxFP
2bNDobOyVCpCM/rwxU1+RCNY5MFJ/+oktUY+0ydvTen3gFdZdgNqCYjKPLfBNm9m
-RGL7jZunMUNvAgMBAAE="""
- expected_signing_key = """MIGJAoGBAJ5itcJRYNEM3Qf1OVWLRkwjqf84oXPc2ZusaJ5zOe7TVvBMra9GNyc0
+RGL7jZunMUNvAgMBAAE=
+-----END RSA PUBLIC KEY-----"""
+ expected_signing_key = """-----BEGIN RSA PUBLIC KEY-----
+MIGJAoGBAJ5itcJRYNEM3Qf1OVWLRkwjqf84oXPc2ZusaJ5zOe7TVvBMra9GNyc0
NM9y6zVkHCAePAjr4KbW/8P1olA6FUE2LV9bozaU1jFf6K8B2OELKs5FUEW+n+ic
-GM0x6MhngyXonWOcKt5Gj+mAu5lrno9tpNbPkz2Utr/Pi0nsDhWlAgMBAAE="""
- expected_key_crosscert = """RHYImGTwg36wmEdAn7qaRg2sAfql7ZCtPIL/O3lU5OIdXXp0tNn/K00Bamqohjk+
+GM0x6MhngyXonWOcKt5Gj+mAu5lrno9tpNbPkz2Utr/Pi0nsDhWlAgMBAAE=
+-----END RSA PUBLIC KEY-----"""
+ expected_key_crosscert = """-----BEGIN ID SIGNATURE-----
+RHYImGTwg36wmEdAn7qaRg2sAfql7ZCtPIL/O3lU5OIdXXp0tNn/K00Bamqohjk+
Tz4FKsKXGDlbGv67PQcZPOK6NF0GRkNh4pk89prrDO4XwtEn7rkHHdBH6/qQ7IRG
-GdDZHtZ1a69oFZvPWD3hUaB50xeIe7GoKdKIfdNNJ+8="""
- expected_key_certification = """fasWOGyUZ3iMCYpDfJ+0JcMiTH25sXPWzvlHorEOyOMbaMqRYpZU4GHzt1jLgdl6
+GdDZHtZ1a69oFZvPWD3hUaB50xeIe7GoKdKIfdNNJ+8=
+-----END ID SIGNATURE-----"""
+ expected_key_certification = """-----BEGIN SIGNATURE-----
+fasWOGyUZ3iMCYpDfJ+0JcMiTH25sXPWzvlHorEOyOMbaMqRYpZU4GHzt1jLgdl6
AAoR6KdamsLg5VE8xzst48a4UFuzHFlklZ5O8om2rcvDd5DhSnWWYZnYJecqB+bo
dNisPmaIVSAWb29U8BpNRj4GMC9KAgGYUj8aE/KtutAeEekFfFEHTfWZ2fFp4j3m
9rY8FWraqyiF+Emq1T8pAAgMQ+79R3oZxq0TXS42Z4Anhms735ccauKhI3pDKjbl
tD5vAzIHOyjAOXj7a6jY/GrnaBNuJ4qe/4Hf9UmzK/jKKwG95BPJtPTT4LoFwEB0
KG2OUeQUNoCck4nDpsZwFqPlrWCHcHfTV2iDYFV1HQWDTtZz/qf+GtB8NXsq+I1w
brADmvReM2BD6p/13h0QURCI5hq7ZYlIKcKrBa0jn1d9cduULl7vgKsRCJDls/ID
-emBZ6pUxMpBmV0v+PrA3v9w4DlE7GHAq61FF/zju2kpqj6MInbEvI/E+e438sWsL"""
+emBZ6pUxMpBmV0v+PrA3v9w4DlE7GHAq61FF/zju2kpqj6MInbEvI/E+e438sWsL
+-----END SIGNATURE-----"""
self.assertEquals("3", desc.directory_authorities[0].key_certificate.key_certificate_version)
self.assertEquals("27B6B5996C426270A5C95488AA5BCEB6BCC86956", desc.directory_authorities[0].key_certificate.fingerprint)
self.assertEquals(_strptime("2011-11-28 21:51:04"), desc.directory_authorities[0].key_certificate.published)
@@ -237,9 +247,11 @@ emBZ6pUxMpBmV0v+PrA3v9w4DlE7GHAq61FF/zju2kpqj6MInbEvI/E+e438sWsL"""
self.assertEquals(None, desc.directory_authorities[0].vote_digest)
self.assertEquals({}, desc.bandwidth_weights)
- expected_signature = """fskXN84wB3mXfo+yKGSt0AcDaaPuU3NwMR3ROxWgLN0KjAaVi2eV9PkPCsQkcgw3
+ expected_signature = """-----BEGIN SIGNATURE-----
+fskXN84wB3mXfo+yKGSt0AcDaaPuU3NwMR3ROxWgLN0KjAaVi2eV9PkPCsQkcgw3
JZ/1HL9sHyZfo6bwaC6YSM9PNiiY6L7rnGpS7UkHiFI+M96VCMorvjm5YPs3FioJ
-DnN5aFtYKiTc19qIC7Nmo+afPdDEf0MlJvEOP5EWl3w="""
+DnN5aFtYKiTc19qIC7Nmo+afPdDEf0MlJvEOP5EWl3w=
+-----END SIGNATURE-----"""
self.assertEquals(1, len(desc.directory_signatures))
self.assertEquals("27B6B5996C426270A5C95488AA5BCEB6BCC86956", desc.directory_signatures[0].identity)
self.assertEquals("D5C30C15BB3F1DA27669C2D88439939E8F418FCF", desc.directory_signatures[0].key_digest)
[stem/master] Raising TypeError when metrics header is unrecognized
by atagar@torproject.org 13 Oct '12
commit a85e6f77ee1b4ba40e86bd148062bf8e479484e1
Author: Damian Johnson <atagar(a)torproject.org>
Date: Sat Aug 18 16:12:35 2012 -0700
Raising TypeError when metrics header is unrecognized
When we get a metrics header for a format we don't recognize, the
_parse_metrics_file() function is supposed to throw a TypeError (and its
comment says it does). However, it didn't.
This caused unrecognized metrics types to be parsed as if they were valid
descriptor files containing zero descriptors. Fixing that and adding a test
for this case.
Also fixing a couple other minor bugs I ran into while fixing this:
* Some tests had an assertTrue() rather than assertEqual(), causing the
assertions to almost always evaluate to True.
* The UnrecognizedType we threw for text files didn't have its mime type set.
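A condensed, standalone illustration of the behaviour this commit gives _parse_metrics_file(): recognized "@type" headers map to a parser, and everything else raises a TypeError instead of behaving like an empty descriptor file. The handler name below is a stand-in, so treat this as a sketch of the dispatch rather than the stem code itself:

def parse_microdesc_consensus(descriptor_file):
    # Stand-in for the real parser; the details don't matter for this sketch.
    return iter([])

def choose_parser(descriptor_type, major_version):
    recognized = {
        ("network-status-microdesc-consensus-3", 1): parse_microdesc_consensus,
    }
    try:
        return recognized[(descriptor_type, major_version)]
    except KeyError:
        # Previously an unknown type fell through and yielded nothing;
        # now it fails loudly.
        raise TypeError("Unrecognized metrics descriptor format: %r" % descriptor_type)

choose_parser("network-status-microdesc-consensus-3", 1)   # returns the parser
# choose_parser("non-existant-type", 1)                    # raises TypeError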
---
stem/descriptor/__init__.py | 2 +
stem/descriptor/reader.py | 6 ++--
test/integ/descriptor/data/new_metrics_type | 3 ++
test/integ/descriptor/reader.py | 29 ++++++++++++++++++++++----
4 files changed, 32 insertions(+), 8 deletions(-)
diff --git a/stem/descriptor/__init__.py b/stem/descriptor/__init__.py
index 1372e17..8e2bdc7 100644
--- a/stem/descriptor/__init__.py
+++ b/stem/descriptor/__init__.py
@@ -110,6 +110,8 @@ def _parse_metrics_file(descriptor_type, major_version, minor_version, descripto
yield stem.descriptor.networkstatus.parse_file(descriptor_file)
elif descriptor_type == "network-status-microdesc-consensus-3" and major_version == 1:
yield stem.descriptor.networkstatus.parse_file(descriptor_file, flavour = "microdesc")
+ else:
+ raise TypeError("Unrecognized metrics descriptor format. type: '%s', version: '%i.%i'" % (descriptor_type, major_version, minor_version))
class Descriptor(object):
"""
diff --git a/stem/descriptor/reader.py b/stem/descriptor/reader.py
index 84ce695..eb2265b 100644
--- a/stem/descriptor/reader.py
+++ b/stem/descriptor/reader.py
@@ -423,14 +423,14 @@ class DescriptorReader(object):
if target_type[0] in (None, 'text/plain'):
# either '.txt' or an unknown type
- self._handle_descriptor_file(target)
+ self._handle_descriptor_file(target, target_type)
elif is_tar:
# handles gzip, bz2, and decompressed tarballs among others
self._handle_archive(target)
else:
self._notify_skip_listeners(target, UnrecognizedType(target_type))
- def _handle_descriptor_file(self, target):
+ def _handle_descriptor_file(self, target, mime_type):
try:
with open(target) as target_file:
for desc in stem.descriptor.parse_file(target, target_file):
@@ -438,7 +438,7 @@ class DescriptorReader(object):
self._unreturned_descriptors.put(desc)
self._iter_notice.set()
except TypeError, exc:
- self._notify_skip_listeners(target, UnrecognizedType(None))
+ self._notify_skip_listeners(target, UnrecognizedType(mime_type))
except ValueError, exc:
self._notify_skip_listeners(target, ParsingFailure(exc))
except IOError, exc:
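From the caller's side these failures surface through DescriptorReader's skip listener, which after this change receives an UnrecognizedType exception carrying the detected mime type. A rough usage sketch, assuming (as the integration tests' SkipListener helper suggests) that the listener is called with the path and the exception:

import stem.descriptor.reader

def on_skip(path, exception):
    # Unrecognized metrics headers now show up as UnrecognizedType with the
    # detected mime type attached, e.g. (None, None) for a plain text file.
    if isinstance(exception, stem.descriptor.reader.UnrecognizedType):
        print("skipped %s (mime type: %s)" % (path, exception.mime_type))

reader = stem.descriptor.reader.DescriptorReader("/path/to/descriptor/data")
reader.register_skip_listener(on_skip)

with reader:
    descriptors = list(reader)   # iterate; skipped files are reported via on_skip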
diff --git a/test/integ/descriptor/data/new_metrics_type b/test/integ/descriptor/data/new_metrics_type
new file mode 100644
index 0000000..95bd669
--- /dev/null
+++ b/test/integ/descriptor/data/new_metrics_type
@@ -0,0 +1,3 @@
+@type non-existant-type 1.0
+valid metrics header, but a type we shouldn't recognize
+
diff --git a/test/integ/descriptor/reader.py b/test/integ/descriptor/reader.py
index 6ec538d..7b3a6da 100644
--- a/test/integ/descriptor/reader.py
+++ b/test/integ/descriptor/reader.py
@@ -350,12 +350,12 @@ class TestDescriptorReader(unittest.TestCase):
with reader: list(reader) # iterates over all of the descriptors
- self.assertTrue(2, len(skip_listener.results))
+ self.assertEqual(4, len(skip_listener.results))
for skip_path, skip_exception in skip_listener.results:
if skip_path.endswith(".swp"): continue # skip vim temp files
- if not os.path.basename(skip_path) in ("riddle", "tiny.png", "vote"):
+ if not os.path.basename(skip_path) in ("riddle", "tiny.png", "vote", "new_metrics_type"):
self.fail("Unexpected non-descriptor content: %s" % skip_path)
self.assertTrue(isinstance(skip_exception, stem.descriptor.reader.UnrecognizedType))
@@ -406,7 +406,7 @@ class TestDescriptorReader(unittest.TestCase):
reader.register_skip_listener(skip_listener.listener)
with reader: list(reader) # iterates over all of the descriptors
- self.assertTrue(1, len(skip_listener.results))
+ self.assertEqual(1, len(skip_listener.results))
skipped_path, skip_exception = skip_listener.results[0]
self.assertEqual(test_path, skipped_path)
@@ -438,7 +438,7 @@ class TestDescriptorReader(unittest.TestCase):
reader.register_skip_listener(skip_listener.listener)
with reader: list(reader) # iterates over all of the descriptors
- self.assertTrue(1, len(skip_listener.results))
+ self.assertEqual(1, len(skip_listener.results))
skipped_path, skip_exception = skip_listener.results[0]
self.assertEqual(test_path, skipped_path)
@@ -460,9 +460,28 @@ class TestDescriptorReader(unittest.TestCase):
reader.register_skip_listener(skip_listener.listener)
with reader: list(reader) # iterates over all of the descriptors
- self.assertTrue(1, len(skip_listener.results))
+ self.assertEqual(1, len(skip_listener.results))
skipped_path, skip_exception = skip_listener.results[0]
self.assertEqual(test_path, skipped_path)
self.assertTrue(isinstance(skip_exception, stem.descriptor.reader.FileMissing))
+
+ def test_unrecognized_metrics_type(self):
+ """
+ Parses a file that has a valid metrics header, but an unrecognized type.
+ """
+
+ test_path = test.integ.descriptor.get_resource("new_metrics_type")
+
+ skip_listener = SkipListener()
+ reader = stem.descriptor.reader.DescriptorReader(test_path)
+ reader.register_skip_listener(skip_listener.listener)
+ with reader: list(reader) # iterates over all of the descriptors
+
+ self.assertEqual(1, len(skip_listener.results))
+
+ skipped_path, skip_exception = skip_listener.results[0]
+ self.assertEqual(test_path, skipped_path)
+ self.assertTrue(isinstance(skip_exception, stem.descriptor.reader.UnrecognizedType))
+ self.assertEqual((None, None), skip_exception.mime_type)
13 Oct '12
commit 55d73defda11bf1496e50565dd8ee911cb0530bb
Merge: 27f13b3 5b6d240
Author: Erinn Clark <erinn(a)torproject.org>
Date: Sat Oct 13 10:20:42 2012 +0100
Merge branch 'maint-2.2' into maint-2.3
README.LINUX-2.2 | 2 +-
README.OSX-2.2 | 2 +-
README.WIN-2.2 | 2 +-
build-scripts/linux.mk | 2 +-
build-scripts/osx.mk | 2 +-
build-scripts/windows.mk | 2 +-
changelog.linux-2.2 | 6 ++++++
changelog.osx-2.2 | 6 ++++++
changelog.windows-2.2 | 6 ++++++
9 files changed, 24 insertions(+), 6 deletions(-)
[torbrowser/master] bump all stable tbbs to 2.2.39-2 for firefox 10.0.9esr
by erinn@torproject.org 13 Oct '12
commit 5b6d240d0486eeea51209f8c5ee6d1746a31251d
Author: Erinn Clark <erinn(a)torproject.org>
Date: Sat Oct 13 10:20:23 2012 +0100
bump all stable tbbs to 2.2.39-2 for firefox 10.0.9esr
---
README.LINUX-2.2 | 2 +-
README.OSX-2.2 | 2 +-
README.WIN-2.2 | 2 +-
build-scripts/linux.mk | 2 +-
build-scripts/osx.mk | 2 +-
build-scripts/windows.mk | 2 +-
changelog.linux-2.2 | 6 ++++++
changelog.osx-2.2 | 6 ++++++
changelog.windows-2.2 | 6 ++++++
9 files changed, 24 insertions(+), 6 deletions(-)
diff --git a/README.LINUX-2.2 b/README.LINUX-2.2
index bfeb2f7..859c854 100644
--- a/README.LINUX-2.2
+++ b/README.LINUX-2.2
@@ -6,7 +6,7 @@ Included applications
Vidalia 0.2.20 (with Qt 4.8.1)
Tor 0.2.2.39 (with libevent-2.0.20-stable, zlib-1.2.7 and openssl-1.0.1c)
-Firefox 10.0.8esr
+Firefox 10.0.9esr
\_ Torbutton 1.4.6.3
|_ NoScript 2.5.7
|_ HTTPS-Everywhere 2.2.2
diff --git a/README.OSX-2.2 b/README.OSX-2.2
index ca8e7fa..3ffefa3 100644
--- a/README.OSX-2.2
+++ b/README.OSX-2.2
@@ -6,7 +6,7 @@ Included applications
Vidalia 0.2.20 (with Qt 4.8.1)
Tor 0.2.2.39 (with libevent-2.0.20-stable, zlib-1.2.7 and openssl-1.0.1c)
-Firefox 10.0.8esr
+Firefox 10.0.9esr
\_ Torbutton 1.4.6.3
|_ NoScript 2.5.7
|_ HTTPS-Everywhere 2.2.2
diff --git a/README.WIN-2.2 b/README.WIN-2.2
index a7d2188..3c73b5c 100644
--- a/README.WIN-2.2
+++ b/README.WIN-2.2
@@ -6,7 +6,7 @@ Included applications
Vidalia 0.2.20 (with Qt 4.8.1)
Tor 0.2.2.39 (with libevent-2.0.20-stable, zlib-1.2.7 and openssl-1.0.1c)
-Firefox 10.0.8esr
+Firefox 10.0.9esr
\_ Torbutton 1.4.6.3
|_ NoScript 2.5.7
|_ HTTPS-Everywhere 2.2.2
diff --git a/build-scripts/linux.mk b/build-scripts/linux.mk
index 7e9a20f..5e64cba 100644
--- a/build-scripts/linux.mk
+++ b/build-scripts/linux.mk
@@ -15,7 +15,7 @@
## Architecture
ARCH_TYPE=$(shell uname -m)
-BUILD_NUM=2
+BUILD_NUM=3
PLATFORM=Linux
## Build machine specific settings
diff --git a/build-scripts/osx.mk b/build-scripts/osx.mk
index f08b50c..6325e5f 100644
--- a/build-scripts/osx.mk
+++ b/build-scripts/osx.mk
@@ -15,7 +15,7 @@
## Architecture
ARCH_TYPE=x86_64
-BUILD_NUM=2
+BUILD_NUM=3
PLATFORM=MacOS
## Set OSX-specific backwards compatibility options
diff --git a/build-scripts/windows.mk b/build-scripts/windows.mk
index a871270..54b972c 100644
--- a/build-scripts/windows.mk
+++ b/build-scripts/windows.mk
@@ -13,7 +13,7 @@
### Configuration ###
#####################
-BUILD_NUM=2
+BUILD_NUM=3
PLATFORM=Windows
## Location of required libraries
diff --git a/changelog.linux-2.2 b/changelog.linux-2.2
index 96f62b6..386cab3 100644
--- a/changelog.linux-2.2
+++ b/changelog.linux-2.2
@@ -1,3 +1,9 @@
+Tor Browser Bundle (2.2.39-3); suite=linux
+
+ * Update Firefox to 10.0.9esr
+
+ -- Erinn Clark <erinn(a)torproject.org> Sat Oct 13 10:06:07 BST 2012
+
Tor Browser Bundle (2.2.39-2); suite=linux
* Update Firefox to 10.0.8esr
diff --git a/changelog.osx-2.2 b/changelog.osx-2.2
index 04f49a0..af3069a 100644
--- a/changelog.osx-2.2
+++ b/changelog.osx-2.2
@@ -1,3 +1,9 @@
+Tor Browser Bundle (2.2.39-3); suite=osx
+
+ * Update Firefox to 10.0.9esr
+
+ -- Erinn Clark <erinn(a)torproject.org> Sat Oct 13 10:06:02 BST 2012
+
Tor Browser Bundle (2.2.39-2); suite=osx
* Update Firefox to 10.0.8esr
diff --git a/changelog.windows-2.2 b/changelog.windows-2.2
index bc4fd3c..094b8d2 100644
--- a/changelog.windows-2.2
+++ b/changelog.windows-2.2
@@ -1,3 +1,9 @@
+Tor Browser Bundle (2.2.39-3); suite=windows
+
+ * Update Firefox to 10.0.9esr
+
+ -- Erinn Clark <erinn(a)torproject.org> Sat Oct 13 10:06:05 BST 2012
+
Tor Browser Bundle (2.2.39-2); suite=windows
* Update Firefox to 10.0.8esr
[torbrowser/maint-2.3] bump all stable tbbs to 2.2.39-2 for firefox 10.0.9esr
by erinn@torproject.org 13 Oct '12
commit 5b6d240d0486eeea51209f8c5ee6d1746a31251d
Author: Erinn Clark <erinn(a)torproject.org>
Date: Sat Oct 13 10:20:23 2012 +0100
bump all stable tbbs to 2.2.39-2 for firefox 10.0.9esr