commit aab3bf57a87bb09d4d572195e568e73da489f38a
Author: Damian Johnson <atagar@torproject.org>
Date:   Sat Mar 9 23:07:23 2013 -0800
Marking stem.util.str_tools functions as being private
Swapping several of the functions that we don't want to vend as being private. The to_bytes() and to_unicode() functions in particular are simply there for python 3.x support, so they'll disappear if/when we drop python 2.x support.
---
 stem/connection.py                        |  8 ++++----
 stem/descriptor/__init__.py               |  8 ++++----
 stem/descriptor/extrainfo_descriptor.py   |  2 +-
 stem/descriptor/networkstatus.py          |  4 ++--
 stem/descriptor/router_status_entry.py    |  2 +-
 stem/descriptor/server_descriptor.py      |  6 +++---
 stem/response/authchallenge.py            |  4 ++--
 stem/response/events.py                   |  4 ++--
 stem/socket.py                            |  6 +++---
 stem/util/enum.py                         |  2 +-
 stem/util/str_tools.py                    | 27 +++++++++++----------------
 stem/util/system.py                       |  2 +-
 stem/util/term.py                         |  2 +-
 test/settings.cfg                         |  4 ++--
 test/unit/descriptor/server_descriptor.py |  2 +-
 test/unit/util/str_tools.py               | 22 +++++++++++-----------
 16 files changed, 50 insertions(+), 55 deletions(-)
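As the message above notes, these helpers exist only to paper over the bytes/unicode split between python 2.x and 3.x. For readers unfamiliar with the pattern, here is a minimal, stem-independent sketch of the same version-dispatch idea (the function names and the latin-1/utf-8 choices mirror the diff below, but this is an illustration rather than stem's exact code):

import sys

if sys.version_info[0] >= 3:
  def example_to_bytes(msg):
    # python 3 strings are unicode, so encode them down to bytes
    return msg.encode("latin-1", "replace") if isinstance(msg, str) else msg

  def example_to_unicode(msg):
    # bytes get decoded, anything already unicode passes through untouched
    return msg.decode("utf-8", "replace") if isinstance(msg, bytes) else msg
else:
  def example_to_bytes(msg):
    # python 2 byte strings are left alone, only unicode needs encoding
    return msg.encode("latin-1", "replace") if isinstance(msg, unicode) else msg

  def example_to_unicode(msg):
    # python 2 str instances get decoded into unicode
    return msg.decode("utf-8", "replace") if isinstance(msg, str) else msg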
diff --git a/stem/connection.py b/stem/connection.py index dc70bce..4f63378 100644 --- a/stem/connection.py +++ b/stem/connection.py @@ -596,8 +596,8 @@ def authenticate_cookie(controller, cookie_path, suppress_ctl_errors = True): # This seems dumb but oh well. Converting the result to unicode so it won't # misbehave.
- auth_token_hex = binascii.b2a_hex(stem.util.str_tools.to_bytes(cookie_data)) - msg = "AUTHENTICATE %s" % stem.util.str_tools.to_unicode(auth_token_hex) + auth_token_hex = binascii.b2a_hex(stem.util.str_tools._to_bytes(cookie_data)) + msg = "AUTHENTICATE %s" % stem.util.str_tools._to_unicode(auth_token_hex) auth_response = _msg(controller, msg)
# if we got anything but an OK response then error @@ -692,7 +692,7 @@ def authenticate_safecookie(controller, cookie_path, suppress_ctl_errors = True) client_nonce = os.urandom(32)
try: - client_nonce_hex = binascii.b2a_hex(stem.util.str_tools.to_bytes(client_nonce)) + client_nonce_hex = binascii.b2a_hex(stem.util.str_tools._to_bytes(client_nonce)) authchallenge_response = _msg(controller, "AUTHCHALLENGE SAFECOOKIE %s" % client_nonce_hex)
if not authchallenge_response.is_ok(): @@ -744,7 +744,7 @@ def authenticate_safecookie(controller, cookie_path, suppress_ctl_errors = True) CLIENT_HASH_CONSTANT, cookie_data + client_nonce + authchallenge_response.server_nonce)
- auth_response = _msg(controller, "AUTHENTICATE %s" % (binascii.b2a_hex(stem.util.str_tools.to_bytes(client_hash)))) + auth_response = _msg(controller, "AUTHENTICATE %s" % (binascii.b2a_hex(stem.util.str_tools._to_bytes(client_hash)))) except stem.ControllerError, exc: try: controller.connect() diff --git a/stem/descriptor/__init__.py b/stem/descriptor/__init__.py index 59d66a4..4f0a596 100644 --- a/stem/descriptor/__init__.py +++ b/stem/descriptor/__init__.py @@ -307,7 +307,7 @@ class Descriptor(object): if stem.prereq.is_python_3(): return self._raw_contents else: - return str(stem.util.str_tools.to_bytes(self._raw_contents)) + return str(stem.util.str_tools._to_bytes(self._raw_contents))
class _UnicodeReader(object): @@ -333,10 +333,10 @@ class _UnicodeReader(object): return self.wrapped_file.next()
def read(self, n = -1): - return stem.util.str_tools.to_unicode(self.wrapped_file.read(n)) + return stem.util.str_tools._to_unicode(self.wrapped_file.read(n))
def readline(self): - return stem.util.str_tools.to_unicode(self.wrapped_file.readline()) + return stem.util.str_tools._to_unicode(self.wrapped_file.readline())
def readlines(self, sizehint = None): # being careful to do in-place conversion so we don't accidently double our @@ -348,7 +348,7 @@ class _UnicodeReader(object): results = self.wrapped_file.readlines()
for i in xrange(len(results)): - results[i] = stem.util.str_tools.to_unicode(results[i]) + results[i] = stem.util.str_tools._to_unicode(results[i])
return results
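The _UnicodeReader touched here is a small decorator around a file object that decodes every read on the fly. A rough standalone sketch of the same wrapper pattern (the class and attribute names are illustrative, not stem's exact implementation):

class UnicodeFileWrapper(object):
  def __init__(self, wrapped_file):
    self.wrapped_file = wrapped_file

  def read(self, n = -1):
    return self.wrapped_file.read(n).decode("utf-8", "replace")

  def readline(self):
    return self.wrapped_file.readline().decode("utf-8", "replace")

  def readlines(self, sizehint = None):
    # convert in place so we don't briefly keep two full copies of the content
    lines = self.wrapped_file.readlines()

    for i in range(len(lines)):
      lines[i] = lines[i].decode("utf-8", "replace")

    return lines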
diff --git a/stem/descriptor/extrainfo_descriptor.py b/stem/descriptor/extrainfo_descriptor.py index 3d7bb3c..5f1c423 100644 --- a/stem/descriptor/extrainfo_descriptor.py +++ b/stem/descriptor/extrainfo_descriptor.py @@ -825,7 +825,7 @@ class RelayExtraInfoDescriptor(ExtraInfoDescriptor): # our digest is calculated from everything except our signature raw_content, ending = str(self), "\nrouter-signature\n" raw_content = raw_content[:raw_content.find(ending) + len(ending)] - self._digest = hashlib.sha1(stem.util.str_tools.to_bytes(raw_content)).hexdigest().upper() + self._digest = hashlib.sha1(stem.util.str_tools._to_bytes(raw_content)).hexdigest().upper()
return self._digest
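For context on the hunk above: an extra-info descriptor's digest is the uppercase hex SHA-1 of everything up to and including its "router-signature" line. A hedged, standalone sketch of that calculation (the function name is made up for illustration):

import hashlib

def extrainfo_digest(descriptor_text):
  # hash the descriptor content up to and including the router-signature line
  ending = "\nrouter-signature\n"
  content = descriptor_text[:descriptor_text.find(ending) + len(ending)]

  return hashlib.sha1(content.encode("latin-1", "replace")).hexdigest().upper()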
diff --git a/stem/descriptor/networkstatus.py b/stem/descriptor/networkstatus.py index e032f76..de0b945 100644 --- a/stem/descriptor/networkstatus.py +++ b/stem/descriptor/networkstatus.py @@ -265,7 +265,7 @@ class NetworkStatusDocumentV2(NetworkStatusDocument): """
def __init__(self, raw_content, validate = True): - raw_content = stem.util.str_tools.to_unicode(raw_content) + raw_content = stem.util.str_tools._to_unicode(raw_content) super(NetworkStatusDocumentV2, self).__init__(raw_content)
self.version = None @@ -464,7 +464,7 @@ class NetworkStatusDocumentV3(NetworkStatusDocument): :raises: **ValueError** if the document is invalid """
- raw_content = stem.util.str_tools.to_unicode(raw_content) + raw_content = stem.util.str_tools._to_unicode(raw_content) super(NetworkStatusDocumentV3, self).__init__(raw_content) document_file = StringIO.StringIO(raw_content)
diff --git a/stem/descriptor/router_status_entry.py b/stem/descriptor/router_status_entry.py index ef810ec..68c1b38 100644 --- a/stem/descriptor/router_status_entry.py +++ b/stem/descriptor/router_status_entry.py @@ -703,7 +703,7 @@ def _decode_fingerprint(identity, validate): fingerprint = ""
try: - identity_decoded = base64.b64decode(stem.util.str_tools.to_bytes(identity)) + identity_decoded = base64.b64decode(stem.util.str_tools._to_bytes(identity)) except (TypeError, binascii.Error): if not validate: return None diff --git a/stem/descriptor/server_descriptor.py b/stem/descriptor/server_descriptor.py index 298b480..89795c9 100644 --- a/stem/descriptor/server_descriptor.py +++ b/stem/descriptor/server_descriptor.py @@ -665,7 +665,7 @@ class RelayDescriptor(ServerDescriptor):
if start >= 0 and sig_start > 0 and end > start: for_digest = raw_descriptor[start:end] - digest_hash = hashlib.sha1(stem.util.str_tools.to_bytes(for_digest)) + digest_hash = hashlib.sha1(stem.util.str_tools._to_bytes(for_digest)) self._digest = digest_hash.hexdigest().upper() else: raise ValueError("unable to calculate digest for descriptor") @@ -686,7 +686,7 @@ class RelayDescriptor(ServerDescriptor): if self.fingerprint: # calculate the signing key hash
- key_der_as_hash = hashlib.sha1(stem.util.str_tools.to_bytes(key_as_bytes)).hexdigest() + key_der_as_hash = hashlib.sha1(stem.util.str_tools._to_bytes(key_as_bytes)).hexdigest()
if key_der_as_hash != self.fingerprint.lower(): log.warn("Signing key hash: %s != fingerprint: %s" % (key_der_as_hash, self.fingerprint.lower())) @@ -821,7 +821,7 @@ class RelayDescriptor(ServerDescriptor):
# get the key representation in bytes
- key_bytes = base64.b64decode(stem.util.str_tools.to_bytes(key_as_string)) + key_bytes = base64.b64decode(stem.util.str_tools._to_bytes(key_as_string))
return key_bytes
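The server descriptor hunks above work together: the PEM-style armor is stripped and the base64 body decoded to get the key's DER bytes, and the SHA-1 of those bytes should match the relay's fingerprint. A small illustrative sketch of that check (the helper names are hypothetical, not stem's):

import base64
import hashlib

def key_block_to_der(key_block):
  # drop the -----BEGIN/END----- armor lines, then decode the base64 body
  body = [line for line in key_block.splitlines() if not line.startswith("-----")]

  return base64.b64decode("".join(body))

def signing_key_matches_fingerprint(key_block, fingerprint):
  key_hash = hashlib.sha1(key_block_to_der(key_block)).hexdigest()

  return key_hash == fingerprint.lower()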
diff --git a/stem/response/authchallenge.py b/stem/response/authchallenge.py index c14369b..93be385 100644 --- a/stem/response/authchallenge.py +++ b/stem/response/authchallenge.py @@ -41,7 +41,7 @@ class AuthChallengeResponse(stem.response.ControlMessage): if not stem.util.tor_tools.is_hex_digits(value, 64): raise stem.ProtocolError("SERVERHASH has an invalid value: %s" % value)
- self.server_hash = binascii.a2b_hex(stem.util.str_tools.to_bytes(value)) + self.server_hash = binascii.a2b_hex(stem.util.str_tools._to_bytes(value)) else: raise stem.ProtocolError("Missing SERVERHASH mapping: %s" % line)
@@ -51,6 +51,6 @@ class AuthChallengeResponse(stem.response.ControlMessage): if not stem.util.tor_tools.is_hex_digits(value, 64): raise stem.ProtocolError("SERVERNONCE has an invalid value: %s" % value)
- self.server_nonce = binascii.a2b_hex(stem.util.str_tools.to_bytes(value)) + self.server_nonce = binascii.a2b_hex(stem.util.str_tools._to_bytes(value)) else: raise stem.ProtocolError("Missing SERVERNONCE mapping: %s" % line) diff --git a/stem/response/events.py b/stem/response/events.py index af8c8e5..a2c43ff 100644 --- a/stem/response/events.py +++ b/stem/response/events.py @@ -345,7 +345,7 @@ class CircuitEvent(Event):
if self.created is not None: try: - self.created = str_tools.parse_iso_timestamp(self.created) + self.created = str_tools._parse_iso_timestamp(self.created) except ValueError, exc: raise stem.ProtocolError("Unable to parse create date (%s): %s" % (exc, self))
@@ -401,7 +401,7 @@ class CircMinorEvent(Event):
if self.created is not None: try: - self.created = str_tools.parse_iso_timestamp(self.created) + self.created = str_tools._parse_iso_timestamp(self.created) except ValueError, exc: raise stem.ProtocolError("Unable to parse create date (%s): %s" % (exc, self))
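Both CIRC and CIRC_MINOR events funnel their CREATED field through the same ISO 8601 parser. For reference, a value of that shape can be parsed with the standard library like so (this uses datetime.strptime directly with an example value, not stem's own parser):

import datetime

def parse_created(entry):
  # accepts values such as 2012-11-08T16:48:41.420251
  return datetime.datetime.strptime(entry, "%Y-%m-%dT%H:%M:%S.%f")

print(parse_created("2012-11-08T16:48:41.420251"))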
diff --git a/stem/socket.py b/stem/socket.py index 3b9498c..d459fb7 100644 --- a/stem/socket.py +++ b/stem/socket.py @@ -425,7 +425,7 @@ def send_message(control_file, message, raw = False): message = send_formatting(message)
try: - control_file.write(stem.util.str_tools.to_bytes(message)) + control_file.write(stem.util.str_tools._to_bytes(message)) control_file.flush()
log_message = message.replace("\r\n", "\n").rstrip() @@ -473,7 +473,7 @@ def recv_message(control_file): line = control_file.readline()
if stem.prereq.is_python_3(): - line = stem.util.str_tools.to_unicode(line) + line = stem.util.str_tools._to_unicode(line) except AttributeError: # if the control_file has been closed then we will receive: # AttributeError: 'NoneType' object has no attribute 'recv' @@ -542,7 +542,7 @@ def recv_message(control_file): line = control_file.readline()
if stem.prereq.is_python_3(): - line = stem.util.str_tools.to_unicode(line) + line = stem.util.str_tools._to_unicode(line) except socket.error, exc: prefix = logging_prefix % "SocketClosed" log.info(prefix + "received an exception while mid-way through a data reply (exception: \"%s\", read content: \"%s\")" % (exc, log.escape(raw_content))) diff --git a/stem/util/enum.py b/stem/util/enum.py index bd983ee..504af48 100644 --- a/stem/util/enum.py +++ b/stem/util/enum.py @@ -75,7 +75,7 @@ class Enum(object):
for entry in args: if isinstance(entry, str): - key, val = entry, stem.util.str_tools.to_camel_case(entry) + key, val = entry, stem.util.str_tools._to_camel_case(entry) elif isinstance(entry, tuple) and len(entry) == 2: key, val = entry else: diff --git a/stem/util/str_tools.py b/stem/util/str_tools.py index 14198b4..d575cfa 100644 --- a/stem/util/str_tools.py +++ b/stem/util/str_tools.py @@ -8,16 +8,11 @@ Toolkit for various string activity.
::
-  to_bytes - normalizes string ASCII bytes
-  to_unicode - normalizes string to unicode
-  to_camel_case - converts a string to camel case
   get_size_label - human readable label for a number of bytes
   get_time_label - human readable label for a number of seconds
   get_time_labels - human readable labels for each time unit
   get_short_time_label - condensed time label output
   parse_short_time_label - seconds represented by a short time label
-
-  parse_iso_timestamp - parses an ISO timestamp as a datetime value
 """
import codecs @@ -53,32 +48,32 @@ TIME_UNITS = ( )
if stem.prereq.is_python_3(): - def _to_bytes(msg): + def _to_bytes_impl(msg): if isinstance(msg, str): return codecs.latin_1_encode(msg, "replace")[0] else: return msg
- def _to_unicode(msg): + def _to_unicode_impl(msg): if msg is not None and not isinstance(msg, str): return msg.decode("utf-8", "replace") else: return msg else: - def _to_bytes(msg): + def _to_bytes_impl(msg): if msg is not None and isinstance(msg, unicode): return codecs.latin_1_encode(msg, "replace")[0] else: return msg
- def _to_unicode(msg): + def _to_unicode_impl(msg): if msg is not None and not isinstance(msg, unicode): return msg.decode("utf-8", "replace") else: return msg
-def to_bytes(msg): +def _to_bytes(msg): """ Provides the ASCII bytes for the given string. This is purely to provide python 3 compatability, normalizing the unicode/ASCII change in the version @@ -91,10 +86,10 @@ def to_bytes(msg): :returns: ASCII bytes for string """
- return _to_bytes(msg) + return _to_bytes_impl(msg)
-def to_unicode(msg): +def _to_unicode(msg): """ Provides the unicode string for the given ASCII bytes. This is purely to provide python 3 compatability, normalizing the unicode/ASCII change in the @@ -105,16 +100,16 @@ def to_unicode(msg): :returns: unicode conversion """
- return _to_unicode(msg) + return _to_unicode_impl(msg)
-def to_camel_case(label, divider = "_", joiner = " "): +def _to_camel_case(label, divider = "_", joiner = " "): """ Converts the given string to camel case, ie:
::
- >>> to_camel_case("I_LIKE_PEPPERJACK!") + >>> _to_camel_case("I_LIKE_PEPPERJACK!") 'I Like Pepperjack!'
:param str label: input string to be converted @@ -315,7 +310,7 @@ def parse_short_time_label(label): raise ValueError("Non-numeric value in time entry: %s" % label)
-def parse_iso_timestamp(entry): +def _parse_iso_timestamp(entry): """ Parses the ISO 8601 standard that provides for timestamps like...
diff --git a/stem/util/system.py b/stem/util/system.py index 688475f..0a94b57 100644 --- a/stem/util/system.py +++ b/stem/util/system.py @@ -814,7 +814,7 @@ def _set_prctl_name(process_name):
libc = ctypes.CDLL(ctypes.util.find_library("c")) name_buffer = ctypes.create_string_buffer(len(process_name) + 1) - name_buffer.value = stem.util.str_tools.to_bytes(process_name) + name_buffer.value = stem.util.str_tools._to_bytes(process_name) libc.prctl(PR_SET_NAME, ctypes.byref(name_buffer), 0, 0, 0)
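The _set_prctl_name hunk above is one of the spots where a caller genuinely needs bytes for ctypes. As a rough linux-only illustration of the same prctl call outside stem (the function name here is made up):

import ctypes
import ctypes.util

def set_process_name(name):
  # PR_SET_NAME is 15; the kernel truncates the name to 15 chars plus a null
  PR_SET_NAME = 15

  libc = ctypes.CDLL(ctypes.util.find_library("c"))
  name_buffer = ctypes.create_string_buffer(len(name) + 1)
  name_buffer.value = name.encode("latin-1", "replace")

  libc.prctl(PR_SET_NAME, ctypes.byref(name_buffer), 0, 0, 0)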
diff --git a/stem/util/term.py b/stem/util/term.py index 5f54b5d..05efcfe 100644 --- a/stem/util/term.py +++ b/stem/util/term.py @@ -87,7 +87,7 @@ def format(msg, *attr):
encodings = [] for text_attr in attr: - text_attr, encoding = stem.util.str_tools.to_camel_case(text_attr), None + text_attr, encoding = stem.util.str_tools._to_camel_case(text_attr), None encoding = FG_ENCODING.get(text_attr, encoding) encoding = BG_ENCODING.get(text_attr, encoding) encoding = ATTR_ENCODING.get(text_attr, encoding) diff --git a/test/settings.cfg b/test/settings.cfg index c184eec..8e96214 100644 --- a/test/settings.cfg +++ b/test/settings.cfg @@ -158,8 +158,8 @@ pyflakes.ignore stem/prereq.py => 'RSA' imported but unused pyflakes.ignore stem/prereq.py => 'asn1' imported but unused pyflakes.ignore stem/prereq.py => 'long_to_bytes' imported but unused pyflakes.ignore stem/descriptor/__init__.py => redefinition of unused 'OrderedDict' from line 60 -pyflakes.ignore stem/util/str_tools.py => redefinition of function '_to_bytes' from line 56 -pyflakes.ignore stem/util/str_tools.py => redefinition of function '_to_unicode' from line 62 +pyflakes.ignore stem/util/str_tools.py => redefinition of function '_to_bytes_impl' from line 51 +pyflakes.ignore stem/util/str_tools.py => redefinition of function '_to_unicode_impl' from line 57 pyflakes.ignore test/mocking.py => undefined name 'builtins' pyflakes.ignore test/unit/response/events.py => 'from stem import *' used; unable to detect undefined names
diff --git a/test/unit/descriptor/server_descriptor.py b/test/unit/descriptor/server_descriptor.py index 07fb50c..3da01b5 100644 --- a/test/unit/descriptor/server_descriptor.py +++ b/test/unit/descriptor/server_descriptor.py @@ -210,7 +210,7 @@ class TestServerDescriptor(unittest.TestCase): desc_text += "\ntrailing text that should be ignored, ho hum"
# running _parse_file should provide an iterator with a single descriptor - desc_iter = stem.descriptor.server_descriptor._parse_file(StringIO.StringIO(stem.util.str_tools.to_unicode(desc_text))) + desc_iter = stem.descriptor.server_descriptor._parse_file(StringIO.StringIO(stem.util.str_tools._to_unicode(desc_text))) desc_entries = list(desc_iter) self.assertEquals(1, len(desc_entries)) desc = desc_entries[0] diff --git a/test/unit/util/str_tools.py b/test/unit/util/str_tools.py index 4b35278..550edeb 100644 --- a/test/unit/util/str_tools.py +++ b/test/unit/util/str_tools.py @@ -11,19 +11,19 @@ from stem.util import str_tools class TestStrTools(unittest.TestCase): def test_to_camel_case(self): """ - Checks the to_camel_case() function. + Checks the _to_camel_case() function. """
# test the pydoc example - self.assertEquals("I Like Pepperjack!", str_tools.to_camel_case("I_LIKE_PEPPERJACK!")) + self.assertEquals("I Like Pepperjack!", str_tools._to_camel_case("I_LIKE_PEPPERJACK!"))
# check a few edge cases - self.assertEquals("", str_tools.to_camel_case("")) - self.assertEquals("Hello", str_tools.to_camel_case("hello")) - self.assertEquals("Hello", str_tools.to_camel_case("HELLO")) - self.assertEquals("Hello World", str_tools.to_camel_case("hello__world")) - self.assertEquals("Hello\tworld", str_tools.to_camel_case("hello\tWORLD")) - self.assertEquals("Hello\t\tWorld", str_tools.to_camel_case("hello__world", "_", "\t")) + self.assertEquals("", str_tools._to_camel_case("")) + self.assertEquals("Hello", str_tools._to_camel_case("hello")) + self.assertEquals("Hello", str_tools._to_camel_case("HELLO")) + self.assertEquals("Hello World", str_tools._to_camel_case("hello__world")) + self.assertEquals("Hello\tworld", str_tools._to_camel_case("hello\tWORLD")) + self.assertEquals("Hello\t\tWorld", str_tools._to_camel_case("hello__world", "_", "\t"))
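These expectations pin down the conversion: split on the divider, title-case each piece, and rejoin with the joiner. A standalone sketch that satisfies the unambiguous cases above (an illustration, not stem's exact implementation):

def to_camel_case_sketch(label, divider = "_", joiner = " "):
  words = []

  for entry in label.split(divider):
    if len(entry) == 0:
      words.append("")
    elif len(entry) == 1:
      words.append(entry.upper())
    else:
      # capitalize the first character and lowercase the rest
      words.append(entry[0].upper() + entry[1:].lower())

  return joiner.join(words)

print(to_camel_case_sketch("I_LIKE_PEPPERJACK!"))  # I Like Pepperjack!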
def test_get_size_label(self): """ @@ -125,7 +125,7 @@ class TestStrTools(unittest.TestCase):
def test_parse_iso_timestamp(self): """ - Checks the parse_iso_timestamp() function. + Checks the _parse_iso_timestamp() function. """
test_inputs = { @@ -138,7 +138,7 @@ class TestStrTools(unittest.TestCase): }
for arg, expected in test_inputs.items(): - self.assertEqual(expected, str_tools.parse_iso_timestamp(arg)) + self.assertEqual(expected, str_tools._parse_iso_timestamp(arg))
invalid_input = [ None, @@ -150,4 +150,4 @@ class TestStrTools(unittest.TestCase): ]
for arg in invalid_input: - self.assertRaises(ValueError, str_tools.parse_iso_timestamp, arg) + self.assertRaises(ValueError, str_tools._parse_iso_timestamp, arg)