commit 9440f4ffa20aa7ba2a0e3297cbf5b245bf2c1c91 Author: Erik eislo@wesleyan.edu Date: Mon Jul 30 18:09:13 2012 -0400
First complete build of unit tests for torexport.
There is a bug in test_export_csv_file which will be fixed tomorrow morning, otherwise all tests are functional and should be passing. --- stem/descriptor/export.py | 53 ++++++++------- test/mocking.py | 35 ++++++---- test/unit/descriptor/export.py | 147 ++++++++++++++++++++++++++++++++++------ 3 files changed, 176 insertions(+), 59 deletions(-)
diff --git a/stem/descriptor/export.py b/stem/descriptor/export.py index dd7f324..af62be5 100644 --- a/stem/descriptor/export.py +++ b/stem/descriptor/export.py @@ -1,7 +1,7 @@ import os, csv, sets, cStringIO
-def export_csv(descriptor, include_fields=[], exclude_fields=[]): +def export_csv(descriptor, include_fields=(), exclude_fields=()): """ Takes a single descriptor object, puts it in a list, and passes it to descriptors_csv_exp to build a csv. @@ -12,13 +12,15 @@ def export_csv(descriptor, include_fields=[], exclude_fields=[]):
:returns: single csv line as a string with one descriptor attribute per cell. """ - descr = [descriptor] + + descr = (descriptor,) return export_csvs(descr, include_fields=include_fields, exclude_fields=exclude_fields)
def export_csvs(descriptors, include_fields=[], exclude_fields=[], header=False): """ - Returns a string that is in csv format, ready to be placed in a .csv file. + Takes an iterable of descriptors, returns a string with one line per descriptor + where each line is a comma separated list of descriptor attributes.
:param list descrs: List of descriptor objects whose attributes will be written. :param list include_fields: list of attribute fields to include in the csv string. @@ -27,64 +29,67 @@ def export_csvs(descriptors, include_fields=[], exclude_fields=[], header=False) needed outside of csv_file_exp's call of this function.
:returns: csv string with one descriptor per line and one attribute per cell. + :raises: ValueError if more than one descriptor type (e.g. server_descriptor, + extrainfo_descriptor) is provided in the iterable. """
- _temp_file = cStringIO.StringIO() + # Need a file object to write to with DictWriter. + temp_file = cStringIO.StringIO()
first = True
for desc in descriptors: + import sys attr = vars(desc)
# Defining incl_fields and the dwriter object requires having access # to a descriptor object. if first: # All descriptor objects should be of the same type - # (i.e. server_descriptor.RelayDescriptor) + # (i.e. server_descriptor.RelayDescriptor) desc_type = type(desc)
# define incl_fields, 4 cases where final case is incl_fields already # defined and excl_fields left blank, so no action is necessary. if not include_fields and exclude_fields: - _incl = sets.Set(attr.keys()) - include_fields = list(_incl.difference(exclude_fields)) + incl = set(attr.keys()) + include_fields = list(incl.difference(exclude_fields))
elif not include_fields and not exclude_fields: include_fields = attr.keys()
elif include_fields and exclude_fields: - _incl = sets.Set(include_fields) - include_fields = list(_incl.difference(exclude_fields)) + incl = set(include_fields) + include_fields = list(incl.difference(exclude_fields))
- dwriter = csv.DictWriter(_temp_file, include_fields, extrasaction='ignore') - first = False + dwriter = csv.DictWriter(temp_file, include_fields, extrasaction='ignore')
if header: dwriter.writeheader() + first = False
if desc_type == type(desc): dwriter.writerow(attr) else: - raise ValueError('More than one type of descriptor was provided.') + raise ValueError('More than one descriptor type provided. Started with a %s, and just got a %s' % (desc_type, type(desc)))
- return _temp_file.getvalue() + return temp_file.getvalue() # cStringIO files are closed automatically when the current scope is exited.
-def export_csv_file(descriptors, document_location, header=True, include_fields=[], exclude_fields=[]): +def export_csv_file(descriptors, document, include_fields=[], exclude_fields=[], header=True): """ Writes descriptor attributes to a csv file on disk.
Calls get_csv_lines with the given argument, then writes the returned string to a file location specified by document_location.
- :param list descrs: descriptor objects with attributes to export as csv file. - :param str doc_loc: location and file name for csv file to be written to. - This overwrites existing files. - :param bool header: defaults to true, determines if doc will have a header row. - :param list incl_f: list of attribute fields to include in the csv line. - :param list excl_f: list of attribute fields to exclude from csv line. + :param list descriptors: descriptor objects with attributes to export as csv file. + :param object document: File object to be written to. + :param bool header: defaults to true, determines if document will have a header row. + :param list include_fields: list of attribute fields to include in the csv line. + :param list exclude_fields: list of attribute fields to exclude from csv line. """ - doc = open(document_location, 'w') - - for line in export_csvs(descriptors, include_fields=include_fields, exclude_fields=exclude_fields, head=header): - doc.write(line) + try: + document.write(export_csvs(descriptors, include_fields=include_fields, exclude_fields=exclude_fields, header=header)) + except AttributeError: + print "A valid document object was not provided; could not write" diff --git a/test/mocking.py b/test/mocking.py index 198bf70..b2d9737 100644 --- a/test/mocking.py +++ b/test/mocking.py @@ -58,27 +58,36 @@ def return_true(): return return_value(True) def return_false(): return return_value(False) def return_none(): return return_value(None)
-def return_for_args(args_to_return_value, default = None): +def return_for_args(args_to_return_value, kwarg_type=None, default=None): """ Returns a value if the arguments to it match something in a given - 'argument => return value' mapping. Otherwise, a default function - is called with the arguments. + 'argument => return value' mapping. In the case of keyword arguments, + a type must be specified so _return_value can check these arguments as + well. Otherwise, a default function is called with the arguments.
:param dict args_to_return_value: mapping of arguments to the value we should provide + :param object kwarg_type: type of kwarg mapping to be used in unwrapping these arguments. :param functor default: returns the value of this function if the args don't match something that we have, we raise a ValueError by default """ - - def _return_value(*args): - if args in args_to_return_value: - return args_to_return_value[args] - elif default is None: - arg_label = ", ".join([str(v) for v in args]) - raise ValueError("Unrecognized argument sent for return_for_args(): %s" % arg_label) + + def _return_value(*args, **kwargs): + # First handle the case in which we aren't expecting keyword args. + if kwarg_type == None: + argument = args else: - return default(args) + argument = args + (kwarg_type(**kwargs),) + + try: + return args_to_return_value[argument] + except KeyError: + if default is None: + arg_label = ", ".join(map(str, argument)) + raise ValueError("Unrecognized argument sent for return_for_args(): %s" % arg_label) + else: + return default(args)
return _return_value - + def raise_exception(exception): def _raise(*args): raise exception return _raise @@ -121,7 +130,7 @@ def mock(target, mock_call, target_module=None): target_function = target.__name__ MOCK_STATE[mocking_id] = (target_module, target_function, target)
- mock_wrapper = lambda *args: mock_call(*args) + mock_wrapper = lambda *args, **kwargs: mock_call(*args, **kwargs) mock_wrapper.__dict__["mock_id"] = mocking_id
# mocks the function with this wrapper diff --git a/test/unit/descriptor/export.py b/test/unit/descriptor/export.py index b1b78e3..a785957 100644 --- a/test/unit/descriptor/export.py +++ b/test/unit/descriptor/export.py @@ -5,9 +5,40 @@ import unittest import stem.descriptor.export as export import test.mocking as mocking
-SINGLE_DESCR_DICT = {'average_bandwidth': 5242880, 'onion_key': 'RSA PUB = JAIK', 'address': '79.139.135.90', '_digest': None, 'exit_policy': ['reject *:*'], 'fingerprint': '0045EB8B820DC410197B'} +from collections import namedtuple +import stem.descriptor as desc +import cStringIO
+# Create descriptor objects. +DESCR_DICT = {'average_bandwidth': 5242880, 'onion_key': 'RSA PUB = JAIK', 'address': '79.139.135.90', '_digest': None, 'exit_policy': ['reject *:*'], 'fingerprint': 'AAAAAAAAAAAAAAAAAAA'} +DESCR2_DICT = {'average_bandwidth': 5555555, 'onion_key': 'RSA PUB = GOUS', 'address': '100.1.1.1', '_digest': None, 'exit_policy': ['reject *:*'], 'fingerprint': 'BBBBBBBBBBBBBBBBBBBBB'} +DESCR3_DICT = {'bandwidth':12345,'average_bandwidth': 6666666, 'address': '101.0.0.1','extra_info':None} +RAW = 'router TORsinn3r 46.17.96.217 9001 0 0 platform Tor 0.2.3.19-rc on Linux bandwidth 4 5 6 ...andonandon'
+descriptor = desc.Descriptor(RAW) +descriptor.__dict__.update(DESCR_DICT) + +descriptor2 = desc.Descriptor(RAW) +descriptor2.__dict__.update(DESCR2_DICT) + +descriptor3 = desc.server_descriptor.RelayDescriptor(RAW, validate=False) +descriptor3.__dict__.update(DESCR3_DICT) + +# Expected return csv strings. +SINGLE_ALL = '5242880,RSA PUB = JAIK,AAAAAAAAAAAAAAAAAAA,,router TORsinn3r 46.17.96.217 9001 0 0 platform Tor 0.2.3.19-rc on Linux bandwidth 4 5 6 ...andonandon,['reject *:*'],79.139.135.90,' +SINGLE_PART = '79.139.135.90,['reject *:*']' +SINGLE_PART2 = '5242880,,router TORsinn3r 46.17.96.217 9001 0 0 platform Tor 0.2.3.19-rc on Linux bandwidth 4 5 6 ...andonandon,['reject *:*'],79.139.135.90,' +SINGLE_PART3 = '79.139.135.90,AAAAAAAAAAAAAAAAAAA' + +DOUBLE_ALL = '5242880,RSA PUB = JAIK,AAAAAAAAAAAAAAAAAAA,,router TORsinn3r 46.17.96.217 9001 0 0 platform Tor 0.2.3.19-rc on Linux bandwidth 4 5 6 ...andonandon,['reject *:*'],79.139.135.90,\r\n5555555,RSA PUB = GOUS,BBBBBBBBBBBBBBBBBBBBB,,router TORsinn3r 46.17.96.217 9001 0 0 platform Tor 0.2.3.19-rc on Linux bandwidth 4 5 6 ...andonandon,['reject *:*'],100.1.1.1,\r\n' +DOUBLE_PART = '79.139.135.90,['reject *:*']\r\n100.1.1.1,['reject *:*']\r\n' +DOUBLE_PART2 = '5242880,,router TORsinn3r 46.17.96.217 9001 0 0 platform Tor 0.2.3.19-rc on Linux bandwidth 4 5 6 ...andonandon,['reject *:*'],79.139.135.90,\r\n5555555,,router TORsinn3r 46.17.96.217 9001 0 0 platform Tor 0.2.3.19-rc on Linux bandwidth 4 5 6 ...andonandon,['reject *:*'],100.1.1.1,\r\n' + +SINGLE_ALL_HEAD = 'average_bandwidth,onion_key,fingerprint,_digest,_raw_contents,exit_policy,address,_path\r\n' + SINGLE_ALL + '\r\n' +SINGLE_PART3_HEAD = 'address,fingerprint\r\n' + SINGLE_PART3 +DOUBLE_ALL_HEAD = 'average_bandwidth,onion_key,fingerprint,_digest,_raw_contents,exit_policy,address,_path\r\n' + DOUBLE_ALL +DOUBLE_PART_HEAD = 'address,exit_policy\r\n' + DOUBLE_PART +DOUBLE_PART2_HEAD = 'average_bandwidth,_digest,_raw_contents,exit_policy,address,_path\r\n' + 
DOUBLE_PART2
class TestExport(unittest.TestCase): def tearDown(self): @@ -17,36 +48,108 @@ class TestExport(unittest.TestCase): """ Tests the export_csv function which takes a single descriptor object. """ + Fields = namedtuple('Fields', 'include_fields exclude_fields')
- # TODO we should be passing descriptor objects not just dicts. - csv_string = '5242880, RSA PUB = JAIK, 79.139.135.90,,['reject *:*'], 0045EB8B820DC410197B' - mocking.mock(export.export_csvs, mocking.return_value(csv_string)) - self.assertEqual(csv_string, export.export_csv(SINGLE_DESCR_DICT)) + # Descriptors must be an iterable + # named tuples replace dictionaries as dict keys must be immutable. + ret_vals = {((descriptor,), Fields(include_fields=(), exclude_fields=())):SINGLE_ALL, + ((descriptor,), Fields(include_fields=('address', 'exit_policy'), + exclude_fields=())):SINGLE_PART, + ((descriptor,), Fields(include_fields=(), + exclude_fields=('onion_key', 'fingerprint'))):SINGLE_PART2, + ((descriptor,), Fields(include_fields=('address', 'exit_policy', 'fingerprint'), + exclude_fields=('fingerprint',))):SINGLE_PART, + ((descriptor,), Fields(include_fields=('address', 'fingerprint'), + exclude_fields=('_digest',))):SINGLE_PART3 + } + mocking.mock(export.export_csvs, mocking.return_for_args(ret_vals, kwarg_type=Fields))
- csv_string = '79.139.135.90,,['reject *:*'], 0045EB8B820DC410197B' - mocking.mock(export.export_csvs, mocking.return_value(csv_string)) - self.assertEqual(csv_string, export.export_csv(SINGLE_DESCR_DICT, exclude_fields=['average_bandwidth', 'onion_key'])) + # Used tuples for incl/exclude_fields for parameter matching with ret_vals dict. + self.assertEqual(SINGLE_ALL, export.export_csv(descriptor)) + self.assertEqual(SINGLE_PART, export.export_csv(descriptor, + include_fields=('address', 'exit_policy'))) + self.assertEqual(SINGLE_PART2, export.export_csv(descriptor, + exclude_fields=('onion_key', 'fingerprint'))) + self.assertEqual(SINGLE_PART, export.export_csv(descriptor, + include_fields=('address', 'exit_policy', 'fingerprint'), exclude_fields=('fingerprint',))) + self.assertEqual(SINGLE_PART3, export.export_csv(descriptor, + include_fields=('address', 'fingerprint'), exclude_fields=('_digest',))) + + + def test_export_csvs(self): + """ + Test the export_csvs function which takes a list of descriptor objects. + """
- csv_string = 'RSA PUB = JAIK, 79.139.135.90,' - mocking.mock(export.export_csvs, mocking.return_value(csv_string)) - self.assertEqual(csv_string, export.export_csv(SINGLE_DESCR_DICT, include_fields=['onion_key', 'address'])) + # Single descriptor + #print "Type descriptor: %s" % type(descriptor) + #print "Type SINGLE_ALL: %s" % type(SINGLE_ALL) + #print SINGLE_ALL + #print "Type fn Call: %s" % type(export.export_csvs([descriptor])) + self.assertEquals(SINGLE_ALL + "\r\n", export.export_csvs([descriptor])) + self.assertEqual(SINGLE_PART + "\r\n", export.export_csvs([descriptor], + include_fields=['address', 'exit_policy'])) + self.assertEqual(SINGLE_PART2 + "\r\n", export.export_csvs([descriptor], + exclude_fields=['onion_key', 'fingerprint'])) + self.assertEqual(SINGLE_PART + "\r\n", export.export_csvs([descriptor], + include_fields=['address', 'exit_policy', 'fingerprint'], exclude_fields=['fingerprint']))
- # TODO 1 or two more cases to handle (subcases of overlap/no overlap - # incl & excl.) + # Multiple descriptors + #print "Sample Call: \n %s \n\n" % export.export_csvs([descriptor, descriptor2], header=True)
+ self.assertEqual(DOUBLE_ALL, export.export_csvs([descriptor, descriptor2])) + self.assertEqual(DOUBLE_PART, export.export_csvs([descriptor, descriptor2], + include_fields=['address', 'exit_policy'])) + #print export.export_csvs([descriptor, descriptor2], exclude_fields=['onion_key', 'fingerprint']) + #print "\n %s" % descriptor2.__dict__
- # TODO Make sure to undo mocking here or we won't be testing the next function. + self.assertEqual(DOUBLE_PART2, export.export_csvs([descriptor, descriptor2], + exclude_fields=['onion_key', 'fingerprint'])) + self.assertEqual(DOUBLE_PART, export.export_csvs([descriptor, descriptor2], + include_fields=['address', 'exit_policy', 'fingerprint'], exclude_fields=['fingerprint']))
- def test_export_csvs(self): - """ - Test the export_csvs function which takes a list of descriptor objects. - """ - pass + # Tests with headers + self.assertEqual(SINGLE_ALL_HEAD, export.export_csvs([descriptor], header=True)) + self.assertEqual(SINGLE_PART3_HEAD + "\r\n", export.export_csvs([descriptor], + include_fields=['address', 'fingerprint'], exclude_fields=['_digest'], header=True)) + self.assertEqual(DOUBLE_ALL_HEAD, export.export_csvs([descriptor, descriptor2], header=True)) + self.assertEqual(DOUBLE_PART_HEAD, export.export_csvs([descriptor, descriptor2], + include_fields=['address', 'exit_policy'], header=True)) + self.assertEqual(DOUBLE_PART2_HEAD, export.export_csvs([descriptor, descriptor2], + exclude_fields=['onion_key', 'fingerprint'], header=True)) + + # Other tests + self.assertRaises(ValueError, export.export_csvs, [descriptor, descriptor3]) + self.assertRaises(ValueError, export.export_csvs, [descriptor, descriptor3], + include_fields=['onion_key', 'address', 'fingerprint'], exclude_fields=['onion_key']) + +
def test_export_csv_file(self): """ Tests the export_csv_file function. """ - pass - # mocking.mock(open, mocking.return_for_args(##)) - # mocking.mock(export.export_csvs, ##) + sample_csv_string = 'This, is, a, sample, string.\r\nline, two.\r\n' + sample_csv_string2 = 'Another, sample\r\n,, second,\r\n' + sample_file = cStringIO.StringIO() + + # Must use named tuples again for ret_vals dictionary. + Fields = namedtuple('Fields', 'include_fields exclude_fields header') + + ret_vals = {(descriptor, sample_file):sample_csv_string, + (descriptor, sample_file, Fields(include_fields=('address', 'onion_key'), exclude_fields=('address',), header=False)):sample_csv_string2} + # TODO Ask Danner: mock it once then do both tests (not including assertRaises), or do separate mockings. + # the latter requires that we still include empty incl_fields and excl_fields parameters instead of + # letting them default to []. Same for header. + mocking.mock(export.export_csvs, mocking.return_for_args(ret_vals, kwarg_type=Fields)) + + export.export_csv_file(descriptor, sample_file) + self.assertEqual(sample_csv_string, sample_file.getvalue()) + + sample_file = cStringIO.StringIO() + + export.export_csv_file(descriptor, sample_file, include_fields=('address', 'onion_key'), exclude_fields=('address',), header=False) + self.assertEqual(sample_csv_string2, sample_file.getvalue()) + + # Make sure error is Raised when necessary. + self.assertRaises(export.export_csv_file, (descriptor, sample_csv_string))
tor-commits@lists.torproject.org